Commit ade4b5bf72 by sjunges, 7 years ago
  1. .gitignore (2)
  2. .travis.yml (398)
  3. CHANGELOG.md (11)
  4. CMakeLists.txt (92)
  5. README.md (4)
  6. resources/3rdparty/CMakeLists.txt (4)
  7. resources/3rdparty/cudd-3.0.0/Makefile.in (3190)
  8. resources/3rdparty/cudd-3.0.0/aclocal.m4 (1256)
  9. resources/3rdparty/cudd-3.0.0/configure (19890)
  10. resources/cmake/find_modules/FindGurobi.cmake (15)
  11. resources/cmake/find_modules/FindZ3.cmake (4)
  12. resources/cmake/macros/GetGitRevisionDescription.cmake (52)
  13. resources/cmake/macros/GetGitRevisionDescription.cmake.in (13)
  14. resources/cmake/macros/export.cmake (20)
  15. resources/cmake/stormConfigVersion.cmake.in (11)
  16. resources/examples/testfiles/ctmc/cluster2.drn (554)
  17. resources/examples/testfiles/dft/and.dft (4)
  18. resources/examples/testfiles/dft/fdep.dft (8)
  19. resources/examples/testfiles/dft/fdep2.dft (5)
  20. resources/examples/testfiles/dft/fdep3.dft (5)
  21. resources/examples/testfiles/dft/fdep4.dft (7)
  22. resources/examples/testfiles/dft/fdep5.dft (7)
  23. resources/examples/testfiles/dft/or.dft (4)
  24. resources/examples/testfiles/dft/pand.dft (4)
  25. resources/examples/testfiles/dft/pdep.dft (12)
  26. resources/examples/testfiles/dft/pdep2.dft (9)
  27. resources/examples/testfiles/dft/pdep3.dft (5)
  28. resources/examples/testfiles/dft/pdep4.dft (7)
  29. resources/examples/testfiles/dft/por.dft (5)
  30. resources/examples/testfiles/dft/seq.dft (5)
  31. resources/examples/testfiles/dft/seq2.dft (6)
  32. resources/examples/testfiles/dft/seq3.dft (6)
  33. resources/examples/testfiles/dft/seq4.dft (7)
  34. resources/examples/testfiles/dft/seq5.dft (9)
  35. resources/examples/testfiles/dft/spare.dft (5)
  36. resources/examples/testfiles/dft/spare2.dft (8)
  37. resources/examples/testfiles/dft/spare3.dft (10)
  38. resources/examples/testfiles/dft/spare4.dft (9)
  39. resources/examples/testfiles/dft/spare5.dft (9)
  40. resources/examples/testfiles/dft/spare6.dft (7)
  41. resources/examples/testfiles/dft/spare7.dft (5)
  42. resources/examples/testfiles/dft/spare8.dft (7)
  43. resources/examples/testfiles/dft/voting.dft (5)
  44. resources/examples/testfiles/dft/voting2.dft (5)
  45. resources/examples/testfiles/dft/voting3.dft (5)
  46. resources/examples/testfiles/dft/voting4.dft (6)
  47. resources/examples/testfiles/dtmc/crowds-5-5.drn (32337)
  48. resources/examples/testfiles/ma/jobscheduler.drn (71)
  49. resources/examples/testfiles/mdp/prism-mec-example1.nm (12)
  50. resources/examples/testfiles/mdp/prism-mec-example2.nm (13)
  51. resources/examples/testfiles/mdp/two_dice.drn (2)
  52. src/storm-cli-utilities/CMakeLists.txt (2)
  53. src/storm-cli-utilities/cli.cpp (27)
  54. src/storm-cli-utilities/cli.h (7)
  55. src/storm-cli-utilities/model-handling.h (108)
  56. src/storm-dft-cli/CMakeLists.txt (2)
  57. src/storm-dft-cli/storm-dft.cpp (201)
  58. src/storm-dft/CMakeLists.txt (2)
  59. src/storm-dft/api/storm-dft.cpp (67)
  60. src/storm-dft/api/storm-dft.h (117)
  61. src/storm-dft/builder/DFTBuilder.cpp (118)
  62. src/storm-dft/builder/DFTBuilder.h (50)
  63. src/storm-dft/modelchecker/dft/DFTASFChecker.h (8)
  64. src/storm-dft/modelchecker/dft/DFTModelChecker.cpp (58)
  65. src/storm-dft/modelchecker/dft/DFTModelChecker.h (7)
  66. src/storm-dft/parser/DFTGalileoParser.cpp (279)
  67. src/storm-dft/parser/DFTGalileoParser.h (58)
  68. src/storm-dft/parser/DFTJsonParser.h (4)
  69. src/storm-dft/settings/DftSettings.cpp (10)
  70. src/storm-dft/settings/modules/DftIOSettings.cpp (9)
  71. src/storm-dft/settings/modules/DftIOSettings.h (10)
  72. src/storm-dft/storage/BucketPriorityQueue.cpp (2)
  73. src/storm-dft/storage/dft/DFT.cpp (72)
  74. src/storm-dft/storage/dft/DFT.h (16)
  75. src/storm-dft/transformations/DftToGspnTransformator.cpp (826)
  76. src/storm-dft/transformations/DftToGspnTransformator.h (219)
  77. src/storm-gspn-cli/CMakeLists.txt (2)
  78. src/storm-gspn-cli/storm-gspn.cpp (28)
  79. src/storm-gspn/CMakeLists.txt (2)
  80. src/storm-gspn/api/storm-gspn.cpp (61)
  81. src/storm-gspn/api/storm-gspn.h (17)
  82. src/storm-gspn/parser/GreatSpnEditorProjectParser.cpp (2)
  83. src/storm-gspn/parser/PnmlParser.cpp (5)
  84. src/storm-gspn/settings/modules/GSPNExportSettings.cpp (11)
  85. src/storm-gspn/settings/modules/GSPNExportSettings.h (8)
  86. src/storm-gspn/storage/gspn/GSPN.cpp (76)
  87. src/storm-gspn/storage/gspn/GSPN.h (14)
  88. src/storm-gspn/storage/gspn/GspnBuilder.cpp (2)
  89. src/storm-gspn/storage/gspn/GspnBuilder.h (2)
  90. src/storm-gspn/storage/gspn/GspnJsonExporter.cpp (203)
  91. src/storm-gspn/storage/gspn/GspnJsonExporter.h (71)
  92. src/storm-gspn/storage/gspn/Place.cpp (3)
  93. src/storm-gspn/storage/gspn/Place.h (4)
  94. src/storm-gspn/storm-gspn.h (62)
  95. src/storm-pars-cli/CMakeLists.txt (2)
  96. src/storm-pars/CMakeLists.txt (2)
  97. src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp (92)
  98. src/storm-pars/settings/ParsSettings.cpp (12)
  99. src/storm-pgcl-cli/CMakeLists.txt (2)
  100. src/storm-pgcl/CMakeLists.txt (2)

2
.gitignore

@@ -52,7 +52,5 @@ nbproject/
*.out
resources/3rdparty/cudd-3.0.0/Makefile.in
resources/3rdparty/cudd-3.0.0/aclocal.m4
# Python config
stormpy/setup.cfg
# Travis helpers
travis/mtime_cache/cache.json

398
.travis.yml

@@ -1,5 +1,3 @@
# This file was inspired from https://github.com/google/fruit
#
# General config
#
@@ -27,7 +25,7 @@ notifications:
on_failure: always
on_success: change
recipients:
secure: "Q9CW/PtyWkZwExDrfFFb9n1STGYsRfI6awv1bZHcGwfrDvhpXoMCuF8CwviIfilm7fFJJEoKizTtdRpY0HrOnY/8KY111xrtcFYosxdndv7xbESodIxQOUoIEFCO0mCNHwWYisoi3dAA7H3Yn661EsxluwHjzX5vY0xSbw0n229hlsFz092HwGLSU33zHl7eXpQk+BOFmBTRGlOq9obtDZ17zbHz1oXFZntHK/JDUIYP0b0uq8NvE2jM6CIVdcuSwmIkOhZJoO2L3Py3rBbPci+L2PSK4Smv2UjCPF8KusnOaFIyDB3LcNM9Jqq5ssJMrK/KaO6BiuYrOZXYWZ7KEg3Y/naC8HjOH1dzty+P7oW46sb9F03pTsufqD4R7wcK+9wROTztO6aJPDG/IPH7EWgrBTxqlOkVRwi2eYuQouZpZUW6EMClKbMHMIxCH2S8aOk/r1w2cNwmPEcunnP0nl413x/ByHr4fTPFykPj8pQxIsllYjpdWBRQfDOauKWGzk6LcrFW0qpWP+/aJ2vYu/IoZQMG5lMHbp6Y1Lg09pYm7Q983v3b7D+JvXhOXMyGq91HyPahA2wwKoG1GA4uoZ2I95/IFYNiKkelDd3WTBoFLNF9YFoEJNdCywm1fO2WY4WkyEFBuQcgDA+YpFMJJMxjTbivYk9jvHk2gji//2w="
secure: "BoMQTBWnkh4ZLIHEaKu0tAKDohhVmOQ2pz/fjc+ScKG8mtvXqtpx0TiyePOUV1MuYNZiAP7x4mwABcoid55SwZ4+LPjd8uxXNfOji9B9GW5YqbqRvAeh7Es7dx48MyLYPIezjoryHH9R3Q2zZ9gmxgtw5eirjURcLNTXpKAwq/oOsKvh+vhOx4Qierw98wEXjMV7ipBzE4cfkgUbbX7oxGh1nsAsCew+rRmNLijfmE9tctYdH5W0wE+zC9ik+12Xyk3FwsDIABirPHfeCcEl+b9I0h1a2vZSZIA+sCDkIGKTiv9pCnsthn5LJc9pMLX7B/Wf6xLGMzpSiw3P1ZzjXpOE026WuyhTMVXqZYvbl7cJoNZiLCfTYg3MQVq5NHkq0h0jInIH7QlZYd0hZPOGONwdy17O1QmnX2Weq6G+Ps9siLVKFba37+y5PfRYkiUatJvDf2f7Jdxye6TWrUmlxQkAvs65ioyr8doad7IT1/yaGr/rBpXeQqZp6zNoMYr/cCRAYX6nOrnSszgiIWEc8QMMx+G31v77Uvd++9VG4MG9gbdpexpfYRNzKAxDarSaYEOuaWm2Z6R87bpNcjA+tW0hnBHBzRx0NFYYqXyW0tpVO9+035A9CHrLDLz77r4jJttcRvrP2rTbTBiwuhpmiufRyk3BuWlgzU3yaSuQV3M="
#
# Configurations
@@ -35,35 +33,123 @@ notifications:
jobs:
include:
###
# Stage: Build Carl
###
# ubuntu-17.10 - DefaultDebugTravis
- stage: Build Carl
os: linux
compiler: gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build_carl.sh
after_success:
- docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD";
- docker commit carl movesrwth/carl:travis-debug;
- docker push movesrwth/carl:travis-debug;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Build Carl
os: linux
compiler: gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build_carl.sh
after_success:
- docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD";
- docker commit carl movesrwth/carl:travis;
- docker push movesrwth/carl:travis;
###
# Stage: Build (1st run)
###
# ubuntu-16.10
# debian-9 - DefaultDebug
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis/build.sh Build1
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# debian-9 - DefaultRelease
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis/build.sh Build1
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultDebugTravis
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build1
- travis/build.sh Build1
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build1
- travis/build.sh Build1
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultDebug
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis/build.sh Build1
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultRelease
- stage: Build (1st run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
install:
- rm -rf build
- travis/install_linux.sh
script:
- travis/build.sh Build1
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
@@ -71,29 +157,82 @@ jobs:
# Stage: Build (2nd run)
###
# ubuntu-16.10
# debian-9 - DefaultDebug
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build2
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# debian-9 - DefaultRelease
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build2
- travis/build.sh Build2
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultDebugTravis
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build2
- travis/build.sh Build2
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build2
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultDebug
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build2
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultRelease
- stage: Build (2nd run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build2
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
@@ -101,29 +240,82 @@ jobs:
# Stage: Build (3rd run)
###
# ubuntu-16.10
# debian-9 - DefaultDebug
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build3
- travis/build.sh Build3
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# debian-9 - DefaultRelease
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh Build3
- travis/build.sh Build3
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultDebugTravis
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build3
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build3
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultDebug
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build3
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultRelease
- stage: Build (3rd run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh Build3
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
@@ -131,29 +323,82 @@ jobs:
# Stage: Build (4th run)
###
# ubuntu-16.10
# debian-9 - DefaultDebug
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# debian-9 - DefaultRelease
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultDebugTravis
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultDebug
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh BuildLast
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultRelease
- stage: Build (4th run)
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh BuildLast
- travis/build.sh BuildLast
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
@@ -161,37 +406,106 @@ jobs:
# Stage: Test all
###
# ubuntu-16.10
# debian-9 - DefaultDebug
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh TestAll
- travis/build.sh TestAll
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# debian-9 - DefaultRelease
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh TestAll
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-17.10 - DefaultDebugTravis
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultDebugTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh TestAll
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
after_success:
- docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD";
- docker commit storm mvolk/storm-debug:travis;
- docker push mvolk/storm-debug:travis;
- docker commit storm movesrwth/storm:travis-debug;
- docker push movesrwth/storm:travis-debug;
# ubuntu-17.10 - DefaultReleaseTravis
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-16.10 COMPILER=gcc
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis_wait 60 travis/build.sh TestAll
- travis/build.sh TestAll
before_cache:
- docker cp storm:/storm/. .
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
after_success:
- docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD";
- docker commit storm mvolk/storm:travis;
- docker push mvolk/storm:travis;
- docker commit storm movesrwth/storm:travis;
- docker push movesrwth/storm:travis;
# ubuntu-18.04 - DefaultDebug
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh TestAll
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
# ubuntu-18.04 - DefaultRelease
- stage: Test all
os: linux
compiler: gcc
env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
install:
- travis/install_linux.sh
script:
- travis/build.sh TestAll
before_cache:
- docker cp storm:/opt/storm/. .
after_failure:
- find build -iname '*err*.log' -type f -print -exec cat {} \;
allow_failures:
- stage: Build (1st run)
os: linux
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
- stage: Build (2nd run)
os: linux
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
- stage: Build (3rd run)
os: linux
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
- stage: Build (4th run)
os: linux
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc
- stage: Test all
os: linux
env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-17.10 COMPILER=gcc

11
CHANGELOG.md

@@ -7,8 +7,17 @@ The releases of major and minor versions contain an overview of changes since th
Version 1.2.x
-------------
### Version 1.2.1 (to be released)
### Version 1.2.2 (to be released)
- `storm-dft`: improvements in Galileo parser
- `storm-dft`: test cases for DFT analysis
- Sound value iteration (SVI) for DTMCs and MDPs
- Topological solver for linear equation systems and MinMax equation systems.
### Version 1.2.1 (2018/02)
- Multi-dimensional reward bounded reachability properties for DTMCs.
- `storm-dft`: transformation of DFTs to GSPNs
- Several bug fixes
### Version 1.2.0 (2017/12)
- C++ api changes: Building model takes `BuilderOptions` instead of extended list of Booleans, does not depend on settings anymore.

92
CMakeLists.txt

@@ -78,6 +78,14 @@ set(BIN_INSTALL_DIR lib/ CACHE PATH "Installation directory for executables")
set(DEF_INSTALL_CMAKE_DIR "lib/CMake/storm")
set(CMAKE_INSTALL_DIR ${DEF_INSTALL_CMAKE_DIR} CACHE PATH "Installation directory for CMake files")
# Add CMake install prefix
foreach(p LIB BIN INCLUDE CMAKE)
set(var ${p}_INSTALL_DIR)
if(NOT IS_ABSOLUTE "${${var}}")
set(${var} "${CMAKE_INSTALL_PREFIX}/${${var}}")
endif()
endforeach()
message("CMAKE_INSTALL_DIR: ${CMAKE_INSTALL_DIR}")
# If the STORM_DEVELOPER option was turned on, by default we target a debug version, otherwise a release version.
@@ -179,8 +187,9 @@ elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang")
set(STORM_COMPILER_APPLECLANG ON)
set(CLANG ON)
set(STORM_COMPILER_ID "AppleClang")
set(CMAKE_MACOSX_RPATH ON)
set(CMAKE_MACOSX_RPATH ON)
elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
set(GCC ON)
# using GCC
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 5.0)
message(FATAL_ERROR "gcc version must be at least 5.0.")
@@ -222,16 +231,16 @@ if (STORM_COMPILER_CLANG OR STORM_COMPILER_APPLECLANG)
if(FORCE_COLOR)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fcolor-diagnostics")
endif()
if (LINUX)
set(CLANG_STDLIB libstdc++)
else()
set(CLANG_STDLIB libc++)
endif()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -stdlib=${CLANG_STDLIB} -ftemplate-depth=1024")
set (CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -ffast-math -fno-finite-math-only")
set (CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}")
if(LINUX)
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -rdynamic")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
@@ -241,20 +250,20 @@ if (STORM_COMPILER_CLANG OR STORM_COMPILER_APPLECLANG)
endif()
elseif (STORM_COMPILER_GCC)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -fprefetch-loop-arrays -ffast-math -fno-finite-math-only")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -fprefetch-loop-arrays")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -rdynamic")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
endif ()
if (STORM_USE_LTO)
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -flto")
# Fix for problems that occurred when using LTO on gcc. This should be removed when it
# is not needed anymore as it makes the the already long link-step potentially longer.
if (STORM_COMPILER_GCC)
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -flto-partition=none")
endif()
message(STATUS "Storm - Enabling link-time optimizations.")
else()
message(STATUS "Storm - Disabling link-time optimizations.")
@@ -402,25 +411,25 @@ endif(DOXYGEN_FOUND)
# try to obtain the current version from git.
include(GetGitRevisionDescription)
get_git_head_revision(STORM_VERSION_REFSPEC STORM_VERSION_GIT_HASH)
git_describe_checkout(STORM_GIT_VERSION_STRING)
git_describe(STORM_GIT_VERSION_STRING)
# parse the git tag into variables
string(REGEX REPLACE "^([0-9]+)\\..*" "\\1" STORM_VERSION_MAJOR "${STORM_GIT_VERSION_STRING}")
string(REGEX REPLACE "^[0-9]+\\.([0-9]+).*" "\\1" STORM_VERSION_MINOR "${STORM_GIT_VERSION_STRING}")
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.([0-9]+).*" "\\1" STORM_VERSION_PATCH "${STORM_GIT_VERSION_STRING}")
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.[0-9]+\\-([0-9]+)\\-.*" "\\1" STORM_VERSION_COMMITS_AHEAD "${STORM_GIT_VERSION_STRING}")
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.[0-9]+\\-[0-9]+\\-([a-z0-9]+).*" "\\1" STORM_VERSION_TAG_HASH "${STORM_GIT_VERSION_STRING}")
string(REGEX REPLACE "^[0-9]+\\.[0-9]+\\.[0-9]+\\-[0-9]+\\-[a-z0-9]+\\-(.*)" "\\1" STORM_VERSION_APPENDIX "${STORM_GIT_VERSION_STRING}")
# now check whether the git version lookup failed
if (STORM_VERSION_MAJOR MATCHES "NOTFOUND")
include(version.cmake)
set(STORM_VERSION_GIT_HASH "")
set(STORM_VERSION_COMMITS_AHEAD 0)
set(STORM_VERSION_DIRTY boost::none)
message(WARNING "Storm - git version information not available, statically assuming version ${STORM_VERSION_MAJOR}.${STORM_VERSION_MINOR}.${STORM_VERSION_PATCH}.")
else()
# start with major.minor.patch
string(REGEX MATCH "^([0-9]+)\\.([0-9]+)\\.([0-9]+)(.*)$" STORM_VERSION_MATCH "${STORM_GIT_VERSION_STRING}")
set(STORM_VERSION_MAJOR "${CMAKE_MATCH_1}")
set(STORM_VERSION_MINOR "${CMAKE_MATCH_2}")
set(STORM_VERSION_PATCH "${CMAKE_MATCH_3}")
set(STORM_GIT_VERSION_REST "${CMAKE_MATCH_4}")
# parse rest of the form (-label)-commitsahead-hash-appendix
string(REGEX MATCH "^(\\-([a-z][a-z0-9\\.]+))?\\-([0-9]+)\\-([a-z0-9]+)(\\-.*)?$" STORM_VERSION_REST_MATCH "${STORM_GIT_VERSION_REST}")
set(STORM_VERSION_LABEL "${CMAKE_MATCH_2}") # might be empty
set(STORM_VERSION_COMMITS_AHEAD "${CMAKE_MATCH_3}")
set(STORM_VERSION_TAG_HASH "${CMAKE_MATCH_4}")
set(STORM_VERSION_APPENDIX "${CMAKE_MATCH_5}") # might be empty
set(STORM_VERSION_DIRTY boost::none)
if (NOT "${STORM_GIT_VERSION_STRING}" STREQUAL "")
if ("${STORM_VERSION_APPENDIX}" MATCHES "^.*dirty.*$")
set(STORM_VERSION_DIRTY "true")
else()
@@ -428,20 +437,43 @@ else()
endif()
endif()
# now check whether the git version lookup failed
set(STORM_VERSION_SOURCE "VersionSource::Git")
if (STORM_GIT_VERSION_STRING MATCHES "NOTFOUND")
set(STORM_VERSION_SOURCE "VersionSource::Static")
set(STORM_VERSION_COMMITS_AHEAD "boost::none")
include(version.cmake)
message(WARNING "Storm - git version information not available, statically assuming version ${STORM_VERSION_MAJOR}.${STORM_VERSION_MINOR}.${STORM_VERSION_PATCH}.")
endif()
# check whether there is a label ('alpha', 'pre', etc.)
if ("${STORM_VERSION_LABEL}" STREQUAL "")
set(STORM_VERSION_LABEL_STRING "")
else()
set(STORM_VERSION_LABEL_STRING "-${STORM_VERSION_LABEL}")
endif()
# check for development version with commits ahead of latest tag
set(STORM_VERSION_DEV "false")
set(STORM_VERSION_DEV_STRING "")
if(STORM_VERSION_COMMITS_AHEAD)
MATH(EXPR STORM_VERSION_DEV_PATCH "${STORM_VERSION_PATCH}+1")
if ("${STORM_VERSION_LABEL}" STREQUAL "")
# increase patch number to indicate newer version
MATH(EXPR STORM_VERSION_DEV_PATCH "${STORM_VERSION_PATCH}+1")
else()
set(STORM_VERSION_DEV_PATCH "${STORM_VERSION_PATCH}")
endif()
set(STORM_VERSION_DEV "true")
set(STORM_VERSION_DEV_STRING " (dev)")
else()
set(STORM_VERSION_DEV_PATCH ${STORM_VERSION_PATCH})
endif()
# set final Storm version
set(STORM_VERSION "${STORM_VERSION_MAJOR}.${STORM_VERSION_MINOR}.${STORM_VERSION_DEV_PATCH}")
set(STORM_VERSION_STRING "${STORM_VERSION}${STORM_VERSION_DEV_STRING}")
set(STORM_VERSION_STRING "${STORM_VERSION}${STORM_VERSION_LABEL_STRING}${STORM_VERSION_DEV_STRING}")
message(STATUS "Storm - version is ${STORM_VERSION_STRING} (version ${STORM_VERSION_MAJOR}.${STORM_VERSION_MINOR}.${STORM_VERSION_PATCH} + ${STORM_VERSION_COMMITS_AHEAD} commits), building from git: ${STORM_VERSION_GIT_HASH} (dirty: ${STORM_VERSION_DIRTY}).")
message(STATUS "Storm - version is ${STORM_VERSION_STRING} (version ${STORM_VERSION_MAJOR}.${STORM_VERSION_MINOR}.${STORM_VERSION_PATCH}${STORM_VERSION_LABEL_STRING} + ${STORM_VERSION_COMMITS_AHEAD} commits), building from git: ${STORM_VERSION_GIT_HASH} (dirty: ${STORM_VERSION_DIRTY}).")
# Configure a header file to pass some of the CMake settings to the source code
@@ -476,4 +508,8 @@ add_subdirectory(src)
include(export)
install(FILES ${CMAKE_BINARY_DIR}/stormConfig.install.cmake DESTINATION ${CMAKE_INSTALL_DIR} RENAME stormConfig.cmake)
install(FILES ${CMAKE_BINARY_DIR}/stormConfigVersion.cmake DESTINATION ${CMAKE_INSTALL_DIR})
install(EXPORT storm_Targets FILE stormTargets.cmake DESTINATION ${CMAKE_INSTALL_DIR})
include(StormCPackConfig.cmake)
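
For orientation, the version parsing introduced above splits the output of git describe in two steps: first major.minor.patch, then a remainder of the form (-label)-commitsahead-hash(-appendix). The following standalone C++ sketch is illustrative only and not part of this commit; it applies the same two regular expressions to a made-up describe-style string to show how the pieces fall out.

// Illustrative sketch, not part of the commit: decompose a hypothetical
// "1.2.1-alpha-14-gabc1234-dirty" string with the same two-step regex split.
#include <iostream>
#include <regex>
#include <string>

int main() {
    std::string described = "1.2.1-alpha-14-gabc1234-dirty"; // made-up sample input
    std::smatch m;
    // Step 1: major.minor.patch plus the rest (mirrors STORM_VERSION_MATCH).
    if (std::regex_match(described, m, std::regex("^([0-9]+)\\.([0-9]+)\\.([0-9]+)(.*)$"))) {
        std::cout << "major=" << m[1] << " minor=" << m[2] << " patch=" << m[3] << "\n";
        std::string rest = m[4];
        // Step 2: optional label, commits ahead, tag hash, optional appendix
        // (mirrors STORM_VERSION_REST_MATCH).
        std::smatch r;
        if (std::regex_match(rest, r, std::regex("^(\\-([a-z][a-z0-9\\.]+))?\\-([0-9]+)\\-([a-z0-9]+)(\\-.*)?$"))) {
            std::cout << "label=" << r[2]      // "alpha" (may be empty)
                      << " ahead=" << r[3]     // "14"
                      << " hash=" << r[4]      // "gabc1234"
                      << " appendix=" << r[5]  // "-dirty" (may be empty)
                      << "\n";
        }
    }
    return 0;
}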

4
README.md

@@ -30,15 +30,15 @@ Authors
Storm has been developed at RWTH Aachen University.
###### Principal developers
* Christian Dehnert
* Christian Hensel
* Sebastian Junges
* Joost-Pieter Katoen
* Tim Quatmann
* Matthias Volk
###### Developers (lexicographical order)
* Philipp Berger
* David Korzeniewski
* Tim Quatmann
###### Contributors (lexicographical order)
* Dimitri Bohlender

4
resources/3rdparty/CMakeLists.txt

@@ -157,9 +157,9 @@ if(Z3_FOUND)
endif()
if(STORM_HAVE_Z3_OPTIMIZE)
message (STATUS "Storm - Linking with Z3. (Z3 version supports optimization)")
message (STATUS "Storm - Linking with Z3 (version ${Z3_VERSION}). (Z3 version supports optimization)")
else()
message (STATUS "Storm - Linking with Z3. (Z3 version does not support optimization)")
message (STATUS "Storm - Linking with Z3 (version ${Z3_VERSION}). (Z3 version does not support optimization)")
endif()
add_imported_library(z3 SHARED ${Z3_LIBRARIES} ${Z3_INCLUDE_DIRS})

3190
resources/3rdparty/cudd-3.0.0/Makefile.in
File diff suppressed because it is too large

1256
resources/3rdparty/cudd-3.0.0/aclocal.m4
File diff suppressed because it is too large

19890
resources/3rdparty/cudd-3.0.0/configure
File diff suppressed because it is too large

15
resources/cmake/find_modules/FindGurobi.cmake

@@ -25,9 +25,7 @@ find_path(GUROBI_INCLUDE_DIR
"/Library/gurobi651/mac64/include"
"/Library/gurobi652/mac64/include"
"/Library/gurobi702/mac64/include"
"C:\\libs\\gurobi502\\include"
"C:\\gurobi600\\win64\\include"
"${GUROBI_ROOT}/include"
"${GUROBI_ROOT}/include"
)
find_library( GUROBI_LIBRARY
@@ -42,6 +40,7 @@ find_library( GUROBI_LIBRARY
gurobi60
gurobi65
gurobi70
gurobi75
PATHS "$ENV{GUROBI_HOME}/lib"
"/Library/gurobi502/mac64/lib"
"/Library/gurobi602/mac64/lib"
@@ -51,9 +50,7 @@ find_library( GUROBI_LIBRARY
"/Library/gurobi651/mac64/lib"
"/Library/gurobi652/mac64/lib"
"/Library/gurobi702/mac64/lib"
"C:\\libs\\gurobi502\\lib"
"C:\\gurobi600\\win64\\lib"
"${GUROBI_ROOT}/lib"
"${GUROBI_ROOT}/lib"
)
find_library( GUROBI_CXX_LIBRARY
@@ -67,9 +64,7 @@ find_library( GUROBI_CXX_LIBRARY
"/Library/gurobi651/mac64/lib"
"/Library/gurobi652/mac64/lib"
"/Library/gurobi702/mac64/lib"
"C:\\libs\\gurobi502\\lib"
"C:\\gurobi600\\win64\\lib"
"${GUROBI_ROOT}/lib"
"${GUROBI_ROOT}/lib"
)
set(GUROBI_INCLUDE_DIRS "${GUROBI_INCLUDE_DIR}" )
@@ -86,4 +81,4 @@ find_package_handle_standard_args(GUROBI DEFAULT_MSG
mark_as_advanced(GUROBI_INCLUDE_DIR GUROBI_LIBRARY GUROBI_CXX_LIBRARY)
endif(GUROBI_INCLUDE_DIR)
endif(GUROBI_INCLUDE_DIR)

4
resources/cmake/find_modules/FindZ3.cmake

@@ -10,7 +10,7 @@
# find include dir by searching for a concrete file, which definitely must be in it
find_path(Z3_INCLUDE_DIR
NAMES z3++.h
PATHS ENV PATH INCLUDE "/usr/include/z3" "/usr/local/include/z3/" "${Z3_ROOT}/include"
PATHS ENV PATH INCLUDE "${Z3_ROOT}/include" "/usr/include/z3" "/usr/local/include/z3/"
)
# find library
@@ -44,4 +44,4 @@ ENDIF (NOT Z3_FIND_QUIETLY)
#message(${Z3_LIBRARY})
# make the set variables only visible in advanced mode
mark_as_advanced(Z3_LIBRARY Z3_INCLUDE_DIR Z3_SOLVER, Z3_EXEC)
mark_as_advanced(Z3_LIBRARY Z3_INCLUDE_DIR Z3_SOLVER, Z3_EXEC)

52
resources/cmake/macros/GetGitRevisionDescription.cmake

@@ -18,6 +18,12 @@
# and adjusting the output so that it tests false if there was no exact
# matching tag.
#
# git_local_changes(<var>)
#
# Returns either "CLEAN" or "DIRTY" with respect to uncommitted changes.
# Uses the return code of "git diff-index --quiet HEAD --".
# Does not regard untracked files.
#
# Requires CMake 2.6 or newer (uses the 'function' command)
#
# Original Author:
@@ -37,10 +43,10 @@ set(__get_git_revision_description YES)
# We must run the following at "include" time, not at function call time,
# to find the path to this module rather than the path to a calling list file
get_filename_component(_gitdescmoddir GetGitRevisionDescription.cmake PATH)
get_filename_component(_gitdescmoddir ${CMAKE_CURRENT_LIST_FILE} PATH)
function(get_git_head_revision _refspecvar _hashvar)
set(GIT_PARENT_DIR ${PROJECT_SOURCE_DIR})
set(GIT_PARENT_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
set(GIT_DIR "${GIT_PARENT_DIR}/.git")
while(NOT EXISTS "${GIT_DIR}") # .git dir not found, search parent directories
set(GIT_PREVIOUS_PARENT "${GIT_PARENT_DIR}")
@@ -71,7 +77,7 @@ function(get_git_head_revision _refspecvar _hashvar)
set(HEAD_FILE "${GIT_DATA}/HEAD")
configure_file("${GIT_DIR}/HEAD" "${HEAD_FILE}" COPYONLY)
configure_file("${PROJECT_SOURCE_DIR}/resources/cmake/macros/GetGitRevisionDescription.cmake.in"
configure_file("${_gitdescmoddir}/GetGitRevisionDescription.cmake.in"
"${GIT_DATA}/grabRef.cmake"
@ONLY)
include("${GIT_DATA}/grabRef.cmake")
@@ -110,7 +116,7 @@ function(git_describe _var)
${hash}
${ARGN}
WORKING_DIRECTORY
"${CMAKE_SOURCE_DIR}"
"${CMAKE_CURRENT_SOURCE_DIR}"
RESULT_VARIABLE
res
OUTPUT_VARIABLE
@@ -124,7 +130,12 @@ function(git_describe _var)
set(${_var} "${out}" PARENT_SCOPE)
endfunction()
function(git_describe_checkout _var)
function(git_get_exact_tag _var)
git_describe(out --exact-match ${ARGN})
set(${_var} "${out}" PARENT_SCOPE)
endfunction()
function(git_local_changes _var)
if(NOT GIT_FOUND)
find_package(Git QUIET)
endif()
@@ -138,39 +149,20 @@ function(git_describe_checkout _var)
return()
endif()
# TODO sanitize
#if((${ARGN}" MATCHES "&&") OR
# (ARGN MATCHES "||") OR
# (ARGN MATCHES "\\;"))
# message("Please report the following error to the project!")
# message(FATAL_ERROR "Looks like someone's doing something nefarious with git_describe! Passed arguments ${ARGN}")
#endif()
#message(STATUS "Arguments to execute_process: ${ARGN}")
execute_process(COMMAND
"${GIT_EXECUTABLE}"
describe
--tags
--dirty=-dirty
--long
${ARGN}
diff-index --quiet HEAD --
WORKING_DIRECTORY
"${CMAKE_SOURCE_DIR}"
"${CMAKE_CURRENT_SOURCE_DIR}"
RESULT_VARIABLE
res
OUTPUT_VARIABLE
out
ERROR_QUIET
OUTPUT_STRIP_TRAILING_WHITESPACE)
if(NOT res EQUAL 0)
set(out "${out}-${res}-NOTFOUND")
if(res EQUAL 0)
set(${_var} "CLEAN" PARENT_SCOPE)
else()
set(${_var} "DIRTY" PARENT_SCOPE)
endif()
set(${_var} "${out}" PARENT_SCOPE)
endfunction()
function(git_get_exact_tag _var)
git_describe(out --exact-match ${ARGN})
set(${_var} "${out}" PARENT_SCOPE)
endfunction()
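
For orientation, the new git_local_changes() documented above boils down to running git diff-index --quiet HEAD -- and mapping exit code 0 to CLEAN and anything else to DIRTY; untracked files are ignored. A minimal C++ sketch of the same check, illustrative only and not part of this commit, assuming git is on the PATH and the process runs inside a working copy:

#include <cstdlib>
#include <iostream>
#include <string>

// Same decision rule as git_local_changes(): exit status 0 means no
// uncommitted changes to tracked files; untracked files are not considered.
std::string gitLocalChanges() {
    int status = std::system("git diff-index --quiet HEAD --");
    return status == 0 ? "CLEAN" : "DIRTY"; // non-zero also covers errors, e.g. not a repository
}

int main() {
    std::cout << gitLocalChanges() << std::endl;
    return 0;
}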

13
resources/cmake/macros/GetGitRevisionDescription.cmake.in

@@ -1,4 +1,4 @@
#
#
# Internal file for GetGitRevisionDescription.cmake
#
# Requires CMake 2.6 or newer (uses the 'function' command)
@@ -23,9 +23,12 @@ if(HEAD_CONTENTS MATCHES "ref")
string(REPLACE "ref: " "" HEAD_REF "${HEAD_CONTENTS}")
if(EXISTS "@GIT_DIR@/${HEAD_REF}")
configure_file("@GIT_DIR@/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
elseif(EXISTS "@GIT_DIR@/logs/${HEAD_REF}")
configure_file("@GIT_DIR@/logs/${HEAD_REF}" "@GIT_DATA@/head-ref" COPYONLY)
set(HEAD_HASH "${HEAD_REF}")
else()
configure_file("@GIT_DIR@/packed-refs" "@GIT_DATA@/packed-refs" COPYONLY)
file(READ "@GIT_DATA@/packed-refs" PACKED_REFS)
if(${PACKED_REFS} MATCHES "([0-9a-z]*) ${HEAD_REF}")
set(HEAD_HASH "${CMAKE_MATCH_1}")
endif()
endif()
else()
# detached HEAD
@@ -35,4 +38,4 @@ endif()
if(NOT HEAD_HASH)
file(READ "@GIT_DATA@/head-ref" HEAD_HASH LIMIT 1024)
string(STRIP "${HEAD_HASH}" HEAD_HASH)
endif()
endif()

20
resources/cmake/macros/export.cmake

@@ -8,7 +8,7 @@ message(STATUS "Registered with cmake")
export(PACKAGE storm)
set(DEP_TARGETS "")
foreach(dt ${STORM_DEP_TARGETS})
foreach(dt ${STORM_DEP_TARGETS})
export_target(DEP_TARGETS ${dt})
endforeach()
@@ -19,10 +19,26 @@ endforeach()
include(CMakePackageConfigHelpers)
write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/stormConfigVersion.cmake
VERSION 0.1.0
COMPATIBILITY SameMajorVersion )
# For the build tree
set(CONF_INCLUDE_DIRS "${CMAKE_BINARY_DIR}/include/")
configure_package_config_file(
resources/cmake/stormConfig.cmake.in
${PROJECT_BINARY_DIR}/stormConfig.cmake
INSTALL_DESTINATION ${CMAKE_INSTALL_DIR}
PATH_VARS INCLUDE_INSTALL_DIR #SYSCONFIG_INSTALL_DIR
PATH_VARS INCLUDE_INSTALL_DIR
)
# For the install tree
file(RELATIVE_PATH REL_INCLUDE_DIR "${CMAKE_INSTALL_DIR}" "${INCLUDE_INSTALL_DIR}")
set(CONF_INCLUDE_DIRS "\${storm_CMAKE_DIR}/${REL_INCLUDE_DIR}/storm")
configure_package_config_file(
resources/cmake/stormConfig.cmake.in
${PROJECT_BINARY_DIR}/stormConfig.install.cmake
INSTALL_DESTINATION ${CMAKE_INSTALL_DIR}
PATH_VARS INCLUDE_INSTALL_DIR
)

11
resources/cmake/stormConfigVersion.cmake.in

@@ -0,0 +1,11 @@
set(PACKAGE_VERSION "@storm_VERSION@")
# Check whether the requested PACKAGE_FIND_VERSION is compatible
if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}")
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
set(PACKAGE_VERSION_COMPATIBLE TRUE)
if ("${PACKAGE_VERSION}" VERSION_EQUAL "${PACKAGE_FIND_VERSION}")
set(PACKAGE_VERSION_EXACT TRUE)
endif()
endif()

554
resources/examples/testfiles/ctmc/cluster2.drn
File diff suppressed because it is too large

4
resources/examples/testfiles/dft/and.dft

@@ -0,0 +1,4 @@
toplevel "A";
"A" and "B" "C";
"B" lambda=0.5 dorm=0.3;
"C" lambda=0.5 dorm=0.3;

8
resources/examples/testfiles/dft/fdep.dft

@@ -0,0 +1,8 @@
toplevel "System";
"System" or "Power" "Machine";
"Power" fdep "B_Power" "P" "B";
"Machine" or "P" "B";
"B_Power" lambda=0.5 dorm=0;
"P" lambda=0.5 dorm=0;
"B" lambda=0.5 dorm=0.5;

5
resources/examples/testfiles/dft/fdep2.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" and "B" "C";
"F" fdep "B" "C";
"B" lambda=0.5 dorm=0;
"C" lambda=0.5 dorm=0;

5
resources/examples/testfiles/dft/fdep3.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" and "B" "C" "F";
"F" fdep "B" "C";
"B" lambda=0.4 dorm=0;
"C" lambda=0.8 dorm=0;

7
resources/examples/testfiles/dft/fdep4.dft

@@ -0,0 +1,7 @@
toplevel "A";
"A" or "F" "B";
"F" fdep "E" "C" "D";
"B" wsp "C" "D";
"C" lambda=1 dorm=0;
"D" lambda=1 dorm=0.5;
"E" lambda=0.5 dorm=0;

7
resources/examples/testfiles/dft/fdep5.dft

@@ -0,0 +1,7 @@
toplevel "A";
"A" and "B" "C" "D" "E";
"F" fdep "B" "C" "D";
"B" lambda=0.5 dorm=0;
"C" lambda=0.5 dorm=0;
"D" lambda=0.5 dorm=0;
"E" lambda=0.5 dorm=0;

4
resources/examples/testfiles/dft/or.dft

@@ -0,0 +1,4 @@
toplevel "A";
"A" or "B" "C";
"B" lambda=0.5 dorm=0.3;
"C" lambda=0.5 dorm=0.3;

4
resources/examples/testfiles/dft/pand.dft

@@ -0,0 +1,4 @@
toplevel "A";
"A" pand "B" "C";
"B" lambda=0.4 dorm=0.3;
"C" lambda=0.2 dorm=0.3;

12
resources/examples/testfiles/dft/pdep.dft

@@ -0,0 +1,12 @@
// From Junges2015
// Example 3.19
toplevel "SF";
"SF" or "A" "B" "PDEP";
"A" pand "S" "MA";
"B" and "MA" "MB";
"PDEP" pdep=0.2 "MA" "S";
"S" lambda=0.5 dorm=0;
"MA" lambda=0.5 dorm=0;
"MB" lambda=0.5 dorm=0;

9
resources/examples/testfiles/dft/pdep2.dft

@@ -0,0 +1,9 @@
toplevel "SF";
"SF" or "A" "B" "PDEP";
"A" pand "S" "MA";
"B" and "MA" "MB";
"PDEP" pdep=0.2 "MA" "S" "MB";
"S" lambda=0.5 dorm=0;
"MA" lambda=0.5 dorm=0;
"MB" lambda=0.5 dorm=0;

5
resources/examples/testfiles/dft/pdep3.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" and "B" "C" "F";
"F" pdep=0.3 "B" "C";
"B" lambda=0.4 dorm=0;
"C" lambda=0.8 dorm=0;

7
resources/examples/testfiles/dft/pdep4.dft

@@ -0,0 +1,7 @@
toplevel "SF";
"SF" pand "S" "A" "B";
"PDEP" pdep=0.2 "S" "A" "B";
"S" lambda=0.5 dorm=0;
"A" lambda=0.5 dorm=0;
"B" lambda=0.5 dorm=0;

5
resources/examples/testfiles/dft/por.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" por "B" "C" "D";
"B" lambda=0.4 dorm=0.0;
"C" lambda=0.2 dorm=0.0;
"D" lambda=0.2 dorm=0.0;

5
resources/examples/testfiles/dft/seq.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" and "B" "C";
"X" seq "B" "C";
"B" lambda=0.5 dorm=0.3;
"C" lambda=0.5 dorm=0.3;

6
resources/examples/testfiles/dft/seq2.dft

@@ -0,0 +1,6 @@
toplevel "A";
"A" and "B" "C" "D";
"X" seq "B" "C" "D";
"B" lambda=0.5 dorm=0.3;
"C" lambda=0.5 dorm=0.3;
"D" lambda=0.5 dorm=0.3;

6
resources/examples/testfiles/dft/seq3.dft

@@ -0,0 +1,6 @@
toplevel "A";
"A" and "C" "D";
"X" seq "B" "C" "D";
"B" lambda=0.5 dorm=0.3;
"C" lambda=0.5 dorm=0.3;
"D" lambda=0.5 dorm=0.3;

7
resources/examples/testfiles/dft/seq4.dft

@@ -0,0 +1,7 @@
toplevel "A";
"A" and "T1" "B3";
"T1" or "B1" "B2";
"X" seq "B1" "B2" "B3";
"B1" lambda=0.5 dorm=0.3;
"B2" lambda=0.5 dorm=0.3;
"B3" lambda=0.5 dorm=0.3;

9
resources/examples/testfiles/dft/seq5.dft

@@ -0,0 +1,9 @@
toplevel "A";
"A" and "T1" "T2";
"T1" pand "B1" "B2";
"T2" pand "B3" "B4";
"X" seq "B4" "B3";
"B1" lambda=0.7 dorm=0.3;
"B2" lambda=0.5 dorm=0.3;
"B3" lambda=0.5 dorm=0.3;
"B4" lambda=0.7 dorm=0.3;

5
resources/examples/testfiles/dft/spare.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" wsp "I" "M";
"I" lambda=0.5 dorm=0.3;
"M" lambda=0.5 dorm=0.3;

8
resources/examples/testfiles/dft/spare2.dft

@@ -0,0 +1,8 @@
toplevel "A";
"A" or "B" "C";
"B" wsp "I" "J";
"C" wsp "M" "J";
"I" lambda=0.5 dorm=0.3;
"J" lambda=0.5 dorm=0.3;
"M" lambda=0.5 dorm=0.3;

10
resources/examples/testfiles/dft/spare3.dft

@@ -0,0 +1,10 @@
toplevel "A";
"A" or "B" "C" "D";
"B" wsp "I" "M";
"C" wsp "J" "M";
"D" wsp "K" "M";
"I" lambda=0.5 dorm=0.3;
"J" lambda=0.5 dorm=0.3;
"K" lambda=0.5 dorm=0.3;
"M" lambda=0.5 dorm=0.3;

9
resources/examples/testfiles/dft/spare4.dft

@@ -0,0 +1,9 @@
toplevel "A";
"A" and "B" "C";
"B" wsp "I" "J" "K";
"C" wsp "M" "J";
"I" lambda=0.5 dorm=0.3;
"J" lambda=0.5 dorm=0.3;
"K" lambda=0.5 dorm=0.3;
"M" lambda=0.5 dorm=0.3;

9
resources/examples/testfiles/dft/spare5.dft

@@ -0,0 +1,9 @@
toplevel "A";
"A" wsp "I" "B";
"B" or "C" "J";
"C" or "K" "L";
"I" lambda=0.5 dorm=0;
"J" lambda=0.5 dorm=0;
"K" lambda=0.5 dorm=0;
"L" lambda=0.5 dorm=0;

7
resources/examples/testfiles/dft/spare6.dft

@@ -0,0 +1,7 @@
toplevel "A";
"A" or "I" "B";
"B" wsp "J" "M";
"I" lambda=0.5 dorm=0.5;
"J" lambda=0.5 dorm=0.5;
"M" lambda=0.5 dorm=0.5;

5
resources/examples/testfiles/dft/spare7.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" wsp "K" "J" "I";
"I" lambda=0.5 dorm=0.5;
"J" lambda=1 dorm=0.5;
"K" lambda=0.5 dorm=0.5;

7
resources/examples/testfiles/dft/spare8.dft

@@ -0,0 +1,7 @@
toplevel "A";
"A" wsp "I" "B";
"B" wsp "J" "K";
"I" lambda=0.5 dorm=0.3;
"J" lambda=0.5 dorm=0.3;
"K" lambda=0.5 dorm=0.3;

5
resources/examples/testfiles/dft/voting.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" 1of3 "B" "C" "D";
"B" lambda=0.1 dorm=0;
"C" lambda=0.2 dorm=0;
"D" lambda=0.3 dorm=0;

5
resources/examples/testfiles/dft/voting2.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" 1of3 "B" "C" "D";
"B" lambda=0.3 dorm=0;
"C" lambda=0.4 dorm=0;
"D" lambda=1 dorm=0;

5
resources/examples/testfiles/dft/voting3.dft

@@ -0,0 +1,5 @@
toplevel "A";
"A" 2of3 "B" "C" "D";
"B" lambda=0.3 dorm=0;
"C" lambda=0.4 dorm=0;
"D" lambda=1 dorm=0;

6
resources/examples/testfiles/dft/voting4.dft

@@ -0,0 +1,6 @@
toplevel "A";
"A" 2of3 "B" "C" "D";
"D" or "E";
"B" lambda=1 dorm=0.0;
"C" lambda=1 dorm=0.0;
"E" lambda=1 dorm=0.0;

32337
resources/examples/testfiles/dtmc/crowds-5-5.drn
File diff suppressed because it is too large

71
resources/examples/testfiles/ma/jobscheduler.drn

@@ -0,0 +1,71 @@
// Exported by storm
// Original model type: Markov Automaton
@type: Markov Automaton
@parameters
@reward_models
avg_waiting_time
@nr_states
17
@model
state 0 !0 [1] init
action 0 [0]
1 : 1
action 1 [0]
2 : 1
action 2 [0]
3 : 1
state 1 !3 [1]
action 0 [0]
4 : 0.333333
5 : 0.666667
state 2 !4 [1]
action 0 [0]
4 : 0.25
6 : 0.75
state 3 !5 [1]
action 0 [0]
5 : 0.4
6 : 0.6
state 4 !0 [0.666667] one_job_finished slowest_before_fastest
action 0 [0]
7 : 1
state 5 !0 [0.666667] one_job_finished
action 0 [0]
8 : 1
state 6 !0 [0.666667] one_job_finished
action 0 [0]
9 : 1
state 7 !5 [0.666667] one_job_finished slowest_before_fastest
action 0 [0]
10 : 0.4
11 : 0.6
state 8 !4 [0.666667] one_job_finished
action 0 [0]
10 : 0.25
12 : 0.75
state 9 !3 [0.666667] one_job_finished
action 0 [0]
11 : 0.333333
12 : 0.666667
state 10 !0 [0.333333] half_of_jobs_finished slowest_before_fastest
action 0 [0]
13 : 1
state 11 !0 [0.333333] half_of_jobs_finished
action 0 [0]
14 : 1
state 12 !0 [0.333333] half_of_jobs_finished
action 0 [0]
15 : 1
state 13 !3 [0.333333] half_of_jobs_finished slowest_before_fastest
action 0 [0]
16 : 1
state 14 !2 [0.333333] half_of_jobs_finished
action 0 [0]
16 : 1
state 15 !1 [0.333333] half_of_jobs_finished
action 0 [0]
16 : 1
state 16 !1 [0] all_jobs_finished deadlock
action 0 [0]
16 : 1

12
resources/examples/testfiles/mdp/prism-mec-example1.nm

@@ -0,0 +1,12 @@
mdp
module test
x : [0..2];
[] x=0 -> true;
[] x=0 -> 0.5 : (x'=1) + 0.5: (x'=2);
[] x=1 -> (x'=0);
[] x=2 -> true;
endmodule

13
resources/examples/testfiles/mdp/prism-mec-example2.nm

@@ -0,0 +1,13 @@
mdp
module test
x : [0..2];
[] x=0 -> true;
[] x=0 -> 0.5 : (x'=1) + 0.5: (x'=1);
[] x=0 -> (x'=2);
[] x=1 -> (x'=0);
[] x=2 -> true;
endmodule

2
resources/examples/testfiles/mdp/two_dice.drn

@@ -3,6 +3,8 @@
@type: MDP
@parameters
@reward_models
coinflips
@nr_states
169
@model

2
src/storm-cli-utilities/CMakeLists.txt

@@ -36,5 +36,5 @@ add_custom_target(copy_storm_cli_util_headers DEPENDS ${STORM_CLI_UTIL_OUTPUT_HE
add_dependencies(storm-cli-utilities copy_storm_pars_headers)
# installation
install(TARGETS storm-cli-utilities RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-cli-utilities EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

27
src/storm-cli-utilities/cli.cpp

@@ -11,6 +11,7 @@
#include <type_traits>
#include <ctime>
#include <boost/algorithm/string/replace.hpp>
#include "storm-cli-utilities/model-handling.h"
@@ -63,7 +64,27 @@ namespace storm {
storm::utility::cleanUp();
return 0;
}
std::string shellQuoteSingleIfNecessary(const std::string& arg) {
// quote empty argument
if (arg.empty()) {
return "''";
}
if (arg.find_first_not_of("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_./=") != std::string::npos) {
// contains potentially unsafe character, needs quoting
if (arg.find('\'') != std::string::npos) {
// contains ', we have to replace all ' with '\''
std::string escaped(arg);
boost::replace_all(escaped, "'", "'\\''");
return "'" + escaped + "'";
} else {
return "'" + arg + "'";
}
}
return arg;
}
void printHeader(std::string const& name, const int argc, const char** argv) {
STORM_PRINT(name << " " << storm::utility::StormVersion::shortVersionString() << std::endl << std::endl);
@@ -71,7 +92,7 @@ namespace storm {
// "Compute" the command line argument string with which storm was invoked.
std::stringstream commandStream;
for (int i = 1; i < argc; ++i) {
commandStream << argv[i] << " ";
commandStream << " " << shellQuoteSingleIfNecessary(argv[i]);
}
std::string command = commandStream.str();
@@ -79,7 +100,7 @@ namespace storm {
if (!command.empty()) {
std::time_t result = std::time(nullptr);
STORM_PRINT("Date: " << std::ctime(&result));
STORM_PRINT("Command line arguments: " << commandStream.str() << std::endl);
STORM_PRINT("Command line arguments:" << commandStream.str() << std::endl);
STORM_PRINT("Current working directory: " << storm::utility::cli::getCurrentWorkingDirectory() << std::endl << std::endl);
}
}
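
For orientation, the new shellQuoteSingleIfNecessary() leaves arguments made up of the listed safe characters untouched and otherwise wraps them in single quotes, escaping any embedded single quote as '\''. A usage sketch, illustrative only and not part of this commit, assuming the declaration from cli.h and that the function lives in the storm::cli namespace:

#include <cassert>
#include <string>
#include "storm-cli-utilities/cli.h"

void quotingExamples() {
    using storm::cli::shellQuoteSingleIfNecessary;                                   // assumed namespace
    assert(shellQuoteSingleIfNecessary("model.prism") == "model.prism");             // only safe characters: unchanged
    assert(shellQuoteSingleIfNecessary("") == "''");                                 // empty argument gets quoted
    assert(shellQuoteSingleIfNecessary("P=? [F \"goal\"]") == "'P=? [F \"goal\"]'"); // unsafe characters: quoted
    assert(shellQuoteSingleIfNecessary("it's") == "'it'\\''s'");                     // embedded ' escaped as '\''
}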

7
src/storm-cli-utilities/cli.h

@@ -11,6 +11,13 @@ namespace storm {
*/
int64_t process(const int argc, const char** argv);
/*!
* For a command-line argument, returns a quoted version
* with single quotes if it contains unsafe characters.
* Otherwise, just returns the unquoted argument.
*/
std::string shellQuoteSingleIfNecessary(const std::string& arg);
void printHeader(std::string const& name, const int argc, const char** argv);
void printVersion(std::string const& name);

108
src/storm-cli-utilities/model-handling.h

@@ -23,6 +23,7 @@
#include "storm/models/sparse/StandardRewardModel.h"
#include "storm/models/symbolic/StandardRewardModel.h"
#include "storm/models/symbolic/MarkovAutomaton.h"
#include "storm/settings/SettingsManager.h"
#include "storm/settings/modules/ResourceSettings.h"
@@ -189,6 +190,7 @@ namespace storm {
options.setBuildChoiceOrigins(counterexampleGeneratorSettings.isMinimalCommandSetGenerationSet());
options.setBuildAllLabels(buildSettings.isBuildFullModelSet());
options.setBuildAllRewardModels(buildSettings.isBuildFullModelSet());
options.setAddOutOfBoundsState(buildSettings.isBuildOutOfBoundsStateSet());
if (buildSettings.isBuildFullModelSet()) {
options.clearTerminalStates();
}
@ -216,7 +218,7 @@ namespace storm {
auto buildSettings = storm::settings::getModule<storm::settings::modules::BuildSettings>();
std::shared_ptr<storm::models::ModelBase> result;
if (input.model) {
if (engine == storm::settings::modules::CoreSettings::Engine::Dd || engine == storm::settings::modules::CoreSettings::Engine::Hybrid || engine == storm::settings::modules::CoreSettings::Engine::AbstractionRefinement) {
if (engine == storm::settings::modules::CoreSettings::Engine::Dd || engine == storm::settings::modules::CoreSettings::Engine::Hybrid || engine == storm::settings::modules::CoreSettings::Engine::DdSparse || engine == storm::settings::modules::CoreSettings::Engine::AbstractionRefinement) {
result = buildModelDd<DdType, ValueType>(input);
} else if (engine == storm::settings::modules::CoreSettings::Engine::Sparse) {
result = buildModelSparse<ValueType>(input, buildSettings);
@@ -305,37 +307,82 @@ }
}
template <storm::dd::DdType DdType, typename ValueType>
std::shared_ptr<storm::models::Model<ValueType>> preprocessDdModelBisimulation(std::shared_ptr<storm::models::symbolic::Model<DdType, ValueType>> const& model, SymbolicInput const& input, storm::settings::modules::BisimulationSettings const& bisimulationSettings) {
typename std::enable_if<DdType != storm::dd::DdType::Sylvan && !std::is_same<ValueType, double>::value, std::shared_ptr<storm::models::Model<ValueType>>>::type
preprocessDdMarkovAutomaton(std::shared_ptr<storm::models::symbolic::Model<DdType, ValueType>> const& model) {
return model;
}
template <storm::dd::DdType DdType, typename ValueType>
typename std::enable_if<DdType == storm::dd::DdType::Sylvan || std::is_same<ValueType, double>::value, std::shared_ptr<storm::models::Model<ValueType>>>::type
preprocessDdMarkovAutomaton(std::shared_ptr<storm::models::symbolic::Model<DdType, ValueType>> const& model) {
auto ma = model->template as<storm::models::symbolic::MarkovAutomaton<DdType, ValueType>>();
if (!ma->isClosed()) {
return std::make_shared<storm::models::symbolic::MarkovAutomaton<DdType, ValueType>>(ma->close());
} else {
return model;
}
}
template <storm::dd::DdType DdType, typename ValueType, typename ExportValueType = ValueType>
std::shared_ptr<storm::models::Model<ExportValueType>> preprocessDdModelBisimulation(std::shared_ptr<storm::models::symbolic::Model<DdType, ValueType>> const& model, SymbolicInput const& input, storm::settings::modules::BisimulationSettings const& bisimulationSettings) {
STORM_LOG_WARN_COND(!bisimulationSettings.isWeakBisimulationSet(), "Weak bisimulation is currently not supported on DDs. Falling back to strong bisimulation.");
STORM_LOG_INFO("Performing bisimulation minimization...");
return storm::api::performBisimulationMinimization<DdType, ValueType>(model, createFormulasToRespect(input.properties), storm::storage::BisimulationType::Strong, bisimulationSettings.getSignatureMode());
return storm::api::performBisimulationMinimization<DdType, ValueType, ExportValueType>(model, createFormulasToRespect(input.properties), storm::storage::BisimulationType::Strong, bisimulationSettings.getSignatureMode());
}
template <storm::dd::DdType DdType, typename ValueType>
template <storm::dd::DdType DdType, typename ValueType, typename ExportValueType = ValueType>
std::pair<std::shared_ptr<storm::models::ModelBase>, bool> preprocessDdModel(std::shared_ptr<storm::models::symbolic::Model<DdType, ValueType>> const& model, SymbolicInput const& input) {
auto bisimulationSettings = storm::settings::getModule<storm::settings::modules::BisimulationSettings>();
auto generalSettings = storm::settings::getModule<storm::settings::modules::GeneralSettings>();
std::pair<std::shared_ptr<storm::models::Model<ValueType>>, bool> result = std::make_pair(model, false);
std::pair<std::shared_ptr<storm::models::Model<ValueType>>, bool> intermediateResult = std::make_pair(model, false);
if (model->isOfType(storm::models::ModelType::MarkovAutomaton)) {
intermediateResult.first = preprocessDdMarkovAutomaton(intermediateResult.first->template as<storm::models::symbolic::Model<DdType, ValueType>>());
intermediateResult.second = true;
}
std::unique_ptr<std::pair<std::shared_ptr<storm::models::Model<ExportValueType>>, bool>> result;
auto symbolicModel = intermediateResult.first->template as<storm::models::symbolic::Model<DdType, ValueType>>();
if (generalSettings.isBisimulationSet()) {
result.first = preprocessDdModelBisimulation(model, input, bisimulationSettings);
result.second = true;
std::shared_ptr<storm::models::Model<ExportValueType>> newModel = preprocessDdModelBisimulation<DdType, ValueType, ExportValueType>(symbolicModel, input, bisimulationSettings);
result = std::make_unique<std::pair<std::shared_ptr<storm::models::Model<ExportValueType>>, bool>>(newModel, true);
} else {
result = std::make_unique<std::pair<std::shared_ptr<storm::models::Model<ExportValueType>>, bool>>(symbolicModel->template toValueType<ExportValueType>(), !std::is_same<ValueType, ExportValueType>::value);
}
return result;
if (result && result->first->isSymbolicModel() && storm::settings::getModule<storm::settings::modules::CoreSettings>().getEngine() == storm::settings::modules::CoreSettings::Engine::DdSparse) {
// Mark as changed.
result->second = true;
std::shared_ptr<storm::models::symbolic::Model<DdType, ExportValueType>> symbolicModel = result->first->template as<storm::models::symbolic::Model<DdType, ExportValueType>>();
if (symbolicModel->isOfType(storm::models::ModelType::Dtmc)) {
storm::transformer::SymbolicDtmcToSparseDtmcTransformer<DdType, ExportValueType> transformer;
result->first = transformer.translate(*symbolicModel->template as<storm::models::symbolic::Dtmc<DdType, ExportValueType>>());
} else if (symbolicModel->isOfType(storm::models::ModelType::Ctmc)) {
storm::transformer::SymbolicCtmcToSparseCtmcTransformer<DdType, ExportValueType> transformer;
result->first = transformer.translate(*symbolicModel->template as<storm::models::symbolic::Ctmc<DdType, ExportValueType>>());
} else if (symbolicModel->isOfType(storm::models::ModelType::Mdp)) {
storm::transformer::SymbolicMdpToSparseMdpTransformer<DdType, ExportValueType> transformer;
result->first = transformer.translate(*symbolicModel->template as<storm::models::symbolic::Mdp<DdType, ExportValueType>>());
} else {
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "The translation to a sparse model is not supported for the given model type.");
}
}
return *result;
}
template <storm::dd::DdType DdType, typename ValueType>
template <storm::dd::DdType DdType, typename BuildValueType, typename ExportValueType = BuildValueType>
std::pair<std::shared_ptr<storm::models::ModelBase>, bool> preprocessModel(std::shared_ptr<storm::models::ModelBase> const& model, SymbolicInput const& input) {
storm::utility::Stopwatch preprocessingWatch(true);
std::pair<std::shared_ptr<storm::models::ModelBase>, bool> result = std::make_pair(model, false);
if (model->isSparseModel()) {
result = preprocessSparseModel<ValueType>(result.first->as<storm::models::sparse::Model<ValueType>>(), input);
result = preprocessSparseModel<BuildValueType>(result.first->as<storm::models::sparse::Model<BuildValueType>>(), input);
} else {
STORM_LOG_ASSERT(model->isSymbolicModel(), "Unexpected model type.");
result = preprocessDdModel<DdType, ValueType>(result.first->as<storm::models::symbolic::Model<DdType, ValueType>>(), input);
result = preprocessDdModel<DdType, BuildValueType, ExportValueType>(result.first->as<storm::models::symbolic::Model<DdType, BuildValueType>>(), input);
}
preprocessingWatch.stop();
@ -377,8 +424,6 @@ namespace storm {
auto counterexampleSettings = storm::settings::getModule<storm::settings::modules::CounterexampleGeneratorSettings>();
if (counterexampleSettings.isMinimalCommandSetGenerationSet()) {
STORM_LOG_THROW(input.model && input.model.get().isPrismProgram(), storm::exceptions::NotSupportedException, "Minimal command set counterexamples are only supported for PRISM model input.");
storm::prism::Program const& program = input.model.get().asPrismProgram();
bool useMilp = counterexampleSettings.isUseMilpBasedMinimalCommandSetGenerationSet();
for (auto const& property : input.properties) {
@ -387,13 +432,14 @@ namespace storm {
storm::utility::Stopwatch watch(true);
if (useMilp) {
STORM_LOG_THROW(sparseModel->isOfType(storm::models::ModelType::Mdp), storm::exceptions::NotSupportedException, "Counterexample generation using MILP is currently only supported for MDPs.");
counterexample = storm::api::computePrismHighLevelCounterexampleMilp(program, sparseModel->template as<storm::models::sparse::Mdp<ValueType>>(), property.getRawFormula());
counterexample = storm::api::computeHighLevelCounterexampleMilp(input.model.get(), sparseModel->template as<storm::models::sparse::Mdp<ValueType>>(), property.getRawFormula());
} else {
STORM_LOG_THROW(sparseModel->isOfType(storm::models::ModelType::Dtmc) || sparseModel->isOfType(storm::models::ModelType::Mdp), storm::exceptions::NotSupportedException, "Counterexample generation using MaxSAT is currently only supported for discrete-time models.");
if (sparseModel->isOfType(storm::models::ModelType::Dtmc)) {
counterexample = storm::api::computePrismHighLevelCounterexampleMaxSmt(program, sparseModel->template as<storm::models::sparse::Dtmc<ValueType>>(), property.getRawFormula());
counterexample = storm::api::computeHighLevelCounterexampleMaxSmt(input.model.get(), sparseModel->template as<storm::models::sparse::Dtmc<ValueType>>(), property.getRawFormula());
} else {
counterexample = storm::api::computePrismHighLevelCounterexampleMaxSmt(program, sparseModel->template as<storm::models::sparse::Mdp<ValueType>>(), property.getRawFormula());
counterexample = storm::api::computeHighLevelCounterexampleMaxSmt(input.model.get(), sparseModel->template as<storm::models::sparse::Mdp<ValueType>>(), property.getRawFormula());
}
}
watch.stop();
@ -617,13 +663,13 @@ namespace storm {
}
}
template <storm::dd::DdType DdType, typename ValueType>
template <storm::dd::DdType DdType, typename BuildValueType, typename VerificationValueType = BuildValueType>
std::shared_ptr<storm::models::ModelBase> buildPreprocessExportModelWithValueTypeAndDdlib(SymbolicInput const& input, storm::settings::modules::CoreSettings::Engine engine) {
auto ioSettings = storm::settings::getModule<storm::settings::modules::IOSettings>();
auto buildSettings = storm::settings::getModule<storm::settings::modules::BuildSettings>();
std::shared_ptr<storm::models::ModelBase> model;
if (!buildSettings.isNoBuildModelSet()) {
model = buildModel<DdType, ValueType>(engine, input, ioSettings);
model = buildModel<DdType, BuildValueType>(engine, input, ioSettings);
}
if (model) {
@ -633,17 +679,17 @@ namespace storm {
STORM_LOG_THROW(model || input.properties.empty(), storm::exceptions::InvalidSettingsException, "No input model.");
if (model) {
auto preprocessingResult = preprocessModel<DdType, ValueType>(model, input);
auto preprocessingResult = preprocessModel<DdType, BuildValueType, VerificationValueType>(model, input);
if (preprocessingResult.second) {
model = preprocessingResult.first;
model->printModelInformationToStream(std::cout);
}
exportModel<DdType, ValueType>(model, input);
exportModel<DdType, BuildValueType>(model, input);
}
return model;
}
template <storm::dd::DdType DdType, typename ValueType>
template <storm::dd::DdType DdType, typename BuildValueType, typename VerificationValueType = BuildValueType>
void processInputWithValueTypeAndDdlib(SymbolicInput const& input) {
auto coreSettings = storm::settings::getModule<storm::settings::modules::CoreSettings>();
auto abstractionSettings = storm::settings::getModule<storm::settings::modules::AbstractionSettings>();
@ -652,19 +698,19 @@ namespace storm {
storm::settings::modules::CoreSettings::Engine engine = coreSettings.getEngine();
if (engine == storm::settings::modules::CoreSettings::Engine::AbstractionRefinement && abstractionSettings.getAbstractionRefinementMethod() == storm::settings::modules::AbstractionSettings::Method::Games) {
verifyWithAbstractionRefinementEngine<DdType, ValueType>(input);
verifyWithAbstractionRefinementEngine<DdType, VerificationValueType>(input);
} else if (engine == storm::settings::modules::CoreSettings::Engine::Exploration) {
verifyWithExplorationEngine<ValueType>(input);
verifyWithExplorationEngine<VerificationValueType>(input);
} else {
std::shared_ptr<storm::models::ModelBase> model = buildPreprocessExportModelWithValueTypeAndDdlib<DdType, ValueType>(input, engine);
std::shared_ptr<storm::models::ModelBase> model = buildPreprocessExportModelWithValueTypeAndDdlib<DdType, BuildValueType, VerificationValueType>(input, engine);
if (model) {
if (coreSettings.isCounterexampleSet()) {
auto ioSettings = storm::settings::getModule<storm::settings::modules::IOSettings>();
generateCounterexamples<ValueType>(model, input);
generateCounterexamples<VerificationValueType>(model, input);
} else {
auto ioSettings = storm::settings::getModule<storm::settings::modules::IOSettings>();
verifyModel<DdType, ValueType>(model, input, coreSettings);
verifyModel<DdType, VerificationValueType>(model, input, coreSettings);
}
}
}
@ -674,12 +720,16 @@ namespace storm {
void processInputWithValueType(SymbolicInput const& input) {
auto coreSettings = storm::settings::getModule<storm::settings::modules::CoreSettings>();
auto generalSettings = storm::settings::getModule<storm::settings::modules::GeneralSettings>();
auto bisimulationSettings = storm::settings::getModule<storm::settings::modules::BisimulationSettings>();
if (coreSettings.getDdLibraryType() == storm::dd::DdType::CUDD && coreSettings.isDdLibraryTypeSetFromDefaultValue() && generalSettings.isExactSet()) {
STORM_LOG_INFO("Switching to DD library sylvan to allow for rational arithmetic.");
processInputWithValueTypeAndDdlib<storm::dd::DdType::Sylvan, ValueType>(input);
processInputWithValueTypeAndDdlib<storm::dd::DdType::Sylvan, storm::RationalNumber>(input);
} else if (coreSettings.getDdLibraryType() == storm::dd::DdType::CUDD && coreSettings.isDdLibraryTypeSetFromDefaultValue() && std::is_same<ValueType, double>::value && generalSettings.isBisimulationSet() && bisimulationSettings.useExactArithmeticInDdBisimulation()) {
STORM_LOG_INFO("Switching to DD library sylvan to allow for rational arithmetic.");
processInputWithValueTypeAndDdlib<storm::dd::DdType::Sylvan, storm::RationalNumber, double>(input);
} else if (coreSettings.getDdLibraryType() == storm::dd::DdType::CUDD) {
processInputWithValueTypeAndDdlib<storm::dd::DdType::CUDD, ValueType>(input);
processInputWithValueTypeAndDdlib<storm::dd::DdType::CUDD, double>(input);
} else {
STORM_LOG_ASSERT(coreSettings.getDdLibraryType() == storm::dd::DdType::Sylvan, "Unknown DD library.");
processInputWithValueTypeAndDdlib<storm::dd::DdType::Sylvan, ValueType>(input);

2
src/storm-dft-cli/CMakeLists.txt

@ -6,4 +6,4 @@ set_target_properties(storm-dft-cli PROPERTIES OUTPUT_NAME "storm-dft")
add_dependencies(binaries storm-dft-cli)
# installation
install(TARGETS storm-dft-cli RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-dft-cli EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

201
src/storm-dft-cli/storm-dft.cpp

@ -1,166 +1,65 @@
#include "storm-dft/settings/DftSettings.h"
#include "storm-dft/api/storm-dft.h"
#include "storm-dft/settings/DftSettings.h"
#include "storm-dft/settings/modules/DftIOSettings.h"
#include "storm-dft/settings/modules/FaultTreeSettings.h"
#include "storm/settings/modules/IOSettings.h"
#include "storm/settings/modules/ResourceSettings.h"
#include "storm/utility/initialize.h"
#include "storm/api/storm.h"
#include "storm-cli-utilities/cli.h"
#include "storm-dft/parser/DFTGalileoParser.h"
#include "storm-dft/parser/DFTJsonParser.h"
#include "storm-dft/modelchecker/dft/DFTModelChecker.h"
#include "storm-dft/modelchecker/dft/DFTASFChecker.h"
#include "storm-dft/transformations/DftToGspnTransformator.h"
#include "storm-dft/storage/dft/DftJsonExporter.h"
#include "storm-gspn/storage/gspn/GSPN.h"
#include "storm-gspn/storm-gspn.h"
#include <boost/lexical_cast.hpp>
#include <memory>
/*!
* Analyse the given DFT according to the given properties.
* We first load the DFT from the given file, then build the corresponding model and finally check it against the given properties.
*
* @param properties PCTL formulas capturing the properties to check.
* @param symred Flag whether symmetry reduction should be used.
* @param allowModularisation Flag whether modularisation should be applied if possible.
* @param enableDC Flag whether Don't Care propagation should be used.
* @param approximationError Allowed approximation error.
*/
template <typename ValueType>
void analyzeDFT(std::vector<std::string> const& properties, bool symred, bool allowModularisation, bool enableDC, double approximationError) {
storm::settings::modules::DftIOSettings const& dftIOSettings = storm::settings::getModule<storm::settings::modules::DftIOSettings>();
std::shared_ptr<storm::storage::DFT<ValueType>> dft;
// Build DFT from given file.
if (dftIOSettings.isDftJsonFileSet()) {
storm::parser::DFTJsonParser<ValueType> parser;
std::cout << "Loading DFT from file " << dftIOSettings.getDftJsonFilename() << std::endl;
dft = std::make_shared<storm::storage::DFT<ValueType>>(parser.parseJson(dftIOSettings.getDftJsonFilename()));
} else {
storm::parser::DFTGalileoParser<ValueType> parser;
std::cout << "Loading DFT from file " << dftIOSettings.getDftFilename() << std::endl;
dft = std::make_shared<storm::storage::DFT<ValueType>>(parser.parseDFT(dftIOSettings.getDftFilename()));
}
// Build properties
std::string propString = properties[0];
for (size_t i = 1; i < properties.size(); ++i) {
propString += ";" + properties[i];
}
std::vector<std::shared_ptr<storm::logic::Formula const>> props = storm::api::extractFormulasFromProperties(storm::api::parseProperties(propString));
STORM_LOG_ASSERT(props.size() > 0, "No properties found.");
// Check model
storm::modelchecker::DFTModelChecker<ValueType> modelChecker;
modelChecker.check(*dft, props, symred, allowModularisation, enableDC, approximationError);
modelChecker.printTimings();
modelChecker.printResults();
}
/*!
* Analyze the DFT with use of SMT solving.
*
* @param filename Path to DFT file in Galileo format.
* Process command-line options and start computations.
*/
template<typename ValueType>
void analyzeWithSMT(std::string filename) {
std::cout << "Running DFT analysis on file " << filename << " with use of SMT" << std::endl;
storm::parser::DFTGalileoParser<ValueType> parser;
storm::storage::DFT<ValueType> dft = parser.parseDFT(filename);
storm::modelchecker::DFTASFChecker asfChecker(dft);
asfChecker.convert();
asfChecker.toFile("test.smt2");
//bool sat = dftSmtBuilder.check();
//std::cout << "SMT result: " << sat << std::endl;
}
void processOptions() {
// Start by setting some urgent options (log levels, resources, etc.)
storm::cli::setUrgentOptions();
// storm::cli::processOptions();
storm::settings::modules::DftIOSettings const& dftIOSettings = storm::settings::getModule<storm::settings::modules::DftIOSettings>();
storm::settings::modules::FaultTreeSettings const& faultTreeSettings = storm::settings::getModule<storm::settings::modules::FaultTreeSettings>();
storm::settings::modules::GeneralSettings const& generalSettings = storm::settings::getModule<storm::settings::modules::GeneralSettings>();
storm::settings::modules::IOSettings const& ioSettings = storm::settings::getModule<storm::settings::modules::IOSettings>();
if (!dftIOSettings.isDftFileSet() && !dftIOSettings.isDftJsonFileSet()) {
STORM_LOG_THROW(false, storm::exceptions::InvalidSettingsException, "No input model.");
}
// Build DFT from given file
std::shared_ptr<storm::storage::DFT<ValueType>> dft;
if (dftIOSettings.isDftJsonFileSet()) {
STORM_LOG_DEBUG("Loading DFT from file " << dftIOSettings.getDftJsonFilename());
dft = storm::api::loadDFTJson<ValueType>(dftIOSettings.getDftJsonFilename());
} else {
STORM_LOG_DEBUG("Loading DFT from file " << dftIOSettings.getDftFilename());
dft = storm::api::loadDFTGalileo<ValueType>(dftIOSettings.getDftFilename());
}
if (dftIOSettings.isDisplayStatsSet()) {
std::cout << "=============DFT Statistics==============" << std::endl;
dft->writeStatsToStream(std::cout);
std::cout << "=========================================" << std::endl;
}
if (dftIOSettings.isExportToJson()) {
STORM_LOG_THROW(dftIOSettings.isDftFileSet(), storm::exceptions::InvalidSettingsException, "No input model in Galileo format given.");
storm::parser::DFTGalileoParser<double> parser;
storm::storage::DFT<double> dft = parser.parseDFT(dftIOSettings.getDftFilename());
// Export to json
storm::storage::DftJsonExporter<double>::toFile(dft, dftIOSettings.getExportJsonFilename());
storm::utility::cleanUp();
storm::api::exportDFTToJson<ValueType>(*dft, dftIOSettings.getExportJsonFilename());
return;
}
if (dftIOSettings.isTransformToGspn()) {
std::shared_ptr<storm::storage::DFT<double>> dft;
if (dftIOSettings.isDftJsonFileSet()) {
storm::parser::DFTJsonParser<double> parser;
dft = std::make_shared<storm::storage::DFT<double>>(parser.parseJson(dftIOSettings.getDftJsonFilename()));
} else {
storm::parser::DFTGalileoParser<double> parser(true, false);
dft = std::make_shared<storm::storage::DFT<double>>(parser.parseDFT(dftIOSettings.getDftFilename()));
}
storm::transformations::dft::DftToGspnTransformator<double> gspnTransformator(*dft);
gspnTransformator.transform();
storm::gspn::GSPN* gspn = gspnTransformator.obtainGSPN();
uint64_t toplevelFailedPlace = gspnTransformator.toplevelFailedPlaceId();
storm::handleGSPNExportSettings(*gspn);
std::shared_ptr<storm::expressions::ExpressionManager> const& exprManager = gspn->getExpressionManager();
storm::builder::JaniGSPNBuilder builder(*gspn);
storm::jani::Model* model = builder.build();
storm::jani::Variable const& topfailedVar = builder.getPlaceVariable(toplevelFailedPlace);
storm::expressions::Expression targetExpression = exprManager->integer(1) == topfailedVar.getExpressionVariable().getExpression();
auto evtlFormula = std::make_shared<storm::logic::AtomicExpressionFormula>(targetExpression);
auto tbFormula = std::make_shared<storm::logic::BoundedUntilFormula>(std::make_shared<storm::logic::BooleanLiteralFormula>(true), evtlFormula, storm::logic::TimeBound(false, exprManager->integer(0)), storm::logic::TimeBound(false, exprManager->integer(10)), storm::logic::TimeBoundReference(storm::logic::TimeBoundType::Time));
auto tbUntil = std::make_shared<storm::logic::ProbabilityOperatorFormula>(tbFormula);
auto evFormula = std::make_shared<storm::logic::EventuallyFormula>(evtlFormula, storm::logic::FormulaContext::Time);
auto rewFormula = std::make_shared<storm::logic::TimeOperatorFormula>(evFormula, storm::logic::OperatorInformation(), storm::logic::RewardMeasureType::Expectation);
storm::settings::modules::JaniExportSettings const& janiSettings = storm::settings::getModule<storm::settings::modules::JaniExportSettings>();
if (janiSettings.isJaniFileSet()) {
storm::api::exportJaniModel(*model, {storm::jani::Property("time-bounded", tbUntil), storm::jani::Property("mttf", rewFormula)}, janiSettings.getJaniFilename());
}
delete model;
delete gspn;
storm::utility::cleanUp();
// Transform to GSPN
storm::api::transformToGSPN(*dft);
return;
}
bool parametric = false;
#ifdef STORM_HAVE_CARL
parametric = generalSettings.isParametricSet();
#endif
#ifdef STORM_HAVE_Z3
if (faultTreeSettings.solveWithSMT()) {
// Solve with SMT
if (parametric) {
// analyzeWithSMT<storm::RationalFunction>(dftSettings.getDftFilename());
} else {
analyzeWithSMT<double>(dftIOSettings.getDftFilename());
}
storm::utility::cleanUp();
STORM_LOG_DEBUG("Running DFT analysis with use of SMT");
storm::api::exportDFTToSMT(*dft, "test.smt2");
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Only exported to SMT file 'test.smt2' but analysis is not supported.");
return;
}
#endif
@ -172,9 +71,8 @@ void processOptions() {
optimizationDirection = "max";
}
// Construct properties to check for
// Construct properties to analyse
std::vector<std::string> properties;
if (ioSettings.isPropertySet()) {
properties.push_back(ioSettings.getProperty());
}
@ -197,38 +95,31 @@ void processOptions() {
}
}
if (properties.empty()) {
STORM_LOG_THROW(false, storm::exceptions::InvalidSettingsException, "No property given.");
// Build properties
STORM_LOG_THROW(!properties.empty(), storm::exceptions::InvalidSettingsException, "No property given.");
std::string propString = properties[0];
for (size_t i = 1; i < properties.size(); ++i) {
propString += ";" + properties[i];
}
std::vector<std::shared_ptr<storm::logic::Formula const>> props = storm::api::extractFormulasFromProperties(storm::api::parseProperties(propString));
STORM_LOG_ASSERT(props.size() > 0, "No properties found.");
// Set possible approximation error
double approximationError = 0.0;
// Carry out the actual analysis
if (faultTreeSettings.isApproximationErrorSet()) {
approximationError = faultTreeSettings.getApproximationError();
}
// From this point on we are ready to carry out the actual computations.
if (parametric) {
#ifdef STORM_HAVE_CARL
analyzeDFT<storm::RationalFunction>(properties, faultTreeSettings.useSymmetryReduction(), faultTreeSettings.useModularisation(), !faultTreeSettings.isDisableDC(), approximationError);
#else
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Parameters are not supported in this build.");
#endif
// Approximate analysis
storm::api::analyzeDFTApprox<ValueType>(*dft, props, faultTreeSettings.useSymmetryReduction(), faultTreeSettings.useModularisation(), !faultTreeSettings.isDisableDC(), faultTreeSettings.getApproximationError(), true);
} else {
analyzeDFT<double>(properties, faultTreeSettings.useSymmetryReduction(), faultTreeSettings.useModularisation(), !faultTreeSettings.isDisableDC(), approximationError);
storm::api::analyzeDFT<ValueType>(*dft, props, faultTreeSettings.useSymmetryReduction(), faultTreeSettings.useModularisation(), !faultTreeSettings.isDisableDC(), true);
}
}
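/*
 * Hedged end-to-end sketch of the storm::api calls used above; the file name and the
 * property string are placeholders, and the boolean flags simply enable all reductions.
 *
 *   auto dft = storm::api::loadDFTGalileo<double>("example.dft");
 *   std::string propString = "T=? [F \"failed\"]";
 *   auto props = storm::api::extractFormulasFromProperties(storm::api::parseProperties(propString));
 *   storm::api::analyzeDFT<double>(*dft, props, true, true, true, true);
 */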
/*!
* Entry point for the DyFTeE backend.
* Entry point for Storm-DFT.
*
* @param argc The argc argument of main().
* @param argv The argv argument of main().
* @return Return code, 0 if successful, not 0 otherwise.
*/
/*!
* Main entry point of the executable storm-dft.
*/
int main(const int argc, const char** argv) {
try {
storm::utility::setUp();
@ -240,8 +131,12 @@ int main(const int argc, const char** argv) {
return -1;
}
processOptions();
//storm::pars::processOptions();
storm::settings::modules::GeneralSettings const& generalSettings = storm::settings::getModule<storm::settings::modules::GeneralSettings>();
if (generalSettings.isParametricSet()) {
processOptions<storm::RationalFunction>();
} else {
processOptions<double>();
}
totalTimer.stop();
if (storm::settings::getModule<storm::settings::modules::ResourceSettings>().isPrintTimeAndMemorySet()) {
@ -252,10 +147,10 @@ int main(const int argc, const char** argv) {
storm::utility::cleanUp();
return 0;
} catch (storm::exceptions::BaseException const& exception) {
STORM_LOG_ERROR("An exception caused Storm-DyFTeE to terminate. The message of the exception is: " << exception.what());
STORM_LOG_ERROR("An exception caused Storm-DFT to terminate. The message of the exception is: " << exception.what());
return 1;
} catch (std::exception const& exception) {
STORM_LOG_ERROR("An unexpected exception occurred and caused Storm-DyFTeE to terminate. The message of this exception is: " << exception.what());
STORM_LOG_ERROR("An unexpected exception occurred and caused Storm-DFT to terminate. The message of this exception is: " << exception.what());
return 2;
}
}

2
src/storm-dft/CMakeLists.txt

@ -36,5 +36,5 @@ add_custom_target(copy_storm_dft_headers DEPENDS ${STORM_DFT_OUTPUT_HEADERS} ${S
add_dependencies(storm-dft copy_storm_dft_headers)
# installation
install(TARGETS storm-dft RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-dft EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

67
src/storm-dft/api/storm-dft.cpp

@ -0,0 +1,67 @@
#include "storm-dft/api/storm-dft.h"
namespace storm {
namespace api {
template<>
void exportDFTToJson(storm::storage::DFT<double> const& dft, std::string const& file) {
storm::storage::DftJsonExporter<double>::toFile(dft, file);
}
template<>
void exportDFTToJson(storm::storage::DFT<storm::RationalFunction> const& dft, std::string const& file) {
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Export to JSON not supported for this data type.");
}
template<>
void exportDFTToSMT(storm::storage::DFT<double> const& dft, std::string const& file) {
storm::modelchecker::DFTASFChecker asfChecker(dft);
asfChecker.convert();
asfChecker.toFile(file);
}
template<>
void exportDFTToSMT(storm::storage::DFT<storm::RationalFunction> const& dft, std::string const& file) {
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Export to SMT does not support this data type.");
}
template<>
void transformToGSPN(storm::storage::DFT<double> const& dft) {
// Transform to GSPN
storm::transformations::dft::DftToGspnTransformator<double> gspnTransformator(dft);
bool smart = true;
gspnTransformator.transform(smart);
storm::gspn::GSPN* gspn = gspnTransformator.obtainGSPN();
uint64_t toplevelFailedPlace = gspnTransformator.toplevelFailedPlaceId();
storm::api::handleGSPNExportSettings(*gspn);
std::shared_ptr<storm::expressions::ExpressionManager> const& exprManager = gspn->getExpressionManager();
storm::builder::JaniGSPNBuilder builder(*gspn);
storm::jani::Model* model = builder.build();
storm::jani::Variable const& topfailedVar = builder.getPlaceVariable(toplevelFailedPlace);
storm::expressions::Expression targetExpression = exprManager->integer(1) == topfailedVar.getExpressionVariable().getExpression();
auto evtlFormula = std::make_shared<storm::logic::AtomicExpressionFormula>(targetExpression);
auto tbFormula = std::make_shared<storm::logic::BoundedUntilFormula>(std::make_shared<storm::logic::BooleanLiteralFormula>(true), evtlFormula, storm::logic::TimeBound(false, exprManager->integer(0)), storm::logic::TimeBound(false, exprManager->integer(10)), storm::logic::TimeBoundReference(storm::logic::TimeBoundType::Time));
auto tbUntil = std::make_shared<storm::logic::ProbabilityOperatorFormula>(tbFormula);
auto evFormula = std::make_shared<storm::logic::EventuallyFormula>(evtlFormula, storm::logic::FormulaContext::Time);
auto rewFormula = std::make_shared<storm::logic::TimeOperatorFormula>(evFormula, storm::logic::OperatorInformation(), storm::logic::RewardMeasureType::Expectation);
storm::settings::modules::JaniExportSettings const& janiSettings = storm::settings::getModule<storm::settings::modules::JaniExportSettings>();
if (janiSettings.isJaniFileSet()) {
storm::api::exportJaniModel(*model, {storm::jani::Property("time-bounded", tbUntil), storm::jani::Property("mttf", rewFormula)}, janiSettings.getJaniFilename());
}
delete model;
delete gspn;
}
template<>
void transformToGSPN(storm::storage::DFT<storm::RationalFunction> const& dft) {
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Transformation to GSPN not supported for this data type.");
}
}
}

117
src/storm-dft/api/storm-dft.h

@ -0,0 +1,117 @@
#pragma once
#include <type_traits>
#include "storm-dft/parser/DFTGalileoParser.h"
#include "storm-dft/parser/DFTJsonParser.h"
#include "storm-dft/storage/dft/DftJsonExporter.h"
#include "storm-dft/modelchecker/dft/DFTModelChecker.h"
#include "storm-dft/modelchecker/dft/DFTASFChecker.h"
#include "storm-dft/transformations/DftToGspnTransformator.h"
#include "storm-gspn/api/storm-gspn.h"
namespace storm {
namespace api {
/*!
* Load DFT from Galileo file.
*
* @param file File containing DFT description in Galileo format.
*
* @return DFT.
*/
template<typename ValueType>
std::shared_ptr<storm::storage::DFT<ValueType>> loadDFTGalileo(std::string const& file) {
return std::make_shared<storm::storage::DFT<ValueType>>(storm::parser::DFTGalileoParser<ValueType>::parseDFT(file));
}
/*!
* Load DFT from JSON file.
*
* @param file File containing DFT description in JSON format.
*
* @return DFT.
*/
template<typename ValueType>
std::shared_ptr<storm::storage::DFT<ValueType>> loadDFTJson(std::string const& file) {
storm::parser::DFTJsonParser<ValueType> parser;
return std::make_shared<storm::storage::DFT<ValueType>>(parser.parseJson(file));
}
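// Hedged usage sketch for the two loaders above (file names are placeholders):
//   std::shared_ptr<storm::storage::DFT<double>> dftA = storm::api::loadDFTGalileo<double>("tree.dft");
//   std::shared_ptr<storm::storage::DFT<double>> dftB = storm::api::loadDFTJson<double>("tree.json");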
/*!
* Analyse the given DFT according to the given properties.
* First, the Markov model is built from the DFT; this model is then checked against the given properties.
*
* @param dft DFT.
* @param properties PCTL formulas capturing the properties to check.
* @param symred Flag whether symmetry reduction should be used.
* @param allowModularisation Flag whether modularisation should be applied if possible.
* @param enableDC Flag whether Don't Care propagation should be used.
*
* @return Result.
*/
template <typename ValueType>
typename storm::modelchecker::DFTModelChecker<ValueType>::dft_results analyzeDFT(storm::storage::DFT<ValueType> const& dft, std::vector<std::shared_ptr<storm::logic::Formula const>> const& properties, bool symred, bool allowModularisation, bool enableDC, bool printOutput) {
storm::modelchecker::DFTModelChecker<ValueType> modelChecker;
typename storm::modelchecker::DFTModelChecker<ValueType>::dft_results results = modelChecker.check(dft, properties, symred, allowModularisation, enableDC, 0.0);
if (printOutput) {
modelChecker.printTimings();
modelChecker.printResults();
}
return results;
}
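// Hedged sketch for consuming the returned dft_results (a vector of
// boost::variant<ValueType, approximation_result>, cf. DFTModelChecker.h); the property string is illustrative:
//   auto props = storm::api::extractFormulasFromProperties(storm::api::parseProperties("T=? [F \"failed\"]"));
//   auto results = storm::api::analyzeDFT<double>(*dft, props, true, true, true, false);
//   double expectedTime = boost::get<double>(results[0]);  // exact analysis yields plain values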
/*!
* Approximate the analysis result of the given DFT according to the given properties.
* First, the Markov model is built from the DFT; this model is then checked against the given properties.
*
* @param dft DFT.
* @param properties PCTL formulas capturing the properties to check.
* @param symred Flag whether symmetry reduction should be used.
* @param allowModularisation Flag whether modularisation should be applied if possible.
* @param enableDC Flag whether Don't Care propagation should be used.
* @param approximationError Allowed approximation error.
*
* @return Result.
*/
template <typename ValueType>
typename storm::modelchecker::DFTModelChecker<ValueType>::dft_results analyzeDFTApprox(storm::storage::DFT<ValueType> const& dft, std::vector<std::shared_ptr<storm::logic::Formula const>> const& properties, bool symred, bool allowModularisation, bool enableDC, double approximationError, bool printOutput) {
storm::modelchecker::DFTModelChecker<ValueType> modelChecker;
typename storm::modelchecker::DFTModelChecker<ValueType>::dft_results results = modelChecker.check(dft, properties, symred, allowModularisation, enableDC, approximationError);
if (printOutput) {
modelChecker.printTimings();
modelChecker.printResults();
}
return results;
}
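// Hedged sketch (the allowed error 0.01 is an arbitrary choice); the approximation yields a
// (lower, upper) bound pair instead of a single value:
//   auto results = storm::api::analyzeDFTApprox<double>(*dft, props, true, true, true, 0.01, false);
//   auto bounds = boost::get<std::pair<double, double>>(results[0]);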
/*!
* Export DFT to JSON file.
*
* @param dft DFT.
* @param file File.
*/
template<typename ValueType>
void exportDFTToJson(storm::storage::DFT<ValueType> const& dft, std::string const& file);
/*!
* Export DFT to SMT encoding.
*
* @param dft DFT.
* @param file File.
*/
template<typename ValueType>
void exportDFTToSMT(storm::storage::DFT<ValueType> const& dft, std::string const& file);
/*!
* Transform DFT to GSPN.
*
* @param dft DFT.
*/
template<typename ValueType>
void transformToGSPN(storm::storage::DFT<ValueType> const& dft);
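// Hedged usage sketch for the three entry points above; only the double specialisations are
// implemented in this commit, and file names are placeholders:
//   storm::api::exportDFTToJson<double>(*dft, "tree.json");
//   storm::api::exportDFTToSMT<double>(*dft, "tree.smt2");
//   storm::api::transformToGSPN<double>(*dft);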
}
}

118
src/storm-dft/storage/dft/DFTBuilder.cpp → src/storm-dft/builder/DFTBuilder.cpp

@ -11,15 +11,15 @@
namespace storm {
namespace storage {
namespace builder {
template<typename ValueType>
std::size_t DFTBuilder<ValueType>::mUniqueOffset = 0;
template<typename ValueType>
DFT<ValueType> DFTBuilder<ValueType>::build() {
storm::storage::DFT<ValueType> DFTBuilder<ValueType>::build() {
for(auto& elem : mChildNames) {
DFTGatePointer gate = std::static_pointer_cast<DFTGate<ValueType>>(elem.first);
DFTGatePointer gate = std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(elem.first);
for(auto const& child : elem.second) {
auto itFind = mElements.find(child);
if (itFind != mElements.end()) {
@ -54,16 +54,16 @@ namespace storm {
for(auto& elem : mDependencyChildNames) {
bool first = true;
std::vector<std::shared_ptr<DFTBE<ValueType>>> dependencies;
std::vector<std::shared_ptr<storm::storage::DFTBE<ValueType>>> dependencies;
for(auto const& childName : elem.second) {
auto itFind = mElements.find(childName);
STORM_LOG_ASSERT(itFind != mElements.end(), "Child '" << childName << "' not found");
DFTElementPointer childElement = itFind->second;
if (!first) {
STORM_LOG_ASSERT(childElement->isBasicElement(), "Child '" << childName << "' of dependency '" << elem.first->name() << "' must be BE.");
dependencies.push_back(std::static_pointer_cast<DFTBE<ValueType>>(childElement));
dependencies.push_back(std::static_pointer_cast<storm::storage::DFTBE<ValueType>>(childElement));
} else {
elem.first->setTriggerElement(std::static_pointer_cast<DFTGate<ValueType>>(childElement));
elem.first->setTriggerElement(std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(childElement));
childElement->addOutgoingDependency(elem.first);
}
first = false;
@ -92,7 +92,7 @@ namespace storm {
}
STORM_LOG_ASSERT(!mTopLevelIdentifier.empty(), "No top level element.");
DFT<ValueType> dft(elems, mElements[mTopLevelIdentifier]);
storm::storage::DFT<ValueType> dft(elems, mElements[mTopLevelIdentifier]);
// Set layout info
for (auto& elem : mElements) {
@ -113,7 +113,7 @@ namespace storm {
if(elem->nrChildren() == 0 || elem->isDependency()) {
elem->setRank(0);
} else {
DFTGatePointer gate = std::static_pointer_cast<DFTGate<ValueType>>(elem);
DFTGatePointer gate = std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(elem);
unsigned maxrnk = 0;
unsigned newrnk = 0;
@ -131,7 +131,7 @@ namespace storm {
}
template<typename ValueType>
bool DFTBuilder<ValueType>::addRestriction(std::string const& name, std::vector<std::string> const& children, DFTElementType tp) {
bool DFTBuilder<ValueType>::addRestriction(std::string const& name, std::vector<std::string> const& children, storm::storage::DFTElementType tp) {
if (children.size() <= 1) {
STORM_LOG_ERROR("Sequence enforcers require at least two children");
}
@ -140,10 +140,10 @@ namespace storm {
}
DFTRestrictionPointer restr;
switch (tp) {
case DFTElementType::SEQ:
restr = std::make_shared<DFTSeq<ValueType>>(mNextId++, name);
case storm::storage::DFTElementType::SEQ:
restr = std::make_shared<storm::storage::DFTSeq<ValueType>>(mNextId++, name);
break;
case DFTElementType::MUTEX:
case storm::storage::DFTElementType::MUTEX:
// TODO notice that mutex state generation support is lacking anyway, as DONT CARE propagation would be broken for this.
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Gate type not supported.");
break;
@ -159,7 +159,7 @@ namespace storm {
}
template<typename ValueType>
bool DFTBuilder<ValueType>::addStandardGate(std::string const& name, std::vector<std::string> const& children, DFTElementType tp) {
bool DFTBuilder<ValueType>::addStandardGate(std::string const& name, std::vector<std::string> const& children, storm::storage::DFTElementType tp) {
STORM_LOG_ASSERT(children.size() > 0, "No child for " << name);
if(mElements.count(name) != 0) {
// Element with that name already exists.
@ -167,28 +167,28 @@ namespace storm {
}
DFTElementPointer element;
switch(tp) {
case DFTElementType::AND:
element = std::make_shared<DFTAnd<ValueType>>(mNextId++, name);
case storm::storage::DFTElementType::AND:
element = std::make_shared<storm::storage::DFTAnd<ValueType>>(mNextId++, name);
break;
case DFTElementType::OR:
element = std::make_shared<DFTOr<ValueType>>(mNextId++, name);
case storm::storage::DFTElementType::OR:
element = std::make_shared<storm::storage::DFTOr<ValueType>>(mNextId++, name);
break;
case DFTElementType::PAND:
element = std::make_shared<DFTPand<ValueType>>(mNextId++, name, pandDefaultInclusive);
case storm::storage::DFTElementType::PAND:
element = std::make_shared<storm::storage::DFTPand<ValueType>>(mNextId++, name, pandDefaultInclusive);
break;
case DFTElementType::POR:
element = std::make_shared<DFTPor<ValueType>>(mNextId++, name, porDefaultInclusive);
case storm::storage::DFTElementType::POR:
element = std::make_shared<storm::storage::DFTPor<ValueType>>(mNextId++, name, porDefaultInclusive);
break;
case DFTElementType::SPARE:
element = std::make_shared<DFTSpare<ValueType>>(mNextId++, name);
case storm::storage::DFTElementType::SPARE:
element = std::make_shared<storm::storage::DFTSpare<ValueType>>(mNextId++, name);
break;
case DFTElementType::BE:
case DFTElementType::VOT:
case DFTElementType::PDEP:
case storm::storage::DFTElementType::BE:
case storm::storage::DFTElementType::VOT:
case storm::storage::DFTElementType::PDEP:
// Handled separately
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Gate type handled separately.");
case DFTElementType::CONSTF:
case DFTElementType::CONSTS:
case storm::storage::DFTElementType::CONSTF:
case storm::storage::DFTElementType::CONSTS:
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Gate type not supported.");
default:
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Gate type not known.");
@ -199,29 +199,29 @@ namespace storm {
}
template<typename ValueType>
void DFTBuilder<ValueType>::topoVisit(DFTElementPointer const& n, std::map<DFTElementPointer, topoSortColour, OrderElementsById<ValueType>>& visited, DFTElementVector& L) {
void DFTBuilder<ValueType>::topoVisit(DFTElementPointer const& n, std::map<DFTElementPointer, topoSortColour, storm::storage::OrderElementsById<ValueType>>& visited, DFTElementVector& L) {
if(visited[n] == topoSortColour::GREY) {
throw storm::exceptions::WrongFormatException("DFT is cyclic");
} else if(visited[n] == topoSortColour::WHITE) {
if(n->isGate()) {
visited[n] = topoSortColour::GREY;
for (DFTElementPointer const& c : std::static_pointer_cast<DFTGate<ValueType>>(n)->children()) {
for (DFTElementPointer const& c : std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(n)->children()) {
topoVisit(c, visited, L);
}
}
// TODO restrictions and dependencies have no parents, so this can be done more efficiently.
if(n->isRestriction()) {
visited[n] = topoSortColour::GREY;
for (DFTElementPointer const& c : std::static_pointer_cast<DFTRestriction<ValueType>>(n)->children()) {
for (DFTElementPointer const& c : std::static_pointer_cast<storm::storage::DFTRestriction<ValueType>>(n)->children()) {
topoVisit(c, visited, L);
}
}
if(n->isDependency()) {
visited[n] = topoSortColour::GREY;
for (DFTElementPointer const& c : std::static_pointer_cast<DFTDependency<ValueType>>(n)->dependentEvents()) {
for (DFTElementPointer const& c : std::static_pointer_cast<storm::storage::DFTDependency<ValueType>>(n)->dependentEvents()) {
topoVisit(c, visited, L);
}
topoVisit(std::static_pointer_cast<DFTDependency<ValueType>>(n)->triggerEvent(), visited, L);
topoVisit(std::static_pointer_cast<storm::storage::DFTDependency<ValueType>>(n)->triggerEvent(), visited, L);
}
visited[n] = topoSortColour::BLACK;
L.push_back(n);
@ -229,8 +229,8 @@ namespace storm {
}
template<typename ValueType>
std::vector<std::shared_ptr<DFTElement<ValueType>>> DFTBuilder<ValueType>::topoSort() {
std::map<DFTElementPointer, topoSortColour, OrderElementsById<ValueType>> visited;
std::vector<std::shared_ptr<storm::storage::DFTElement<ValueType>>> DFTBuilder<ValueType>::topoSort() {
std::map<DFTElementPointer, topoSortColour, storm::storage::OrderElementsById<ValueType>> visited;
for(auto const& e : mElements) {
visited.insert(std::make_pair(e.second, topoSortColour::WHITE));
}
@ -252,22 +252,22 @@ namespace storm {
void DFTBuilder<ValueType>::copyElement(DFTElementPointer element) {
std::vector<std::string> children;
switch (element->type()) {
case DFTElementType::AND:
case DFTElementType::OR:
case DFTElementType::PAND:
case DFTElementType::POR:
case DFTElementType::SPARE:
case DFTElementType::VOT:
case storm::storage::DFTElementType::AND:
case storm::storage::DFTElementType::OR:
case storm::storage::DFTElementType::PAND:
case storm::storage::DFTElementType::POR:
case storm::storage::DFTElementType::SPARE:
case storm::storage::DFTElementType::VOT:
{
for (DFTElementPointer const& elem : std::static_pointer_cast<DFTGate<ValueType>>(element)->children()) {
for (DFTElementPointer const& elem : std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(element)->children()) {
children.push_back(elem->name());
}
copyGate(std::static_pointer_cast<DFTGate<ValueType>>(element), children);
copyGate(std::static_pointer_cast<storm::storage::DFTGate<ValueType>>(element), children);
break;
}
case DFTElementType::BE:
case storm::storage::DFTElementType::BE:
{
std::shared_ptr<DFTBE<ValueType>> be = std::static_pointer_cast<DFTBE<ValueType>>(element);
std::shared_ptr<storm::storage::DFTBE<ValueType>> be = std::static_pointer_cast<storm::storage::DFTBE<ValueType>>(element);
ValueType dormancyFactor = storm::utility::zero<ValueType>();
if (be->canFail()) {
dormancyFactor = be->passiveFailureRate() / be->activeFailureRate();
@ -275,14 +275,14 @@ namespace storm {
addBasicElement(be->name(), be->activeFailureRate(), dormancyFactor, be->isTransient());
break;
}
case DFTElementType::CONSTF:
case DFTElementType::CONSTS:
case storm::storage::DFTElementType::CONSTF:
case storm::storage::DFTElementType::CONSTS:
// TODO
STORM_LOG_ASSERT(false, "Const elements not supported.");
break;
case DFTElementType::PDEP:
case storm::storage::DFTElementType::PDEP:
{
DFTDependencyPointer dependency = std::static_pointer_cast<DFTDependency<ValueType>>(element);
DFTDependencyPointer dependency = std::static_pointer_cast<storm::storage::DFTDependency<ValueType>>(element);
children.push_back(dependency->triggerEvent()->name());
for(auto const& depEv : dependency->dependentEvents()) {
children.push_back(depEv->name());
@ -290,10 +290,10 @@ namespace storm {
addDepElement(element->name(), children, dependency->probability());
break;
}
case DFTElementType::SEQ:
case DFTElementType::MUTEX:
case storm::storage::DFTElementType::SEQ:
case storm::storage::DFTElementType::MUTEX:
{
for (DFTElementPointer const& elem : std::static_pointer_cast<DFTRestriction<ValueType>>(element)->children()) {
for (DFTElementPointer const& elem : std::static_pointer_cast<storm::storage::DFTRestriction<ValueType>>(element)->children()) {
children.push_back(elem->name());
}
addRestriction(element->name(), children, element->type());
@ -308,15 +308,15 @@ namespace storm {
template<typename ValueType>
void DFTBuilder<ValueType>::copyGate(DFTGatePointer gate, std::vector<std::string> const& children) {
switch (gate->type()) {
case DFTElementType::AND:
case DFTElementType::OR:
case DFTElementType::PAND:
case DFTElementType::POR:
case DFTElementType::SPARE:
case storm::storage::DFTElementType::AND:
case storm::storage::DFTElementType::OR:
case storm::storage::DFTElementType::PAND:
case storm::storage::DFTElementType::POR:
case storm::storage::DFTElementType::SPARE:
addStandardGate(gate->name(), children, gate->type());
break;
case DFTElementType::VOT:
addVotElement(gate->name(), std::static_pointer_cast<DFTVot<ValueType>>(gate)->threshold(), children);
case storm::storage::DFTElementType::VOT:
addVotElement(gate->name(), std::static_pointer_cast<storm::storage::DFTVot<ValueType>>(gate)->threshold(), children);
break;
default:
STORM_LOG_ASSERT(false, "Dft type not known.");

50
src/storm-dft/storage/dft/DFTBuilder.h → src/storm-dft/builder/DFTBuilder.h

@ -11,18 +11,22 @@
namespace storm {
namespace storage {
// Forward declaration
template<typename ValueType>
class DFT;
}
namespace builder {
template<typename ValueType>
class DFTBuilder {
using DFTElementPointer = std::shared_ptr<DFTElement<ValueType>>;
using DFTElementPointer = std::shared_ptr<storm::storage::DFTElement<ValueType>>;
using DFTElementVector = std::vector<DFTElementPointer>;
using DFTGatePointer = std::shared_ptr<DFTGate<ValueType>>;
using DFTGatePointer = std::shared_ptr<storm::storage::DFTGate<ValueType>>;
using DFTGateVector = std::vector<DFTGatePointer>;
using DFTDependencyPointer = std::shared_ptr<DFTDependency<ValueType>>;
using DFTRestrictionPointer = std::shared_ptr<DFTRestriction<ValueType>>;
using DFTDependencyPointer = std::shared_ptr<storm::storage::DFTDependency<ValueType>>;
using DFTRestrictionPointer = std::shared_ptr<storm::storage::DFTRestriction<ValueType>>;
private:
std::size_t mNextId = 0;
@ -34,7 +38,7 @@ namespace storm {
std::unordered_map<DFTDependencyPointer, std::vector<std::string>> mDependencyChildNames;
std::vector<DFTDependencyPointer> mDependencies;
std::vector<DFTRestrictionPointer> mRestrictions;
std::unordered_map<std::string, DFTLayoutInfo> mLayoutInfo;
std::unordered_map<std::string, storm::storage::DFTLayoutInfo> mLayoutInfo;
public:
DFTBuilder(bool defaultInclusive = true, bool binaryDependencies = true) : pandDefaultInclusive(defaultInclusive), porDefaultInclusive(defaultInclusive), binaryDependencies(binaryDependencies) {
@ -42,47 +46,47 @@ namespace storm {
}
bool addAndElement(std::string const& name, std::vector<std::string> const& children) {
return addStandardGate(name, children, DFTElementType::AND);
return addStandardGate(name, children, storm::storage::DFTElementType::AND);
}
bool addOrElement(std::string const& name, std::vector<std::string> const& children) {
return addStandardGate(name, children, DFTElementType::OR);
return addStandardGate(name, children, storm::storage::DFTElementType::OR);
}
bool addPandElement(std::string const& name, std::vector<std::string> const& children) {
return addStandardGate(name, children, DFTElementType::PAND);
return addStandardGate(name, children, storm::storage::DFTElementType::PAND);
}
bool addPandElement(std::string const& name, std::vector<std::string> const& children, bool inclusive) {
bool tmpDefault = pandDefaultInclusive;
pandDefaultInclusive = inclusive;
bool result = addStandardGate(name, children, DFTElementType::PAND);
bool result = addStandardGate(name, children, storm::storage::DFTElementType::PAND);
pandDefaultInclusive = tmpDefault;
return result;
}
bool addPorElement(std::string const& name, std::vector<std::string> const& children) {
return addStandardGate(name, children, DFTElementType::POR);
return addStandardGate(name, children, storm::storage::DFTElementType::POR);
}
bool addPorElement(std::string const& name, std::vector<std::string> const& children, bool inclusive) {
bool tmpDefault = porDefaultInclusive;
porDefaultInclusive = inclusive;
bool result = addStandardGate(name, children, DFTElementType::POR);
bool result = addStandardGate(name, children, storm::storage::DFTElementType::POR);
pandDefaultInclusive = tmpDefault;
return result;
}
bool addSpareElement(std::string const& name, std::vector<std::string> const& children) {
return addStandardGate(name, children, DFTElementType::SPARE);
return addStandardGate(name, children, storm::storage::DFTElementType::SPARE);
}
bool addSequenceEnforcer(std::string const& name, std::vector<std::string> const& children) {
return addRestriction(name, children, DFTElementType::SEQ);
return addRestriction(name, children, storm::storage::DFTElementType::SEQ);
}
bool addMutex(std::string const& name, std::vector<std::string> const& children) {
return addRestriction(name, children, DFTElementType::MUTEX);
return addRestriction(name, children, storm::storage::DFTElementType::MUTEX);
}
bool addDepElement(std::string const& name, std::vector<std::string> const& children, ValueType probability) {
@ -125,15 +129,13 @@ namespace storm {
}
STORM_LOG_ASSERT(storm::utility::isOne(probability) || children.size() == 2,
"PDep with multiple children supported.");
DFTDependencyPointer element = std::make_shared<DFTDependency<ValueType>>(mNextId++,
nameDep,
probability);
DFTDependencyPointer element = std::make_shared<storm::storage::DFTDependency<ValueType>>(mNextId++, nameDep, probability);
mElements[element->name()] = element;
mDependencyChildNames[element] = {trigger, children[i]};
mDependencies.push_back(element);
}
} else {
DFTDependencyPointer element = std::make_shared<DFTDependency<ValueType>>(mNextId++, name, probability);
DFTDependencyPointer element = std::make_shared<storm::storage::DFTDependency<ValueType>>(mNextId++, name, probability);
mElements[element->name()] = element;
mDependencyChildNames[element] = children;
mDependencies.push_back(element);
@ -161,7 +163,7 @@ namespace storm {
STORM_LOG_ERROR("Voting gates with threshold higher than the number of children is not supported.");
return false;
}
DFTElementPointer element = std::make_shared<DFTVot<ValueType>>(mNextId++, name, threshold);
DFTElementPointer element = std::make_shared<storm::storage::DFTVot<ValueType>>(mNextId++, name, threshold);
mElements[name] = element;
mChildNames[element] = children;
@ -173,7 +175,7 @@ namespace storm {
//failureRate > 0
//0 <= dormancyFactor <= 1
mElements[name] = std::make_shared<DFTBE<ValueType>>(mNextId++, name, failureRate, dormancyFactor, transient);
mElements[name] = std::make_shared<storm::storage::DFTBE<ValueType>>(mNextId++, name, failureRate, dormancyFactor, transient);
return true;
}
@ -189,7 +191,7 @@ namespace storm {
std::string getUniqueName(std::string name);
DFT<ValueType> build();
storm::storage::DFT<ValueType> build();
/**
* Copy element and insert it again in the builder.
@ -211,13 +213,13 @@ namespace storm {
unsigned computeRank(DFTElementPointer const& elem);
bool addStandardGate(std::string const& name, std::vector<std::string> const& children, DFTElementType tp);
bool addStandardGate(std::string const& name, std::vector<std::string> const& children, storm::storage::DFTElementType tp);
bool addRestriction(std::string const& name, std::vector<std::string> const& children, DFTElementType tp);
bool addRestriction(std::string const& name, std::vector<std::string> const& children, storm::storage::DFTElementType tp);
enum class topoSortColour {WHITE, BLACK, GREY};
void topoVisit(DFTElementPointer const& n, std::map<DFTElementPointer, topoSortColour, OrderElementsById<ValueType>>& visited, DFTElementVector& L);
void topoVisit(DFTElementPointer const& n, std::map<DFTElementPointer, topoSortColour, storm::storage::OrderElementsById<ValueType>>& visited, DFTElementVector& L);
DFTElementVector topoSort();
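// Hedged sketch of assembling a small DFT with the re-namespaced builder; the rates and the
// dormancy factor are arbitrary, and the call that marks "top" as the top-level element is
// not part of the hunks shown here:
//   storm::builder::DFTBuilder<double> builder;
//   builder.addBasicElement("A", 0.5, 1.0, false);
//   builder.addBasicElement("B", 0.5, 1.0, false);
//   builder.addAndElement("top", {"A", "B"});
//   storm::storage::DFT<double> dft = builder.build();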

8
src/storm-dft/modelchecker/dft/DFTASFChecker.h

@ -33,13 +33,15 @@ namespace storm {
}
friend bool operator<(SpareAndChildPair const& p1, SpareAndChildPair const& p2) {
return p1.spareIndex < p2.spareIndex || (p1.spareIndex == p2.spareIndex && p1.childIndex < p2.childIndex);
}
private:
uint64_t spareIndex;
uint64_t childIndex;
};
bool operator<(SpareAndChildPair const& p1, SpareAndChildPair const& p2) {
return p1.spareIndex < p2.spareIndex || (p1.spareIndex == p2.spareIndex && p1.childIndex < p2.childIndex);
}
class DFTASFChecker {
using ValueType = double;

58
src/storm-dft/modelchecker/dft/DFTModelChecker.cpp

@ -14,7 +14,7 @@ namespace storm {
namespace modelchecker {
template<typename ValueType>
void DFTModelChecker<ValueType>::check(storm::storage::DFT<ValueType> const& origDft, std::vector<std::shared_ptr<const storm::logic::Formula>> const& properties, bool symred, bool allowModularisation, bool enableDC, double approximationError) {
typename DFTModelChecker<ValueType>::dft_results DFTModelChecker<ValueType>::check(storm::storage::DFT<ValueType> const& origDft, std::vector<std::shared_ptr<const storm::logic::Formula>> const& properties, bool symred, bool allowModularisation, bool enableDC, double approximationError) {
// Initialize
this->approximationError = approximationError;
totalTimer.start();
@ -33,11 +33,11 @@ namespace storm {
for (ValueType result : resultsValue) {
checkResults.push_back(result);
}
} else {
checkResults = checkHelper(dft, properties, symred, allowModularisation, enableDC, approximationError);
}
totalTimer.stop();
return checkResults;
}
template<typename ValueType>
@ -54,14 +54,14 @@ namespace storm {
case storm::storage::DFTElementType::AND:
STORM_LOG_TRACE("top modularisation called AND");
dfts = dft.topModularisation();
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules.");
STORM_LOG_TRACE("Modularisation into " << dfts.size() << " submodules.");
nrK = dfts.size();
nrM = dfts.size();
break;
case storm::storage::DFTElementType::OR:
STORM_LOG_TRACE("top modularisation called OR");
dfts = dft.topModularisation();
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules.");
STORM_LOG_TRACE("Modularisation into " << dfts.size() << " submodules.");
nrK = 0;
nrM = dfts.size();
invResults = true;
@ -69,7 +69,7 @@ namespace storm {
case storm::storage::DFTElementType::VOT:
STORM_LOG_TRACE("top modularisation called VOT");
dfts = dft.topModularisation();
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules.");
STORM_LOG_TRACE("Modularisation into " << dfts.size() << " submodules.");
nrK = std::static_pointer_cast<storm::storage::DFTVot<ValueType> const>(dft.getTopLevelGate())->threshold();
nrM = dfts.size();
if(nrK <= nrM/2) {
@ -176,7 +176,7 @@ namespace storm {
bool firstTime = true;
std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> composedModel;
for (auto const ft : dfts) {
STORM_LOG_INFO("Building Model via parallel composition...");
STORM_LOG_DEBUG("Building Model via parallel composition...");
explorationTimer.start();
// Find symmetries
@ -185,12 +185,12 @@ namespace storm {
if(symred) {
auto colouring = ft.colourDFT();
symmetries = ft.findSymmetries(colouring);
STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_DEBUG("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
}
// Build a single CTMC
STORM_LOG_INFO("Building Model...");
STORM_LOG_DEBUG("Building Model...");
storm::builder::ExplicitDFTModelBuilder<ValueType> builder(ft, symmetries, enableDC);
typename storm::builder::ExplicitDFTModelBuilder<ValueType>::LabelOptions labeloptions(properties);
builder.buildModel(labeloptions, 0, 0.0);
@ -226,7 +226,7 @@ namespace storm {
}
}
composedModel->printModelInformationToStream(std::cout);
//composedModel->printModelInformationToStream(std::cout);
return composedModel;
} else {
// No composition was possible
@ -237,17 +237,17 @@ namespace storm {
if(symred) {
auto colouring = dft.colourDFT();
symmetries = dft.findSymmetries(colouring);
STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_DEBUG("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
}
// Build a single CTMC
STORM_LOG_INFO("Building Model...");
STORM_LOG_DEBUG("Building Model...");
storm::builder::ExplicitDFTModelBuilder<ValueType> builder(dft, symmetries, enableDC);
typename storm::builder::ExplicitDFTModelBuilder<ValueType>::LabelOptions labeloptions(properties);
builder.buildModel(labeloptions, 0, 0.0);
std::shared_ptr<storm::models::sparse::Model<ValueType>> model = builder.getModel();
model->printModelInformationToStream(std::cout);
//model->printModelInformationToStream(std::cout);
explorationTimer.stop();
STORM_LOG_THROW(model->isOfType(storm::models::ModelType::Ctmc), storm::exceptions::NotSupportedException, "Parallel composition only applicable for CTMCs");
return model->template as<storm::models::sparse::Ctmc<ValueType>>();
@ -264,7 +264,7 @@ namespace storm {
if(symred) {
auto colouring = dft.colourDFT();
symmetries = dft.findSymmetries(colouring);
STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_DEBUG("Found " << symmetries.groups.size() << " symmetries.");
STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
}
@ -292,7 +292,7 @@ namespace storm {
if (iteration > 0) {
explorationTimer.start();
}
STORM_LOG_INFO("Building model...");
STORM_LOG_DEBUG("Building model...");
// TODO Matthias refine model using existing model and MC results
builder.buildModel(labeloptions, iteration, approximationError);
explorationTimer.stop();
@ -301,10 +301,10 @@ namespace storm {
// TODO Matthias: possible to do bisimulation on approximated model and not on concrete one?
// Build model for lower bound
STORM_LOG_INFO("Getting model for lower bound...");
STORM_LOG_DEBUG("Getting model for lower bound...");
model = builder.getModelApproximation(true, !probabilityFormula);
// We only output the info from the lower bound as the info for the upper bound is the same
model->printModelInformationToStream(std::cout);
//model->printModelInformationToStream(std::cout);
buildingTimer.stop();
// Check lower bounds
@ -314,7 +314,7 @@ namespace storm {
approxResult.first = newResult[0];
// Build model for upper bound
STORM_LOG_INFO("Getting model for upper bound...");
STORM_LOG_DEBUG("Getting model for upper bound...");
buildingTimer.start();
model = builder.getModelApproximation(false, !probabilityFormula);
buildingTimer.stop();
@ -326,25 +326,25 @@ namespace storm {
++iteration;
STORM_LOG_ASSERT(comparator.isLess(approxResult.first, approxResult.second) || comparator.isEqual(approxResult.first, approxResult.second), "Under-approximation " << approxResult.first << " is greater than over-approximation " << approxResult.second);
STORM_LOG_INFO("Result after iteration " << iteration << ": (" << std::setprecision(10) << approxResult.first << ", " << approxResult.second << ")");
//STORM_LOG_INFO("Result after iteration " << iteration << ": (" << std::setprecision(10) << approxResult.first << ", " << approxResult.second << ")");
totalTimer.stop();
printTimings();
totalTimer.start();
STORM_LOG_THROW(!storm::utility::isInfinity<ValueType>(approxResult.first) && !storm::utility::isInfinity<ValueType>(approxResult.second), storm::exceptions::NotSupportedException, "Approximation does not work if result might be infinity.");
} while (!isApproximationSufficient(approxResult.first, approxResult.second, approximationError, probabilityFormula));
STORM_LOG_INFO("Finished approximation after " << iteration << " iteration" << (iteration > 1 ? "s." : "."));
//STORM_LOG_INFO("Finished approximation after " << iteration << " iteration" << (iteration > 1 ? "s." : "."));
dft_results results;
results.push_back(approxResult);
return results;
} else {
// Build a single Markov Automaton
STORM_LOG_INFO("Building Model...");
STORM_LOG_DEBUG("Building Model...");
storm::builder::ExplicitDFTModelBuilder<ValueType> builder(dft, symmetries, enableDC);
typename storm::builder::ExplicitDFTModelBuilder<ValueType>::LabelOptions labeloptions(properties, storm::settings::getModule<storm::settings::modules::IOSettings>().isExportExplicitSet());
builder.buildModel(labeloptions, 0, 0.0);
std::shared_ptr<storm::models::sparse::Model<ValueType>> model = builder.getModel();
model->printModelInformationToStream(std::cout);
//model->printModelInformationToStream(std::cout);
explorationTimer.stop();
// Export the model if required
@ -373,15 +373,15 @@ namespace storm {
// Bisimulation
if (model->isOfType(storm::models::ModelType::Ctmc) && storm::settings::getModule<storm::settings::modules::GeneralSettings>().isBisimulationSet()) {
bisimulationTimer.start();
STORM_LOG_INFO("Bisimulation...");
STORM_LOG_DEBUG("Bisimulation...");
model = storm::api::performDeterministicSparseBisimulationMinimization<storm::models::sparse::Ctmc<ValueType>>(model->template as<storm::models::sparse::Ctmc<ValueType>>(), properties, storm::storage::BisimulationType::Weak)->template as<storm::models::sparse::Ctmc<ValueType>>();
STORM_LOG_INFO("No. states (Bisimulation): " << model->getNumberOfStates());
STORM_LOG_INFO("No. transitions (Bisimulation): " << model->getNumberOfTransitions());
STORM_LOG_DEBUG("No. states (Bisimulation): " << model->getNumberOfStates());
STORM_LOG_DEBUG("No. transitions (Bisimulation): " << model->getNumberOfTransitions());
bisimulationTimer.stop();
}
// Check the model
STORM_LOG_INFO("Model checking...");
STORM_LOG_DEBUG("Model checking...");
modelCheckingTimer.start();
std::vector<ValueType> results;
@ -390,18 +390,18 @@ namespace storm {
for (auto property : properties) {
singleModelCheckingTimer.reset();
singleModelCheckingTimer.start();
STORM_PRINT_AND_LOG("Model checking property " << *property << " ..." << std::endl);
//STORM_PRINT_AND_LOG("Model checking property " << *property << " ..." << std::endl);
std::unique_ptr<storm::modelchecker::CheckResult> result(storm::api::verifyWithSparseEngine<ValueType>(model, storm::api::createTask<ValueType>(property, true)));
STORM_LOG_ASSERT(result, "Result does not exist.");
result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates()));
ValueType resultValue = result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second;
STORM_PRINT_AND_LOG("Result (initial states): " << resultValue << std::endl);
//STORM_PRINT_AND_LOG("Result (initial states): " << resultValue << std::endl);
results.push_back(resultValue);
singleModelCheckingTimer.stop();
STORM_PRINT_AND_LOG("Time for model checking: " << singleModelCheckingTimer << "." << std::endl);
//STORM_PRINT_AND_LOG("Time for model checking: " << singleModelCheckingTimer << "." << std::endl);
}
modelCheckingTimer.stop();
STORM_LOG_INFO("Model checking done.");
STORM_LOG_DEBUG("Model checking done.");
return results;
}

7
src/storm-dft/modelchecker/dft/DFTModelChecker.h

@ -17,12 +17,11 @@ namespace storm {
template<typename ValueType>
class DFTModelChecker {
public:
typedef std::pair<ValueType, ValueType> approximation_result;
typedef std::vector<boost::variant<ValueType, approximation_result>> dft_results;
typedef std::vector<std::shared_ptr<storm::logic::Formula const>> property_vector;
public:
/*!
* Constructor.
*/
@ -38,8 +37,10 @@ namespace storm {
* @param allowModularisation Flag indicating if modularisation is allowed
* @param enableDC Flag indicating if don't care propagation should be used
* @param approximationError Error allowed for approximation. Value 0 indicates no approximation
*
* @return Model checking results for the given properties.
*/
void check(storm::storage::DFT<ValueType> const& origDft, property_vector const& properties, bool symred = true, bool allowModularisation = true, bool enableDC = true, double approximationError = 0.0);
dft_results check(storm::storage::DFT<ValueType> const& origDft, property_vector const& properties, bool symred = true, bool allowModularisation = true, bool enableDC = true, double approximationError = 0.0);
/*!
* Print timings of all operations to stream.

279
src/storm-dft/parser/DFTGalileoParser.cpp

@ -2,12 +2,14 @@
#include <iostream>
#include <fstream>
#include <regex>
#include <boost/algorithm/string.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/algorithm/string/replace.hpp>
#include "storm/exceptions/NotImplementedException.h"
#include "storm/exceptions/FileIoException.h"
#include "storm/exceptions/NotSupportedException.h"
#include "storm/exceptions/WrongFormatException.h"
#include "storm/utility/macros.h"
#include "storm/utility/file.h"
@ -15,156 +17,229 @@ namespace storm {
namespace parser {
template<typename ValueType>
storm::storage::DFT<ValueType> DFTGalileoParser<ValueType>::parseDFT(const std::string& filename) {
readFile(filename);
storm::storage::DFT<ValueType> dft = builder.build();
STORM_LOG_DEBUG("Elements:" << std::endl << dft.getElementsString());
STORM_LOG_DEBUG("Spare Modules:" << std::endl << dft.getSpareModulesString());
return dft;
}
template<typename ValueType>
std::string DFTGalileoParser<ValueType>::stripQuotsFromName(std::string const& name) {
std::string DFTGalileoParser<ValueType>::parseName(std::string const& name) {
size_t firstQuots = name.find("\"");
size_t secondQuots = name.find("\"", firstQuots+1);
std::string parsedName;
if(firstQuots == std::string::npos) {
return name;
if (firstQuots == std::string::npos) {
parsedName = name;
} else {
STORM_LOG_THROW(secondQuots != std::string::npos, storm::exceptions::FileIoException, "No ending quotation mark found in " << name);
return name.substr(firstQuots+1,secondQuots-1);
STORM_LOG_THROW(secondQuots != std::string::npos, storm::exceptions::WrongFormatException, "No ending quotation mark found in " << name);
parsedName = name.substr(firstQuots+1,secondQuots-1);
}
}
template<typename ValueType>
std::string DFTGalileoParser<ValueType>::parseNodeIdentifier(std::string const& name) {
return boost::replace_all_copy(name, "'", "__prime__");
return boost::replace_all_copy(parsedName, "'", "__prime__");
}
template<typename ValueType>
void DFTGalileoParser<ValueType>::readFile(const std::string& filename) {
// constants
std::string toplevelToken = "toplevel";
std::string toplevelId;
std::string parametricToken = "param";
storm::storage::DFT<ValueType> DFTGalileoParser<ValueType>::parseDFT(const std::string& filename, bool defaultInclusive, bool binaryDependencies) {
storm::builder::DFTBuilder<ValueType> builder(defaultInclusive, binaryDependencies);
ValueParser<ValueType> valueParser;
// Regular expression to detect comments
// taken from: https://stackoverflow.com/questions/9449887/removing-c-c-style-comments-using-boostregex
const std::regex commentRegex("(/\\*([^*]|(\\*+[^*/]))*\\*+/)|(//.*)");
std::ifstream file;
storm::utility::openFile(filename, file);
std::string line;
size_t lineNo = 0;
std::string toplevelId = "";
bool comment = false; // Indicates whether the current line is part of a multiline comment
while (std::getline(file, line)) {
bool success = true;
STORM_LOG_TRACE("Parsing: " << line);
size_t commentstarts = line.find("//");
line = line.substr(0, commentstarts);
size_t firstsemicolon = line.find(";");
line = line.substr(0, firstsemicolon);
if (line.find_first_not_of(' ') == std::string::npos) {
// Only whitespace
continue;
++lineNo;
// First consider comments
if (comment) {
// Line is part of multiline comment -> search for end of this comment
size_t commentEnd = line.find("*/");
if (commentEnd == std::string::npos) {
continue;
} else {
// Remove comment
line = line.substr(commentEnd + 2);
comment = false;
}
}
// Remove comments
line = std::regex_replace(line, commentRegex, "");
// Check if multiline comment starts
size_t commentStart = line.find("/*");
if (commentStart != std::string::npos) {
// Remove comment
line = line.substr(0, commentStart);
comment = true;
}
// Top level indicator.
if(boost::starts_with(line, toplevelToken)) {
toplevelId = stripQuotsFromName(line.substr(toplevelToken.size() + 1));
boost::trim(line);
if (line.empty()) {
// Empty line
continue;
}
else if (boost::starts_with(line, parametricToken)) {
#ifdef STORM_HAVE_CARL
// Remove semicolon
STORM_LOG_THROW(line.back() == ';', storm::exceptions::WrongFormatException, "Semicolon expected at the end of line " << lineNo << ".");
line.pop_back();
// Split line into tokens
boost::trim(line);
std::vector<std::string> tokens;
boost::split(tokens, line, boost::is_any_of(" \t"), boost::token_compress_on);
if (tokens[0] == "toplevel") {
// Top level indicator
STORM_LOG_THROW(toplevelId == "", storm::exceptions::WrongFormatException, "Toplevel element already defined.");
STORM_LOG_THROW(tokens.size() == 2, storm::exceptions::WrongFormatException, "Expected element id after 'toplevel' in line " << lineNo << ".");
toplevelId = parseName(tokens[1]);
} else if (tokens[0] == "param") {
// Parameters
STORM_LOG_THROW(tokens.size() == 2, storm::exceptions::WrongFormatException, "Expected parameter name after 'param' in line " << lineNo << ".");
STORM_LOG_THROW((std::is_same<ValueType, storm::RationalFunction>::value), storm::exceptions::NotSupportedException, "Parameters only allowed when using rational functions.");
std::string parameter = stripQuotsFromName(line.substr(parametricToken.size() + 1));
storm::expressions::Variable var = manager->declareRationalVariable(parameter);
identifierMapping.emplace(var.getName(), var);
parser.setIdentifierMapping(identifierMapping);
STORM_LOG_TRACE("Added parameter: " << var.getName());
#else
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Parameters are not supported in this build.");
#endif
valueParser.addParameter(parseName(tokens[1]));
} else {
std::vector<std::string> tokens;
boost::split(tokens, line, boost::is_any_of(" "));
std::string name(parseNodeIdentifier(stripQuotsFromName(tokens[0])));
// DFT element
std::string name = parseName(tokens[0]);
std::vector<std::string> childNames;
for(unsigned i = 2; i < tokens.size(); ++i) {
childNames.push_back(parseNodeIdentifier(stripQuotsFromName(tokens[i])));
childNames.push_back(parseName(tokens[i]));
}
if(tokens[1] == "and") {
bool success = true;
// Add element according to type
std::string type = tokens[1];
if (type == "and") {
success = builder.addAndElement(name, childNames);
} else if (tokens[1] == "or") {
} else if (type == "or") {
success = builder.addOrElement(name, childNames);
} else if (boost::starts_with(tokens[1], "vot")) {
success = builder.addVotElement(name, boost::lexical_cast<unsigned>(tokens[1].substr(3)), childNames);
} else if (tokens[1].find("of") != std::string::npos) {
size_t pos = tokens[1].find("of");
unsigned threshold = boost::lexical_cast<unsigned>(tokens[1].substr(0, pos));
unsigned count = boost::lexical_cast<unsigned>(tokens[1].substr(pos + 2));
STORM_LOG_THROW(count == childNames.size(), storm::exceptions::FileIoException, "Voting gate does not correspond to number of children.");
} else if (boost::starts_with(type, "vot")) {
unsigned threshold = NumberParser<unsigned>::parse(type.substr(3));
success = builder.addVotElement(name, threshold, childNames);
} else if (type.find("of") != std::string::npos) {
size_t pos = type.find("of");
unsigned threshold = NumberParser<unsigned>::parse(type.substr(0, pos));
unsigned count = NumberParser<unsigned>::parse(type.substr(pos + 2));
STORM_LOG_THROW(count == childNames.size(), storm::exceptions::WrongFormatException, "Voting gate number " << count << " does not correspond to number of children " << childNames.size() << " in line " << lineNo << ".");
success = builder.addVotElement(name, threshold, childNames);
} else if (tokens[1] == "pand") {
success = builder.addPandElement(name, childNames);
} else if (tokens[1] == "pand-inc") {
} else if (type == "pand") {
success = builder.addPandElement(name, childNames, defaultInclusive);
} else if (type == "pand-inc") {
success = builder.addPandElement(name, childNames, true);
} else if (tokens[1] == "pand-ex") {
} else if (type == "pand-ex") {
success = builder.addPandElement(name, childNames, false);
} else if (tokens[1] == "por") {
success = builder.addPorElement(name, childNames);
} else if (tokens[1] == "por-ex") {
} else if (type == "por") {
success = builder.addPorElement(name, childNames, defaultInclusive);
} else if (type == "por-ex") {
success = builder.addPorElement(name, childNames, false);
} else if (tokens[1] == "por-inc") {
} else if (type == "por-inc") {
success = builder.addPorElement(name, childNames, true);
} else if (tokens[1] == "wsp" || tokens[1] == "csp") {
} else if (type == "wsp" || type == "csp" || type == "hsp") {
success = builder.addSpareElement(name, childNames);
} else if (tokens[1] == "seq") {
} else if (type == "seq") {
success = builder.addSequenceEnforcer(name, childNames);
} else if (tokens[1] == "fdep") {
} else if (type == "fdep") {
STORM_LOG_THROW(childNames.size() >= 2, storm::exceptions::WrongFormatException, "FDEP gate needs at least two children in line " << lineNo << ".");
success = builder.addDepElement(name, childNames, storm::utility::one<ValueType>());
} else if (boost::starts_with(tokens[1], "pdep=")) {
ValueType probability = parseRationalExpression(tokens[1].substr(5));
} else if (boost::starts_with(type, "pdep=")) {
ValueType probability = valueParser.parseValue(type.substr(5));
success = builder.addDepElement(name, childNames, probability);
} else if (boost::starts_with(tokens[1], "lambda=")) {
ValueType failureRate = parseRationalExpression(tokens[1].substr(7));
ValueType dormancyFactor = parseRationalExpression(tokens[2].substr(5));
success = builder.addBasicElement(name, failureRate, dormancyFactor, false); // TODO set transient BEs
} else if (type.find("=") != std::string::npos) {
success = parseBasicElement(tokens, builder, valueParser);
} else {
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Type name: " << tokens[1] << " not recognized.");
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Type name: " << type << " in line " << lineNo << " not recognized.");
success = false;
}
STORM_LOG_THROW(success, storm::exceptions::FileIoException, "Error while adding element '" << name << "' of line '" << line << "'.");
STORM_LOG_THROW(success, storm::exceptions::FileIoException, "Error while adding element '" << name << "' in line " << lineNo << ".");
}
}
if(!builder.setTopLevel(toplevelId)) {
STORM_LOG_THROW(false, storm::exceptions::FileIoException, "Top level id unknown.");
if (!builder.setTopLevel(toplevelId)) {
STORM_LOG_THROW(false, storm::exceptions::FileIoException, "Top level id '" << toplevelId << "' unknown.");
}
storm::utility::closeFile(file);
// Build DFT
storm::storage::DFT<ValueType> dft = builder.build();
STORM_LOG_DEBUG("DFT Elements:" << std::endl << dft.getElementsString());
STORM_LOG_DEBUG("Spare Modules:" << std::endl << dft.getSpareModulesString());
return dft;
}
template<typename ValueType>
ValueType DFTGalileoParser<ValueType>::parseRationalExpression(std::string const& expr) {
STORM_LOG_ASSERT(false, "Specialized method should be called.");
return 0;
}
bool DFTGalileoParser<ValueType>::parseBasicElement(std::vector<std::string> const& tokens, storm::builder::DFTBuilder<ValueType>& builder, ValueParser<ValueType>& valueParser) {
// Default values
Distribution distribution = Distribution::None;
ValueType firstValDistribution = storm::utility::zero<ValueType>();
ValueType secondValDistribution = storm::utility::zero<ValueType>();
ValueType dormancyFactor = storm::utility::one<ValueType>();
size_t replication = 1;
template<>
double DFTGalileoParser<double>::parseRationalExpression(std::string const& expr) {
return boost::lexical_cast<double>(expr);
}
// Parse properties and determine distribution
for (size_t i = 1; i < tokens.size(); ++i) {
std::string token = tokens[i];
if (boost::starts_with(token, "prob=")) {
STORM_LOG_THROW(distribution == Distribution::None, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
firstValDistribution = valueParser.parseValue(token.substr(5));
distribution = Distribution::Constant;
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Constant distribution is not supported.");
} else if (boost::starts_with(token, "lambda=")) {
STORM_LOG_THROW(distribution == Distribution::None, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
firstValDistribution = valueParser.parseValue(token.substr(7));
distribution = Distribution::Exponential;
} else if (boost::starts_with(token, "rate=")) {
STORM_LOG_THROW(distribution == Distribution::None || distribution == Distribution::Weibull, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
firstValDistribution = valueParser.parseValue(token.substr(5));
distribution = Distribution::Weibull;
} else if (boost::starts_with(token, "shape=")) {
STORM_LOG_THROW(distribution == Distribution::None || distribution == Distribution::Weibull, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
secondValDistribution = valueParser.parseValue(token.substr(6));
distribution = Distribution::Weibull;
} else if (boost::starts_with(token, "mean=")) {
STORM_LOG_THROW(distribution == Distribution::None || distribution == Distribution::LogNormal, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
firstValDistribution = valueParser.parseValue(token.substr(5));
distribution = Distribution::LogNormal;
} else if (boost::starts_with(token, "stddev=")) {
STORM_LOG_THROW(distribution == Distribution::None || distribution == Distribution::LogNormal, storm::exceptions::WrongFormatException, "A different distribution was already defined for this basic element.");
secondValDistribution = valueParser.parseValue(token.substr(7));
distribution = Distribution::LogNormal;
} else if (boost::starts_with(token, "cov=")) {
STORM_LOG_WARN("Coverage is not supported and will be ignored.");
} else if (boost::starts_with(token, "res=")) {
STORM_LOG_WARN("Restoration is not supported and will be ignored.");
} else if (boost::starts_with(token, "repl=")) {
replication = NumberParser<unsigned>::parse(token.substr(5));
STORM_LOG_THROW(replication == 1, storm::exceptions::NotSupportedException, "Replication > 1 is not supported.");
} else if (boost::starts_with(token, "dorm=")) {
dormancyFactor = valueParser.parseValue(token.substr(5));
}
}
// Explicitly instantiate the class.
template class DFTGalileoParser<double>;
switch (distribution) {
case Constant:
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Constant distribution is not supported.");
break;
case Exponential:
return builder.addBasicElement(parseName(tokens[0]), firstValDistribution, dormancyFactor, false); // TODO set transient BEs
break;
case Weibull:
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Weibull distribution is not supported.");
break;
case LogNormal:
STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "LogNormal distribution is not supported.");
break;
case None:
// fall through
default:
STORM_LOG_THROW(false, storm::exceptions::WrongFormatException, "No distribution for basic element defined.");
break;
}
return false;
#ifdef STORM_HAVE_CARL
template<>
storm::RationalFunction DFTGalileoParser<storm::RationalFunction>::parseRationalExpression(std::string const& expr) {
STORM_LOG_TRACE("Translating expression: " << expr);
storm::expressions::Expression expression = parser.parseFromString(expr);
STORM_LOG_TRACE("Expression: " << expression);
storm::RationalFunction rationalFunction = evaluator.asRational(expression);
STORM_LOG_TRACE("Parsed expression: " << rationalFunction);
return rationalFunction;
}
// Explicitly instantiate the class.
template class DFTGalileoParser<double>;
template class DFTGalileoParser<RationalFunction>;
#endif
}
}
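For illustration only (not part of the diff): a hypothetical Galileo file exercising the rewritten parser, followed by the new static entry point. The file name and element names are made up; only syntax handled by the code above (block comments, toplevel, k-of-n gates, lambda=/dorm= attributes) is used.
// Contents of a hypothetical "example.dft":
//   /* two-out-of-three system */
//   toplevel "SYS";
//   "SYS" 2of3 "A" "B" "C";
//   "A" lambda=0.5 dorm=0.3;
//   "B" lambda=0.5 dorm=0.3;
//   "C" lambda=0.5 dorm=0.3;
// Parsing with the defaults (inclusive priority gates, binary dependencies):
storm::storage::DFT<double> dft = storm::parser::DFTGalileoParser<double>::parseDFT("example.dft");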

58
src/storm-dft/parser/DFTGalileoParser.h

@ -7,39 +7,53 @@
#include "storm/storage/expressions/ExpressionEvaluator.h"
#include "storm-dft/storage/dft/DFT.h"
#include "storm-dft/storage/dft/DFTBuilder.h"
#include "storm-dft/builder/DFTBuilder.h"
#include "storm/parser/ValueParser.h"
namespace storm {
namespace parser {
/*!
* Parser for DFT in the Galileo format.
*/
template<typename ValueType>
class DFTGalileoParser {
storm::storage::DFTBuilder<ValueType> builder;
std::shared_ptr<storm::expressions::ExpressionManager> manager;
storm::parser::ExpressionParser parser;
storm::expressions::ExpressionEvaluator<ValueType> evaluator;
std::unordered_map<std::string, storm::expressions::Expression> identifierMapping;
public:
DFTGalileoParser(bool defaultInclusive = true, bool binaryDependencies = true) : builder(defaultInclusive, binaryDependencies), manager(new storm::expressions::ExpressionManager()), parser(*manager), evaluator(*manager) {
}
storm::storage::DFT<ValueType> parseDFT(std::string const& filename);
/*!
* Parse DFT in Galileo format and build DFT.
*
* @param filename File.
* @param defaultInclusive Flag indicating if priority gates are inclusive by default.
* @param binaryDependencies Flag indicating if dependencies should be converted to binary dependencies.
*
* @return DFT.
*/
static storm::storage::DFT<ValueType> parseDFT(std::string const& filename, bool defaultInclusive = true, bool binaryDependencies = true);
private:
void readFile(std::string const& filename);
std::string stripQuotsFromName(std::string const& name);
std::string parseNodeIdentifier(std::string const& name);
ValueType parseRationalExpression(std::string const& expr);
bool defaultInclusive;
/*!
* Parse element name (strip quotation marks, etc.).
*
* @param name Element name.
*
* @return Name.
*/
static std::string parseName(std::string const& name);
/*!
* Parse basic element and add it to builder.
*
* @param tokens Tokens defining the basic element.
* @param builder DFTBuilder.
* @param valueParser ValueParser.
*
* @return True iff the parsing and creation were successful.
*/
static bool parseBasicElement(std::vector<std::string> const& tokens, storm::builder::DFTBuilder<ValueType>& builder, ValueParser<ValueType>& valueParser);
enum Distribution { None, Constant, Exponential, Weibull, LogNormal };
};
}
}

4
src/storm-dft/parser/DFTJsonParser.h

@ -7,7 +7,7 @@
#include "storm/storage/expressions/ExpressionEvaluator.h"
#include "storm-dft/storage/dft/DFT.h"
#include "storm-dft/storage/dft/DFTBuilder.h"
#include "storm-dft/builder/DFTBuilder.h"
// JSON parser
#include "json.hpp"
@ -19,7 +19,7 @@ namespace storm {
template<typename ValueType>
class DFTJsonParser {
storm::storage::DFTBuilder<ValueType> builder;
storm::builder::DFTBuilder<ValueType> builder;
std::shared_ptr<storm::expressions::ExpressionManager> manager;

10
src/storm-dft/settings/DftSettings.cpp

@ -9,10 +9,14 @@
#include "storm/settings/modules/IOSettings.h"
#include "storm/settings/modules/DebugSettings.h"
#include "storm/settings/modules/EigenEquationSolverSettings.h"
#include "storm/settings/modules/ModelCheckerSettings.h"
#include "storm/settings/modules/GmmxxEquationSolverSettings.h"
#include "storm/settings/modules/NativeEquationSolverSettings.h"
#include "storm/settings/modules/MultiplierSettings.h"
#include "storm/settings/modules/TopologicalEquationSolverSettings.h"
#include "storm/settings/modules/EliminationSettings.h"
#include "storm/settings/modules/MinMaxEquationSolverSettings.h"
#include "storm/settings/modules/GameSolverSettings.h"
#include "storm/settings/modules/BisimulationSettings.h"
#include "storm/settings/modules/ResourceSettings.h"
#include "storm/settings/modules/JaniExportSettings.h"
@ -33,11 +37,15 @@ namespace storm {
storm::settings::addModule<storm::settings::modules::CoreSettings>();
storm::settings::addModule<storm::settings::modules::DebugSettings>();
storm::settings::addModule<storm::settings::modules::NativeEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::ModelCheckerSettings>();
storm::settings::addModule<storm::settings::modules::GmmxxEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::EigenEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::NativeEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::TopologicalEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::EliminationSettings>();
storm::settings::addModule<storm::settings::modules::MinMaxEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::MultiplierSettings>();
storm::settings::addModule<storm::settings::modules::GameSolverSettings>(false);
// storm::settings::addModule<storm::settings::modules::BisimulationSettings>();
storm::settings::addModule<storm::settings::modules::ResourceSettings>();

9
src/storm-dft/settings/modules/DftIOSettings.cpp

@ -26,6 +26,8 @@ namespace storm {
const std::string DftIOSettings::maxValueOptionName = "max";
const std::string DftIOSettings::transformToGspnOptionName = "gspn";
const std::string DftIOSettings::exportToJsonOptionName = "export-json";
const std::string DftIOSettings::displayStatsOptionName = "show-dft-stats";
DftIOSettings::DftIOSettings() : ModuleSettings(moduleName) {
this->addOption(storm::settings::OptionBuilder(moduleName, dftFileOptionName, false, "Parses the model given in the Galileo format.").setShortName(dftFileOptionShortName)
@ -40,6 +42,8 @@ namespace storm {
this->addOption(storm::settings::OptionBuilder(moduleName, maxValueOptionName, false, "Compute maximal value in case of non-determinism.").build());
this->addOption(storm::settings::OptionBuilder(moduleName, transformToGspnOptionName, false, "Transform DFT to GSPN.").build());
this->addOption(storm::settings::OptionBuilder(moduleName, exportToJsonOptionName, false, "Export the model to the Cytoscape JSON format.").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "The name of the JSON file to export to.").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, displayStatsOptionName, false, "Print stats to stdout").build());
}
bool DftIOSettings::isDftFileSet() const {
@ -109,6 +113,10 @@ namespace storm {
return this->getOption(exportToJsonOptionName).getArgumentByName("filename").getValueAsString();
}
bool DftIOSettings::isDisplayStatsSet() const {
return this->getOption(displayStatsOptionName).getHasOptionBeenSet();
}
void DftIOSettings::finalize() {
}
@ -121,3 +129,4 @@ namespace storm {
} // namespace modules
} // namespace settings
} // namespace storm

10
src/storm-dft/settings/modules/DftIOSettings.h

@ -121,6 +121,13 @@ namespace storm {
* @return The name of the json file to export to.
*/
std::string getExportJsonFilename() const;
/*!
* Retrieves whether statistics for the DFT should be displayed.
*
* @return True if the statistics option was set.
*/
bool isDisplayStatsSet() const;
bool check() const override;
void finalize() override;
@ -143,7 +150,8 @@ namespace storm {
static const std::string maxValueOptionName;
static const std::string transformToGspnOptionName;
static const std::string exportToJsonOptionName;
static const std::string displayStatsOptionName;
};
} // namespace modules
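A possible consumer of the new option, for illustration only; the actual CLI wiring is not shown in this diff. The getModule call follows the pattern used elsewhere in this commit, and dft stands for a parsed DFT (writeStatsToStream is added in DFT.cpp below).
if (storm::settings::getModule<storm::settings::modules::DftIOSettings>().isDisplayStatsSet()) {
    dft.writeStatsToStream(std::cout);
}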

2
src/storm-dft/storage/BucketPriorityQueue.cpp

@ -9,7 +9,7 @@ namespace storm {
template<typename ValueType>
BucketPriorityQueue<ValueType>::BucketPriorityQueue(size_t nrBuckets, double lowerValue, double ratio) : lowerValue(lowerValue), logBase(std::log(ratio)), nrBuckets(nrBuckets), nrUnsortedItems(0), buckets(nrBuckets), currentBucket(nrBuckets) {
compare = ([this](HeuristicPointer a, HeuristicPointer b) {
compare = ([](HeuristicPointer a, HeuristicPointer b) {
return *a < *b;
});
}

72
src/storm-dft/storage/dft/DFT.cpp

@ -7,7 +7,7 @@
#include "storm/utility/iota_n.h"
#include "storm/utility/vector.h"
#include "storm-dft/storage/dft/DFTBuilder.h"
#include "storm-dft/builder/DFTBuilder.h"
#include "storm-dft/storage/dft/DFTIsomorphism.h"
@ -271,7 +271,7 @@ namespace storm {
std::vector<DFT<ValueType>> res;
for(auto const& subdft : subdfts) {
DFTBuilder<ValueType> builder;
storm::builder::DFTBuilder<ValueType> builder;
for(size_t id : subdft.second) {
builder.copyElement(mElements[id]);
@ -293,22 +293,22 @@ namespace storm {
}
return max;
}
template<typename ValueType>
DFT<ValueType> DFT<ValueType>::optimize() const {
std::vector<size_t> modIdea = findModularisationRewrite();
STORM_LOG_DEBUG("Modularisation idea: " << storm::utility::vector::toString(modIdea));
if (modIdea.empty()) {
// No rewrite needed
return *this;
}
std::vector<std::vector<size_t>> rewriteIds;
rewriteIds.push_back(modIdea);
DFTBuilder<ValueType> builder;
storm::builder::DFTBuilder<ValueType> builder;
// Accumulate elements which must be rewritten
std::set<size_t> rewriteSet;
for (std::vector<size_t> rewrites : rewriteIds) {
@ -320,7 +320,7 @@ namespace storm {
builder.copyElement(elem);
}
}
// Add rewritten elements
for (std::vector<size_t> rewrites : rewriteIds) {
STORM_LOG_ASSERT(rewrites.size() > 1, "No rewritten elements.");
@ -359,7 +359,7 @@ namespace storm {
STORM_LOG_ASSERT(false, "Dft type can not be rewritten.");
break;
}
// Add parent with the new child newParent and all its remaining children
childrenNames.clear();
childrenNames.push_back(newParentName);
@ -371,14 +371,42 @@ namespace storm {
}
builder.copyGate(originalParent, childrenNames);
}
builder.setTopLevel(mElements[mTopLevelIndex]->name());
// TODO use reference?
DFT<ValueType> newDft = builder.build();
STORM_LOG_TRACE(newDft.getElementsString());
return newDft.optimize();
}
template<typename ValueType>
size_t DFT<ValueType>::nrDynamicElements() const {
size_t noDyn = 0;
for (auto const& elem : mElements) {
switch (elem->type()) {
case DFTElementType::AND:
case DFTElementType::OR:
case DFTElementType::VOT:
case DFTElementType::BE:
case DFTElementType::CONSTF:
case DFTElementType::CONSTS:
break;
case DFTElementType::PAND:
case DFTElementType::SPARE:
case DFTElementType::POR:
case DFTElementType::SEQ:
case DFTElementType::MUTEX:
case DFTElementType::PDEP:
noDyn += 1;
break;
default:
STORM_LOG_ASSERT(false, "DFT element type " << elem->type() << " not known.");
break;
}
}
return noDyn;
}
template<typename ValueType>
std::string DFT<ValueType>::getElementsString() const {
std::stringstream stream;
@ -722,11 +750,10 @@ namespace storm {
// suitable parent gate! - Lets check the independent submodules of the children
auto const& children = std::static_pointer_cast<DFTGate<ValueType>>(e)->children();
for(auto const& child : children) {
auto ISD = std::static_pointer_cast<DFTGate<ValueType>>(child)->independentSubDft(true);
// In the ISD, check for other children:
std::vector<size_t> rewrite = {e->id(), child->id()};
for(size_t isdElemId : ISD) {
if(isdElemId == child->id()) continue;
@ -737,13 +764,13 @@ namespace storm {
if(rewrite.size() > 2 && rewrite.size() < children.size() - 1) {
return rewrite;
}
}
}
}
}
}
return {};
}
template<typename ValueType>
std::tuple<std::vector<size_t>, std::vector<size_t>, std::vector<size_t>> DFT<ValueType>::getSortedParentAndDependencyIds(size_t index) const {
@ -766,6 +793,13 @@ namespace storm {
std::sort(outgoingDeps.begin(), outgoingDeps.end());
return std::make_tuple(parents, ingoingDeps, outgoingDeps);
}
template<typename ValueType>
void DFT<ValueType>::writeStatsToStream(std::ostream& stream) const {
stream << "Number of BEs: " << nrBasicElements() << std::endl;
stream << "Number of dynamic elements: " << nrDynamicElements() << std::endl;
stream << "Number of elements: " << nrElements() << std::endl;
}
// Explicitly instantiate the class.
template class DFT<double>;
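For illustration only: the output produced by the new writeStatsToStream for some built DFT dft; the three lines mirror the stream writes above, and the numbers depend on the model.
dft.writeStatsToStream(std::cout);
// Possible output:
//   Number of BEs: 5
//   Number of dynamic elements: 2
//   Number of elements: 9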

16
src/storm-dft/storage/dft/DFT.h

@ -18,6 +18,11 @@
#include "storm-dft/storage/dft/DFTLayoutInfo.h"
namespace storm {
namespace builder {
// Forward declaration
template<typename T> class DFTBuilder;
}
namespace storage {
template<typename ValueType>
@ -32,11 +37,8 @@ namespace storm {
};
// Forward declarations
// Forward declaration
template<typename T> class DFTColouring;
template<typename T> class DFTBuilder;
/**
* Represents a Dynamic Fault Tree
@ -76,7 +78,7 @@ namespace storm {
DFT<ValueType> optimize() const;
void copyElements(std::vector<size_t> elements, DFTBuilder<ValueType> builder) const;
void copyElements(std::vector<size_t> elements, storm::builder::DFTBuilder<ValueType> builder) const;
size_t stateVectorSize() const {
return mStateVectorSize;
@ -89,6 +91,8 @@ namespace storm {
size_t nrBasicElements() const {
return mNrOfBEs;
}
size_t nrDynamicElements() const;
size_t getTopLevelIndex() const {
return mTopLevelIndex;
@ -275,6 +279,8 @@ namespace storm {
return mLayoutInfo.at(id);
}
void writeStatsToStream(std::ostream& stream) const;
private:
std::tuple<std::vector<size_t>, std::vector<size_t>, std::vector<size_t>> getSortedParentAndDependencyIds(size_t index) const;

826
src/storm-dft/transformations/DftToGspnTransformator.cpp
File diff suppressed because it is too large

219
src/storm-dft/transformations/DftToGspnTransformator.h

@ -25,7 +25,7 @@ namespace storm {
/*!
* Transform the DFT to a GSPN.
*/
void transform();
void transform(bool smart = true);
/*!
* Extract Gspn by building
@ -33,129 +33,174 @@ namespace storm {
*/
gspn::GSPN* obtainGSPN();
/*!
* Get failed place id of top level element.
*/
uint64_t toplevelFailedPlaceId();
private:
/*
* Draw all elements of the GSPN.
*/
void drawGSPNElements();
/*
* Draw restrictions between the elements of the GSPN (i.e. SEQ or MUTEX).
/*!
* Translate all elements of the GSPN.
*/
void drawGSPNRestrictions();
/*
* Draw a Petri net Basic Event.
void translateGSPNElements();
/*!
* Translate a GSPN Basic Event.
*
* @param dftBE The Basic Event.
*/
void drawBE(std::shared_ptr<storm::storage::DFTBE<ValueType> const> dftBE, bool isRepresentative);
/*
* Draw a Petri net AND.
void translateBE(std::shared_ptr<storm::storage::DFTBE<ValueType> const> dftBE);
/*!
* Translate a GSPN CONSTF (Constant Failure, a Basic Event that has already failed).
*
* @param dftConstF The CONSTF Basic Event.
*/
void translateCONSTF(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftConstF);
/*!
* Translate a GSPN CONSTS (Constant Save, a Basic Event that cannot fail).
*
* @param dftConstS The CONSTS Basic Event.
*/
void translateCONSTS(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftConstS);
/*!
* Translate a GSPN AND.
*
* @param dftAnd The AND gate.
*/
void drawAND(std::shared_ptr<storm::storage::DFTAnd<ValueType> const> dftAnd, bool isRepresentative);
void translateAND(std::shared_ptr<storm::storage::DFTAnd<ValueType> const> dftAnd);
/*
* Draw a Petri net OR.
/*!
* Translate a GSPN OR.
*
* @param dftOr The OR gate.
*/
void drawOR(std::shared_ptr<storm::storage::DFTOr<ValueType> const> dftOr, bool isRepresentative);
void translateOR(std::shared_ptr<storm::storage::DFTOr<ValueType> const> dftOr);
/*
* Draw a Petri net VOT.
/*!
* Translate a GSPN VOT.
*
* @param dftVot The VOT gate.
*/
void drawVOT(std::shared_ptr<storm::storage::DFTVot<ValueType> const> dftVot, bool isRepresentative);
void translateVOT(std::shared_ptr<storm::storage::DFTVot<ValueType> const> dftVot);
/*
* Draw a Petri net PAND.
* This PAND is inklusive (children are allowed to fail simultaneously and the PAND will fail nevertheless).
*
/*!
* Translate a GSPN PAND.
*
* @param dftPand The PAND gate.
* @param inclusive Flag indicating whether the PAND is inclusive (children are allowed to fail simultaneously and the PAND will fail nevertheless)
*/
void drawPAND(std::shared_ptr<storm::storage::DFTPand<ValueType> const> dftPand, bool isRepresentative);
/*
* Draw a Petri net SPARE.
void translatePAND(std::shared_ptr<storm::storage::DFTPand<ValueType> const> dftPand, bool inclusive = true);
/*!
* Translate a GSPN POR.
*
* @param dftPor The POR gate.
* @param inclusive Flag indicating whether the POR is inclusive (children are allowed to fail simultaneously and the POR will fail nevertheless)
*/
void translatePOR(std::shared_ptr<storm::storage::DFTPor<ValueType> const> dftPor, bool inclusive = true);
/*!
* Translate a GSPN SPARE.
*
* @param dftSpare The SPARE gate.
*/
void drawSPARE(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> dftSpare, bool isRepresentative);
/*
* Draw a Petri net POR.
* This POR is inklusive (children are allowed to fail simultaneously and the POR will fail nevertheless).
*
* @param dftPor The POR gate.
*/
void drawPOR(std::shared_ptr<storm::storage::DFTPor<ValueType> const> dftPor, bool isRepresentative);
/*
* Draw a Petri net CONSTF (Constant Failure, a Basic Event that has already failed).
*
* @param dftPor The CONSTF Basic Event.
*/
void drawCONSTF(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftConstF, bool isRepresentative);
/*
* Draw a Petri net CONSTS (Constant Save, a Basic Event that cannot fail).
*
* @param dftPor The CONSTS Basic Event.
*/
void drawCONSTS(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftConstS, bool isRepresentative);
void translateSPARE(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> dftSpare);
/*!
* Translate a GSPN PDEP (FDEP is included with a probability of 1).
*
* @param dftDependency The PDEP gate.
*/
void translatePDEP(std::shared_ptr<storm::storage::DFTDependency<ValueType> const> dftDependency);
/*
* Draw a Petri net PDEP (FDEP is included with a firerate of 1).
/*!
* Translate a GSPN SEQ.
*
* @param dftSeq The SEQ gate.
*/
void translateSeq(std::shared_ptr<storm::storage::DFTSeq<ValueType> const> dftSeq);
/*!
* Check if the element is active initially.
*
* @param dFTElement DFT element.
*
* @return True iff element is active initially.
*/
void drawPDEP(std::shared_ptr<storm::storage::DFTDependency<ValueType> const> dftDependency);
void drawSeq(std::shared_ptr<storm::storage::DFTSeq<ValueType> const> dftSeq);
/*
* Return true if BE is active (corresponding place contains one initial token) or false if BE is inactive (corresponding place contains no initial token).
*
* @param dFTElement DFT element.
*/
bool isBEActive(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dFTElement);
bool isActiveInitially(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dFTElement);
/*
* Get the priority of the element.
* The priority is two times the length of the shortest path to the top event.
*
* @param priority The priority of the gate. Top Event has priority 0, its children 2, its grandchildren 4, ...
*
* @param dftElement The element whose priority shall be determined.
*/
uint64_t getFailPriority(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dFTElement);
/*!
* Get the priority of the element.
* The priority is two times the length of the shortest path to the top event.
*
* @param dftElement The element whose priority shall be determined.
*
* @return The priority of the gate. Top Event has priority 0, its children 2, its grandchildren 4, ...
*/
uint64_t getFailPriority(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dFTElement);
uint64_t addUnavailableNode(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftElement, storm::gspn::LayoutInfo const& layoutInfo, bool initialAvailable = true);
uint64_t addDisabledPlace(std::shared_ptr<storm::storage::DFTBE<ValueType> const> dftBe);
/*!
* Add failed place for an element.
*
* @param dftElement Element.
* @param layoutInfo Information about layout.
* @param initialFailed Flag indicating whether the element is initially failed.
*
* @return Id of added failed place.
*/
uint64_t addFailedPlace(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftElement, storm::gspn::LayoutInfo const& layoutInfo, bool initialFailed = false);
/*!
* Add unavailable place for element.
*
* @param dftElement Element.
* @param layoutInfo Information about layout.
* @param initialAvailable Flag indicating whether the element is available initially.
*
* @return Id of added unavailable place.
*/
uint64_t addUnavailablePlace(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftElement, storm::gspn::LayoutInfo const& layoutInfo, bool initialAvailable = true);
/*!
* Add disabled place for element.
*
* @param dftBe Basic Element.
* @param layoutInfo Information about layout.
*
* @return Id of added disabled place.
*/
uint64_t addDisabledPlace(std::shared_ptr<storm::storage::DFTBE<ValueType> const> dftBe, storm::gspn::LayoutInfo const& layoutInfo);
/*!
* Get failed place for element.
*
* @param dftElement Element.
*
* @return Id of failed place corresponding to the given element.
*/
uint64_t getFailedPlace(std::shared_ptr<storm::storage::DFTElement<ValueType> const> dftElement) {
return failedPlaces.at(dftElement->id());
}
storm::storage::DFT<ValueType> const& mDft;
storm::gspn::GspnBuilder builder;
std::vector<uint64_t> failedNodes;
std::map<uint64_t, uint64_t> unavailableNodes;
std::map<uint64_t, uint64_t> activeNodes;
std::map<uint64_t, uint64_t> disabledNodes;
bool smart = true;
// Interface places for DFT elements
std::vector<uint64_t> failedPlaces;
std::map<uint64_t, uint64_t> unavailablePlaces;
std::map<uint64_t, uint64_t> activePlaces;
std::map<uint64_t, uint64_t> disabledPlaces;
bool smart;
static constexpr const char* STR_FAILING = "_failing"; // Name standard for transitions that point towards a place, which in turn indicates the failure of a gate.
static constexpr const char* STR_FAILED = "_failed"; // Name standard for place which indicates the failure of a gate.
static constexpr const char* STR_FAILSAVING = "_failsaving"; // Name standard for transitions that point towards a place, which in turn indicates the failsave state of a gate.
static constexpr const char* STR_FAILSAVE = "_failsave"; // Name standard for place which indicates the failsave state of a gate.
static constexpr const char* STR_ACTIVATED = "_activated"; // Name standard for place which indicates the activity.
static constexpr const char* STR_ACTIVATING = "_activating"; // Name standard for transitions that point towards a place, which in turn indicates its activity.
static constexpr const char* STR_ACTIVATED = "_active"; // Name standard for place which indicates the activity.
};
}
}
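For illustration only (not part of the diff): a sketch of the renamed translate-based interface for some DFT dft. The constructor signature and the enclosing namespace storm::transformations::dft are assumptions; transform, obtainGSPN and toplevelFailedPlaceId are taken from the header above.
storm::transformations::dft::DftToGspnTransformator<double> transformator(dft); // constructor assumed
transformator.transform(/*smart=*/true);
storm::gspn::GSPN* gspn = transformator.obtainGSPN();
uint64_t toplevelFailed = transformator.toplevelFailedPlaceId();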

2
src/storm-gspn-cli/CMakeLists.txt

@ -5,4 +5,4 @@ set_target_properties(storm-gspn-cli PROPERTIES OUTPUT_NAME "storm-gspn")
add_dependencies(binaries storm-gspn-cli)
# installation
install(TARGETS storm-gspn-cli RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-gspn-cli EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

28
src/storm-gspn-cli/storm-gspn.cpp

@ -3,7 +3,7 @@
#include "storm-gspn/storage/gspn/GSPN.h"
#include "storm-gspn/storage/gspn/GspnBuilder.h"
#include "storm-gspn/builder/JaniGSPNBuilder.h"
#include "storm-gspn/storm-gspn.h"
#include "storm-gspn/api/storm-gspn.h"
#include "storm/exceptions/BaseException.h"
#include "storm/exceptions/WrongFormatException.h"
@ -44,6 +44,7 @@ void initializeSettings() {
// Register all known settings modules.
storm::settings::addModule<storm::settings::modules::GeneralSettings>();
storm::settings::addModule<storm::settings::modules::IOSettings>();
storm::settings::addModule<storm::settings::modules::GSPNSettings>();
storm::settings::addModule<storm::settings::modules::GSPNExportSettings>();
storm::settings::addModule<storm::settings::modules::CoreSettings>();
@ -92,7 +93,7 @@ int main(const int argc, const char **argv) {
auto gspn = parser.parse(storm::settings::getModule<storm::settings::modules::GSPNSettings>().getGspnFilename());
std::string formulaString = "";
if (!storm::settings::getModule<storm::settings::modules::IOSettings>().isPropertySet()) {
if (storm::settings::getModule<storm::settings::modules::IOSettings>().isPropertySet()) {
formulaString = storm::settings::getModule<storm::settings::modules::IOSettings>().getProperty();
}
boost::optional<std::set<std::string>> propertyFilter;
@ -108,10 +109,10 @@ int main(const int argc, const char **argv) {
gspn->setCapacities(capacities);
}
storm::handleGSPNExportSettings(*gspn);
storm::api::handleGSPNExportSettings(*gspn);
if(storm::settings::getModule<storm::settings::modules::JaniExportSettings>().isJaniFileSet()) {
storm::jani::Model* model = storm::buildJani(*gspn);
storm::jani::Model* model = storm::api::buildJani(*gspn);
storm::api::exportJaniModel(*model, properties, storm::settings::getModule<storm::settings::modules::JaniExportSettings>().getJaniFilename());
delete model;
}
@ -124,25 +125,6 @@ int main(const int argc, const char **argv) {
// auto builder = storm::builder::ExplicitGspnModelBuilder<>();
// auto ma = builder.translateGspn(gspn, formula);
//
// // write gspn into output file
// if (!outputFile.empty()) {
// std::ofstream file;
// file.open(outputFile);
// if (outputType == "pnml") {
// gspn.toPnml(file);
// }
// if (outputType == "pnpro") {
// gspn.toPnpro(file);
// }
// if (outputType == "dot") {
// gspn.writeDotToStream(file);
// }
// if (outputType == "ma") {
// ma.writeDotToStream(file);
// }
// file.close();
// }
// All operations have now been performed, so we clean up everything and terminate.
storm::utility::cleanUp();
return 0;

2
src/storm-gspn/CMakeLists.txt

@ -15,4 +15,4 @@ set(STORM_TARGETS ${STORM_TARGETS} PARENT_SCOPE)
target_link_libraries(storm-gspn storm ${STORM_GSPN_LINK_LIBRARIES})
# installation
install(TARGETS storm-gspn RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-gspn EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

61
src/storm-gspn/api/storm-gspn.cpp

@ -0,0 +1,61 @@
#include "storm-gspn/api/storm-gspn.h"
#include "storm/settings/SettingsManager.h"
#include "storm/utility/file.h"
#include "storm-gspn/settings/modules/GSPNExportSettings.h"
namespace storm {
namespace api {
storm::jani::Model* buildJani(storm::gspn::GSPN const& gspn) {
storm::builder::JaniGSPNBuilder builder(gspn);
return builder.build();
}
void handleGSPNExportSettings(storm::gspn::GSPN const& gspn) {
storm::settings::modules::GSPNExportSettings const& exportSettings = storm::settings::getModule<storm::settings::modules::GSPNExportSettings>();
if (exportSettings.isWriteToDotSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToDotFilename(), fs);
gspn.writeDotToStream(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isWriteToPnproSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToPnproFilename(), fs);
gspn.toPnpro(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isWriteToPnmlSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToPnmlFilename(), fs);
gspn.toPnml(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isWriteToJsonSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToJsonFilename(), fs);
gspn.toJson(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isDisplayStatsSet()) {
std::cout << "============GSPN Statistics==============" << std::endl;
gspn.writeStatsToStream(std::cout);
std::cout << "=========================================" << std::endl;
}
if (exportSettings.isWriteStatsToFileSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteStatsFilename(), fs);
gspn.writeStatsToStream(fs);
storm::utility::closeFile(fs);
}
}
}
}

17
src/storm-gspn/api/storm-gspn.h

@ -0,0 +1,17 @@
#pragma once
#include "storm/storage/jani/Model.h"
#include "storm-gspn/storage/gspn/GSPN.h"
#include "storm-gspn/builder/JaniGSPNBuilder.h"
namespace storm {
namespace api {
/**
* Builds JANI model from GSPN.
*/
storm::jani::Model* buildJani(storm::gspn::GSPN const& gspn);
void handleGSPNExportSettings(storm::gspn::GSPN const& gspn);
}
}

2
src/storm-gspn/parser/GreatSpnEditorProjectParser.cpp

@ -212,7 +212,7 @@ namespace storm {
STORM_PRINT_AND_LOG("unknown child (node=" + storm::adapters::XMLtoString(node->getNodeName()) + "): " + name + "\n");
}
}
builder.addPlace(-1, initialTokens, placeName);
builder.addPlace(boost::none, initialTokens, placeName);
}
bool ignoreTransitionAttribute(std::string const& name) {

5
src/storm-gspn/parser/PnmlParser.cpp

@ -104,7 +104,7 @@ namespace storm {
std::string placeName;
// the first entry is false if the corresponding information was not found in the pnml file
std::pair<bool, uint_fast64_t> numberOfInitialTokens(false, defaultNumberOfInitialTokens);
std::pair<bool, int_fast64_t> capacity(false, defaultCapacity);
std::pair<bool, boost::optional<uint64_t>> capacity(false, boost::none);
// traverse attributes
for (uint_fast64_t i = 0; i < node->getAttributes()->getLength(); ++i) {
@ -150,10 +150,9 @@ namespace storm {
}
if (!capacity.first) {
// no information about the capacity is found
// use default capacity
STORM_PRINT_AND_LOG("unknown capacity (place=" + placeName + ")\n");
}
builder.addPlace(capacity.first ? capacity.second : -1, numberOfInitialTokens.first ? numberOfInitialTokens.second : 0, placeName);
builder.addPlace(capacity.second, numberOfInitialTokens.first ? numberOfInitialTokens.second : 0, placeName);
}
void PnmlParser::traverseTransition(xercesc::DOMNode const* const node) {

11
src/storm-gspn/settings/modules/GSPNExportSettings.cpp

@ -19,6 +19,7 @@ namespace storm {
const std::string GSPNExportSettings::writeToPnmlOptionName = "to-pnml";
const std::string GSPNExportSettings::writeToPnproOptionName = "to-pnpro";
const std::string GSPNExportSettings::writeToJsonOptionName = "to-json";
const std::string GSPNExportSettings::writeStatsOptionName = "to-stats";
const std::string GSPNExportSettings::displayStatsOptionName = "show-stats";
@ -29,6 +30,7 @@ namespace storm {
this->addOption(storm::settings::OptionBuilder(moduleName, writeToDotOptionName, false, "Destination for the dot output.").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "path to file").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, writeToPnmlOptionName, false, "Destination for the pnml output").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "path to file").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, writeToPnproOptionName, false, "Destination for the pnpro output").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "path to file").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, writeToJsonOptionName, false, "Destination for the json output").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "path to file").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, writeStatsOptionName, false, "Destination for the stats file").addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "path to file").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, displayStatsOptionName, false, "Print stats to stdout").build());
}
@ -56,7 +58,14 @@ namespace storm {
std::string GSPNExportSettings::getWriteToPnproFilename() const {
return this->getOption(writeToPnproOptionName).getArgumentByName("filename").getValueAsString();
}
bool GSPNExportSettings::isWriteToJsonSet() const {
return this->getOption(writeToJsonOptionName).getHasOptionBeenSet();
}
std::string GSPNExportSettings::getWriteToJsonFilename() const {
return this->getOption(writeToJsonOptionName).getArgumentByName("filename").getValueAsString();
}
bool GSPNExportSettings::isDisplayStatsSet() const {
return this->getOption(displayStatsOptionName).getHasOptionBeenSet();

8
src/storm-gspn/settings/modules/GSPNExportSettings.h

@ -37,6 +37,13 @@ namespace storm {
*
*/
std::string getWriteToPnproFilename() const;
bool isWriteToJsonSet() const;
/**
*
*/
std::string getWriteToJsonFilename() const;
bool isDisplayStatsSet() const;
@ -54,6 +61,7 @@ namespace storm {
static const std::string writeToDotOptionName;
static const std::string writeToPnmlOptionName;
static const std::string writeToPnproOptionName;
static const std::string writeToJsonOptionName;
static const std::string displayStatsOptionName;
static const std::string writeStatsOptionName;

76
src/storm-gspn/storage/gspn/GSPN.cpp

@ -6,8 +6,7 @@
#include "storm/utility/macros.h"
#include "storm/exceptions/InvalidArgumentException.h"
#include "storm-gspn/storage/gspn/GspnJsonExporter.h"
namespace storm {
namespace gspn {
@ -396,6 +395,15 @@ namespace storm {
this->transitionLayout = transitionLayout;
}
std::map<uint64_t, LayoutInfo> const& GSPN::getPlaceLayoutInfos() const {
return this->placeLayout;
}
std::map<uint64_t, LayoutInfo> const& GSPN::getTransitionLayoutInfos() const {
return this->transitionLayout;
}
void GSPN::toPnpro(std::ostream &stream) const {
auto space = " ";
auto space2 = " ";
@ -525,6 +533,11 @@ namespace storm {
stream << space3 << "<initialMarking>" << std::endl;
stream << space4 << "<value>Default," << place.getNumberOfInitialTokens() << "</value>" << std::endl;
stream << space3 << "</initialMarking>" << std::endl;
if(place.hasRestrictedCapacity()) {
stream << space3 << "<capacity>" << std::endl;
stream << space4 << "<value>Default," << place.getCapacity() << "</value>" << std::endl;
stream << space3 << "</capacity>" << std::endl;
}
stream << space2 << "</place>" << std::endl;
}
@ -607,9 +620,68 @@ namespace storm {
}
}
// add arcs for timed transitions
for (const auto &trans : timedTransitions) {
// add input arcs
for (auto const& inEntry : trans.getInputPlaces()) {
stream << space2 << "<arc ";
stream << "id=\"arc" << i++ << "\" ";
stream << "source=\"" << places.at(inEntry.first).getName() << "\" ";
stream << "target=\"" << trans.getName() << "\" ";
stream << ">" << std::endl;
stream << space3 << "<inscription>" << std::endl;
stream << space4 << "<value>Default," << inEntry.second << "</value>" << std::endl;
stream << space3 << "</inscription>" << std::endl;
stream << space3 << "<type value=\"normal\" />" << std::endl;
stream << space2 << "</arc>" << std::endl;
}
// add inhibition arcs
for (auto const& inhEntry : trans.getInhibitionPlaces()) {
stream << space2 << "<arc ";
stream << "id=\"arc" << i++ << "\" ";
stream << "source=\"" << places.at(inhEntry.first).getName() << "\" ";
stream << "target=\"" << trans.getName() << "\" ";
stream << ">" << std::endl;
stream << space3 << "<inscription>" << std::endl;
stream << space4 << "<value>Default," << inhEntry.second << "</value>" << std::endl;
stream << space3 << "</inscription>" << std::endl;
stream << space3 << "<type value=\"inhibition\" />" << std::endl;
stream << space2 << "</arc>" << std::endl;
}
// add output arcs
for (auto const& outEntry : trans.getOutputPlaces()) {
stream << space2 << "<arc ";
stream << "id=\"arc" << i++ << "\" ";
stream << "source=\"" << trans.getName() << "\" ";
stream << "target=\"" << places.at(outEntry.first).getName() << "\" ";
stream << ">" << std::endl;
stream << space3 << "<inscription>" << std::endl;
stream << space4 << "<value>Default," << outEntry.second << "</value>" << std::endl;
stream << space3 << "</inscription>" << std::endl;
stream << space3 << "<type value=\"normal\" />" << std::endl;
stream << space2 << "</arc>" << std::endl;
}
}
stream << space << "</net>" << std::endl;
stream << "</pnml>" << std::endl;
}
void GSPN::toJson(std::ostream &stream) const {
return storm::gspn::GspnJsonExporter::toStream(*this, stream);
}
void GSPN::writeStatsToStream(std::ostream& stream) const {
stream << "Number of places: " << getNumberOfPlaces() << std::endl;

14
src/storm-gspn/storage/gspn/GSPN.h

@ -154,7 +154,10 @@ namespace storm {
void setTransitionLayoutInfo(uint64_t transitionId, LayoutInfo const& layout) const;
void setPlaceLayoutInfo(std::map<uint64_t, LayoutInfo> const& placeLayout) const;
void setTransitionLayoutInfo(std::map<uint64_t, LayoutInfo> const& transitionLayout) const;
std::map<uint64_t, LayoutInfo> const& getPlaceLayoutInfos() const;
std::map<uint64_t, LayoutInfo> const& getTransitionLayoutInfos() const;
/*!
* Perform some checks
@ -168,7 +171,14 @@ namespace storm {
void toPnpro(std::ostream& stream) const;
// TODO doc
void toPnml(std::ostream& stream) const;
/*!
* Export GSPN in Json format.
*
* @param stream Output stream.
*/
void toJson(std::ostream& stream) const;
void writeStatsToStream(std::ostream& stream) const;
private:
storm::gspn::Place* getPlace(uint64_t id);

2
src/storm-gspn/storage/gspn/GspnBuilder.cpp

@ -13,7 +13,7 @@ namespace storm {
gspnName = name;
}
uint_fast64_t GspnBuilder::addPlace(int_fast64_t const& capacity, uint_fast64_t const& initialTokens, std::string const& name) {
uint_fast64_t GspnBuilder::addPlace(boost::optional<uint64_t> capacity, uint_fast64_t const& initialTokens, std::string const& name) {
auto newId = places.size();
auto place = storm::gspn::Place(newId);
place.setCapacity(capacity);

2
src/storm-gspn/storage/gspn/GspnBuilder.h

@ -25,7 +25,7 @@ namespace storm {
* A capacity of -1 indicates an unbounded place.
* @param initialTokens The number of initial tokens in the place.
*/
uint_fast64_t addPlace(int_fast64_t const& capacity = 1, uint_fast64_t const& initialTokens = 0, std::string const& name = "");
uint_fast64_t addPlace(boost::optional<uint64_t> capacity = 1, uint_fast64_t const& initialTokens = 0, std::string const& name = "");
void setPlaceLayoutInfo(uint64_t placeId, LayoutInfo const& layoutInfo);
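For illustration only (not part of the diff): the effect of the new optional capacity, assuming a default-constructed builder and made-up place names. An unset optional now marks an unbounded place, replacing the old magic value -1 mentioned in the comment above.
storm::gspn::GspnBuilder builder;
auto bounded = builder.addPlace(3, 0, "p_bounded");               // capacity 3
auto unbounded = builder.addPlace(boost::none, 1, "p_unbounded"); // unbounded place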

203
src/storm-gspn/storage/gspn/GspnJsonExporter.cpp

@ -0,0 +1,203 @@
#include "GspnJsonExporter.h"
#include "storm/exceptions/NotImplementedException.h"
#include "storm/exceptions/FileIoException.h"
#include <algorithm>
#include <string>
namespace storm {
namespace gspn {
// Prevent some magic constants
static constexpr const uint64_t scaleFactor = 50;
void GspnJsonExporter::toStream(storm::gspn::GSPN const& gspn, std::ostream& os) {
os << translate(gspn).dump(4) << std::endl;
}
modernjson::json GspnJsonExporter::translate(storm::gspn::GSPN const& gspn) {
modernjson::json jsonGspn;
// Layouts
std::map<uint64_t, LayoutInfo> placeLayout = gspn.getPlaceLayoutInfos();
std::map<uint64_t, LayoutInfo> transitionLayout = gspn.getTransitionLayoutInfos();
double tmpX = 0;
double tmpY = 10;
// Export places
for (const auto &place : gspn.getPlaces()) {
double x = tmpX;
double y = tmpY;
if (placeLayout.count(place.getID()) > 0) {
x = placeLayout.at(place.getID()).x;
y = placeLayout.at(place.getID()).y;
}
tmpX += 3;
modernjson::json jsonPlace = translatePlace(place, x, y);
jsonGspn.push_back(jsonPlace);
}
// Export immediate transitions
for (const auto &transition : gspn.getImmediateTransitions()) {
double x = tmpX;
double y = tmpY;
if (transitionLayout.count(transition.getID()) > 0) {
x = transitionLayout.at(transition.getID()).x;
y = transitionLayout.at(transition.getID()).y;
}
tmpX += 3;
modernjson::json jsonImmediateTransition = translateImmediateTransition(transition, x, y);
jsonGspn.push_back(jsonImmediateTransition);
}
// Export timed transitions
for (const auto &transition : gspn.getTimedTransitions()) {
double x = tmpX;
double y = tmpY;
if (transitionLayout.count(transition.getID()) > 0) {
x = transitionLayout.at(transition.getID()).x;
y = transitionLayout.at(transition.getID()).y;
}
tmpX += 3;
modernjson::json jsonTimedTransition = translateTimedTransition(transition, x, y);
jsonGspn.push_back(jsonTimedTransition);
}
// Export arcs
std::vector<storm::gspn::Place> places = gspn.getPlaces();
// Export arcs for immediate transitions
for (const auto &transition : gspn.getImmediateTransitions()) {
// Export input arcs
for (auto const& entry : transition.getInputPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonInputArc = translateArc(transition, place, entry.second, true, ArcType::INPUT);
jsonGspn.push_back(jsonInputArc);
}
// Export inhibitor arcs
for (auto const& entry : transition.getInhibitionPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonInputArc = translateArc(transition, place, entry.second, true, ArcType::INHIBITOR);
jsonGspn.push_back(jsonInputArc);
}
// Export output arcs
for (auto const& entry : transition.getOutputPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonOutputArc = translateArc(transition, place, entry.second, true, ArcType::OUTPUT);
jsonGspn.push_back(jsonOutputArc);
}
}
// Export arcs for timed transitions
for (const auto &transition : gspn.getTimedTransitions()) {
// Export input arcs
for (auto const& entry : transition.getInputPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonInputArc = translateArc(transition, place, entry.second, false, ArcType::INPUT);
jsonGspn.push_back(jsonInputArc);
}
// Export inhibitor arcs
for (auto const& entry : transition.getInhibitionPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonInhibitorArc = translateArc(transition, place, entry.second, false, ArcType::INHIBITOR);
jsonGspn.push_back(jsonInhibitorArc);
}
// Export output arcs
for (auto const& entry : transition.getOutputPlaces()) {
storm::gspn::Place place = places.at(entry.first);
modernjson::json jsonOutputArc = translateArc(transition, place, entry.second, false, ArcType::OUTPUT);
jsonGspn.push_back(jsonOutputArc);
}
}
return jsonGspn;
}
modernjson::json GspnJsonExporter::translatePlace(storm::gspn::Place const& place, double x, double y) {
modernjson::json data;
data["id"] = toJsonString(place);
data["name"] = place.getName();
data["marking"] = place.getNumberOfInitialTokens();
modernjson::json position;
position["x"] = x * scaleFactor;
position["y"] = y * scaleFactor;
modernjson::json jsonPlace;
jsonPlace["data"] = data;
jsonPlace["position"] = position;
jsonPlace["group"] = "nodes";
jsonPlace["classes"] = "place";
return jsonPlace;
}
modernjson::json GspnJsonExporter::translateImmediateTransition(storm::gspn::ImmediateTransition<double> const& transition, double x, double y) {
modernjson::json data;
data["id"] = toJsonString(transition, true);
data["name"] = transition.getName();
data["priority"] = transition.getPriority();
data["weight"] = transition.getWeight();
modernjson::json position;
position["x"] = x * scaleFactor;
position["y"] = y * scaleFactor;
modernjson::json jsonTrans;
jsonTrans["data"] = data;
jsonTrans["position"] = position;
jsonTrans["group"] = "nodes";
jsonTrans["classes"] = "trans_im";
return jsonTrans;
}
modernjson::json GspnJsonExporter::translateTimedTransition(storm::gspn::TimedTransition<double> const& transition, double x, double y) {
modernjson::json data;
data["id"] = toJsonString(transition, false);
data["name"] = transition.getName();
data["rate"] = transition.getRate();
data["priority"] = transition.getPriority();
modernjson::json position;
position["x"] = x * scaleFactor;
position["y"] = y * scaleFactor;
modernjson::json jsonTrans;
jsonTrans["data"] = data;
jsonTrans["position"] = position;
jsonTrans["group"] = "nodes";
jsonTrans["classes"] = "trans_time";
return jsonTrans;
}
modernjson::json GspnJsonExporter::translateArc(storm::gspn::Transition const& transition, storm::gspn::Place const& place, uint64_t multiplicity, bool immediate, ArcType arctype) {
modernjson::json data;
data["id"] = toJsonString(transition, place, arctype);
data["source"] = toJsonString(place);
data["target"] = toJsonString(transition, immediate);
data["mult"] = multiplicity;
modernjson::json jsonArc;
jsonArc["data"] = data;
// Arcs do not get a "group" entry; only places and transitions are nodes.
switch (arctype) {
case INPUT:
jsonArc["classes"] = "input";
break;
case OUTPUT:
jsonArc["classes"] = "output";
break;
case INHIBITOR:
jsonArc["classes"] = "inhibit";
break;
default:
STORM_LOG_ASSERT(false, "Unknown type " << arctype << " used.");
}
return jsonArc;
}
}
}

71
src/storm-gspn/storage/gspn/GspnJsonExporter.h

@ -0,0 +1,71 @@
#pragma once
#include "storm/utility/macros.h"
#include "storm-gspn/storage/gspn/GSPN.h"
// JSON parser
#include "json.hpp"
namespace modernjson {
using json = nlohmann::basic_json<std::map, std::vector, std::string, bool, int64_t, uint64_t, double, std::allocator>;
}
namespace storm {
namespace gspn {
/**
* Exports a GSPN into a JSON format suitable for visualization.
*/
class GspnJsonExporter {
public:
static void toStream(storm::gspn::GSPN const& gspn, std::ostream& os);
static modernjson::json translate(storm::gspn::GSPN const& gspn);
private:
enum ArcType { INPUT, OUTPUT, INHIBITOR };
static modernjson::json translatePlace(storm::gspn::Place const& place, double x, double y);
static modernjson::json translateImmediateTransition(storm::gspn::ImmediateTransition<double> const& transition, double x, double y);
static modernjson::json translateTimedTransition(storm::gspn::TimedTransition<double> const& transition, double x, double y);
static modernjson::json translateArc(storm::gspn::Transition const& transition, storm::gspn::Place const& place, uint64_t multiplicity, bool immediate, ArcType arctype);
static inline std::string toJsonString(storm::gspn::Place const& place) {
std::stringstream stream;
stream << "p" << place.getID();
return stream.str();
}
static inline std::string toJsonString(storm::gspn::Transition const& transition, bool immediate) {
std::stringstream stream;
stream << (immediate ? "i" : "t") << transition.getID();
return stream.str();
}
static inline std::string toJsonString(storm::gspn::Transition const& transition, storm::gspn::Place const& place, ArcType arctype) {
std::stringstream stream;
stream << place.getID();
switch (arctype) {
case INPUT:
stream << "i";
break;
case OUTPUT:
stream << "o";
break;
case INHIBITOR:
stream << "h";
break;
default:
STORM_LOG_ASSERT(false, "Unknown type " << arctype << " used.");
}
stream << transition.getID();
return stream.str();
}
};
}
}

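The exporter is stateless and exposed through static functions, so the whole translation can be obtained in one call. A sketch of its use, together with the shape of a single place entry as produced by translatePlace above (assuming a fully built GSPN named gspn):

#include <iostream>
#include "storm-gspn/storage/gspn/GspnJsonExporter.h"

// Sketch only: 'gspn' is assumed to be a fully built GSPN.
void printGspnJson(storm::gspn::GSPN const& gspn) {
    // Each place becomes an entry of the form (coordinates scaled by scaleFactor = 50):
    // { "data": { "id": "p0", "name": "...", "marking": 0 },
    //   "position": { "x": 0, "y": 500 },
    //   "group": "nodes", "classes": "place" }
    storm::gspn::GspnJsonExporter::toStream(gspn, std::cout);
}
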
3
src/storm-gspn/storage/gspn/Place.cpp

@ -29,11 +29,12 @@ namespace storm {
return this->numberOfInitialTokens;
}
void Place::setCapacity(uint64_t cap) {
void Place::setCapacity(boost::optional<uint64_t> cap) {
this->capacity = cap;
}
uint64_t Place::getCapacity() const {
assert(hasRestrictedCapacity());
return capacity.get();
}

4
src/storm-gspn/storage/gspn/Place.h

@ -54,9 +54,9 @@ namespace storm {
* Sets the capacity of tokens of this place.
*
* @param capacity The capacity of this place. A non-negative number represents the capacity.
* The value -1 indicates that the capacity is not set.
* boost::none indicates that the capacity is not restricted.
*/
void setCapacity(uint64_t capacity);
void setCapacity(boost::optional<uint64_t> capacity);
/*!
* Returns the capacity of tokens of this place.

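Since the capacity is now optional, getCapacity() asserts hasRestrictedCapacity() (see Place.cpp above), so callers should test for a restriction first. A sketch of the resulting read pattern; the fallback value is purely illustrative:

#include <cstdint>
#include "storm-gspn/storage/gspn/Place.h"

// Sketch only: the fallback value is illustrative.
uint64_t capacityOr(storm::gspn::Place const& place, uint64_t fallback) {
    if (place.hasRestrictedCapacity()) {
        return place.getCapacity();  // safe: the optional holds a value
    }
    return fallback;  // unbounded place (capacity == boost::none)
}
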
62
src/storm-gspn/storm-gspn.h

@ -1,62 +0,0 @@
#pragma once
#include "storm/storage/jani/Model.h"
#include "storm-gspn/builder/JaniGSPNBuilder.h"
#include "storm-gspn/storage/gspn/GSPN.h"
#include "storm/settings/SettingsManager.h"
#include "storm-gspn/settings/modules/GSPNExportSettings.h"
#include "storm/utility/file.h"
namespace storm {
/**
* Builds JANI model from GSPN.
*/
storm::jani::Model* buildJani(storm::gspn::GSPN const& gspn) {
storm::builder::JaniGSPNBuilder builder(gspn);
return builder.build();
}
void handleGSPNExportSettings(storm::gspn::GSPN const& gspn) {
storm::settings::modules::GSPNExportSettings const& exportSettings = storm::settings::getModule<storm::settings::modules::GSPNExportSettings>();
if (exportSettings.isWriteToDotSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToDotFilename(), fs);
gspn.writeDotToStream(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isWriteToPnproSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToPnproFilename(), fs);
gspn.toPnpro(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isWriteToPnmlSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteToPnmlFilename(), fs);
gspn.toPnml(fs);
storm::utility::closeFile(fs);
}
if (exportSettings.isDisplayStatsSet()) {
std::cout << "============GSPN Statistics==============" << std::endl;
gspn.writeStatsToStream(std::cout);
std::cout << "=========================================" << std::endl;
}
if (exportSettings.isWriteStatsToFileSet()) {
std::ofstream fs;
storm::utility::openFile(exportSettings.getWriteStatsFilename(), fs);
gspn.writeStatsToStream(fs);
storm::utility::closeFile(fs);
}
}
}

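The removed handleGSPNExportSettings followed a fixed openFile / write / closeFile pattern for every export format. A hedged sketch of how the new JSON export could be wired into the same pattern; the settings accessors isWriteToJsonSet() and getWriteToJsonFilename() are hypothetical names used only for illustration:

#include <fstream>
#include "storm-gspn/storage/gspn/GSPN.h"
#include "storm-gspn/settings/modules/GSPNExportSettings.h"
#include "storm/utility/file.h"

// Sketch only: the two settings accessors below are HYPOTHETICAL.
void handleJsonExport(storm::gspn::GSPN const& gspn,
                      storm::settings::modules::GSPNExportSettings const& exportSettings) {
    if (exportSettings.isWriteToJsonSet()) {                                     // hypothetical
        std::ofstream fs;
        storm::utility::openFile(exportSettings.getWriteToJsonFilename(), fs);   // hypothetical
        gspn.toJson(fs);
        storm::utility::closeFile(fs);
    }
}
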
2
src/storm-pars-cli/CMakeLists.txt

@ -6,4 +6,4 @@ set_target_properties(storm-pars-cli PROPERTIES OUTPUT_NAME "storm-pars")
add_dependencies(binaries storm-pars-cli)
# installation
install(TARGETS storm-pars-cli RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-pars-cli EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

2
src/storm-pars/CMakeLists.txt

@ -36,5 +36,5 @@ add_custom_target(copy_storm_pars_headers DEPENDS ${STORM_PARS_OUTPUT_HEADERS} $
add_dependencies(storm-pars copy_storm_pars_headers)
# installation
install(TARGETS storm-pars RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-pars EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

92
src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp

@ -10,7 +10,8 @@
#include "storm/modelchecker/prctl/helper/BaierUpperRewardBoundsComputer.h"
#include "storm/models/sparse/Dtmc.h"
#include "storm/models/sparse/StandardRewardModel.h"
#include "storm/solver/StandardMinMaxLinearEquationSolver.h"
#include "storm/solver/MinMaxLinearEquationSolver.h"
#include "storm/solver/Multiplier.h"
#include "storm/utility/vector.h"
#include "storm/utility/graph.h"
#include "storm/utility/NumberTraits.h"
@ -149,9 +150,9 @@ namespace storm {
upperResultBound = storm::utility::one<ConstantType>();
// The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations).
auto req = solverFactory->getRequirements(env, true);
auto req = solverFactory->getRequirements(env, true, boost::none, true);
req.clearBounds();
STORM_LOG_THROW(req.empty(), storm::exceptions::UncheckedRequirementException, "Unchecked solver requirement.");
STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
solverFactory->setRequirementsChecked(true);
}
@ -188,13 +189,13 @@ namespace storm {
lowerResultBound = storm::utility::zero<ConstantType>();
// The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations).
auto req = solverFactory->getRequirements(env, true);
auto req = solverFactory->getRequirements(env, true, boost::none, true);
req.clearLowerBounds();
if (req.requiresUpperBounds()) {
if (req.upperBounds()) {
solvingRequiresUpperRewardBounds = true;
req.clearUpperBounds();
}
STORM_LOG_THROW(req.empty(), storm::exceptions::UncheckedRequirementException, "Unchecked solver requirement.");
STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
solverFactory->setRequirementsChecked(true);
@ -249,49 +250,50 @@ namespace storm {
parameterLifter->specifyRegion(region, dirForParameters);
auto solver = solverFactory->create(env, parameterLifter->getMatrix());
solver->setHasUniqueSolution();
if (lowerResultBound) solver->setLowerBound(lowerResultBound.get());
if (upperResultBound) {
solver->setUpperBound(upperResultBound.get());
} else if (solvingRequiresUpperRewardBounds) {
// For the min-case we use DS-MPI; for the max-case, variant 2 of the Baier et al. paper (CAV'17).
std::vector<ConstantType> oneStepProbs;
oneStepProbs.reserve(parameterLifter->getMatrix().getRowCount());
for (uint64_t row = 0; row < parameterLifter->getMatrix().getRowCount(); ++row) {
oneStepProbs.push_back(storm::utility::one<ConstantType>() - parameterLifter->getMatrix().getRowSum(row));
}
if (dirForParameters == storm::OptimizationDirection::Minimize) {
storm::modelchecker::helper::DsMpiMdpUpperRewardBoundsComputer<ConstantType> dsmpi(parameterLifter->getMatrix(), parameterLifter->getVector(), oneStepProbs);
solver->setUpperBounds(dsmpi.computeUpperBounds());
} else {
storm::modelchecker::helper::BaierUpperRewardBoundsComputer<ConstantType> baier(parameterLifter->getMatrix(), parameterLifter->getVector(), oneStepProbs);
solver->setUpperBound(baier.computeUpperBound());
}
}
if (!stepBound) solver->setTrackScheduler(true);
if (storm::solver::minimize(dirForParameters) && minSchedChoices && !stepBound) solver->setInitialScheduler(std::move(minSchedChoices.get()));
if (storm::solver::maximize(dirForParameters) && maxSchedChoices && !stepBound) solver->setInitialScheduler(std::move(maxSchedChoices.get()));
if (this->currentCheckTask->isBoundSet() && solver->hasInitialScheduler()) {
// If we reach this point, we know that after applying the hint, the x-values can only become larger (if we maximize) or smaller (if we minimize).
std::unique_ptr<storm::solver::TerminationCondition<ConstantType>> termCond;
storm::storage::BitVector relevantStatesInSubsystem = this->currentCheckTask->isOnlyInitialStatesRelevantSet() ? this->parametricModel->getInitialStates() % maybeStates : storm::storage::BitVector(maybeStates.getNumberOfSetBits(), true);
if (storm::solver::minimize(dirForParameters)) {
// Terminate if the value for ALL relevant states is already below the threshold
termCond = std::make_unique<storm::solver::TerminateIfFilteredExtremumBelowThreshold<ConstantType>> (relevantStatesInSubsystem, true, this->currentCheckTask->getBoundThreshold(), false);
} else {
// Terminate if the value for ALL relevant states is already above the threshold
termCond = std::make_unique<storm::solver::TerminateIfFilteredExtremumExceedsThreshold<ConstantType>> (relevantStatesInSubsystem, true, this->currentCheckTask->getBoundThreshold(), true);
}
solver->setTerminationCondition(std::move(termCond));
}
// Invoke the solver
if (stepBound) {
assert(*stepBound > 0);
x = std::vector<ConstantType>(maybeStates.getNumberOfSetBits(), storm::utility::zero<ConstantType>());
solver->repeatedMultiply(env, dirForParameters, x, &parameterLifter->getVector(), *stepBound);
auto multiplier = storm::solver::MultiplierFactory<ConstantType>().create(env, parameterLifter->getMatrix());
multiplier->repeatedMultiplyAndReduce(env, dirForParameters, x, &parameterLifter->getVector(), *stepBound);
} else {
auto solver = solverFactory->create(env, parameterLifter->getMatrix());
solver->setHasUniqueSolution();
if (lowerResultBound) solver->setLowerBound(lowerResultBound.get());
if (upperResultBound) {
solver->setUpperBound(upperResultBound.get());
} else if (solvingRequiresUpperRewardBounds) {
// For the min-case we use DS-MPI; for the max-case, variant 2 of the Baier et al. paper (CAV'17).
std::vector<ConstantType> oneStepProbs;
oneStepProbs.reserve(parameterLifter->getMatrix().getRowCount());
for (uint64_t row = 0; row < parameterLifter->getMatrix().getRowCount(); ++row) {
oneStepProbs.push_back(storm::utility::one<ConstantType>() - parameterLifter->getMatrix().getRowSum(row));
}
if (dirForParameters == storm::OptimizationDirection::Minimize) {
storm::modelchecker::helper::DsMpiMdpUpperRewardBoundsComputer<ConstantType> dsmpi(parameterLifter->getMatrix(), parameterLifter->getVector(), oneStepProbs);
solver->setUpperBounds(dsmpi.computeUpperBounds());
} else {
storm::modelchecker::helper::BaierUpperRewardBoundsComputer<ConstantType> baier(parameterLifter->getMatrix(), parameterLifter->getVector(), oneStepProbs);
solver->setUpperBound(baier.computeUpperBound());
}
}
solver->setTrackScheduler(true);
if (storm::solver::minimize(dirForParameters) && minSchedChoices) solver->setInitialScheduler(std::move(minSchedChoices.get()));
if (storm::solver::maximize(dirForParameters) && maxSchedChoices) solver->setInitialScheduler(std::move(maxSchedChoices.get()));
if (this->currentCheckTask->isBoundSet() && solver->hasInitialScheduler()) {
// If we reach this point, we know that after applying the hint, the x-values can only become larger (if we maximize) or smaller (if we minimize).
std::unique_ptr<storm::solver::TerminationCondition<ConstantType>> termCond;
storm::storage::BitVector relevantStatesInSubsystem = this->currentCheckTask->isOnlyInitialStatesRelevantSet() ? this->parametricModel->getInitialStates() % maybeStates : storm::storage::BitVector(maybeStates.getNumberOfSetBits(), true);
if (storm::solver::minimize(dirForParameters)) {
// Terminate if the value for ALL relevant states is already below the threshold
termCond = std::make_unique<storm::solver::TerminateIfFilteredExtremumBelowThreshold<ConstantType>> (relevantStatesInSubsystem, true, this->currentCheckTask->getBoundThreshold(), false);
} else {
// Terminate if the value for ALL relevant states is already above the threshold
termCond = std::make_unique<storm::solver::TerminateIfFilteredExtremumExceedsThreshold<ConstantType>> (relevantStatesInSubsystem, true, this->currentCheckTask->getBoundThreshold(), true);
}
solver->setTerminationCondition(std::move(termCond));
}
// Invoke the solver
x.resize(maybeStates.getNumberOfSetBits(), storm::utility::zero<ConstantType>());
solver->solveEquations(env, dirForParameters, x, parameterLifter->getVector());
if (storm::solver::minimize(dirForParameters)) {

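The step-bounded case above no longer calls repeatedMultiply on the min-max solver but goes through the Multiplier factory, as shown in the hunk. A self-contained sketch of that pattern, with the matrix, vectors and step bound assumed to be set up elsewhere:

#include <cstdint>
#include <vector>
#include "storm/environment/Environment.h"
#include "storm/solver/Multiplier.h"
#include "storm/solver/OptimizationDirection.h"
#include "storm/storage/SparseMatrix.h"

// Sketch only: mirrors the new step-bounded evaluation in the diff above.
template<typename ConstantType>
void evaluateStepBounded(storm::Environment const& env, storm::OptimizationDirection dir,
                         storm::storage::SparseMatrix<ConstantType> const& matrix,
                         std::vector<ConstantType> const& b, std::vector<ConstantType>& x,
                         uint64_t stepBound) {
    auto multiplier = storm::solver::MultiplierFactory<ConstantType>().create(env, matrix);
    multiplier->repeatedMultiplyAndReduce(env, dir, x, &b, stepBound);
}
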
12
src/storm-pars/settings/ParsSettings.cpp

@ -9,19 +9,21 @@
#include "storm/settings/modules/CoreSettings.h"
#include "storm/settings/modules/IOSettings.h"
#include "storm/settings/modules/BuildSettings.h"
#include "storm/settings/modules/ModelCheckerSettings.h"
#include "storm/settings/modules/DebugSettings.h"
#include "storm/settings/modules/SylvanSettings.h"
#include "storm/settings/modules/EigenEquationSolverSettings.h"
#include "storm/settings/modules/GmmxxEquationSolverSettings.h"
#include "storm/settings/modules/NativeEquationSolverSettings.h"
#include "storm/settings/modules/TopologicalEquationSolverSettings.h"
#include "storm/settings/modules/EliminationSettings.h"
#include "storm/settings/modules/MinMaxEquationSolverSettings.h"
#include "storm/settings/modules/GameSolverSettings.h"
#include "storm/settings/modules/BisimulationSettings.h"
#include "storm/settings/modules/TopologicalValueIterationEquationSolverSettings.h"
#include "storm/settings/modules/ResourceSettings.h"
#include "storm/settings/modules/JaniExportSettings.h"
#include "storm/settings/modules/JitBuilderSettings.h"
#include "storm/settings/modules/MultiplierSettings.h"
namespace storm {
@ -37,22 +39,22 @@ namespace storm {
storm::settings::addModule<storm::settings::modules::RegionSettings>();
storm::settings::addModule<storm::settings::modules::BuildSettings>();
storm::settings::addModule<storm::settings::modules::CounterexampleGeneratorSettings>();
storm::settings::addModule<storm::settings::modules::ModelCheckerSettings>();
storm::settings::addModule<storm::settings::modules::DebugSettings>();
storm::settings::addModule<storm::settings::modules::SylvanSettings>();
storm::settings::addModule<storm::settings::modules::GmmxxEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::EigenEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::NativeEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::TopologicalEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::EliminationSettings>();
storm::settings::addModule<storm::settings::modules::MinMaxEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::GameSolverSettings>();
storm::settings::addModule<storm::settings::modules::BisimulationSettings>();
storm::settings::addModule<storm::settings::modules::TopologicalValueIterationEquationSolverSettings>();
storm::settings::addModule<storm::settings::modules::ResourceSettings>();
storm::settings::addModule<storm::settings::modules::JaniExportSettings>();
storm::settings::addModule<storm::settings::modules::JitBuilderSettings>();
storm::settings::addModule<storm::settings::modules::MultiplierSettings>();
}
}
}
}

2
src/storm-pgcl-cli/CMakeLists.txt

@ -5,4 +5,4 @@ set_target_properties(storm-pgcl-cli PROPERTIES OUTPUT_NAME "storm-pgcl")
add_dependencies(binaries storm-pgcl-cli)
# installation
install(TARGETS storm-pgcl-cli RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-pgcl-cli EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

2
src/storm-pgcl/CMakeLists.txt

@ -13,4 +13,4 @@ add_library(storm-pgcl SHARED ${STORM_PGCL_SOURCES} ${STORM_PGCL_HEADERS})
target_link_libraries(storm-pgcl storm)
# installation
install(TARGETS storm-pgcl RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)
install(TARGETS storm-pgcl EXPORT storm_Targets RUNTIME DESTINATION bin LIBRARY DESTINATION lib OPTIONAL)

Some files were not shown because too many files changed in this diff
