Browse Source
Merge branch 'future' of https://sselab.de/lab9/private/git/storm into future
Merge branch 'future' of https://sselab.de/lab9/private/git/storm into future
Former-commit-id: main beff71d7ce
[formerly 4a4db4f831
] Former-commit-id: 211cc676cb
72 changed files with 4025 additions and 533 deletions
-
5examples/dft/and_approx.dft
-
6examples/dft/and_approx_param.dft
-
5examples/dft/approx.dft
-
7examples/dft/fdep4.dft
-
58examples/dft/mas.dft
-
6examples/dft/nonmonoton.dft
-
8examples/dft/nonmonoton_param.dft
-
1resources/3rdparty/CMakeLists.txt
-
95src/builder/DftExplorationHeuristic.cpp
-
190src/builder/DftExplorationHeuristic.h
-
120src/builder/DftSmtBuilder.cpp
-
49src/builder/DftSmtBuilder.h
-
7src/builder/ExplicitDFTModelBuilder.cpp
-
2src/builder/ExplicitDFTModelBuilder.h
-
731src/builder/ExplicitDFTModelBuilderApprox.cpp
-
335src/builder/ExplicitDFTModelBuilderApprox.h
-
172src/builder/ParallelCompositionBuilder.cpp
-
24src/builder/ParallelCompositionBuilder.h
-
209src/generator/DftNextStateGenerator.cpp
-
73src/generator/DftNextStateGenerator.h
-
3src/generator/StateBehavior.cpp
-
191src/modelchecker/DFTAnalyser.h
-
413src/modelchecker/dft/DFTASFChecker.cpp
-
61src/modelchecker/dft/DFTASFChecker.h
-
441src/modelchecker/dft/DFTModelChecker.cpp
-
140src/modelchecker/dft/DFTModelChecker.h
-
14src/models/sparse/MarkovAutomaton.cpp
-
7src/models/sparse/MarkovAutomaton.h
-
4src/models/sparse/Model.cpp
-
24src/models/sparse/StandardRewardModel.cpp
-
2src/models/sparse/StateLabeling.cpp
-
2src/models/sparse/StateLabeling.h
-
18src/permissivesched/MILPPermissiveSchedulers.h
-
2src/permissivesched/PermissiveSchedulerPenalty.h
-
4src/permissivesched/PermissiveSchedulers.cpp
-
2src/permissivesched/PermissiveSchedulers.h
-
14src/permissivesched/SmtBasedPermissiveSchedulers.h
-
47src/settings/modules/DFTSettings.cpp
-
42src/settings/modules/DFTSettings.h
-
1src/solver/AbstractEquationSolver.h
-
1src/solver/MinMaxLinearEquationSolver.h
-
4src/solver/OptimizationDirection.cpp
-
212src/storage/BucketPriorityQueue.cpp
-
73src/storage/BucketPriorityQueue.h
-
71src/storage/DynamicPriorityQueue.h
-
10src/storage/FlexibleSparseMatrix.cpp
-
85src/storage/SparseMatrix.cpp
-
9src/storage/SparseMatrix.h
-
6src/storage/bisimulation/BisimulationDecomposition.h
-
2src/storage/bisimulation/DeterministicModelBisimulationDecomposition.cpp
-
62src/storage/dft/DFT.cpp
-
20src/storage/dft/DFT.h
-
2src/storage/dft/DFTBuilder.cpp
-
18src/storage/dft/DFTElementState.h
-
41src/storage/dft/DFTIsomorphism.h
-
130src/storage/dft/DFTState.cpp
-
99src/storage/dft/DFTState.h
-
2src/storage/dft/DFTStateSpaceGenerationQueues.h
-
5src/storage/expressions/Variable.cpp
-
3src/storage/expressions/Variable.h
-
3src/storage/prism/Assignment.cpp
-
3src/storage/prism/Command.cpp
-
4src/storage/prism/Program.cpp
-
2src/storage/prism/Program.h
-
1src/storage/sparse/StateStorage.cpp
-
79src/storm-dyftee.cpp
-
2src/utility/bitoperations.h
-
37src/utility/constants.cpp
-
5src/utility/constants.h
-
10src/utility/logging.h
-
17src/utility/vector.cpp
-
5src/utility/vector.h
@ -0,0 +1,5 @@ |
|||||
|
toplevel "A"; |
||||
|
"A" and "B" "C" "D"; |
||||
|
"B" lambda=1 dorm=0; |
||||
|
"C" lambda=100 dorm=0; |
||||
|
"D" lambda=50 dorm=0; |
@ -0,0 +1,6 @@ |
|||||
|
param x; |
||||
|
toplevel "A"; |
||||
|
"A" and "B" "C" "D"; |
||||
|
"B" lambda=1 dorm=0; |
||||
|
"C" lambda=100 dorm=0; |
||||
|
"D" lambda=100*x dorm=0; |
@ -0,0 +1,5 @@ |
|||||
|
toplevel "A"; |
||||
|
"A" and "B" "C" "D"; |
||||
|
"B" lambda=0.1 dorm=0; |
||||
|
"C" lambda=10 dorm=0; |
||||
|
"D" lambda=0.1 dorm=0; |
@ -0,0 +1,7 @@ |
|||||
|
toplevel "A"; |
||||
|
"A" or "F" "B"; |
||||
|
"F" fdep "E" "C" "D"; |
||||
|
"B" wsp "C" "D"; |
||||
|
"C" lambda=1 dorm=0; |
||||
|
"D" lambda=1 dorm=0.5; |
||||
|
"E" lambda=0.5 dorm=0; |
@ -0,0 +1,58 @@ |
|||||
|
toplevel "MAS"; |
||||
|
"MAS" or "CPU" "DB" "MB" "VMB" "MEM" "VMS"; |
||||
|
"CPU" or "CW" "SO1" "SO2" "PG" "SM"; |
||||
|
"CW" and "CWA" "CWB"; |
||||
|
"SO1" and "SO1A" "SO1B"; |
||||
|
"SO2" and "SO2A" "SO2B"; |
||||
|
"PG" and "PGA" "PGB"; |
||||
|
"SM" and "SMA" "SMB"; |
||||
|
"CWA" csp "CWAev" "S1" "S2"; |
||||
|
"CWB" csp "CWBev" "S1" "S2"; |
||||
|
"SO1A" csp "SO1Aev" "S1" "S2"; |
||||
|
"SO1B" csp "SO1Bev" "S1" "S2"; |
||||
|
"SO2A" csp "SO2Aev" "S1" "S2"; |
||||
|
"SO2B" csp "SO2Bev" "S1" "S2"; |
||||
|
"PGA" csp "PGAev" "S1" "S2"; |
||||
|
"PGB" csp "PGBev" "S1" "S2"; |
||||
|
"SMA" csp "SMAev" "S1" "S2"; |
||||
|
"SMB" csp "SMBev" "S1" "S2"; |
||||
|
"CWAev" lambda=1.0e-6 dorm=0; |
||||
|
"CWBev" lambda=1.0e-6 dorm=0; |
||||
|
"SO1Aev" lambda=1.0e-6 dorm=0; |
||||
|
"SO1Bev" lambda=1.0e-6 dorm=0; |
||||
|
"SO2Aev" lambda=1.0e-6 dorm=0; |
||||
|
"SO2Bev" lambda=1.0e-6 dorm=0; |
||||
|
"PGAev" lambda=1.0e-6 dorm=0; |
||||
|
"PGBev" lambda=1.0e-6 dorm=0; |
||||
|
"SMAev" lambda=1.0e-6 dorm=0; |
||||
|
"SMBev" lambda=1.0e-6 dorm=0; |
||||
|
"S1" lambda=1.0e-6 dorm=0; |
||||
|
"S2" lambda=1.0e-6 dorm=0; |
||||
|
"DB" and "DB1" "DB2" "DB3"; |
||||
|
"DB1" lambda=5.0e-6 dorm=0; |
||||
|
"DB2" lambda=5.0e-6 dorm=0; |
||||
|
"DB3" lambda=5.0e-6 dorm=0; |
||||
|
"MB" and "MB1" "MB2" "MB3"; |
||||
|
"MB1" lambda=5.0e-6 dorm=0; |
||||
|
"MB2" lambda=5.0e-6 dorm=0; |
||||
|
"MB3" lambda=5.0e-6 dorm=0; |
||||
|
"VMB" and "VMB1" "VMB2"; |
||||
|
"VMB1" lambda=5.0e-6 dorm=0; |
||||
|
"VMB2" lambda=5.0e-6 dorm=0; |
||||
|
"MEM" and "MEM1" "MEM2"; |
||||
|
"MEM1" lambda=1.0e-5 dorm=0; |
||||
|
"MEM2" lambda=1.0e-5 dorm=0; |
||||
|
"VMS" or "VM1" "VM2"; |
||||
|
"VM1" and "VM1A" "VM1B"; |
||||
|
"VM2" and "VM2A" "VM2B"; |
||||
|
"VM1A" csp "VM1Aev" "VMS1" "VMS2"; |
||||
|
"VM1B" csp "VM1Bev" "VMS1" "VMS2"; |
||||
|
"VM2A" csp "VM2Aev" "VMS1" "VMS2"; |
||||
|
"VM2B" csp "VM2Bev" "VMS1" "VMS2"; |
||||
|
"VM1Aev" lambda=1.0e-6 dorm=0; |
||||
|
"VM1Bev" lambda=1.0e-6 dorm=0; |
||||
|
"VM2Aev" lambda=1.0e-6 dorm=0; |
||||
|
"VM2Bev" lambda=1.0e-6 dorm=0; |
||||
|
"VMS1" lambda=1.0e-6 dorm=0; |
||||
|
"VMS2" lambda=1.0e-6 dorm=0; |
||||
|
|
@ -0,0 +1,6 @@ |
|||||
|
toplevel "A"; |
||||
|
"A" or "B" "Z"; |
||||
|
"B" pand "D" "S"; |
||||
|
"Z" lambda=1 dorm=0; |
||||
|
"D" lambda=100 dorm=0; |
||||
|
"S" lambda=50 dorm=0; |
@ -0,0 +1,95 @@ |
|||||
|
#include "src/builder/DftExplorationHeuristic.h"

#include "src/adapters/CarlAdapter.h"
#include "src/utility/macros.h"
#include "src/utility/constants.h"
#include "src/exceptions/NotImplementedException.h"

namespace storm {
    namespace builder {

        // Heuristic values for the initial state: it is always expanded and
        // starts with the trivial bounds [0, infinity), depth 0 and
        // reachability probability 1.
        template<typename ValueType>
        DFTExplorationHeuristic<ValueType>::DFTExplorationHeuristic(size_t id) : id(id), expand(true), lowerBound(storm::utility::zero<ValueType>()), upperBound(storm::utility::infinity<ValueType>()), depth(0), probability(storm::utility::one<ValueType>()) {
            // Intentionally left empty
        }

        // Heuristic values for a successor state: depth is one more than the
        // predecessor's and the reachability probability is the predecessor's
        // probability weighted with the transition probability rate/exitRate.
        // NOTE(review): upperBound is initialized to 0 here whereas the other
        // constructor uses infinity -- confirm this asymmetry is intended.
        template<typename ValueType>
        DFTExplorationHeuristic<ValueType>::DFTExplorationHeuristic(size_t id, DFTExplorationHeuristic const& predecessor, ValueType rate, ValueType exitRate) : id(id), expand(false), lowerBound(storm::utility::zero<ValueType>()), upperBound(storm::utility::zero<ValueType>()), depth(predecessor.depth + 1) {
            STORM_LOG_ASSERT(storm::utility::zero<ValueType>() < exitRate, "Exit rate is 0");
            probability = predecessor.probability * rate / exitRate;
        }

        // Store the current lower and upper bound for this state.
        template<typename ValueType>
        void DFTExplorationHeuristic<ValueType>::setBounds(ValueType lowerBound, ValueType upperBound) {
            this->lowerBound = lowerBound;
            this->upperBound = upperBound;
        }

        // Accumulate the reachability probability over all incoming transitions.
        template<>
        bool DFTExplorationHeuristicProbability<double>::updateHeuristicValues(DFTExplorationHeuristic<double> const& predecessor, double rate, double exitRate) {
            STORM_LOG_ASSERT(exitRate > 0, "Exit rate is 0");
            probability += predecessor.getProbability() * rate / exitRate;
            return true;
        }

        // The quotient rate/exitRate cannot be evaluated for rational functions.
        template<>
        bool DFTExplorationHeuristicProbability<storm::RationalFunction>::updateHeuristicValues(DFTExplorationHeuristic<storm::RationalFunction> const& predecessor, storm::RationalFunction rate, storm::RationalFunction exitRate) {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "Heuristic rate ratio does not work for rational functions.");
            return false;
        }

        template<>
        double DFTExplorationHeuristicProbability<double>::getPriority() const {
            return probability;
        }

        template<>
        double DFTExplorationHeuristicProbability<storm::RationalFunction>::getPriority() const {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "Heuristic rate ratio does not work for rational functions.");
        }

        // Accumulate the reachability probability over all incoming transitions.
        template<>
        bool DFTExplorationHeuristicBoundDifference<double>::updateHeuristicValues(DFTExplorationHeuristic<double> const& predecessor, double rate, double exitRate) {
            STORM_LOG_ASSERT(exitRate > 0, "Exit rate is 0");
            probability += predecessor.getProbability() * rate / exitRate;
            return true;
        }

        template<>
        bool DFTExplorationHeuristicBoundDifference<storm::RationalFunction>::updateHeuristicValues(DFTExplorationHeuristic<storm::RationalFunction> const& predecessor, storm::RationalFunction rate, storm::RationalFunction exitRate) {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "Heuristic rate ratio does not work for rational functions.");
            return false;
        }

        // Remember the bounds and cache the difference of their reciprocals,
        // which getPriority() uses as a weight.
        // NOTE(review): for 0 < lowerBound <= upperBound this difference is
        // non-positive -- confirm the intended ordering/sign of the bounds.
        template<typename ValueType>
        void DFTExplorationHeuristicBoundDifference<ValueType>::setBounds(ValueType lowerBound, ValueType upperBound) {
            this->lowerBound = lowerBound;
            this->upperBound = upperBound;
            difference = (storm::utility::one<ValueType>() / upperBound) - (storm::utility::one<ValueType>() / lowerBound);
        }

        template<>
        double DFTExplorationHeuristicBoundDifference<double>::getPriority() const {
            return probability * difference;
        }

        template<>
        double DFTExplorationHeuristicBoundDifference<storm::RationalFunction>::getPriority() const {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "Heuristic bound difference does not work for rational functions.");
        }

        // Instantiate templates.
        template class DFTExplorationHeuristic<double>;
        template class DFTExplorationHeuristicNone<double>;
        template class DFTExplorationHeuristicDepth<double>;
        template class DFTExplorationHeuristicProbability<double>;
        template class DFTExplorationHeuristicBoundDifference<double>;

#ifdef STORM_HAVE_CARL
        template class DFTExplorationHeuristic<storm::RationalFunction>;
        template class DFTExplorationHeuristicNone<storm::RationalFunction>;
        template class DFTExplorationHeuristicDepth<storm::RationalFunction>;
        template class DFTExplorationHeuristicProbability<storm::RationalFunction>;
        template class DFTExplorationHeuristicBoundDifference<storm::RationalFunction>;
#endif

    }
}
@ -0,0 +1,190 @@ |
|||||
|
#ifndef STORM_BUILDER_DFTEXPLORATIONHEURISTIC_H_
#define STORM_BUILDER_DFTEXPLORATIONHEURISTIC_H_

#include <memory>

namespace storm {

    namespace builder {

        /*!
         * Enum representing the heuristic used for deciding which states to expand.
         */
        enum class ApproximationHeuristic { NONE, DEPTH, PROBABILITY, BOUNDDIFFERENCE };


        /*!
         * General super class for approximation heuristics.
         */
        template<typename ValueType>
        class DFTExplorationHeuristic {

        public:
            /*!
             * Construct heuristic values for the initial state.
             * @param id Id of the state.
             */
            DFTExplorationHeuristic(size_t id);

            /*!
             * Construct heuristic values for a successor of the given predecessor.
             * @param id Id of the state.
             * @param predecessor Heuristic values of the predecessor state.
             * @param rate Rate of the transition leading to this state.
             * @param exitRate Exit rate of the predecessor state.
             */
            DFTExplorationHeuristic(size_t id, DFTExplorationHeuristic const& predecessor, ValueType rate, ValueType exitRate);

            /*!
             * Set the current lower and upper bound for this state.
             */
            void setBounds(ValueType lowerBound, ValueType upperBound);

            /*!
             * Update the heuristic values when the state is reached via another
             * predecessor.
             * @return True iff the heuristic values changed.
             */
            virtual bool updateHeuristicValues(DFTExplorationHeuristic const& predecessor, ValueType rate, ValueType exitRate) = 0;

            /*!
             * Priority used for ordering states in the exploration queue.
             */
            virtual double getPriority() const = 0;

            /*!
             * Whether the state may be skipped (not expanded) for the given threshold.
             */
            virtual bool isSkip(double approximationThreshold) const = 0;

            // Mark this state for expansion regardless of the heuristic value.
            void markExpand() {
                expand = true;
            }

            size_t getId() const {
                return id;
            }

            bool isExpand() const {
                return expand;
            }

            size_t getDepth() const {
                return depth;
            }

            ValueType getProbability() const {
                return probability;
            }

            ValueType getLowerBound() const {
                return lowerBound;
            }

            ValueType getUpperBound() const {
                return upperBound;
            }

        protected:
            // Id of the corresponding state.
            size_t id;
            // Whether the state is always expanded (ignoring the heuristic).
            bool expand;
            // Current lower bound for this state.
            ValueType lowerBound;
            // Current upper bound for this state.
            ValueType upperBound;
            // Length of the shortest known path from the initial state.
            size_t depth;
            // Accumulated probability of reaching this state.
            ValueType probability;
        };

        /*!
         * Heuristic which never skips a state (all states are expanded).
         */
        template<typename ValueType>
        class DFTExplorationHeuristicNone : public DFTExplorationHeuristic<ValueType> {
        public:
            DFTExplorationHeuristicNone(size_t id) : DFTExplorationHeuristic<ValueType>(id) {
                // Intentionally left empty
            }

            DFTExplorationHeuristicNone(size_t id, DFTExplorationHeuristicNone<ValueType> const& predecessor, ValueType rate, ValueType exitRate) : DFTExplorationHeuristic<ValueType>(id, predecessor, rate, exitRate) {
                // Intentionally left empty
            }

            bool updateHeuristicValues(DFTExplorationHeuristic<ValueType> const& predecessor, ValueType rate, ValueType exitRate) override {
                return false;
            }

            double getPriority() const override {
                return 0;
            }

            bool isSkip(double approximationThreshold) const override {
                return false;
            }

            // Order by state id (smaller ids have higher priority).
            bool operator<(DFTExplorationHeuristicNone<ValueType> const& other) const {
                return this->id > other.id;
            }
        };

        /*!
         * Heuristic based on the depth of the state in the exploration.
         */
        template<typename ValueType>
        class DFTExplorationHeuristicDepth : public DFTExplorationHeuristic<ValueType> {
        public:
            DFTExplorationHeuristicDepth(size_t id) : DFTExplorationHeuristic<ValueType>(id) {
                // Intentionally left empty
            }

            DFTExplorationHeuristicDepth(size_t id, DFTExplorationHeuristicDepth<ValueType> const& predecessor, ValueType rate, ValueType exitRate) : DFTExplorationHeuristic<ValueType>(id, predecessor, rate, exitRate) {
                // Intentionally left empty
            }

            // Keep the minimal depth over all predecessors.
            bool updateHeuristicValues(DFTExplorationHeuristic<ValueType> const& predecessor, ValueType rate, ValueType exitRate) override {
                if (predecessor.getDepth() < this->depth) {
                    this->depth = predecessor.getDepth();
                    return true;
                }
                return false;
            }

            double getPriority() const override {
                return this->depth;
            }

            // Skip states deeper than the threshold (unless marked for expansion).
            bool isSkip(double approximationThreshold) const override {
                return !this->expand && this->depth > approximationThreshold;
            }

            bool operator<(DFTExplorationHeuristicDepth<ValueType> const& other) const {
                return this->depth > other.depth;
            }
        };

        /*!
         * Heuristic based on the reachability probability of the state.
         */
        template<typename ValueType>
        class DFTExplorationHeuristicProbability : public DFTExplorationHeuristic<ValueType> {
        public:
            DFTExplorationHeuristicProbability(size_t id) : DFTExplorationHeuristic<ValueType>(id) {
                // Intentionally left empty
            }

            DFTExplorationHeuristicProbability(size_t id, DFTExplorationHeuristicProbability<ValueType> const& predecessor, ValueType rate, ValueType exitRate) : DFTExplorationHeuristic<ValueType>(id, predecessor, rate, exitRate) {
                // Intentionally left empty
            }

            bool updateHeuristicValues(DFTExplorationHeuristic<ValueType> const& predecessor, ValueType rate, ValueType exitRate) override;

            double getPriority() const override;

            // Skip states with a priority below the threshold (unless marked for expansion).
            bool isSkip(double approximationThreshold) const override {
                return !this->expand && this->getPriority() < approximationThreshold;
            }

            bool operator<(DFTExplorationHeuristicProbability<ValueType> const& other) const {
                return this->getPriority() < other.getPriority();
            }
        };

        /*!
         * Heuristic based on the difference between lower and upper bound of the state.
         */
        template<typename ValueType>
        class DFTExplorationHeuristicBoundDifference : public DFTExplorationHeuristic<ValueType> {
        public:
            DFTExplorationHeuristicBoundDifference(size_t id) : DFTExplorationHeuristic<ValueType>(id) {
                // Intentionally left empty
            }

            DFTExplorationHeuristicBoundDifference(size_t id, DFTExplorationHeuristicBoundDifference<ValueType> const& predecessor, ValueType rate, ValueType exitRate) : DFTExplorationHeuristic<ValueType>(id, predecessor, rate, exitRate) {
                // Intentionally left empty
            }

            // NOTE(review): this hides the non-virtual base-class setBounds();
            // calls through a base pointer will not update 'difference'. Consider
            // making setBounds virtual in the base class -- confirm intent.
            void setBounds(ValueType lowerBound, ValueType upperBound);

            bool updateHeuristicValues(DFTExplorationHeuristic<ValueType> const& predecessor, ValueType rate, ValueType exitRate) override;

            double getPriority() const override;

            // Skip states with a priority below the threshold (unless marked for expansion).
            bool isSkip(double approximationThreshold) const override {
                return !this->expand && this->getPriority() < approximationThreshold;
            }

            bool operator<(DFTExplorationHeuristicBoundDifference<ValueType> const& other) const {
                return this->getPriority() < other.getPriority();
            }

        private:
            // Cached difference of the reciprocals of the bounds (set in setBounds).
            ValueType difference;
        };


    }
}

#endif /* STORM_BUILDER_DFTEXPLORATIONHEURISTIC_H_ */
@ -0,0 +1,120 @@ |
|||||
|
#include "src/builder/DftSmtBuilder.h"
#include "src/exceptions/NotImplementedException.h"

#include <iostream>

namespace storm {
    namespace builder {

        // Create an expression manager and an SMT solver working on it.
        template <typename ValueType>
        DFTSMTBuilder<ValueType>::DFTSMTBuilder() : manager(std::make_shared<storm::expressions::ExpressionManager>()) {
            solver = storm::utility::solver::SmtSolverFactory().create(*manager);
        }

        // Translate the DFT into SMT constraints over integer failure times and
        // assert them in the solver.
        template <typename ValueType>
        void DFTSMTBuilder<ValueType>::convertToSMT(storm::storage::DFT<ValueType> const& dft) {
            std::cout << "Convert DFT to SMT" << std::endl;
            // Failure times range over 1..nrBasicElements; nrBasicElements + 1
            // encodes "fail-safe" (never fails).
            timeMax = manager->integer(dft.nrBasicElements());
            timeFailSafe = manager->integer(dft.nrBasicElements() + 1);
            timeZero = manager->integer(0);

            // Convert all elements
            for (size_t i = 0; i < dft.nrElements(); ++i) {
                std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i);
                std::cout << "Consider " << element->toString() << std::endl;
                if (element->isBasicElement()) {
                    storm::expressions::Variable varBE = convert(std::static_pointer_cast<storm::storage::DFTBE<ValueType> const>(element));
                    varsBE.push_back(varBE);
                } else if (element->isGate()) {
                    storm::expressions::Variable varGate = convert(std::static_pointer_cast<storm::storage::DFTGate<ValueType> const>(element));
                    if (dft.getTopLevelIndex() == i) {
                        topLevel = varGate;
                    }
                } else if (element->isDependency()) {
                    convert(std::static_pointer_cast<storm::storage::DFTDependency<ValueType> const>(element));
                } else if (element->isRestriction()) {
                    convert(std::static_pointer_cast<storm::storage::DFTRestriction<ValueType> const>(element));
                }
            }

            // No simultaneous fails can occur.
            // Use 'i + 1 < size()' instead of 'i < size() - 1': the latter
            // underflows (size_t wraparound) when no BE was converted.
            for (size_t i = 0; i + 1 < varsBE.size(); ++i) {
                storm::expressions::Expression be = varsBE[i];
                for (size_t j = i + 1; j < varsBE.size(); ++j) {
                    storm::expressions::Expression assertion = be != varsBE[j];
                    solver->add(assertion);
                }
            }

            // For every time-point one BE must fail (unless the top level
            // element already failed before). Guard against an empty BE list to
            // avoid out-of-range access on varsBE[0].
            if (!varsBE.empty()) {
                for (size_t time = 1; time <= dft.nrBasicElements(); ++time) {
                    storm::expressions::Expression exprTime = manager->integer(time);
                    storm::expressions::Expression assertion = varsBE[0] == exprTime;
                    for (size_t i = 1; i < varsBE.size(); ++i) {
                        assertion = assertion || varsBE[i] == exprTime;
                    }
                    assertion = assertion || topLevel <= exprTime;
                    solver->add(assertion);
                }
            }

        }

        // Encode a basic element: its failure time lies strictly between 0 and
        // the fail-safe time-point (a BE always eventually fails).
        template <typename ValueType>
        storm::expressions::Variable DFTSMTBuilder<ValueType>::convert(std::shared_ptr<storm::storage::DFTBE<ValueType> const> const& be) {
            storm::expressions::Variable var = manager->declareIntegerVariable(be->name());
            storm::expressions::Expression assertion = timeZero < var && var < timeFailSafe;
            solver->add(assertion);
            return var;
        }

        // Encode a gate: its failure time may also equal the fail-safe
        // time-point (a gate can stay operational).
        template <typename ValueType>
        storm::expressions::Variable DFTSMTBuilder<ValueType>::convert(std::shared_ptr<storm::storage::DFTGate<ValueType> const> const& gate) {
            storm::expressions::Variable var = manager->declareIntegerVariable(gate->name());
            storm::expressions::Expression assertion = timeZero < var && var <= timeFailSafe;
            solver->add(assertion);
            return var;
        }

        template <typename ValueType>
        storm::expressions::Variable DFTSMTBuilder<ValueType>::convert(std::shared_ptr<storm::storage::DFTDependency<ValueType> const> const& dependency) {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "The dependency cannot be converted into SMT.");
        }

        template <typename ValueType>
        storm::expressions::Variable DFTSMTBuilder<ValueType>::convert(std::shared_ptr<storm::storage::DFTRestriction<ValueType> const> const& restriction) {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "The restriction cannot be converted into SMT.");
        }

        // Check satisfiability of the asserted encoding.
        // @return True iff satisfiable; a satisfying model is printed to stdout.
        template <typename ValueType>
        bool DFTSMTBuilder<ValueType>::check() const {
            std::cout << "Check" << std::endl;
            storm::solver::SmtSolver::CheckResult result = solver->check();
            switch (result) {
                case solver::SmtSolver::CheckResult::Sat:
                {
                    std::cout << "SAT with model:" << std::endl;
                    std::shared_ptr<storm::solver::SmtSolver::ModelReference> model = solver->getModel();
                    for (auto const& pair : *manager) {
                        std::cout << pair.first.getName() << "->" << model->getIntegerValue(pair.first) << std::endl;
                    }
                    return true;
                }
                case solver::SmtSolver::CheckResult::Unsat:
                    return false;
                case solver::SmtSolver::CheckResult::Unknown:
                default:
                    STORM_LOG_ASSERT(false, "Result is unknown.");
                    return false;
            }
        }

        // Explicitly instantiate the class.
        template class DFTSMTBuilder<double>;

#ifdef STORM_HAVE_CARL
        template class DFTSMTBuilder<storm::RationalFunction>;
#endif

    } // namespace builder
} // namespace storm
|
||||
|
|
||||
|
|
@ -0,0 +1,49 @@ |
|||||
|
#ifndef DFTSMTBUILDER_H
#define DFTSMTBUILDER_H

#include "src/solver/SmtSolver.h"
#include "src/utility/solver.h"
#include "src/storage/dft/DFT.h"

namespace storm {
    namespace builder {

        /*!
         * Builder translating a DFT into an SMT encoding.
         *
         * Each basic element and gate is represented by an integer variable
         * giving the time-point at which it fails.
         */
        template<typename ValueType>
        class DFTSMTBuilder {

        public:
            DFTSMTBuilder();

            /*!
             * Translate the given DFT into SMT constraints and assert them in the solver.
             * @param dft The DFT to encode.
             */
            void convertToSMT(storm::storage::DFT<ValueType> const& dft);

            /*!
             * Check satisfiability of the constructed encoding.
             * @return True iff the encoding is satisfiable.
             */
            bool check() const;

        private:

            // Solver holding the asserted constraints.
            std::shared_ptr<storm::solver::SmtSolver> solver;

            // Manager owning all expression variables.
            std::shared_ptr<storm::expressions::ExpressionManager> manager;

            // Largest regular failure time-point (= number of BEs, set in convertToSMT).
            storm::expressions::Expression timeMax;

            // Time-point encoding "never fails" (timeMax + 1).
            storm::expressions::Expression timeFailSafe;

            // Constant time-point 0.
            storm::expressions::Expression timeZero;

            // Variable of the top level element.
            storm::expressions::Expression topLevel;

            // Variables of all converted basic elements.
            std::vector<storm::expressions::Variable> varsBE;

            // Encode a basic element and return its failure-time variable.
            storm::expressions::Variable convert(std::shared_ptr<storm::storage::DFTBE<ValueType> const> const& be);

            // Encode a gate and return its failure-time variable.
            storm::expressions::Variable convert(std::shared_ptr<storm::storage::DFTGate<ValueType> const> const& gate);

            // Not implemented; throws NotImplementedException.
            storm::expressions::Variable convert(std::shared_ptr<storm::storage::DFTDependency<ValueType> const> const& dependency);

            // Not implemented; throws NotImplementedException.
            storm::expressions::Variable convert(std::shared_ptr<storm::storage::DFTRestriction<ValueType> const> const& restriction);

        };
    }
}

#endif /* DFTSMTBUILDER_H */
@ -0,0 +1,731 @@ |
|||||
|
#include "src/builder/ExplicitDFTModelBuilderApprox.h"
|
||||
|
#include <src/models/sparse/MarkovAutomaton.h>
|
||||
|
#include <src/models/sparse/Ctmc.h>
|
||||
|
#include <src/utility/constants.h>
|
||||
|
#include <src/utility/vector.h>
|
||||
|
#include "src/utility/bitoperations.h"
|
||||
|
#include <src/exceptions/UnexpectedException.h>
|
||||
|
#include "src/settings/modules/DFTSettings.h"
|
||||
|
#include "src/settings/SettingsManager.h"
|
||||
|
#include <map>
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace builder { |
||||
|
|
||||
|
// Default-construct all model components; they are filled during model building.
template<typename ValueType, typename StateType>
ExplicitDFTModelBuilderApprox<ValueType, StateType>::ModelComponents::ModelComponents() : transitionMatrix(), stateLabeling(), markovianStates(), exitRates(), choiceLabeling() {
    // Intentionally left empty.
}
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
ExplicitDFTModelBuilderApprox<ValueType, StateType>::MatrixBuilder::MatrixBuilder(bool canHaveNondeterminism) : mappingOffset(0), stateRemapping(), currentRowGroup(0), currentRow(0), canHaveNondeterminism((canHaveNondeterminism)) { |
||||
|
// Create matrix builder
|
||||
|
builder = storm::storage::SparseMatrixBuilder<ValueType>(0, 0, 0, false, canHaveNondeterminism, 0); |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
ExplicitDFTModelBuilderApprox<ValueType, StateType>::ExplicitDFTModelBuilderApprox(storm::storage::DFT<ValueType> const& dft, storm::storage::DFTIndependentSymmetries const& symmetries, bool enableDC) : |
||||
|
dft(dft), |
||||
|
stateGenerationInfo(std::make_shared<storm::storage::DFTStateGenerationInfo>(dft.buildStateGenerationInfo(symmetries))), |
||||
|
enableDC(enableDC), |
||||
|
usedHeuristic(storm::settings::getModule<storm::settings::modules::DFTSettings>().getApproximationHeuristic()), |
||||
|
generator(dft, *stateGenerationInfo, enableDC, mergeFailedStates), |
||||
|
matrixBuilder(!generator.isDeterministicModel()), |
||||
|
stateStorage(((dft.stateVectorSize() / 64) + 1) * 64), |
||||
|
// TODO Matthias: make choosable
|
||||
|
//explorationQueue(dft.nrElements()+1, 0, 1)
|
||||
|
explorationQueue(200, 0, 0.9) |
||||
|
{ |
||||
|
// Intentionally left empty.
|
||||
|
// TODO Matthias: remove again
|
||||
|
usedHeuristic = storm::builder::ApproximationHeuristic::BOUNDDIFFERENCE; |
||||
|
|
||||
|
// Compute independent subtrees
|
||||
|
if (dft.topLevelType() == storm::storage::DFTElementType::OR) { |
||||
|
// We only support this for approximation with top level element OR
|
||||
|
for (auto const& child : dft.getGate(dft.getTopLevelIndex())->children()) { |
||||
|
// Consider all children of the top level gate
|
||||
|
std::vector<size_t> isubdft; |
||||
|
if (child->nrParents() > 1 || child->hasOutgoingDependencies()) { |
||||
|
STORM_LOG_TRACE("child " << child->name() << "does not allow modularisation."); |
||||
|
isubdft.clear(); |
||||
|
} else if (dft.isGate(child->id())) { |
||||
|
isubdft = dft.getGate(child->id())->independentSubDft(false); |
||||
|
} else { |
||||
|
STORM_LOG_ASSERT(dft.isBasicElement(child->id()), "Child is no BE."); |
||||
|
if(dft.getBasicElement(child->id())->hasIngoingDependencies()) { |
||||
|
STORM_LOG_TRACE("child " << child->name() << "does not allow modularisation."); |
||||
|
isubdft.clear(); |
||||
|
} else { |
||||
|
isubdft = {child->id()}; |
||||
|
} |
||||
|
} |
||||
|
if(isubdft.empty()) { |
||||
|
subtreeBEs.clear(); |
||||
|
break; |
||||
|
} else { |
||||
|
// Add new independent subtree
|
||||
|
std::vector<size_t> subtree; |
||||
|
for (size_t id : isubdft) { |
||||
|
if (dft.isBasicElement(id)) { |
||||
|
subtree.push_back(id); |
||||
|
} |
||||
|
} |
||||
|
subtreeBEs.push_back(subtree); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
if (subtreeBEs.empty()) { |
||||
|
// Consider complete tree
|
||||
|
std::vector<size_t> subtree; |
||||
|
for (size_t id = 0; id < dft.nrElements(); ++id) { |
||||
|
if (dft.isBasicElement(id)) { |
||||
|
subtree.push_back(id); |
||||
|
} |
||||
|
} |
||||
|
subtreeBEs.push_back(subtree); |
||||
|
} |
||||
|
|
||||
|
for (auto tree : subtreeBEs) { |
||||
|
std::stringstream stream; |
||||
|
stream << "Subtree: "; |
||||
|
for (size_t id : tree) { |
||||
|
stream << id << ", "; |
||||
|
} |
||||
|
STORM_LOG_TRACE(stream.str()); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::buildModel(LabelOptions const& labelOpts, size_t iteration, double approximationThreshold) { |
||||
|
STORM_LOG_TRACE("Generating DFT state space"); |
||||
|
|
||||
|
if (iteration < 1) { |
||||
|
// Initialize
|
||||
|
modelComponents.markovianStates = storm::storage::BitVector(INITIAL_BITVECTOR_SIZE); |
||||
|
|
||||
|
if(mergeFailedStates) { |
||||
|
// Introduce explicit fail state
|
||||
|
storm::generator::StateBehavior<ValueType, StateType> behavior = generator.createMergeFailedState([this] (DFTStatePointer const& state) { |
||||
|
this->failedStateId = newIndex++; |
||||
|
matrixBuilder.stateRemapping.push_back(0); |
||||
|
return this->failedStateId; |
||||
|
} ); |
||||
|
|
||||
|
matrixBuilder.setRemapping(failedStateId); |
||||
|
STORM_LOG_ASSERT(!behavior.empty(), "Behavior is empty."); |
||||
|
matrixBuilder.newRowGroup(); |
||||
|
setMarkovian(behavior.begin()->isMarkovian()); |
||||
|
|
||||
|
// Now add self loop.
|
||||
|
// TODO Matthias: maybe use general method.
|
||||
|
STORM_LOG_ASSERT(behavior.getNumberOfChoices() == 1, "Wrong number of choices for failed state."); |
||||
|
STORM_LOG_ASSERT(behavior.begin()->size() == 1, "Wrong number of transitions for failed state."); |
||||
|
std::pair<StateType, ValueType> stateProbabilityPair = *(behavior.begin()->begin()); |
||||
|
STORM_LOG_ASSERT(stateProbabilityPair.first == failedStateId, "No self loop for failed state."); |
||||
|
STORM_LOG_ASSERT(storm::utility::isOne<ValueType>(stateProbabilityPair.second), "Probability for failed state != 1."); |
||||
|
matrixBuilder.addTransition(stateProbabilityPair.first, stateProbabilityPair.second); |
||||
|
matrixBuilder.finishRow(); |
||||
|
} |
||||
|
|
||||
|
// Build initial state
|
||||
|
this->stateStorage.initialStateIndices = generator.getInitialStates(std::bind(&ExplicitDFTModelBuilderApprox::getOrAddStateIndex, this, std::placeholders::_1)); |
||||
|
STORM_LOG_ASSERT(stateStorage.initialStateIndices.size() == 1, "Only one initial state assumed."); |
||||
|
initialStateIndex = stateStorage.initialStateIndices[0]; |
||||
|
STORM_LOG_TRACE("Initial state: " << initialStateIndex); |
||||
|
// Initialize heuristic values for inital state
|
||||
|
STORM_LOG_ASSERT(!statesNotExplored.at(initialStateIndex).second, "Heuristic for initial state is already initialized"); |
||||
|
ExplorationHeuristicPointer heuristic = std::make_shared<ExplorationHeuristic>(initialStateIndex); |
||||
|
heuristic->markExpand(); |
||||
|
statesNotExplored[initialStateIndex].second = heuristic; |
||||
|
explorationQueue.push(heuristic); |
||||
|
} else { |
||||
|
initializeNextIteration(); |
||||
|
} |
||||
|
|
||||
|
if (approximationThreshold > 0) { |
||||
|
switch (usedHeuristic) { |
||||
|
case storm::builder::ApproximationHeuristic::NONE: |
||||
|
// Do not change anything
|
||||
|
approximationThreshold = dft.nrElements()+10; |
||||
|
break; |
||||
|
case storm::builder::ApproximationHeuristic::DEPTH: |
||||
|
approximationThreshold = iteration; |
||||
|
break; |
||||
|
case storm::builder::ApproximationHeuristic::PROBABILITY: |
||||
|
case storm::builder::ApproximationHeuristic::BOUNDDIFFERENCE: |
||||
|
approximationThreshold = 10 * std::pow(2, iteration); |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
exploreStateSpace(approximationThreshold); |
||||
|
|
||||
|
size_t stateSize = stateStorage.getNumberOfStates() + (mergeFailedStates ? 1 : 0); |
||||
|
modelComponents.markovianStates.resize(stateSize); |
||||
|
modelComponents.deterministicModel = generator.isDeterministicModel(); |
||||
|
|
||||
|
// Fix the entries in the transition matrix according to the mapping of ids to row group indices
|
||||
|
STORM_LOG_ASSERT(matrixBuilder.stateRemapping[initialStateIndex] == initialStateIndex, "Initial state should not be remapped."); |
||||
|
// TODO Matthias: do not consider all rows?
|
||||
|
STORM_LOG_TRACE("Remap matrix: " << matrixBuilder.stateRemapping << ", offset: " << matrixBuilder.mappingOffset); |
||||
|
matrixBuilder.remap(); |
||||
|
|
||||
|
STORM_LOG_TRACE("State remapping: " << matrixBuilder.stateRemapping); |
||||
|
STORM_LOG_TRACE("Markovian states: " << modelComponents.markovianStates); |
||||
|
STORM_LOG_DEBUG("Model has " << stateSize << " states"); |
||||
|
STORM_LOG_DEBUG("Model is " << (generator.isDeterministicModel() ? "deterministic" : "non-deterministic")); |
||||
|
|
||||
|
// Build transition matrix
|
||||
|
modelComponents.transitionMatrix = matrixBuilder.builder.build(stateSize, stateSize); |
||||
|
if (stateSize <= 15) { |
||||
|
STORM_LOG_TRACE("Transition matrix: " << std::endl << modelComponents.transitionMatrix); |
||||
|
} else { |
||||
|
STORM_LOG_TRACE("Transition matrix: too big to print"); |
||||
|
} |
||||
|
|
||||
|
buildLabeling(labelOpts); |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType>
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::initializeNextIteration() {
    // Prepares the builder for the next refinement iteration: previously skipped
    // states are re-queued for exploration, the matrix builder is re-initialized,
    // and all state indices are remapped so that expanded states come first and
    // skipped states are moved to the end of the matrix.
    STORM_LOG_TRACE("Refining DFT state space");

    // TODO Matthias: should be easier now as skipped states all are at the end of the matrix
    // Push skipped states to explore queue
    // TODO Matthias: remove
    for (auto const& skippedState : skippedStates) {
        statesNotExplored[skippedState.second.first->getId()] = skippedState.second;
        explorationQueue.push(skippedState.second.second);
    }

    // Initialize matrix builder again
    // TODO Matthias: avoid copy
    std::vector<uint_fast64_t> copyRemapping = matrixBuilder.stateRemapping;
    matrixBuilder = MatrixBuilder(!generator.isDeterministicModel());
    matrixBuilder.stateRemapping = copyRemapping;
    StateType nrStates = modelComponents.transitionMatrix.getRowGroupCount();
    STORM_LOG_ASSERT(nrStates == matrixBuilder.stateRemapping.size(), "No. of states does not coincide with mapping size.");

    // Start by creating a remapping from the old indices to the new indices:
    // each old index is shifted down by the number of skipped states preceding it,
    // so that expanded states become a contiguous prefix.
    std::vector<StateType> indexRemapping = std::vector<StateType>(nrStates, 0);
    auto iterSkipped = skippedStates.begin();
    size_t skippedBefore = 0;
    for (size_t i = 0; i < indexRemapping.size(); ++i) {
        // skippedStates is an ordered map, so we can advance the iterator in lockstep.
        while (iterSkipped != skippedStates.end() && iterSkipped->first <= i) {
            STORM_LOG_ASSERT(iterSkipped->first < indexRemapping.size(), "Skipped is too high.");
            ++skippedBefore;
            ++iterSkipped;
        }
        indexRemapping[i] = i - skippedBefore;
    }

    // Set remapping: skipped states are assigned fresh indices after all expanded states.
    size_t nrExpandedStates = nrStates - skippedBefore;
    matrixBuilder.mappingOffset = nrStates;
    STORM_LOG_TRACE("# expanded states: " << nrExpandedStates);
    StateType skippedIndex = nrExpandedStates;
    std::map<StateType, std::pair<DFTStatePointer, ExplorationHeuristicPointer>> skippedStatesNew;
    for (size_t id = 0; id < matrixBuilder.stateRemapping.size(); ++id) {
        StateType index = matrixBuilder.stateRemapping[id];
        auto itFind = skippedStates.find(index);
        if (itFind != skippedStates.end()) {
            // Set new mapping for skipped state
            matrixBuilder.stateRemapping[id] = skippedIndex;
            skippedStatesNew[skippedIndex] = itFind->second;
            indexRemapping[index] = skippedIndex;
            ++skippedIndex;
        } else {
            // Set new mapping for expanded state
            matrixBuilder.stateRemapping[id] = indexRemapping[index];
        }
    }
    STORM_LOG_TRACE("New state remapping: " << matrixBuilder.stateRemapping);
    std::stringstream ss;
    ss << "Index remapping:" << std::endl;
    for (auto tmp : indexRemapping) {
        ss << tmp << " ";
    }
    STORM_LOG_TRACE(ss.str());

    // Remap markovian states: transfer the markovian flags to the new index order.
    storm::storage::BitVector markovianStatesNew = storm::storage::BitVector(modelComponents.markovianStates.size(), true);
    // Iterate over all not set bits
    modelComponents.markovianStates.complement();
    size_t index = modelComponents.markovianStates.getNextSetIndex(0);
    while (index < modelComponents.markovianStates.size()) {
        markovianStatesNew.set(indexRemapping[index], false);
        index = modelComponents.markovianStates.getNextSetIndex(index+1);
    }
    STORM_LOG_ASSERT(modelComponents.markovianStates.size() - modelComponents.markovianStates.getNumberOfSetBits() == markovianStatesNew.getNumberOfSetBits(), "Remapping of markovian states is wrong.");
    STORM_LOG_ASSERT(markovianStatesNew.size() == nrStates, "No. of states does not coincide with markovian size.");
    modelComponents.markovianStates = markovianStatesNew;

    // Build submatrix for expanded states: copy the rows of all expanded states into
    // the fresh matrix builder; targets pointing at skipped states keep their state id
    // (plus mappingOffset) so they can be remapped later.
    // TODO Matthias: only use row groups when necessary
    for (StateType oldRowGroup = 0; oldRowGroup < modelComponents.transitionMatrix.getRowGroupCount(); ++oldRowGroup) {
        if (indexRemapping[oldRowGroup] < nrExpandedStates) {
            // State is expanded -> copy to new matrix
            matrixBuilder.newRowGroup();
            for (StateType oldRow = modelComponents.transitionMatrix.getRowGroupIndices()[oldRowGroup]; oldRow < modelComponents.transitionMatrix.getRowGroupIndices()[oldRowGroup+1]; ++oldRow) {
                for (typename storm::storage::SparseMatrix<ValueType>::const_iterator itEntry = modelComponents.transitionMatrix.begin(oldRow); itEntry != modelComponents.transitionMatrix.end(oldRow); ++itEntry) {
                    auto itFind = skippedStates.find(itEntry->getColumn());
                    if (itFind != skippedStates.end()) {
                        // Set id for skipped states as we remap it later
                        matrixBuilder.addTransition(matrixBuilder.mappingOffset + itFind->second.first->getId(), itEntry->getValue());
                    } else {
                        // Set newly remapped index for expanded states
                        matrixBuilder.addTransition(indexRemapping[itEntry->getColumn()], itEntry->getValue());
                    }
                }
                matrixBuilder.finishRow();
            }
        }
    }

    skippedStates = skippedStatesNew;

    STORM_LOG_ASSERT(matrixBuilder.getCurrentRowGroup() == nrExpandedStates, "Row group size does not match.");
    // NOTE(review): this clears the skippedStatesNew content that was just assigned above.
    // It appears intentional because all skipped states were pushed back onto the
    // exploration queue at the top of this function, but please confirm that
    // skippedStatesNew is not needed afterwards — otherwise the assignment is dead.
    skippedStates.clear();
}
||||
|
|
||||
|
template<typename ValueType, typename StateType>
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::exploreStateSpace(double approximationThreshold) {
    // Explores the DFT state space driven by the exploration priority queue.
    // Once more than `approximationThreshold` states have been expanded, further
    // states (unless explicitly marked for expansion) are skipped: they receive a
    // single placeholder transition to the failed state which is later replaced by
    // a lower/upper bound in changeMatrixBound().
    size_t nrExpandedStates = 0;
    size_t nrSkippedStates = 0;
    // TODO Matthias: do not empty queue every time but break before
    while (!explorationQueue.empty()) {
        // Get the first state in the queue
        ExplorationHeuristicPointer currentExplorationHeuristic = explorationQueue.popTop();
        StateType currentId = currentExplorationHeuristic->getId();
        auto itFind = statesNotExplored.find(currentId);
        STORM_LOG_ASSERT(itFind != statesNotExplored.end(), "Id " << currentId << " not found");
        DFTStatePointer currentState = itFind->second.first;
        STORM_LOG_ASSERT(currentExplorationHeuristic == itFind->second.second, "Exploration heuristics do not match");
        STORM_LOG_ASSERT(currentState->getId() == currentId, "Ids do not match");
        // Remove it from the list of not explored states
        statesNotExplored.erase(itFind);
        STORM_LOG_ASSERT(stateStorage.stateToId.contains(currentState->status()), "State is not contained in state storage.");
        STORM_LOG_ASSERT(stateStorage.stateToId.getValue(currentState->status()) == currentId, "Ids of states do not coincide.");

        // Get concrete state if necessary
        if (currentState->isPseudoState()) {
            // Create concrete state from pseudo state
            currentState->construct();
        }
        STORM_LOG_ASSERT(!currentState->isPseudoState(), "State is pseudo state.");

        // Remember that the current row group was actually filled with the transitions of a different state
        matrixBuilder.setRemapping(currentId);

        matrixBuilder.newRowGroup();

        // Try to explore the next state
        generator.load(currentState);

        if (approximationThreshold > 0.0 && nrExpandedStates > approximationThreshold && !currentExplorationHeuristic->isExpand()) {
            //if (currentExplorationHeuristic->isSkip(approximationThreshold)) {
            // Skip the current state
            ++nrSkippedStates;
            STORM_LOG_TRACE("Skip expansion of state: " << dft.getStateString(currentState));
            setMarkovian(true);
            // Add transition to target state with temporary value 0
            // TODO Matthias: what to do when there is no unique target state?
            matrixBuilder.addTransition(failedStateId, storm::utility::zero<ValueType>());
            // Remember skipped state
            skippedStates[matrixBuilder.getCurrentRowGroup() - 1] = std::make_pair(currentState, currentExplorationHeuristic);
            matrixBuilder.finishRow();
        } else {
            // Explore the current state
            ++nrExpandedStates;
            storm::generator::StateBehavior<ValueType, StateType> behavior = generator.expand(std::bind(&ExplicitDFTModelBuilderApprox::getOrAddStateIndex, this, std::placeholders::_1));
            STORM_LOG_ASSERT(!behavior.empty(), "Behavior is empty.");
            setMarkovian(behavior.begin()->isMarkovian());

            // Now add all choices.
            for (auto const& choice : behavior) {
                // Add the probabilistic behavior to the matrix.
                for (auto const& stateProbabilityPair : choice) {
                    STORM_LOG_ASSERT(!storm::utility::isZero(stateProbabilityPair.second), "Probability zero.");
                    // Set transition to state id + offset. This helps in only remapping all previously skipped states.
                    matrixBuilder.addTransition(matrixBuilder.mappingOffset + stateProbabilityPair.first, stateProbabilityPair.second);
                    // Set heuristic values for reached states
                    auto iter = statesNotExplored.find(stateProbabilityPair.first);
                    if (iter != statesNotExplored.end()) {
                        // Update heuristic values
                        DFTStatePointer state = iter->second.first;
                        if (!iter->second.second) {
                            // Initialize heuristic values
                            ExplorationHeuristicPointer heuristic = std::make_shared<ExplorationHeuristic>(stateProbabilityPair.first, *currentExplorationHeuristic, stateProbabilityPair.second, choice.getTotalMass());
                            iter->second.second = heuristic;
                            if (state->hasFailed(dft.getTopLevelIndex()) || state->isFailsafe(dft.getTopLevelIndex()) || state->nrFailableDependencies() > 0 || (state->nrFailableDependencies() == 0 && state->nrFailableBEs() == 0)) {
                                // Do not skip absorbing state or if reached by dependencies
                                iter->second.second->markExpand();
                            }
                            if (usedHeuristic == storm::builder::ApproximationHeuristic::BOUNDDIFFERENCE) {
                                // Compute bounds for heuristic now
                                if (state->isPseudoState()) {
                                    // Create concrete state from pseudo state
                                    state->construct();
                                }
                                // NOTE(review): this assert checks `currentState`, but the state that
                                // was just constructed above is `state` — possibly `!state->isPseudoState()`
                                // was intended here; confirm.
                                STORM_LOG_ASSERT(!currentState->isPseudoState(), "State is pseudo state.");

                                // Initialize bounds
                                ValueType lowerBound = getLowerBound(state);
                                ValueType upperBound = getUpperBound(state);
                                heuristic->setBounds(lowerBound, upperBound);
                            }

                            explorationQueue.push(heuristic);
                        } else if (!iter->second.second->isExpand()) {
                            double oldPriority = iter->second.second->getPriority();
                            if (iter->second.second->updateHeuristicValues(*currentExplorationHeuristic, stateProbabilityPair.second, choice.getTotalMass())) {
                                // Update priority queue
                                explorationQueue.update(iter->second.second, oldPriority);
                            }
                        }
                    }
                }
                matrixBuilder.finishRow();
            }
        }
    } // end exploration

    STORM_LOG_INFO("Expanded " << nrExpandedStates << " states");
    STORM_LOG_INFO("Skipped " << nrSkippedStates << " states");
    STORM_LOG_ASSERT(nrSkippedStates == skippedStates.size(), "Nr skipped states is wrong");
}
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::buildLabeling(LabelOptions const& labelOpts) { |
||||
|
// Build state labeling
|
||||
|
modelComponents.stateLabeling = storm::models::sparse::StateLabeling(modelComponents.transitionMatrix.getRowGroupCount()); |
||||
|
// Initial state
|
||||
|
modelComponents.stateLabeling.addLabel("init"); |
||||
|
modelComponents.stateLabeling.addLabelToState("init", initialStateIndex); |
||||
|
// Label all states corresponding to their status (failed, failsafe, failed BE)
|
||||
|
if(labelOpts.buildFailLabel) { |
||||
|
modelComponents.stateLabeling.addLabel("failed"); |
||||
|
} |
||||
|
if(labelOpts.buildFailSafeLabel) { |
||||
|
modelComponents.stateLabeling.addLabel("failsafe"); |
||||
|
} |
||||
|
|
||||
|
// Collect labels for all BE
|
||||
|
std::vector<std::shared_ptr<storage::DFTBE<ValueType>>> basicElements = dft.getBasicElements(); |
||||
|
for (std::shared_ptr<storage::DFTBE<ValueType>> elem : basicElements) { |
||||
|
if(labelOpts.beLabels.count(elem->name()) > 0) { |
||||
|
modelComponents.stateLabeling.addLabel(elem->name() + "_fail"); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Set labels to states
|
||||
|
if(mergeFailedStates) { |
||||
|
modelComponents.stateLabeling.addLabelToState("failed", failedStateId); |
||||
|
} |
||||
|
for (auto const& stateIdPair : stateStorage.stateToId) { |
||||
|
storm::storage::BitVector state = stateIdPair.first; |
||||
|
size_t stateId = stateIdPair.second; |
||||
|
if (!mergeFailedStates && labelOpts.buildFailLabel && dft.hasFailed(state, *stateGenerationInfo)) { |
||||
|
modelComponents.stateLabeling.addLabelToState("failed", stateId); |
||||
|
} |
||||
|
if (labelOpts.buildFailSafeLabel && dft.isFailsafe(state, *stateGenerationInfo)) { |
||||
|
modelComponents.stateLabeling.addLabelToState("failsafe", stateId); |
||||
|
}; |
||||
|
// Set fail status for each BE
|
||||
|
for (std::shared_ptr<storage::DFTBE<ValueType>> elem : basicElements) { |
||||
|
if (labelOpts.beLabels.count(elem->name()) > 0 && storm::storage::DFTState<ValueType>::hasFailed(state, stateGenerationInfo->getStateIndex(elem->id())) ) { |
||||
|
modelComponents.stateLabeling.addLabelToState(elem->name() + "_fail", stateId); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
std::shared_ptr<storm::models::sparse::Model<ValueType>> ExplicitDFTModelBuilderApprox<ValueType, StateType>::getModel() { |
||||
|
STORM_LOG_ASSERT(skippedStates.size() == 0, "Concrete model has skipped states"); |
||||
|
return createModel(false); |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType>
std::shared_ptr<storm::models::sparse::Model<ValueType>> ExplicitDFTModelBuilderApprox<ValueType, StateType>::getModelApproximation(bool lowerBound) {
    // Returns an approximation of the model: the placeholder transitions of all
    // skipped states are first overwritten with their lower (lowerBound == true)
    // or upper bound, then a copy of the components is turned into a model so the
    // builder can continue refining afterwards.
    // TODO Matthias: handle case with no skipped states
    changeMatrixBound(modelComponents.transitionMatrix, lowerBound);
    return createModel(true);
}
||||
|
|
||||
|
template<typename ValueType, typename StateType>
std::shared_ptr<storm::models::sparse::Model<ValueType>> ExplicitDFTModelBuilderApprox<ValueType, StateType>::createModel(bool copy) {
    // Assembles a sparse model from the collected components.
    // If `copy` is true the components are copied (the builder keeps them for further
    // refinement iterations); otherwise they are moved into the model.
    std::shared_ptr<storm::models::sparse::Model<ValueType>> model;

    if (modelComponents.deterministicModel) {
        // Build CTMC
        if (copy) {
            model = std::make_shared<storm::models::sparse::Ctmc<ValueType>>(modelComponents.transitionMatrix, modelComponents.stateLabeling);
        } else {
            model = std::make_shared<storm::models::sparse::Ctmc<ValueType>>(std::move(modelComponents.transitionMatrix), std::move(modelComponents.stateLabeling));
        }
    } else {
        // Build MA
        // Compute exit rates
        // TODO Matthias: avoid computing multiple times
        modelComponents.exitRates = std::vector<ValueType>(modelComponents.markovianStates.size());
        std::vector<typename storm::storage::SparseMatrix<ValueType>::index_type> indices = modelComponents.transitionMatrix.getRowGroupIndices();
        for (StateType stateIndex = 0; stateIndex < modelComponents.markovianStates.size(); ++stateIndex) {
            if (modelComponents.markovianStates[stateIndex]) {
                // Exit rate of a Markovian state is the sum of its outgoing rates.
                modelComponents.exitRates[stateIndex] = modelComponents.transitionMatrix.getRowSum(indices[stateIndex]);
            } else {
                // Probabilistic states have no exit rate.
                modelComponents.exitRates[stateIndex] = storm::utility::zero<ValueType>();
            }
        }
        STORM_LOG_TRACE("Exit rates: " << modelComponents.exitRates);

        std::shared_ptr<storm::models::sparse::MarkovAutomaton<ValueType>> ma;
        if (copy) {
            ma = std::make_shared<storm::models::sparse::MarkovAutomaton<ValueType>>(modelComponents.transitionMatrix, modelComponents.stateLabeling, modelComponents.markovianStates, modelComponents.exitRates);
        } else {
            ma = std::make_shared<storm::models::sparse::MarkovAutomaton<ValueType>>(std::move(modelComponents.transitionMatrix), std::move(modelComponents.stateLabeling), std::move(modelComponents.markovianStates), std::move(modelComponents.exitRates));
        }
        if (ma->hasOnlyTrivialNondeterminism()) {
            // Markov automaton can be converted into CTMC
            // TODO Matthias: change components which were not moved accordingly
            model = ma->convertToCTMC();
        } else {
            model = ma;
        }
    }

    STORM_LOG_DEBUG("No. states: " << model->getNumberOfStates());
    STORM_LOG_DEBUG("No. transitions: " << model->getNumberOfTransitions());
    if (model->getNumberOfStates() <= 15) {
        STORM_LOG_TRACE("Transition matrix: " << std::endl << model->getTransitionMatrix());
    } else {
        STORM_LOG_TRACE("Transition matrix: too big to print");
    }
    return model;
}
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::changeMatrixBound(storm::storage::SparseMatrix<ValueType> & matrix, bool lowerBound) const { |
||||
|
// Set lower bound for skipped states
|
||||
|
for (auto it = skippedStates.begin(); it != skippedStates.end(); ++it) { |
||||
|
auto matrixEntry = matrix.getRow(it->first, 0).begin(); |
||||
|
STORM_LOG_ASSERT(matrixEntry->getColumn() == failedStateId, "Transition has wrong target state."); |
||||
|
STORM_LOG_ASSERT(!it->second.first->isPseudoState(), "State is still pseudo state."); |
||||
|
|
||||
|
ExplorationHeuristicPointer heuristic = it->second.second; |
||||
|
if (storm::utility::isZero(heuristic->getUpperBound())) { |
||||
|
// Initialize bounds
|
||||
|
ValueType lowerBound = getLowerBound(it->second.first); |
||||
|
ValueType upperBound = getUpperBound(it->second.first); |
||||
|
heuristic->setBounds(lowerBound, upperBound); |
||||
|
} |
||||
|
|
||||
|
// Change bound
|
||||
|
if (lowerBound) { |
||||
|
matrixEntry->setValue(it->second.second->getLowerBound()); |
||||
|
} else { |
||||
|
matrixEntry->setValue(it->second.second->getUpperBound()); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
ValueType ExplicitDFTModelBuilderApprox<ValueType, StateType>::getLowerBound(DFTStatePointer const& state) const { |
||||
|
// Get the lower bound by considering the failure of all possible BEs
|
||||
|
ValueType lowerBound = storm::utility::zero<ValueType>(); |
||||
|
for (size_t index = 0; index < state->nrFailableBEs(); ++index) { |
||||
|
lowerBound += state->getFailableBERate(index); |
||||
|
} |
||||
|
return lowerBound; |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType>
ValueType ExplicitDFTModelBuilderApprox<ValueType, StateType>::getUpperBound(DFTStatePointer const& state) const {
    // Computes an upper bound for the given state by considering the failure of all
    // BEs, evaluated per independent subtree (subtreeBEs). The returned value is
    // rateSum / upperBound, where upperBound accumulates scaled AND-MTTF terms.
    ValueType upperBound = storm::utility::one<ValueType>();
    ValueType rateSum = storm::utility::zero<ValueType>();

    // Compute for each independent subtree
    for (std::vector<size_t> const& subtree : subtreeBEs) {
        // Get all possible rates of still-operational BEs in this subtree.
        std::vector<ValueType> rates;
        storm::storage::BitVector coldBEs(subtree.size(), false);
        for (size_t i = 0; i < subtree.size(); ++i) {
            size_t id = subtree[i];
            if (state->isOperational(id)) {
                // Get BE rate
                ValueType rate = state->getBERate(id);
                if (storm::utility::isZero<ValueType>(rate)) {
                    // A zero (passive) rate indicates a cold BE; fall back to its
                    // active failure rate and remember that it cannot fail first.
                    rate = dft.getBasicElement(id)->activeFailureRate();
                    // Mark BE as cold
                    coldBEs.set(i, true);
                }
                rates.push_back(rate);
                rateSum += rate;
            }
        }

        STORM_LOG_ASSERT(rates.size() > 0, "No rates failable");

        // Sort rates and count multiplicities: rateCount[i] holds the number of
        // occurrences of the rate value first seen at index i (0 elsewhere).
        std::sort(rates.begin(), rates.end());
        std::vector<size_t> rateCount(rates.size(), 0);
        size_t lastIndex = 0;
        for (size_t i = 0; i < rates.size(); ++i) {
            if (rates[lastIndex] != rates[i]) {
                lastIndex = i;
            }
            ++rateCount[lastIndex];
        }

        for (size_t i = 0; i < rates.size(); ++i) {
            // Cold BEs cannot fail in the first step
            if (!coldBEs.get(i) && rateCount[i] > 0) {
                // Temporarily move the current rate to the back so computeMTTFAnd
                // sees the remaining rates in the first (size - 1) positions.
                std::iter_swap(rates.begin() + i, rates.end() - 1);
                // Compute AND MTTF of subtree without current rate and scale with current rate
                upperBound += rates.back() * rateCount[i] * computeMTTFAnd(rates, rates.size() - 1);
                // Swap back
                // TODO try to avoid swapping back
                std::iter_swap(rates.begin() + i, rates.end() - 1);
            }
        }
    }

    STORM_LOG_TRACE("Upper bound is " << (rateSum / upperBound) << " for state " << state->getId());
    STORM_LOG_ASSERT(!storm::utility::isZero(upperBound), "Upper bound is 0");
    STORM_LOG_ASSERT(!storm::utility::isZero(rateSum), "State is absorbing");
    return rateSum / upperBound;
}
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
ValueType ExplicitDFTModelBuilderApprox<ValueType, StateType>::computeMTTFAnd(std::vector<ValueType> const& rates, size_t size) const { |
||||
|
ValueType result = storm::utility::zero<ValueType>(); |
||||
|
if (size == 0) { |
||||
|
return result; |
||||
|
} |
||||
|
|
||||
|
// TODO Matthias: works only for <64 BEs!
|
||||
|
// WARNING: this code produces wrong results for more than 32 BEs
|
||||
|
/*for (size_t i = 1; i < 4 && i <= rates.size(); ++i) {
|
||||
|
size_t permutation = smallestIntWithNBitsSet(static_cast<size_t>(i)); |
||||
|
ValueType sum = storm::utility::zero<ValueType>(); |
||||
|
do { |
||||
|
ValueType permResult = storm::utility::zero<ValueType>(); |
||||
|
for(size_t j = 0; j < rates.size(); ++j) { |
||||
|
if(permutation & static_cast<size_t>(1 << static_cast<size_t>(j))) { |
||||
|
// WARNING: if the first bit is set, it also recognizes the 32nd bit as set
|
||||
|
// TODO: fix
|
||||
|
permResult += rates[j]; |
||||
|
} |
||||
|
} |
||||
|
permutation = nextBitPermutation(permutation); |
||||
|
STORM_LOG_ASSERT(!storm::utility::isZero(permResult), "PermResult is 0"); |
||||
|
sum += storm::utility::one<ValueType>() / permResult; |
||||
|
} while(permutation < (static_cast<size_t>(1) << rates.size()) && permutation != 0); |
||||
|
if (i % 2 == 0) { |
||||
|
result -= sum; |
||||
|
} else { |
||||
|
result += sum; |
||||
|
} |
||||
|
}*/ |
||||
|
|
||||
|
// Compute result with permutations of size <= 3
|
||||
|
ValueType one = storm::utility::one<ValueType>(); |
||||
|
for (size_t i1 = 0; i1 < size; ++i1) { |
||||
|
// + 1/a
|
||||
|
ValueType sum = rates[i1]; |
||||
|
result += one / sum; |
||||
|
for (size_t i2 = 0; i2 < i1; ++i2) { |
||||
|
// - 1/(a+b)
|
||||
|
ValueType sum2 = sum + rates[i2]; |
||||
|
result -= one / sum2; |
||||
|
for (size_t i3 = 0; i3 < i2; ++i3) { |
||||
|
// + 1/(a+b+c)
|
||||
|
result += one / (sum2 + rates[i3]); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
STORM_LOG_ASSERT(!storm::utility::isZero(result), "UpperBound is 0"); |
||||
|
return result; |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType>
StateType ExplicitDFTModelBuilderApprox<ValueType, StateType>::getOrAddStateIndex(DFTStatePointer const& state) {
    // Looks up the id of the given state, creating a new id (and registering the
    // state as not yet explored) if the state is seen for the first time.
    // Symmetry reduction may reorder the state before the lookup.
    StateType stateId;
    bool changed = false;

    if (stateGenerationInfo->hasSymmetries()) {
        // Order state by symmetry
        STORM_LOG_TRACE("Check for symmetry: " << dft.getStateString(state));
        changed = state->orderBySymmetry();
        STORM_LOG_TRACE("State " << (changed ? "changed to " : "did not change") << (changed ? dft.getStateString(state) : ""));
    }

    if (stateStorage.stateToId.contains(state->status())) {
        // State already exists
        stateId = stateStorage.stateToId.getValue(state->status());
        STORM_LOG_TRACE("State " << dft.getStateString(state) << " with id " << stateId << " already exists");
        if (!changed) {
            // Check if state is pseudo state
            // If state is explored already the possible pseudo state was already constructed
            auto iter = statesNotExplored.find(stateId);
            if (iter != statesNotExplored.end() && iter->second.first->isPseudoState()) {
                // The stored entry is still a pseudo state but we now have a concrete
                // representation — replace the pseudo state with the concrete one.
                STORM_LOG_ASSERT(iter->second.first->getId() == stateId, "Ids do not match.");
                STORM_LOG_ASSERT(iter->second.first->status() == state->status(), "Pseudo states do not coincide.");
                state->setId(stateId);
                // Update mapping to map to concrete state now
                // TODO Matthias: just change pointer?
                statesNotExplored[stateId] = std::make_pair(state, iter->second.second);
                // We do not push the new state on the exploration queue as the pseudo state was already pushed
                STORM_LOG_TRACE("Created pseudo state " << dft.getStateString(state));
            }
        }
    } else {
        // State does not exist yet
        STORM_LOG_ASSERT(state->isPseudoState() == changed, "State type (pseudo/concrete) wrong.");
        // Create new state
        state->setId(newIndex++);
        stateId = stateStorage.stateToId.findOrAdd(state->status(), state->getId());
        STORM_LOG_ASSERT(stateId == state->getId(), "Ids do not match.");
        // Insert state as not yet explored; the heuristic is initialized lazily
        // when the state is first reached during exploration.
        ExplorationHeuristicPointer nullHeuristic;
        statesNotExplored[stateId] = std::make_pair(state, nullHeuristic);
        // Reserve one slot for the new state in the remapping
        matrixBuilder.stateRemapping.push_back(0);
        STORM_LOG_TRACE("New " << (state->isPseudoState() ? "pseudo" : "concrete") << " state: " << dft.getStateString(state));
    }
    return stateId;
}
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::setMarkovian(bool markovian) { |
||||
|
if (matrixBuilder.getCurrentRowGroup() > modelComponents.markovianStates.size()) { |
||||
|
// Resize BitVector
|
||||
|
modelComponents.markovianStates.resize(modelComponents.markovianStates.size() + INITIAL_BITVECTOR_SIZE); |
||||
|
} |
||||
|
modelComponents.markovianStates.set(matrixBuilder.getCurrentRowGroup() - 1, markovian); |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType, typename StateType> |
||||
|
void ExplicitDFTModelBuilderApprox<ValueType, StateType>::printNotExplored() const { |
||||
|
std::cout << "states not explored:" << std::endl; |
||||
|
for (auto it : statesNotExplored) { |
||||
|
std::cout << it.first << " -> " << dft.getStateString(it.second.first) << std::endl; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
// Explicitly instantiate the class.
|
||||
|
template class ExplicitDFTModelBuilderApprox<double>; |
||||
|
|
||||
|
#ifdef STORM_HAVE_CARL
|
||||
|
template class ExplicitDFTModelBuilderApprox<storm::RationalFunction>; |
||||
|
#endif
|
||||
|
|
||||
|
} // namespace builder
|
||||
|
} // namespace storm
|
||||
|
|
||||
|
|
@ -0,0 +1,335 @@ |
|||||
|
#ifndef EXPLICITDFTMODELBUILDERAPPROX_H |
||||
|
#define EXPLICITDFTMODELBUILDERAPPROX_H |
||||
|
|
||||
|
#include "src/builder/DftExplorationHeuristic.h" |
||||
|
#include "src/models/sparse/StateLabeling.h" |
||||
|
#include "src/models/sparse/StandardRewardModel.h" |
||||
|
#include "src/models/sparse/Model.h" |
||||
|
#include "src/generator/DftNextStateGenerator.h" |
||||
|
#include "src/storage/SparseMatrix.h" |
||||
|
#include "src/storage/sparse/StateStorage.h" |
||||
|
#include "src/storage/dft/DFT.h" |
||||
|
#include "src/storage/dft/SymmetricUnits.h" |
||||
|
#include "src/storage/BucketPriorityQueue.h" |
||||
|
#include <boost/container/flat_set.hpp> |
||||
|
#include <boost/optional/optional.hpp> |
||||
|
#include <stack> |
||||
|
#include <unordered_set> |
||||
|
#include <limits> |
||||
|
|
||||
|
namespace storm { |
||||
|
namespace builder { |
||||
|
|
||||
|
/*!
 * Build a Markov chain from a DFT (dynamic fault tree).
 *
 * Supports iterative approximation: states may be skipped during exploration
 * based on a heuristic, and the resulting partial model yields lower/upper
 * bound models via getModelApproximation().
 */
template<typename ValueType, typename StateType = uint32_t>
class ExplicitDFTModelBuilderApprox {

    using DFTStatePointer = std::shared_ptr<storm::storage::DFTState<ValueType>>;
    // TODO Matthias: make choosable
    using ExplorationHeuristic = DFTExplorationHeuristicBoundDifference<ValueType>;
    using ExplorationHeuristicPointer = std::shared_ptr<ExplorationHeuristic>;

    // A structure holding the individual components of a model.
    struct ModelComponents {
        // Constructor
        ModelComponents();

        // The transition matrix.
        storm::storage::SparseMatrix<ValueType> transitionMatrix;

        // The state labeling.
        storm::models::sparse::StateLabeling stateLabeling;

        // The Markovian states.
        storm::storage::BitVector markovianStates;

        // The exit rates.
        std::vector<ValueType> exitRates;

        // A vector that stores a labeling for each choice.
        boost::optional<std::vector<boost::container::flat_set<StateType>>> choiceLabeling;

        // A flag indicating if the model is deterministic.
        bool deterministicModel;
    };

    // A class holding the information for building the transition matrix.
    class MatrixBuilder {
    public:
        // Constructor
        MatrixBuilder(bool canHaveNondeterminism);

        /*!
         * Set a mapping from a state id to the index in the matrix.
         *
         * @param id Id of the state.
         */
        void setRemapping(StateType id) {
            STORM_LOG_ASSERT(id < stateRemapping.size(), "Invalid index for remapping.");
            // The state is placed in the row group currently under construction.
            stateRemapping[id] = currentRowGroup;
        }

        /*!
         * Create a new row group if the model is nondeterministic.
         */
        void newRowGroup() {
            if (canHaveNondeterminism) {
                builder.newRowGroup(currentRow);
            }
            ++currentRowGroup;
        }

        /*!
         * Add a transition from the current row.
         *
         * @param index Target index
         * @param value Value of transition
         */
        void addTransition(StateType index, ValueType value) {
            builder.addNextValue(currentRow, index, value);
        }

        /*!
         * Finish the current row.
         */
        void finishRow() {
            ++currentRow;
        }

        /*!
         * Remap the columns in the matrix.
         */
        void remap() {
            // Only columns >= mappingOffset are rewritten; smaller indices are final.
            builder.replaceColumns(stateRemapping, mappingOffset);
        }

        /*!
         * Get the current row group.
         *
         * @return The current row group.
         */
        StateType getCurrentRowGroup() {
            return currentRowGroup;
        }

        // Matrix builder.
        storm::storage::SparseMatrixBuilder<ValueType> builder;

        // Offset to distinguish states which will not be remapped anymore and those which will.
        size_t mappingOffset;

        // A mapping from state ids to the row group indices in which they actually reside.
        // TODO Matthias: avoid hack with fixed int type
        std::vector<uint_fast64_t> stateRemapping;

    private:

        // Index of the current row group.
        StateType currentRowGroup;

        // Index of the current row.
        StateType currentRow;

        // Flag indicating if row groups are needed.
        bool canHaveNondeterminism;
    };

public:
    // A structure holding the labeling options.
    struct LabelOptions {
        // Label states in which the top-level element has failed.
        bool buildFailLabel = true;
        // Label states which can never fail anymore.
        bool buildFailSafeLabel = false;
        // Names of basic elements whose failure should be labeled individually.
        std::set<std::string> beLabels = {};
    };

    /*!
     * Constructor.
     *
     * @param dft DFT.
     * @param symmetries Symmetries in the dft.
     * @param enableDC Flag indicating if dont care propagation should be used.
     */
    ExplicitDFTModelBuilderApprox(storm::storage::DFT<ValueType> const& dft, storm::storage::DFTIndependentSymmetries const& symmetries, bool enableDC);

    /*!
     * Build model from DFT.
     *
     * @param labelOpts Options for labeling.
     * @param iteration Current number of iteration.
     * @param approximationThreshold Threshold determining when to skip exploring states.
     */
    void buildModel(LabelOptions const& labelOpts, size_t iteration, double approximationThreshold = 0.0);

    /*!
     * Get the built model.
     *
     * @return The model built from the DFT.
     */
    std::shared_ptr<storm::models::sparse::Model<ValueType>> getModel();

    /*!
     * Get the built approximation model for either the lower or upper bound.
     *
     * @param lowerBound If true, the lower bound model is returned, else the upper bound model
     *
     * @return The model built from the DFT.
     */
    std::shared_ptr<storm::models::sparse::Model<ValueType>> getModelApproximation(bool lowerBound = true);

private:

    /*!
     * Explore state space of DFT.
     *
     * @param approximationThreshold Threshold to determine when to skip states.
     */
    void exploreStateSpace(double approximationThreshold);

    /*!
     * Initialize the matrix for a refinement iteration.
     */
    void initializeNextIteration();

    /*!
     * Build the labeling.
     *
     * @param labelOpts Options for labeling.
     */
    void buildLabeling(LabelOptions const& labelOpts);

    /*!
     * Add a state to the explored states (if not already there). It also handles pseudo states.
     *
     * @param state The state to add.
     *
     * @return Id of state.
     */
    StateType getOrAddStateIndex(DFTStatePointer const& state);

    /*!
     * Set markovian flag for the current state.
     *
     * @param markovian Flag indicating if the state is markovian.
     */
    void setMarkovian(bool markovian);

    /*!
     * Change matrix to reflect the lower or upper approximation bound.
     *
     * @param matrix Matrix to change. The change are reflected here.
     * @param lowerBound Flag indicating if the lower bound should be used. Otherwise the upper bound is used.
     */
    void changeMatrixBound(storm::storage::SparseMatrix<ValueType> & matrix, bool lowerBound) const;

    /*!
     * Get lower bound approximation for state.
     *
     * @return Lower bound approximation.
     */
    ValueType getLowerBound(DFTStatePointer const& state) const;

    /*!
     * Get upper bound approximation for state.
     *
     * @return Upper bound approximation.
     */
    ValueType getUpperBound(DFTStatePointer const& state) const;

    /*!
     * Compute the MTTF of an AND gate via a closed formula.
     * The used formula is 1/( 1/a + 1/b + 1/c + ... - 1/(a+b) - 1/(a+c) - ... + 1/(a+b+c) + ... - ...)
     *
     * @param rates List of rates of children of AND.
     * @param size Only indices < size are considered in the vector.
     * @return MTTF.
     */
    ValueType computeMTTFAnd(std::vector<ValueType> const& rates, size_t size) const;

    /*!
     * Compares the priority of two states.
     *
     * @param idA Id of first state
     * @param idB Id of second state
     *
     * @return True if the priority of the first state is greater then the priority of the second one.
     */
    bool isPriorityGreater(StateType idA, StateType idB) const;

    /*!
     * Print all states which have not been explored yet (debugging aid).
     */
    void printNotExplored() const;

    /*!
     * Create the model from the model components.
     *
     * @param copy If true, all elements of the model component are copied (used for approximation). If false
     * they are moved to reduce the memory overhead.
     *
     * @return The model built from the model components.
     */
    std::shared_ptr<storm::models::sparse::Model<ValueType>> createModel(bool copy);

    // Initial size of the bitvector.
    const size_t INITIAL_BITVECTOR_SIZE = 20000;
    // Offset used for pseudo states.
    const StateType OFFSET_PSEUDO_STATE = std::numeric_limits<StateType>::max() / 2;

    // Dft
    storm::storage::DFT<ValueType> const& dft;

    // General information for state generation
    // TODO Matthias: use const reference
    std::shared_ptr<storm::storage::DFTStateGenerationInfo> stateGenerationInfo;

    // Flag indicating if dont care propagation should be used.
    bool enableDC = true;

    //TODO Matthias: make changeable
    const bool mergeFailedStates = true;

    // Heuristic used for approximation
    storm::builder::ApproximationHeuristic usedHeuristic;

    // Current id for new state
    size_t newIndex = 0;

    // Id of failed state
    size_t failedStateId = 0;

    // Id of initial state
    size_t initialStateIndex = 0;

    // Next state generator for exploring the state space
    storm::generator::DftNextStateGenerator<ValueType, StateType> generator;

    // Structure for the components of the model.
    ModelComponents modelComponents;

    // Structure for the transition matrix builder.
    MatrixBuilder matrixBuilder;

    // Internal information about the states that were explored.
    storm::storage::sparse::StateStorage<StateType> stateStorage;

    // A priority queue of states that still need to be explored.
    storm::storage::BucketPriorityQueue<ValueType> explorationQueue;
    //storm::storage::DynamicPriorityQueue<ExplorationHeuristicPointer, std::vector<ExplorationHeuristicPointer>, std::function<bool(ExplorationHeuristicPointer, ExplorationHeuristicPointer)>> explorationQueue;

    // A mapping of not yet explored states from the id to the tuple (state object, heuristic values).
    std::map<StateType, std::pair<DFTStatePointer, ExplorationHeuristicPointer>> statesNotExplored;

    // Holds all skipped states which were not yet expanded. More concretely it is a mapping from matrix indices
    // to the corresponding skipped states.
    // Notice that we need an ordered map here to easily iterate in increasing order over state ids.
    // TODO remove again
    std::map<StateType, std::pair<DFTStatePointer, ExplorationHeuristicPointer>> skippedStates;

    // List of independent subtrees and the BEs contained in them.
    std::vector<std::vector<size_t>> subtreeBEs;
};
||||
|
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
#endif /* EXPLICITDFTMODELBUILDERAPPROX_H */ |
@ -0,0 +1,172 @@ |
|||||
|
#include "src/builder/ParallelCompositionBuilder.h"
|
||||
|
#include "src/models/sparse/StandardRewardModel.h"
|
||||
|
#include <src/utility/constants.h>
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace builder { |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> ParallelCompositionBuilder<ValueType>::compose(std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> const& ctmcA, std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> const& ctmcB, bool labelAnd) { |
||||
|
STORM_LOG_TRACE("Parallel composition"); |
||||
|
|
||||
|
storm::storage::SparseMatrix<ValueType> matrixA = ctmcA->getTransitionMatrix(); |
||||
|
storm::storage::SparseMatrix<ValueType> matrixB = ctmcB->getTransitionMatrix(); |
||||
|
storm::models::sparse::StateLabeling labelingA = ctmcA->getStateLabeling(); |
||||
|
storm::models::sparse::StateLabeling labelingB = ctmcB->getStateLabeling(); |
||||
|
size_t sizeA = ctmcA->getNumberOfStates(); |
||||
|
size_t sizeB = ctmcB->getNumberOfStates(); |
||||
|
size_t size = sizeA * sizeB; |
||||
|
size_t rowIndex = 0; |
||||
|
|
||||
|
// Build matrix
|
||||
|
storm::storage::SparseMatrixBuilder<ValueType> builder(size, size, 0, true, false, 0); |
||||
|
|
||||
|
for (size_t stateA = 0; stateA < sizeA; ++stateA) { |
||||
|
for (size_t stateB = 0; stateB < sizeB; ++stateB) { |
||||
|
STORM_LOG_ASSERT(rowIndex == stateA * sizeB + stateB, "Row " << rowIndex << " is not correct"); |
||||
|
|
||||
|
auto rowA = matrixA.getRow(stateA); |
||||
|
auto itA = rowA.begin(); |
||||
|
auto rowB = matrixB.getRow(stateB); |
||||
|
auto itB = rowB.begin(); |
||||
|
|
||||
|
// First consider all target states of A < the current stateA
|
||||
|
while (itA != rowA.end() && itA->getColumn() < stateA) { |
||||
|
builder.addNextValue(rowIndex, itA->getColumn() * sizeB + stateB, itA->getValue()); |
||||
|
++itA; |
||||
|
} |
||||
|
|
||||
|
// Then consider all target states of B
|
||||
|
while (itB != rowB.end()) { |
||||
|
builder.addNextValue(rowIndex, stateA * sizeB + itB->getColumn(), itB->getValue()); |
||||
|
++itB; |
||||
|
} |
||||
|
|
||||
|
// Last consider all remaining target states of A > the current stateA
|
||||
|
while (itA != rowA.end()) { |
||||
|
builder.addNextValue(rowIndex, itA->getColumn() * sizeB + stateB, itA->getValue()); |
||||
|
++itA; |
||||
|
} |
||||
|
|
||||
|
++rowIndex; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
storm::storage::SparseMatrix<ValueType> matrixComposed = builder.build(); |
||||
|
STORM_LOG_ASSERT(matrixComposed.getRowCount() == size, "Row count is not correct"); |
||||
|
STORM_LOG_ASSERT(matrixComposed.getColumnCount() == size, "Column count is not correct"); |
||||
|
|
||||
|
// Build labeling
|
||||
|
storm::models::sparse::StateLabeling labeling = storm::models::sparse::StateLabeling(sizeA * sizeB); |
||||
|
|
||||
|
if (labelAnd) { |
||||
|
for (std::string const& label : labelingA.getLabels()) { |
||||
|
if (labelingB.containsLabel(label)) { |
||||
|
// Only consider labels contained in both CTMCs
|
||||
|
storm::storage::BitVector labelStates(size, false); |
||||
|
for (auto entryA : labelingA.getStates(label)) { |
||||
|
for (auto entryB : labelingB.getStates(label)) { |
||||
|
labelStates.set(entryA * sizeB + entryB); |
||||
|
} |
||||
|
} |
||||
|
labeling.addLabel(label, labelStates); |
||||
|
} |
||||
|
} |
||||
|
} else { |
||||
|
// Set labels from A
|
||||
|
for (std::string const& label : labelingA.getLabels()) { |
||||
|
if (label == "init") { |
||||
|
// Initial states must be initial in both CTMCs
|
||||
|
STORM_LOG_ASSERT(labelingB.containsLabel(label), "B does not have init."); |
||||
|
storm::storage::BitVector labelStates(size, false); |
||||
|
for (auto entryA : labelingA.getStates(label)) { |
||||
|
for (auto entryB : labelingB.getStates(label)) { |
||||
|
labelStates.set(entryA * sizeB + entryB); |
||||
|
} |
||||
|
} |
||||
|
labeling.addLabel(label, labelStates); |
||||
|
} else { |
||||
|
storm::storage::BitVector labelStates(size, false); |
||||
|
for (auto entry : labelingA.getStates(label)) { |
||||
|
for (size_t index = entry * sizeB; index < entry * sizeB + sizeB; ++index) { |
||||
|
labelStates.set(index, true); |
||||
|
} |
||||
|
} |
||||
|
labeling.addLabel(label, labelStates); |
||||
|
} |
||||
|
} |
||||
|
// Set labels from B
|
||||
|
for (std::string const& label : labelingB.getLabels()) { |
||||
|
if (label == "init") { |
||||
|
continue; |
||||
|
} |
||||
|
if (labeling.containsLabel(label)) { |
||||
|
// Label is already there from A
|
||||
|
for (auto entry : labelingB.getStates(label)) { |
||||
|
for (size_t index = 0; index < sizeA; ++index) { |
||||
|
labeling.addLabelToState(label, index * sizeB + entry); |
||||
|
} |
||||
|
} |
||||
|
} else { |
||||
|
storm::storage::BitVector labelStates(size, false); |
||||
|
for (auto entry : labelingB.getStates(label)) { |
||||
|
for (size_t index = 0; index < sizeA; ++index) { |
||||
|
labelStates.set(index * sizeB + entry, true); |
||||
|
} |
||||
|
} |
||||
|
labeling.addLabel(label, labelStates); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
// Build CTMC
|
||||
|
std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> composedCtmc = std::make_shared<storm::models::sparse::Ctmc<ValueType>>(matrixComposed, labeling); |
||||
|
|
||||
|
// Print for debugging
|
||||
|
/*std::cout << "Matrix A:" << std::endl;
|
||||
|
std::cout << matrixA << std::endl; |
||||
|
std::cout << "Matrix B:" << std::endl; |
||||
|
std::cout << matrixB << std::endl; |
||||
|
std::cout << "Composed matrix: " << std::endl << matrixComposed; |
||||
|
std::cout << "Labeling A" << std::endl; |
||||
|
labelingA.printLabelingInformationToStream(std::cout); |
||||
|
for (size_t stateA = 0; stateA < sizeA; ++stateA) { |
||||
|
std::cout << "State " << stateA << ": "; |
||||
|
for (auto label : labelingA.getLabelsOfState(stateA)) { |
||||
|
std::cout << label << ", "; |
||||
|
} |
||||
|
std::cout << std::endl; |
||||
|
} |
||||
|
std::cout << "Labeling B" << std::endl; |
||||
|
labelingB.printLabelingInformationToStream(std::cout); |
||||
|
for (size_t stateB = 0; stateB < sizeB; ++stateB) { |
||||
|
std::cout << "State " << stateB << ": "; |
||||
|
for (auto label : labelingB.getLabelsOfState(stateB)) { |
||||
|
std::cout << label << ", "; |
||||
|
} |
||||
|
std::cout << std::endl; |
||||
|
} |
||||
|
std::cout << "Labeling Composed" << std::endl; |
||||
|
labeling.printLabelingInformationToStream(std::cout); |
||||
|
for (size_t state = 0; state < size; ++state) { |
||||
|
std::cout << "State " << state << ": "; |
||||
|
for (auto label : labeling.getLabelsOfState(state)) { |
||||
|
std::cout << label << ", "; |
||||
|
} |
||||
|
std::cout << std::endl; |
||||
|
}*/ |
||||
|
|
||||
|
return composedCtmc; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
// Explicitly instantiate the class.
|
||||
|
template class ParallelCompositionBuilder<double>; |
||||
|
|
||||
|
#ifdef STORM_HAVE_CARL
|
||||
|
template class ParallelCompositionBuilder<storm::RationalFunction>; |
||||
|
#endif
|
||||
|
|
||||
|
} // namespace builder
|
||||
|
} // namespace storm
|
@ -0,0 +1,24 @@ |
|||||
|
#ifndef PARALLELCOMPOSITIONBUILDER_H |
||||
|
#define PARALLELCOMPOSITIONBUILDER_H |
||||
|
|
||||
|
#include <src/models/sparse/Ctmc.h> |
||||
|
|
||||
|
namespace storm { |
||||
|
namespace builder { |
||||
|
|
||||
|
/*!
 * Build a parallel composition of Markov chains.
 */
template<typename ValueType>
class ParallelCompositionBuilder {

public:

    /*!
     * Build the parallel composition of two CTMCs.
     * The composed state (sA, sB) is encoded as index sA * sizeB + sB.
     *
     * @param ctmcA First CTMC.
     * @param ctmcB Second CTMC.
     * @param labelAnd If true, only labels present in both CTMCs are kept and a
     *                 composed state is labeled iff both component states are.
     *                 If false, the labels of each component are lifted to the
     *                 product ("init" requires both components to be initial).
     * @return The composed CTMC.
     */
    static std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> compose(std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> const& ctmcA, std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> const& ctmcB, bool labelAnd);

};
||||
|
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
#endif /* PARALLELCOMPOSITIONBUILDER_H */
@ -0,0 +1,209 @@ |
|||||
|
#include "src/generator/DftNextStateGenerator.h"
|
||||
|
|
||||
|
#include "src/utility/constants.h"
|
||||
|
#include "src/utility/macros.h"
|
||||
|
#include "src/exceptions/NotImplementedException.h"
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace generator { |
||||
|
|
||||
|
// Constructor: stores references to the DFT and its generation info and
// determines upfront whether the resulting model is deterministic.
template<typename ValueType, typename StateType>
DftNextStateGenerator<ValueType, StateType>::DftNextStateGenerator(storm::storage::DFT<ValueType> const& dft, storm::storage::DFTStateGenerationInfo const& stateGenerationInfo, bool enableDC, bool mergeFailedStates) : mDft(dft), mStateGenerationInfo(stateGenerationInfo), state(nullptr), enableDC(enableDC), mergeFailedStates(mergeFailedStates) {
    // Nondeterminism only arises if the DFT itself can exhibit it (e.g. via dependencies).
    deterministicModel = !mDft.canHaveNondeterminism();
}
||||
|
|
||||
|
// Returns true iff the generated model is deterministic (no nondeterminism in the DFT).
template<typename ValueType, typename StateType>
bool DftNextStateGenerator<ValueType, StateType>::isDeterministicModel() const {
    return deterministicModel;
}
||||
|
|
||||
|
// Creates the single initial DFT state (all elements operational), registers it
// via the callback and returns its id.
template<typename ValueType, typename StateType>
std::vector<StateType> DftNextStateGenerator<ValueType, StateType>::getInitialStates(StateToIdCallback const& stateToIdCallback) {
    DFTStatePointer initialState = std::make_shared<storm::storage::DFTState<ValueType>>(mDft, mStateGenerationInfo, 0);

    // Register initial state
    StateType id = stateToIdCallback(initialState);

    initialState->setId(id);
    // A DFT always has exactly one initial state.
    return {id};
}
||||
|
|
||||
|
// Loads the state to expand next by reconstructing a DFT state from its
// bitvector encoding.
template<typename ValueType, typename StateType>
void DftNextStateGenerator<ValueType, StateType>::load(storm::storage::BitVector const& state) {
    // Load the state from bitvector
    size_t id = 0; //TODO Matthias: set correct id
    this->state = std::make_shared<storm::storage::DFTState<ValueType>>(state, mDft, mStateGenerationInfo, id);
}
||||
|
|
||||
|
// Loads the state to expand next directly from an existing state object.
template<typename ValueType, typename StateType>
void DftNextStateGenerator<ValueType, StateType>::load(DFTStatePointer const& state) {
    // Store a pointer to the state itself, because we need to be able to access it when expanding it.
    this->state = state;
}
||||
|
|
||||
|
/*!
 * Expand the currently loaded state: generate all successor states by letting
 * each currently failable BE (or dependency) fail, propagating the failure
 * through the tree, and collecting the resulting transitions.
 *
 * If dependencies are failable, each dependency failure yields its own
 * (probabilistic) choice; otherwise all BE failure rates form one Markovian
 * choice. Absorbing states (failed/failsafe/nothing failable) get a self loop.
 *
 * @param stateToIdCallback Callback registering successor states and returning their ids.
 * @return The behavior (choices and transitions) of the loaded state.
 */
template<typename ValueType, typename StateType>
StateBehavior<ValueType, StateType> DftNextStateGenerator<ValueType, StateType>::expand(StateToIdCallback const& stateToIdCallback) {
    STORM_LOG_TRACE("Explore state: " << mDft.getStateString(state));

    // Prepare the result, in case we return early.
    StateBehavior<ValueType, StateType> result;

    // Initialization
    // Dependencies take precedence: if any dependency is failable, only
    // dependency failures are considered in this expansion.
    bool hasDependencies = state->nrFailableDependencies() > 0;
    size_t failableCount = hasDependencies ? state->nrFailableDependencies() : state->nrFailableBEs();
    size_t currentFailable = 0;
    // Markovian choice iff no dependencies are pending.
    Choice<ValueType, StateType> choice(0, !hasDependencies);

    // Check for absorbing state
    if (mDft.hasFailed(state) || mDft.isFailsafe(state) || failableCount == 0) {
        // Add self loop
        choice.addProbability(state->getId(), storm::utility::one<ValueType>());
        STORM_LOG_TRACE("Added self loop for " << state->getId());
        // No further exploration required
        result.addChoice(std::move(choice));
        result.setExpanded();
        return result;
    }

    // Let BE fail
    while (currentFailable < failableCount) {
        STORM_LOG_ASSERT(!mDft.hasFailed(state), "Dft has failed.");

        // Construct new state as copy from original one
        DFTStatePointer newState = state->copy();
        std::pair<std::shared_ptr<storm::storage::DFTBE<ValueType> const>, bool> nextBEPair = newState->letNextBEFail(currentFailable);
        std::shared_ptr<storm::storage::DFTBE<ValueType> const>& nextBE = nextBEPair.first;
        STORM_LOG_ASSERT(nextBE, "NextBE is null.");
        STORM_LOG_ASSERT(nextBEPair.second == hasDependencies, "Failure due to dependencies does not match.");
        STORM_LOG_TRACE("With the failure of: " << nextBE->name() << " [" << nextBE->id() << "] in " << mDft.getStateString(state));

        // Propagate
        storm::storage::DFTStateSpaceGenerationQueues<ValueType> queues;

        // Propagate failure
        // Seed the queue with all still-operational parents of the failed BE.
        for (DFTGatePointer parent : nextBE->parents()) {
            if (newState->isOperational(parent->id())) {
                queues.propagateFailure(parent);
            }
        }
        // Propagate failures
        while (!queues.failurePropagationDone()) {
            DFTGatePointer next = queues.nextFailurePropagation();
            next->checkFails(*newState, queues);
            newState->updateFailableDependencies(next->id());
        }

        // Check restrictions
        for (DFTRestrictionPointer restr : nextBE->restrictions()) {
            queues.checkRestrictionLater(restr);
        }
        // Check restrictions
        while(!queues.restrictionChecksDone()) {
            DFTRestrictionPointer next = queues.nextRestrictionCheck();
            next->checkFails(*newState, queues);
            newState->updateFailableDependencies(next->id());
        }

        if(newState->isInvalid()) {
            // State violates a restriction; discard it and
            // continue with next possible state
            ++currentFailable;
            continue;
        }

        // Get the id of the successor state
        StateType newStateId;
        if (newState->hasFailed(mDft.getTopLevelIndex()) && mergeFailedStates) {
            // Use unique failed state
            newStateId = mergeFailedStateId;
        } else {
            // Propagate failsafe
            while (!queues.failsafePropagationDone()) {
                DFTGatePointer next = queues.nextFailsafePropagation();
                next->checkFailsafe(*newState, queues);
            }

            // Propagate dont cares
            while (enableDC && !queues.dontCarePropagationDone()) {
                DFTElementPointer next = queues.nextDontCarePropagation();
                next->checkDontCareAnymore(*newState, queues);
            }

            // Update failable dependencies
            newState->updateFailableDependencies(nextBE->id());
            newState->updateDontCareDependencies(nextBE->id());

            // Add new state
            newStateId = stateToIdCallback(newState);
        }

        // Set transitions
        if (hasDependencies) {
            // Failure is due to dependency -> add non-deterministic choice
            ValueType probability = mDft.getDependency(state->getDependencyId(currentFailable))->probability();
            choice.addProbability(newStateId, probability);
            STORM_LOG_TRACE("Added transition to " << newStateId << " with probability " << probability);

            if (!storm::utility::isOne(probability)) {
                // Add transition to state where dependency was unsuccessful
                DFTStatePointer unsuccessfulState = state->copy();
                unsuccessfulState->letDependencyBeUnsuccessful(currentFailable);
                // Add state
                StateType unsuccessfulStateId = stateToIdCallback(unsuccessfulState);
                ValueType remainingProbability = storm::utility::one<ValueType>() - probability;
                choice.addProbability(unsuccessfulStateId, remainingProbability);
                STORM_LOG_TRACE("Added transition to " << unsuccessfulStateId << " with remaining probability " << remainingProbability);
            }
            // NOTE(review): 'choice' is moved from here but reused in later loop
            // iterations when several dependencies are failable — confirm Choice
            // is safe to use after move (or is reset on addProbability).
            result.addChoice(std::move(choice));
        } else {
            // Failure is due to "normal" BE failure
            // Set failure rate according to activation
            bool isActive = true;
            if (mDft.hasRepresentant(nextBE->id())) {
                // Active must be checked for the state we are coming from as this state is responsible for the rate
                isActive = state->isActive(mDft.getRepresentant(nextBE->id()));
            }
            ValueType rate = isActive ? nextBE->activeFailureRate() : nextBE->passiveFailureRate();
            STORM_LOG_ASSERT(!storm::utility::isZero(rate), "Rate is 0.");
            choice.addProbability(newStateId, rate);
            STORM_LOG_TRACE("Added transition to " << newStateId << " with " << (isActive ? "active" : "passive") << " failure rate " << rate);
        }

        ++currentFailable;
    } // end while failing BE

    if (!hasDependencies) {
        // Add all rates as one choice
        result.addChoice(std::move(choice));
    }

    STORM_LOG_TRACE("Finished exploring state: " << mDft.getStateString(state));
    result.setExpanded();
    return result;
}
||||
|
|
||||
|
// Creates the unique absorbing "failed" state into which all failed states are
// merged, registers it via the callback and returns its (self-loop) behavior.
template<typename ValueType, typename StateType>
StateBehavior<ValueType, StateType> DftNextStateGenerator<ValueType, StateType>::createMergeFailedState(StateToIdCallback const& stateToIdCallback) {
    STORM_LOG_ASSERT(mergeFailedStates, "No unique failed state used.");
    // Introduce explicit fail state
    DFTStatePointer failedState = std::make_shared<storm::storage::DFTState<ValueType>>(mDft, mStateGenerationInfo, 0);
    mergeFailedStateId = stateToIdCallback(failedState);
    STORM_LOG_TRACE("Introduce fail state with id: " << mergeFailedStateId);

    // Add self loop
    Choice<ValueType, StateType> choice(0, true);
    choice.addProbability(mergeFailedStateId, storm::utility::one<ValueType>());

    // No further exploration required
    StateBehavior<ValueType, StateType> result;
    result.addChoice(std::move(choice));
    result.setExpanded();
    return result;
}
||||
|
|
||||
|
template class DftNextStateGenerator<double>; |
||||
|
|
||||
|
#ifdef STORM_HAVE_CARL
|
||||
|
template class DftNextStateGenerator<storm::RationalFunction>; |
||||
|
#endif
|
||||
|
} |
||||
|
} |
@ -0,0 +1,73 @@ |
|||||
|
#ifndef STORM_GENERATOR_DFTNEXTSTATEGENERATOR_H_ |
||||
|
#define STORM_GENERATOR_DFTNEXTSTATEGENERATOR_H_ |
||||
|
|
||||
|
#include "src/generator/NextStateGenerator.h" |
||||
|
#include "src/storage/dft/DFT.h" |
||||
|
|
||||
|
#include "src/utility/ConstantsComparator.h" |
||||
|
|
||||
|
namespace storm { |
||||
|
namespace generator { |
||||
|
|
||||
|
/*!
 * Next state generator for DFTs.
 *
 * Usage: load() a state, then expand() it to obtain its outgoing behavior.
 */
template<typename ValueType, typename StateType = uint32_t>
class DftNextStateGenerator {
    // TODO: inherit from NextStateGenerator

    using DFTStatePointer = std::shared_ptr<storm::storage::DFTState<ValueType>>;
    using DFTElementPointer = std::shared_ptr<storm::storage::DFTElement<ValueType>>;
    using DFTGatePointer = std::shared_ptr<storm::storage::DFTGate<ValueType>>;
    using DFTRestrictionPointer = std::shared_ptr<storm::storage::DFTRestriction<ValueType>>;

public:
    // Callback registering a state and returning its id.
    typedef std::function<StateType (DFTStatePointer const&)> StateToIdCallback;

    DftNextStateGenerator(storm::storage::DFT<ValueType> const& dft, storm::storage::DFTStateGenerationInfo const& stateGenerationInfo, bool enableDC, bool mergeFailedStates);

    // True iff the generated model is deterministic.
    bool isDeterministicModel() const;
    // Create and register the (single) initial state; returns its id.
    std::vector<StateType> getInitialStates(StateToIdCallback const& stateToIdCallback);

    // Load the state to expand from its bitvector encoding.
    void load(storm::storage::BitVector const& state);
    // Load the state to expand from an existing state object.
    void load(DFTStatePointer const& state);
    // Expand the loaded state and return its outgoing choices.
    StateBehavior<ValueType, StateType> expand(StateToIdCallback const& stateToIdCallback);

    /*!
     * Create unique failed state.
     *
     * @param stateToIdCallback Callback for state. The callback should just return the id and not use the state.
     *
     * @return Behavior of state.
     */
    StateBehavior<ValueType, StateType> createMergeFailedState(StateToIdCallback const& stateToIdCallback);

private:

    // The dft used for the generation of next states.
    storm::storage::DFT<ValueType> const& mDft;

    // General information for the state generation.
    storm::storage::DFTStateGenerationInfo const& mStateGenerationInfo;

    // Current state
    DFTStatePointer state;

    // Flag indicating if dont care propagation is enabled.
    bool enableDC;

    // Flag indicating if all failed states should be merged into one.
    bool mergeFailedStates = true;

    // Id of the merged failed state
    StateType mergeFailedStateId = 0;

    // Flag indicating if the model is deterministic.
    bool deterministicModel = false;

};
||||
|
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
#endif /* STORM_GENERATOR_DFTNEXTSTATEGENERATOR_H_ */ |
@ -1,191 +0,0 @@ |
|||||
#pragma once |
|
||||
|
|
||||
#include "logic/Formula.h" |
|
||||
#include "parser/DFTGalileoParser.h" |
|
||||
#include "builder/ExplicitDFTModelBuilder.h" |
|
||||
#include "modelchecker/results/CheckResult.h" |
|
||||
#include "utility/storm.h" |
|
||||
#include "storage/dft/DFTIsomorphism.h" |
|
||||
#include "utility/bitoperations.h" |
|
||||
|
|
||||
|
|
||||
#include <chrono> |
|
||||
|
|
||||
template<typename ValueType> |
|
||||
class DFTAnalyser { |
|
||||
|
|
||||
std::chrono::duration<double> buildingTime = std::chrono::duration<double>::zero(); |
|
||||
std::chrono::duration<double> explorationTime = std::chrono::duration<double>::zero(); |
|
||||
std::chrono::duration<double> bisimulationTime = std::chrono::duration<double>::zero(); |
|
||||
std::chrono::duration<double> modelCheckingTime = std::chrono::duration<double>::zero(); |
|
||||
std::chrono::duration<double> totalTime = std::chrono::duration<double>::zero(); |
|
||||
ValueType checkResult = storm::utility::zero<ValueType>(); |
|
||||
public: |
|
||||
void check(storm::storage::DFT<ValueType> const& origDft , std::shared_ptr<const storm::logic::Formula> const& formula, bool symred = true, bool allowModularisation = true, bool enableDC = true) { |
|
||||
|
|
||||
// Building DFT |
|
||||
std::chrono::high_resolution_clock::time_point totalStart = std::chrono::high_resolution_clock::now(); |
|
||||
|
|
||||
|
|
||||
// Optimizing DFT |
|
||||
storm::storage::DFT<ValueType> dft = origDft.optimize(); |
|
||||
checkResult = checkHelper(dft, formula, symred, allowModularisation, enableDC); |
|
||||
totalTime = std::chrono::high_resolution_clock::now() - totalStart; |
|
||||
|
|
||||
|
|
||||
|
|
||||
} |
|
||||
|
|
||||
private: |
|
||||
ValueType checkHelper(storm::storage::DFT<ValueType> const& dft , std::shared_ptr<const storm::logic::Formula> const& formula, bool symred = true, bool allowModularisation = true, bool enableDC = true) { |
|
||||
STORM_LOG_TRACE("Check helper called"); |
|
||||
bool invResults = false; |
|
||||
std::vector<storm::storage::DFT<ValueType>> dfts = {dft}; |
|
||||
std::vector<ValueType> res; |
|
||||
size_t nrK = 0; // K out of M |
|
||||
size_t nrM = 0; // K out of M |
|
||||
|
|
||||
if(allowModularisation) { |
|
||||
if(dft.topLevelType() == storm::storage::DFTElementType::AND) { |
|
||||
STORM_LOG_TRACE("top modularisation called AND"); |
|
||||
dfts = dft.topModularisation(); |
|
||||
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
|
||||
nrK = dfts.size(); |
|
||||
nrM = dfts.size(); |
|
||||
} |
|
||||
if(dft.topLevelType() == storm::storage::DFTElementType::OR) { |
|
||||
STORM_LOG_TRACE("top modularisation called OR"); |
|
||||
dfts = dft.topModularisation(); |
|
||||
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
|
||||
nrK = 0; |
|
||||
nrM = dfts.size(); |
|
||||
invResults = true; |
|
||||
} |
|
||||
if(dft.topLevelType() == storm::storage::DFTElementType::VOT) { |
|
||||
STORM_LOG_TRACE("top modularisation called VOT"); |
|
||||
dfts = dft.topModularisation(); |
|
||||
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
|
||||
nrK = std::static_pointer_cast<storm::storage::DFTVot<ValueType> const>(dft.getTopLevelGate())->threshold(); |
|
||||
nrM = dfts.size(); |
|
||||
if(nrK <= nrM/2) { |
|
||||
nrK -= 1; |
|
||||
invResults = true; |
|
||||
} |
|
||||
} |
|
||||
if(dfts.size() > 1) { |
|
||||
STORM_LOG_TRACE("Recursive CHECK Call"); |
|
||||
for(auto const ft : dfts) { |
|
||||
res.push_back(checkHelper(ft, formula, symred, allowModularisation)); |
|
||||
} |
|
||||
} |
|
||||
} |
|
||||
if(res.empty()) { |
|
||||
// Model based modularisation. |
|
||||
auto const& models = buildMarkovModels(dfts, formula, symred, enableDC); |
|
||||
|
|
||||
|
|
||||
for (auto const& model : models) { |
|
||||
// Model checking |
|
||||
STORM_LOG_INFO("Model checking..."); |
|
||||
std::chrono::high_resolution_clock::time_point modelCheckingStart = std::chrono::high_resolution_clock::now(); |
|
||||
std::unique_ptr<storm::modelchecker::CheckResult> result(storm::verifySparseModel(model, formula)); |
|
||||
STORM_LOG_INFO("Model checking done."); |
|
||||
STORM_LOG_ASSERT(result, "Result does not exist."); |
|
||||
result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates())); |
|
||||
modelCheckingTime += std::chrono::high_resolution_clock::now() - modelCheckingStart; |
|
||||
res.push_back(result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second); |
|
||||
} |
|
||||
} |
|
||||
if(nrM <= 1) { |
|
||||
// No modularisation done. |
|
||||
STORM_LOG_ASSERT(res.size()==1, "Result size not 1."); |
|
||||
return res[0]; |
|
||||
} |
|
||||
|
|
||||
STORM_LOG_TRACE("Combining all results... K=" << nrK << "; M=" << nrM << "; invResults=" << (invResults?"On":"Off")); |
|
||||
ValueType result = storm::utility::zero<ValueType>(); |
|
||||
int limK = invResults ? -1 : nrM+1; |
|
||||
int chK = invResults ? -1 : 1; |
|
||||
for(int cK = nrK; cK != limK; cK += chK ) { |
|
||||
STORM_LOG_ASSERT(cK >= 0, "ck negative."); |
|
||||
size_t permutation = smallestIntWithNBitsSet(static_cast<size_t>(cK)); |
|
||||
do { |
|
||||
STORM_LOG_TRACE("Permutation="<<permutation); |
|
||||
ValueType permResult = storm::utility::one<ValueType>(); |
|
||||
for(size_t i = 0; i < res.size(); ++i) { |
|
||||
if(permutation & (1 << i)) { |
|
||||
permResult *= res[i]; |
|
||||
} else { |
|
||||
permResult *= storm::utility::one<ValueType>() - res[i]; |
|
||||
} |
|
||||
} |
|
||||
STORM_LOG_TRACE("Result for permutation:"<<permResult); |
|
||||
permutation = nextBitPermutation(permutation); |
|
||||
result += permResult; |
|
||||
} while(permutation < (1 << nrM) && permutation != 0); |
|
||||
} |
|
||||
if(invResults) { |
|
||||
return storm::utility::one<ValueType>() - result; |
|
||||
} |
|
||||
return result; |
|
||||
} |
|
||||
|
|
||||
|
|
||||
std::vector<std::shared_ptr<storm::models::sparse::Model<ValueType>>> buildMarkovModels(std::vector<storm::storage::DFT<ValueType>> const& dfts, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool enableDC) { |
|
||||
std::vector<std::shared_ptr<storm::models::sparse::Model<ValueType>>> models; |
|
||||
for(auto& dft : dfts) { |
|
||||
std::chrono::high_resolution_clock::time_point buildingStart = std::chrono::high_resolution_clock::now(); |
|
||||
|
|
||||
std::map<size_t, std::vector<std::vector<size_t>>> emptySymmetry; |
|
||||
storm::storage::DFTIndependentSymmetries symmetries(emptySymmetry); |
|
||||
if(symred) { |
|
||||
auto colouring = dft.colourDFT(); |
|
||||
symmetries = dft.findSymmetries(colouring); |
|
||||
STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries."); |
|
||||
STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries); |
|
||||
} |
|
||||
std::chrono::high_resolution_clock::time_point buildingEnd = std::chrono::high_resolution_clock::now(); |
|
||||
buildingTime += buildingEnd - buildingStart; |
|
||||
|
|
||||
// Building Markov Automaton |
|
||||
STORM_LOG_INFO("Building Model..."); |
|
||||
storm::builder::ExplicitDFTModelBuilder<ValueType> builder(dft, symmetries, enableDC); |
|
||||
typename storm::builder::ExplicitDFTModelBuilder<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula |
|
||||
std::shared_ptr<storm::models::sparse::Model<ValueType>> model = builder.buildModel(labeloptions); |
|
||||
//model->printModelInformationToStream(std::cout); |
|
||||
STORM_LOG_INFO("No. states (Explored): " << model->getNumberOfStates()); |
|
||||
STORM_LOG_INFO("No. transitions (Explored): " << model->getNumberOfTransitions()); |
|
||||
std::chrono::high_resolution_clock::time_point explorationEnd = std::chrono::high_resolution_clock::now(); |
|
||||
explorationTime += explorationEnd -buildingEnd; |
|
||||
|
|
||||
// Bisimulation |
|
||||
if (model->isOfType(storm::models::ModelType::Ctmc) && storm::settings::getModule<storm::settings::modules::GeneralSettings>().isBisimulationSet()) { |
|
||||
STORM_LOG_INFO("Bisimulation..."); |
|
||||
model = storm::performDeterministicSparseBisimulationMinimization<storm::models::sparse::Ctmc<ValueType>>(model->template as<storm::models::sparse::Ctmc<ValueType>>(), {formula}, storm::storage::BisimulationType::Weak)->template as<storm::models::sparse::Ctmc<ValueType>>(); |
|
||||
//model->printModelInformationToStream(std::cout); |
|
||||
} |
|
||||
STORM_LOG_INFO("No. states (Bisimulation): " << model->getNumberOfStates()); |
|
||||
STORM_LOG_INFO("No. transitions (Bisimulation): " << model->getNumberOfTransitions()); |
|
||||
bisimulationTime += std::chrono::high_resolution_clock::now() - explorationEnd; |
|
||||
models.push_back(model); |
|
||||
} |
|
||||
return models; |
|
||||
} |
|
||||
|
|
||||
public: |
|
||||
void printTimings(std::ostream& os = std::cout) { |
|
||||
os << "Times:" << std::endl; |
|
||||
os << "Building:\t" << buildingTime.count() << std::endl; |
|
||||
os << "Exploration:\t" << explorationTime.count() << std::endl; |
|
||||
os << "Bisimulation:\t" << bisimulationTime.count() << std::endl; |
|
||||
os << "Modelchecking:\t" << modelCheckingTime.count() << std::endl; |
|
||||
os << "Total:\t\t" << totalTime.count() << std::endl; |
|
||||
} |
|
||||
|
|
||||
void printResult(std::ostream& os = std::cout) { |
|
||||
|
|
||||
os << "Result: ["; |
|
||||
os << checkResult << "]" << std::endl; |
|
||||
} |
|
||||
|
|
||||
}; |
|
@ -0,0 +1,413 @@ |
|||||
|
#include "DFTASFChecker.h"
|
||||
|
#include <string>
|
||||
|
|
||||
|
namespace storm { |
||||
|
|
||||
|
namespace modelchecker { |
||||
|
|
||||
|
/*
|
||||
|
* Variable[VarIndex] is the maximum of the others |
||||
|
*/ |
||||
|
class IsMaximum : public DFTConstraint { |
||||
|
public: |
||||
|
IsMaximum(uint64_t varIndex, std::vector<uint64_t> const& varIndices) : varIndex(varIndex), varIndices(varIndices) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
virtual ~IsMaximum() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(and "; |
||||
|
// assert it is largereq than all values.
|
||||
|
for (auto const& ovi : varIndices) { |
||||
|
sstr << "(>= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") "; |
||||
|
} |
||||
|
// assert it is one of the values.
|
||||
|
sstr << "(or "; |
||||
|
for (auto const& ovi : varIndices) { |
||||
|
sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") "; |
||||
|
} |
||||
|
sstr << ")"; // end of the or
|
||||
|
sstr << ")"; // end outer and.
|
||||
|
return sstr.str(); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
private: |
||||
|
uint64_t varIndex; |
||||
|
std::vector<uint64_t> varIndices; |
||||
|
}; |
||||
|
/*
|
||||
|
* First is the minimum of the others |
||||
|
*/ |
||||
|
class IsMinimum : public DFTConstraint { |
||||
|
public: |
||||
|
IsMinimum(uint64_t varIndex, std::vector<uint64_t> const& varIndices) : varIndex(varIndex), varIndices(varIndices) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
virtual ~IsMinimum() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(and "; |
||||
|
// assert it is smallereq than all values.
|
||||
|
for (auto const& ovi : varIndices) { |
||||
|
sstr << "(<= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") "; |
||||
|
} |
||||
|
// assert it is one of the values.
|
||||
|
sstr << "(or "; |
||||
|
for (auto const& ovi : varIndices) { |
||||
|
sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") "; |
||||
|
} |
||||
|
sstr << ")"; // end of the or
|
||||
|
sstr << ")"; // end outer and.
|
||||
|
return sstr.str(); |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
private: |
||||
|
uint64_t varIndex; |
||||
|
std::vector<uint64_t> varIndices; |
||||
|
}; |
||||
|
|
||||
|
class BetweenValues : public DFTConstraint { |
||||
|
public: |
||||
|
BetweenValues(uint64_t varIndex, uint64_t lower, uint64_t upper) : varIndex(varIndex), upperBound(upper) , lowerBound(lower) { |
||||
|
|
||||
|
} |
||||
|
virtual ~BetweenValues() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(and "; |
||||
|
sstr << "(>= " << varNames.at(varIndex) << " " << lowerBound << ")"; |
||||
|
sstr << "(<= " << varNames.at(varIndex) << " " << upperBound << ")"; |
||||
|
sstr << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
uint64_t varIndex; |
||||
|
uint64_t upperBound; |
||||
|
uint64_t lowerBound; |
||||
|
}; |
||||
|
|
||||
|
class And : public DFTConstraint { |
||||
|
public: |
||||
|
And(std::vector<std::shared_ptr<DFTConstraint>> const& constraints) : constraints(constraints) {} |
||||
|
|
||||
|
virtual ~And() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(and"; |
||||
|
for(auto const& c : constraints) { |
||||
|
sstr << " " << c->toSmtlib2(varNames); |
||||
|
} |
||||
|
sstr << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
private: |
||||
|
std::vector<std::shared_ptr<DFTConstraint>> constraints; |
||||
|
|
||||
|
}; |
||||
|
|
||||
|
class Iff : public DFTConstraint { |
||||
|
public: |
||||
|
Iff(std::shared_ptr<DFTConstraint> l, std::shared_ptr<DFTConstraint> r) : lhs(l), rhs(r) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(= " << lhs->toSmtlib2(varNames) << " " << rhs->toSmtlib2(varNames) << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
std::shared_ptr<DFTConstraint> lhs; |
||||
|
std::shared_ptr<DFTConstraint> rhs; |
||||
|
}; |
||||
|
|
||||
|
class IsConstantValue : public DFTConstraint { |
||||
|
public: |
||||
|
IsConstantValue(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
virtual ~IsConstantValue() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
assert(varIndex < varNames.size()); |
||||
|
sstr << "(= " << varNames.at(varIndex) << " " << value << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
uint64_t varIndex; |
||||
|
uint64_t value; |
||||
|
}; |
||||
|
|
||||
|
class IsEqual : public DFTConstraint { |
||||
|
public: |
||||
|
IsEqual(uint64_t varIndex1, uint64_t varIndex2) :var1Index(varIndex1), var2Index(varIndex2) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
virtual ~IsEqual() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
return "(= " + varNames.at(var1Index) + " " + varNames.at(var2Index) + " )"; |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
uint64_t var1Index; |
||||
|
uint64_t var2Index; |
||||
|
}; |
||||
|
|
||||
|
class IsLEqual : public DFTConstraint { |
||||
|
public: |
||||
|
IsLEqual(uint64_t varIndex1, uint64_t varIndex2) :var1Index(varIndex1), var2Index(varIndex2) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
virtual ~IsLEqual() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
return "(<= " + varNames.at(var1Index) + " " + varNames.at(var2Index) + " )"; |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
uint64_t var1Index; |
||||
|
uint64_t var2Index; |
||||
|
}; |
||||
|
|
||||
|
class PairwiseDifferent : public DFTConstraint { |
||||
|
public: |
||||
|
PairwiseDifferent(std::vector<uint64_t> const& indices) : varIndices(indices) { |
||||
|
|
||||
|
} |
||||
|
virtual ~PairwiseDifferent() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(distinct"; |
||||
|
// for(uint64_t i = 0; i < varIndices.size(); ++i) {
|
||||
|
// for(uint64_t j = i + 1; j < varIndices.size(); ++j) {
|
||||
|
// sstr << "()";
|
||||
|
// }
|
||||
|
// }
|
||||
|
for (auto const& varIndex : varIndices) { |
||||
|
sstr << " " << varNames.at(varIndex); |
||||
|
} |
||||
|
sstr << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
std::vector<uint64_t> varIndices; |
||||
|
}; |
||||
|
|
||||
|
class Sorted : public DFTConstraint { |
||||
|
public: |
||||
|
Sorted(std::vector<uint64_t> varIndices) : varIndices(varIndices) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
virtual ~Sorted() { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(and "; |
||||
|
for(uint64_t i = 1; i < varIndices.size(); ++i) { |
||||
|
sstr << "(<= " << varNames.at(varIndices.at(i-1)) << " " << varNames.at(varIndices.at(i)) << ")"; |
||||
|
} |
||||
|
sstr << ") "; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
|
||||
|
private: |
||||
|
std::vector<uint64_t> varIndices; |
||||
|
}; |
||||
|
|
||||
|
class IfThenElse : public DFTConstraint { |
||||
|
public: |
||||
|
IfThenElse(std::shared_ptr<DFTConstraint> ifC, std::shared_ptr<DFTConstraint> thenC, std::shared_ptr<DFTConstraint> elseC) : ifConstraint(ifC), thenConstraint(thenC), elseConstraint(elseC) { |
||||
|
|
||||
|
} |
||||
|
|
||||
|
std::string toSmtlib2(std::vector<std::string> const& varNames) const override { |
||||
|
std::stringstream sstr; |
||||
|
sstr << "(ite " << ifConstraint->toSmtlib2(varNames) << " " << thenConstraint->toSmtlib2(varNames) << " " << elseConstraint->toSmtlib2(varNames) << ")"; |
||||
|
return sstr.str(); |
||||
|
} |
||||
|
|
||||
|
private: |
||||
|
std::shared_ptr<DFTConstraint> ifConstraint; |
||||
|
std::shared_ptr<DFTConstraint> thenConstraint; |
||||
|
std::shared_ptr<DFTConstraint> elseConstraint; |
||||
|
}; |
||||
|
|
||||
|
|
||||
|
        // Construct a checker for the given DFT; the constraint system is built
        // lazily by convert().
        DFTASFChecker::DFTASFChecker(storm::storage::DFT<double> const& dft) : dft(dft) {
            // Intentionally left empty.
        }
||||
|
|
||||
|
        // Look up the index of the claim variable for the given (spare, child) pair.
        // Assumes convert() registered the pair; throws std::out_of_range otherwise.
        uint64_t DFTASFChecker::getClaimVariableIndex(uint64_t spare, uint64_t child) const {
            return claimVariables.at(SpareAndChildPair(spare, child));
        }
||||
|
|
||||
|
void DFTASFChecker::convert() { |
||||
|
|
||||
|
std::vector<uint64_t> beVariables; |
||||
|
// Convert all elements
|
||||
|
for (size_t i = 0; i < dft.nrElements(); ++i) { |
||||
|
std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i); |
||||
|
varNames.push_back("t_" + element->name()); |
||||
|
timePointVariables.emplace(i, varNames.size() - 1); |
||||
|
switch (element->type()) { |
||||
|
case storm::storage::DFTElementType::BE: |
||||
|
beVariables.push_back(varNames.size() - 1); |
||||
|
break; |
||||
|
case storm::storage::DFTElementType::SPARE: |
||||
|
{ |
||||
|
auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element); |
||||
|
for( auto const& spareChild : spare->children()) { |
||||
|
varNames.push_back("c_" + element->name() + "_" + spareChild->name()); |
||||
|
claimVariables.emplace(SpareAndChildPair(element->id(), spareChild->id()), varNames.size() - 1); |
||||
|
} |
||||
|
break; |
||||
|
} |
||||
|
default: |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// BE
|
||||
|
constraints.push_back(std::make_shared<PairwiseDifferent>(beVariables)); |
||||
|
constraints.back()->setDescription("No two BEs fail at the same time"); |
||||
|
for( auto const& beV : beVariables) { |
||||
|
constraints.push_back(std::make_shared<BetweenValues>(beV, 1, dft.nrBasicElements())); |
||||
|
} |
||||
|
|
||||
|
// Claim variables
|
||||
|
for (auto const& csvV : claimVariables) { |
||||
|
constraints.push_back(std::make_shared<BetweenValues>(csvV.second, 0, dft.nrBasicElements() + 1)); |
||||
|
} |
||||
|
|
||||
|
for (size_t i = 0; i < dft.nrElements(); ++i) { |
||||
|
std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i); |
||||
|
if(element->isSpareGate()) { |
||||
|
auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element); |
||||
|
auto const& spareChild = spare->children().front(); |
||||
|
constraints.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(spare->id(), spareChild->id()), 0)); |
||||
|
constraints.back()->setDescription("Spare " + spare->name() + " claims first child"); |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
|
||||
|
|
||||
|
for (size_t i = 0; i < dft.nrElements(); ++i) { |
||||
|
std::vector<uint64_t> childVarIndices; |
||||
|
if (dft.isGate(i)) { |
||||
|
std::shared_ptr<storm::storage::DFTGate<ValueType> const> gate = dft.getGate(i); |
||||
|
for (auto const& child : gate->children()) { |
||||
|
childVarIndices.push_back(timePointVariables.at(child->id())); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i); |
||||
|
switch (element->type()) { |
||||
|
case storm::storage::DFTElementType::AND: |
||||
|
constraints.push_back(std::make_shared<IsMaximum>(timePointVariables.at(i), childVarIndices)); |
||||
|
constraints.back()->setDescription("And gate"); |
||||
|
break; |
||||
|
case storm::storage::DFTElementType::OR: |
||||
|
constraints.push_back(std::make_shared<IsMinimum>(timePointVariables.at(i), childVarIndices)); |
||||
|
constraints.back()->setDescription("Or gate"); |
||||
|
break; |
||||
|
case storm::storage::DFTElementType::PAND: |
||||
|
constraints.push_back(std::make_shared<IfThenElse>(std::make_shared<Sorted>(childVarIndices), std::make_shared<IsEqual>(timePointVariables.at(i), timePointVariables.at(childVarIndices.back())), std::make_shared<IsConstantValue>(timePointVariables.at(i), dft.nrBasicElements()+1))); |
||||
|
constraints.back()->setDescription("Pand gate"); |
||||
|
case storm::storage::DFTElementType::SPARE: |
||||
|
{ |
||||
|
auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element); |
||||
|
auto const& children = spare->children(); |
||||
|
|
||||
|
constraints.push_back(std::make_shared<Iff>(std::make_shared<IsLEqual>(getClaimVariableIndex(spare->id(), children.back()->id()), childVarIndices.back()), std::make_shared<IsEqual>(timePointVariables.at(i), childVarIndices.back()))); |
||||
|
constraints.back()->setDescription("Last child & claimed -> spare fails"); |
||||
|
std::vector<std::shared_ptr<DFTConstraint>> ifcs; |
||||
|
uint64_t xv = childVarIndices.at(childVarIndices.size()-2); // if v is the child, xv is the time x fails.
|
||||
|
uint64_t csn = getClaimVariableIndex(spare->id(), children.back()->id()); // csn is the moment the spare claims the next child
|
||||
|
uint64_t xn = childVarIndices.back(); // xn is the moment the next child fails
|
||||
|
ifcs.push_back(std::make_shared<IsLEqual>(xv, xn)); |
||||
|
for(auto const& otherSpare : children.back()->parents()) { |
||||
|
if(otherSpare->id() == i) { continue; }// not a OTHER spare.
|
||||
|
ifcs.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(otherSpare->id(), children.back()->id()), dft.nrBasicElements()+1)); |
||||
|
} |
||||
|
std::shared_ptr<DFTConstraint> ite = std::make_shared<IfThenElse>(std::make_shared<And>(ifcs), std::make_shared<IsEqual>(csn, xv), std::make_shared<IsEqual>(timePointVariables.at(i), xv)); |
||||
|
constraints.push_back(std::make_shared<Iff>(std::make_shared<IsLEqual>(getClaimVariableIndex(spare->id(), children.at(children.size()-2)->id()), xv), ite)); |
||||
|
constraints.back()->setDescription("1 but last child & claimed"); |
||||
|
|
||||
|
for( uint64_t j = 0; j < children.size() - 2; ++j) { |
||||
|
uint64_t currChild = children.size() - 1 - j; |
||||
|
|
||||
|
} |
||||
|
break; |
||||
|
} |
||||
|
default: |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
constraints.push_back(std::make_shared<IsConstantValue>(dft.getTopLevelIndex(), dft.nrBasicElements()+1)); |
||||
|
} |
||||
|
|
||||
|
void DFTASFChecker::toFile(std::string const& filename) { |
||||
|
std::ofstream ofs; |
||||
|
std::cout << "Writing to " << filename << std::endl; |
||||
|
ofs.open(filename); |
||||
|
ofs << "; time point variables" << std::endl; |
||||
|
for (auto const& timeVarEntry : timePointVariables) { |
||||
|
ofs << "(declare-fun " << varNames[timeVarEntry.second] << "() Int)" << std::endl; |
||||
|
} |
||||
|
ofs << "; claim variables" << std::endl; |
||||
|
for (auto const& claimVarEntry : claimVariables) { |
||||
|
ofs << "(declare-fun " << varNames[claimVarEntry.second] << "() Int)" << std::endl; |
||||
|
} |
||||
|
for (auto const& constraint : constraints) { |
||||
|
ofs << "; " << constraint->description() << std::endl; |
||||
|
ofs << "(assert " << constraint->toSmtlib2(varNames) << ")" << std::endl; |
||||
|
} |
||||
|
ofs << "(check-sat)" << std::endl; |
||||
|
ofs.close(); |
||||
|
|
||||
|
} |
||||
|
} |
||||
|
} |
@ -0,0 +1,61 @@ |
|||||
|
#pragma once |
||||
|
|
||||
|
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

#include "src/storage/dft/DFT.h"
||||
|
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace modelchecker { |
||||
|
        // Abstract base class for SMT constraints over the DFT encoding.
        // Subclasses render themselves to SMT-LIB2 via toSmtlib2().
        class DFTConstraint {
        public:
            virtual ~DFTConstraint() {
            }

            // Render this constraint as an SMT-LIB2 term; varNames maps variable
            // indices to their declared SMT names.
            virtual std::string toSmtlib2(std::vector<std::string> const& varNames) const = 0;
            // Human-readable description, emitted as a comment next to the assertion.
            virtual std::string description() const { return descript; }

            void setDescription(std::string const& descr) {
                descript = descr;
            }

        private:
            std::string descript;
        };
||||
|
|
||||
|
        // Key type pairing a spare gate with one of its children; used to index
        // the claim variables (see operator< below for the map ordering).
        class SpareAndChildPair {

        public:
            SpareAndChildPair(uint64_t spareIndex, uint64_t childIndex) : spareIndex(spareIndex), childIndex(childIndex) {
            }

            uint64_t spareIndex;
            uint64_t childIndex;
        };
||||
|
|
||||
|
bool operator<(SpareAndChildPair const& p1, SpareAndChildPair const& p2) { |
||||
|
return p1.spareIndex < p2.spareIndex || (p1.spareIndex == p2.spareIndex && p1.childIndex < p2.childIndex); |
||||
|
} |
||||
|
|
||||
|
        // Translates a DFT (over double-valued rates) into an SMT-LIB2 constraint
        // system over failure time points and spare claim times.
        class DFTASFChecker {
            using ValueType = double;

        public:
            DFTASFChecker(storm::storage::DFT<ValueType> const&);
            // Build the constraint system from the DFT.
            void convert();
            // Write the generated SMT-LIB2 problem to the given file.
            void toFile(std::string const&);

        private:
            // Index of the claim variable for the given spare/child pair.
            uint64_t getClaimVariableIndex(uint64_t spareIndex, uint64_t childIndex) const;

            storm::storage::DFT<ValueType> const& dft;
            // varNames[i] is the declared SMT name of variable i.
            std::vector<std::string> varNames;
            // element id -> index of its failure time-point variable.
            std::unordered_map<uint64_t, uint64_t> timePointVariables;
            std::vector<std::shared_ptr<DFTConstraint>> constraints;
            // (spare, child) -> index of the claim variable.
            std::map<SpareAndChildPair, uint64_t> claimVariables;
        };
||||
|
} |
||||
|
} |
@ -0,0 +1,441 @@ |
|||||
|
#include "src/modelchecker/dft/DFTModelChecker.h"
|
||||
|
|
||||
|
#include "src/builder/ExplicitDFTModelBuilder.h"
|
||||
|
#include "src/builder/ExplicitDFTModelBuilderApprox.h"
|
||||
|
#include "src/builder/ParallelCompositionBuilder.h"
|
||||
|
#include "src/storage/dft/DFTIsomorphism.h"
|
||||
|
#include "src/settings/modules/DFTSettings.h"
|
||||
|
#include "src/utility/bitoperations.h"
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace modelchecker { |
||||
|
|
||||
|
        template<typename ValueType>
        DFTModelChecker<ValueType>::DFTModelChecker() {
            // Start from a neutral result; it is overwritten by check().
            checkResult = storm::utility::zero<ValueType>();
        }
||||
|
|
||||
|
        // Analyse the given DFT with respect to the given formula, storing the result
        // in checkResult and the per-phase timings in the corresponding members.
        template<typename ValueType>
        void DFTModelChecker<ValueType>::check(storm::storage::DFT<ValueType> const& origDft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool allowModularisation, bool enableDC, double approximationError) {
            // Initialize (reset all timers from a possible previous run)
            this->explorationTime = std::chrono::duration<double>::zero();
            this->buildingTime = std::chrono::duration<double>::zero();
            this->bisimulationTime = std::chrono::duration<double>::zero();
            this->modelCheckingTime = std::chrono::duration<double>::zero();
            this->totalTime = std::chrono::duration<double>::zero();
            this->approximationError = approximationError;
            totalStart = std::chrono::high_resolution_clock::now();

            // Optimizing DFT
            storm::storage::DFT<ValueType> dft = origDft.optimize();

            // TODO Matthias: check that all paths reach the target state for approximation

            // Checking DFT
            if (formula->isProbabilityOperatorFormula() || !allowModularisation) {
                // Probability formulas (or disabled modularisation) go through the
                // recursive modularisation/permutation helper.
                checkResult = checkHelper(dft, formula, symred, allowModularisation, enableDC, approximationError);
            } else {
                // Otherwise build one composed model and check it directly.
                std::shared_ptr<storm::models::sparse::Model<ValueType>> model = buildModelComposition(dft, formula, symred, allowModularisation, enableDC);
                // Model checking
                std::unique_ptr<storm::modelchecker::CheckResult> result = checkModel(model, formula);
                result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates()));
                checkResult = result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second;

            }
            this->totalTime = std::chrono::high_resolution_clock::now() - totalStart;
        }
||||
|
|
||||
|
template<typename ValueType> |
||||
|
typename DFTModelChecker<ValueType>::dft_result DFTModelChecker<ValueType>::checkHelper(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool allowModularisation, bool enableDC, double approximationError) { |
||||
|
STORM_LOG_TRACE("Check helper called"); |
||||
|
bool modularisationPossible = allowModularisation; |
||||
|
|
||||
|
// Try modularisation
|
||||
|
if(modularisationPossible) { |
||||
|
bool invResults = false; |
||||
|
std::vector<storm::storage::DFT<ValueType>> dfts; |
||||
|
size_t nrK = 0; // K out of M
|
||||
|
size_t nrM = 0; // K out of M
|
||||
|
|
||||
|
switch (dft.topLevelType()) { |
||||
|
case storm::storage::DFTElementType::AND: |
||||
|
STORM_LOG_TRACE("top modularisation called AND"); |
||||
|
dfts = dft.topModularisation(); |
||||
|
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
||||
|
nrK = dfts.size(); |
||||
|
nrM = dfts.size(); |
||||
|
modularisationPossible = dfts.size() > 1; |
||||
|
break; |
||||
|
case storm::storage::DFTElementType::OR: |
||||
|
STORM_LOG_TRACE("top modularisation called OR"); |
||||
|
dfts = dft.topModularisation(); |
||||
|
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
||||
|
nrK = 0; |
||||
|
nrM = dfts.size(); |
||||
|
invResults = true; |
||||
|
modularisationPossible = dfts.size() > 1; |
||||
|
break; |
||||
|
case storm::storage::DFTElementType::VOT: |
||||
|
STORM_LOG_TRACE("top modularisation called VOT"); |
||||
|
dfts = dft.topModularisation(); |
||||
|
STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules."); |
||||
|
nrK = std::static_pointer_cast<storm::storage::DFTVot<ValueType> const>(dft.getTopLevelGate())->threshold(); |
||||
|
nrM = dfts.size(); |
||||
|
if(nrK <= nrM/2) { |
||||
|
nrK -= 1; |
||||
|
invResults = true; |
||||
|
} |
||||
|
modularisationPossible = dfts.size() > 1; |
||||
|
break; |
||||
|
default: |
||||
|
// No static gate -> no modularisation applicable
|
||||
|
modularisationPossible = false; |
||||
|
break; |
||||
|
} |
||||
|
|
||||
|
if(modularisationPossible) { |
||||
|
STORM_LOG_TRACE("Recursive CHECK Call"); |
||||
|
if (formula->isProbabilityOperatorFormula()) { |
||||
|
// Recursively call model checking
|
||||
|
std::vector<ValueType> res; |
||||
|
for(auto const ft : dfts) { |
||||
|
dft_result ftResult = checkHelper(ft, formula, symred, true, enableDC, 0.0); |
||||
|
res.push_back(boost::get<ValueType>(ftResult)); |
||||
|
} |
||||
|
|
||||
|
// Combine modularisation results
|
||||
|
STORM_LOG_TRACE("Combining all results... K=" << nrK << "; M=" << nrM << "; invResults=" << (invResults?"On":"Off")); |
||||
|
ValueType result = storm::utility::zero<ValueType>(); |
||||
|
int limK = invResults ? -1 : nrM+1; |
||||
|
int chK = invResults ? -1 : 1; |
||||
|
// WARNING: there is a bug for computing permutations with more than 32 elements
|
||||
|
STORM_LOG_ASSERT(res.size() < 32, "Permutations work only for < 32 elements"); |
||||
|
for(int cK = nrK; cK != limK; cK += chK ) { |
||||
|
STORM_LOG_ASSERT(cK >= 0, "ck negative."); |
||||
|
size_t permutation = smallestIntWithNBitsSet(static_cast<size_t>(cK)); |
||||
|
do { |
||||
|
STORM_LOG_TRACE("Permutation="<<permutation); |
||||
|
ValueType permResult = storm::utility::one<ValueType>(); |
||||
|
for(size_t i = 0; i < res.size(); ++i) { |
||||
|
if(permutation & (1 << i)) { |
||||
|
permResult *= res[i]; |
||||
|
} else { |
||||
|
permResult *= storm::utility::one<ValueType>() - res[i]; |
||||
|
} |
||||
|
} |
||||
|
STORM_LOG_TRACE("Result for permutation:"<<permResult); |
||||
|
permutation = nextBitPermutation(permutation); |
||||
|
result += permResult; |
||||
|
} while(permutation < (1 << nrM) && permutation != 0); |
||||
|
} |
||||
|
if(invResults) { |
||||
|
result = storm::utility::one<ValueType>() - result; |
||||
|
} |
||||
|
return result; |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// If we are here, no modularisation was possible
|
||||
|
STORM_LOG_ASSERT(!modularisationPossible, "Modularisation should not be possible."); |
||||
|
return checkDFT(dft, formula, symred, enableDC, approximationError); |
||||
|
} |
||||
|
|
||||
|
        template<typename ValueType>
        // Builds a CTMC for the DFT via parallel composition of the submodule CTMCs.
        // Only valid for time-operator formulas; falls back to building one monolithic
        // CTMC when top-level modularisation is not applicable.
        std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> DFTModelChecker<ValueType>::buildModelComposition(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool allowModularisation, bool enableDC) {
            STORM_LOG_TRACE("Build model via composition");
            // Use parallel composition for CTMCs for expected time
            STORM_LOG_ASSERT(formula->isTimeOperatorFormula(), "Formula is not a time operator formula");
            bool modularisationPossible = allowModularisation;

            // Try modularisation: only static top-level gates (AND/OR) are supported here.
            if(modularisationPossible) {
                std::vector<storm::storage::DFT<ValueType>> dfts;
                // Composition semantics of the submodules: conjunction (AND) vs. disjunction (OR).
                bool isAnd = true;

                switch (dft.topLevelType()) {
                    case storm::storage::DFTElementType::AND:
                        STORM_LOG_TRACE("top modularisation called AND");
                        dfts = dft.topModularisation();
                        STORM_LOG_TRACE("Modularisation into " << dfts.size() << " submodules.");
                        // Only worthwhile if the split yields more than one submodule.
                        modularisationPossible = dfts.size() > 1;
                        isAnd = true;
                        break;
                    case storm::storage::DFTElementType::OR:
                        STORM_LOG_TRACE("top modularisation called OR");
                        dfts = dft.topModularisation();
                        STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules.");
                        modularisationPossible = dfts.size() > 1;
                        isAnd = false;
                        break;
                    case storm::storage::DFTElementType::VOT:
                        /*STORM_LOG_TRACE("top modularisation called VOT");
                        dfts = dft.topModularisation();
                        STORM_LOG_TRACE("Modularsation into " << dfts.size() << " submodules.");
                        std::static_pointer_cast<storm::storage::DFTVot<ValueType> const>(dft.getTopLevelGate())->threshold();
                        */
                        // TODO enable modularisation for voting gate
                        modularisationPossible = false;
                        break;
                    default:
                        // No static gate -> no modularisation applicable
                        modularisationPossible = false;
                        break;
                }

                if(modularisationPossible) {
                    STORM_LOG_TRACE("Recursive CHECK Call");
                    bool firstTime = true;
                    std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> composedModel;
                    // Build each submodule CTMC, minimize it, and fold it into the composition.
                    for (auto const ft : dfts) {
                        STORM_LOG_INFO("Building Model via parallel composition...");
                        std::chrono::high_resolution_clock::time_point checkingStart = std::chrono::high_resolution_clock::now();

                        // Find symmetries (per submodule)
                        std::map<size_t, std::vector<std::vector<size_t>>> emptySymmetry;
                        storm::storage::DFTIndependentSymmetries symmetries(emptySymmetry);
                        if(symred) {
                            auto colouring = ft.colourDFT();
                            symmetries = ft.findSymmetries(colouring);
                            STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
                            STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
                        }

                        // Build a single CTMC for this submodule (no approximation: iteration 0, error 0.0)
                        STORM_LOG_INFO("Building Model...");
                        storm::builder::ExplicitDFTModelBuilderApprox<ValueType> builder(ft, symmetries, enableDC);
                        typename storm::builder::ExplicitDFTModelBuilderApprox<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula
                        builder.buildModel(labeloptions, 0, 0.0);
                        std::shared_ptr<storm::models::sparse::Model<ValueType>> model = builder.getModel();
                        //model->printModelInformationToStream(std::cout);
                        STORM_LOG_INFO("No. states (Explored): " << model->getNumberOfStates());
                        STORM_LOG_INFO("No. transitions (Explored): " << model->getNumberOfTransitions());
                        explorationTime += std::chrono::high_resolution_clock::now() - checkingStart;

                        STORM_LOG_THROW(model->isOfType(storm::models::ModelType::Ctmc), storm::exceptions::NotSupportedException, "Parallel composition only applicable for CTMCs");
                        std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> ctmc = model->template as<storm::models::sparse::Ctmc<ValueType>>();

                        // Minimize the submodule CTMC before composing to keep the product small.
                        ctmc = storm::performDeterministicSparseBisimulationMinimization<storm::models::sparse::Ctmc<ValueType>>(ctmc, {formula}, storm::storage::BisimulationType::Weak)->template as<storm::models::sparse::Ctmc<ValueType>>();

                        if (firstTime) {
                            // First submodule seeds the composition.
                            composedModel = ctmc;
                            firstTime = false;
                        } else {
                            composedModel = storm::builder::ParallelCompositionBuilder<ValueType>::compose(composedModel, ctmc, isAnd);
                        }

                        // Apply bisimulation to the (growing) composed model after every step
                        std::chrono::high_resolution_clock::time_point bisimulationStart = std::chrono::high_resolution_clock::now();
                        composedModel = storm::performDeterministicSparseBisimulationMinimization<storm::models::sparse::Ctmc<ValueType>>(composedModel, {formula}, storm::storage::BisimulationType::Weak)->template as<storm::models::sparse::Ctmc<ValueType>>();
                        std::chrono::high_resolution_clock::time_point bisimulationEnd = std::chrono::high_resolution_clock::now();
                        bisimulationTime += bisimulationEnd - bisimulationStart;

                        STORM_LOG_INFO("No. states (Composed): " << composedModel->getNumberOfStates());
                        STORM_LOG_INFO("No. transitions (Composed): " << composedModel->getNumberOfTransitions());
                        if (composedModel->getNumberOfStates() <= 15) {
                            STORM_LOG_TRACE("Transition matrix: " << std::endl << composedModel->getTransitionMatrix());
                        } else {
                            STORM_LOG_TRACE("Transition matrix: too big to print");
                        }

                    }
                    return composedModel;
                }
            }

            // If we are here, no composition was possible; build one monolithic CTMC instead.
            STORM_LOG_ASSERT(!modularisationPossible, "Modularisation should not be possible.");
            std::chrono::high_resolution_clock::time_point checkingStart = std::chrono::high_resolution_clock::now();
            // Find symmetries
            std::map<size_t, std::vector<std::vector<size_t>>> emptySymmetry;
            storm::storage::DFTIndependentSymmetries symmetries(emptySymmetry);
            if(symred) {
                auto colouring = dft.colourDFT();
                symmetries = dft.findSymmetries(colouring);
                STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
                STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
            }
            // Build a single CTMC
            STORM_LOG_INFO("Building Model...");

            storm::builder::ExplicitDFTModelBuilderApprox<ValueType> builder(dft, symmetries, enableDC);
            typename storm::builder::ExplicitDFTModelBuilderApprox<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula
            builder.buildModel(labeloptions, 0, 0.0);
            std::shared_ptr<storm::models::sparse::Model<ValueType>> model = builder.getModel();
            //model->printModelInformationToStream(std::cout);
            STORM_LOG_INFO("No. states (Explored): " << model->getNumberOfStates());
            STORM_LOG_INFO("No. transitions (Explored): " << model->getNumberOfTransitions());
            explorationTime += std::chrono::high_resolution_clock::now() - checkingStart;
            STORM_LOG_THROW(model->isOfType(storm::models::ModelType::Ctmc), storm::exceptions::NotSupportedException, "Parallel composition only applicable for CTMCs");

            return model->template as<storm::models::sparse::Ctmc<ValueType>>();
        }
||||
|
|
||||
|
        template<typename ValueType>
        // Checks a single (non-modularised) DFT. With approximationError > 0 it iteratively
        // refines under-/over-approximating models until the bounds are close enough;
        // otherwise it builds one exact model and checks it.
        typename DFTModelChecker<ValueType>::dft_result DFTModelChecker<ValueType>::checkDFT(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool enableDC, double approximationError) {
            std::chrono::high_resolution_clock::time_point checkingStart = std::chrono::high_resolution_clock::now();

            // Find symmetries
            std::map<size_t, std::vector<std::vector<size_t>>> emptySymmetry;
            storm::storage::DFTIndependentSymmetries symmetries(emptySymmetry);
            if(symred) {
                auto colouring = dft.colourDFT();
                symmetries = dft.findSymmetries(colouring);
                STORM_LOG_INFO("Found " << symmetries.groups.size() << " symmetries.");
                STORM_LOG_TRACE("Symmetries: " << std::endl << symmetries);
            }

            if (approximationError > 0.0) {
                // Comparator for checking the error of the approximation
                storm::utility::ConstantsComparator<ValueType> comparator;
                // Build approximate Markov Automata for lower and upper bound
                approximation_result approxResult = std::make_pair(storm::utility::zero<ValueType>(), storm::utility::zero<ValueType>());
                std::shared_ptr<storm::models::sparse::Model<ValueType>> model;
                // One builder is reused across iterations so exploration can be refined incrementally.
                storm::builder::ExplicitDFTModelBuilderApprox<ValueType> builder(dft, symmetries, enableDC);
                typename storm::builder::ExplicitDFTModelBuilderApprox<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula

                bool probabilityFormula = formula->isProbabilityOperatorFormula();
                // Exactly one of {time formula, probability formula} must hold.
                STORM_LOG_ASSERT((formula->isTimeOperatorFormula() && !probabilityFormula) || (!formula->isTimeOperatorFormula() && probabilityFormula), "Probability formula not initialized correctly");
                size_t iteration = 0;
                do {
                    // Iteratively build finer models
                    std::chrono::high_resolution_clock::time_point explorationStart = std::chrono::high_resolution_clock::now();
                    STORM_LOG_INFO("Building model...");
                    // TODO Matthias refine model using existing model and MC results
                    builder.buildModel(labeloptions, iteration, approximationError);
                    std::chrono::high_resolution_clock::time_point explorationEnd = std::chrono::high_resolution_clock::now();
                    explorationTime += explorationEnd - explorationStart;

                    // TODO Matthias: possible to do bisimulation on approximated model and not on concrete one?

                    // Build model for lower bound.
                    // NOTE(review): the flag passed to getModelApproximation is inverted for
                    // probability formulas — presumably because under-/over-approximation swap
                    // roles between probability and expected-time properties; confirm in builder.
                    STORM_LOG_INFO("Getting model for lower bound...");
                    model = builder.getModelApproximation(probabilityFormula ? false : true);
                    // We only output the info from the lower bound as the info for the upper bound is the same
                    STORM_LOG_INFO("No. states: " << model->getNumberOfStates());
                    STORM_LOG_INFO("No. transitions: " << model->getNumberOfTransitions());
                    buildingTime += std::chrono::high_resolution_clock::now() - explorationEnd;

                    // Check lower bound
                    std::unique_ptr<storm::modelchecker::CheckResult> result = checkModel(model, formula);
                    result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates()));
                    ValueType newResult = result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second;
                    // Lower bounds must be monotonically non-decreasing across iterations.
                    STORM_LOG_ASSERT(iteration == 0 || !comparator.isLess(newResult, approxResult.first), "New under-approximation " << newResult << " is smaller than old result " << approxResult.first);
                    approxResult.first = newResult;

                    // Build model for upper bound
                    STORM_LOG_INFO("Getting model for upper bound...");
                    explorationEnd = std::chrono::high_resolution_clock::now();
                    model = builder.getModelApproximation(probabilityFormula ? true : false);
                    buildingTime += std::chrono::high_resolution_clock::now() - explorationEnd;
                    // Check upper bound
                    result = checkModel(model, formula);
                    result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates()));
                    newResult = result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second;
                    // Upper bounds must be monotonically non-increasing across iterations.
                    STORM_LOG_ASSERT(iteration == 0 || !comparator.isLess(approxResult.second, newResult), "New over-approximation " << newResult << " is greater than old result " << approxResult.second);
                    approxResult.second = newResult;

                    ++iteration;
                    STORM_LOG_INFO("Result after iteration " << iteration << ": (" << std::setprecision(10) << approxResult.first << ", " << approxResult.second << ")");
                    totalTime = std::chrono::high_resolution_clock::now() - totalStart;
                    printTimings();
                    STORM_LOG_THROW(!storm::utility::isInfinity<ValueType>(approxResult.first) && !storm::utility::isInfinity<ValueType>(approxResult.second), storm::exceptions::NotSupportedException, "Approximation does not work if result might be infinity.");
                } while (!isApproximationSufficient(approxResult.first, approxResult.second, approximationError, probabilityFormula));

                STORM_LOG_INFO("Finished approximation after " << iteration << " iteration" << (iteration > 1 ? "s." : "."));
                return approxResult;
            } else {
                // Build a single Markov Automaton (no approximation)
                STORM_LOG_INFO("Building Model...");
                std::shared_ptr<storm::models::sparse::Model<ValueType>> model;
                // TODO Matthias: use only one builder if everything works again
                if (storm::settings::getModule<storm::settings::modules::DFTSettings>().isApproximationErrorSet()) {
                    storm::builder::ExplicitDFTModelBuilderApprox<ValueType> builder(dft, symmetries, enableDC);
                    typename storm::builder::ExplicitDFTModelBuilderApprox<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula
                    builder.buildModel(labeloptions, 0, 0.0);
                    model = builder.getModel();
                } else {
                    storm::builder::ExplicitDFTModelBuilder<ValueType> builder(dft, symmetries, enableDC);
                    typename storm::builder::ExplicitDFTModelBuilder<ValueType>::LabelOptions labeloptions; // TODO initialize this with the formula
                    model = builder.buildModel(labeloptions);
                }
                //model->printModelInformationToStream(std::cout);
                STORM_LOG_INFO("No. states (Explored): " << model->getNumberOfStates());
                STORM_LOG_INFO("No. transitions (Explored): " << model->getNumberOfTransitions());
                explorationTime += std::chrono::high_resolution_clock::now() - checkingStart;

                // Model checking
                std::unique_ptr<storm::modelchecker::CheckResult> result = checkModel(model, formula);
                result->filter(storm::modelchecker::ExplicitQualitativeCheckResult(model->getInitialStates()));
                // Single exact value taken from the initial state.
                return result->asExplicitQuantitativeCheckResult<ValueType>().getValueMap().begin()->second;
            }
        }
||||
|
|
||||
|
        template<typename ValueType>
        // Verifies the given sparse model against the formula, optionally applying weak
        // bisimulation minimization first (only for CTMCs and only if the user enabled it).
        // Note: 'model' is taken by reference and may be replaced by the minimized model.
        std::unique_ptr<storm::modelchecker::CheckResult> DFTModelChecker<ValueType>::checkModel(std::shared_ptr<storm::models::sparse::Model<ValueType>>& model, std::shared_ptr<const storm::logic::Formula> const& formula) {
            // Bisimulation
            std::chrono::high_resolution_clock::time_point bisimulationStart = std::chrono::high_resolution_clock::now();
            if (model->isOfType(storm::models::ModelType::Ctmc) && storm::settings::getModule<storm::settings::modules::GeneralSettings>().isBisimulationSet()) {
                STORM_LOG_INFO("Bisimulation...");
                model = storm::performDeterministicSparseBisimulationMinimization<storm::models::sparse::Ctmc<ValueType>>(model->template as<storm::models::sparse::Ctmc<ValueType>>(), {formula}, storm::storage::BisimulationType::Weak)->template as<storm::models::sparse::Ctmc<ValueType>>();
                STORM_LOG_INFO("No. states (Bisimulation): " << model->getNumberOfStates());
                STORM_LOG_INFO("No. transitions (Bisimulation): " << model->getNumberOfTransitions());
            }
            std::chrono::high_resolution_clock::time_point bisimulationEnd = std::chrono::high_resolution_clock::now();
            bisimulationTime += bisimulationEnd - bisimulationStart;

            // Check the model
            STORM_LOG_INFO("Model checking...");
            std::unique_ptr<storm::modelchecker::CheckResult> result(storm::verifySparseModel(model, formula));
            STORM_LOG_INFO("Model checking done.");
            STORM_LOG_ASSERT(result, "Result does not exist.");
            modelCheckingTime += std::chrono::high_resolution_clock::now() - bisimulationEnd;
            return result;
        }
||||
|
|
||||
|
        template<typename ValueType>
        // Generic fallback: the approximation criterion is only defined for double;
        // any other value type (e.g. rational functions) rejects at runtime.
        // The specialization for double is defined below.
        bool DFTModelChecker<ValueType>::isApproximationSufficient(ValueType lowerBound, ValueType upperBound, double approximationError, bool relative) {
            STORM_LOG_THROW(false, storm::exceptions::NotImplementedException, "Approximation works only for double.");
        }
||||
|
|
||||
|
template<> |
||||
|
bool DFTModelChecker<double>::isApproximationSufficient(double lowerBound, double upperBound, double approximationError, bool relative) { |
||||
|
STORM_LOG_THROW(!std::isnan(lowerBound) && !std::isnan(upperBound), storm::exceptions::NotSupportedException, "Approximation does not work if result is NaN."); |
||||
|
if (relative) { |
||||
|
return upperBound - lowerBound <= approximationError; |
||||
|
} else { |
||||
|
return upperBound - lowerBound <= approximationError * (lowerBound + upperBound) / 2; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
void DFTModelChecker<ValueType>::printTimings(std::ostream& os) { |
||||
|
os << "Times:" << std::endl; |
||||
|
os << "Exploration:\t" << explorationTime.count() << std::endl; |
||||
|
os << "Building:\t" << buildingTime.count() << std::endl; |
||||
|
os << "Bisimulation:\t" << bisimulationTime.count() << std::endl; |
||||
|
os << "Modelchecking:\t" << modelCheckingTime.count() << std::endl; |
||||
|
os << "Total:\t\t" << totalTime.count() << std::endl; |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
void DFTModelChecker<ValueType>::printResult(std::ostream& os) { |
||||
|
os << "Result: ["; |
||||
|
if (this->approximationError > 0.0) { |
||||
|
approximation_result result = boost::get<approximation_result>(checkResult); |
||||
|
os << "(" << result.first << ", " << result.second << ")"; |
||||
|
} else { |
||||
|
os << boost::get<ValueType>(checkResult); |
||||
|
} |
||||
|
os << "]" << std::endl; |
||||
|
} |
||||
|
|
||||
|
|
||||
|
template class DFTModelChecker<double>; |
||||
|
|
||||
|
#ifdef STORM_HAVE_CARL
|
||||
|
template class DFTModelChecker<storm::RationalFunction>; |
||||
|
#endif
|
||||
|
} |
||||
|
} |
@ -0,0 +1,140 @@ |
|||||
|
#ifndef STORM_MODELCHECKER_DFT_DFTMODELCHECKER_H_
// Bug fix: the guard macro was never defined, so the #ifndef never latched and
// repeated inclusion redefined the class. Define it immediately after the check.
#define STORM_MODELCHECKER_DFT_DFTMODELCHECKER_H_

#include "src/logic/Formula.h"
#include "src/modelchecker/results/CheckResult.h"
#include "src/storage/dft/DFT.h"
#include "src/utility/storm.h"

// boost::variant is used for dft_result below; include it explicitly instead of
// relying on transitive includes.
#include <boost/variant.hpp>

#include <chrono>
#include <iostream>
#include <memory>
#include <utility>

namespace storm {
    namespace modelchecker {

        /*!
         * Analyser for DFTs.
         */
        template<typename ValueType>
        class DFTModelChecker {

            // Lower and upper bound obtained via iterative approximation.
            typedef std::pair<ValueType, ValueType> approximation_result;
            // A model checking result: either an exact value or approximation bounds.
            typedef boost::variant<ValueType, approximation_result> dft_result;

        public:

            /*!
             * Constructor.
             */
            DFTModelChecker();

            /*!
             * Main method for checking DFTs.
             *
             * @param origDft Original DFT
             * @param formula Formula to check for
             * @param symred Flag indicating if symmetry reduction should be used
             * @param allowModularisation Flag indication if modularisation is allowed
             * @param enableDC Flag indicating if dont care propagation should be used
             * @param approximationError Error allowed for approximation. Value 0 indicates no approximation
             */
            void check(storm::storage::DFT<ValueType> const& origDft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred = true, bool allowModularisation = true, bool enableDC = true, double approximationError = 0.0);

            /*!
             * Print timings of all operations to stream.
             *
             * @param os Output stream to write to.
             */
            void printTimings(std::ostream& os = std::cout);

            /*!
             * Print result to stream.
             *
             * @param os Output stream to write to.
             */
            void printResult(std::ostream& os = std::cout);

        private:

            // Timing values, accumulated across all (possibly recursive) checks.
            std::chrono::duration<double> buildingTime = std::chrono::duration<double>::zero();
            std::chrono::duration<double> explorationTime = std::chrono::duration<double>::zero();
            std::chrono::duration<double> bisimulationTime = std::chrono::duration<double>::zero();
            std::chrono::duration<double> modelCheckingTime = std::chrono::duration<double>::zero();
            std::chrono::duration<double> totalTime = std::chrono::duration<double>::zero();
            std::chrono::high_resolution_clock::time_point totalStart;

            // Model checking result
            dft_result checkResult;

            // Allowed error bound for approximation
            double approximationError;

            /*!
             * Internal helper for model checking a DFT.
             *
             * @param dft DFT
             * @param formula Formula to check for
             * @param symred Flag indicating if symmetry reduction should be used
             * @param allowModularisation Flag indication if modularisation is allowed
             * @param enableDC Flag indicating if dont care propagation should be used
             * @param approximationError Error allowed for approximation. Value 0 indicates no approximation
             *
             * @return Model checking result (or in case of approximation two results for lower and upper bound)
             */
            dft_result checkHelper(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool allowModularisation, bool enableDC, double approximationError);

            /*!
             * Internal helper for building a CTMC from a DFT via parallel composition.
             *
             * @param dft DFT
             * @param formula Formula to check for
             * @param symred Flag indicating if symmetry reduction should be used
             * @param allowModularisation Flag indication if modularisation is allowed
             * @param enableDC Flag indicating if dont care propagation should be used
             *
             * @return CTMC representing the DFT
             */
            std::shared_ptr<storm::models::sparse::Ctmc<ValueType>> buildModelComposition(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool allowModularisation, bool enableDC);

            /*!
             * Check model generated from DFT.
             *
             * @param dft The DFT
             * @param formula Formula to check for
             * @param symred Flag indicating if symmetry reduction should be used
             * @param enableDC Flag indicating if dont care propagation should be used
             * @param approximationError Error allowed for approximation. Value 0 indicates no approximation
             *
             * @return Model checking result
             */
            dft_result checkDFT(storm::storage::DFT<ValueType> const& dft, std::shared_ptr<const storm::logic::Formula> const& formula, bool symred, bool enableDC, double approximationError = 0.0);

            /*!
             * Check the given markov model for the given property.
             *
             * @param model Model to check
             * @param formula Formula to check for
             *
             * @return Model checking result
             */
            std::unique_ptr<storm::modelchecker::CheckResult> checkModel(std::shared_ptr<storm::models::sparse::Model<ValueType>>& model, std::shared_ptr<const storm::logic::Formula> const& formula);

            /*!
             * Checks if the computed approximation is sufficient, i.e.
             * upperBound - lowerBound <= approximationError * mean(lowerBound, upperBound).
             *
             * @param lowerBound The lower bound on the result.
             * @param upperBound The upper bound on the result.
             * @param approximationError The allowed error for approximating.
             * @param relative Flag indicating if the error should be relative to 1 or
             *                 to the mean of lower and upper bound.
             *
             * @return True, if the approximation is sufficient.
             */
            bool isApproximationSufficient(ValueType lowerBound, ValueType upperBound, double approximationError, bool relative);

        };
    }
}
#endif /* STORM_MODELCHECKER_DFT_DFTMODELCHECKER_H_ */
@ -0,0 +1,212 @@ |
|||||
|
#include "src/storage/BucketPriorityQueue.h"
|
||||
|
#include "src/utility/macros.h"
|
||||
|
#include "src/adapters/CarlAdapter.h"
|
||||
|
|
||||
|
#include <cmath>
|
||||
|
|
||||
|
namespace storm { |
||||
|
namespace storage { |
||||
|
|
||||
|
        template<typename ValueType>
        // Constructs an empty queue with nrBuckets priority buckets. Priorities are
        // mapped to buckets logarithmically: lowerValue is the smallest admissible
        // priority and ratio is the per-bucket growth factor (logBase = log(ratio)).
        // currentBucket == nrBuckets encodes "no non-immediate bucket is occupied".
        BucketPriorityQueue<ValueType>::BucketPriorityQueue(size_t nrBuckets, double lowerValue, double ratio) : lowerValue(lowerValue), logBase(std::log(ratio)), nrBuckets(nrBuckets), nrUnsortedItems(0), buckets(nrBuckets), currentBucket(nrBuckets) {
            // Heap comparator: order items by the heuristic's own operator<.
            compare = ([this](HeuristicPointer a, HeuristicPointer b) {
                return *a < *b;
            });
        }
||||
|
|
||||
|
template<typename ValueType> |
||||
|
void BucketPriorityQueue<ValueType>::fix() { |
||||
|
if (currentBucket < nrBuckets && nrUnsortedItems > buckets[currentBucket].size() / 10) { |
||||
|
// Fix current bucket
|
||||
|
std::make_heap(buckets[currentBucket].begin(), buckets[currentBucket].end(), compare); |
||||
|
nrUnsortedItems = 0; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
bool BucketPriorityQueue<ValueType>::empty() const { |
||||
|
return currentBucket == nrBuckets && immediateBucket.empty(); |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
std::size_t BucketPriorityQueue<ValueType>::size() const { |
||||
|
size_t size = immediateBucket.size(); |
||||
|
for (size_t i = currentBucket; currentBucket < nrBuckets; ++i) { |
||||
|
size += buckets[i].size(); |
||||
|
} |
||||
|
return size; |
||||
|
} |
||||
|
|
||||
|
        template<typename ValueType>
        // Returns (without removing) the next item to process. Items marked for
        // immediate expansion take precedence; otherwise the front of the heap in the
        // first occupied bucket is the best item.
        typename BucketPriorityQueue<ValueType>::HeuristicPointer const& BucketPriorityQueue<ValueType>::top() const {
            if (!immediateBucket.empty()) {
                // Immediate items are served LIFO from the back.
                return immediateBucket.back();
            }
            STORM_LOG_ASSERT(!empty(), "BucketPriorityQueue is empty");
            return buckets[currentBucket].front();
        }
||||
|
|
||||
|
        template<typename ValueType>
        // Inserts an item. Items flagged for expansion bypass the buckets entirely;
        // everything else is placed into the bucket matching its priority. Only the
        // current (first occupied) bucket is kept heap-ordered (when AUTOSORT is on);
        // later buckets stay unsorted until they become current.
        void BucketPriorityQueue<ValueType>::push(HeuristicPointer const& item) {
            if (item->isExpand()) {
                immediateBucket.push_back(item);
                return;
            }
            size_t bucket = getBucket(item->getPriority());
            if (bucket < currentBucket) {
                // New best bucket: it starts out with only sorted (zero) items.
                currentBucket = bucket;
                nrUnsortedItems = 0;
            }
            buckets[bucket].push_back(item);
            if (bucket == currentBucket) {
                // Insert in first bucket
                if (AUTOSORT) {
                    // Maintain the heap invariant of the current bucket.
                    std::push_heap(buckets[currentBucket].begin(), buckets[currentBucket].end(), compare);
                } else {
                    ++nrUnsortedItems;
                }
            }
        }
||||
|
|
||||
|
        template<typename ValueType>
        // Re-positions an item whose priority changed from oldPriority to its current
        // priority. Priorities may only improve (move to a lower-indexed bucket);
        // within the same bucket only heap bookkeeping is updated.
        void BucketPriorityQueue<ValueType>::update(HeuristicPointer const& item, double oldPriority) {
            STORM_LOG_ASSERT(!item->isExpand(), "Item is marked for expansion");
            size_t newBucket = getBucket(item->getPriority());
            size_t oldBucket = getBucket(oldPriority);

            if (oldBucket == newBucket) {
                if (currentBucket < newBucket) {
                    // No change as the bucket is not sorted yet
                } else {
                    if (AUTOSORT) {
                        // Sort first bucket
                        fix();
                    } else {
                        ++nrUnsortedItems;
                    }
                }
            } else {
                // Move to new bucket
                STORM_LOG_ASSERT(newBucket < oldBucket, "Will update to higher bucket");
                if (newBucket < currentBucket) {
                    currentBucket = newBucket;
                    nrUnsortedItems = 0;
                }
                // Remove old entry by swap-and-pop
                if (buckets[oldBucket].size() >= 2) {
                    // Find old index by linear search
                    // Notice: using a map to remember the index was not efficient
                    size_t oldIndex = 0;
                    for ( ; oldIndex < buckets[oldBucket].size(); ++oldIndex) {
                        if (buckets[oldBucket][oldIndex]->getId() == item->getId()) {
                            break;
                        }
                    }
                    STORM_LOG_ASSERT(oldIndex < buckets[oldBucket].size(), "Id " << item->getId() << " not found");
                    // Swap the found entry to the back so pop_back removes it in O(1).
                    std::iter_swap(buckets[oldBucket].begin() + oldIndex, buckets[oldBucket].end() - 1);
                }
                buckets[oldBucket].pop_back();
                // Insert new element
                buckets[newBucket].push_back(item);
                if (newBucket == currentBucket) {
                    if (AUTOSORT) {
                        // Sort in first bucket
                        std::push_heap(buckets[currentBucket].begin(), buckets[currentBucket].end(), compare);
                    } else {
                        ++nrUnsortedItems;
                    }
                }
            }
        }
||||
|
|
||||
|
|
||||
|
        template<typename ValueType>
        // Removes the item that top() currently refers to: first from the immediate
        // bucket, otherwise the best element of the current bucket's heap. When the
        // current bucket becomes empty, advances to the next non-empty bucket.
        void BucketPriorityQueue<ValueType>::pop() {
            if (!immediateBucket.empty()) {
                immediateBucket.pop_back();
                return;
            }
            STORM_LOG_ASSERT(!empty(), "BucketPriorityQueue is empty");
            // Move the best element to the back, then remove it.
            std::pop_heap(buckets[currentBucket].begin(), buckets[currentBucket].end(), compare);
            buckets[currentBucket].pop_back();
            if (buckets[currentBucket].empty()) {
                // Find next bucket with elements
                for ( ; currentBucket < nrBuckets; ++currentBucket) {
                    if (!buckets[currentBucket].empty()) {
                        // The newly current bucket has never been heap-ordered;
                        // treat all of its items as unsorted and (optionally) fix it.
                        nrUnsortedItems = buckets[currentBucket].size();
                        if (AUTOSORT) {
                            fix();
                        }
                        break;
                    }
                }
            }
        }
||||
|
|
||||
|
template<typename ValueType> |
||||
|
typename BucketPriorityQueue<ValueType>::HeuristicPointer BucketPriorityQueue<ValueType>::popTop() { |
||||
|
HeuristicPointer item = top(); |
||||
|
pop(); |
||||
|
return item; |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
size_t BucketPriorityQueue<ValueType>::getBucket(double priority) const { |
||||
|
STORM_LOG_ASSERT(priority >= lowerValue, "Priority " << priority << " is too low"); |
||||
|
|
||||
|
// For possible values greater 1
|
||||
|
double tmpBucket = std::log(priority - lowerValue) / logBase; |
||||
|
tmpBucket += buckets.size() / 2; |
||||
|
if (tmpBucket < 0) { |
||||
|
tmpBucket = 0; |
||||
|
} |
||||
|
size_t newBucket = tmpBucket; |
||||
|
// For values ensured to be lower 1
|
||||
|
//size_t newBucket = std::log(priority - lowerValue) / logBase;
|
||||
|
if (newBucket >= nrBuckets) { |
||||
|
newBucket = nrBuckets - 1; |
||||
|
} |
||||
|
if (!HIGHER) { |
||||
|
newBucket = nrBuckets-1 - newBucket; |
||||
|
} |
||||
|
//std::cout << "get Bucket: " << priority << ", " << newBucket << std::endl;
|
||||
|
STORM_LOG_ASSERT(newBucket < nrBuckets, "Priority " << priority << " is too high"); |
||||
|
return newBucket; |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
void BucketPriorityQueue<ValueType>::print(std::ostream& out) const { |
||||
|
out << "Bucket priority queue with size " << buckets.size() << ", lower value: " << lowerValue << " and logBase: " << logBase << std::endl; |
||||
|
out << "Immediate bucket: "; |
||||
|
for (HeuristicPointer heuristic : immediateBucket) { |
||||
|
out << heuristic->getId() << ", "; |
||||
|
} |
||||
|
out << std::endl; |
||||
|
out << "Current bucket (" << currentBucket << ") has " << nrUnsortedItems << " unsorted items" << std::endl; |
||||
|
for (size_t bucket = 0; bucket < buckets.size(); ++bucket) { |
||||
|
if (!buckets[bucket].empty()) { |
||||
|
out << "Bucket " << bucket << ":" << std::endl; |
||||
|
for (HeuristicPointer heuristic : buckets[bucket]) { |
||||
|
out << "\t" << heuristic->getId() << ": " << heuristic->getPriority() << std::endl; |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
template<typename ValueType> |
||||
|
void BucketPriorityQueue<ValueType>::printSizes(std::ostream& out) const { |
||||
|
out << "Bucket sizes: " << immediateBucket.size() << " | "; |
||||
|
for (size_t bucket = 0; bucket < buckets.size(); ++bucket) { |
||||
|
out << buckets[bucket].size() << " "; |
||||
|
} |
||||
|
std::cout << std::endl; |
||||
|
} |
||||
|
|
||||
|
// Explicit template instantiations for the value types used by the DFT builder.
template class BucketPriorityQueue<double>;

#ifdef STORM_HAVE_CARL
// Rational-function support is only available when built with CArL.
template class BucketPriorityQueue<storm::RationalFunction>;
#endif

} // namespace storage
} // namespace storm
@ -0,0 +1,73 @@ |
|||||
|
#ifndef STORM_STORAGE_BUCKETPRIORITYQUEUE_H_
#define STORM_STORAGE_BUCKETPRIORITYQUEUE_H_

#include "src/builder/DftExplorationHeuristic.h"
#include <algorithm>
#include <functional>
#include <unordered_map>
#include <vector>

namespace storm {
    namespace storage {

        /*!
         * Priority queue for DFT exploration heuristics which groups its items
         * into buckets indexed by the logarithm of their priority. Only the
         * first non-empty ("current") bucket needs to be kept in heap order;
         * items in other buckets stay unsorted until that bucket becomes the
         * current one. An extra "immediate" bucket holds items that must be
         * handled before any bucketed item.
         */
        template<typename ValueType>
        class BucketPriorityQueue {

            // Items are shared exploration heuristics; buckets store shared ownership.
            using HeuristicPointer = std::shared_ptr<storm::builder::DFTExplorationHeuristicBoundDifference<ValueType>>;

        public:
            /*!
             * Creates the queue.
             * @param nrBuckets  Number of regular buckets.
             * @param lowerValue Smallest priority value that may occur; all pushed
             *                   priorities must be >= this value.
             * @param ratio      Base of the bucket spacing — presumably the ratio
             *                   between adjacent bucket bounds (TODO confirm against
             *                   the constructor implementation).
             */
            explicit BucketPriorityQueue(size_t nrBuckets, double lowerValue, double ratio);

            // Restores the sorting of the current bucket (called e.g. when a new
            // bucket becomes current and AUTOSORT is enabled).
            void fix();

            // Returns true iff neither the immediate bucket nor any regular bucket contains items.
            bool empty() const;

            // Total number of items in the queue.
            std::size_t size() const;

            // Returns (without removing) the best item.
            HeuristicPointer const& top() const;

            // Inserts an item into the bucket matching its priority.
            void push(HeuristicPointer const& item);

            // Moves an item whose priority changed from the bucket for oldPriority
            // to the bucket for its new priority.
            void update(HeuristicPointer const& item, double oldPriority);

            // Removes the best item. Items from the immediate bucket are removed first.
            void pop();

            // Returns and removes the best item (top() followed by pop()).
            HeuristicPointer popTop();

            // Prints the complete queue state (for debugging).
            void print(std::ostream& out) const;

            // Prints only the sizes of all buckets.
            void printSizes(std::ostream& out) const;

        private:

            // Maps a priority to the index of its bucket (logarithmic scale).
            size_t getBucket(double priority) const;

            // Smallest admissible priority; getBucket asserts priority >= lowerValue.
            const double lowerValue;

            // If true, larger bucket indices correspond to larger priorities;
            // otherwise getBucket mirrors the index.
            const bool HIGHER = true;

            // If true, the current bucket is kept heap-ordered eagerly on every
            // update/bucket change; otherwise unsorted items are only counted.
            const bool AUTOSORT = false;

            // Logarithm of the bucket base; divisor in the bucket-index computation.
            const double logBase;

            // Number of regular buckets.
            const size_t nrBuckets;

            // Number of items in the current bucket which are not yet in heap order.
            size_t nrUnsortedItems;

            // List of buckets
            std::vector<std::vector<HeuristicPointer>> buckets;

            // Bucket containing all items which should be considered immediately
            std::vector<HeuristicPointer> immediateBucket;

            // Index of first bucket which contains items
            size_t currentBucket;

            // Comparison function establishing the heap order within a bucket.
            std::function<bool(HeuristicPointer, HeuristicPointer)> compare;

        };

    }
}

#endif // STORM_STORAGE_BUCKETPRIORITYQUEUE_H_
@ -0,0 +1,71 @@ |
|||||
|
#ifndef STORM_STORAGE_DYNAMICPRIORITYQUEUE_H_
#define STORM_STORAGE_DYNAMICPRIORITYQUEUE_H_

#include <algorithm>
#include <cstddef>
#include <functional>
#include <utility>
#include <vector>

namespace storm {
    namespace storage {

        /*!
         * Heap-based priority queue (like std::priority_queue) which exposes its
         * underlying container and allows re-establishing the heap property via
         * fix() after external modifications.
         */
        template<typename T, typename Container = std::vector<T>, typename Compare = std::less<T>>
        class DynamicPriorityQueue {

        private:
            Container container;
            Compare compare;

        public:
            /*!
             * Creates an empty queue with the given comparison function.
             */
            explicit DynamicPriorityQueue(Compare const& compare) : container(), compare(compare) {
                // Intentionally left empty
            }

            /*!
             * Creates a queue from an existing container; the elements are
             * heapified according to the given comparison function.
             */
            explicit DynamicPriorityQueue(Container&& container, Compare const& compare) : container(std::move(container)), compare(compare) {
                // Bugfix: the parameter 'container' shadows the member inside the
                // constructor body, so the unqualified name referred to the
                // moved-from argument and the member was never heapified. Qualify
                // with this-> to heapify the actual member.
                std::make_heap(this->container.begin(), this->container.end(), this->compare);
            }

            // Re-establishes the heap property after external changes to the elements.
            void fix() {
                std::make_heap(container.begin(), container.end(), compare);
            }

            // Returns true iff the queue contains no elements.
            bool empty() const {
                return container.empty();
            }

            // Number of elements in the queue.
            std::size_t size() const {
                return container.size();
            }

            // Returns (without removing) the highest-priority element.
            const T& top() const {
                return container.front();
            }

            // Inserts a copy of the given element.
            void push(const T& item) {
                container.push_back(item);
                std::push_heap(container.begin(), container.end(), compare);
            }

            // Inserts the given element by move.
            void push(T&& item) {
                container.push_back(std::move(item));
                std::push_heap(container.begin(), container.end(), compare);
            }

            // Removes the highest-priority element.
            void pop() {
                std::pop_heap(container.begin(), container.end(), compare);
                container.pop_back();
            }

            // Returns and removes the highest-priority element.
            T popTop() {
                T item = top();
                pop();
                return item;
            }

            // Returns a copy of the underlying container.
            Container getContainer() const {
                return container;
            }

        };
    }
}

#endif // STORM_STORAGE_DYNAMICPRIORITYQUEUE_H_
@ -1,17 +0,0 @@ |
|||||
#include "src/utility/vector.h"
|
|
||||
|
|
||||
//template<typename ValueType>
|
|
||||
//std::ostream& operator<<(std::ostream& out, std::vector<ValueType> const& vector) {
|
|
||||
/// Streams a vector of doubles as "vector (n) [ v0, v1, ..., vn-1 ]".
std::ostream& operator<<(std::ostream& out, std::vector<double> const& vector) {
    out << "vector (" << vector.size() << ") [ ";
    // Bugfix: guard the empty case. Previously 'vector.size() - 1' underflowed
    // (size_t) causing out-of-bounds reads, and 'vector.back()' is undefined
    // behavior on an empty vector.
    if (!vector.empty()) {
        for (uint_fast64_t i = 0; i + 1 < vector.size(); ++i) {
            out << vector[i] << ", ";
        }
        out << vector.back();
    }
    out << " ]";
    return out;
}
|
||||
|
|
||||
// Explicitly instantiate functions.
|
|
||||
//template std::ostream& operator<<(std::ostream& out, std::vector<double> const& vector);
|
|
||||
//template std::ostream& operator<<(std::ostream& out, std::vector<uint_fast64_t> const& vector);
|
|
Reference in new issue
xxxxxxxxxx