From ee0026e0e6016ddb333a69dab7f9ead755b5b0ec Mon Sep 17 00:00:00 2001 From: dehnert Date: Fri, 13 Dec 2013 17:00:51 +0100 Subject: [PATCH] Fixed minor bug in Markov automata time-bounded reachability. Former-commit-id: 6454223cd30b801095cd7cf85de29426b0fc0524 --- .../SparseMarkovAutomatonCslModelChecker.h | 50 ++++++++----------- src/storage/SparseMatrix.cpp | 9 +++- src/storage/SparseMatrix.h | 10 ---- src/storm.cpp | 2 +- src/utility/vector.h | 5 ++ 5 files changed, 36 insertions(+), 40 deletions(-) diff --git a/src/modelchecker/csl/SparseMarkovAutomatonCslModelChecker.h b/src/modelchecker/csl/SparseMarkovAutomatonCslModelChecker.h index 8c6a29477..dfdb520fd 100644 --- a/src/modelchecker/csl/SparseMarkovAutomatonCslModelChecker.h +++ b/src/modelchecker/csl/SparseMarkovAutomatonCslModelChecker.h @@ -77,7 +77,7 @@ namespace storm { return result; } - static void computeBoundedReachability(bool min, storm::storage::SparseMatrix const& transitionMatrix, std::vector const& nondeterministicChoiceIndices, std::vector const& exitRates, storm::storage::BitVector const& markovianStates, storm::storage::BitVector const& goalStates, storm::storage::BitVector const& markovianNonGoalStates, storm::storage::BitVector const& probabilisticNonGoalStates, std::vector& markovianNonGoalValues, std::vector& probabilisticNonGoalValues, ValueType delta, uint_fast64_t numberOfSteps) { + static void computeBoundedReachabilityProbabilities(bool min, storm::storage::SparseMatrix const& transitionMatrix, std::vector const& nondeterministicChoiceIndices, std::vector const& exitRates, storm::storage::BitVector const& markovianStates, storm::storage::BitVector const& goalStates, storm::storage::BitVector const& markovianNonGoalStates, storm::storage::BitVector const& probabilisticNonGoalStates, std::vector& markovianNonGoalValues, std::vector& probabilisticNonGoalValues, ValueType delta, uint_fast64_t numberOfSteps) { // Start by computing four sparse matrices: // * a matrix aMarkovian with 
all (discretized) transitions from Markovian non-goal states to all Markovian non-goal states. // * a matrix aMarkovianToProbabilistic with all (discretized) transitions from Markovian non-goal states to all probabilistic non-goal states. @@ -108,12 +108,12 @@ namespace storm { // Digitize aMarkovianToProbabilistic. As there are no self-loops in this case, we only need to apply the digitization formula for regular successors. rowIndex = 0; for (auto state : markovianNonGoalStates) { - for (auto element : aMarkovianToProbabilistic.getRow(rowIndex)) { + for (auto& element : aMarkovianToProbabilistic.getRow(rowIndex)) { element.second = (1 - std::exp(-exitRates[state] * delta)) * element.second; } ++rowIndex; } - + // Initialize the two vectors that hold the variable one-step probabilities to all target states for probabilistic and Markovian (non-goal) states. std::vector bProbabilistic(aProbabilistic.getRowCount()); std::vector bMarkovian(markovianNonGoalStates.getNumberOfSetBits()); @@ -125,7 +125,7 @@ namespace storm { for (auto state : markovianNonGoalStates) { bMarkovianFixed.push_back(storm::utility::constantZero()); - for (auto element : transitionMatrix.getRow(nondeterministicChoiceIndices[state])) { + for (auto& element : transitionMatrix.getRow(nondeterministicChoiceIndices[state])) { if (goalStates.get(element.first)) { bMarkovianFixed.back() += (1 - std::exp(-exitRates[state] * delta)) * element.second; } @@ -160,9 +160,9 @@ namespace storm { } // After the loop, perform one more step of the value iteration for PS states. 
- aProbabilisticToMarkovian.multiplyWithVector(probabilisticNonGoalValues, bProbabilistic); + aProbabilisticToMarkovian.multiplyWithVector(markovianNonGoalValues, bProbabilistic); storm::utility::vector::addVectorsInPlace(bProbabilistic, bProbabilisticFixed); - nondeterministiclinearEquationSolver->solveEquationSystem(min, aProbabilistic, probabilisticNonGoalValues, bProbabilistic, probabilisticNondeterministicChoiceIndices); + nondeterministiclinearEquationSolver->solveEquationSystem(min, aProbabilistic, probabilisticNonGoalValues, bProbabilistic, probabilisticNondeterministicChoiceIndices, &multiplicationResultScratchMemory, &aProbabilisticScratchMemory); } std::vector checkTimeBoundedEventually(bool min, storm::storage::BitVector const& goalStates, ValueType lowerBound, ValueType upperBound) const { @@ -198,7 +198,7 @@ namespace storm { std::vector vProbabilistic(probabilisticNonGoalStates.getNumberOfSetBits()); std::vector vMarkovian(markovianNonGoalStates.getNumberOfSetBits()); - computeBoundedReachability(min, transitionMatrix, nondeterministicChoiceIndices, exitRates, markovianStates, goalStates, markovianNonGoalStates, probabilisticNonGoalStates, vMarkovian, vProbabilistic, delta, numberOfSteps); + computeBoundedReachabilityProbabilities(min, transitionMatrix, nondeterministicChoiceIndices, exitRates, markovianStates, goalStates, markovianNonGoalStates, probabilisticNonGoalStates, vMarkovian, vProbabilistic, delta, numberOfSteps); // (4) If the lower bound of interval was non-zero, we need to take the current values as the starting values for a subsequent value iteration. if (lowerBound != storm::utility::constantZero()) { @@ -214,10 +214,9 @@ namespace storm { // Compute the number of steps to reach the target interval. numberOfSteps = static_cast(std::ceil(lowerBound / delta)); std::cout << "Performing " << numberOfSteps << " iterations (delta=" << delta << ") for interval [0, " << lowerBound << "]." 
<< std::endl; - // FIXME: According to IMCA, the value of delta needs to be recomputed here, but using the equations from the source this doesn't make sense, because they always evaluate to the original value of delta. // Compute the bounded reachability for interval [0, b-a]. - computeBoundedReachability(min, transitionMatrix, nondeterministicChoiceIndices, exitRates, markovianStates, storm::storage::BitVector(this->getModel().getNumberOfStates()), markovianStates, ~markovianStates, vAllMarkovian, vAllProbabilistic, delta, numberOfSteps); + computeBoundedReachabilityProbabilities(min, transitionMatrix, nondeterministicChoiceIndices, exitRates, markovianStates, storm::storage::BitVector(this->getModel().getNumberOfStates()), markovianStates, ~markovianStates, vAllMarkovian, vAllProbabilistic, delta, numberOfSteps); // Create the result vector out of vAllProbabilistic and vAllMarkovian and return it. std::vector result(this->getModel().getNumberOfStates()); @@ -279,7 +278,7 @@ namespace storm { // Compute the LRA value for the current MEC. lraValuesForEndComponents.push_back(this->computeLraForMaximalEndComponent(min, transitionMatrix, nondeterministicChoiceIndices, this->getModel().getMarkovianStates(), this->getModel().getExitRates(), goalStates, mec, currentMecIndex)); } - + // For fast transition rewriting, we build some auxiliary data structures. storm::storage::BitVector statesNotContainedInAnyMec = ~statesInMecs; uint_fast64_t firstAuxiliaryStateIndex = statesNotContainedInAnyMec.getNumberOfSetBits(); @@ -299,8 +298,7 @@ namespace storm { std::vector b; std::vector sspNondeterministicChoiceIndices; sspNondeterministicChoiceIndices.reserve(numberOfStatesNotInMecs + mecDecomposition.size() + 1); - typename storm::storage::SparseMatrix sspMatrix; - sspMatrix.initialize(); + typename storm::storage::SparseMatrixBuilder sspMatrixBuilder; // If the source state is not contained in any MEC, we copy its choices (and perform the necessary modifications). 
uint_fast64_t currentChoice = 0; @@ -311,11 +309,10 @@ namespace storm { std::vector auxiliaryStateToProbabilityMap(mecDecomposition.size()); b.push_back(storm::utility::constantZero()); - typename storm::storage::SparseMatrix::Rows row = transitionMatrix.getRow(choice); - for (auto element : row) { + for (auto element : transitionMatrix.getRow(choice)) { if (statesNotContainedInAnyMec.get(element.first)) { // If the target state is not contained in an MEC, we can copy over the entry. - sspMatrix.insertNextValue(currentChoice, statesNotInMecsBeforeIndex[element.first], element.second, true); + sspMatrixBuilder.addNextValue(currentChoice, statesNotInMecsBeforeIndex[element.first], element.second); } else { // If the target state is contained in MEC i, we need to add the probability to the corresponding field in the vector // so that we are able to write the cumulative probability to the MEC into the matrix. @@ -326,7 +323,7 @@ namespace storm { // Now insert all (cumulative) probability values that target an MEC. for (uint_fast64_t mecIndex = 0; mecIndex < auxiliaryStateToProbabilityMap.size(); ++mecIndex) { if (auxiliaryStateToProbabilityMap[mecIndex] != 0) { - sspMatrix.insertNextValue(currentChoice, firstAuxiliaryStateIndex + mecIndex, auxiliaryStateToProbabilityMap[mecIndex], true); + sspMatrixBuilder.addNextValue(currentChoice, firstAuxiliaryStateIndex + mecIndex, auxiliaryStateToProbabilityMap[mecIndex]); } } } @@ -348,11 +345,10 @@ namespace storm { if (choicesInMec.find(choice) == choicesInMec.end()) { b.push_back(storm::utility::constantZero()); - typename storm::storage::SparseMatrix::Rows row = transitionMatrix.getRow(choice); - for (auto element : row) { + for (auto element : transitionMatrix.getRow(choice)) { if (statesNotContainedInAnyMec.get(element.first)) { // If the target state is not contained in an MEC, we can copy over the entry. 
- sspMatrix.insertNextValue(currentChoice, statesNotInMecsBeforeIndex[element.first], element.second, true); + sspMatrixBuilder.addNextValue(currentChoice, statesNotInMecsBeforeIndex[element.first], element.second); } else { // If the target state is contained in MEC i, we need to add the probability to the corresponding field in the vector // so that we are able to write the cumulative probability to the MEC into the matrix. @@ -369,7 +365,7 @@ namespace storm { b.back() += auxiliaryStateToProbabilityMap[mecIndex] * lraValuesForEndComponents[mecIndex]; } else { // Otherwise, we add a transition to the auxiliary state that is associated with the target MEC. - sspMatrix.insertNextValue(currentChoice, firstAuxiliaryStateIndex + targetMecIndex, auxiliaryStateToProbabilityMap[targetMecIndex], true); + sspMatrixBuilder.addNextValue(currentChoice, firstAuxiliaryStateIndex + targetMecIndex, auxiliaryStateToProbabilityMap[targetMecIndex]); } } } @@ -380,7 +376,6 @@ namespace storm { } // For each auxiliary state, there is the option to achieve the reward value of the LRA associated with the MEC. - sspMatrix.insertEmptyRow(true); ++currentChoice; b.push_back(lraValuesForEndComponents[mecIndex]); } @@ -389,7 +384,8 @@ namespace storm { sspNondeterministicChoiceIndices.push_back(currentChoice); // Finalize the matrix and solve the corresponding system of equations. 
- sspMatrix.finalize(); + storm::storage::SparseMatrix sspMatrix = sspMatrixBuilder.build(currentChoice + 1); + std::vector x(numberOfStatesNotInMecs + mecDecomposition.size()); nondeterministicLinearEquationSolver->solveEquationSystem(min, sspMatrix, x, b, sspNondeterministicChoiceIndices); @@ -454,8 +450,7 @@ namespace storm { variables.push_back(stateToVariableIndexMap.at(state)); coefficients.push_back(1); - typename storm::storage::SparseMatrix::Rows row = transitionMatrix.getRow(nondeterministicChoiceIndices[state]); - for (auto element : row) { + for (auto element : transitionMatrix.getRow(nondeterministicChoiceIndices[state])) { variables.push_back(stateToVariableIndexMap.at(element.first)); coefficients.push_back(-element.second); } @@ -474,8 +469,7 @@ namespace storm { variables.push_back(stateToVariableIndexMap.at(state)); coefficients.push_back(1); - typename storm::storage::SparseMatrix::Rows row = transitionMatrix.getRow(choice); - for (auto element : row) { + for (auto element : transitionMatrix.getRow(choice)) { variables.push_back(stateToVariableIndexMap.at(element.first)); coefficients.push_back(-element.second); } @@ -524,7 +518,7 @@ namespace storm { // Finally, if this union is non-empty, compute the states such that all schedulers reach some state of the union. if (!unionOfNonGoalBSccs.empty()) { - infinityStates = storm::utility::graph::performProbGreater0A(this->getModel(), this->getModel().getBackwardTransitions(), storm::storage::BitVector(this->getModel().getNumberOfStates(), true), unionOfNonGoalBSccs); + infinityStates = storm::utility::graph::performProbGreater0A(this->getModel().getTransitionMatrix(), this->getModel().getNondeterministicChoiceIndices(), this->getModel().getBackwardTransitions(), storm::storage::BitVector(this->getModel().getNumberOfStates(), true), unionOfNonGoalBSccs); } else { // Otherwise, we have no infinity states. 
 infinityStates = storm::storage::BitVector(this->getModel().getNumberOfStates()); @@ -545,7 +539,7 @@ namespace storm { if (!unionOfNonGoalMaximalEndComponents.empty()) { // Now we need to check for which states there exists a scheduler that reaches one of the previously computed states. - infinityStates = storm::utility::graph::performProbGreater0E(this->getModel(), this->getModel().getBackwardTransitions(), storm::storage::BitVector(this->getModel().getNumberOfStates(), true), unionOfNonGoalMaximalEndComponents); + infinityStates = storm::utility::graph::performProbGreater0E(this->getModel().getTransitionMatrix(), this->getModel().getNondeterministicChoiceIndices(), this->getModel().getBackwardTransitions(), storm::storage::BitVector(this->getModel().getNumberOfStates(), true), unionOfNonGoalMaximalEndComponents); } else { // Otherwise, we have no infinity states. infinityStates = storm::storage::BitVector(this->getModel().getNumberOfStates()); diff --git a/src/storage/SparseMatrix.cpp b/src/storage/SparseMatrix.cpp index d30412c24..1ba121550 100644 --- a/src/storage/SparseMatrix.cpp +++ b/src/storage/SparseMatrix.cpp @@ -1,5 +1,12 @@ #include +// To detect whether the usage of TBB is possible, this include is necessary +#include "storm-config.h" + +#ifdef STORM_HAVE_INTELTBB +#include "tbb/tbb.h" +#endif + #include "src/storage/SparseMatrix.h" #include "src/exceptions/InvalidStateException.h" @@ -661,7 +668,7 @@ namespace storm { template void SparseMatrix::multiplyWithVector(std::vector const& vector, std::vector& result) const { #ifdef STORM_HAVE_INTELTBB - tbb::parallel_for(tbb::blocked_range(0, result.size()), + tbb::parallel_for(tbb::blocked_range(0, result.size(), 10), [&] (tbb::blocked_range const& range) { uint_fast64_t startRow = range.begin(); uint_fast64_t endRow = range.end(); diff --git a/src/storage/SparseMatrix.h b/src/storage/SparseMatrix.h index 7f62ee16f..587a138bb 100644 --- a/src/storage/SparseMatrix.h +++ b/src/storage/SparseMatrix.h @@ 
-1,16 +1,6 @@ #ifndef STORM_STORAGE_SPARSEMATRIX_H_ #define STORM_STORAGE_SPARSEMATRIX_H_ -// To detect whether the usage of TBB is possible, this include is neccessary -#include "storm-config.h" - -#ifdef STORM_HAVE_INTELTBB -# include "utility/OsDetection.h" // This fixes a potential dependency ordering problem between GMM and TBB -# include -# include "tbb/tbb.h" -# include -#endif - #include #include #include diff --git a/src/storm.cpp b/src/storm.cpp index 0ffd18d92..c69418bc4 100644 --- a/src/storm.cpp +++ b/src/storm.cpp @@ -467,7 +467,7 @@ int main(const int argc, const char* argv[]) { // std::cout << mc.checkExpectedTime(false, markovAutomaton->getLabeledStates("goal")) << std::endl; // std::cout << mc.checkLongRunAverage(true, markovAutomaton->getLabeledStates("goal")) << std::endl; // std::cout << mc.checkLongRunAverage(false, markovAutomaton->getLabeledStates("goal")) << std::endl; -// std::cout << mc.checkTimeBoundedEventually(true, markovAutomaton->getLabeledStates("goal"), 0, 1) << std::endl; + std::cout << mc.checkTimeBoundedEventually(true, markovAutomaton->getLabeledStates("goal"), 0, 1) << std::endl; std::cout << mc.checkTimeBoundedEventually(true, markovAutomaton->getLabeledStates("goal"), 1, 2) << std::endl; break; } diff --git a/src/utility/vector.h b/src/utility/vector.h index 7f22a52cd..a514efed2 100644 --- a/src/utility/vector.h +++ b/src/utility/vector.h @@ -1,6 +1,11 @@ #ifndef STORM_UTILITY_VECTOR_H_ #define STORM_UTILITY_VECTOR_H_ +#include "storm-config.h" +#ifdef STORM_HAVE_INTELTBB +#include "tbb/tbb.h" +#endif + #include "constants.h" #include #include