
Added simple infinite horizon helper for the hybrid engine.
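The helper answers long-run average queries on symbolic (DD-based) nondeterministic models by translating the model into a sparse representation via an ODD and delegating to the SparseNondeterministicInfiniteHorizonHelper. Based on the model checker changes below, intended usage looks roughly as follows (a sketch only; `model`, `checkTask`, `env`, and `psiStates` are assumed to be given as in the diff):

    storm::modelchecker::helper::HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType> helper(model, model.getTransitionMatrix(), model.getMarkovianStates(), model.getExitRateVector());
    storm::modelchecker::helper::setInformationFromCheckTaskNondeterministic(helper, checkTask, model);
    auto result = helper.computeLongRunAverageProbabilities(env, psiStates);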

tempestpy_adaptions
Tim Quatmann 4 years ago
parent
commit aabe3ce776
  1. src/storm/modelchecker/csl/HybridMarkovAutomatonCslModelChecker.cpp (11 changes)
  2. src/storm/modelchecker/helper/infinitehorizon/HybridNondeterministicInfiniteHorizonHelper.cpp (99 changes)
  3. src/storm/modelchecker/helper/infinitehorizon/HybridNondeterministicInfiniteHorizonHelper.h (64 changes)

src/storm/modelchecker/csl/HybridMarkovAutomatonCslModelChecker.cpp (11 changes)

@@ -5,6 +5,8 @@
#include "storm/modelchecker/csl/helper/SparseMarkovAutomatonCslHelper.h"
#include "storm/modelchecker/csl/helper/HybridMarkovAutomatonCslHelper.h"
#include "storm/modelchecker/prctl/helper/HybridMdpPrctlHelper.h"
#include "storm/modelchecker/helper/infinitehorizon/HybridNondeterministicInfiniteHorizonHelper.h"
#include "storm/modelchecker/helper/utility/SetInformationFromCheckTask.h"
#include "storm/modelchecker/results/SymbolicQualitativeCheckResult.h"
@@ -105,15 +107,18 @@ namespace storm {
    SymbolicQualitativeCheckResult<DdType> const& subResult = subResultPointer->asSymbolicQualitativeCheckResult<DdType>();
    STORM_LOG_THROW(checkTask.isOptimizationDirectionSet(), storm::exceptions::InvalidPropertyException, "Formula needs to specify whether minimal or maximal values are to be computed on nondeterministic model.");
    return storm::modelchecker::helper::HybridMarkovAutomatonCslHelper::computeLongRunAverageProbabilities(env, checkTask.getOptimizationDirection(), this->getModel(), this->getModel().getTransitionMatrix(), this->getModel().getMarkovianStates(), this->getModel().getExitRateVector(), subResult.getTruthValuesVector());
    storm::modelchecker::helper::HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType> helper(this->getModel(), this->getModel().getTransitionMatrix(), this->getModel().getMarkovianStates(), this->getModel().getExitRateVector());
    storm::modelchecker::helper::setInformationFromCheckTaskNondeterministic(helper, checkTask, this->getModel());
    return helper.computeLongRunAverageProbabilities(env, subResult.getTruthValuesVector());
}

template<typename ModelType>
std::unique_ptr<CheckResult> HybridMarkovAutomatonCslModelChecker<ModelType>::computeLongRunAverageRewards(Environment const& env, storm::logic::RewardMeasureType rewardMeasureType, CheckTask<storm::logic::LongRunAverageRewardFormula, ValueType> const& checkTask) {
    STORM_LOG_THROW(checkTask.isOptimizationDirectionSet(), storm::exceptions::InvalidPropertyException, "Formula needs to specify whether minimal or maximal values are to be computed on nondeterministic model.");
    auto rewardModel = storm::utility::createFilteredRewardModel(this->getModel(), checkTask);
    return storm::modelchecker::helper::HybridMarkovAutomatonCslHelper::computeLongRunAverageRewards(env, checkTask.getOptimizationDirection(), this->getModel(), this->getModel().getTransitionMatrix(), this->getModel().getMarkovianStates(), this->getModel().getExitRateVector(), rewardModel.get());
    storm::modelchecker::helper::HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType> helper(this->getModel(), this->getModel().getTransitionMatrix(), this->getModel().getMarkovianStates(), this->getModel().getExitRateVector());
    storm::modelchecker::helper::setInformationFromCheckTaskNondeterministic(helper, checkTask, this->getModel());
    return helper.computeLongRunAverageRewards(env, rewardModel.get());
}

// Explicitly instantiate the model checker.

src/storm/modelchecker/helper/infinitehorizon/HybridNondeterministicInfiniteHorizonHelper.cpp (99 changes)

@@ -0,0 +1,99 @@
#include "HybridNondeterministicInfiniteHorizonHelper.h"
#include "storm/modelchecker/helper/infinitehorizon/SparseNondeterministicInfiniteHorizonHelper.h"
#include "storm/modelchecker/helper/utility/SetInformationFromOtherHelper.h"
#include "storm/utility/macros.h"
#include "storm/exceptions/NotSupportedException.h"
namespace storm {
namespace modelchecker {
namespace helper {
template <typename ValueType, storm::dd::DdType DdType>
HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType>::HybridNondeterministicInfiniteHorizonHelper(storm::models::symbolic::NondeterministicModel<DdType, ValueType> const& model, storm::dd::Add<DdType, ValueType> const& transitionMatrix) : _model(model), _transitionMatrix(transitionMatrix), _markovianStates(nullptr), _exitRates(nullptr) {
    // Intentionally left empty.
}

template <typename ValueType, storm::dd::DdType DdType>
HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType>::HybridNondeterministicInfiniteHorizonHelper(storm::models::symbolic::NondeterministicModel<DdType, ValueType> const& model, storm::dd::Add<DdType, ValueType> const& transitionMatrix, storm::dd::Bdd<DdType> const& markovianStates, storm::dd::Add<DdType, ValueType> const& exitRateVector) : _model(model), _transitionMatrix(transitionMatrix), _markovianStates(&markovianStates), _exitRates(&exitRateVector) {
    // Intentionally left empty.
}

template <typename ValueType, storm::dd::DdType DdType>
std::unique_ptr<HybridQuantitativeCheckResult<DdType, ValueType>> HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType>::computeLongRunAverageProbabilities(Environment const& env, storm::dd::Bdd<DdType> const& psiStates) {
    // Convert this query to an instance for the sparse engine.
    // Create ODD for the translation.
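    // The ODD maps the symbolic (DD-based) state encoding to explicit state indices and is used for all translations below.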
    storm::dd::Odd odd = _model.getReachableStates().createOdd();
    storm::storage::SparseMatrix<ValueType> explicitTransitionMatrix = _transitionMatrix.toMatrix(_model.getNondeterminismVariables(), odd, odd);

    std::unique_ptr<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>> sparseHelper;
    std::vector<ValueType> explicitExitRateVector;
    storm::storage::BitVector explicitMarkovianStates;
    if (isContinuousTime()) {
        explicitExitRateVector = _exitRates->toVector(odd);
        explicitMarkovianStates = _markovianStates->toVector(odd);
        sparseHelper = std::make_unique<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>>(explicitTransitionMatrix, explicitMarkovianStates, explicitExitRateVector);
    } else {
        sparseHelper = std::make_unique<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>>(explicitTransitionMatrix);
    }
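    // Transfer the settings of this helper to the sparse helper; the lambda translates symbolic state sets into explicit BitVectors via the ODD.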
    storm::modelchecker::helper::setInformationFromOtherHelperNondeterministic(*sparseHelper, *this, [&odd](storm::dd::Bdd<DdType> const& s){ return s.toVector(odd); });
    STORM_LOG_WARN_COND(!this->isProduceSchedulerSet(), "Scheduler extraction not supported in Hybrid engine.");
    auto explicitResult = sparseHelper->computeLongRunAverageProbabilities(env, psiStates.toVector(odd));
    return std::make_unique<HybridQuantitativeCheckResult<DdType, ValueType>>(_model.getReachableStates(), _model.getManager().getBddZero(), _model.getManager().template getAddZero<ValueType>(), _model.getReachableStates(), std::move(odd), std::move(explicitResult));
}

template <typename ValueType, storm::dd::DdType DdType>
std::unique_ptr<HybridQuantitativeCheckResult<DdType, ValueType>> HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType>::computeLongRunAverageRewards(Environment const& env, storm::models::symbolic::StandardRewardModel<DdType, ValueType> const& rewardModel) {
    // Convert this query to an instance for the sparse engine.
    // Create ODD for the translation.
    storm::dd::Odd odd = _model.getReachableStates().createOdd();

    // Create matrix and reward vectors
    storm::storage::SparseMatrix<ValueType> explicitTransitionMatrix;
    std::vector<ValueType> explicitStateRewards, explicitActionRewards;
    if (rewardModel.hasStateRewards()) {
        explicitStateRewards = rewardModel.getStateRewardVector().toVector(odd);
    }
    if (rewardModel.hasStateActionRewards()) {
        // Matrix and action-based vector have to be produced at the same time to guarantee the correct order
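        // toMatrixVector creates both from the same enumeration of rows (choices), so the reward entries line up with the rows of the sparse matrix.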
        auto matrixRewards = _transitionMatrix.toMatrixVector(rewardModel.getStateActionRewardVector(), _model.getNondeterminismVariables(), odd, odd);
        explicitTransitionMatrix = std::move(matrixRewards.first);
        explicitActionRewards = std::move(matrixRewards.second);
    } else {
        // Translate matrix only
        explicitTransitionMatrix = _transitionMatrix.toMatrix(_model.getNondeterminismVariables(), odd, odd);
    }
    STORM_LOG_THROW(!rewardModel.hasTransitionRewards(), storm::exceptions::NotSupportedException, "Transition rewards are not supported in this engine.");

    // Create remaining components and helper
    std::vector<ValueType> explicitExitRateVector;
    storm::storage::BitVector explicitMarkovianStates;
    std::unique_ptr<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>> sparseHelper;
    if (isContinuousTime()) {
        explicitExitRateVector = _exitRates->toVector(odd);
        explicitMarkovianStates = _markovianStates->toVector(odd);
        sparseHelper = std::make_unique<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>>(explicitTransitionMatrix, explicitMarkovianStates, explicitExitRateVector);
    } else {
        sparseHelper = std::make_unique<storm::modelchecker::helper::SparseNondeterministicInfiniteHorizonHelper<ValueType>>(explicitTransitionMatrix);
    }
    storm::modelchecker::helper::setInformationFromOtherHelperNondeterministic(*sparseHelper, *this, [&odd](storm::dd::Bdd<DdType> const& s){ return s.toVector(odd); });
    STORM_LOG_WARN_COND(!this->isProduceSchedulerSet(), "Scheduler extraction not supported in Hybrid engine.");
    auto explicitResult = sparseHelper->computeLongRunAverageValues(env, rewardModel.hasStateRewards() ? &explicitStateRewards : nullptr, rewardModel.hasStateActionRewards() ? &explicitActionRewards : nullptr);
    return std::make_unique<HybridQuantitativeCheckResult<DdType, ValueType>>(_model.getReachableStates(), _model.getManager().getBddZero(), _model.getManager().template getAddZero<ValueType>(), _model.getReachableStates(), std::move(odd), std::move(explicitResult));
}

template <typename ValueType, storm::dd::DdType DdType>
bool HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType>::isContinuousTime() const {
    STORM_LOG_ASSERT((_markovianStates == nullptr) == (_exitRates == nullptr), "Inconsistent information given: Have Markovian states but no exit rates (or vice versa).");
    return _markovianStates != nullptr;
}

template class HybridNondeterministicInfiniteHorizonHelper<double, storm::dd::DdType::CUDD>;
template class HybridNondeterministicInfiniteHorizonHelper<double, storm::dd::DdType::Sylvan>;
template class HybridNondeterministicInfiniteHorizonHelper<storm::RationalNumber, storm::dd::DdType::Sylvan>;
}
}
}

src/storm/modelchecker/helper/infinitehorizon/HybridNondeterministicInfiniteHorizonHelper.h (64 changes)

@@ -0,0 +1,64 @@
#pragma once
#include "storm/modelchecker/helper/SingleValueModelCheckerHelper.h"
#include "storm/modelchecker/results/HybridQuantitativeCheckResult.h"
#include "storm/models/symbolic/NondeterministicModel.h"
#include "storm/models/symbolic/StandardRewardModel.h"
#include "storm/storage/dd/DdManager.h"
#include "storm/storage/dd/Add.h"
#include "storm/storage/dd/Bdd.h"
namespace storm {
class Environment;
namespace modelchecker {
namespace helper {
/*!
* Helper class for model checking queries that depend on the long run behavior of the (nondeterministic) system.
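* Internally, a query is answered by translating the symbolic (DD-based) model into a sparse representation via an ODD and delegating the computation to the SparseNondeterministicInfiniteHorizonHelper.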
*/
template <typename ValueType, storm::dd::DdType DdType>
class HybridNondeterministicInfiniteHorizonHelper : public SingleValueModelCheckerHelper<ValueType, DdType> {
public:
    /*!
     * Initializes the helper for discrete-time models (i.e. MDPs).
     */
    HybridNondeterministicInfiniteHorizonHelper(storm::models::symbolic::NondeterministicModel<DdType, ValueType> const& model, storm::dd::Add<DdType, ValueType> const& transitionMatrix);

    /*!
     * Initializes the helper for continuous-time models (i.e. Markov automata).
     */
    HybridNondeterministicInfiniteHorizonHelper(storm::models::symbolic::NondeterministicModel<DdType, ValueType> const& model, storm::dd::Add<DdType, ValueType> const& transitionMatrix, storm::dd::Bdd<DdType> const& markovianStates, storm::dd::Add<DdType, ValueType> const& exitRateVector);

    /*!
     * Computes the long run average probabilities, i.e., the fraction of the time we are in a psiState.
     * @return a value for each state
     */
    std::unique_ptr<HybridQuantitativeCheckResult<DdType, ValueType>> computeLongRunAverageProbabilities(Environment const& env, storm::dd::Bdd<DdType> const& psiStates);

    /*!
     * Computes the long run average rewards, i.e., the average reward collected per time unit.
     * @return a value for each state
     */
    std::unique_ptr<HybridQuantitativeCheckResult<DdType, ValueType>> computeLongRunAverageRewards(Environment const& env, storm::models::symbolic::StandardRewardModel<DdType, ValueType> const& rewardModel);

protected:
    /*!
     * @return true iff this is a computation on a continuous time model (i.e. MA)
     */
    bool isContinuousTime() const;

private:
    storm::models::symbolic::NondeterministicModel<DdType, ValueType> const& _model;
    storm::dd::Add<DdType, ValueType> const& _transitionMatrix;
    storm::dd::Bdd<DdType> const* _markovianStates;
    storm::dd::Add<DdType, ValueType> const* _exitRates;
};

}
}
}
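
For the discrete-time case, only the model and its transition matrix ADD are passed to the constructor. A minimal sketch of that usage, assuming a symbolic MDP `mdp`, a check task `checkTask`, an environment `env`, and a symbolic reward model `rewardModel` (these names are illustrative, not part of the commit):

    storm::modelchecker::helper::HybridNondeterministicInfiniteHorizonHelper<ValueType, DdType> helper(mdp, mdp.getTransitionMatrix());
    storm::modelchecker::helper::setInformationFromCheckTaskNondeterministic(helper, checkTask, mdp);
    auto result = helper.computeLongRunAverageRewards(env, rewardModel);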