7 changed files with 0 additions and 1798 deletions

- 387  src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsSimplexChecker.h
- 93   src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.cpp
- 39   src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.h
- 777  src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicParetoExplorer.cpp
- 245  src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicParetoExplorer.h
- 192  src/storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.cpp
- 65   src/storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.h
src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsSimplexChecker.h
@@ -1,387 +0,0 @@
#pragma once

#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include "storm/storage/geometry/Polytope.h"
#include "storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.h"
#include "storm/storage/expressions/Expressions.h"
#include "storm/utility/solver.h"
#include "storm/solver/LpSolver.h"
#include "storm/modelchecker/results/ExplicitQuantitativeCheckResult.h"
#include "storm/modelchecker/results/ExplicitQualitativeCheckResult.h"
#include "storm/modelchecker/propositional/SparsePropositionalModelChecker.h"
#include "storm/modelchecker/prctl/SparseDtmcPrctlModelChecker.h"
#include "storm/modelchecker/csl/SparseCtmcCslModelChecker.h"
#include "storm/storage/BitVector.h"
#include "storm/utility/graph.h"
#include "storm/utility/vector.h"
#include "storm/utility/Stopwatch.h"

namespace storm {

    class Environment;

    namespace modelchecker {
        namespace multiobjective {

            /*!
             * Represents a set of points in Euclidean space.
             * The set is defined as the union of the polytopes at the leaves of the tree.
             * The polytope at an inner node is always the convex union of its children.
             * The sets described by the children of a node are disjoint.
             * A child is always non-empty, i.e., isEmpty() should only hold for the root node.
             * @tparam GeometryValueType
             */
            template <typename GeometryValueType>
            class PolytopeTree {
                typedef typename std::shared_ptr<storm::storage::geometry::Polytope<GeometryValueType>> Polytope;
                typedef typename std::vector<GeometryValueType> Point;

            public:
                PolytopeTree(Polytope const& polytope) : polytope(polytope) {}

                /*!
                 * Subtracts the given rhs from this polytope.
                 */
                void setMinus(Polytope const& rhs) {
                    // This operation only has an effect if the intersection of this and rhs is non-empty.
                    if (!isEmpty() && !polytope->intersection(rhs)->isEmpty()) {
                        if (children.empty()) {
                            // This is a leaf node. Apply splitting.
                            auto newChildren = polytope->setMinus(rhs);
                            if (newChildren.empty()) {
                                // Delete this node.
                                polytope = nullptr;
                            } else if (newChildren.size() == 1) {
                                // Replace this node with its only child.
                                polytope = newChildren.front()->clean();
                            } else {
                                // Add the new children to this node. There is no need to traverse them.
                                for (auto& c : newChildren) {
                                    children.push_back(c->clean());
                                }
                            }
                        } else {
                            // This is an inner node. Traverse the children and set this to the convex union of its children.
                            std::vector<PolytopeTree<GeometryValueType>> newChildren;
                            Polytope newPolytope = nullptr;
                            for (auto& c : children) {
                                c.setMinus(rhs);
                                if (c.polytope != nullptr) {
                                    newChildren.push_back(c);
                                    if (newPolytope) {
                                        newPolytope = newPolytope->convexUnion(c.polytope);
                                    } else {
                                        newPolytope = c.polytope;
                                    }
                                }
                            }
                            polytope = newPolytope; // nullptr, if no children are left
                            children = std::move(newChildren);
                        }
                    }
                }

                void substractDownwardClosure(Point const& point, GeometryValueType const& eps) {
                    std::vector<GeometryValueType> pointPlusEps;
                    for (auto const& coordinate : point) {
                        pointPlusEps.push_back(coordinate + eps);
                    }
                    auto downwardOfPoint = storm::storage::geometry::Polytope<GeometryValueType>::createDownwardClosure({pointPlusEps});
                    setMinus(downwardOfPoint);
                }

                bool isEmpty() const {
                    return polytope == nullptr;
                }

                void clear() {
                    children.clear();
                    polytope = nullptr;
                }

                Polytope getPolytope() const {
                    return polytope;
                }

                std::vector<PolytopeTree>& getChildren() {
                    return children;
                }

                std::string toString() {
                    std::stringstream s;
                    s << "PolytopeTree node with " << getChildren().size() << " children: " << getPolytope()->toString(true) << std::endl << "Vertices: ";
                    auto vertices = getPolytope()->getVertices();
                    for (auto const& v : vertices) {
                        s << "[";
                        for (auto const& vi : v) {
                            s << storm::utility::convertNumber<double>(vi) << ",";
                        }
                        s << "]\t";
                    }
                    s << std::endl;
                    return s.str();
                }

            private:
                Polytope polytope;
                std::vector<PolytopeTree<GeometryValueType>> children;
            };
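
            // Illustrative sketch (comment added for clarity, not part of the original header). A PolytopeTree
            // starts as a single polytope and is successively thinned out as achievable points are found.
            // The variable names below are hypothetical:
            //
            //   PolytopeTree<storm::RationalNumber> remaining(initialSimplex);   // region still to be explored
            //   remaining.substractDownwardClosure(foundPoint, eps);             // drop what foundPoint covers (up to eps)
            //   if (!remaining.isEmpty()) {
            //       for (auto& child : remaining.getChildren()) { /* recurse into the disjoint leaf polytopes */ }
            //   }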

            template <typename ModelType, typename GeometryValueType>
            class DetSchedsSimplexChecker {
            public:

                typedef typename ModelType::ValueType ValueType;
                typedef typename std::shared_ptr<storm::storage::geometry::Polytope<GeometryValueType>> Polytope;
                typedef typename std::vector<GeometryValueType> Point;

                DetSchedsSimplexChecker(std::shared_ptr<MultiObjectiveSchedulerEvaluator<ModelType>> const& schedulerEvaluator) : schedulerEvaluator(schedulerEvaluator) {
                    init();
                }

                ~DetSchedsSimplexChecker() {
                    std::cout << "SIMPLEX CHECKER: " << swInit << " seconds for initialization" << std::endl;
                    std::cout << "SIMPLEX CHECKER: " << swCheck << " seconds for checking, including" << std::endl;
                    std::cout << "\t " << swLpBuild << " seconds for LP building" << std::endl;
                    std::cout << "\t " << swLpSolve << " seconds for LP solving" << std::endl;
                    std::cout << "SIMPLEX CHECKER: " << swAux << " seconds for aux stuff" << std::endl;
                }

                std::pair<std::vector<Point>, std::vector<Polytope>> check(storm::Environment const& env, std::vector<GeometryValueType> const& weightVector, PolytopeTree<GeometryValueType>& polytopeTree, GeometryValueType const& eps) {
                    std::cout << "Checking a Simplex with weight vector " << storm::utility::vector::toString(weightVector) << std::endl << " and root " << polytopeTree.toString() << std::endl << "\t";
                    if (polytopeTree.isEmpty()) {
                        return {{}, {}};
                    }
                    swCheck.start();

                    swLpBuild.start();
                    lpModel->push();
                    currentObjectiveVariables.clear();

                    // Set up the objective function for the given weight vector.
                    for (uint64_t objIndex = 0; objIndex < initialStateResults.size(); ++objIndex) {
                        currentObjectiveVariables.push_back(lpModel->addUnboundedContinuousVariable("w_" + std::to_string(objIndex), storm::utility::convertNumber<ValueType>(weightVector[objIndex])));
                        lpModel->addConstraint("", currentObjectiveVariables.back().getExpression() == initialStateResults[objIndex]);
                    }
                    lpModel->update();
                    swLpBuild.stop();

                    auto result = checkRecursive(weightVector, polytopeTree, eps);

                    swLpBuild.start();
                    lpModel->pop();
                    lpModel->update();
                    swLpBuild.stop();
                    swCheck.stop();
                    std::cout << " done!" << std::endl;
                    return result;
                }
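
                // Note (comment added for clarity, not in the original): each variable "w_<j>" created above is
                // constrained to equal the initial-state value of objective j and carries weightVector[j] as its
                // objective coefficient, so the LP maximizes  sum_j weightVector[j] * x_j(initial state)  subject
                // to the scheduler constraints built in init().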

            private:

                std::pair<std::vector<Point>, std::vector<Polytope>> checkRecursive(std::vector<GeometryValueType> const& weightVector, PolytopeTree<GeometryValueType>& polytopeTree, GeometryValueType const& eps) {
                    std::cout << ".";
                    std::cout.flush();
                    STORM_LOG_ASSERT(!polytopeTree.isEmpty(), "Tree node is empty");
                    STORM_LOG_ASSERT(!polytopeTree.getPolytope()->isEmpty(), "Tree node is empty.");
                    STORM_LOG_TRACE("Checking " << polytopeTree.toString());

                    auto vertices = polytopeTree.getPolytope()->getVertices();

                    std::vector<Point> foundPoints;
                    std::vector<Polytope> infeasableAreas;

                    swLpBuild.start();
                    lpModel->push();
                    // Assert the constraints of the current polytope.
                    auto nodeConstraints = polytopeTree.getPolytope()->getConstraints(lpModel->getManager(), currentObjectiveVariables);
                    for (auto const& constr : nodeConstraints) {
                        lpModel->addConstraint("", constr);
                    }
                    lpModel->update();
                    swLpBuild.stop();

                    if (polytopeTree.getChildren().empty()) {
                        // At leaf nodes we need to perform the actual check.
                        swLpSolve.start();
                        lpModel->optimize();
                        swLpSolve.stop();

                        if (lpModel->isInfeasible()) {
                            infeasableAreas.push_back(polytopeTree.getPolytope());
                            polytopeTree.clear();
                        } else {
                            STORM_LOG_ASSERT(!lpModel->isUnbounded(), "LP result is unbounded.");
                            Point newPoint;
                            for (auto const& objVar : currentObjectiveVariables) {
                                newPoint.push_back(storm::utility::convertNumber<GeometryValueType>(lpModel->getContinuousValue(objVar)));
                            }
                            auto halfspace = storm::storage::geometry::Halfspace<GeometryValueType>(weightVector, storm::utility::vector::dotProduct(weightVector, newPoint)).invert();
                            infeasableAreas.push_back(polytopeTree.getPolytope()->intersection(halfspace));
                            if (infeasableAreas.back()->isEmpty()) {
                                infeasableAreas.pop_back();
                            }
                            swAux.start();
                            polytopeTree.setMinus(storm::storage::geometry::Polytope<GeometryValueType>::create({halfspace}));
                            foundPoints.push_back(newPoint);
                            polytopeTree.substractDownwardClosure(newPoint, eps);
                            swAux.stop();
                            if (!polytopeTree.isEmpty()) {
                                auto childRes = checkRecursive(weightVector, polytopeTree, eps);
                                foundPoints.insert(foundPoints.end(), childRes.first.begin(), childRes.first.end());
                                infeasableAreas.insert(infeasableAreas.end(), childRes.second.begin(), childRes.second.end());
                            }
                        }
                    } else {
                        // Traverse all the children.
                        for (uint64_t childId = 0; childId < polytopeTree.getChildren().size(); ++childId) {
                            auto childRes = checkRecursive(weightVector, polytopeTree.getChildren()[childId], eps);
                            STORM_LOG_ASSERT(polytopeTree.getChildren()[childId].isEmpty(), "expected empty children.");
                            // Make the results known to the right siblings.
                            for (auto const& newPoint : childRes.first) {
                                for (uint64_t siblingId = childId + 1; siblingId < polytopeTree.getChildren().size(); ++siblingId) {
                                    polytopeTree.getChildren()[siblingId].substractDownwardClosure(newPoint, eps);
                                }
                            }
                            foundPoints.insert(foundPoints.end(), childRes.first.begin(), childRes.first.end());
                            infeasableAreas.insert(infeasableAreas.end(), childRes.second.begin(), childRes.second.end());
                        }
                        // All children are empty now, so this node becomes empty as well.
                        polytopeTree.clear();
                    }
                    swLpBuild.start();
                    lpModel->pop();
                    swLpBuild.stop();
                    return {foundPoints, infeasableAreas};
                }
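
                // Reasoning behind the halfspace cut above (comment added for clarity, not in the original):
                // if the LP optimum for weight vector w within this leaf is the point p, then no achievable
                // point q in the leaf can satisfy w*q > w*p. The inverted halfspace therefore marks the part
                // of the leaf beyond the hyperplane w*x = w*p as infeasible, and the downward closure of p
                // (enlarged by eps in every coordinate) is removed from the remaining search region.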

                /* Todo
                ValueType getChoiceValueSummand(Objective<ValueType> const& objective, uint64_t choiceIndex) {
                    auto const& model = schedulerEvaluator->getModel();
                    storm::modelchecker::SparsePropositionalModelChecker<ModelType> mc(model);
                    auto const& formula = *objective.formula;
                    if (formula.isProbabilityOperatorFormula() && formula.getSubformula().isUntilFormula()) {
                        return storm::utility::zero<ValueType>();
                    } else if (formula.getSubformula().isEventuallyFormula() && (formula.isRewardOperatorFormula() || formula.isTimeOperatorFormula())) {
                        storm::storage::BitVector rew0States = mc.check(formula.getSubformula().asEventuallyFormula().getSubformula())->asExplicitQualitativeCheckResult().getTruthValuesVector();
                        if (formula.isRewardOperatorFormula()) {
                            auto const& rewModel = formula.asRewardOperatorFormula().hasRewardModelName() ? model.getRewardModel(formula.asRewardOperatorFormula().getRewardModelName()) : model.getUniqueRewardModel();
                            storm::storage::BitVector statesWithoutReward = rewModel.getStatesWithZeroReward(model.getTransitionMatrix());
                            rew0States = storm::utility::graph::performProb1A(model.getTransitionMatrix(), model.getNondeterministicChoiceIndices(), model.getBackwardTransitions(), statesWithoutReward, rew0States);
                        }
                        storm::utility::vector::setVectorValues(results[objIndex], rew0States, storm::utility::zero<ValueType>());
                        schedulerIndependentStates.push_back(std::move(rew0States));
                    } else if (formula.isRewardOperatorFormula() && formula.getSubformula().isTotalRewardFormula()) {
                        auto const& rewModel = formula.asRewardOperatorFormula().hasRewardModelName() ? model.getRewardModel(formula.asRewardOperatorFormula().getRewardModelName()) : model.getUniqueRewardModel();
                        storm::storage::BitVector statesWithoutReward = rewModel.getStatesWithZeroReward(model.getTransitionMatrix());
                        storm::storage::BitVector rew0States = storm::utility::graph::performProbGreater0E(model.getBackwardTransitions(), statesWithoutReward, ~statesWithoutReward);
                        rew0States.complement();
                        storm::utility::vector::setVectorValues(results[objIndex], rew0States, storm::utility::zero<ValueType>());
                        schedulerIndependentStates.push_back(std::move(rew0States));
                    } else {
                        STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "The given formula " << formula << " is not supported.");
                    }
                }*/

                void init() {
                    swInit.start();
                    auto const& model = schedulerEvaluator->getModel();
                    auto const& objectives = schedulerEvaluator->getObjectives();
                    uint64_t numStates = model.getNumberOfStates();
                    lpModel = storm::utility::solver::getLpSolver<ValueType>("model");
                    lpModel->setOptimizationDirection(storm::solver::OptimizationDirection::Maximize);
                    initialStateResults.clear();

                    auto one = lpModel->getConstant(storm::utility::one<ValueType>());

                    // Create choice variables and assert that at least one choice is taken at each state.
                    std::vector<storm::expressions::Expression> choiceVars;
                    choiceVars.reserve(model.getNumberOfChoices());
                    for (uint64_t state = 0; state < numStates; ++state) {
                        uint64_t numChoices = model.getNumberOfChoices(state);
                        if (numChoices == 1) {
                            choiceVars.emplace_back();
                        } else {
                            std::vector<storm::expressions::Expression> localChoices;
                            for (uint64_t choice = 0; choice < numChoices; ++choice) {
                                localChoices.push_back(lpModel->addBoundedIntegerVariable("c" + std::to_string(state) + "_" + std::to_string(choice), 0, 1).getExpression());
                            }
                            lpModel->addConstraint("", storm::expressions::sum(localChoices).reduceNesting() >= one);
                            choiceVars.insert(choiceVars.end(), localChoices.begin(), localChoices.end());
                        }
                    }

                    for (uint64_t objIndex = 0; objIndex < objectives.size(); ++objIndex) {
                        Objective<ValueType> const& objective = objectives[objIndex];
                        storm::storage::BitVector const& schedulerIndependentStates = schedulerEvaluator->getSchedulerIndependentStates(objIndex);
                        // Create state variables.
                        std::vector<storm::expressions::Expression> stateVars;
                        stateVars.reserve(numStates);
                        for (uint64_t state = 0; state < numStates; ++state) {
                            if (schedulerIndependentStates.get(state)) {
                                stateVars.push_back(lpModel->getConstant(schedulerEvaluator->getSchedulerIndependentStateResult(objIndex, state)));
                            } else {
                                stateVars.push_back(lpModel->addContinuousVariable("x" + std::to_string(objIndex) + "_" + std::to_string(state), objective.lowerResultBound, objective.upperResultBound).getExpression());
                            }
                            if (state == *model.getInitialStates().begin()) {
                                initialStateResults.push_back(stateVars.back());
                            }
                        }

                        // Create and assert choice values.
                        for (uint64_t state = 0; state < numStates; ++state) {
                            if (schedulerIndependentStates.get(state)) {
                                continue;
                            }
                            uint64_t numChoices = model.getNumberOfChoices(state);
                            for (uint64_t choice = 0; choice < numChoices; ++choice) {
                                storm::expressions::Expression choiceValue;
                                for (auto const& transition : model.getTransitionMatrix().getRow(state, choice)) {
                                    storm::expressions::Expression transitionValue = lpModel->getConstant(transition.getValue()) * stateVars[transition.getColumn()];
                                    if (choiceValue.isInitialized()) {
                                        choiceValue = choiceValue + transitionValue;
                                    } else {
                                        choiceValue = transitionValue;
                                    }
                                }
                                choiceValue = choiceValue.simplify().reduceNesting();
                                if (numChoices == 1) {
                                    lpModel->addConstraint("", stateVars[state] == choiceValue);
                                } else {
                                    uint64_t globalChoiceIndex = model.getTransitionMatrix().getRowGroupIndices()[state] + choice;
                                    storm::expressions::Expression maxDiff = lpModel->getConstant(objective.upperResultBound.get()) * (one - choiceVars[globalChoiceIndex]);
                                    lpModel->addConstraint("", stateVars[state] - choiceValue <= maxDiff);
                                    lpModel->addConstraint("", choiceValue - stateVars[state] <= maxDiff);
                                }
                            }
                        }
                    }
                    lpModel->update();
                    swInit.stop();
                }
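
                // Comment added for clarity (not in the original): the two inequalities above are a standard
                // big-M encoding with M = objective.upperResultBound. If the binary choice variable equals 1,
                // maxDiff becomes 0 and the constraints force  stateVars[state] == choiceValue; if it equals 0,
                // maxDiff equals the upper bound and both constraints are trivially satisfied.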

                std::shared_ptr<MultiObjectiveSchedulerEvaluator<ModelType>> schedulerEvaluator;

                std::unique_ptr<storm::solver::LpSolver<ValueType>> lpModel;
                std::vector<storm::expressions::Expression> initialStateResults;
                std::vector<storm::expressions::Variable> currentObjectiveVariables;

                storm::utility::Stopwatch swInit;
                storm::utility::Stopwatch swCheck;
                storm::utility::Stopwatch swLpSolve;
                storm::utility::Stopwatch swLpBuild;
                storm::utility::Stopwatch swAux;
            };

        }
    }
}
src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.cpp
@@ -1,93 +0,0 @@
#include "storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.h"

#include "storm/adapters/RationalNumberAdapter.h"
#include "storm/models/sparse/Mdp.h"
#include "storm/models/sparse/MarkovAutomaton.h"
#include "storm/models/sparse/StandardRewardModel.h"

namespace storm {
    namespace modelchecker {
        namespace multiobjective {

            template <typename ModelType>
            DetSchedsWeightVectorChecker<ModelType>::DetSchedsWeightVectorChecker(std::shared_ptr<MultiObjectiveSchedulerEvaluator<ModelType>> const& schedulerEvaluator) : schedulerEvaluator(schedulerEvaluator) {
                // Intentionally left empty.
            }

            template <typename ModelType>
            std::vector<std::vector<typename DetSchedsWeightVectorChecker<ModelType>::ValueType>> DetSchedsWeightVectorChecker<ModelType>::check(Environment const& env, std::vector<ValueType> const& weightVector) {
                std::vector<std::vector<ValueType>> resultStack;
                auto const& transitionMatrix = schedulerEvaluator->getModel().getTransitionMatrix();
                auto const& choiceIndices = schedulerEvaluator->getModel().getNondeterministicChoiceIndices();

                uint64_t const numObjectives = weightVector.size();

                // Perform policy iteration and store the intermediate results on the stack.
                do {
                    schedulerEvaluator->check(env);
                    resultStack.push_back(schedulerEvaluator->getInitialStateResults());

                    auto const& stateResults = schedulerEvaluator->getResults();

                    // Check whether the scheduler choices can be improved.
                    auto const& scheduler = schedulerEvaluator->getScheduler();
                    for (uint64_t state = 0; state < scheduler.size(); ++state) {
                        uint64_t choiceOffset = choiceIndices[state];
                        uint64_t numChoices = choiceIndices[state + 1] - choiceOffset;
                        uint64_t currChoice = scheduler[state];

                        ValueType currChoiceValue = storm::utility::zero<ValueType>();
                        for (uint64_t objIndex = 0; objIndex < numObjectives; ++objIndex) {
                            STORM_LOG_ASSERT(!storm::utility::isInfinity(stateResults[objIndex][state]), "Scheduler induces value infinity at a state.");
                            currChoiceValue += weightVector[objIndex] * stateResults[objIndex][state];
                        }

                        for (uint64_t choice = 0; choice < numChoices; ++choice) {
                            // Skip the currently selected choice.
                            if (choice == currChoice) {
                                continue;
                            }

                            ValueType choiceValue = storm::utility::zero<ValueType>();
                            for (uint64_t objIndex = 0; objIndex < numObjectives; ++objIndex) {
                                if (schedulerEvaluator->getSchedulerIndependentStates(objIndex).get(state)) {
                                    choiceValue += weightVector[objIndex] * stateResults[objIndex][state];
                                } else {
                                    ValueType objValue = storm::utility::zero<ValueType>();
                                    // TODO: Choice rewards?
                                    for (auto const& entry : transitionMatrix.getRow(choiceOffset + choice)) {
                                        objValue += entry.getValue() * stateResults[objIndex][entry.getColumn()];
                                    }
                                    choiceValue += weightVector[objIndex] * objValue;
                                }
                            }

                            // TODO: For non-exact solving this could 'close' end components, which can actually decrease the value.
                            if (choiceValue > currChoiceValue) {
                                schedulerEvaluator->setChoiceAtState(state, choice);
                            }
                        }
                    }
                } while (!schedulerEvaluator->hasCurrentSchedulerBeenChecked());
                return resultStack;
            }
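
            // Comment added for clarity (not in the original): this is plain policy iteration on the scalarized
            // objective. At each state s with current value  v(s) = sum_j w_j * x_j(s), an alternative choice a
            // is adopted whenever its one-step lookahead value  sum_j w_j * sum_{s'} P(s, a, s') * x_j(s')
            // exceeds v(s). The loop terminates once an evaluation round changes no choice, i.e., the scheduler
            // that was just checked is still the current one.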

            template <typename ModelType>
            std::vector<typename DetSchedsWeightVectorChecker<ModelType>::ValueType> const& DetSchedsWeightVectorChecker<ModelType>::getResultForAllStates(uint64_t objIndex) const {
                return schedulerEvaluator->getResultForObjective(objIndex);
            }

            template <typename ModelType>
            std::vector<uint64_t> const& DetSchedsWeightVectorChecker<ModelType>::getScheduler() const {
                return schedulerEvaluator->getScheduler();
            }

            template class DetSchedsWeightVectorChecker<storm::models::sparse::Mdp<double>>;
            template class DetSchedsWeightVectorChecker<storm::models::sparse::Mdp<storm::RationalNumber>>;
            template class DetSchedsWeightVectorChecker<storm::models::sparse::MarkovAutomaton<double>>;
            template class DetSchedsWeightVectorChecker<storm::models::sparse::MarkovAutomaton<storm::RationalNumber>>;

        }
    }
}
src/storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.h
@@ -1,39 +0,0 @@
#pragma once

#include <vector>

#include "storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.h"

namespace storm {

    class Environment;

    namespace modelchecker {
        namespace multiobjective {

            template <typename ModelType>
            class DetSchedsWeightVectorChecker {
            public:

                typedef typename ModelType::ValueType ValueType;

                DetSchedsWeightVectorChecker(std::shared_ptr<MultiObjectiveSchedulerEvaluator<ModelType>> const& schedulerEvaluator);

                /*!
                 * Optimizes the objectives in the given direction.
                 * Returns a sequence of points such that all points are achievable and the last point is the farthest point in the given direction.
                 * After calling this, getResultForAllStates and getScheduler yield results with respect to that last point.
                 */
                std::vector<std::vector<ValueType>> check(Environment const& env, std::vector<ValueType> const& weightVector);

                std::vector<ValueType> const& getResultForAllStates(uint64_t objIndex) const;
                std::vector<uint64_t> const& getScheduler() const;

            private:
                std::shared_ptr<MultiObjectiveSchedulerEvaluator<ModelType>> schedulerEvaluator;

            };
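
            // Illustrative sketch (comment added for clarity, not part of the original header); variable
            // names are hypothetical:
            //
            //   auto evaluator = std::make_shared<MultiObjectiveSchedulerEvaluator<ModelType>>(preprocessorResult);
            //   DetSchedsWeightVectorChecker<ModelType> checker(evaluator);
            //   auto points = checker.check(env, weightVector);    // points.back() is optimal for weightVector
            //   auto const& scheduler = checker.getScheduler();    // deterministic scheduler inducing points.back()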

        }
    }
}
src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicParetoExplorer.cpp
@@ -1,777 +0,0 @@
#include <sstream>
#include <algorithm>

#include "storm/modelchecker/multiobjective/deterministicScheds/DeterministicParetoExplorer.h"
#include "storm/storage/geometry/coordinates.h"
#include "storm/models/sparse/MarkovAutomaton.h"
#include "storm/models/sparse/Mdp.h"
#include "storm/models/sparse/StandardRewardModel.h"
#include "storm/modelchecker/multiobjective/MultiObjectivePostprocessing.h"
#include "storm/modelchecker/results/ExplicitParetoCurveCheckResult.h"
#include "storm/environment/modelchecker/MultiObjectiveModelCheckerEnvironment.h"

#include "storm/utility/export.h"
#include "storm/utility/solver.h"

#include "storm/exceptions/UnexpectedException.h"
#include "storm/exceptions/InvalidOperationException.h"

namespace storm {
    namespace modelchecker {
        namespace multiobjective {

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::Point(std::vector<GeometryValueType> const& coordinates) : coordinates(coordinates), paretoOptimal(false), onFacet(false) {
                STORM_LOG_ASSERT(!this->coordinates.empty(), "Points with dimension 0 are not supported");
            }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::Point(std::vector<GeometryValueType>&& coordinates) : coordinates(std::move(coordinates)), paretoOptimal(false), onFacet(false) {
                STORM_LOG_ASSERT(!this->coordinates.empty(), "Points with dimension 0 are not supported");
            }

            template <class SparseModelType, typename GeometryValueType>
            std::vector<GeometryValueType>& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::get() {
                return coordinates;
            }

            template <class SparseModelType, typename GeometryValueType>
            std::vector<GeometryValueType> const& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::get() const {
                return coordinates;
            }

            template <class SparseModelType, typename GeometryValueType>
            uint64_t DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::dimension() const {
                STORM_LOG_ASSERT(!coordinates.empty(), "Points with dimension 0 are not supported");
                return coordinates.size();
            }

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::DominanceResult DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::getDominance(Point const& other) const {
                STORM_LOG_ASSERT(this->dimension() == other.dimension(), "Non-Equal dimensions of points: [" << this->toString() << "] vs. [" << other.toString() << "]");
                auto thisIt = this->get().begin();
                auto otherIt = other.get().begin();
                auto thisItE = this->get().end();

                // Find the first entry where the points differ.
                while (*thisIt == *otherIt) {
                    ++thisIt;
                    ++otherIt;
                    if (thisIt == thisItE) {
                        return DominanceResult::Equal;
                    }
                }

                if (*thisIt > *otherIt) {
                    // *this might dominate other.
                    for (++thisIt, ++otherIt; thisIt != thisItE; ++thisIt, ++otherIt) {
                        if (*thisIt < *otherIt) {
                            return DominanceResult::Incomparable;
                        }
                    }
                    return DominanceResult::Dominates;
                } else {
                    assert(*thisIt < *otherIt);
                    // *this might be dominated by other.
                    for (++thisIt, ++otherIt; thisIt != thisItE; ++thisIt, ++otherIt) {
                        if (*thisIt > *otherIt) {
                            return DominanceResult::Incomparable;
                        }
                    }
                    return DominanceResult::Dominated;
                }
            }
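
            // Example (comment added for clarity, not in the original), for maximizing objectives:
            //   [2, 5].getDominance([2, 3]) == Dominates      (at least as good everywhere, strictly better somewhere)
            //   [2, 3].getDominance([2, 5]) == Dominated
            //   [2, 5].getDominance([3, 1]) == Incomparable   (better in one coordinate, worse in another)
            //   [2, 5].getDominance([2, 5]) == Equal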

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::setParetoOptimal(bool value) {
                paretoOptimal = value;
            }

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::isParetoOptimal() const {
                return paretoOptimal;
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::setOnFacet(bool value) {
                onFacet = value;
            }

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::liesOnFacet() const {
                return onFacet;
            }

            template <class SparseModelType, typename GeometryValueType>
            std::string DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point::toString(bool convertToDouble) const {
                std::stringstream out;
                bool first = true;
                for (auto const& pi : this->get()) {
                    if (first) {
                        first = false;
                    } else {
                        out << ", ";
                    }
                    if (convertToDouble) {
                        out << storm::utility::convertNumber<double>(pi);
                    } else {
                        out << pi;
                    }
                }
                return out.str();
            }

            // template <class SparseModelType, typename GeometryValueType>
            // bool operator<(typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point const& lhs, typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point const& rhs) {
            //     STORM_LOG_ASSERT(lhs.dimension() == rhs.dimension(), "Non-Equal dimensions of points: " << lhs << " vs. " << rhs);
            //     for (uint64_t i = 0; i < lhs.dimension(); ++i) {
            //         if (lhs.get()[i] < rhs.get()[i]) {
            //             return true;
            //         } else if (lhs.get()[i] != rhs.get()[i]) {
            //             return false;
            //         }
            //     }
            //     return false;
            // }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::Pointset() : currId(1) {
                // Intentionally left empty
            }

            template <class SparseModelType, typename GeometryValueType>
            boost::optional<typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::PointId> DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::addPoint(Environment const& env, Point&& point) {

                // Find dominated and dominating points.
                auto pointsIt = points.begin();
                while (pointsIt != points.end()) {
                    switch (point.getDominance(pointsIt->second)) {
                        case Point::DominanceResult::Incomparable:
                            // Nothing to be done for this point.
                            ++pointsIt;
                            break;
                        case Point::DominanceResult::Dominates:
                            // Found a point in the set that is dominated by the new point, so we erase it.
                            if (pointsIt->second.isParetoOptimal()) {
                                STORM_LOG_WARN("Potential precision issues: Found a point that dominates another point which was flagged as pareto optimal. Distance of points is " << std::sqrt(storm::utility::convertNumber<double>(storm::storage::geometry::squaredEuclideanDistance(pointsIt->second.get(), point.get()))));
                                point.setParetoOptimal(true);
                            }
                            if (pointsIt->second.liesOnFacet()) {
                                // Do not erase points that lie on a facet.
                                ++pointsIt;
                            } else {
                                pointsIt = points.erase(pointsIt);
                            }
                            break;
                        case Point::DominanceResult::Dominated:
                            // The new point is dominated by another point.
                            return boost::none;
                        case Point::DominanceResult::Equal:
                            if (point.isParetoOptimal()) {
                                pointsIt->second.setParetoOptimal();
                            }
                            if (point.liesOnFacet()) {
                                pointsIt->second.setOnFacet();
                            }
                            return pointsIt->first;
                    }
                }

                if (env.modelchecker().multi().isPrintResultsSet()) {
                    std::cout << "## achievable point: [" << point.toString(true) << "]" << std::endl;
                }

                points.emplace_hint(points.end(), currId, std::move(point));
                return currId++;
            }
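
            // Comment added for clarity (not in the original): addPoint keeps the set free of dominated
            // entries (except for points pinned to a facet), so the stored points always form the current
            // lower approximation of the Pareto front. The returned id is empty exactly when the candidate
            // point was dominated by an already known point.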

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Point const& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::getPoint(PointId const& id) const {
                return points.at(id);
            }

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::iterator_type DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::begin() const {
                return points.begin();
            }

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::iterator_type DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::end() const {
                return points.end();
            }

            template <class SparseModelType, typename GeometryValueType>
            uint64_t DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::size() const {
                return points.size();
            }

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Polytope DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::downwardClosure() const {
                std::vector<std::vector<GeometryValueType>> pointsAsVector;
                pointsAsVector.reserve(size());
                for (auto const& p : points) {
                    pointsAsVector.push_back(p.second.get());
                }
                return storm::storage::geometry::Polytope<GeometryValueType>::createDownwardClosure(std::move(pointsAsVector));
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::collectPointsInPolytope(std::set<PointId>& collectedPoints, Polytope const& polytope) {
                for (auto const& p : points) {
                    if (polytope->contains(p.second.get())) {
                        collectedPoints.insert(p.first);
                    }
                }
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Pointset::printToStream(std::ostream& out, bool includeIDs, bool convertToDouble) {
                for (auto const& p : this->points) {
                    if (includeIDs) {
                        out << p.first << ": [" << p.second.toString(convertToDouble) << "]" << std::endl;
                    } else {
                        out << p.second.toString(convertToDouble) << std::endl;
                    }
                }
            }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::Facet(storm::storage::geometry::Halfspace<GeometryValueType> const& halfspace) : halfspace(halfspace) {
                // Intentionally left empty
            }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::Facet(storm::storage::geometry::Halfspace<GeometryValueType>&& halfspace) : halfspace(std::move(halfspace)) {
                // Intentionally left empty
            }

            template <class SparseModelType, typename GeometryValueType>
            storm::storage::geometry::Halfspace<GeometryValueType> const& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::getHalfspace() const {
                return halfspace;
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::addPoint(PointId const& pointId, Point const& point) {
                inducedSimplex = nullptr;
                GeometryValueType product = storm::utility::vector::dotProduct(getHalfspace().normalVector(), point.get());
                if (product != getHalfspace().offset()) {
                    if (product < getHalfspace().offset()) {
                        STORM_LOG_DEBUG("The point on the facet actually has distance " << storm::utility::convertNumber<double>(getHalfspace().euclideanDistance(point.get())));
                    } else {
                        STORM_LOG_DEBUG("Halfspace of facet is shifted by " << storm::utility::convertNumber<double>(getHalfspace().euclideanDistance(point.get())) << " to capture all points that are supposed to lie on the facet.");
                        halfspace.offset() = product;
                    }
                }
                paretoPointsOnFacet.push_back(pointId);
            }

            template <class SparseModelType, typename GeometryValueType>
            std::vector<typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::PointId> const& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::getPoints() const {
                return paretoPointsOnFacet;
            }

            template <class SparseModelType, typename GeometryValueType>
            uint64_t DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::getNumberOfPoints() const {
                return paretoPointsOnFacet.size();
            }

            template <class SparseModelType, typename GeometryValueType>
            typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Polytope const& DeterministicParetoExplorer<SparseModelType, GeometryValueType>::Facet::getInducedSimplex(Pointset const& pointset, std::vector<GeometryValueType> const& referenceCoordinates) {
                if (!inducedSimplex) {
                    std::vector<std::vector<GeometryValueType>> vertices = {referenceCoordinates};
                    for (auto const& pId : paretoPointsOnFacet) {
                        vertices.push_back(pointset.getPoint(pId).get());
                    }
                    // This facet might lie at the 'border', which means that the downward closure has to be taken in some directions.
                    storm::storage::BitVector dimensionsForDownwardClosure = storm::utility::vector::filterZero(this->halfspace.normalVector());
                    STORM_LOG_ASSERT(dimensionsForDownwardClosure.getNumberOfSetBits() + vertices.size() >= halfspace.normalVector().size() + 1, "The number of points on the facet is insufficient");
                    if (dimensionsForDownwardClosure.empty()) {
                        inducedSimplex = storm::storage::geometry::Polytope<GeometryValueType>::create(vertices);
                    } else {
                        inducedSimplex = storm::storage::geometry::Polytope<GeometryValueType>::createSelectiveDownwardClosure(vertices, dimensionsForDownwardClosure);
                    }
                }
                return inducedSimplex;
            }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::FacetAnalysisContext::FacetAnalysisContext(Facet& f) : facet(f) {
                // Intentionally left empty
            }

            template <class SparseModelType, typename GeometryValueType>
            DeterministicParetoExplorer<SparseModelType, GeometryValueType>::DeterministicParetoExplorer(preprocessing::SparseMultiObjectivePreprocessorResult<SparseModelType>& preprocessorResult) : model(preprocessorResult.preprocessedModel), objectives(preprocessorResult.objectives) {
                originalModelInitialState = *preprocessorResult.originalModel.getInitialStates().begin();
                schedulerEvaluator = std::make_shared<MultiObjectiveSchedulerEvaluator<SparseModelType>>(preprocessorResult);
                weightVectorChecker = std::make_shared<DetSchedsWeightVectorChecker<SparseModelType>>(schedulerEvaluator);
                simplexChecker = std::make_shared<DetSchedsSimplexChecker<SparseModelType, GeometryValueType>>(schedulerEvaluator);
            }

            template <class SparseModelType, typename GeometryValueType>
            std::unique_ptr<CheckResult> DeterministicParetoExplorer<SparseModelType, GeometryValueType>::check(Environment const& env) {

                clean();
                initializeFacets(env);
                while (!unprocessedFacets.empty()) {
                    Facet f = std::move(unprocessedFacets.front());
                    unprocessedFacets.pop();
                    processFacet(env, f);
                }

                std::vector<std::vector<ModelValueType>> paretoPoints;
                paretoPoints.reserve(pointset.size());
                for (auto const& p : pointset) {
                    paretoPoints.push_back(storm::utility::vector::convertNumericVector<ModelValueType>(transformObjectiveValuesToOriginal(objectives, p.second.get())));
                }
                return std::make_unique<storm::modelchecker::ExplicitParetoCurveCheckResult<ModelValueType>>(originalModelInitialState, std::move(paretoPoints), nullptr, nullptr);
            }
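
            // Comment added for clarity (not in the original): check() drives the whole exploration. It seeds
            // the facet queue with the downward closure of the single-objective optima (initializeFacets) and
            // then refines one facet at a time (processFacet) until no facet with a potentially unexplored
            // region remains. The collected points are finally mapped back to the value range of the original,
            // unpreprocessed model.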

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::clean() {
                pointset = Pointset();
                unprocessedFacets = std::queue<Facet>();
                overApproximation = storm::storage::geometry::Polytope<GeometryValueType>::createUniversalPolytope();
                unachievableAreas.clear();
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::addHalfspaceToOverApproximation(Environment const& env, std::vector<GeometryValueType> const& normalVector, Point const& pointOnHalfspace) {
                GeometryValueType offset = storm::utility::vector::dotProduct(normalVector, pointOnHalfspace.get());
                if (env.modelchecker().multi().isPrintResultsSet()) {
                    std::cout << "## unachievable halfspace: [";
                    bool first = true;
                    for (auto const& xi : normalVector) {
                        if (first) {
                            first = false;
                        } else {
                            std::cout << ",";
                        }
                        std::cout << storm::utility::convertNumber<double>(xi);
                    }
                    std::cout << "];[" << storm::utility::convertNumber<double>(offset) << "]" << std::endl;
                }
                storm::storage::geometry::Halfspace<GeometryValueType> overApproxHalfspace(normalVector, offset);
                overApproximation = overApproximation->intersection(overApproxHalfspace);
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::addUnachievableArea(Environment const& env, Polytope const& area) {
                if (env.modelchecker().multi().isPrintResultsSet()) {
                    std::vector<std::vector<GeometryValueType>> vertices;
                    if (objectives.size() == 2) {
                        vertices = area->getVerticesInClockwiseOrder();
                    } else {
                        vertices = area->getVertices();
                    }
                    std::cout << "## unachievable polytope: ";
                    bool firstVertex = true;
                    for (auto const& v : vertices) {
                        if (firstVertex) {
                            firstVertex = false;
                        } else {
                            std::cout << ";";
                        }
                        std::cout << "[";
                        bool firstEntry = true;
                        for (auto const& vi : v) {
                            if (firstEntry) {
                                firstEntry = false;
                            } else {
                                std::cout << ",";
                            }
                            std::cout << storm::utility::convertNumber<double>(vi);
                        }
                        std::cout << "]";
                    }
                    std::cout << std::endl;
                }
                unachievableAreas.push_back(area);
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::initializeFacets(Environment const& env) {
                for (uint64_t objIndex = 0; objIndex < objectives.size(); ++objIndex) {
                    std::vector<ModelValueType> weightVector(objectives.size(), storm::utility::zero<ModelValueType>());
                    if (storm::solver::minimize(objectives[objIndex].formula->getOptimalityType())) {
                        weightVector[objIndex] = -storm::utility::one<ModelValueType>();
                    } else {
                        weightVector[objIndex] = storm::utility::one<ModelValueType>();
                    }
                    auto points = weightVectorChecker->check(env, weightVector);
                    bool last = true;
                    for (auto pIt = points.rbegin(); pIt != points.rend(); ++pIt) {
                        for (uint64_t objIndex = 0; objIndex < this->objectives.size(); ++objIndex) {
                            if (storm::solver::minimize(objectives[objIndex].formula->getOptimalityType())) {
                                (*pIt)[objIndex] *= -storm::utility::one<ModelValueType>();
                            }
                        }
                        Point p(storm::utility::vector::convertNumericVector<GeometryValueType>(*pIt));
                        if (last) {
                            last = false;
                            p.setOnFacet();
                            // Adapt the overapproximation.
                            std::vector<GeometryValueType> normalVector(objectives.size(), storm::utility::zero<GeometryValueType>());
                            normalVector[objIndex] = storm::utility::one<GeometryValueType>();
                            addHalfspaceToOverApproximation(env, normalVector, p);
                        }
                        pointset.addPoint(env, std::move(p));
                    }
                }

                auto initialHalfspaces = pointset.downwardClosure()->getHalfspaces();
                for (auto& h : initialHalfspaces) {
                    Facet f(std::move(h));
                    for (auto const& p : pointset) {
                        if (f.getHalfspace().isPointOnBoundary(p.second.get())) {
                            f.addPoint(p.first, p.second);
                        }
                    }
                    STORM_LOG_ASSERT(std::count(f.getHalfspace().normalVector().begin(), f.getHalfspace().normalVector().end(), storm::utility::zero<GeometryValueType>()) + f.getNumberOfPoints() == objectives.size(), "Unexpected number of points on facet.");
                    if (!checkFacetPrecision(env, f)) {
                        unprocessedFacets.push(std::move(f));
                    }
                }
            }
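
            // Comment added for clarity (not in the original): the initial facets are the halfspaces of the
            // downward closure of the points obtained by optimizing each objective individually (minimizing
            // objectives are handled by negating their weight and their result). Every facet remembers the
            // known Pareto points lying on its boundary; only facets that are not yet precise enough are queued.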
|
||||
|
|
||||
template <class SparseModelType, typename GeometryValueType> |
|
||||
std::vector<GeometryValueType> DeterministicParetoExplorer<SparseModelType, GeometryValueType>::getReferenceCoordinates() const { |
|
||||
std::vector<GeometryValueType> result; |
|
||||
for (auto const& obj : schedulerEvaluator->getObjectives()) { |
|
||||
ModelValueType value = storm::solver::minimize(obj.formula->getOptimalityType()) ? obj.upperResultBound.get() : obj.lowerResultBound.get(); |
|
||||
result.push_back(storm::utility::convertNumber<GeometryValueType>(value)); |
|
||||
} |
|
||||
return result; |
|
||||
} |
|
||||
|
|
||||
|
|
||||
template <class SparseModelType, typename GeometryValueType> |
|
||||
bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::checkFacetPrecision(Environment const& env, Facet& f) { |
|
||||
// TODO:
|
|
||||
return false; |
|
||||
/*
|
|
||||
auto const& inducedSimplex = f.getInducedSimplex(pointset); |
|
||||
|
|
||||
GeometryValueType eps = storm::utility::convertNumber<GeometryValueType>(env.modelchecker().multi().getPrecision()); |
|
||||
// get a polytope that contains exactly the points y, such that y+eps is in the induced simplex
|
|
||||
std::vector<GeometryValueType> offsetVector(objectives.size(), -eps); |
|
||||
auto shiftedSimplex = inducedSimplex->shift(offsetVector); |
|
||||
|
|
||||
// If the intersection of both polytopes is empty, it means that there can not be a point y in the simplex
|
|
||||
// such that y-eps is also in the simplex, i.e., the facet is already precise enough.
|
|
||||
return inducedSimplex->intersection(shiftedSimplex)->isEmpty(); |
|
||||
*/ |
|
||||
} |
|
||||
|
|
||||
template <class SparseModelType, typename GeometryValueType> |
|
||||
bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::checkFacetPrecision(Environment const& env, Facet& f, std::set<PointId> const& collectedSimplexPoints) { |
|
||||
assert(false); |
|
||||
return false; |
|
||||
} |
|
||||
|
|
||||
template <class SparseModelType, typename GeometryValueType> |
|
||||
void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::processFacet(Environment const& env, Facet& f) { |
|
||||
if (optimizeAndSplitFacet(env,f)) { |
|
||||
return; |
|
||||
} |
|
||||
|
|
||||
GeometryValueType eps = storm::utility::convertNumber<GeometryValueType>(env.modelchecker().multi().getPrecision()); |
|
||||
eps += eps; // The unknown area (box) can actually have size 2*eps
|
|
||||
PolytopeTree<GeometryValueType> polytopeTree(f.getInducedSimplex(pointset, getReferenceCoordinates())); |
|
||||
for (auto const& point : pointset) { |
|
||||
polytopeTree.substractDownwardClosure(point.second.get(), eps); |
|
||||
if (polytopeTree.isEmpty()) { |
|
||||
break; |
|
||||
} |
|
||||
} |
|
||||
if (!polytopeTree.isEmpty()) { |
|
||||
auto res = simplexChecker->check(env, f.getHalfspace().normalVector(), polytopeTree, eps); |
|
||||
for (auto const& infeasableArea : res.second) { |
|
||||
addUnachievableArea(env, infeasableArea); |
|
||||
} |
|
||||
for (auto& achievablePoint : res.first) { |
|
||||
pointset.addPoint(env, Point(std::move(achievablePoint))); |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
/*
|
|
||||
FacetAnalysisContext context = createAnalysisContext(env, f); |
|
||||
|
|
||||
if (findAndCheckCachedPoints(env, context)) { |
|
||||
return; |
|
||||
} |
|
||||
|
|
||||
if (analyzePointsOnFacet(env, context)) { |
|
||||
return; |
|
||||
} |
|
||||
|
|
||||
if (analyzePointsInSimplex(env, context)) { |
|
||||
return; |
|
||||
} |
|
||||
*/ |
|
||||
// Reaching this point means that the facet could not be analyzed completely.
|
|
||||
//STORM_LOG_ERROR("Facet " << f.getHalfspace().toString(true) << " could not be analyzed completely.");
|
|
||||
} |
|
||||
|
|
||||
template <class SparseModelType, typename GeometryValueType> |
|
||||
typename DeterministicParetoExplorer<SparseModelType, GeometryValueType>::FacetAnalysisContext DeterministicParetoExplorer<SparseModelType, GeometryValueType>::createAnalysisContext(Environment const& env, Facet& f) { |
|
||||
|
|
||||
FacetAnalysisContext res(f); |
|
||||
/*
|
|
||||
res.expressionManager = std::make_shared<storm::expressions::ExpressionManager>(); |
|
||||
res.smtSolver = storm::utility::solver::SmtSolverFactory().create(*res.expressionManager); |
|
||||
|
|
||||
Polytope const& inducedPoly = res.facet.getInducedSimplex(pointset); |
|
||||
|
|
||||
res.x = inducedPoly->declareVariables(*res.expressionManager, "x"); |
|
||||
for (auto const& c : inducedPoly->getConstraints(*res.expressionManager, res.x)) { |
|
||||
res.smtSolver->add(c); |
|
||||
} |
|
||||
|
|
||||
res.xMinusEps = inducedPoly->declareVariables(*res.expressionManager, "y"); |
|
||||
for (auto const& c : inducedPoly->getConstraints(*res.expressionManager, res.xMinusEps)) { |
|
||||
res.smtSolver->add(c); |
|
||||
} |
|
||||
|
|
||||
auto eps = res.expressionManager->rational(env.modelchecker().multi().getPrecision()); |
|
||||
storm::expressions::Expression xme; |
|
||||
for (uint64_t i = 0; i < res.x.size(); ++i) { |
|
||||
storm::expressions::Expression subExpr = (res.xMinusEps[i].getExpression() == res.x[i].getExpression() - eps); |
|
||||
if (i == 0) { |
|
||||
xme = subExpr; |
|
||||
} else { |
|
||||
xme = xme && subExpr; |
|
||||
} |
|
||||
} |
|
||||
res.smtSolver->add(xme); |
|
||||
*/ |
|
||||
return res; |
|
||||
} |
|
||||

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::optimizeAndSplitFacet(Environment const& env, Facet& f) {
                // Obtain the correct weight vector
                auto weightVector = storm::utility::vector::convertNumericVector<ModelValueType>(f.getHalfspace().normalVector());
                bool weightVectorYieldsParetoOptimalPoint = !storm::utility::vector::hasZeroEntry(weightVector);
                for (uint64_t objIndex = 0; objIndex < this->objectives.size(); ++objIndex) {
                    if (storm::solver::minimize(objectives[objIndex].formula->getOptimalityType())) {
                        weightVector[objIndex] *= -storm::utility::one<ModelValueType>();
                    }
                }

                // Invoke optimization and insert the explored points
                boost::optional<PointId> optPointId;
                auto points = weightVectorChecker->check(env, weightVector);
                bool last = true;
                for (auto pIt = points.rbegin(); pIt != points.rend(); ++pIt) {
                    for (uint64_t objIndex = 0; objIndex < this->objectives.size(); ++objIndex) {
                        if (storm::solver::minimize(objectives[objIndex].formula->getOptimalityType())) {
                            (*pIt)[objIndex] *= -storm::utility::one<ModelValueType>();
                        }
                    }
                    Point p(storm::utility::vector::convertNumericVector<GeometryValueType>(*pIt));
                    if (last) {
                        last = false;
                        p.setParetoOptimal(weightVectorYieldsParetoOptimalPoint);
                        p.setOnFacet();
                        addHalfspaceToOverApproximation(env, f.getHalfspace().normalVector(), p);
                        optPointId = pointset.addPoint(env, std::move(p));
                    } else {
                        pointset.addPoint(env, std::move(p));
                    }
                }

                // Potentially generate new facets
                if (optPointId) {
                    auto const& optPoint = pointset.getPoint(*optPointId);
                    if (f.getHalfspace().contains(optPoint.get())) {
                        // The point is contained in the halfspace, which means that no more splitting is possible.
                        return false;
                    } else {
                        // Found a new Pareto optimal point -> generate new facets
                        std::vector<std::vector<GeometryValueType>> vertices;
                        vertices.push_back(optPoint.get());
                        for (auto const& pId : f.getPoints()) {
                            vertices.push_back(pointset.getPoint(pId).get());
                        }
                        auto newHalfspaceCandidates = storm::storage::geometry::Polytope<GeometryValueType>::createSelectiveDownwardClosure(vertices, storm::utility::vector::filterZero(f.getHalfspace().normalVector()))->getHalfspaces();
                        for (auto& h : newHalfspaceCandidates) {
                            if (!storm::utility::vector::hasNegativeEntry(h.normalVector())) {
                                STORM_LOG_ASSERT(h.isPointOnBoundary(optPoint.get()), "Unexpected facet found while splitting.");
                                Facet fNew(std::move(h));
                                fNew.addPoint(optPointId.get(), optPoint);
                                auto vertexIt = vertices.begin();
                                // Skip the first vertex: it is the new optimal point, which has already been added.
                                ++vertexIt;
                                for (auto const& pId : f.getPoints()) {
                                    assert(pointset.getPoint(pId).get() == *vertexIt);
                                    if (fNew.getHalfspace().isPointOnBoundary(*vertexIt)) {
                                        fNew.addPoint(pId, pointset.getPoint(pId));
                                    }
                                    ++vertexIt;
                                }
                                assert(vertexIt == vertices.end());
                                if (!checkFacetPrecision(env, fNew)) {
                                    unprocessedFacets.push(std::move(fNew));
                                }
                            }
                        }
                        return true;
                    }
                } else {
                    // If the 'optimal point' was dominated by an existing point, we cannot split the facet any further.
                    return false;
                }
            }
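
For context on the weightVectorYieldsParetoOptimalPoint flag used above: when the weight vector has a zero entry, a weighted-sum optimum is not necessarily Pareto optimal, because dominated points can score exactly as well. A tiny standalone example (plain C++, not Storm code) illustrating this effect:

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
    // Weight vector with a zero entry: the second objective does not influence the score.
    std::vector<double> weights = {1.0, 0.0};
    // (1, 5) is dominated by (1, 7), yet both maximize the weighted sum.
    std::vector<std::vector<double>> points = {{1.0, 5.0}, {1.0, 7.0}};
    for (auto const& p : points) {
        double weightedSum = 0.0;
        for (std::size_t i = 0; i < p.size(); ++i) {
            weightedSum += weights[i] * p[i];
        }
        std::cout << "(" << p[0] << ", " << p[1] << ") scores " << weightedSum << "\n";
    }
    // Both points score 1.0, so a weighted-sum oracle may return either of them.
    return 0;
}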

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::addNewSimplexPoint(FacetAnalysisContext& context, PointId const& pointId, bool performCheck) {
                auto const& coordinates = pointset.getPoint(pointId).get();
                storm::expressions::Expression pointAchievesXMinusEps;
                for (uint64_t i = 0; i < coordinates.size(); ++i) {
                    storm::expressions::Expression subExpr = context.xMinusEps[i] <= context.expressionManager->rational(coordinates[i]);
                    if (i == 0) {
                        pointAchievesXMinusEps = subExpr;
                    } else {
                        pointAchievesXMinusEps = pointAchievesXMinusEps && subExpr;
                    }
                }
                context.smtSolver->add(!pointAchievesXMinusEps);
                if (performCheck) {
                    auto smtCheckResult = context.smtSolver->check();
                    if (smtCheckResult == storm::solver::SmtSolver::CheckResult::Unsat) {
                        // For all points x, there is a cached point that dominates or is equal to (x-eps):
                        // the negated constraints '!pointAchievesXMinusEps' added above cannot all hold simultaneously.
                        return true;
                    } else {
                        STORM_LOG_THROW(smtCheckResult == storm::solver::SmtSolver::CheckResult::Sat, storm::exceptions::UnexpectedException, "The smt solver did not yield sat or unsat.");
                        // There is a point x such that (x-eps) is not dominated by or equal to a cached point.
                        return false;
                    }
                } else {
                    return false;
                }
            }
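
The constraints added above state that no cached point achieves (x - eps); the Unsat case therefore certifies that every point x of the induced simplex is covered, up to eps, by some cached point. A minimal standalone sketch of that coverage property for a single candidate x (plain C++, no SMT solver; all names here are illustrative and not Storm API):

#include <cstddef>
#include <vector>

// Returns true iff some cached point p satisfies p[i] >= x[i] - eps in every dimension,
// i.e., p dominates or equals (x - eps). The Unsat case above certifies this for all x in
// the induced simplex at once; the Sat case corresponds to a witness x where this fails.
bool coveredUpToEps(std::vector<std::vector<double>> const& cachedPoints,
                    std::vector<double> const& x, double eps) {
    for (auto const& p : cachedPoints) {
        bool achieves = true;
        for (std::size_t i = 0; i < x.size(); ++i) {
            if (p[i] < x[i] - eps) {
                achieves = false;
                break;
            }
        }
        if (achieves) {
            return true;
        }
    }
    return false;
}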

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::findAndCheckCachedPoints(Environment const& env, FacetAnalysisContext& context) {
                /*
                Polytope inducedPoly = context.facet.getInducedSimplex(pointset);
                pointset.collectPointsInPolytope(context.collectedPoints, inducedPoly);
                uint64_t numNewPoints = context.collectedPoints.size();
                STORM_LOG_ASSERT(numNewPoints >= context.facet.getNumberOfPoints(), "Did not find all points on the facet");
                // return true iff for all points x there is a cached point that dominates or is equal to (x-eps).
                for (auto const& pId : context.collectedPoints) {
                    --numNewPoints;
                    if (numNewPoints == 0) {
                        return addNewSimplexPoint(context, pId, true);
                    } else {
                        addNewSimplexPoint(context, pId, false);
                    }
                }
                */
                STORM_LOG_THROW(false, storm::exceptions::UnexpectedException, "Reached code that should be unreachable...");
                return false;
            }

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::analyzePointsOnFacet(Environment const& env, FacetAnalysisContext& context) {
                // Enumerate all points on the facet by creating a sub-MDP.
                // TODO: Enumerate them using the scheduler evaluator, i.e., create a class similar to the weight vector checker.
                return false;
            }

            template <class SparseModelType, typename GeometryValueType>
            bool DeterministicParetoExplorer<SparseModelType, GeometryValueType>::analyzePointsInSimplex(Environment const& env, FacetAnalysisContext& context) {
                auto const& pointIds = context.facet.getPoints();
                std::vector<typename DetSchedsSimplexChecker<SparseModelType, GeometryValueType>::Point> pointsOnFacet;
                pointsOnFacet.reserve(pointIds.size());
                for (auto const& pId : pointIds) {
                    pointsOnFacet.push_back(pointset.getPoint(pId).get());
                }

                // simplexChecker->setSimplex(context.facet.getInducedSimplex(pointset), context.facet.getHalfspace().normalVector(), pointsOnFacet);
                // for (auto const& pointInSimplex : context.collectedPoints) {
                //     simplexChecker->addAchievablePoint(pointset.getPoint(pointInSimplex).get());
                // }

                return false;
            }

            template <class SparseModelType, typename GeometryValueType>
            void DeterministicParetoExplorer<SparseModelType, GeometryValueType>::exportPlotOfCurrentApproximation(Environment const& env) {
                /*
                STORM_LOG_ERROR_COND(objectives.size() == 2, "Exporting plot requested but this is only implemented for the two-dimensional case.");

                auto transformedUnderApprox = transformPolytopeToOriginalModel(underApproximation);
                auto transformedOverApprox = transformPolytopeToOriginalModel(overApproximation);

                // Get the Pareto points as well as a hyperrectangle that is used to guarantee that the resulting polytopes are bounded.
                storm::storage::geometry::Hyperrectangle<GeometryValueType> boundaries(std::vector<GeometryValueType>(objectives.size(), storm::utility::zero<GeometryValueType>()), std::vector<GeometryValueType>(objectives.size(), storm::utility::zero<GeometryValueType>()));
                std::vector<std::vector<GeometryValueType>> paretoPoints;
                paretoPoints.reserve(refinementSteps.size());
                for (auto const& step : refinementSteps) {
                    paretoPoints.push_back(transformPointToOriginalModel(step.lowerBoundPoint));
                    boundaries.enlarge(paretoPoints.back());
                }
                auto underApproxVertices = transformedUnderApprox->getVertices();
                for (auto const& v : underApproxVertices) {
                    boundaries.enlarge(v);
                }
                auto overApproxVertices = transformedOverApprox->getVertices();
                for (auto const& v : overApproxVertices) {
                    boundaries.enlarge(v);
                }

                // Further enlarge the boundaries a little
                storm::utility::vector::scaleVectorInPlace(boundaries.lowerBounds(), GeometryValueType(15) / GeometryValueType(10));
                storm::utility::vector::scaleVectorInPlace(boundaries.upperBounds(), GeometryValueType(15) / GeometryValueType(10));

                auto boundariesAsPolytope = boundaries.asPolytope();
                std::vector<std::string> columnHeaders = {"x", "y"};

                std::vector<std::vector<double>> pointsForPlotting;
                if (env.modelchecker().multi().getPlotPathUnderApproximation()) {
                    underApproxVertices = transformedUnderApprox->intersection(boundariesAsPolytope)->getVerticesInClockwiseOrder();
                    pointsForPlotting.reserve(underApproxVertices.size());
                    for (auto const& v : underApproxVertices) {
                        pointsForPlotting.push_back(storm::utility::vector::convertNumericVector<double>(v));
                    }
                    storm::utility::exportDataToCSVFile<double, std::string>(env.modelchecker().multi().getPlotPathUnderApproximation().get(), pointsForPlotting, columnHeaders);
                }

                if (env.modelchecker().multi().getPlotPathOverApproximation()) {
                    pointsForPlotting.clear();
                    overApproxVertices = transformedOverApprox->intersection(boundariesAsPolytope)->getVerticesInClockwiseOrder();
                    pointsForPlotting.reserve(overApproxVertices.size());
                    for (auto const& v : overApproxVertices) {
                        pointsForPlotting.push_back(storm::utility::vector::convertNumericVector<double>(v));
                    }
                    storm::utility::exportDataToCSVFile<double, std::string>(env.modelchecker().multi().getPlotPathOverApproximation().get(), pointsForPlotting, columnHeaders);
                }

                if (env.modelchecker().multi().getPlotPathParetoPoints()) {
                    pointsForPlotting.clear();
                    pointsForPlotting.reserve(paretoPoints.size());
                    for (auto const& v : paretoPoints) {
                        pointsForPlotting.push_back(storm::utility::vector::convertNumericVector<double>(v));
                    }
                    storm::utility::exportDataToCSVFile<double, std::string>(env.modelchecker().multi().getPlotPathParetoPoints().get(), pointsForPlotting, columnHeaders);
                }
            }
                */
            }

            template class DeterministicParetoExplorer<storm::models::sparse::Mdp<double>, storm::RationalNumber>;
            template class DeterministicParetoExplorer<storm::models::sparse::Mdp<storm::RationalNumber>, storm::RationalNumber>;
            template class DeterministicParetoExplorer<storm::models::sparse::MarkovAutomaton<double>, storm::RationalNumber>;
            template class DeterministicParetoExplorer<storm::models::sparse::MarkovAutomaton<storm::RationalNumber>, storm::RationalNumber>;
        }
    }
}
@ -1,245 +0,0 @@
#pragma once

#include <memory>
#include <queue>

#include "storm/modelchecker/multiobjective/preprocessing/SparseMultiObjectivePreprocessorResult.h"
#include "storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.h"
#include "storm/modelchecker/multiobjective/deterministicScheds/DetSchedsWeightVectorChecker.h"
#include "storm/modelchecker/multiobjective/deterministicScheds/DetSchedsSimplexChecker.h"

#include "storm/storage/geometry/Polytope.h"
#include "storm/storage/geometry/Halfspace.h"
#include "storm/modelchecker/results/CheckResult.h"
#include "storm/storage/expressions/ExpressionManager.h"
#include "storm/solver/SmtSolver.h"

namespace storm {

    class Environment;

    namespace modelchecker {
        namespace multiobjective {

            template <class SparseModelType, typename GeometryValueType>
            class DeterministicParetoExplorer {
            public:
                typedef uint64_t PointId;
                typedef typename std::shared_ptr<storm::storage::geometry::Polytope<GeometryValueType>> Polytope;
                typedef typename SparseModelType::ValueType ModelValueType;

                class Point {
                public:
                    Point(std::vector<GeometryValueType> const& coordinates);
                    Point(std::vector<GeometryValueType>&& coordinates);

                    std::vector<GeometryValueType> const& get() const;
                    std::vector<GeometryValueType>& get();

                    uint64_t dimension() const;

                    enum class DominanceResult {
                        Incomparable,
                        Dominates,
                        Dominated,
                        Equal
                    };
                    DominanceResult getDominance(Point const& other) const;

                    void setParetoOptimal(bool value = true);
                    bool isParetoOptimal() const;
                    void setOnFacet(bool value = true);
                    bool liesOnFacet() const;

                    std::string toString(bool convertToDouble = false) const;

                private:
                    std::vector<GeometryValueType> coordinates;
                    bool paretoOptimal;
                    bool onFacet;
                };

                class Pointset {
                public:
                    typedef typename std::map<PointId, Point>::const_iterator iterator_type;

                    Pointset();

                    /*!
                     * If the given point is not dominated by another point in the set, it is added
                     * to the set and its ID is returned.
                     * If the point is dominated by another point, boost::none is returned.
                     * Erases all points in the set that are dominated by the given point.
                     * If the same point is already contained in the set, its ID is returned.
                     */
                    boost::optional<PointId> addPoint(Environment const& env, Point&& point);

                    /*!
                     * Returns the point with the given ID.
                     */
                    Point const& getPoint(PointId const& id) const;

                    iterator_type begin() const;
                    iterator_type end() const;

                    /*!
                     * Returns the number of points currently contained in the set.
                     */
                    uint64_t size() const;

                    /*!
                     * Returns the downward closure of the contained points.
                     */
                    Polytope downwardClosure() const;

                    void collectPointsInPolytope(std::set<PointId>& collectedPoints, Polytope const& polytope);

                    void printToStream(std::ostream& out, bool includeIDs = true, bool convertToDouble = false);

                private:
                    std::map<PointId, Point> points;
                    PointId currId;
                };

                class Facet {
                public:
                    Facet(storm::storage::geometry::Halfspace<GeometryValueType> const& halfspace);
                    Facet(storm::storage::geometry::Halfspace<GeometryValueType>&& halfspace);
                    storm::storage::geometry::Halfspace<GeometryValueType> const& getHalfspace() const;
                    void addPoint(PointId const& pointId, Point const& point);
                    std::vector<PointId> const& getPoints() const;
                    uint64_t getNumberOfPoints() const;

                    /*!
                     * Creates a polytope that captures all points that lie 'under' the facet.
                     */
                    Polytope const& getInducedSimplex(Pointset const& pointset, std::vector<GeometryValueType> const& referenceCoordinates);

                private:
                    storm::storage::geometry::Halfspace<GeometryValueType> halfspace;
                    std::vector<PointId> paretoPointsOnFacet;
                    Polytope inducedSimplex;
                };

                struct FacetAnalysisContext {
                    FacetAnalysisContext(Facet& f);

                    Facet& facet;
                    std::set<PointId> collectedPoints;
                    std::unique_ptr<storm::solver::SmtSolver> smtSolver;
                    std::shared_ptr<storm::expressions::ExpressionManager> expressionManager;

                    // Variables that encode two points that lie in the induced simplex of the analyzed facet,
                    // where xMinusEps = (x_1-eps, ..., x_m-eps).
                    std::vector<storm::expressions::Variable> x, xMinusEps;
                };

                DeterministicParetoExplorer(preprocessing::SparseMultiObjectivePreprocessorResult<SparseModelType>& preprocessorResult);

                virtual std::unique_ptr<CheckResult> check(Environment const& env);

                void exportPlotOfCurrentApproximation(Environment const& env);

            private:

                /*!
                 * Cleans up all cached results from a previous check call.
                 */
                void clean();

                /*!
                 * Intersects the overapproximation with the given halfspace.
                 */
                void addHalfspaceToOverApproximation(Environment const& env, std::vector<GeometryValueType> const& normalVector, Point const& pointOnHalfspace);

                /*!
                 * Adds a polytope which consists of unachievable points.
                 */
                void addUnachievableArea(Environment const& env, Polytope const& area);

                /*!
                 * Builds the initial facets by optimizing the objectives individually.
                 * Adds the facets that need further processing to unprocessedFacets.
                 */
                void initializeFacets(Environment const& env);

                /*!
                 * Gets the reference coordinates used for subdividing the downward closure.
                 */
                std::vector<GeometryValueType> getReferenceCoordinates() const;

                /*!
                 * Checks the precision of the given facet and returns true if no further processing of the facet is necessary.
                 */
                bool checkFacetPrecision(Environment const& env, Facet& f);

                /*!
                 * Checks the precision of the given facet and returns true if no further processing of the facet is necessary.
                 * Also takes the given points within the simplex of the facet into account.
                 */
                bool checkFacetPrecision(Environment const& env, Facet& f, std::set<PointId> const& collectedSimplexPoints);

                /*! Processes the given facet as follows:
                 * 1. Optimize in the facet direction. Potentially, this adds new, unprocessed facets.
                 * 2. Find points that have already been collected so far such that they lie in the induced simplex of the facet.
                 * 3. Find more points that lie on the facet.
                 * 4. Find all points that lie in the induced simplex or prove that there are none.
                 */
                void processFacet(Environment const& env, Facet& f);

                FacetAnalysisContext createAnalysisContext(Environment const& env, Facet& f);

                /*!
                 * Optimizes in the facet direction. If this results in a point that does not lie on the facet,
                 * 1. the new Pareto optimal point is added,
                 * 2. new facets are generated and (if not already precise enough) added to unprocessedFacets, and
                 * 3. true is returned.
                 */
                bool optimizeAndSplitFacet(Environment const& env, Facet& f);

                /*!
                 * Adds a new point that lies within the induced simplex of the given facet to the analysis context.
                 * @param context the analysis context
                 * @param pointId the id of the given point.
                 * @param performCheck if true, it is checked whether the facet is sufficiently precise now. If false, no check is performed.
                 * @return true iff performCheck is true and the facet is sufficiently precise.
                 */
                bool addNewSimplexPoint(FacetAnalysisContext& context, PointId const& pointId, bool performCheck);

                /*!
                 * Finds all points that lie within the induced simplex of the given facet.
                 * Returns true if the facet is sufficiently precise when considering all added points.
                 */
                bool findAndCheckCachedPoints(Environment const& env, FacetAnalysisContext& context);

                /*!
                 * Finds points that lie on the facet.
                 * Returns true if the facet has been analyzed sufficiently precisely.
                 * If false is returned, it means that *all* points that lie on the facet have been analyzed but the analysis is still not precise enough.
                 */
                bool analyzePointsOnFacet(Environment const& env, FacetAnalysisContext& context);

                bool analyzePointsInSimplex(Environment const& env, FacetAnalysisContext& context);

                Pointset pointset;
                std::queue<Facet> unprocessedFacets;
                Polytope overApproximation;
                std::vector<Polytope> unachievableAreas;

                std::shared_ptr<MultiObjectiveSchedulerEvaluator<SparseModelType>> schedulerEvaluator;
                std::shared_ptr<DetSchedsWeightVectorChecker<SparseModelType>> weightVectorChecker;
                std::shared_ptr<DetSchedsSimplexChecker<SparseModelType, GeometryValueType>> simplexChecker;
                std::shared_ptr<SparseModelType> const& model;
                uint64_t originalModelInitialState;
                std::vector<Objective<ModelValueType>> const& objectives;
            };

        }
    }
}
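
For orientation, a brief usage sketch of the interface declared above. It assumes that a preprocessed multi-objective query is already available as preprocessorResult (produced by the multi-objective preprocessing step, which is not shown here); the model and value types follow the explicit instantiations of this class.

// Sketch only; 'preprocessorResult' is an assumed input of type
// preprocessing::SparseMultiObjectivePreprocessorResult<storm::models::sparse::Mdp<double>>.
storm::Environment env;
storm::modelchecker::multiobjective::DeterministicParetoExplorer<storm::models::sparse::Mdp<double>, storm::RationalNumber> explorer(preprocessorResult);
std::unique_ptr<storm::modelchecker::CheckResult> result = explorer.check(env);  // approximates the Pareto curve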
@ -1,192 +0,0 @@
#include "storm/modelchecker/multiobjective/deterministicScheds/MultiObjectiveSchedulerEvaluator.h"

#include "storm/transformer/ContinuousToDiscreteTimeModelTransformer.h"

#include "storm/models/sparse/Dtmc.h"
#include "storm/models/sparse/Ctmc.h"
#include "storm/models/sparse/Mdp.h"
#include "storm/models/sparse/MarkovAutomaton.h"
#include "storm/models/sparse/StandardRewardModel.h"
#include "storm/modelchecker/results/ExplicitQuantitativeCheckResult.h"
#include "storm/modelchecker/results/ExplicitQualitativeCheckResult.h"
#include "storm/modelchecker/propositional/SparsePropositionalModelChecker.h"
#include "storm/modelchecker/prctl/SparseDtmcPrctlModelChecker.h"
#include "storm/modelchecker/csl/SparseCtmcCslModelChecker.h"
#include "storm/storage/BitVector.h"
#include "storm/utility/graph.h"
#include "storm/utility/vector.h"

#include "storm/utility/constants.h"
#include "storm/storage/Scheduler.h"

#include "storm/exceptions/NotSupportedException.h"

namespace storm {
    namespace modelchecker {
        namespace multiobjective {

            template <class ModelType>
            MultiObjectiveSchedulerEvaluator<ModelType>::MultiObjectiveSchedulerEvaluator(preprocessing::SparseMultiObjectivePreprocessorResult<ModelType>& preprocessorResult) : model(*preprocessorResult.preprocessedModel), objectives(preprocessorResult.objectives), currSchedHasBeenChecked(false) {
                results.resize(this->objectives.size(), std::vector<ValueType>(getModel().getNumberOfStates()));
                currSched.resize(this->getModel().getNumberOfStates(), 0);
                initializeSchedulerIndependentStates();
                if (getModel().isOfType(storm::models::ModelType::MarkovAutomaton)) {
                    bool transformMaToMdp = true;
                    for (auto const& obj : objectives) {
                        if (!storm::transformer::ContinuousToDiscreteTimeModelTransformer<ValueType>::preservesFormula(*obj.formula)) {
                            transformMaToMdp = false;
                        }
                    }
                    if (transformMaToMdp) {
                        auto modelAsMa = getModel().template as<storm::models::sparse::MarkovAutomaton<ValueType>>();
                        mdp = storm::transformer::ContinuousToDiscreteTimeModelTransformer<ValueType>::transform(*modelAsMa);
                    }
                }
            }

            template <class ModelType>
            void MultiObjectiveSchedulerEvaluator<ModelType>::initializeSchedulerIndependentStates() {
                storm::modelchecker::SparsePropositionalModelChecker<ModelType> mc(getModel());
                for (uint64_t objIndex = 0; objIndex < this->objectives.size(); ++objIndex) {
                    auto const& formula = *this->objectives[objIndex].formula;
                    if (formula.isProbabilityOperatorFormula() && formula.getSubformula().isUntilFormula()) {
                        storm::storage::BitVector phiStates = mc.check(formula.getSubformula().asUntilFormula().getLeftSubformula())->asExplicitQualitativeCheckResult().getTruthValuesVector();
                        storm::storage::BitVector psiStates = mc.check(formula.getSubformula().asUntilFormula().getRightSubformula())->asExplicitQualitativeCheckResult().getTruthValuesVector();
                        auto backwardTransitions = getModel().getBackwardTransitions();
                        storm::storage::BitVector prob1States = storm::utility::graph::performProb1A(getModel().getTransitionMatrix(), getModel().getNondeterministicChoiceIndices(), backwardTransitions, phiStates, psiStates);
                        storm::storage::BitVector prob0States = storm::utility::graph::performProb0A(backwardTransitions, phiStates, psiStates);
                        storm::utility::vector::setVectorValues(results[objIndex], prob0States, storm::utility::zero<ValueType>());
                        storm::utility::vector::setVectorValues(results[objIndex], prob1States, storm::utility::one<ValueType>());
                        schedulerIndependentStates.push_back(prob1States | prob0States);
                    } else if (formula.getSubformula().isEventuallyFormula() && (formula.isRewardOperatorFormula() || formula.isTimeOperatorFormula())) {
                        storm::storage::BitVector rew0States = mc.check(formula.getSubformula().asEventuallyFormula().getSubformula())->asExplicitQualitativeCheckResult().getTruthValuesVector();
                        if (formula.isRewardOperatorFormula()) {
                            auto const& rewModel = formula.asRewardOperatorFormula().hasRewardModelName() ? getModel().getRewardModel(formula.asRewardOperatorFormula().getRewardModelName()) : getModel().getUniqueRewardModel();
                            storm::storage::BitVector statesWithoutReward = rewModel.getStatesWithZeroReward(getModel().getTransitionMatrix());
                            rew0States = storm::utility::graph::performProb1A(getModel().getTransitionMatrix(), getModel().getNondeterministicChoiceIndices(), getModel().getBackwardTransitions(), statesWithoutReward, rew0States);
                        }
                        storm::utility::vector::setVectorValues(results[objIndex], rew0States, storm::utility::zero<ValueType>());
                        schedulerIndependentStates.push_back(std::move(rew0States));
                    } else if (formula.isRewardOperatorFormula() && formula.getSubformula().isTotalRewardFormula()) {
                        auto const& rewModel = formula.asRewardOperatorFormula().hasRewardModelName() ? getModel().getRewardModel(formula.asRewardOperatorFormula().getRewardModelName()) : getModel().getUniqueRewardModel();
                        storm::storage::BitVector statesWithoutReward = rewModel.getStatesWithZeroReward(getModel().getTransitionMatrix());
                        storm::storage::BitVector rew0States = storm::utility::graph::performProbGreater0E(getModel().getBackwardTransitions(), statesWithoutReward, ~statesWithoutReward);
                        rew0States.complement();
                        storm::utility::vector::setVectorValues(results[objIndex], rew0States, storm::utility::zero<ValueType>());
                        schedulerIndependentStates.push_back(std::move(rew0States));
                    } else {
                        STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "The given formula " << formula << " is not supported.");
                    }
                }
            }

            template<typename ValueType>
            std::unique_ptr<storm::modelchecker::CheckResult> invokeModelChecker(Environment const& env, std::shared_ptr<storm::models::sparse::Model<ValueType>> const& model, storm::modelchecker::CheckTask<storm::logic::Formula, ValueType> const& task) {
                if (model->getType() == storm::models::ModelType::Dtmc) {
                    return storm::modelchecker::SparseDtmcPrctlModelChecker<storm::models::sparse::Dtmc<ValueType>>(*model->template as<storm::models::sparse::Dtmc<ValueType>>()).check(env, task);
                } else if (model->getType() == storm::models::ModelType::Ctmc) {
                    return storm::modelchecker::SparseCtmcCslModelChecker<storm::models::sparse::Ctmc<ValueType>>(*model->template as<storm::models::sparse::Ctmc<ValueType>>()).check(env, task);
                } else {
                    STORM_LOG_ASSERT(false, "Invalid model type.");
                    return nullptr;
                }
            }

            template <class ModelType>
            void MultiObjectiveSchedulerEvaluator<ModelType>::check(Environment const& env) {
                if (!currSchedHasBeenChecked) {
                    storm::storage::Scheduler<ValueType> scheduler(model.getNumberOfStates());
                    for (uint64_t state = 0; state < model.getNumberOfStates(); ++state) {
                        scheduler.setChoice(currSched[state], state);
                    }

                    auto detModel = mdp ? mdp->applyScheduler(scheduler, false) : model.applyScheduler(scheduler, false);
                    detModel->getTransitionMatrix().makeRowGroupingTrivial();
                    storm::models::sparse::Dtmc<ValueType> dtmc(std::move(detModel->getTransitionMatrix()), std::move(detModel->getStateLabeling()), std::move(detModel->getRewardModels()));
                    for (uint64_t objIndex = 0; objIndex < this->objectives.size(); ++objIndex) {
                        storm::modelchecker::CheckTask<storm::logic::Formula, ValueType> task(*this->objectives[objIndex].formula, false);
                        auto res = storm::modelchecker::SparseDtmcPrctlModelChecker<storm::models::sparse::Dtmc<ValueType>>(dtmc).check(env, task);
                        results[objIndex] = std::move(res->template asExplicitQuantitativeCheckResult<ValueType>().getValueVector());
                    }
                    currSchedHasBeenChecked = true;
                }
            }
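
The applyScheduler step above induces a deterministic model by keeping, for every state, only the row of the action that the scheduler picks. A minimal sketch of that idea on plain matrices (illustrative only; these are not Storm's data structures):

#include <cstddef>
#include <vector>

using Matrix = std::vector<std::vector<double>>;  // one row per enabled action, one entry per successor state

// rowGroupIndices[s] is the first row belonging to state s; chosenChoice[s] is the offset
// of the scheduler's choice within that row group. The result has exactly one row per
// state and can be read as a DTMC transition matrix.
Matrix induceDtmc(Matrix const& mdpRows, std::vector<std::size_t> const& rowGroupIndices,
                  std::vector<std::size_t> const& chosenChoice) {
    Matrix dtmc;
    dtmc.reserve(chosenChoice.size());
    for (std::size_t s = 0; s < chosenChoice.size(); ++s) {
        dtmc.push_back(mdpRows[rowGroupIndices[s] + chosenChoice[s]]);
    }
    return dtmc;
}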

            template <class ModelType>
            ModelType const& MultiObjectiveSchedulerEvaluator<ModelType>::getModel() const {
                return model;
            }

            template <class ModelType>
            std::vector<Objective<typename MultiObjectiveSchedulerEvaluator<ModelType>::ValueType>> const& MultiObjectiveSchedulerEvaluator<ModelType>::getObjectives() const {
                return objectives;
            }

            template <class ModelType>
            std::vector<uint64_t> const& MultiObjectiveSchedulerEvaluator<ModelType>::getScheduler() const {
                return currSched;
            }

            template <class ModelType>
            bool MultiObjectiveSchedulerEvaluator<ModelType>::hasCurrentSchedulerBeenChecked() const {
                return currSchedHasBeenChecked;
            }

            template <class ModelType>
            uint64_t const& MultiObjectiveSchedulerEvaluator<ModelType>::getChoiceAtState(uint64_t state) const {
                return currSched[state];
            }

            template <class ModelType>
            void MultiObjectiveSchedulerEvaluator<ModelType>::setChoiceAtState(uint64_t state, uint64_t choice) {
                if (currSched[state] != choice) {
                    STORM_LOG_ASSERT(choice < this->getModel().getTransitionMatrix().getRowGroupSize(state), "Invalid choice index.");
                    currSched[state] = choice;
                    currSchedHasBeenChecked = false;
                }
            }

            template <class ModelType>
            std::vector<typename MultiObjectiveSchedulerEvaluator<ModelType>::ValueType> const& MultiObjectiveSchedulerEvaluator<ModelType>::getResultForObjective(uint64_t objIndex) const {
                STORM_LOG_ASSERT(currSchedHasBeenChecked, "Tried to get results for a scheduler that has not yet been analyzed.");
                return results[objIndex];
            }

            template <class ModelType>
            typename MultiObjectiveSchedulerEvaluator<ModelType>::ValueType const& MultiObjectiveSchedulerEvaluator<ModelType>::getSchedulerIndependentStateResult(uint64_t objIndex, uint64_t state) const {
                STORM_LOG_ASSERT(getSchedulerIndependentStates(objIndex).get(state), "Tried to get the scheduler-independent result for a scheduler-dependent state.");
                return results[objIndex][state];
            }

            template <class ModelType>
            std::vector<std::vector<typename MultiObjectiveSchedulerEvaluator<ModelType>::ValueType>> const& MultiObjectiveSchedulerEvaluator<ModelType>::getResults() const {
                STORM_LOG_ASSERT(currSchedHasBeenChecked, "Tried to get results for a scheduler that has not yet been analyzed.");
                return results;
            }

            template <class ModelType>
            storm::storage::BitVector const& MultiObjectiveSchedulerEvaluator<ModelType>::getSchedulerIndependentStates(uint64_t objIndex) const {
                return schedulerIndependentStates[objIndex];
            }

            template <class ModelType>
            std::vector<typename MultiObjectiveSchedulerEvaluator<ModelType>::ValueType> MultiObjectiveSchedulerEvaluator<ModelType>::getInitialStateResults() const {
                STORM_LOG_ASSERT(currSchedHasBeenChecked, "Tried to get results for a scheduler that has not yet been analyzed.");
                STORM_LOG_ASSERT(model.getInitialStates().getNumberOfSetBits() == 1, "Getting the initial state result is only supported for models with a single initial state.");
                std::vector<ValueType> res;
                for (auto const& objResult : results) {
                    res.push_back(objResult[*model.getInitialStates().begin()]);
                }
                return res;
            }

            template class MultiObjectiveSchedulerEvaluator<storm::models::sparse::Mdp<double>>;
            template class MultiObjectiveSchedulerEvaluator<storm::models::sparse::MarkovAutomaton<double>>;
            template class MultiObjectiveSchedulerEvaluator<storm::models::sparse::Mdp<storm::RationalNumber>>;
            template class MultiObjectiveSchedulerEvaluator<storm::models::sparse::MarkovAutomaton<storm::RationalNumber>>;

        }
    }
}
@ -1,65 +0,0 @@
#pragma once

#include <vector>

#include "storm/modelchecker/multiobjective/preprocessing/SparseMultiObjectivePreprocessorResult.h"
#include "storm/models/sparse/Mdp.h"

namespace storm {

    class Environment;
    namespace storage {
        class BitVector;
    }

    namespace modelchecker {
        namespace multiobjective {

            template <class ModelType>
            class MultiObjectiveSchedulerEvaluator {
            public:

                typedef typename ModelType::ValueType ValueType;

                MultiObjectiveSchedulerEvaluator(preprocessing::SparseMultiObjectivePreprocessorResult<ModelType>& preprocessorResult);

                /*!
                 * Instantiates the given model with the provided scheduler and checks the objectives individually under that scheduler.
                 */
                void check(Environment const& env);

                // Retrieve the results after calling check().
                std::vector<std::vector<ValueType>> const& getResults() const;
                std::vector<ValueType> const& getResultForObjective(uint64_t objIndex) const;
                ValueType const& getSchedulerIndependentStateResult(uint64_t objIndex, uint64_t state) const;
                storm::storage::BitVector const& getSchedulerIndependentStates(uint64_t objIndex) const;
                std::vector<ValueType> getInitialStateResults() const;
                ModelType const& getModel() const;
                std::vector<Objective<ValueType>> const& getObjectives() const;
                bool hasCurrentSchedulerBeenChecked() const;
                std::vector<uint64_t> const& getScheduler() const;
                uint64_t const& getChoiceAtState(uint64_t state) const;
                void setChoiceAtState(uint64_t state, uint64_t choice);

            private:

                void initializeSchedulerIndependentStates();

                ModelType const& model;
                // In case the model is a Markov automaton, we transform it into an MDP.
                std::shared_ptr<storm::models::sparse::Mdp<ValueType>> mdp;

                std::vector<Objective<ValueType>> const& objectives;

                // Indicates for each objective the set of states for which the result is fixed (i.e., independent of the scheduler).
                std::vector<storm::storage::BitVector> schedulerIndependentStates;

                // Stores the results from the last call to check().
                std::vector<std::vector<ValueType>> results;
                std::vector<uint64_t> currSched;
                bool currSchedHasBeenChecked;
            };
        }
    }
}
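
For orientation, a brief usage sketch of this class. It assumes a preprocessed multi-objective query is available as preprocessorResult (produced by the preprocessing step, not shown here), and the state and choice indices are placeholders.

// Sketch only; 'preprocessorResult' is an assumed input of type
// preprocessing::SparseMultiObjectivePreprocessorResult<storm::models::sparse::Mdp<double>>.
storm::Environment env;
storm::modelchecker::multiobjective::MultiObjectiveSchedulerEvaluator<storm::models::sparse::Mdp<double>> evaluator(preprocessorResult);
evaluator.setChoiceAtState(0, 1);                         // fix the scheduler's choice at state 0 (placeholder indices)
evaluator.check(env);                                     // model check every objective under the current scheduler
auto initialValues = evaluator.getInitialStateResults();  // one value per objective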