
extended tests for validity of returned strategies

Former-commit-id: fb6a1c23f0
dehnert 9 years ago
parent commit 781610b05d
  1. src/storage/dd/CuddDdManager.cpp (4 changed lines)
  2. src/storage/dd/CuddDdManager.h (7 changed lines)
  3. src/utility/graph.h (2 changed lines)
  4. test/functional/utility/GraphTest.cpp (75 changed lines)

src/storage/dd/CuddDdManager.cpp (4 changed lines)

@@ -331,5 +331,9 @@ namespace storm {
std::shared_ptr<DdManager<DdType::CUDD> const> DdManager<DdType::CUDD>::asSharedPointer() const {
return this->shared_from_this();
}
std::shared_ptr<DdManager<DdType::CUDD>> DdManager<DdType::CUDD>::asSharedPointer() {
return this->shared_from_this();
}
}
}
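
A minimal usage sketch for the non-const overload added here. The helper name, calling context, and include path are hypothetical; only asSharedPointer() and the manager type come from the diff, and the manager must already be owned by a std::shared_ptr for shared_from_this() to be valid.

#include <memory>
#include "src/storage/dd/CuddDdManager.h"

// Hypothetical helper (not part of this commit) illustrating the new mutable overload.
void shareManager(storm::dd::DdManager<storm::dd::DdType::CUDD>& manager) {
    std::shared_ptr<storm::dd::DdManager<storm::dd::DdType::CUDD>> handle = manager.asSharedPointer();
    // 'handle' shares ownership of the manager, so the manager stays alive while
    // DDs created through this mutable handle are still in use.
}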

src/storage/dd/CuddDdManager.h (7 changed lines)

@@ -187,6 +187,13 @@ namespace storm {
*/
std::shared_ptr<DdManager<DdType::CUDD> const> asSharedPointer() const;
/*!
* Retrieves the manager as a shared pointer.
*
* @return A shared pointer to the manager.
*/
std::shared_ptr<DdManager<DdType::CUDD>> asSharedPointer();
private:
/*!
* Retrieves a list of names of the DD variables in the order of their index.

src/utility/graph.h (2 changed lines)

@@ -175,7 +175,7 @@ namespace storm {
* @return All states with probability 1.
*/
template <storm::dd::DdType Type>
storm::dd::Bdd<Type> performProb1(storm::models::symbolic::Model<Type> const& model, storm::dd::Bdd<Type> const& transitionMatrix, storm::dd::Bdd<Type> const& phiStates, storm::dd::Bdd<Type> const& psiStates) ;
storm::dd::Bdd<Type> performProb1(storm::models::symbolic::Model<Type> const& model, storm::dd::Bdd<Type> const& transitionMatrix, storm::dd::Bdd<Type> const& phiStates, storm::dd::Bdd<Type> const& psiStates);
/*!
* Computes the sets of states that have probability 0 or 1, respectively, of satisfying phi until psi in a

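A minimal call sketch for the declaration above. Only the signature is taken from the header; 'model', 'transitions', 'phi', and 'psi' are placeholder objects, not names from this commit.

// Hypothetical call: 'model' is some storm::models::symbolic::Model<DdType::CUDD>,
// 'transitions' a BDD over its row/column variables, 'phi'/'psi' BDDs of state sets.
storm::dd::Bdd<storm::dd::DdType::CUDD> prob1States =
    storm::utility::graph::performProb1(model, transitions, phi, psi);
// prob1States contains all states satisfying 'phi until psi' with probability 1.
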
test/functional/utility/GraphTest.cpp (75 changed lines)

@@ -150,6 +150,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameDieSmall) {
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob0 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb0StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(0, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob0 states).
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(0, stateDistributionCount.getMax());
result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true);
EXPECT_EQ(3, result.states.getNonZeroCount());
@@ -172,6 +184,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameDieSmall) {
EXPECT_EQ(3, result.states.getNonZeroCount());
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob1 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb1StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(3, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob1 states).
stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(1, stateDistributionCount.getMax());
}
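
The checks added in these tests follow one pattern, restated here as a standalone sketch. It only summarizes the test code above generically; 'result' and 'game' are the objects from these tests, and the concrete expected counts are example-specific.

// 1) Only states in the computed prob0/prob1 set may carry a player-1 strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> statesOutsideSetWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(statesOutsideSetWithStrategy.isZero());
// 2) Restricting the game's transitions to both strategies and summing out the
//    successor (column) variables yields the distributions chosen per state and action.
storm::dd::Add<storm::dd::DdType::CUDD> distributions = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
// 3) Summing out the nondeterminism variables counts distributions per state; a valid
//    strategy pair leaves at most one, and the per-test assertions pin it to exactly
//    one (or zero when the computed set is empty).
storm::dd::Add<storm::dd::DdType::CUDD> distributionsPerState = distributions.sumAbstract(game.getNondeterminismVariables());
EXPECT_LE(distributionsPerState.getMax(), 1);
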
TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) {
@@ -228,6 +252,17 @@ TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) {
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob0 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb0StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(153, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob0 states).
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(1, stateDistributionCount.getMax());
result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true);
EXPECT_EQ(1, result.states.getNonZeroCount());
@@ -250,6 +285,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) {
EXPECT_EQ(1, result.states.getNonZeroCount());
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob1 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb1StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(1, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob1 states).
stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(1, stateDistributionCount.getMax());
}
TEST(GraphTest, SymbolicProb01StochasticGameWlan) {
@@ -371,8 +418,20 @@ TEST(GraphTest, SymbolicProb01StochasticGameWlan) {
storm::utility::graph::GameProb01Result<storm::dd::DdType::CUDD> result = storm::utility::graph::performProb0(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true);
EXPECT_EQ(2831, result.states.getNonZeroCount());
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
ASSERT_TRUE(static_cast<bool>(result.player1Strategy));
ASSERT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob0 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb0StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(2831, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob0 states).
storm::dd::Add<storm::dd::DdType::CUDD> stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(1, stateDistributionCount.getMax());
result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true);
EXPECT_EQ(2692, result.states.getNonZeroCount());
@@ -396,6 +455,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameWlan) {
EXPECT_EQ(2884, result.states.getNonZeroCount());
EXPECT_TRUE(static_cast<bool>(result.player1Strategy));
EXPECT_TRUE(static_cast<bool>(result.player2Strategy));
// Check the validity of the strategies. Start by checking whether only prob1 states have a strategy.
storm::dd::Bdd<storm::dd::DdType::CUDD> nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get();
EXPECT_TRUE(nonProb1StatesWithStrategy.isZero());
// Proceed by checking whether they select exactly one action in each state.
stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());
EXPECT_EQ(2884, stateDistributionsUnderStrategies.getNonZeroCount());
// Check that the number of distributions per state is one (or zero in the case where there are no prob1 states).
stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables());
EXPECT_EQ(1, stateDistributionCount.getMax());
}
#endif
