diff --git a/src/storage/dd/CuddDdManager.cpp b/src/storage/dd/CuddDdManager.cpp index 375c255d8..622ecbb44 100644 --- a/src/storage/dd/CuddDdManager.cpp +++ b/src/storage/dd/CuddDdManager.cpp @@ -331,5 +331,9 @@ namespace storm { std::shared_ptr const> DdManager::asSharedPointer() const { return this->shared_from_this(); } + + std::shared_ptr> DdManager::asSharedPointer() { + return this->shared_from_this(); + } } } \ No newline at end of file diff --git a/src/storage/dd/CuddDdManager.h b/src/storage/dd/CuddDdManager.h index b6e5d836e..411a64b55 100644 --- a/src/storage/dd/CuddDdManager.h +++ b/src/storage/dd/CuddDdManager.h @@ -186,7 +186,14 @@ namespace storm { * @return A shared pointer to the manager. */ std::shared_ptr const> asSharedPointer() const; - + + /*! + * Retrieves the manager as a shared pointer. + * + * @return A shared pointer to the manager. + */ + std::shared_ptr> asSharedPointer(); + private: /*! * Retrieves a list of names of the DD variables in the order of their index. diff --git a/src/utility/graph.h b/src/utility/graph.h index 7475d0570..a4f083aed 100644 --- a/src/utility/graph.h +++ b/src/utility/graph.h @@ -175,7 +175,7 @@ namespace storm { * @return All states with probability 1. */ template - storm::dd::Bdd performProb1(storm::models::symbolic::Model const& model, storm::dd::Bdd const& transitionMatrix, storm::dd::Bdd const& phiStates, storm::dd::Bdd const& psiStates) ; + storm::dd::Bdd performProb1(storm::models::symbolic::Model const& model, storm::dd::Bdd const& transitionMatrix, storm::dd::Bdd const& phiStates, storm::dd::Bdd const& psiStates); /*! 
* Computes the sets of states that have probability 0 or 1, respectively, of satisfying phi until psi in a diff --git a/test/functional/utility/GraphTest.cpp b/test/functional/utility/GraphTest.cpp index c402b06f5..7d8e3fc95 100644 --- a/test/functional/utility/GraphTest.cpp +++ b/test/functional/utility/GraphTest.cpp @@ -149,7 +149,19 @@ TEST(GraphTest, SymbolicProb01StochasticGameDieSmall) { EXPECT_EQ(0, result.states.getNonZeroCount()); EXPECT_TRUE(static_cast(result.player1Strategy)); EXPECT_TRUE(static_cast(result.player2Strategy)); - + + // Check the validity of the strategies. Start by checking whether only prob0 states have a strategy. + storm::dd::Bdd nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb0StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. + storm::dd::Add stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());; + EXPECT_EQ(0, stateDistributionsUnderStrategies.getNonZeroCount()); + + // Check that the number of distributions per state is one (or zero in the case where there are no prob0 states). + storm::dd::Add stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(0, stateDistributionCount.getMax()); + result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true); EXPECT_EQ(3, result.states.getNonZeroCount()); @@ -172,6 +184,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameDieSmall) { EXPECT_EQ(3, result.states.getNonZeroCount()); EXPECT_TRUE(static_cast(result.player1Strategy)); EXPECT_TRUE(static_cast(result.player2Strategy)); + + // Check the validity of the strategies. 
Start by checking whether only prob1 states have a strategy. + storm::dd::Bdd nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb1StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. + stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables()); + EXPECT_EQ(3, stateDistributionsUnderStrategies.getNonZeroCount()); + + // Check that the number of distributions per state is one (or zero in the case where there are no prob1 states). + stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(1, stateDistributionCount.getMax()); } TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) { @@ -228,6 +252,17 @@ TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) { EXPECT_TRUE(static_cast(result.player1Strategy)); EXPECT_TRUE(static_cast(result.player2Strategy)); + // Check the validity of the strategies. Start by checking whether only prob0 states have a strategy. + storm::dd::Bdd nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb0StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. 
+ storm::dd::Add stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables()); + EXPECT_EQ(153, stateDistributionsUnderStrategies.getNonZeroCount()); + + storm::dd::Add stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(1, stateDistributionCount.getMax()); + result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true); EXPECT_EQ(1, result.states.getNonZeroCount()); @@ -250,6 +285,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameTwoDice) { EXPECT_EQ(1, result.states.getNonZeroCount()); EXPECT_TRUE(static_cast(result.player1Strategy)); EXPECT_TRUE(static_cast(result.player2Strategy)); + + // Check the validity of the strategies. Start by checking whether only prob1 states have a strategy. + storm::dd::Bdd nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb1StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. + stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables()); + EXPECT_EQ(1, stateDistributionsUnderStrategies.getNonZeroCount()); + + // Check that the number of distributions per state is one (or zero in the case where there are no prob1 states). 
+ stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(1, stateDistributionCount.getMax()); } TEST(GraphTest, SymbolicProb01StochasticGameWlan) { @@ -371,8 +418,20 @@ TEST(GraphTest, SymbolicProb01StochasticGameWlan) { storm::utility::graph::GameProb01Result result = storm::utility::graph::performProb0(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true); EXPECT_EQ(2831, result.states.getNonZeroCount()); - EXPECT_TRUE(static_cast(result.player1Strategy)); - EXPECT_TRUE(static_cast(result.player2Strategy)); + ASSERT_TRUE(static_cast(result.player1Strategy)); + ASSERT_TRUE(static_cast(result.player2Strategy)); + + // Check the validity of the strategies. Start by checking whether only prob0 states have a strategy. + storm::dd::Bdd nonProb0StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb0StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. + storm::dd::Add stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables());; + EXPECT_EQ(2831, stateDistributionsUnderStrategies.getNonZeroCount()); + + // Check that the number of distributions per state is one (or zero in the case where there are no prob0 states). 
+ storm::dd::Add stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(1, stateDistributionCount.getMax()); result = storm::utility::graph::performProb1(game, game.getQualitativeTransitionMatrix(), game.getReachableStates(), targetStates, storm::OptimizationDirection::Minimize, storm::OptimizationDirection::Minimize, true); EXPECT_EQ(2692, result.states.getNonZeroCount()); @@ -396,6 +455,18 @@ TEST(GraphTest, SymbolicProb01StochasticGameWlan) { EXPECT_EQ(2884, result.states.getNonZeroCount()); EXPECT_TRUE(static_cast(result.player1Strategy)); EXPECT_TRUE(static_cast(result.player2Strategy)); + + // Check the validity of the strategies. Start by checking whether only prob1 states have a strategy. + storm::dd::Bdd nonProb1StatesWithStrategy = !result.states && result.player1Strategy.get(); + EXPECT_TRUE(nonProb1StatesWithStrategy.isZero()); + + // Proceed by checking whether they select exactly one action in each state. + stateDistributionsUnderStrategies = (game.getTransitionMatrix() * result.player1Strategy.get().toAdd() * result.player2Strategy.get().toAdd()).sumAbstract(game.getColumnVariables()); + EXPECT_EQ(2884, stateDistributionsUnderStrategies.getNonZeroCount()); + + // Check that the number of distributions per state is one (or zero in the case where there are no prob1 states). + stateDistributionCount = stateDistributionsUnderStrategies.sumAbstract(game.getNondeterminismVariables()); + EXPECT_EQ(1, stateDistributionCount.getMax()); } #endif