Browse Source

eliminating self-loops in POMDPs

tempestpy_adaptions
sjunges 7 years ago
parent
commit
10b1f65840
  1. 11
      src/storm-pomdp-cli/storm-pomdp.cpp
  2. 30
      src/storm-pomdp/transformer/GlobalPOMDPSelfLoopEliminator.cpp
  3. 13
      src/storm/models/sparse/Pomdp.cpp
  4. 2
      src/storm/models/sparse/Pomdp.h
  5. 10
      src/storm/transformer/ChoiceSelector.cpp

11
src/storm-pomdp-cli/storm-pomdp.cpp

@@ -34,6 +34,7 @@
#include "storm-cli-utilities/model-handling.h"
#include "storm-pomdp/transformer/ApplyFiniteSchedulerToPomdp.h"
#include "storm-pomdp/transformer/GlobalPOMDPSelfLoopEliminator.h"
/*!
* Initialize the settings manager.
@@ -97,8 +98,14 @@ int main(const int argc, const char** argv) {
storm::cli::SymbolicInput symbolicInput = storm::cli::parseAndPreprocessSymbolicInput();
auto model = storm::cli::buildPreprocessExportModelWithValueTypeAndDdlib<storm::dd::DdType::Sylvan, storm::RationalNumber>(symbolicInput, engine);
STORM_LOG_THROW(model->getType() == storm::models::ModelType::Pomdp, storm::exceptions::WrongFormatException, "Expected a POMDP.");
storm::transformer::ApplyFiniteSchedulerToPomdp<storm::RationalNumber> toPMCTransformer(*(model->template as<storm::models::sparse::Pomdp<storm::RationalNumber>>()));
STORM_LOG_THROW(model && model->getType() == storm::models::ModelType::Pomdp, storm::exceptions::WrongFormatException, "Expected a POMDP.");
// CHECK if prop maximizes, only apply in those situations
std::shared_ptr<storm::models::sparse::Pomdp<storm::RationalNumber>> pomdp = model->template as<storm::models::sparse::Pomdp<storm::RationalNumber>>();
storm::transformer::GlobalPOMDPSelfLoopEliminator<storm::RationalNumber> selfLoopEliminator(*pomdp);
pomdp = selfLoopEliminator.transform();
storm::transformer::ApplyFiniteSchedulerToPomdp<storm::RationalNumber> toPMCTransformer(*pomdp);
if (pomdpSettings.isExportToParametricSet()) {
auto const &pmc = toPMCTransformer.transform();

30
src/storm-pomdp/transformer/GlobalPOMDPSelfLoopEliminator.cpp

@@ -1,6 +1,7 @@
#include "storm-pomdp/transformer/GlobalPOMDPSelfLoopEliminator.h"
#include "storm/storage/BitVector.h"
#include <vector>
#include <storm/transformer/ChoiceSelector.h>
namespace storm {
namespace transformer {
@@ -14,12 +15,19 @@ namespace storm {
std::vector<storm::storage::BitVector> observationSelfLoopMasks;
for (uint64_t obs = 0; obs < pomdp.getNrObservations(); ++obs) {
observationSelfLoopMasks.push_back(storm::storage::BitVector());
observationSelfLoopMasks.push_back(storm::storage::BitVector(1, false));
assert(observationSelfLoopMasks.back().size() == 1);
}
assert(pomdp.getNrObservations() >= 1);
assert(observationSelfLoopMasks.size() == pomdp.getNrObservations());
for (uint64_t state = 0; state < nrStates; ++state) {
uint32_t observation = pomdp.getObservation(state);
assert(pomdp.getNumberOfChoices(state) != 0);
if (pomdp.getNumberOfChoices(state) == 1) {
continue;
}
storm::storage::BitVector actionVector(pomdp.getNumberOfChoices(state), false);
for (uint64_t action = 0; action < pomdp.getNumberOfChoices(state); ++action) {
// We just look at the first entry.
@@ -30,14 +38,17 @@ namespace storm {
break;
}
}
if (observationSelfLoopMasks[observation].size() == 0) {
STORM_LOG_ASSERT(observation < observationSelfLoopMasks.size(), "Observation index (" << observation << ") should be less than number of observations (" << observationSelfLoopMasks.size() << "). ");
if (observationSelfLoopMasks[observation].size() == 1) {
observationSelfLoopMasks[observation] = actionVector;
} else {
STORM_LOG_ASSERT(observationSelfLoopMasks[observation].size() == pomdp.getNumberOfChoices(state), "State " + std::to_string(state) + " has " + std::to_string(pomdp.getNumberOfChoices(state)) + " actions, different from other with same observation (" + std::to_string(observationSelfLoopMasks[observation].size()) + ")." );
observationSelfLoopMasks[observation] &= actionVector;
}
}
storm::storage::BitVector filter(pomdp.getNumberOfChoices(), true);
storm::storage::BitVector filter(pomdp.getNumberOfChoices(), false);
uint64_t offset = 0;
for (uint64_t state = 0; state < nrStates; ++state) {
uint32_t observation = pomdp.getObservation(state);
@@ -45,6 +56,8 @@ namespace storm {
if (vec.full()) {
vec.set(0, false);
}
assert(!vec.full());
std::cout << "state " << state << " vec " << vec << std::endl;
for (uint64_t action = 0; action < pomdp.getNumberOfChoices(state); ++action) {
if (vec.get(action)) {
filter.set(offset + action);
@@ -52,10 +65,15 @@ namespace storm {
}
offset += pomdp.getNumberOfChoices(state);
}
// TODO rewards.
std::cout << "filter: " << filter << std::endl;
assert(filter.size() == pomdp.getNumberOfChoices());
// TODO rewards with state-action rewards
filter.complement();
std::cout << "selection: " << filter << std::endl;
//storm::storage::sparse::ModelComponents<storm::RationalFunction> modelComponents(smb.build(),pomdp.getStateLabeling());
//return std::make_shared<storm::models::sparse::Dtmc<storm::RationalFunction>>(modelComponents);
ChoiceSelector<ValueType> cs(pomdp);
return cs.transform(filter)->template as<storm::models::sparse::Pomdp<ValueType>>();
}

13
src/storm/models/sparse/Pomdp.cpp

@@ -6,12 +6,12 @@ namespace storm {
// Constructs a POMDP by copying the transition matrix, state labeling and reward models.
// All model data is stored by the Mdp base class, tagged with ModelType::Pomdp.
template <typename ValueType, typename RewardModelType>
Pomdp<ValueType, RewardModelType>::Pomdp(storm::storage::SparseMatrix<ValueType> const &transitionMatrix, storm::models::sparse::StateLabeling const &stateLabeling, std::unordered_map <std::string, RewardModelType> const &rewardModels) : Mdp<ValueType, RewardModelType>(transitionMatrix, stateLabeling, rewardModels, storm::models::ModelType::Pomdp) {
// Cache the number of observation classes (highest observation index + 1, see computeNrObservations).
computeNrObservations();
}
// Constructs a POMDP by taking ownership of the given transition matrix, state labeling
// and reward models.
// Fix: the rvalue-reference parameters were previously forwarded to the Mdp base
// constructor as named lvalues, which cannot bind to a move overload and therefore
// copies the (potentially large) sparse matrix. std::move restores the intended
// move semantics; it remains correct even if the base only accepts const& (the
// xvalue then binds to const& and is copied, as before).
template <typename ValueType, typename RewardModelType>
Pomdp<ValueType, RewardModelType>::Pomdp(storm::storage::SparseMatrix<ValueType> &&transitionMatrix, storm::models::sparse::StateLabeling &&stateLabeling, std::unordered_map <std::string, RewardModelType> &&rewardModels) : Mdp<ValueType, RewardModelType>(std::move(transitionMatrix), std::move(stateLabeling), std::move(rewardModels), storm::models::ModelType::Pomdp) {
    // Cache the number of observation classes derived from the observation entries.
    computeNrObservations();
}
template <typename ValueType, typename RewardModelType>
@@ -40,7 +40,7 @@ namespace storm {
highestEntry = entry;
}
}
nrObservations = highestEntry;
nrObservations = highestEntry + 1; // Smallest entry should be zero.
// In debug mode, ensure that every observability is used.
}
@@ -54,6 +54,13 @@ namespace storm {
return nrObservations;
}
// Read-only access to the stored observation values.
// @return constant reference to the vector holding one observation per state —
//         presumably indexed by state id; confirm against getObservation(state).
template<typename ValueType, typename RewardModelType>
std::vector<uint32_t> const& Pomdp<ValueType, RewardModelType>::getObservations() const {
    return this->observations;
}
template class Pomdp<double>;
template class Pomdp<storm::RationalNumber>;

2
src/storm/models/sparse/Pomdp.h

@@ -59,6 +59,8 @@ namespace storm {
uint64_t getNrObservations() const;
std::vector<uint32_t> const& getObservations() const;
protected:

10
src/storm/transformer/ChoiceSelector.cpp

@@ -1,5 +1,7 @@
#include <storm/exceptions/NotImplementedException.h>
#include "storm/transformer/ChoiceSelector.h"
#include "storm/models/sparse/Mdp.h"
#include "storm/models/sparse/Pomdp.h"
namespace storm {
namespace transformer {
@@ -18,9 +20,17 @@ namespace storm {
if (inputModel.hasChoiceOrigins()) {
newComponents.choiceOrigins = inputModel.getChoiceOrigins()->selectChoices(enabledActions);
}
STORM_LOG_THROW(inputModel.getType() != storm::models::ModelType::MarkovAutomaton, storm::exceptions::NotImplementedException, "Selecting choices is not implemented for MA.");
if(inputModel.getType() == storm::models::ModelType::Pomdp) {
newComponents.observabilityClasses = static_cast<storm::models::sparse::Pomdp<ValueType,RewardModelType> const&>(inputModel).getObservations();
return std::make_shared<storm::models::sparse::Pomdp<ValueType, RewardModelType>>(std::move(newComponents));
} else {
return std::make_shared<storm::models::sparse::Mdp<ValueType, RewardModelType>>(std::move(newComponents));
}
}
template class ChoiceSelector<double>;
template class ChoiceSelector<storm::RationalNumber>;
}
}
Loading…
Cancel
Save