
Fixed some bugs in bit vector and vector set that prevented the MEC decomposition from functioning correctly.

Former-commit-id: 51b6d7eb18
tempestpy_adaptions
dehnert 11 years ago
parent
commit 2cbdf56267
  1. src/models/AbstractNondeterministicModel.h (2 changed lines)
  2. src/models/MarkovAutomaton.h (225 changed lines)
  3. src/parser/MarkovAutomatonSparseTransitionParser.cpp (23 changed lines)
  4. src/storage/BitVector.h (4 changed lines)
  5. src/storage/MaximalEndComponentDecomposition.cpp (6 changed lines)
  6. src/storage/VectorSet.cpp (4 changed lines)
  7. src/storm.cpp (4 changed lines)

src/models/AbstractNondeterministicModel.h (2 changed lines)

@@ -237,7 +237,7 @@ namespace storm {
this->choiceLabeling.reset(newChoiceLabeling);
}
private:
protected:
/*! A vector of indices mapping states to the choices (rows) in the transition matrix. */
std::vector<uint_fast64_t> nondeterministicChoiceIndices;
};

src/models/MarkovAutomaton.h (225 changed lines)

@@ -28,7 +28,7 @@ namespace storm {
boost::optional<std::vector<T>> const& optionalStateRewardVector, boost::optional<storm::storage::SparseMatrix<T>> const& optionalTransitionRewardMatrix,
boost::optional<std::vector<storm::storage::VectorSet<uint_fast64_t>>> const& optionalChoiceLabeling)
: AbstractNondeterministicModel<T>(transitionMatrix, stateLabeling, nondeterministicChoiceIndices, optionalStateRewardVector, optionalTransitionRewardMatrix, optionalChoiceLabeling),
markovianStates(markovianStates), exitRates(exitRates), isClosed(false) {
markovianStates(markovianStates), exitRates(exitRates), closed(false) {
if (this->hasTransitionRewards()) {
if (!this->getTransitionRewardMatrix().isSubmatrixOf(this->getTransitionMatrix())) {
LOG4CPLUS_ERROR(logger, "Transition reward matrix is not a submatrix of the transition matrix, i.e. there are rewards for transitions that do not exist.");
@@ -45,7 +45,7 @@ namespace storm {
boost::optional<storm::storage::SparseMatrix<T>>&& optionalTransitionRewardMatrix,
boost::optional<std::vector<storm::storage::VectorSet<uint_fast64_t>>>&& optionalChoiceLabeling)
: AbstractNondeterministicModel<T>(std::move(transitionMatrix), std::move(stateLabeling), std::move(nondeterministicChoiceIndices), std::move(optionalStateRewardVector), std::move(optionalTransitionRewardMatrix),
std::move(optionalChoiceLabeling)), markovianStates(markovianStates), exitRates(std::move(exitRates)), isClosed(false) {
std::move(optionalChoiceLabeling)), markovianStates(markovianStates), exitRates(std::move(exitRates)), closed(false) {
if (this->hasTransitionRewards()) {
if (!this->getTransitionRewardMatrix().isSubmatrixOf(this->getTransitionMatrix())) {
LOG4CPLUS_ERROR(logger, "Transition reward matrix is not a submatrix of the transition matrix, i.e. there are rewards for transitions that do not exist.");
@@ -54,11 +54,11 @@ namespace storm {
}
}
MarkovAutomaton(MarkovAutomaton<T> const& markovAutomaton) : AbstractNondeterministicModel<T>(markovAutomaton), markovianStates(markovAutomaton.markovianStates), exitRates(markovAutomaton.exitRates), isClosed(markovAutomaton.isClosed) {
MarkovAutomaton(MarkovAutomaton<T> const& markovAutomaton) : AbstractNondeterministicModel<T>(markovAutomaton), markovianStates(markovAutomaton.markovianStates), exitRates(markovAutomaton.exitRates), closed(markovAutomaton.closed) {
// Intentionally left empty.
}
MarkovAutomaton(MarkovAutomaton<T>&& markovAutomaton) : AbstractNondeterministicModel<T>(std::move(markovAutomaton)), markovianStates(std::move(markovAutomaton.markovianStates)), exitRates(std::move(markovAutomaton.exitRates)), isClosed(markovAutomaton.isClosed) {
MarkovAutomaton(MarkovAutomaton<T>&& markovAutomaton) : AbstractNondeterministicModel<T>(std::move(markovAutomaton)), markovianStates(std::move(markovAutomaton.markovianStates)), exitRates(std::move(markovAutomaton.exitRates)), closed(markovAutomaton.closed) {
// Intentionally left empty.
}
@@ -69,88 +69,81 @@ namespace storm {
storm::models::ModelType getType() const {
return MA;
}
virtual void writeDotToStream(std::ostream& outStream, bool includeLabeling = true, storm::storage::BitVector const* subsystem = nullptr, std::vector<T> const* firstValue = nullptr, std::vector<T> const* secondValue = nullptr, std::vector<uint_fast64_t> const* stateColoring = nullptr, std::vector<std::string> const* colors = nullptr, std::vector<uint_fast64_t>* scheduler = nullptr, bool finalizeOutput = true) const override {
AbstractModel<T>::writeDotToStream(outStream, includeLabeling, subsystem, firstValue, secondValue, stateColoring, colors, scheduler, false);
// Write the probability distributions for all the states.
auto rowIt = this->transitionMatrix.begin();
for (uint_fast64_t state = 0, highestStateIndex = this->getNumberOfStates() - 1; state <= highestStateIndex; ++state) {
uint_fast64_t rowCount = this->getNondeterministicChoiceIndices()[state + 1] - this->getNondeterministicChoiceIndices()[state];
bool highlightChoice = true;
// For this, we need to iterate over all available nondeterministic choices in the current state.
for (uint_fast64_t row = 0; row < rowCount; ++row, ++rowIt) {
if (scheduler != nullptr) {
// If the scheduler picked the current choice, we will not make it dotted, but highlight it.
if ((*scheduler)[state] == row) {
highlightChoice = true;
} else {
highlightChoice = false;
}
}
// If it's not a Markovian state or the current row is the first choice for this state, then we
// are dealing with a probabilistic choice.
if (!markovianStates.get(state) || row != 0) {
// For each nondeterministic choice, we draw an arrow to an intermediate node to better display
// the grouping of transitions.
outStream << "\t\"" << state << "c" << row << "\" [shape = \"point\"";
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << ", fillcolor=\"red\"";
}
}
outStream << "];" << std::endl;
bool isClosed() const {
return closed;
}
bool isHybridState(uint_fast64_t state) const {
return isMarkovianState(state) && (this->getNondeterministicChoiceIndices()[state + 1] - this->getNondeterministicChoiceIndices()[state] > 1);
}
outStream << "\t" << state << " -> \"" << state << "c" << row << "\"";
bool isMarkovianState(uint_fast64_t state) const {
return this->markovianStates.get(state);
}
bool isProbabilisticState(uint_fast64_t state) const {
return !this->markovianStates.get(state);
}
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << " [color=\"red\", penwidth = 2]";
} else {
outStream << " [style = \"dotted\"]";
}
}
outStream << ";" << std::endl;
void close() {
if (!closed) {
// First, count the number of hybrid states to know how many Markovian choices
// will be removed.
uint_fast64_t numberOfHybridStates = 0;
for (uint_fast64_t state = 0; state < this->getNumberOfStates(); ++state) {
if (this->isHybridState(state)) {
++numberOfHybridStates;
}
}
// Then compute how many rows the new matrix is going to have.
uint_fast64_t newNumberOfRows = this->getNumberOfChoices() - numberOfHybridStates;
// Create the matrix for the new transition relation and the corresponding nondeterministic choice vector.
storm::storage::SparseMatrix<T> newTransitionMatrix(newNumberOfRows, this->getNumberOfStates());
newTransitionMatrix.initialize();
std::vector<uint_fast64_t> newNondeterministicChoiceIndices(this->getNumberOfStates() + 1);
// Now copy over all choices that need to be kept.
uint_fast64_t currentChoice = 0;
for (uint_fast64_t state = 0; state < this->getNumberOfStates(); ++state) {
// Record the new beginning of choices of this state.
newNondeterministicChoiceIndices[state] = currentChoice;
typename storm::storage::SparseMatrix<T>::ConstRowIterator rowIt = this->transitionMatrix.begin(this->nondeterministicChoiceIndices[state]), rowIte = this->transitionMatrix.end(this->nondeterministicChoiceIndices[state + 1] - 1);
// If we are currently treating a hybrid state, we need to skip its first choice.
if (this->isHybridState(state)) {
++rowIt;
// Now draw all probabilistic arcs that belong to this nondeterministic choice.
for (auto transitionIt = rowIt.begin(), transitionIte = rowIt.end(); transitionIt != transitionIte; ++transitionIt) {
if (subsystem == nullptr || subsystem->get(transitionIt.column())) {
outStream << "\t\"" << state << "c" << row << "\" -> " << transitionIt.column() << " [ label= \"" << transitionIt.value() << "\" ]";
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << " [color=\"red\", penwidth = 2]";
} else {
outStream << " [style = \"dotted\"]";
}
}
outStream << ";" << std::endl;
}
}
} else {
// In this case we are emitting a Markovian choice, so draw the arrows directly to the target states.
for (auto transitionIt = rowIt.begin(), transitionIte = rowIt.end(); transitionIt != transitionIte; ++transitionIt) {
if (subsystem == nullptr || subsystem->get(transitionIt.column())) {
outStream << "\t\"" << state << "\" -> " << transitionIt.column() << " [ label= \"" << transitionIt.value() << "\" ]";
}
// Remove the Markovian state marking.
this->markovianStates.set(state, false);
}
for (; rowIt != rowIte; ++rowIt) {
for (typename storm::storage::SparseMatrix<T>::ConstIterator succIt = rowIt.begin(), succIte = rowIt.end(); succIt != succIte; ++succIt) {
newTransitionMatrix.insertNextValue(currentChoice, succIt.column(), succIt.value());
}
++currentChoice;
}
}
}
if (finalizeOutput) {
outStream << "}" << std::endl;
// Put a sentinel element at the end.
newNondeterministicChoiceIndices.back() = currentChoice;
// Finalize the matrix and put the new transition data in place.
newTransitionMatrix.finalize();
this->transitionMatrix = std::move(newTransitionMatrix);
this->nondeterministicChoiceIndices = std::move(newNondeterministicChoiceIndices);
// Mark the automaton as closed.
closed = true;
}
}
virtual std::shared_ptr<AbstractModel<T>> applyScheduler(storm::storage::Scheduler const& scheduler) const override {
if (!isClosed) {
if (!closed) {
throw storm::exceptions::InvalidStateException() << "Applying a scheduler to a non-closed Markov automaton is illegal; it needs to be closed first.";
}
@@ -165,12 +158,90 @@ namespace storm {
return std::shared_ptr<AbstractModel<T>>(new MarkovAutomaton(newTransitionMatrix, this->getStateLabeling(), nondeterministicChoiceIndices, markovianStates, exitRates, this->hasStateRewards() ? this->getStateRewardVector() : boost::optional<std::vector<T>>(), this->hasTransitionRewards() ? this->getTransitionRewardMatrix() : boost::optional<storm::storage::SparseMatrix<T>>(), this->hasChoiceLabeling() ? this->getChoiceLabeling() : boost::optional<std::vector<storm::storage::VectorSet<uint_fast64_t>>>()));
}
virtual void writeDotToStream(std::ostream& outStream, bool includeLabeling = true, storm::storage::BitVector const* subsystem = nullptr, std::vector<T> const* firstValue = nullptr, std::vector<T> const* secondValue = nullptr, std::vector<uint_fast64_t> const* stateColoring = nullptr, std::vector<std::string> const* colors = nullptr, std::vector<uint_fast64_t>* scheduler = nullptr, bool finalizeOutput = true) const override {
AbstractModel<T>::writeDotToStream(outStream, includeLabeling, subsystem, firstValue, secondValue, stateColoring, colors, scheduler, false);
// Write the probability distributions for all the states.
auto rowIt = this->transitionMatrix.begin();
for (uint_fast64_t state = 0, highestStateIndex = this->getNumberOfStates() - 1; state <= highestStateIndex; ++state) {
uint_fast64_t rowCount = this->getNondeterministicChoiceIndices()[state + 1] - this->getNondeterministicChoiceIndices()[state];
bool highlightChoice = true;
// For this, we need to iterate over all available nondeterministic choices in the current state.
for (uint_fast64_t row = 0; row < rowCount; ++row, ++rowIt) {
if (scheduler != nullptr) {
// If the scheduler picked the current choice, we will not make it dotted, but highlight it.
if ((*scheduler)[state] == row) {
highlightChoice = true;
} else {
highlightChoice = false;
}
}
// If it's not a Markovian state or the current row is the first choice for this state, then we
// are dealing with a probabilistic choice.
if (!markovianStates.get(state) || row != 0) {
// For each nondeterministic choice, we draw an arrow to an intermediate node to better display
// the grouping of transitions.
outStream << "\t\"" << state << "c" << row << "\" [shape = \"point\"";
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << ", fillcolor=\"red\"";
}
}
outStream << "];" << std::endl;
outStream << "\t" << state << " -> \"" << state << "c" << row << "\"";
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << " [color=\"red\", penwidth = 2]";
} else {
outStream << " [style = \"dotted\"]";
}
}
outStream << ";" << std::endl;
// Now draw all probabilistic arcs that belong to this nondeterministic choice.
for (auto transitionIt = rowIt.begin(), transitionIte = rowIt.end(); transitionIt != transitionIte; ++transitionIt) {
if (subsystem == nullptr || subsystem->get(transitionIt.column())) {
outStream << "\t\"" << state << "c" << row << "\" -> " << transitionIt.column() << " [ label= \"" << transitionIt.value() << "\" ]";
// If we were given a scheduler to highlight, we do so now.
if (scheduler != nullptr) {
if (highlightChoice) {
outStream << " [color=\"red\", penwidth = 2]";
} else {
outStream << " [style = \"dotted\"]";
}
}
outStream << ";" << std::endl;
}
}
} else {
// In this case we are emitting a Markovian choice, so draw the arrows directly to the target states.
for (auto transitionIt = rowIt.begin(), transitionIte = rowIt.end(); transitionIt != transitionIte; ++transitionIt) {
if (subsystem == nullptr || subsystem->get(transitionIt.column())) {
outStream << "\t\"" << state << "\" -> " << transitionIt.column() << " [ label= \"" << transitionIt.value() << "\" ]";
}
}
}
}
}
if (finalizeOutput) {
outStream << "}" << std::endl;
}
}
private:
storm::storage::BitVector markovianStates;
std::vector<T> exitRates;
bool isClosed;
bool closed;
};
}
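The new close() method performs the usual closing of a Markov automaton under the maximal-progress assumption: in every hybrid state (a Markovian state with more than one choice) the single Markovian choice is dropped, so the closed transition matrix has getNumberOfChoices() minus the number of hybrid states rows. Below is a minimal standalone sketch of that row-counting step; closedRowCount and the choiceCounts vector are hypothetical stand-ins for storm's nondeterministic choice indices.

#include <cassert>
#include <cstdint>
#include <vector>

// Row count of the closed transition matrix: every hybrid state (Markovian
// and more than one choice) loses exactly its one Markovian row.
uint_fast64_t closedRowCount(std::vector<uint_fast64_t> const& choiceCounts, std::vector<bool> const& markovian) {
    uint_fast64_t rows = 0;
    for (uint_fast64_t state = 0; state < choiceCounts.size(); ++state) {
        bool hybrid = markovian[state] && choiceCounts[state] > 1;
        rows += choiceCounts[state] - (hybrid ? 1 : 0);
    }
    return rows;
}

int main() {
    // State 0 is hybrid (Markovian, 3 choices), state 1 is purely probabilistic.
    assert(closedRowCount({3, 2}, {true, false}) == 4);
    return 0;
}

The diff also renames the member isClosed to closed, which frees the name for the new isClosed() accessor, and applyScheduler now refuses to run on an automaton that has not been closed.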

src/parser/MarkovAutomatonSparseTransitionParser.cpp (23 changed lines)

@@ -37,6 +37,9 @@ namespace storm {
LOG4CPLUS_ERROR(logger, "Found deadlock states (e.g. " << lastsource + 1 << ") during parsing. Please fix them or set the appropriate flag.");
throw storm::exceptions::WrongFormatException() << "Found deadlock states (e.g. " << lastsource + 1 << ") during parsing. Please fix them or set the appropriate flag.";
}
} else if (source < lastsource) {
LOG4CPLUS_ERROR(logger, "Illegal state choice order. A choice of state " << source << " appears at an illegal position.");
throw storm::exceptions::WrongFormatException() << "Illegal state choice order. A choice of state " << source << " appears at an illegal position.";
}
++result.numberOfChoices;
@@ -78,6 +81,7 @@ namespace storm {
// Now that we have the source state and the information whether or not the current choice is probabilistic or Markovian, we need to read the list of successors and the probabilities/rates.
bool hasSuccessorState = false;
bool encounteredNewDistribution = false;
uint_fast64_t lastSuccessorState = 0;
// At this point, we need to check whether there is an additional successor or we have reached the next choice for the same or a different state.
do {
@@ -91,9 +95,6 @@ namespace storm {
encounteredEOF = true;
}
} else if (buf[0] == '*') {
// We need to record that we found at least one successor state for the current choice.
hasSuccessorState = true;
// As we have encountered a "*", we know that there is an additional successor state for the current choice.
++buf;
@@ -102,14 +103,22 @@ namespace storm {
if (target > result.highestStateIndex) {
result.highestStateIndex = target;
}
if (hasSuccessorState && target <= lastSuccessorState) {
LOG4CPLUS_ERROR(logger, "Illegal transition order for source state " << source << ".");
throw storm::exceptions::WrongFormatException() << "Illegal transition order for source state " << source << ".";
}
// And the corresponding probability/rate.
double val = checked_strtod(buf, &buf);
if (val <= 0.0) {
LOG4CPLUS_ERROR(logger, "Illegal probability/rate value " << val << ".");
throw storm::exceptions::WrongFormatException() << "Illegal probability/rate value " << val << ".";
LOG4CPLUS_ERROR(logger, "Illegal probability/rate value for transition from " << source << " to " << target << ": " << val << ".");
throw storm::exceptions::WrongFormatException() << "Illegal probability/rate value for transition from " << source << " to " << target << ": " << val << ".";
}
// We need to record that we found at least one successor state for the current choice.
hasSuccessorState = true;
lastSuccessorState = target;
// As we found a new successor, we need to increase the number of nonzero entries.
++result.numberOfNonzeroEntries;
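The two new parser checks enforce an input ordering: all choices of a state have to appear as one consecutive block (the source index may never decrease), and within a single choice the successor states must be strictly increasing. A minimal sketch of both checks over an already-tokenized transition list follows; the Transition struct and checkOrdering are hypothetical helpers, not the parser's actual buffer-based interface.

#include <cstdint>
#include <stdexcept>
#include <vector>

struct Transition {
    uint_fast64_t source;   // state the choice belongs to
    uint_fast64_t target;   // successor state of the transition
    bool startsNewChoice;   // true if this entry opens a new choice of 'source'
};

// Reject out-of-order input, mirroring the checks added to the parser.
void checkOrdering(std::vector<Transition> const& transitions) {
    uint_fast64_t lastSource = 0;
    uint_fast64_t lastTarget = 0;
    bool hasSuccessorState = false;
    for (Transition const& t : transitions) {
        if (t.source < lastSource) {
            throw std::invalid_argument("Illegal state choice order.");
        }
        if (t.source > lastSource || t.startsNewChoice) {
            hasSuccessorState = false;   // a new choice starts a fresh successor list
        }
        if (hasSuccessorState && t.target <= lastTarget) {
            throw std::invalid_argument("Illegal transition order for source state.");
        }
        lastSource = t.source;
        lastTarget = t.target;
        hasSuccessorState = true;
    }
}

int main() {
    // One choice of state 0 with two increasing successors, then a choice of state 1: accepted.
    checkOrdering({{0, 1, true}, {0, 2, false}, {1, 0, true}});
    return 0;
}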

src/storage/BitVector.h (4 changed lines)

@@ -580,10 +580,10 @@ public:
bool empty() const {
for (uint_fast64_t i = 0; i < this->bucketCount; ++i) {
if (this->bucketArray[i] != 0) {
return true;
return false;
}
}
return false;
return true;
}
/*!
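The empty() fix inverts the two return values: a nonzero bucket proves that some bit is set, so the method must return false there and return true only after all buckets were found to be zero. A minimal standalone sketch of the corrected logic over a plain bucket array (isEmpty is a hypothetical helper, not the storm BitVector API):

#include <cassert>
#include <cstdint>
#include <vector>

// A bit vector is empty exactly when every 64-bit bucket is zero.
bool isEmpty(std::vector<uint_fast64_t> const& buckets) {
    for (uint_fast64_t bucket : buckets) {
        if (bucket != 0) {
            return false;   // found a set bit
        }
    }
    return true;
}

int main() {
    assert(isEmpty({0, 0, 0}));
    assert(!isEmpty({0, 1ull << 3, 0}));
    return 0;
}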

src/storage/MaximalEndComponentDecomposition.cpp (6 changed lines)

@@ -112,8 +112,10 @@ namespace storm {
if (mecChanged) {
std::list<StateBlock>::const_iterator eraseIterator(mecIterator);
for (StateBlock& scc : sccs) {
endComponentStateSets.push_back(std::move(scc));
++mecIterator;
if (!scc.empty()) {
endComponentStateSets.push_back(std::move(scc));
++mecIterator;
}
}
endComponentStateSets.erase(eraseIterator);
} else {
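The decomposition fix skips empty sub-SCCs when a candidate end component is refined, so only non-empty state sets re-enter the worklist and mecIterator is advanced only for blocks that were actually added. A minimal sketch of the guarded append, with a set of state indices standing in for storm's StateBlock:

#include <cassert>
#include <cstdint>
#include <list>
#include <set>
#include <vector>

using StateBlock = std::set<uint_fast64_t>;   // stand-in for storm's StateBlock

// Append only the non-empty candidate blocks, mirroring the added guard.
void appendNonEmptyBlocks(std::list<StateBlock>& worklist, std::vector<StateBlock>& candidates) {
    for (StateBlock& block : candidates) {
        if (!block.empty()) {
            worklist.push_back(std::move(block));
        }
    }
}

int main() {
    std::list<StateBlock> worklist;
    std::vector<StateBlock> candidates = {{0, 1}, {}, {4}};
    appendNonEmptyBlocks(worklist, candidates);
    assert(worklist.size() == 2);   // the empty block was not added
    return 0;
}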

src/storage/VectorSet.cpp (4 changed lines)

@@ -241,7 +241,7 @@ namespace storm {
template<typename ValueType>
void VectorSet<ValueType>::erase(VectorSet<ValueType> const& eraseSet) {
if (eraseSet.size() > 0) {
if (eraseSet.size() > 0 && this->size() > 0) {
ensureSet();
eraseSet.ensureSet();
@@ -249,7 +249,7 @@ namespace storm {
while (setIt != eraseSet.data.rend() && *setIt > *delIt) {
++setIt;
}
if (setIt != data.rend()) break;
if (setIt == data.rend()) break;
if (*setIt == *delIt) {
data.erase((setIt + 1).base());
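The erase fix adds a guard for the case that this set is itself empty (the old code only checked the erase set) and corrects the early exit, breaking once the reverse iterator has reached rend() rather than while it is still valid. The intended effect is an in-place set difference on sorted, duplicate-free vectors; below is a minimal sketch of that behavior using std::set_difference, not a reconstruction of the reverse-iterator loop, and eraseAll is a hypothetical free function.

#include <algorithm>
#include <cassert>
#include <iterator>
#include <utility>
#include <vector>

// Remove from 'data' every element that also occurs in 'eraseSet'.
// Both vectors are assumed to be sorted and free of duplicates.
void eraseAll(std::vector<int>& data, std::vector<int> const& eraseSet) {
    if (data.empty() || eraseSet.empty()) {
        return;   // mirrors the added size guard
    }
    std::vector<int> result;
    std::set_difference(data.begin(), data.end(), eraseSet.begin(), eraseSet.end(), std::back_inserter(result));
    data = std::move(result);
}

int main() {
    std::vector<int> data = {1, 3, 5, 7};
    eraseAll(data, {3, 7, 9});
    assert((data == std::vector<int>{1, 5}));
    return 0;
}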

src/storm.cpp (4 changed lines)

@@ -469,7 +469,9 @@ int main(const int argc, const char* argv[]) {
break;
case storm::models::MA: {
LOG4CPLUS_INFO(logger, "Model is a Markov automaton.");
storm::storage::MaximalEndComponentDecomposition<double> mecDecomposition(*parser.getModel<storm::models::MarkovAutomaton<double>>());
std::shared_ptr<storm::models::MarkovAutomaton<double>> markovAutomaton = parser.getModel<storm::models::MarkovAutomaton<double>>();
markovAutomaton->close();
storm::storage::MaximalEndComponentDecomposition<double> mecDecomposition(*markovAutomaton);
break;
}
case storm::models::Unknown:
