
Sparse Bisimulation is still ongoing work.

Former-commit-id: 0b82c628a9
Branch: tempestpy_adaptions
Author: dehnert, 10 years ago
Commit: ca9dddb110
Changed files:
  1. src/settings/modules/GeneralSettings.cpp (4 changed lines)
  2. src/storage/BisimulationDecomposition.cpp (183 changed lines)
  3. src/storage/BisimulationDecomposition.h (11 changed lines)
  4. src/storage/Decomposition.cpp (5 changed lines)
  5. src/storage/Distribution.cpp (56 changed lines)
  6. src/storage/Distribution.h (32 changed lines)
  7. src/storage/StateBlock.cpp (5 changed lines)
  8. src/storage/StateBlock.h (7 changed lines)
  9. src/utility/cli.h (10 changed lines)
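
In broad strokes, the new BisimulationDecomposition code in this commit starts from a two-block initial partition of the DTMC's states and repeatedly splits blocks whose states induce different probability distributions over the blocks of the current partition. The following is a minimal, self-contained sketch of that signature-based splitting step, using plain standard-library containers and a hypothetical four-state DTMC instead of Storm's SparseMatrix and Distribution types (everything in it is illustrative, not part of the diff):

// Minimal sketch of one signature-based refinement step (not Storm's code):
// states of a block are grouped by their probability distribution over the
// blocks of the current partition; each distinct signature yields a sub-block.
#include <cstddef>
#include <iostream>
#include <map>
#include <vector>

int main() {
    // Hypothetical 4-state DTMC: outgoing probabilities per state.
    std::vector<std::map<std::size_t, double>> transitions = {
        {{1, 0.5}, {2, 0.5}},   // state 0
        {{3, 1.0}},             // state 1
        {{3, 1.0}},             // state 2
        {{3, 1.0}}};            // state 3
    // Initial partition: states 0-2 form block 0, state 3 forms block 1.
    std::vector<std::size_t> stateToBlock = {0, 0, 0, 1};

    // Signature of a state: probability mass per target block.
    auto signature = [&](std::size_t state) {
        std::map<std::size_t, double> sig;
        for (auto const& entry : transitions[state]) {
            sig[stateToBlock[entry.first]] += entry.second;
        }
        return sig;
    };

    // Group the states of block 0 by signature; each group is a new sub-block.
    std::map<std::map<std::size_t, double>, std::vector<std::size_t>> groups;
    for (std::size_t state = 0; state < stateToBlock.size(); ++state) {
        if (stateToBlock[state] == 0) {
            groups[signature(state)].push_back(state);
        }
    }
    std::cout << "block 0 splits into " << groups.size() << " sub-blocks" << std::endl;
}

Each group of states sharing a signature becomes a sub-block; the code in this commit additionally maintains a refinement queue so that predecessor blocks of a split block (found via the backward transitions) are scheduled for re-examination.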

src/settings/modules/GeneralSettings.cpp (4 changed lines)

@@ -74,7 +74,7 @@ namespace storm {
.addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "The file from which to read the LTL formulas.").addValidationFunctionString(storm::settings::ArgumentValidators::existingReadableFileValidator()).build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, counterexampleOptionName, false, "Generates a counterexample for the given PRCTL formulas if not satisfied by the model")
.addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "The name of the file to which the counterexample is to be written.").setDefaultValueString("-").setIsOptional(true).build()).setShortName(counterexampleOptionShortName).build());
this->addOption(storm::settings::OptionBuilder(moduleName, bisimulationOptionName, true, "Sets whether to perform bisimulation minimization.").setShortName(bisimulationOptionShortName).build());
this->addOption(storm::settings::OptionBuilder(moduleName, bisimulationOptionName, false, "Sets whether to perform bisimulation minimization.").setShortName(bisimulationOptionShortName).build());
this->addOption(storm::settings::OptionBuilder(moduleName, transitionRewardsOptionName, "", "If given, the transition rewards are read from this file and added to the explicit model. Note that this requires the model to be given as an explicit model (i.e., via --" + explicitOptionName + ").")
.addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "The file from which to read the transition rewards.").addValidationFunctionString(storm::settings::ArgumentValidators::existingReadableFileValidator()).build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, stateRewardsOptionName, false, "If given, the state rewards are read from this file and added to the explicit model. Note that this requires the model to be given as an explicit model (i.e., via --" + explicitOptionName + ").")
@@ -92,7 +92,7 @@ namespace storm {
.addArgument(storm::settings::ArgumentBuilder::createStringArgument("name", "The name of an LP solver. Available are: gurobi and glpk.").addValidationFunctionString(storm::settings::ArgumentValidators::stringInListValidator(lpSolvers)).setDefaultValueString("glpk").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, constantsOptionName, false, "Specifies the constant replacements to use in symbolic models. Note that Note that this requires the model to be given as an symbolic model (i.e., via --" + symbolicOptionName + ").").setShortName(constantsOptionShortName)
.addArgument(storm::settings::ArgumentBuilder::createStringArgument("values", "A comma separated list of constants and their value, e.g. a=1,b=2,c=3.").setDefaultValueString("").build()).build());
this->addOption(storm::settings::OptionBuilder(moduleName, statisticsOptionName, true, "Sets whether to display statistics if available.").setShortName(statisticsOptionShortName).build());
this->addOption(storm::settings::OptionBuilder(moduleName, statisticsOptionName, false, "Sets whether to display statistics if available.").setShortName(statisticsOptionShortName).build());
}
bool GeneralSettings::isHelpSet() const {
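
This hunk only registers the options; the accessor that consumers query is defined elsewhere in GeneralSettings. As a hedged sketch (an assumption, not part of this diff), isBisimulationSet() presumably follows the same getOption(...)/getHasOptionBeenSet() pattern as the module's other accessors such as isHelpSet():

// Hedged sketch, not part of this diff: assumed accessor following the usual
// GeneralSettings pattern of querying the registered option by its long name.
bool GeneralSettings::isBisimulationSet() const {
    return this->getOption(bisimulationOptionName).getHasOptionBeenSet();
}

The cli.h hunk at the end of this commit queries this flag via settings.isBisimulationSet().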

src/storage/BisimulationDecomposition.cpp (183 changed lines)

@@ -1,48 +1,189 @@
#include "src/storage/BisimulationDecomposition.h"
#include <queue>
#include <unordered_map>
#include <chrono>
namespace storm {
namespace storage {
template<typename ValueType>
BisimulationDecomposition<ValueType>::BisimulationDecomposition(storm::models::Dtmc<ValueType> const& model, bool weak) {
computeBisimulationEquivalenceClasses(model, weak);
BisimulationDecomposition<ValueType>::BisimulationDecomposition(storm::models::Dtmc<ValueType> const& dtmc, bool weak) {
computeBisimulationEquivalenceClasses(dtmc, weak);
}
template<typename ValueType>
void BisimulationDecomposition<ValueType>::computeBisimulationEquivalenceClasses(storm::models::Dtmc<ValueType> const& model, bool weak) {
void BisimulationDecomposition<ValueType>::computeBisimulationEquivalenceClasses(storm::models::Dtmc<ValueType> const& dtmc, bool weak) {
std::chrono::high_resolution_clock::time_point totalStart = std::chrono::high_resolution_clock::now();
// We start by computing the initial partition. In particular, we also keep a mapping of states to their blocks.
std::vector<std::size_t> stateToBlockMapping(model.getNumberOfStates());
storm::storage::BitVector labeledStates = model.getLabeledStates("one");
std::vector<std::size_t> stateToBlockMapping(dtmc.getNumberOfStates());
storm::storage::BitVector labeledStates = dtmc.getLabeledStates("observe0Greater1");
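// The initial partition separates the states carrying the (currently hard-coded) label "observe0Greater1" from all remaining states.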
this->blocks.emplace_back(labeledStates.begin(), labeledStates.end());
std::for_each(labeledStates.begin(), labeledStates.end(), [&] (storm::storage::sparse::state_type const& state) { stateToBlockMapping[state] = 0; } );
labeledStates.complement();
this->blocks.emplace_back(labeledStates.begin(), labeledStates.end());
std::for_each(labeledStates.begin(), labeledStates.end(), [&] (storm::storage::sparse::state_type const& state) { stateToBlockMapping[state] = 1; } );
// Create empty distributions for the two initial blocks.
std::vector<storm::storage::Distribution<ValueType>> distributions(2);
// Retrieve the backward transitions to allow for better checking of states that need to be re-examined.
storm::storage::SparseMatrix<ValueType> const& backwardTransitions = model.getBackwardTransitions();
storm::storage::SparseMatrix<ValueType> const& backwardTransitions = dtmc.getBackwardTransitions();
// Initially, both blocks are potential splitters.
std::queue<std::size_t> splitterQueue;
splitterQueue.push(0);
splitterQueue.push(1);
// As long as there is a splitter, we keep refining the current partition.
while (!splitterQueue.empty()) {
// Initially, both blocks are potential splitters. A splitter is marked as a pair in which the first entry
// is the ID of the parent block of the splitter and the second entry is the block ID of the splitter itself.
std::deque<std::size_t> refinementQueue;
storm::storage::BitVector blocksInRefinementQueue(this->size());
refinementQueue.push_back(0);
refinementQueue.push_back(1);
// As long as there are blocks to refine, well, refine them.
uint_fast64_t iteration = 0;
while (!refinementQueue.empty()) {
++iteration;
// Optionally sort the queue to potentially speed up the convergence.
// std::sort(refinementQueue.begin(), refinementQueue.end(), [=] (std::size_t const& a, std::size_t const& b) { return this->blocks[a].size() > this->blocks[b].size(); });
std::size_t currentBlock = refinementQueue.front();
refinementQueue.pop_front();
blocksInRefinementQueue.set(currentBlock, false);
splitBlock(dtmc, backwardTransitions, currentBlock, stateToBlockMapping, distributions, blocksInRefinementQueue, refinementQueue);
}
// While there is a splitter...
//
// check the predecessors of the splitter:
// * if they still have the same signature as before, then they remain unsplit
// * otherwise, split off the states that now behave differently
// and mark the smaller block as a splitter
std::chrono::high_resolution_clock::duration totalTime = std::chrono::high_resolution_clock::now() - totalStart;
//
std::cout << "Bisimulation quotient has " << this->blocks.size() << " blocks and took " << iteration << " iterations and " << std::chrono::duration_cast<std::chrono::milliseconds>(totalTime).count() << "ms." << std::endl;
}
template<typename ValueType>
std::size_t BisimulationDecomposition<ValueType>::splitPredecessorsGraphBased(storm::models::Dtmc<ValueType> const& dtmc, storm::storage::SparseMatrix<ValueType> const& backwardTransitions, std::size_t const& blockId, std::vector<std::size_t>& stateToBlockMapping, std::vector<storm::storage::Distribution<ValueType>>& distributions, storm::storage::BitVector& blocksInRefinementQueue, std::deque<std::size_t>& graphRefinementQueue) {
// This method tries to split the blocks of predecessors of the provided block by checking whether there is
// a transition into the current block or not.
std::unordered_map<std::size_t, typename BisimulationDecomposition<ValueType>::block_type> predecessorBlockToNewBlock;
// Now for each predecessor block which state could actually reach the current block.
for (auto const& state : this->blocks[blockId]) {
for (auto const& predecessorEntry : backwardTransitions.getRow(state)) {
storm::storage::sparse::state_type predecessor = predecessorEntry.getColumn();
predecessorBlockToNewBlock[stateToBlockMapping[predecessor]].insert(predecessor);
}
}
// Now, we can check for each predecessor block whether it needs to be split.
for (auto const& blockNewBlockPair : predecessorBlockToNewBlock) {
if (this->blocks[blockNewBlockPair.first].size() > blockNewBlockPair.second.size()) {
// Add the states which have a successor in the current block to a totally new block.
this->blocks.emplace_back(std::move(blockNewBlockPair.second));
// Compute the set of states that remains in the old block;
typename BisimulationDecomposition<ValueType>::block_type newBlock;
std::set_difference(this->blocks[blockId].begin(), this->blocks[blockId].end(), this->blocks.back().begin(), this->blocks.back().end(), std::inserter(newBlock, newBlock.begin()));
this->blocks[blockNewBlockPair.first] = std::move(newBlock);
blocksInRefinementQueue.resize(this->blocks.size());
// Add the smaller part of the old block to the queue.
std::size_t blockToAddToQueue = this->blocks.back().size() < this->blocks[blockNewBlockPair.first].size() ? this->blocks.size() - 1 : blockNewBlockPair.first;
if (!blocksInRefinementQueue.get(blockToAddToQueue)) {
graphRefinementQueue.push_back(blockToAddToQueue);
blocksInRefinementQueue.set(blockToAddToQueue, true);
}
}
}
// FIXME
return 0;
}
template<typename ValueType>
std::size_t BisimulationDecomposition<ValueType>::splitBlock(storm::models::Dtmc<ValueType> const& dtmc, storm::storage::SparseMatrix<ValueType> const& backwardTransitions, std::size_t const& blockId, std::vector<std::size_t>& stateToBlockMapping, std::vector<storm::storage::Distribution<ValueType>>& distributions, storm::storage::BitVector& blocksInRefinementQueue, std::deque<std::size_t>& refinementQueue) {
std::chrono::high_resolution_clock::time_point totalStart = std::chrono::high_resolution_clock::now();
std::unordered_map<storm::storage::Distribution<ValueType>, typename BisimulationDecomposition<ValueType>::block_type> distributionToNewBlocks;
// Traverse all states of the block and check whether they have different distributions.
for (auto const& state : this->blocks[blockId]) {
// Now construct the distribution of this state wrt. to the current partitioning.
storm::storage::Distribution<ValueType> distribution;
for (auto const& successorEntry : dtmc.getTransitionMatrix().getRow(state)) {
distribution.addProbability(static_cast<storm::storage::sparse::state_type>(stateToBlockMapping[successorEntry.getColumn()]), successorEntry.getValue());
}
// If the distribution already exists, we simply add the state. Otherwise, we open a new block.
auto distributionIterator = distributionToNewBlocks.find(distribution);
if (distributionIterator != distributionToNewBlocks.end()) {
distributionIterator->second.insert(state);
} else {
distributionToNewBlocks[distribution].insert(state);
}
}
// Now we are ready to split the block.
if (distributionToNewBlocks.size() == 1) {
// If there is just one behavior, we just set the distribution as the new one for this block.
distributions[blockId] = std::move(distributionToNewBlocks.begin()->first);
} else {
// In this case, we need to split the block.
typename BisimulationDecomposition<ValueType>::block_type tmpBlock;
auto distributionIterator = distributionToNewBlocks.begin();
distributions[blockId] = std::move(distributionIterator->first);
tmpBlock = std::move(distributionIterator->second);
std::swap(this->blocks[blockId], tmpBlock);
++distributionIterator;
// Remember the number of blocks prior to splitting for later use.
std::size_t beforeNumberOfBlocks = this->blocks.size();
for (; distributionIterator != distributionToNewBlocks.end(); ++distributionIterator) {
// In this case, we need to move the newly created block to the end of the list of actual blocks.
this->blocks.emplace_back(std::move(distributionIterator->second));
distributions.emplace_back(std::move(distributionIterator->first));
// Update the mapping of states to their blocks.
std::size_t newBlockId = this->blocks.size() - 1;
for (auto const& state : this->blocks.back()) {
stateToBlockMapping[state] = newBlockId;
}
}
// Now that we have split the block, we try to trigger a chain of graph-based splittings. That is, we
// try to split the predecessors of the current block by checking whether they have some transition
// to one given sub-block of the current block.
std::deque<std::size_t> localRefinementQueue;
storm::storage::BitVector blocksInLocalRefinementQueue(this->size());
localRefinementQueue.push_back(blockId);
for (std::size_t i = beforeNumberOfBlocks; i < this->blocks.size(); ++i) {
localRefinementQueue.push_back(i);
}
while (!localRefinementQueue.empty()) {
std::size_t currentBlock = localRefinementQueue.front();
localRefinementQueue.pop_front();
blocksInLocalRefinementQueue.set(currentBlock, false);
splitPredecessorsGraphBased(dtmc, backwardTransitions, blockId, stateToBlockMapping, distributions, blocksInLocalRefinementQueue, localRefinementQueue);
}
// Since we created some new blocks, we need to extend the bit vector storing the blocks in the
// refinement queue.
blocksInRefinementQueue.resize(blocksInRefinementQueue.size() + (distributionToNewBlocks.size() - 1));
// Insert blocks that possibly need a refinement into the queue.
for (auto const& state : tmpBlock) {
for (auto const& predecessor : backwardTransitions.getRow(state)) {
if (!blocksInRefinementQueue.get(stateToBlockMapping[predecessor.getColumn()])) {
blocksInRefinementQueue.set(stateToBlockMapping[predecessor.getColumn()]);
refinementQueue.push_back(stateToBlockMapping[predecessor.getColumn()]);
}
}
}
}
std::chrono::high_resolution_clock::duration totalTime = std::chrono::high_resolution_clock::now() - totalStart;
std::cout << "refinement of block " << blockId << " took " << std::chrono::duration_cast<std::chrono::milliseconds>(totalTime).count() << "ms." << std::endl;
return distributionToNewBlocks.size();
}
template class BisimulationDecomposition<double>;
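
A hedged usage sketch for the class instantiated above, mirroring the call added to src/utility/cli.h at the end of this commit (`dtmc` is an assumption: any already loaded storm::models::Dtmc<double>):

// Hypothetical usage; `dtmc` stands for an already constructed DTMC model.
storm::storage::BisimulationDecomposition<double> decomposition(dtmc, false); // false = strong bisimulation
// The base class Decomposition<StateBlock> exposes the number of blocks.
std::cout << "quotient has " << decomposition.size() << " blocks" << std::endl;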

src/storage/BisimulationDecomposition.h (11 changed lines)

@@ -1,8 +1,12 @@
#ifndef STORM_STORAGE_BISIMULATIONDECOMPOSITION_H_
#define STORM_STORAGE_BISIMULATIONDECOMPOSITION_H_
#include <queue>
#include <deque>
#include "src/storage/Decomposition.h"
#include "src/models/Dtmc.h"
#include "src/storage/Distribution.h"
namespace storm {
namespace storage {
@@ -13,15 +17,18 @@ namespace storm {
template <typename ValueType>
class BisimulationDecomposition : public Decomposition<StateBlock> {
public:
BisimulationDecomposition() = default;
/*!
* Decomposes the given DTMC into equivalence classes under weak or strong bisimulation.
*/
BisimulationDecomposition(storm::models::Dtmc<ValueType> const& model, bool weak);
BisimulationDecomposition(storm::models::Dtmc<ValueType> const& model, bool weak = false);
private:
void computeBisimulationEquivalenceClasses(storm::models::Dtmc<ValueType> const& model, bool weak);
std::size_t splitPredecessorsGraphBased(storm::models::Dtmc<ValueType> const& dtmc, storm::storage::SparseMatrix<ValueType> const& backwardTransitions, std::size_t const& block, std::vector<std::size_t>& stateToBlockMapping, std::vector<storm::storage::Distribution<ValueType>>& distributions, storm::storage::BitVector& blocksInRefinementQueue, std::deque<std::size_t>& refinementQueue);
std::size_t splitBlock(storm::models::Dtmc<ValueType> const& dtmc, storm::storage::SparseMatrix<ValueType> const& backwardTransitions, std::size_t const& block, std::vector<std::size_t>& stateToBlockMapping, std::vector<storm::storage::Distribution<ValueType>>& distributions, storm::storage::BitVector& blocksInRefinementQueue, std::deque<std::size_t>& refinementQueue);
};
}
}

src/storage/Decomposition.cpp (5 changed lines)

@@ -89,7 +89,10 @@ namespace storm {
out << "]";
return out;
}
template class Decomposition<StateBlock>;
template std::ostream& operator<<(std::ostream& out, Decomposition<StateBlock> const& decomposition);
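// The explicit instantiations for StateBlock are needed because the new BisimulationDecomposition derives from Decomposition<StateBlock>.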
template class Decomposition<StronglyConnectedComponent>;
template std::ostream& operator<<(std::ostream& out, Decomposition<StronglyConnectedComponent> const& decomposition);

src/storage/Distribution.cpp (56 changed lines)

@@ -1,13 +1,48 @@
#include "src/storage/Distribution.h"
#include <algorithm>
#include <iostream>
#include "src/settings/SettingsManager.h"
namespace storm {
namespace storage {
template<typename ValueType>
Distribution<ValueType>::Distribution() : hash(0) {
// Intentionally left empty.
}
template<typename ValueType>
bool Distribution<ValueType>::operator==(Distribution<ValueType> const& other) const {
// We need to check equality by ourselves, because we need to account for epsilon differences.
if (this->distribution.size() != other.distribution.size() || this->getHash() != other.getHash()) {
return false;
}
auto first1 = this->distribution.begin();
auto last1 = this->distribution.end();
auto first2 = other.distribution.begin();
auto last2 = other.distribution.end();
for (; first1 != last1; ++first1, ++first2) {
if (first1->first != first2->first) {
return false;
}
if (std::abs(first1->second - first2->second) > 1e-6) {
return false;
}
}
return true;
}
template<typename ValueType>
void Distribution<ValueType>::addProbability(storm::storage::sparse::state_type const& state, ValueType const& probability) {
if (this->distribution.find(state) == this->distribution.end()) {
this->hash += static_cast<std::size_t>(state);
}
this->distribution[state] += probability;
this->hash += static_cast<std::size_t>(state);
}
template<typename ValueType>
@@ -30,6 +65,23 @@ namespace storm {
return this->distribution.end();
}
template<typename ValueType>
std::size_t Distribution<ValueType>::getHash() const {
return this->hash ^ (this->distribution.size() << 8);
}
template<typename ValueType>
std::ostream& operator<<(std::ostream& out, Distribution<ValueType> const& distribution) {
out << "{";
for (auto const& entry : distribution) {
out << "[" << entry.second << ": " << entry.first << "], ";
}
out << "}";
return out;
}
template class Distribution<double>;
template std::ostream& operator<<(std::ostream& out, Distribution<double> const& distribution);
}
}
}

src/storage/Distribution.h (32 changed lines)

@@ -2,6 +2,7 @@
#define STORM_STORAGE_DISTRIBUTION_H_
#include <vector>
#include <ostream>
#include <boost/container/flat_map.hpp>
#include "src/storage/sparse/StateType.h"
@@ -19,7 +20,15 @@ namespace storm {
/*!
* Creates an empty distribution.
*/
Distribution() = default;
Distribution();
/*!
* Checks whether the two distributions specify the same probabilities to go to the same states.
*
* @param other The distribution with which the current distribution is to be compared.
* @return True iff the two distributions are equal.
*/
bool operator==(Distribution const& other) const;
/*!
* Assigns the given state the given probability under this distribution.
@@ -57,6 +66,13 @@ namespace storm {
*/
const_iterator end() const;
/*!
* Retrieves the hash value of the distribution.
*
* @return The hash value of the distribution.
*/
std::size_t getHash() const;
private:
// A list of states and the probabilities that are assigned to them.
container_type distribution;
@@ -65,7 +81,21 @@ namespace storm {
std::size_t hash;
};
template<typename ValueType>
std::ostream& operator<<(std::ostream& out, Distribution<ValueType> const& distribution);
}
}
namespace std {
template <typename ValueType>
struct hash<storm::storage::Distribution<ValueType>> {
std::size_t operator()(storm::storage::Distribution<ValueType> const& distribution) const {
return (distribution.getHash());
}
};
}
#endif /* STORM_STORAGE_DISTRIBUTION_H_ */
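
Two details of the Distribution changes work together: operator== tolerates per-entry differences of up to 1e-6, and getHash() depends only on which states carry probability mass (plus the number of entries), never on the probability values, so distributions that compare equal under the tolerance also hash equal. Below is a small self-contained sketch of this hash/equality contract, using a plain std::map instead of Storm's flat_map-based container (all names in it are illustrative):

// Self-contained sketch of the hash/equality contract described above.
#include <cmath>
#include <cstddef>
#include <iostream>
#include <map>

using Dist = std::map<std::size_t, double>;

// Hash only the support: sum of state indices mixed with the entry count.
std::size_t hashDist(Dist const& d) {
    std::size_t h = 0;
    for (auto const& entry : d) {
        h += entry.first;
    }
    return h ^ (d.size() << 8);
}

// Epsilon-tolerant equality, analogous to Distribution::operator==.
bool equalDist(Dist const& a, Dist const& b) {
    if (a.size() != b.size()) {
        return false;
    }
    auto it2 = b.begin();
    for (auto it1 = a.begin(); it1 != a.end(); ++it1, ++it2) {
        if (it1->first != it2->first || std::abs(it1->second - it2->second) > 1e-6) {
            return false;
        }
    }
    return true;
}

int main() {
    Dist a{{0, 0.5}, {1, 0.5}};
    Dist b{{0, 0.5000001}, {1, 0.4999999}};
    // Nearly identical probabilities: equal under the tolerance and hashing to
    // the same value, because probabilities never enter the hash.
    std::cout << std::boolalpha << equalDist(a, b) << " "
              << (hashDist(a) == hashDist(b)) << std::endl;
}

This consistency is what allows splitBlock() in BisimulationDecomposition.cpp to key an std::unordered_map directly on Distribution values via the std::hash specialization above.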

src/storage/StateBlock.cpp (5 changed lines)

@@ -30,6 +30,10 @@ namespace storm {
states.insert(state);
}
StateBlock::iterator StateBlock::insert(container_type::const_iterator iterator, value_type const& state) {
return states.insert(iterator, state);
}
void StateBlock::erase(value_type const& state) {
states.erase(state);
}
@@ -55,5 +59,6 @@ namespace storm {
out << block.getStates();
return out;
}
}
}

src/storage/StateBlock.h (7 changed lines)

@@ -101,6 +101,13 @@ namespace storm {
* @param state The state to add to this SCC.
*/
void insert(value_type const& state);
/*!
 * Inserts the given element into this SCC, using the given iterator as a position hint.
 *
 * @param iterator An iterator hinting at the position where to insert the state.
 * @param state The state to add to this SCC.
 * @return An iterator pointing to the inserted state.
 */
iterator insert(container_type::const_iterator iterator, value_type const& state);
/*!
* Removes the given element from this SCC.

src/utility/cli.h (10 changed lines)

@@ -43,6 +43,9 @@ log4cplus::Logger logger;
// Headers of adapters.
#include "src/adapters/ExplicitModelAdapter.h"
// Headers for model processing.
#include "src/storage/BisimulationDecomposition.h"
// Headers for counterexample generation.
#include "src/counterexamples/MILPMinimalLabelSetGenerator.h"
#include "src/counterexamples/SMTMinimalCommandSetGenerator.h"
@@ -254,6 +257,12 @@ namespace storm {
STORM_LOG_THROW(false, storm::exceptions::InvalidSettingsException, "No input model.");
}
if (settings.isBisimulationSet()) {
STORM_LOG_THROW(result->getType() == storm::models::DTMC, storm::exceptions::InvalidSettingsException, "Bisimulation minimization is currently only compatible with DTMCs.");
std::shared_ptr<storm::models::Dtmc<double>> dtmc = result->template as<storm::models::Dtmc<double>>();
storm::storage::BisimulationDecomposition<double> bisimulationDecomposition(*dtmc);
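// Note that the decomposition is only computed here; the quotient model is not yet built or substituted for `result` (still ongoing work per the commit message).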
}
return result;
}
@@ -298,6 +307,7 @@ namespace storm {
std::shared_ptr<storm::properties::prctl::PrctlFilter<double>> filterFormula = storm::parser::PrctlParser::parsePrctlFormula(settings.getPctlProperty());
generateCounterexample(model, filterFormula->getChild());
}
}
}
