Symbolic bound tightening in NLR #249

Merged · 26 commits · Apr 27, 2020

Commits
0294146  Merge pull request #2 from NeuralNetworkVerification/master (guykatzz, Apr 1, 2020)
49d06bf  Merge pull request #4 from NeuralNetworkVerification/master (guykatzz, Apr 20, 2020)
3c03339  test (guykatzz, Apr 20, 2020)
bdfb642  basic interval artihmetic bound propagation (guykatzz, Apr 20, 2020)
8f71acb  another test case (guykatzz, Apr 20, 2020)
34b56f5  initialize an SBT within the NLR. Pass topology, weights and biases (GuyKatzHuji, Apr 21, 2020)
1cec6f1  wip (GuyKatzHuji, Apr 21, 2020)
1919617  SBT functionality into NLR (GuyKatzHuji, Apr 22, 2020)
cac046a  wip (GuyKatzHuji, Apr 22, 2020)
268279a  bug fix (GuyKatzHuji, Apr 23, 2020)
fe44808  Merge pull request #5 from NeuralNetworkVerification/master (guykatzz, Apr 23, 2020)
be21eca  Merge pull request #6 from guykatzz/master (guykatzz, Apr 23, 2020)
8ccc3b0  cleanup (GuyKatzHuji, Apr 23, 2020)
c306f42  Merge branch 'nlr2' of https://github.com/guykatzz/Marabou into nlr2 (GuyKatzHuji, Apr 23, 2020)
7aaa6b0  cleanup (GuyKatzHuji, Apr 23, 2020)
0fa06b0  wip (GuyKatzHuji, Apr 23, 2020)
244bb4f  handle eliminated relus (guykatzz, Apr 23, 2020)
ee8321c  cleanup: remove symbolic bound tightener (guykatzz, Apr 23, 2020)
a599c8a  oops (guykatzz, Apr 23, 2020)
09de4ce  additional cleanup (guykatzz, Apr 23, 2020)
212c8fa  oops (guykatzz, Apr 23, 2020)
7be76e0  first unit test (guykatzz, Apr 24, 2020)
34d26b4  unit tests for NLR (guykatzz, Apr 24, 2020)
f08849c  Merge pull request #8 from NeuralNetworkVerification/master (guykatzz, Apr 26, 2020)
7bcc0bb  Merge branch 'nlr2' (GuyKatzHuji, Apr 26, 2020)
867ab66  Merge branch 'master' into nlr2 (GuyKatzHuji, Apr 26, 2020)
21 changes: 10 additions & 11 deletions maraboupy/MarabouCore.cpp
@@ -237,7 +237,7 @@ PYBIND11_MODULE(MarabouCore, m) {
.def("markInputVariable", &InputQuery::markInputVariable)
.def("markOutputVariable", &InputQuery::markOutputVariable)
.def("outputVariableByIndex", &InputQuery::outputVariableByIndex)
.def("setSymbolicBoundTightener", &InputQuery::setSymbolicBoundTightener);
.def("setNetworkLevelReasoner", &InputQuery::setNetworkLevelReasoner);
py::class_<MarabouOptions>(m, "Options")
.def(py::init())
.def_readwrite("_numWorkers", &MarabouOptions::_numWorkers)
@@ -248,17 +248,16 @@ PYBIND11_MODULE(MarabouCore, m) {
.def_readwrite("_timeoutFactor", &MarabouOptions::_timeoutFactor)
.def_readwrite("_verbosity", &MarabouOptions::_verbosity)
.def_readwrite("_dnc", &MarabouOptions::_dnc);
py::class_<SymbolicBoundTightener, std::unique_ptr<SymbolicBoundTightener,py::nodelete>>(m, "SymbolicBoundTightener")
py::class_<NetworkLevelReasoner, std::unique_ptr<NetworkLevelReasoner,py::nodelete>>(m, "NetworkLevelReasoner")
.def(py::init())
.def("setNumberOfLayers", &SymbolicBoundTightener::setNumberOfLayers)
.def("setLayerSize", &SymbolicBoundTightener::setLayerSize)
.def("allocateWeightAndBiasSpace", &SymbolicBoundTightener::allocateWeightAndBiasSpace)
.def("setBias", &SymbolicBoundTightener::setBias)
.def("setWeight", &SymbolicBoundTightener::setWeight)
.def("setInputLowerBound", &SymbolicBoundTightener::setInputLowerBound)
.def("setInputUpperBound", &SymbolicBoundTightener::setInputUpperBound)
.def("setReluBVariable", &SymbolicBoundTightener::setReluBVariable)
.def("setReluFVariable", &SymbolicBoundTightener::setReluFVariable);
.def("setNumberOfLayers", &NetworkLevelReasoner::setNumberOfLayers)
.def("setLayerSize", &NetworkLevelReasoner::setLayerSize)
.def("setNeuronActivationFunction", &NetworkLevelReasoner::setNeuronActivationFunction)
.def("setBias", &NetworkLevelReasoner::setBias)
.def("setWeight", &NetworkLevelReasoner::setWeight)
.def("allocateMemoryByTopology", &NetworkLevelReasoner::allocateMemoryByTopology)
.def("setWeightedSumVariable", &NetworkLevelReasoner::setWeightedSumVariable)
.def("setActivationResultVariable", &NetworkLevelReasoner::setActivationResultVariable);
py::class_<Equation> eq(m, "Equation");
eq.def(py::init());
eq.def(py::init<Equation::EquationType>());
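The rebinding swaps the old SymbolicBoundTightener surface for the NetworkLevelReasoner's topology, weight, and variable-indexing setters. A minimal sketch of driving the new bindings directly from Python — the network (1 input, 2 hidden ReLUs, 1 output) and all values are hypothetical, and the call order mirrors createNLR below:

```python
from maraboupy import MarabouCore

RELU = 0  # activation code, as used in MarabouNetworkNNet.createNLR below

nlr = MarabouCore.NetworkLevelReasoner()
nlr.setNumberOfLayers(3)                  # input layer, one hidden layer, output layer
for layer, size in enumerate([1, 2, 1]):
    nlr.setLayerSize(layer, size)
nlr.allocateMemoryByTopology()            # allocate only after topology is declared

nlr.setWeight(0, 0, 0, 1.0)               # (layer, source, target, value)
nlr.setWeight(0, 0, 1, -1.0)
nlr.setWeight(1, 0, 0, 1.0)
nlr.setWeight(1, 1, 0, 1.0)
nlr.setBias(1, 0, 0.5)                    # (layer, node, value)
nlr.setNeuronActivationFunction(0, 0, RELU)
nlr.setNeuronActivationFunction(0, 1, RELU)
```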
82 changes: 43 additions & 39 deletions maraboupy/MarabouNetworkNNet.py
@@ -24,26 +24,26 @@ class MarabouNetworkNNet(MarabouNetwork.MarabouNetwork):
"""
Class that implements a MarabouNetwork from an NNet file.
"""
def __init__ (self, filename, perform_sbt=False):
def __init__ (self, filename, use_nlr=False):
"""
Constructs a MarabouNetworkNNet object from an .nnet file.

Args:
filename: path to the .nnet file.
use_nlr: if True, construct a NetworkLevelReasoner for the network (default False).
Attributes:
numLayers (int) The number of layers in the network.
layerSizes (list of ints) Layer sizes.
inputSize (int) Size of the input.
outputSize (int) Size of the output.
maxLayersize (int) Size of largest layer.
inputMinimums (list of floats) Minimum value for each input.
inputMaximums (list of floats) Maximum value for each input.
inputMeans (list of floats) Mean value for each input.
inputRanges (list of floats) Range for each input.
weights (list of list of lists) Outer index corresponds to layer
number.
biases (list of lists) Outer index corresponds to layer number.
sbt The SymbolicBoundTightener object
nlr The NetworkLevelReasoner object
"""
super().__init__()

@@ -83,46 +83,50 @@ def __init__ (self, filename, perform_sbt=False):

# Set the number of variables
self.numVars = self.numberOfVariables()
if perform_sbt:
self.sbt = self.createSBT(filename)
if use_nlr:
self.nlr = self.createNLR(filename)
else:
self.sbt = None
self.nlr = None

def createSBT(self, filename):
sbt = MarabouCore.SymbolicBoundTightener()
sbt.setNumberOfLayers(self.numLayers + 1)
def createNLR(self, filename):
nlr = MarabouCore.NetworkLevelReasoner()
nlr.setNumberOfLayers(self.numLayers + 1)
for layer, size in enumerate(self.layerSizes):
sbt.setLayerSize(layer, size)
sbt.allocateWeightAndBiasSpace()
nlr.setLayerSize(layer, size)
nlr.allocateMemoryByTopology()
# Biases
for layer in range(len(self.biases)):
for node in range(len(self.biases[layer])):
sbt.setBias(layer + 1, node, self.biases[layer][node])
nlr.setBias(layer + 1, node, self.biases[layer][node])
# Weights
for layer in range(len(self.weights)): # starting from the first hidden layer
for target in range(len(self.weights[layer])):
for source in range(len(self.weights[layer][target])):
sbt.setWeight(layer, source, target, self.weights[layer][target][source])

# Initial bounds
for i in range(self.inputSize):
sbt.setInputLowerBound( i, self.getInputMinimum(i))
sbt.setInputUpperBound( i, self.getInputMaximum(i))

# Variable indexing of hidden layers
nlr.setWeight(layer, source, target, self.weights[layer][target][source])

# Activation Functions
RELU = 0
for layer in range(len(self.weights) - 1): # only hidden layers
for neuron in range(len(self.weights[layer])):
nlr.setNeuronActivationFunction(layer, neuron, RELU)

# Variable indexing
for layer in range(len(self.layerSizes))[1:-1]:
for node in range(self.layerSizes[layer]):
sbt.setReluBVariable(layer, node, self.nodeTo_b(layer, node))
sbt.setReluFVariable(layer, node, self.nodeTo_f(layer, node))
nlr.setWeightedSumVariable(layer, node, self.nodeTo_b(layer, node))
nlr.setActivationResultVariable(layer, node, self.nodeTo_f(layer, node))

for node in range(self.inputSize):
nlr.setActivationResultVariable(0, node, self.nodeTo_f(0, node))

for node in range(self.outputSize):
sbt.setReluFVariable(len(self.layerSizes) - 1, node, self.nodeTo_b(len(self.layerSizes) - 1, node))
nlr.setWeightedSumVariable(len(self.layerSizes) - 1, node, self.nodeTo_b(len(self.layerSizes) - 1, node))

return sbt
return nlr

def getMarabouQuery(self):
ipq = super(MarabouNetworkNNet, self).getMarabouQuery()
ipq.setSymbolicBoundTightener(self.sbt)
ipq.setNetworkLevelReasoner(self.nlr)
return ipq
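With the constructor flag set, all of this wiring happens automatically and the reasoner travels with the query. A typical use, where "model.nnet" is a placeholder path:

```python
from maraboupy import MarabouNetworkNNet

net = MarabouNetworkNNet.MarabouNetworkNNet("model.nnet", use_nlr=True)
ipq = net.getMarabouQuery()   # the returned InputQuery now carries net.nlr
```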

"""
@@ -205,18 +209,18 @@ def variableRanges(self):
b_variables = []
f_variables = []
output_variables = []

input_variables = [i for i in range(self.layerSizes[0])]

hidden_layers = self.layerSizes[1:-1]

for layer, hidden_layer_length in enumerate(hidden_layers):
for i in range(hidden_layer_length):
offset = sum([x*2 for x in hidden_layers[:layer]])

b_variables.append(self.layerSizes[0] + offset + i)
f_variables.append(self.layerSizes[0] + offset + i+hidden_layer_length)

#final layer
for i in range(self.layerSizes[-1]):
offset = sum([x*2 for x in hidden_layers[:len(hidden_layers) - 1]])
@@ -241,10 +245,10 @@ def variableRanges(self):
def nodeTo_b(self, layer, node):
assert(0 < layer)
assert(node < self.layerSizes[layer])

offset = self.layerSizes[0]
offset += sum([x*2 for x in self.layerSizes[1:layer]])

return offset + node
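To make the offsets concrete, a worked example assuming layerSizes = [2, 3, 1]; the f-variable offsets follow variableRanges above:

```python
# layerSizes = [2, 3, 1]: 2 inputs, one hidden ReLU layer of 3, 1 output.
# Inputs (f only):       nodeTo_f(0, i) = i            -> variables 0, 1
# Hidden weighted sums:  nodeTo_b(1, i) = 2 + i        -> variables 2, 3, 4
# Hidden ReLU outputs:   nodeTo_f(1, i) = 2 + 3 + i    -> variables 5, 6, 7
# Output weighted sums:  nodeTo_b(2, i) = 2 + 2*3 + i  -> variable 8
# numberOfVariables() = 2 + 2*3 + 1 = 9
```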

"""
Expand All @@ -261,7 +265,7 @@ def nodeTo_b(self, layer, node):
def nodeTo_f(self, layer, node):
assert(layer < len(self.layerSizes))
assert(node < self.layerSizes[layer])

if layer == 0:
return node
else:
@@ -289,16 +293,16 @@ def buildEquations(self):

for node in range(size):
#add marabou equation

equations_aux.append([])
equations_aux[equations_count].append([self.nodeTo_b(layer, node), -1.0])
for previous_node in range(self.layerSizes[layer-1]):
equations_aux[equations_count].append([self.nodeTo_f(layer-1, previous_node), self.weights[layer-1][node][previous_node]])


equations_aux[equations_count].append(-self.biases[layer-1][node])
equations_count += 1

return equations_aux
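Each entry encodes a weighted-sum constraint of the form -1*b + sum(w*f_prev) = -bias, i.e. b = sum(w*f_prev) + bias. For hidden node 0 of the hypothetical [2, 3, 1] network used in the indexing example above:

```python
# Hidden node 0 is variable 2 = nodeTo_b(1, 0); its equations_aux entry is
#   [[2, -1.0], [0, w[0][0][0]], [1, w[0][0][1]], -biases[0][0]]
# read as: -x2 + w00*x0 + w01*x1 = -bias0, equivalently x2 = w00*x0 + w01*x1 + bias0.
```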
"""
Identify all relus and their associated variable numbers.
@@ -314,7 +318,7 @@ def findRelus(self):
for layer, size in enumerate(hidden_layers):
for node in range(size):
relus.append([self.nodeTo_b(layer+1, node), self.nodeTo_f(layer+1, node)])

return relus
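For the same hypothetical [2, 3, 1] network, the result pairs each hidden neuron's b-variable with its f-variable:

```python
# findRelus() for layerSizes == [2, 3, 1]:
#   [[2, 5], [3, 6], [4, 7]]
# one [b, f] pair per hidden neuron (cf. the indexing example above).
```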

def numberOfVariables(self):
2 changes: 1 addition & 1 deletion src/configuration/GlobalConfiguration.cpp
@@ -52,7 +52,6 @@ const double GlobalConfiguration::COST_FUNCTION_ERROR_THRESHOLD = 0.0000000001;
const bool GlobalConfiguration::USE_HARRIS_RATIO_TEST = true;

const bool GlobalConfiguration::USE_SYMBOLIC_BOUND_TIGHTENING = true;
const bool GlobalConfiguration::USE_LINEAR_CONCRETIZATION = true;
const double GlobalConfiguration::SYMBOLIC_TIGHTENING_ROUNDING_CONSTANT = 0.00000005;

const bool GlobalConfiguration::PREPROCESS_INPUT_QUERY = true;
@@ -92,6 +91,7 @@ const bool GlobalConfiguration::PROJECTED_STEEPEST_EDGE_LOGGING = false;
const bool GlobalConfiguration::GAUSSIAN_ELIMINATION_LOGGING = false;
const bool GlobalConfiguration::QUERY_LOADER_LOGGING = false;
const bool GlobalConfiguration::SYMBOLIC_BOUND_TIGHTENER_LOGGING = false;
const bool GlobalConfiguration::NETWORK_LEVEL_REASONER_LOGGING = false;

const bool GlobalConfiguration::USE_SMART_FIX = false;
const bool GlobalConfiguration::USE_LEAST_FIX = false;
5 changes: 1 addition & 4 deletions src/configuration/GlobalConfiguration.h
@@ -164,10 +164,6 @@ class GlobalConfiguration
// Whether symbolic bound tightening should be used or not
static const bool USE_SYMBOLIC_BOUND_TIGHTENING;

// If symbolic bound tightening is used, should linear concretization (as
// opposed to constant concretization) be used.
static const bool USE_LINEAR_CONCRETIZATION;

// Symbolic tightening rounding constant
static const double SYMBOLIC_TIGHTENING_ROUNDING_CONSTANT;

@@ -212,6 +208,7 @@ class GlobalConfiguration
static const bool GAUSSIAN_ELIMINATION_LOGGING;
static const bool QUERY_LOADER_LOGGING;
static const bool SYMBOLIC_BOUND_TIGHTENER_LOGGING;
static const bool NETWORK_LEVEL_REASONER_LOGGING;
};

#endif // __GlobalConfiguration_h__
1 change: 0 additions & 1 deletion src/engine/CMakeLists.txt
@@ -35,7 +35,6 @@ engine_add_unit_test(ProjectedSteepestEdge)
engine_add_unit_test(ReluConstraint)
engine_add_unit_test(RowBoundTightener)
engine_add_unit_test(SmtCore)
engine_add_unit_test(SymbolicBoundTightener)
engine_add_unit_test(Tableau)

if (${BUILD_PYTHON})
77 changes: 22 additions & 55 deletions src/engine/Engine.cpp
@@ -30,7 +30,6 @@

Engine::Engine( unsigned verbosity )
: _rowBoundTightener( *_tableau )
, _symbolicBoundTightener( NULL )
, _smtCore( this )
, _numPlConstraintsDisabledByValidSplits( 0 )
, _preprocessingEnabled( false )
@@ -1046,8 +1045,8 @@ void Engine::initializeNetworkLevelReasoning()
{
_networkLevelReasoner = _preprocessedQuery.getNetworkLevelReasoner();

if ( _preprocessedQuery._sbt )
_symbolicBoundTightener = _preprocessedQuery._sbt;
if ( _networkLevelReasoner )
_networkLevelReasoner->setTableau( _tableau );
}

bool Engine::processInputQuery( InputQuery &inputQuery, bool preprocess )
@@ -1732,71 +1731,39 @@ List<unsigned> Engine::getInputVariables() const
void Engine::performSymbolicBoundTightening()
{
if ( ( !GlobalConfiguration::USE_SYMBOLIC_BOUND_TIGHTENING ) ||
( !_symbolicBoundTightener ) )
( !_networkLevelReasoner ) )
return;

struct timespec start = TimeUtils::sampleMicro();

unsigned numTightenedBounds = 0;

// Clear any previously stored information
_symbolicBoundTightener->clearReluStatuses();
// Step 1: tell the NLR about the current bounds
_networkLevelReasoner->obtainCurrentBounds();

// Step 1: tell the SBT about input bounds; maybe they were tightened
unsigned inputVariableIndex = 0;
for ( const auto &inputVariable : _preprocessedQuery.getInputVariables() )
{
// We assume the input variables are the first variables
if ( inputVariable != inputVariableIndex )
{
throw MarabouError( MarabouError::SYMBOLIC_BOUND_TIGHTENER_FAULTY_INPUT,
Stringf( "Sanity check failed, input variable %u with unexpected index %u", inputVariableIndex, inputVariable ).ascii() );
}
++inputVariableIndex;

double min = _tableau->getLowerBound( inputVariable );
double max = _tableau->getUpperBound( inputVariable );

_symbolicBoundTightener->setInputLowerBound( inputVariable, min );
_symbolicBoundTightener->setInputUpperBound( inputVariable, max );
}

// Step 2: tell the SBT about the state of the ReLU constraints
for ( const auto &constraint : _plConstraints )
{
if ( !constraint->supportsSymbolicBoundTightening() )
throw MarabouError( MarabouError::SYMBOLIC_BOUND_TIGHTENER_UNSUPPORTED_CONSTRAINT_TYPE );

ReluConstraint *relu = (ReluConstraint *)constraint;
unsigned b = relu->getB();
SymbolicBoundTightener::NodeIndex nodeIndex = _symbolicBoundTightener->nodeIndexFromB( b );
_symbolicBoundTightener->setReluStatus( nodeIndex._layer, nodeIndex._neuron, relu->getPhaseStatus() );
}
// Step 2: perform SBT
_networkLevelReasoner->symbolicBoundPropagation();

// Step 3: perfrom the bound tightening
_symbolicBoundTightener->run();
// Step 3: extract the bounds
List<Tightening> tightenings;
_networkLevelReasoner->getConstraintTightenings( tightenings );

// Stpe 4: extract any tighter bounds that were discovered
for ( const auto &pair : _symbolicBoundTightener->getNodeIndexToFMapping() )
for ( const auto &tightening : tightenings )
{
unsigned layer = pair.first._layer;
unsigned neuron = pair.first._neuron;
unsigned var = pair.second;

double lb = _symbolicBoundTightener->getLowerBound( layer, neuron );
double ub = _symbolicBoundTightener->getUpperBound( layer, neuron );

double currentLb = _tableau->getLowerBound( var );
double currentUb = _tableau->getUpperBound( var );

_tableau->tightenLowerBound( var, lb );
_tableau->tightenUpperBound( var, ub );

if ( FloatUtils::lt( ub, currentUb ) )
if ( tightening._type == Tightening::LB &&
_tableau->getLowerBound( tightening._variable ) < tightening._value )
{
_tableau->tightenLowerBound( tightening._variable, tightening._value );
++numTightenedBounds;
}


if ( FloatUtils::gt( lb, currentLb ) )
if ( tightening._type == Tightening::UB &&
_tableau->getUpperBound( tightening._variable ) > tightening._value )
{
_tableau->tightenUpperBound( tightening._variable, tightening._value );
++numTightenedBounds;
}
}

struct timespec end = TimeUtils::sampleMicro();
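The extraction loop only ever strengthens bounds: a candidate tightening is applied only when it is strictly tighter than what the tableau currently holds, and only those applications are counted. The same rule, restated as a self-contained Python sketch in which plain dicts stand in for Marabou's Tableau:

```python
def apply_tightenings(tightenings, lower, upper):
    """tightenings: iterable of (variable, kind, value), kind in {"LB", "UB"}.
    lower/upper map each variable to its current bound. Returns how many
    bounds were actually tightened (cf. numTightenedBounds above)."""
    tightened = 0
    for var, kind, value in tightenings:
        if kind == "LB" and lower[var] < value:    # strictly tighter lower bound
            lower[var] = value
            tightened += 1
        elif kind == "UB" and upper[var] > value:  # strictly tighter upper bound
            upper[var] = value
            tightened += 1
    return tightened
```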
5 changes: 0 additions & 5 deletions src/engine/Engine.h
@@ -219,11 +219,6 @@ class Engine : public IEngine, public SignalHandler::Signalable
*/
AutoRowBoundTightener _rowBoundTightener;

/*
Symbolic bound tightnere.
*/
SymbolicBoundTightener *_symbolicBoundTightener;

/*
The SMT engine is in charge of case splitting.
*/