Skip to content

Commit

Permalink
Merge pull request #8 from NeuralNetworkVerification/master
Browse files Browse the repository at this point in the history
Merge from master
  • Loading branch information
guykatzz authored Apr 26, 2020
2 parents fe44808 + 98d7e9d commit f08849c
Show file tree
Hide file tree
Showing 8 changed files with 392 additions and 44 deletions.
4 changes: 4 additions & 0 deletions src/engine/AbsoluteValueConstraint.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,10 @@ class AbsoluteValueConstraint : public PiecewiseLinearConstraint
PHASE_NEGATIVE = 2,
};

/*
The f variable is the absolute value of the b variable:
f = | b |
*/
AbsoluteValueConstraint( unsigned b, unsigned f );

/*
Expand Down
1 change: 1 addition & 0 deletions src/engine/MarabouError.h
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ class MarabouError : public Error
MERGED_OUTPUT_VARIABLE = 21,
INVALID_WEIGHTED_SUM_INDEX = 22,
UNSUCCESSFUL_QUEUE_PUSH = 23,
NETWORK_LEVEL_REASONER_ACTIVATION_NOT_SUPPORTED = 24,

// Error codes for Query Loader
FILE_DOES_NOT_EXIST = 100,
Expand Down
6 changes: 5 additions & 1 deletion src/engine/MaxConstraint.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,10 @@ class MaxConstraint : public PiecewiseLinearConstraint
public:
~MaxConstraint();

/*
The f variable is the output of max over the variables stored in elements:
f = max( elements )
*/
MaxConstraint( unsigned f, const Set<unsigned> &elements );
MaxConstraint( const String &serializedMax );

Expand Down Expand Up @@ -125,6 +129,7 @@ class MaxConstraint : public PiecewiseLinearConstraint
private:
unsigned _f;
Set<unsigned> _elements;

unsigned _maxIndex;
bool _maxIndexSet;
double _maxLowerBound;
Expand All @@ -136,7 +141,6 @@ class MaxConstraint : public PiecewiseLinearConstraint
Returns the phase where variable argMax has maximum value.
*/
PiecewiseLinearCaseSplit getSplit( unsigned argMax ) const;

};

#endif // __MaxConstraint_h__
Expand Down
197 changes: 193 additions & 4 deletions src/engine/NetworkLevelReasoner.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,16 +14,22 @@
**/

#include "Debug.h"
#include "FloatUtils.h"
#include "MStringf.h"
#include "NetworkLevelReasoner.h"
#include "MarabouError.h"
#include "NetworkLevelReasoner.h"
#include <cstring>

/*
  Construct an empty reasoner. Every dynamically-allocated buffer starts
  out NULL so that freeMemoryIfNeeded() is a safe no-op until
  allocateMemoryByTopology() has been invoked. _numberOfLayers is
  zero-initialized as well: it is read by the cleanup loops in
  freeMemoryIfNeeded() and by the propagation routines, and must not be
  left indeterminate on a freshly constructed object.
*/
NetworkLevelReasoner::NetworkLevelReasoner()
    : _numberOfLayers( 0 )
    , _weights( NULL )
    , _maxLayerSize( 0 )
    , _work1( NULL )
    , _work2( NULL )
    , _tableau( NULL )
    , _lowerBoundsWeightedSums( NULL )
    , _upperBoundsWeightedSums( NULL )
    , _lowerBoundsActivations( NULL )
    , _upperBoundsActivations( NULL )
{
}

Expand Down Expand Up @@ -60,6 +66,66 @@ void NetworkLevelReasoner::freeMemoryIfNeeded()
delete[] _work2;
_work2 = NULL;
}

if ( _lowerBoundsWeightedSums )
{
for ( unsigned i = 0; i < _numberOfLayers; ++i )
{
if ( _lowerBoundsWeightedSums[i] )
{
delete[] _lowerBoundsWeightedSums[i];
_lowerBoundsWeightedSums[i] = NULL;
}
}

delete[] _lowerBoundsWeightedSums;
_lowerBoundsWeightedSums = NULL;
}

if ( _upperBoundsWeightedSums )
{
for ( unsigned i = 0; i < _numberOfLayers; ++i )
{
if ( _upperBoundsWeightedSums[i] )
{
delete[] _upperBoundsWeightedSums[i];
_upperBoundsWeightedSums[i] = NULL;
}
}

delete[] _upperBoundsWeightedSums;
_upperBoundsWeightedSums = NULL;
}

if ( _lowerBoundsActivations )
{
for ( unsigned i = 0; i < _numberOfLayers; ++i )
{
if ( _lowerBoundsActivations[i] )
{
delete[] _lowerBoundsActivations[i];
_lowerBoundsActivations[i] = NULL;
}
}

delete[] _lowerBoundsActivations;
_lowerBoundsActivations = NULL;
}

if ( _upperBoundsActivations )
{
for ( unsigned i = 0; i < _numberOfLayers; ++i )
{
if ( _upperBoundsActivations[i] )
{
delete[] _upperBoundsActivations[i];
_upperBoundsActivations[i] = NULL;
}
}

delete[] _upperBoundsActivations;
_upperBoundsActivations = NULL;
}
}

void NetworkLevelReasoner::setNumberOfLayers( unsigned numberOfLayers )
Expand All @@ -76,7 +142,7 @@ void NetworkLevelReasoner::setLayerSize( unsigned layer, unsigned size )
_maxLayerSize = size;
}

void NetworkLevelReasoner::allocateWeightMatrices()
void NetworkLevelReasoner::allocateMemoryByTopology()
{
freeMemoryIfNeeded();

Expand All @@ -99,6 +165,27 @@ void NetworkLevelReasoner::allocateWeightMatrices()
_work2 = new double[_maxLayerSize];
if ( !_work2 )
throw MarabouError( MarabouError::ALLOCATION_FAILED, "NetworkLevelReasoner::work2" );

_lowerBoundsWeightedSums = new double *[_numberOfLayers];
_upperBoundsWeightedSums = new double *[_numberOfLayers];
_lowerBoundsActivations = new double *[_numberOfLayers];
_upperBoundsActivations = new double *[_numberOfLayers];

if ( !_lowerBoundsWeightedSums || !_upperBoundsWeightedSums ||
!_lowerBoundsActivations || !_upperBoundsActivations )
throw MarabouError( MarabouError::ALLOCATION_FAILED, "NetworkLevelReasoner::bounds" );

for ( unsigned i = 0; i < _numberOfLayers; ++i )
{
_lowerBoundsWeightedSums[i] = new double[_layerSizes[i]];
_upperBoundsWeightedSums[i] = new double[_layerSizes[i]];
_lowerBoundsActivations[i] = new double[_layerSizes[i]];
_upperBoundsActivations[i] = new double[_layerSizes[i]];

if ( !_lowerBoundsWeightedSums[i] || !_lowerBoundsWeightedSums[i] ||
!_lowerBoundsActivations[i] || !_lowerBoundsActivations[i] )
throw MarabouError( MarabouError::ALLOCATION_FAILED, "NetworkLevelReasoner::bounds[i]" );
}
}

void NetworkLevelReasoner::setNeuronActivationFunction( unsigned layer, unsigned neuron, ActivationFunction activationFuction )
Expand Down Expand Up @@ -155,7 +242,7 @@ void NetworkLevelReasoner::evaluate( double *input, double *output )
break;

default:
ASSERT( false );
throw MarabouError( MarabouError::NETWORK_LEVEL_REASONER_ACTIVATION_NOT_SUPPORTED );
break;
}
}
Expand Down Expand Up @@ -206,7 +293,7 @@ void NetworkLevelReasoner::storeIntoOther( NetworkLevelReasoner &other ) const
other.setNumberOfLayers( _numberOfLayers );
for ( const auto &pair : _layerSizes )
other.setLayerSize( pair.first, pair.second );
other.allocateWeightMatrices();
other.allocateMemoryByTopology();

for ( const auto &pair : _neuronToActivationFunction )
other.setNeuronActivationFunction( pair.first._layer, pair.first._neuron, pair.second );
Expand Down Expand Up @@ -330,6 +417,108 @@ void NetworkLevelReasoner::updateVariableIndices( const Map<unsigned, unsigned>
}
}

void NetworkLevelReasoner::obtainInputBounds()
{
ASSERT( _tableau );

for ( unsigned i = 0; i < _layerSizes[0]; ++i )
{
unsigned varIndex = _indexToActivationResultVariable[Index( 0, i )];
_lowerBoundsActivations[0][i] = _tableau->getLowerBound( varIndex );
_upperBoundsActivations[0][i] = _tableau->getUpperBound( varIndex );
}
}

/*
  Register the tableau from which input-variable bounds are read by
  obtainInputBounds(). The reasoner only observes the tableau; it does
  not take ownership of it.
*/
void NetworkLevelReasoner::setTableau( const ITableau *tableau )
{
_tableau = tableau;
}

void NetworkLevelReasoner::intervalArithmeticBoundPropagation()
{
for ( unsigned i = 1; i < _numberOfLayers; ++i )
{
for ( unsigned j = 0; j < _layerSizes[i]; ++j )
{
Index index( i, j );
double lb = _bias.exists( index ) ? _bias[index] : 0;
double ub = _bias.exists( index ) ? _bias[index] : 0;

for ( unsigned k = 0; k < _layerSizes[i - 1]; ++k )
{
double previousLb = _lowerBoundsActivations[i - 1][k];
double previousUb = _upperBoundsActivations[i - 1][k];
double weight = _weights[i - 1][k * _layerSizes[i] + j];

if ( weight > 0 )
{
lb += weight * previousLb;
ub += weight * previousUb;
}
else
{
lb += weight * previousUb;
ub += weight * previousLb;
}
}

_lowerBoundsWeightedSums[i][j] = lb;
_upperBoundsWeightedSums[i][j] = ub;

if ( i != _numberOfLayers - 1 )
{
// Apply activation function
ASSERT( _neuronToActivationFunction.exists( index ) );

switch ( _neuronToActivationFunction[index] )
{
case ReLU:
_lowerBoundsActivations[i][j] = lb > 0 ? lb : 0;
_upperBoundsActivations[i][j] = ub;
break;

default:
ASSERT( false );
break;
}
}
}
}
}

/*
  Export the bounds computed by bound propagation as a list of
  tightenings over the tableau variables. Weighted-sum bounds are
  reported for every non-input layer; activation-result bounds are
  reported for hidden layers only, since the output layer has no
  activation function.
*/
void NetworkLevelReasoner::getConstraintTightenings( List<Tightening> &tightenings ) const
{
    tightenings.clear();

    for ( unsigned layer = 1; layer < _numberOfLayers; ++layer )
    {
        bool isHiddenLayer = ( layer != _numberOfLayers - 1 );

        for ( unsigned neuron = 0; neuron < _layerSizes[layer]; ++neuron )
        {
            Index index( layer, neuron );

            // Weighted sums
            unsigned wsVariable = _indexToWeightedSumVariable[index];
            tightenings.append( Tightening( wsVariable,
                                            _lowerBoundsWeightedSums[layer][neuron],
                                            Tightening::LB ) );
            tightenings.append( Tightening( wsVariable,
                                            _upperBoundsWeightedSums[layer][neuron],
                                            Tightening::UB ) );

            // Activation results
            if ( isHiddenLayer )
            {
                unsigned activationVariable = _indexToActivationResultVariable[index];
                tightenings.append( Tightening( activationVariable,
                                                _lowerBoundsActivations[layer][neuron],
                                                Tightening::LB ) );
                tightenings.append( Tightening( activationVariable,
                                                _upperBoundsActivations[layer][neuron],
                                                Tightening::UB ) );
            }
        }
    }
}

//
// Local Variables:
// compile-command: "make -C ../.. "
Expand Down
36 changes: 35 additions & 1 deletion src/engine/NetworkLevelReasoner.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
#ifndef __NetworkLevelReasoner_h__
#define __NetworkLevelReasoner_h__

#include "ITableau.h"
#include "Map.h"
#include "Tightening.h"

/*
A class for performing operations that require knowledge of network
Expand Down Expand Up @@ -68,11 +70,17 @@ class NetworkLevelReasoner

void setNumberOfLayers( unsigned numberOfLayers );
void setLayerSize( unsigned layer, unsigned size );
void allocateWeightMatrices();
void setNeuronActivationFunction( unsigned layer, unsigned neuron, ActivationFunction activationFuction );
void setWeight( unsigned sourceLayer, unsigned sourceNeuron, unsigned targetNeuron, double weight );
void setBias( unsigned layer, unsigned neuron, double bias );

/*
A method that allocates all internal memory structures, based on
the network's topology. Should be invoked after the layer sizes
have been provided.
*/
void allocateMemoryByTopology();

/*
Mapping from node indices to the variables representing their
weighted sum values and activation result values.
Expand Down Expand Up @@ -108,6 +116,22 @@ class NetworkLevelReasoner
void updateVariableIndices( const Map<unsigned, unsigned> &oldIndexToNewIndex,
const Map<unsigned, unsigned> &mergedVariables );

/*
Bound propagation methods:
- obtainInputBounds: obtain the current bounds on input variables
from the tableau.
- Interval arithmetic: compute the bounds of a layer's neurons
based on the concrete bounds of the previous layer.
*/

void setTableau( const ITableau *tableau );
void obtainInputBounds();
void intervalArithmeticBoundPropagation();

void getConstraintTightenings( List<Tightening> &tightenings ) const;

private:
unsigned _numberOfLayers;
Map<unsigned, unsigned> _layerSizes;
Expand All @@ -120,6 +144,8 @@ class NetworkLevelReasoner
double *_work1;
double *_work2;

const ITableau *_tableau;

void freeMemoryIfNeeded();

/*
Expand All @@ -133,6 +159,14 @@ class NetworkLevelReasoner
*/
Map<Index, double> _indexToWeightedSumAssignment;
Map<Index, double> _indexToActivationResultAssignment;

/*
Work space for bound tightening.
*/
double **_lowerBoundsWeightedSums;
double **_upperBoundsWeightedSums;
double **_lowerBoundsActivations;
double **_upperBoundsActivations;
};

#endif // __NetworkLevelReasoner_h__
Expand Down
4 changes: 4 additions & 0 deletions src/engine/ReluConstraint.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,10 @@ class ReluConstraint : public PiecewiseLinearConstraint
PHASE_INACTIVE = 2,
};

/*
The f variable is the relu output on the b variable:
f = relu( b )
*/
ReluConstraint( unsigned b, unsigned f );
ReluConstraint( const String &serializedRelu );

Expand Down
Loading

0 comments on commit f08849c

Please sign in to comment.