Skip to content

Commit

Permalink
bug fix. differentiate approximate_affine_map for hyperrectangles. Update Manifest
Browse files Browse the repository at this point in the history
  • Loading branch information
tomerarnon committed Feb 28, 2019
1 parent 0bfe9b3 commit b99f427
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 11 deletions.
6 changes: 3 additions & 3 deletions Manifest.toml
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ version = "0.18.5"

[[LazySets]]
deps = ["Compat", "Distributed", "Expokit", "GLPKMathProgInterface", "IntervalArithmetic", "LinearAlgebra", "MathProgBase", "Pkg", "Random", "RecipesBase", "Requires", "SharedArrays", "SparseArrays"]
git-tree-sha1 = "c00ba943cf40b8d0b201f7ec824ed5143990eb7a"
git-tree-sha1 = "ad0dfae4f073c6ffd7db129d8c449603184e4207"
repo-rev = "master"
repo-url = "https://github.com/JuliaReach/LazySets.jl.git"
uuid = "b4f0291d-fe17-52bc-9479-3d1a343d9043"
Expand Down Expand Up @@ -368,9 +368,9 @@ version = "0.7.2"

[[StaticArrays]]
deps = ["InteractiveUtils", "LinearAlgebra", "Random", "Statistics", "Test"]
git-tree-sha1 = "1eb114d6e23a817cd3e99abc3226190876d7c898"
git-tree-sha1 = "3841b39ed5f047db1162627bf5f80a9cd3e39ae2"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "0.10.2"
version = "0.10.3"

[[Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
Expand Down
2 changes: 1 addition & 1 deletion src/optimization/duality.jl
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ function activation_value(layer::Layer,
λᵢ::Vector{Variable},
bound::Hyperrectangle)
o = zero(eltype(μᵢ))
b_hat = affine_map(layer, bound)
b_hat = approximate_affine_map(layer, bound)
l_hat, u_hat = low(b_hat), high(b_hat)
l, u = layer.activation(l_hat), layer.activation(u_hat)

Expand Down
4 changes: 2 additions & 2 deletions src/optimization/utils/constraints.jl
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ function encode_layer!(::TriangularRelaxedLP,
bounds::Hyperrectangle)

ẑ = layer.weights * z_current + layer.bias
ẑ_bound = affine_map(layer, bounds)
ẑ_bound = approximate_affine_map(layer, bounds)
l̂, û = low(ẑ_bound), high(ẑ_bound)
for j in 1:length(layer.bias)
if l̂[j] > 0.0
Expand Down Expand Up @@ -199,7 +199,7 @@ function encode_layer!(::BoundedMixedIntegerLP,
bounds::Hyperrectangle)

ẑ = layer.weights * z_current + layer.bias
ẑ_bound = affine_map(layer, bounds)
ẑ_bound = approximate_affine_map(layer, bounds)
l̂, û = low(ẑ_bound), high(ẑ_bound)

for j in 1:length(layer.bias) # For every node
Expand Down
2 changes: 1 addition & 1 deletion src/satisfiability/planet.jl
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ function init_ψ(nnet::Network, bounds::Vector{Hyperrectangle})
return ψ
end
function set_activation_pattern!(ψ::Vector{Vector{Int64}}, L::Layer{ReLU}, bound::Hyperrectangle, index::Int64)
before_act_bound = affine_map(L, bound)
before_act_bound = approximate_affine_map(L, bound)
lower = low(before_act_bound)
upper = high(before_act_bound)
for j in 1:length(lower)
Expand Down
14 changes: 10 additions & 4 deletions src/utils/util.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ function read_nnet(fname::String; last_layer_activation = Id())
# number of layers
nlayers = parse(Int64, split(line, ",")[1])
# read in layer sizes
layer_sizes = parse.(Int64, split(readline(f), ",")[1:nlayers])
layer_sizes = parse.(Int64, split(readline(f), ",")[1:nlayers+1])
# read past additional information
for i in 1:5
line = readline(f)
Expand Down Expand Up @@ -119,7 +119,7 @@ function get_activation(nnet::Network, bounds::Vector{Hyperrectangle})
end

function get_activation(L::Layer{ReLU}, bounds::Hyperrectangle)
before_act_bound = affine_map(L, bounds)
before_act_bound = approximate_affine_map(L, bounds)
lower = low(before_act_bound)
upper = high(before_act_bound)
act_pattern = zeros(n_nodes(L))
Expand Down Expand Up @@ -186,7 +186,7 @@ function act_gradient_bounds(nnet::Network, input::AbstractPolytope)
LΛ = Vector{Matrix}(undef, 0)
UΛ = Vector{Matrix}(undef, 0)
for (i, layer) in enumerate(nnet.layers)
before_act_bound = affine_map(layer, bounds[i])
before_act_bound = approximate_affine_map(layer, bounds[i])
lower = low(before_act_bound)
upper = high(before_act_bound)
l = act_gradient(layer.activation, lower)
Expand Down Expand Up @@ -308,7 +308,13 @@ function affine_map(layer::Layer, input::AbstractPolytope)
W, b = layer.weights, layer.bias
return translate(b, linear_map(W, input))
end
function affine_map(layer::Layer, input::Hyperrectangle)

"""
approximate_affine_map(layer, input::Hyperrectangle)
Returns a Hyperrectangle overapproximation of the affine map of the input.
"""
function approximate_affine_map(layer::Layer, input::Hyperrectangle)
c = affine_map(layer, input.center)
r = abs.(layer.weights) * input.radius
return Hyperrectangle(c, r)
Expand Down

0 comments on commit b99f427

Please sign in to comment.