Commit

Merge 81f13af into ac4eea2
changliuliu authored Apr 25, 2020
2 parents ac4eea2 + 81f13af commit 113009b
Showing 10 changed files with 343 additions and 40 deletions.
3 changes: 2 additions & 1 deletion src/NeuralVerification.jl
@@ -83,10 +83,11 @@ export BaB, Sherlock, Reluplex
include("satisfiability/planet.jl")
export Planet

include("adversarial/neurify.jl")
include("adversarial/reluVal.jl")
include("adversarial/fastLin.jl")
include("adversarial/fastLip.jl")
include("adversarial/dlv.jl")
export ReluVal, FastLin, FastLip, DLV
export ReluVal, Neurify, FastLin, FastLip, DLV

end
242 changes: 242 additions & 0 deletions src/adversarial/neurify.jl
@@ -0,0 +1,242 @@
"""
Neurify(max_iter::Int64, tree_search::Symbol)
Neurify combines symbolic reachability analysis with constraint refinement to minimize over-approximation of the reachable set.
# Problem requirement
1. Network: any depth, ReLU activation
2. Input: AbstractPolytope
3. Output: AbstractPolytope
# Return
`CounterExampleResult` or `ReachabilityResult`
# Method
Symbolic reachability analysis and iterative interval refinement (search).
- `max_iter` default `1000`.
# Property
Sound but not complete.
# Reference
[S. Wang, K. Pei, J. Whitehouse, J. Yang, and S. Jana,
"Efficient Formal Safety Analysis of Neural Networks,"
*CoRR*, vol. abs/1809.08098, 2018. arXiv: 1809.08098.](https://arxiv.org/pdf/1809.08098.pdf)
[https://github.com/tcwangshiqi-columbia/Neurify](https://github.com/tcwangshiqi-columbia/Neurify)
"""

@with_kw struct Neurify
max_iter::Int64 = 1000
tree_search::Symbol = :DFS # only :DFS/:BFS allowed? If so, we should assert this.
end
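
# A minimal usage sketch (hypothetical network path and specification; substitute
# your own network and input/output sets):
#
#   using NeuralVerification, LazySets
#   nnet    = read_nnet("examples/networks/small_nnet.nnet")
#   input   = Hyperrectangle(low = [-1.0], high = [1.0])
#   output  = HPolytope([HalfSpace([1.0], 100.0)])
#   problem = Problem(nnet, input, output)
#   result  = solve(Neurify(max_iter = 100), problem)
#   result.status   # :holds, :violated, or :unknown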

struct SymbolicInterval{F<:AbstractPolytope}
Low::Matrix{Float64}
Up::Matrix{Float64}
interval::F
end

SymbolicInterval(x::Matrix{Float64}, y::Matrix{Float64}) = SymbolicInterval(x, y, Hyperrectangle([0],[0]))

# Data to be passed during forward_layer
struct SymbolicIntervalGradient
sym::SymbolicInterval
LΛ::Vector{Vector{Float64}}
UΛ::Vector{Vector{Float64}}
end

function solve(solver::Neurify, problem::Problem)
problem = Problem(problem.network, convert(HPolytope, problem.input), convert(HPolytope, problem.output))

reach = forward_network(solver, problem.network, problem.input)
result = check_inclusion(reach.sym, problem.output, problem.network) # dispatches on the interval type; a Hyperrectangle interval falls back to check_inclusion in reluVal.jl
result.status == :unknown || return result
reach_list = SymbolicIntervalGradient[reach]
for i in 2:solver.max_iter
length(reach_list) > 0 || return BasicResult(:holds)
reach = pick_out!(reach_list, solver.tree_search)
intervals = constraint_refinement(solver, problem.network, reach)
for interval in intervals
reach = forward_network(solver, problem.network, interval)
result = check_inclusion(reach.sym, problem.output, problem.network)
result.status == :violated && return result
result.status == :holds || (push!(reach_list, reach))
end
end
return BasicResult(:unknown)
end

function check_inclusion(reach::SymbolicInterval{HPolytope{N}}, output::AbstractPolytope, nnet::Network) where N
# The output constraint is in the form A*x < b
# We try to maximize output constraint to find a violated case, or to verify the inclusion,
# e.g., if the output constraint is [1, 0, -1] * x < 2, then we maximize reach.Up[1] * 1 + reach.Low[3] * (-1)

reach_lc = reach.interval.constraints
output_lc = output.constraints

n = size(reach_lc, 1)
m = size(reach_lc[1].a, 1)
model = Model(with_optimizer(GLPK.Optimizer))
@variable(model, x[1:m])
@constraint(model, [i in 1:n], reach_lc[i].a' * x <= reach_lc[i].b)

for i in 1:size(output.constraints, 1)
obj = zeros(size(reach.Low, 2))
for j in 1:size(reach.Low, 1)
if output.constraints[i].a[j] > 0
obj += output.constraints[i].a[j] * reach.Up[j,:]
else
obj += output.constraints[i].a[j] * reach.Low[j,:]
end
end
@objective(model, Max, obj' * [x; [1]]) # maximize, per the comment above
optimize!(model)
# println("termination_status")
# println(termination_status(model))
y = compute_output(nnet, value.(x))
if y ∉ output
if value.(x) ∈ reach.interval
return CounterExampleResult(:violated, value.(x))
end
end
if objective_value(model) > output.constraints[i].b
return BasicResult(:unknown)
end

end
return BasicResult(:holds)
end
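
# A worked instance of the objective assembly above (numbers are illustrative):
# with output constraint a = [1.0, 0.0, -1.0], b = 2.0 and 2-input symbolic bounds
#   Up  = [1.0 0.0 0.5;  0.0 1.0 0.0;  1.0 1.0 -0.2]  # row j: upper equation of output j as [coeffs offset]
#   Low = [0.5 0.0 0.0;  0.0 0.5 0.0;  0.5 0.5 -0.4]
# the objective row is obj = 1.0 * Up[1, :] + (-1.0) * Low[3, :] = [0.5, -0.5, 0.9],
# and maximizing obj' * [x; 1] upper-bounds a' * y over the reachable set.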

function constraint_refinement(solver::Neurify, nnet::Network, reach::SymbolicIntervalGradient)
i, j, gradient = get_nodewise_gradient(nnet, reach.LΛ, reach.UΛ)
# We can generate three more constraints
# Symbolic representation of node i j is Low[i][j,:] and Up[i][j,:]
nnet_new = Network(nnet.layers[1:i])
reach_new = forward_network(solver, nnet_new, reach.sym.interval)
C, d = tosimplehrep(reach.sym.interval)
l_sym = reach_new.sym.Low[[j], 1:end-1]
l_off = reach_new.sym.Low[[j], end]
u_sym = reach_new.sym.Up[[j], 1:end-1]
u_off = reach_new.sym.Up[[j], end]
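# Case split on the sign of node (i, j)'s symbolic bounds over the input region,
# where l(x) = l_sym * x + l_off and u(x) = u_sym * x + u_off:
#   intervals[1]: l(x) <= 0, u(x) <= 0  -- node certainly inactive
#   intervals[2]: l(x) <= 0, u(x) >= 0  -- phase still undetermined
#   intervals[3]: l(x) >= 0, u(x) >= 0  -- node certainly active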
intervals = Vector{HPolytope{Float64}}(undef, 3)
intervals[1] = HPolytope([C; l_sym; u_sym], [d; -l_off; -u_off])
intervals[2] = HPolytope([C; l_sym; -u_sym], [d; -l_off; u_off])
intervals[3] = HPolytope([C; -l_sym; -u_sym], [d; l_off; u_off])
# intervals[4] = HPolytope([C; -l_sym; u_sym], [d; l_off; -u_off]) is omitted: the lower bound cannot exceed the upper bound
return intervals
end

function get_nodewise_gradient(nnet::Network, LΛ::Vector{Vector{Float64}}, UΛ::Vector{Vector{Float64}})
n_output = size(nnet.layers[end].weights, 1)
n_length = length(nnet.layers)
LG = Matrix(1.0I, n_output, n_output)
UG = Matrix(1.0I, n_output, n_output)
max_tuple = (0, 0, 0.0)
for (k, layer) in enumerate(reverse(nnet.layers))
i = n_length - k + 1
for j in 1:length(layer.bias)
if LΛ[i][j] ∉ (0.0, 1.0) && UΛ[i][j] ∉ (0.0, 1.0) # fractional masks mark an undetermined ReLU
max_gradient = max(abs(LG[j]), abs(UG[j]))
if max_gradient > max_tuple[3]
max_tuple = (i, j, max_gradient)
end
end
end
i > 1 || break # no need to map the gradient back through the first layer
LG_hat = max.(LG, 0.0) * Diagonal(LΛ[i]) + min.(LG, 0.0) * Diagonal(UΛ[i])
UG_hat = min.(UG, 0.0) * Diagonal(LΛ[i]) + max.(UG, 0.0) * Diagonal(UΛ[i])
LG, UG = interval_map_right(layer.weights, LG_hat, UG_hat)
end
return max_tuple
end

function forward_layer(solver::Neurify, layer::Layer, input)
return forward_act(forward_linear(solver, input, layer), layer)
end

# Symbolic forward_linear for the first layer
function forward_linear(solver::Neurify, input::AbstractPolytope, layer::Layer)
(W, b) = (layer.weights, layer.bias)
sym = SymbolicInterval(hcat(W, b), hcat(W, b), input)
LΛ = Vector{Vector{Int64}}(undef, 0)
UΛ = Vector{Vector{Int64}}(undef, 0)
return SymbolicIntervalGradient(sym, LΛ, UΛ)
end

# Symbolic forward_linear
function forward_linear(solver::Neurify, input::SymbolicIntervalGradient, layer::Layer)
(W, b) = (layer.weights, layer.bias)
output_Up = max.(W, 0) * input.sym.Up + min.(W, 0) * input.sym.Low
output_Low = max.(W, 0) * input.sym.Low + min.(W, 0) * input.sym.Up
output_Up[:, end] += b
output_Low[:, end] += b
sym = SymbolicInterval(output_Low, output_Up, input.sym.interval)
return SymbolicIntervalGradient(sym, input.LΛ, input.UΛ)
end

# Symbolic forward_act
function forward_act(input::SymbolicIntervalGradient, layer::Layer{ReLU})
n_node, n_input = size(input.sym.Up)
output_Low, output_Up = input.sym.Low[:, :], input.sym.Up[:, :]
mask_lower, mask_upper = zeros(Float64, n_node), ones(Float64, n_node)
for i in 1:n_node
if upper_bound(input.sym.Up[i, :], input.sym.interval) <= 0.0
# Update to zero
mask_lower[i], mask_upper[i] = 0, 0
output_Up[i, :] = zeros(n_input)
output_Low[i, :] = zeros(n_input)
elseif lower_bound(input.sym.Low[i, :], input.sym.interval) >= 0
# Keep dependency
mask_lower[i], mask_upper[i] = 1, 1
else
# Symbolic linear relaxation
# This is different from ReluVal
up_up = upper_bound(input.sym.Up[i, :], input.sym.interval)
up_low = lower_bound(input.sym.Up[i, :], input.sym.interval)
low_up = upper_bound(input.sym.Low[i, :], input.sym.interval)
low_low = lower_bound(input.sym.Low[i, :], input.sym.interval)
up_slop = up_up / (up_up - up_low)
low_slop = low_up / (low_up - low_low)
output_Low[i, :] = low_slop * output_Low[i, :]
output_Up[i, end] -= up_low
output_Up[i, :] = up_slop * output_Up[i, :]
mask_lower[i], mask_upper[i] = low_slop, up_slop
end
end
sym = SymbolicInterval(output_Low, output_Up, input.sym.interval)
LΛ = push!(input.LΛ, mask_lower)
UΛ = push!(input.UΛ, mask_upper)
return SymbolicIntervalGradient(sym, LΛ, UΛ)
end
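
# Numeric sanity check of the relaxation slopes (illustrative bounds): if the
# upper equation ranges over [up_low, up_up] = [-1.0, 3.0], then
#   up_slop = 3.0 / (3.0 - (-1.0)) = 0.75
# and the relaxed upper bound 0.75 * (u(x) - up_low) dominates relu(u(x)) on [-1, 3];
# the lower equation is scaled by low_slop with no offset, so it stays below relu(l(x)).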

function forward_act(input::SymbolicIntervalGradient, layer::Layer{Id})
sym = input.sym
n_node = size(input.sym.Up, 1)
LΛ = push!(input.LΛ, ones(Float64, n_node))
UΛ = push!(input.UΛ, ones(Float64, n_node))
return SymbolicIntervalGradient(sym, LΛ, UΛ)
end


function upper_bound(map::Vector{Float64}, input::HPolytope)
n = size(input.constraints, 1)
m = size(input.constraints[1].a, 1)
model = Model(with_optimizer(GLPK.Optimizer))
@variable(model, x[1:m])
@constraint(model, [i in 1:n], input.constraints[i].a' * x <= input.constraints[i].b)
@objective(model, Max, map' * [x; [1]])
optimize!(model)
return objective_value(model)
end


function lower_bound(map::Vector{Float64}, input::HPolytope)
n = size(input.constraints, 1)
m = size(input.constraints[1].a, 1)
model = Model(with_optimizer(GLPK.Optimizer))
@variable(model, x[1:m])
@constraint(model, [i in 1:n], input.constraints[i].a' * x <= input.constraints[i].b)
@objective(model, Min, map' * [x; [1]])
optimize!(model)
return objective_value(model)
end
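
# Usage sketch for the two LP helpers (values are illustrative):
#   box = convert(HPolytope, Hyperrectangle(zeros(2), ones(2)))  # -1 <= x1, x2 <= 1
#   upper_bound([1.0, 2.0, 0.5], box)  #  1.0 + 2.0 + 0.5 =  3.5
#   lower_bound([1.0, 2.0, 0.5], box)  # -1.0 - 2.0 + 0.5 = -2.5
# the last entry of `map` is the constant offset, matching the [x; [1]] objective.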
31 changes: 15 additions & 16 deletions src/adversarial/reluVal.jl
@@ -27,18 +27,10 @@ Sound but not complete.
[https://github.com/tcwangshiqi-columbia/ReluVal](https://github.com/tcwangshiqi-columbia/ReluVal)
"""
@with_kw struct ReluVal
max_iter::Int64 = 10
max_iter::Int64 = 1000
tree_search::Symbol = :DFS # only :DFS/:BFS allowed? If so, we should assert this.
end

struct SymbolicInterval
Low::Matrix{Float64}
Up::Matrix{Float64}
interval::Hyperrectangle
end

SymbolicInterval(x::Matrix{Float64}, y::Matrix{Float64}) = SymbolicInterval(x, y, Hyperrectangle([0],[0]))

# Data to be passed during forward_layer
struct SymbolicIntervalMask
sym::SymbolicInterval
@@ -54,9 +46,7 @@ function solve(solver::ReluVal, problem::Problem)
for i in 2:solver.max_iter
length(reach_list) > 0 || return BasicResult(:holds)
reach = pick_out!(reach_list, solver.tree_search)
LG, UG = get_gradient(problem.network, reach.LΛ, reach.UΛ)
feature = get_smear_index(problem.network, reach.sym.interval, LG, UG)
intervals = split_interval(reach.sym.interval, feature)
intervals = interval_refinement(problem.network, reach)
for interval in intervals
reach = forward_network(solver, problem.network, interval)
result = check_inclusion(reach.sym, problem.output, problem.network)
@@ -67,6 +57,12 @@ function solve(solver::ReluVal, problem::Problem)
return BasicResult(:unknown)
end

function interval_refinement(nnet::Network, reach::SymbolicIntervalMask)
LG, UG = get_gradient(nnet, reach.LΛ, reach.UΛ)
feature, monotone = get_smear_index(nnet, reach.sym.interval, LG, UG) # monotonicity is not used in this implementation
return split_interval(reach.sym.interval, feature)
end

function pick_out!(reach_list, tree_search)
if tree_search == :BFS
reach = reach_list[1]
@@ -79,7 +75,7 @@ function pick_out!(reach_list, tree_search)
return reach
end

function symbol_to_concrete(reach::SymbolicInterval)
function symbol_to_concrete(reach::SymbolicInterval{Hyperrectangle{N}}) where N
n_output = size(reach.Low, 1)
upper = zeros(n_output)
lower = zeros(n_output)
@@ -90,8 +86,10 @@ function symbol_to_concrete(reach::SymbolicInterval)
return Hyperrectangle(low = lower, high = upper)
end

function check_inclusion(reach::SymbolicInterval, output::AbstractPolytope, nnet::Network)
function check_inclusion(reach::SymbolicInterval{Hyperrectangle{N}}, output::AbstractPolytope, nnet::Network) where N
reachable = symbol_to_concrete(reach)
# println("reluval reachable")
# println(reachable)
issubset(reachable, output) && return BasicResult(:holds)
# is_intersection_empty(reachable, output) && return BasicResult(:violated)

@@ -103,7 +101,7 @@ function check_inclusion(reach::SymbolicInterval, output::AbstractPolytope, nnet::Network)
return BasicResult(:unknown)
end

function forward_layer(solver::ReluVal, layer::Layer, input::Union{SymbolicIntervalMask, Hyperrectangle})
function forward_layer(solver::ReluVal, layer::Layer, input)
return forward_act(forward_linear(input, layer), layer)
end

@@ -178,7 +176,8 @@ function get_smear_index(nnet::Network, input::Hyperrectangle, LG::Matrix, UG::Matrix)
feature = i
end
end
return feature
monotone = all((LG[:, feature] .* UG[:, feature]) .> 0)
return feature, monotone
end

# Get upper bound in concretization
8 changes: 7 additions & 1 deletion src/utils/util.jl
@@ -247,7 +247,7 @@ end
Simple linear mapping on intervals
Inputs:
- `W::Matrix{N}`: linear mapping
- `W::Matrix{N}`: linear mapping (left)
- `l::Vector{N}`: lower bound
- `u::Vector{N}`: upper bound
Outputs:
@@ -259,6 +259,12 @@ function interval_map(W::Matrix{N}, l::AbstractVecOrMat, u::AbstractVecOrMat) where N
return (l_new, u_new)
end

function interval_map_right(W::Matrix{N}, l::AbstractVecOrMat, u::AbstractVecOrMat) where N
l_new = l * max.(W, zero(N)) + u * min.(W, zero(N))
u_new = u * max.(W, zero(N)) + l * min.(W, zero(N))
return (l_new, u_new)
end
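
# Quick numeric check of the right-multiplied variant (illustrative values):
#   W = [1.0 -2.0; 0.5 1.0];  l = [0.0 1.0];  u = [1.0 2.0]  # 1×2 row intervals
#   interval_map_right(W, l, u)  # returns ([0.5 -1.0], [2.0 2.0])
# each column of W picks the matching bound from l or u according to its signs.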

"""
get_bounds(problem::Problem)
get_bounds(nnet::Network, input::Hyperrectangle)
15 changes: 15 additions & 0 deletions test/complements.jl
@@ -1,3 +1,18 @@
using NeuralVerification, LazySets, GLPKMathProgInterface
using Test

import NeuralVerification: ReLU, Id

macro no_error(ex)
quote
try $(esc(ex))
true
catch e
@error(e)
false
end
end
end
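
# e.g. `@no_error solve(solver, problem)` evaluates to true iff the call does not
# throw (`solver` and `problem` stand for objects defined elsewhere in the tests).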

@testset "PolytopeComplements" begin
