Merge 1c03c27 into 29e25ab

castrong committed Jul 1, 2020
2 parents 29e25ab + 1c03c27 commit 5112d05
Showing 4 changed files with 105 additions and 2 deletions.
1 change: 1 addition & 0 deletions src/NeuralVerification.jl
@@ -50,6 +50,7 @@ export
AdversarialResult,
ReachabilityResult,
read_nnet,
write_nnet,
solve,
forward_network,
check_inclusion
72 changes: 71 additions & 1 deletion src/utils/util.jl
@@ -47,6 +47,76 @@ function read_layer(output_dim::Int64, f::IOStream, act = ReLU())
return Layer(weights, bias, act)
end

"""
Prepend `//` to each line of a string.
"""
to_comment(txt) = "//"*replace(txt, "\n"=>"\n//")
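
For intuition, a quick illustrative check of `to_comment` (hypothetical REPL session, not part of this commit):

julia> to_comment("line one\nline two")
"//line one\n//line two"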

"""
print_layer(file::IOStream, layer)
Print to `file` an object implementing `weights(layer)` and `bias(layer)`
"""
function print_layer(file::IOStream, layer)
    print_row(W, i) = println(file, join(W[i,:], ", "), ",")
    W = layer.weights
    b = layer.bias
    [print_row(W, row) for row in axes(W, 1)]
    [println(file, b[row], ",") for row in axes(W, 1)]
end
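
An illustrative sketch (values made up for this note, not from the commit): a layer with weights [1.0 2.0; 3.0 4.0] and bias [5.0, 6.0] is written as one comma-terminated line per weight row, followed by one per bias entry:

1.0, 2.0,
3.0, 4.0,
5.0,
6.0,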

"""
print_header(file::IOStream, network[; header_text])
The NNet format has a particular header containing information about the network size and training data.
`print_header` does not take training-related information into account (subject to change).
"""
function print_header(file::IOStream, network; header_text="")
    println(file, to_comment(header_text))
    layer_sizes = [size(layer.weights, 2) for layer in network.layers] # input size of each layer; doesn't include the output layer
    push!(layer_sizes, size(network.layers[end].weights, 1)) # add the output layer

    # num layers, num inputs, num outputs, max layer size
    num_layers = length(network.layers)
    num_inputs = layer_sizes[1]
    num_outputs = layer_sizes[end]
    max_layer = maximum(layer_sizes[1:end-1]) # chop off the output layer for the maximum
    println(file, join([num_layers, num_inputs, num_outputs, max_layer], ", "), ",")
    # layer sizes: input, ..., output
    println(file, join(layer_sizes, ", "), ",")
    # unused line in the .nnet format
    println(file, "This line extraneous")
    # minimum vals of inputs
    println(file, string(join(fill(-floatmax(Float16), num_inputs), ","), ","))
    # maximum vals of inputs
    println(file, string(join(fill(floatmax(Float16), num_inputs), ","), ","))
    # mean vals of inputs + 1 for output
    println(file, string(join(fill(0.0, num_inputs+1), ","), ","))
    # range vals of inputs + 1 for output
    println(file, string(join(fill(1.0, num_inputs+1), ","), ","))
    return nothing
end
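
As a hedged example of the resulting header, consider a hypothetical one-layer network with 2 inputs and 1 output, written with header_text="My network" (exact float rendering follows Julia's printing of Float16):

//My network
1, 2, 1, 2,
2, 1,
This line extraneous
-6.55e4,-6.55e4,
6.55e4,6.55e4,
0.0,0.0,0.0,
1.0,1.0,1.0,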

"""
write_nnet(filename, network[; header_text])
Write `network` to \$filename.nnet.
Note: Does not perform safety checks on inputs, so use with caution.
Based on python code at https://github.com/sisl/NNet/blob/master/utils/writeNNet.py
and follows .nnet format given here: https://github.com/sisl/NNet.
"""
function write_nnet(outfile, network; header_text="Default header text.\nShould replace with the real deal.")
    name, ext = splitext(outfile)
    outfile = name * ".nnet"
    open(outfile, "w") do f
        print_header(f, network, header_text=header_text)
        for layer in network.layers
            print_layer(f, layer)
        end
    end
    return nothing
end
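
A minimal round-trip sketch (hypothetical values, assuming `using NeuralVerification`; `read_nnet` is the reader already exported by the package):

nnet = Network([Layer([1.0 2.0; 3.0 4.0], [0.5, -0.5], Id())])
path = string(tempname(), ".nnet")        # write_nnet normalizes any extension to .nnet
write_nnet(path, nnet; header_text="tiny example")
round_trip = read_nnet(path)              # weights and biases should match exactly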
"""
    compute_output(nnet::Network, input::Vector{Float64})
@@ -350,4 +420,4 @@ function split_interval(dom::Hyperrectangle, i::Int64)
    input_upper[i] = dom.center[i] + dom.radius[i]
    input_split_right = Hyperrectangle(low = input_lower, high = input_upper)
    return (input_split_left, input_split_right)
end
3 changes: 2 additions & 1 deletion test/runtests.jl
@@ -20,4 +20,5 @@ include("inactive_relus.jl")
if Base.find_package("Flux") != nothing
include("flux.jl")
end
include("complements.jl")
include("complements.jl")
include("write_nnet_test.jl")
31 changes: 31 additions & 0 deletions test/write_nnet_test.jl
@@ -0,0 +1,31 @@
# Test by creating a network, writing it out, then reading it back in and making sure all weights, biases, and activations match
@testset "Write nnet test" begin

    # 3 --> 3 --> 2 --> 5
    l1 = Layer([3.0 2.0 1.0; 5.0 6.0 7.0; 8.0 9.0 10.0], [0.8; 1.0; 1.2], ReLU())
    l2 = Layer([1.5 2.5 3.5; 4.5 6.5 7.5], [-1.0; -3.0], ReLU())
    # The .nnet file doesn't store an activation for each layer; the last layer is read back with Id()
    l3 = Layer([10.0 -1.0; -2.0 3.0; 4.0 5.0; 10.0 7.0; -3.5 -4.5], [0.0; -1.0; 0.0; 10.0; -10.0], Id())

    # Write out the network
    network_file = string(tempname(), ".nnet")
    println("File: ", network_file)
    nnet = Network([l1, l2, l3])
    write_nnet(network_file, nnet; header_text="test_header")

    # Read back in the network
    new_nnet = NeuralVerification.read_nnet(network_file)

    # Test that all weights, biases, and activations are the same
    @test new_nnet.layers[1].weights == l1.weights
    @test new_nnet.layers[1].bias == l1.bias
    @test new_nnet.layers[1].activation == l1.activation

    @test new_nnet.layers[2].weights == l2.weights
    @test new_nnet.layers[2].bias == l2.bias
    @test new_nnet.layers[2].activation == l2.activation

    @test new_nnet.layers[3].weights == l3.weights
    @test new_nnet.layers[3].bias == l3.bias
    @test new_nnet.layers[3].activation == l3.activation
end
