From 6d4283c99d31d2594a3a669f78653e033f8f8a93 Mon Sep 17 00:00:00 2001
From: castrong
Date: Mon, 29 Jun 2020 18:42:42 -0700
Subject: [PATCH 1/2] added testing_utils.jl file, added make_random_network function

---
 src/utils/testing_utils.jl | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 src/utils/testing_utils.jl

diff --git a/src/utils/testing_utils.jl b/src/utils/testing_utils.jl
new file mode 100644
index 00000000..660fe2bf
--- /dev/null
+++ b/src/utils/testing_utils.jl
@@ -0,0 +1,34 @@
+using Random
+"""
+    make_random_network(layer_sizes::Vector{Int, 1}, [min_weight = -1.0], [max_weight = 1.0], [min_bias = -1.0], [max_bias = 1.0], [rng = 1.0])
+    read_layer(output_dim::Int, f::IOStream, [act = ReLU()])
+
+Generate a network with random weights and bias. The first layer is treated as the input.
+The values for the weights and bias will be uniformly drawn from the range between min_weight
+and max_weight and min_bias and max_bias respectively. The last layer will have an ID()
+activation function and the rest will have ReLU() activation functions. Allow a random number
+generator(rng) to be passed in. This allows for seeded random network generation.
+"""
+function make_random_network(layer_sizes::Vector{Int, 1}, min_weight = -1.0, max_weight = 1.0, min_bias = -1.0, max_bias = 1.0, rng=MersenneTwister(0))
+    # Create each layer based on the layer_size
+    layers = []
+    for index in 1:(length(layer_sizes)-1)
+        # Use Id activation for the last layer - otherwise use ReLU activation
+        if index == length(layer_sizes)
+            cur_activation = NeuralVerification.Id()
+        else
+            cur_activation = NeuralVerification.ReLU()
+        end
+
+        # Dimension: num_out x num_in
+        cur_weights = min_weight + (max_weight - min_weight) * rand(rng, Float64, (layer_sizes(index+1), layer_sizes(index)))
+        cur_weights = reshape(cur_weights, (layer_sizes(index+1), layer_sizes(index))) # for edge case where 1 dimension is equal to 1 this keeps it from being a 1-d vector
+
+        # Dimension: num_out x 1
+        cur_bias = min_weight + (max_weight - min_weight) * rand(rng, Float64, (layer_sizes(index+1)))
+        push!(layers, Layer(cur_weights, cur_bias, cur_activation))
+    end
+
+    return Network(layers)
+
+end

From bacf128749e6e278d55f8f51964205d1fbb07bdb Mon Sep 17 00:00:00 2001
From: castrong
Date: Mon, 29 Jun 2020 18:52:53 -0700
Subject: [PATCH 2/2] fixed bugs

---
 src/utils/testing_utils.jl | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/src/utils/testing_utils.jl b/src/utils/testing_utils.jl
index 660fe2bf..44ca1d94 100644
--- a/src/utils/testing_utils.jl
+++ b/src/utils/testing_utils.jl
@@ -1,6 +1,8 @@
 using Random
+using NeuralVerification;
+
 """
-    make_random_network(layer_sizes::Vector{Int, 1}, [min_weight = -1.0], [max_weight = 1.0], [min_bias = -1.0], [max_bias = 1.0], [rng = 1.0])
+    make_random_network(layer_sizes::Vector{Int}, [min_weight = -1.0], [max_weight = 1.0], [min_bias = -1.0], [max_bias = 1.0], [rng = 1.0])
     read_layer(output_dim::Int, f::IOStream, [act = ReLU()])
 
 Generate a network with random weights and bias. The first layer is treated as the input.
@@ -9,26 +11,27 @@ and max_weight and min_bias and max_bias respectively. The last layer will have
 activation function and the rest will have ReLU() activation functions. Allow a random number
 generator(rng) to be passed in. This allows for seeded random network generation.
 """
-function make_random_network(layer_sizes::Vector{Int, 1}, min_weight = -1.0, max_weight = 1.0, min_bias = -1.0, max_bias = 1.0, rng=MersenneTwister(0))
+function make_random_network(layer_sizes::Vector{Int}, min_weight = -1.0, max_weight = 1.0, min_bias = -1.0, max_bias = 1.0, rng=MersenneTwister(0))
     # Create each layer based on the layer_size
     layers = []
     for index in 1:(length(layer_sizes)-1)
+        cur_size = layer_sizes[index]
+        next_size = layer_sizes[index+1]
         # Use Id activation for the last layer - otherwise use ReLU activation
-        if index == length(layer_sizes)
+        if index == (length(layer_sizes)-1)
             cur_activation = NeuralVerification.Id()
         else
             cur_activation = NeuralVerification.ReLU()
         end
 
         # Dimension: num_out x num_in
-        cur_weights = min_weight + (max_weight - min_weight) * rand(rng, Float64, (layer_sizes(index+1), layer_sizes(index)))
-        cur_weights = reshape(cur_weights, (layer_sizes(index+1), layer_sizes(index))) # for edge case where 1 dimension is equal to 1 this keeps it from being a 1-d vector
+        cur_weights = min_weight .+ (max_weight - min_weight) * rand(rng, Float64, (next_size, cur_size))
+        cur_weights = reshape(cur_weights, (next_size, cur_size)) # for edge case where 1 dimension is equal to 1 this keeps it from being a 1-d vector
 
         # Dimension: num_out x 1
-        cur_bias = min_weight + (max_weight - min_weight) * rand(rng, Float64, (layer_sizes(index+1)))
-        push!(layers, Layer(cur_weights, cur_bias, cur_activation))
+        cur_bias = min_bias .+ (max_bias - min_bias) * rand(rng, Float64, (next_size))
+        push!(layers, NeuralVerification.Layer(cur_weights, cur_bias, cur_activation))
     end
-
+
     return Network(layers)
-
 end