diff --git a/src/utils/testing_utils.jl b/src/utils/testing_utils.jl
new file mode 100644
index 00000000..44ca1d94
--- /dev/null
+++ b/src/utils/testing_utils.jl
@@ -0,0 +1,36 @@
+using Random
+using NeuralVerification
+
+"""
+    make_random_network(layer_sizes::Vector{Int}, [min_weight = -1.0], [max_weight = 1.0], [min_bias = -1.0], [max_bias = 1.0], [rng = MersenneTwister(0)])
+
+Generate a network with random weights and biases. The first entry of `layer_sizes` gives
+the input dimension. Weights are drawn uniformly from [min_weight, max_weight] and
+biases uniformly from [min_bias, max_bias]. The last layer uses the Id() activation
+function and all other layers use ReLU(). A random number generator (rng) can be passed
+in so that network generation is seeded and reproducible.
+"""
+function make_random_network(layer_sizes::Vector{Int}, min_weight = -1.0, max_weight = 1.0, min_bias = -1.0, max_bias = 1.0, rng = MersenneTwister(0))
+    # Create one layer for each consecutive pair of entries in layer_sizes
+    layers = []
+    for index in 1:(length(layer_sizes) - 1)
+        cur_size = layer_sizes[index]
+        next_size = layer_sizes[index + 1]
+        # Use the Id activation for the last layer, otherwise use ReLU
+        if index == (length(layer_sizes) - 1)
+            cur_activation = NeuralVerification.Id()
+        else
+            cur_activation = NeuralVerification.ReLU()
+        end
+
+        # Weights have dimension: num_out x num_in
+        cur_weights = min_weight .+ (max_weight - min_weight) * rand(rng, Float64, (next_size, cur_size))
+        cur_weights = reshape(cur_weights, (next_size, cur_size)) # keep a matrix even when one dimension equals 1
+
+        # Bias has dimension: num_out x 1
+        cur_bias = min_bias .+ (max_bias - min_bias) * rand(rng, Float64, next_size)
+        push!(layers, NeuralVerification.Layer(cur_weights, cur_bias, cur_activation))
+    end
+
+    return Network(layers)
+end
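
For context, a minimal usage sketch of the new helper. The include path, the example seed, and the layer sizes are illustrative assumptions, not part of this change; the Network/Layer field accesses follow how the helper itself constructs them.

using Random
using NeuralVerification
include("src/utils/testing_utils.jl")   # assumed path to this new file

# Hypothetical example: 2 inputs, two hidden layers of 5 neurons, 1 output,
# generated reproducibly from a seeded RNG.
rng = MersenneTwister(42)
net = make_random_network([2, 5, 5, 1], -1.0, 1.0, -1.0, 1.0, rng)

length(net.layers)            # 3 layers, one per consecutive pair of sizes
size(net.layers[1].weights)   # (5, 2): num_out x num_in
net.layers[end].activation    # Id(), so the final layer is linear

Passing the same rng seed again reproduces the identical network, which is the point of exposing the rng argument for tests.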