Skip to content

Commit

Permalink
Drhuffman12/cmn (#36)
Browse files Browse the repository at this point in the history
* refactor and add cmn and mini_net parts

* crystal tool format

* code cleanup

* add Ai4cr::NeuralNetwork::Cmn::ConnectedNetSet::Sequencial, add error_distance_history tracking to Ai4cr::NeuralNetwork::Backpropagation, add training comparisons

* version bump from 0.1.8 to 0.1.9
  • Loading branch information
drhuffman12 committed Feb 17, 2020
1 parent fc5e17a commit 080c5e8
Show file tree
Hide file tree
Showing 24 changed files with 2,565 additions and 261 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ Add this to your application's `shard.yml`:
dependencies:
ai4cr:
github: drhuffman12/ai4cr
branch: master
```

## Usage
Expand Down
19 changes: 18 additions & 1 deletion shard.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,26 @@
name: ai4cr
version: 0.1.8
version: 0.1.9

authors:
- Daniel Huffman <drhuffman12@yahoo.com>

crystal: 0.33.0

license: MIT

development_dependencies:
ameba:
github: crystal-ameba/ameba
version: ~> 0.11.0

ascii_bar_charter:
github: drhuffman12/ascii_bar_charter
branch: master

# aasm:
# github: veelenga/aasm.cr
# # version: 0.11.0
# # github: drhuffman12/aasm.cr
# branch: master
# # version: 0.1.1
# # # https://github.com/veelenga/aasm.cr
116 changes: 113 additions & 3 deletions spec/ai4cr/neural_network/backpropagation_spec.cr
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,20 @@ describe Ai4cr::NeuralNetwork::Backpropagation do

describe "#eval" do
describe "when given a net with structure of [3, 2]" do
# Guard against accidental learning during inference: a forward pass via
# #eval must leave the network's weights untouched (only #train is
# allowed to adjust them). Weights are cloned before and after so the
# comparison is against snapshots, not live references.
it "weights do not change" do
  in_size = 3
  out_size = 2
  inputs = [3, 2, 3]
  structure = [in_size, out_size]
  net = Ai4cr::NeuralNetwork::Backpropagation.new(structure)

  weights_before = net.weights.clone
  # Result intentionally discarded; only the side-effect-free contract
  # of #eval is under test here (was previously bound to an unused `y`).
  net.eval(inputs)
  weights_after = net.weights.clone

  assert_equality_of_nested_list weights_before, weights_after
end

it "returns output nodes of expected size" do
in_size = 3
out_size = 2
Expand Down Expand Up @@ -211,15 +225,111 @@ describe Ai4cr::NeuralNetwork::Backpropagation do

# Training specs for Backpropagation on a minimal [3, 2] net (3 inputs,
# 2 outputs, no hidden layer). Weights are hard-coded instead of being
# left at their random initialization so the deltas, post-training
# weights, and error of a single #train call can be compared against
# exact expected values.
describe "#train" do
describe "when given a net with structure of [3, 2]" do
# Shared fixture values, kept inline; the commented-out markers below
# suggest a per-describe before_each hook was intended but not used.
# before_each do
structure = [3, 2]
net = Ai4cr::NeuralNetwork::Backpropagation.new([3, 2]).init_network
net = Ai4cr::NeuralNetwork::Backpropagation.new([3, 2])
# NOTE(review): `net` is assigned twice in a row (the first value,
# including its .init_network call, is immediately discarded) and
# `structure` above is never used ([3, 2] is passed literally) — both
# look like refactor leftovers; confirm and prune.
# One nested weight matrix per layer transition: 4 rows (3 input nodes
# + what appears to be a bias row) x 2 output nodes — TODO confirm the
# bias-row interpretation against the Backpropagation implementation.
hard_coded_weights = [
[
[-0.9, 0.7],
[-0.9, 0.6],
[0.1, 0.2],
[0.6, -0.3],
],
]
# Expected values below were pinned from a known-good run with the
# hard-coded weights, learning_rate 0.25, and momentum 0.1.
expected_deltas_before = [[0.0, 0.0]]
expected_after_deltas = [[-0.045761358353806764, 0.0031223972161964547]]
expected_after_weights = [
[
[-0.9011440339588452, 0.7000780599304048],
[-0.9022880679176903, 0.6001561198608099],
[0.0965678981234645, 0.20023417979121474],
[0.5885596604115483, -0.2992194006959509],
],
]
expected_error = 0.017946235313986033

inputs = [0.1, 0.2, 0.3]
outputs = [0.4, 0.5]
# end

# Freshly initialized nets must have all-zero deltas before any
# training has happened.
it "deltas start as zeros" do
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone
puts "\nnet (BEFORE): #{net.to_json}\n"

deltas_before = net.deltas.clone

assert_equality_of_nested_list deltas_before, expected_deltas_before
end

# A single #train call must move the deltas from zeros to the exact
# expected values (within a tight float tolerance).
it "correctly updates the deltas" do
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone
puts "\nnet (BEFORE): #{net.to_json}\n"

deltas_before = net.deltas.clone
net.train(inputs, outputs)
deltas_after = net.deltas.clone
puts "\nnet (AFTER): #{net.to_json}\n"

assert_equality_of_nested_list deltas_before, expected_deltas_before
assert_approximate_equality_of_nested_list deltas_after, expected_after_deltas, 0.0000001
end

# Complement of the #eval spec above: #train MUST mutate the weights.
it "weights do change" do
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone

weights_before = net.weights.clone
net.train(inputs, outputs)
weights_after = net.weights.clone
assert_inequality_of_nested_list weights_before, weights_after
end

# Beyond merely changing, the weights after one training step must
# equal the exact precomputed values.
it "correctly updates the weights" do
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone

weights_before = net.weights.clone
puts "\nnet (BEFORE): #{net.to_json}\n"

net.train(inputs, outputs)

weights_after = net.weights.clone
puts "\nnet (AFTER): #{net.to_json}\n"

weights_before.should eq(hard_coded_weights)

weights_after.should eq(expected_after_weights)
end

# #train returns the training error; pin its type. Integer inputs and
# outputs are used here, presumably to exercise numeric coercion —
# TODO confirm that is intentional.
it "returns an error of type Float64" do
inputs = [1, 2, 3]
outputs = [4, 5]
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone

error_value = net.train(inputs, outputs)
error_value.should be_a(Float64)
end

# And pin the exact error value for the hard-coded fixture.
it "returns the expected error" do
net.init_network
net.learning_rate = 0.25
net.momentum = 0.1
net.weights = hard_coded_weights.clone

error_value = net.train(inputs, outputs)
error_value.should eq(expected_error)
end
end
end
end
93 changes: 93 additions & 0 deletions spec/ai4cr/neural_network/cmn/connected_net_set/sequencial_spec.cr
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
require "./../../../../spec_helper"

# Specs for Ai4cr::NeuralNetwork::Cmn::ConnectedNetSet::Sequencial
# ("Sequencial" is the library's own spelling of the class name): two
# MiniNet::Exp nets shaped [3, 4] and [4, 2] are chained so the first
# net's outputs feed the second, and a single forward pass (#eval) is
# checked against precomputed expected outputs.
describe Ai4cr::NeuralNetwork::Cmn::ConnectedNetSet::Sequencial do
  describe "when given two nets with structure of [3, 4] and [4, 2]" do
    # before_each do
    # structure = [3, 2]
    # net = Ai4cr::NeuralNetwork::Backpropagation.new([3, 2])
    inputs = [0.1, 0.2, 0.3]

    # Hard-coded weights so the chained forward pass is deterministic.
    # NOTE(review): weights0 has 4 rows for 3 inputs and weights1 has
    # 5 rows for 4 inputs — presumably the extra row is a bias; confirm
    # against MiniNet::Exp.
    hard_coded_weights0 = [
      [-0.4, 0.9, -0.4, -0.7],
      [0.1, 0.8, 0.9, -0.0],
      [-0.7, -0.3, -0.6, -0.7],
      [1.0, 0.2, 0.6, -0.5]
    ]
    hard_coded_weights1 = [
      [-0.4, 0.8],
      [-1.0, -0.3],
      [-0.6, 0.6],
      [0.2, -0.3],
      [1.0, -0.1]
    ]

    # Debug output: `each` inside the interpolation prints each row
    # first and then returns its receiver, so the labeled line shows the
    # whole array afterwards.
    puts "hard_coded_weights0: #{hard_coded_weights0.each { |a| puts a.join("\t") }}"
    puts "hard_coded_weights1: #{hard_coded_weights1.each { |a| puts a.join("\t") }}"

    expected_outputs_guessed_before = [0.0, 0.0]
    expected_outputs_guessed_after = [0.454759979898907, 0.635915600435646]

    # Before any #eval, the final net's guessed outputs must be zeros.
    it "the 'outputs_guessed' start as zeros" do
      net0 = Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp.new(height: 3, width: 4)
      net1 = Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp.new(height: 4, width: 2)
      cns = Ai4cr::NeuralNetwork::Cmn::ConnectedNetSet::Sequencial(Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp).new([net0, net1])

      puts "net0.weights: #{net0.weights.map { |a| a.map { |b| b.round(1) } }}"
      puts "net1.weights: #{net1.weights.map { |a| a.map { |b| b.round(1) } }}"

      net0.init_network
      net0.learning_rate = 0.25
      net0.momentum = 0.1
      net0.weights = hard_coded_weights0.clone
      # puts "\nnet0 (BEFORE): #{net0.to_json}\n"

      net1.init_network
      net1.learning_rate = 0.25
      net1.momentum = 0.1
      net1.weights = hard_coded_weights1.clone
      # puts "\nnet1 (BEFORE): #{net1.to_json}\n"

      puts "\ncns (BEFORE): #{cns.to_json}\n"

      outputs_guessed_before = net1.outputs_guessed.clone

      assert_equality_of_nested_list outputs_guessed_before, expected_outputs_guessed_before
    end

    # A forward pass through the whole set must land the expected values
    # on the last net's outputs_guessed.
    # (Description fixed: was "start are updated as expected".)
    it "the 'outputs_guessed' are updated as expected" do
      net0 = Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp.new(height: 3, width: 4)
      net1 = Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp.new(height: 4, width: 2)
      cns = Ai4cr::NeuralNetwork::Cmn::ConnectedNetSet::Sequencial(Ai4cr::NeuralNetwork::Cmn::MiniNet::Exp).new([net0, net1])

      puts "net0.weights: #{net0.weights.map { |a| a.map { |b| b.round(1) } }}"
      puts "net1.weights: #{net1.weights.map { |a| a.map { |b| b.round(1) } }}"

      net0.init_network
      net0.learning_rate = 0.25
      net0.momentum = 0.1
      net0.weights = hard_coded_weights0.clone
      # puts "\nnet0 (BEFORE): #{net0.to_json}\n"

      net1.init_network
      net1.learning_rate = 0.25
      net1.momentum = 0.1
      net1.weights = hard_coded_weights1.clone
      # puts "\nnet1 (BEFORE): #{net1.to_json}\n"

      puts "\ncns (BEFORE): #{cns.to_json}\n"

      # (Removed an unused `outputs_guessed_before` clone that was never
      # asserted on in this example.)
      cns.eval(inputs)
      outputs_guessed_after = cns.net_set.last.outputs_guessed.clone
      puts "\ncns (AFTER): #{cns.to_json}\n"

      # NOTE(review): no tolerance argument is passed here, unlike the
      # backpropagation specs — confirm the helper's default is adequate
      # for these float expectations.
      assert_approximate_equality_of_nested_list outputs_guessed_after, expected_outputs_guessed_after
    end
  end
end
Loading

0 comments on commit 080c5e8

Please sign in to comment.