Skip to content

Commit

Permalink
added fitness caching to GA. why had I not done that before!
Browse files Browse the repository at this point in the history
  • Loading branch information
sujimichi committed May 27, 2012
1 parent 70c18f7 commit a65cdf1
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 32 deletions.
12 changes: 6 additions & 6 deletions bootcamp.rb
Expand Up @@ -245,13 +245,12 @@ def initialize n_layers = 2
@target_score = 842
set_config_for n_layers
reset_high_score
@fitness_cache = {}

@ga =MGA.new(:generations => 5000, :mutation_rate => 2, :gene_length => @gene_length, :fitness => Proc.new{|genome, gen|
@ga =MGA.new(:generations => 5000, :mutation_rate => 2, :gene_length => @gene_length, :cache_fitness => true, :fitness => Proc.new{|genome, gen|
puts "#{gen}\n"

genome_file = "./genome"
File.open(genome_file,'w'){|f| f.write( genome.join(",") )}

File.open(genome_file,'w'){|f| f.write( genome.join(",") )}
score_sum = 0
threads = []
levels = [1,2,3,4,5,6,7,8,9]
Expand All @@ -266,6 +265,7 @@ def initialize n_layers = 2
end
threads.each{|t| t.join}
score_sum = levels.map{|i| instance_variable_get("@ans#{i}")}.compact.sum

puts "\n\t==Summed Score #{score_sum}"
remark_on score_sum
puts "."
Expand All @@ -290,7 +290,7 @@ def initialize n_layers =2

rootdir = "/home/sujimichi/coding/lab/rubywarrior"

@ga =MGA.new(:generations => 5000, :mutation_rate => 2, :gene_length => @gene_length, :fitness => Proc.new{|genome, gen|
@ga =MGA.new(:generations => 5000, :mutation_rate => 2, :gene_length => @gene_length, :cache_fitness => true, :fitness => Proc.new{|genome, gen|
puts "#{gen}\n"
Dir.chdir(rootdir)

Expand All @@ -299,7 +299,7 @@ def initialize n_layers =2
puts "\n\n"

threads = []
levels.sort_by{rand}.each do |lvl|
levels.each do |lvl|
Dir.chdir("#{rootdir}/level#{lvl}bot-beginner")
File.open("./genome", 'w'){|f| f.write( genome.join(",") )} #write the genome to file which Player will use

Expand Down
39 changes: 36 additions & 3 deletions darwin.rb
@@ -1,6 +1,8 @@
#Micro Genetic Algorithm - slight variation on https://github.com/Sujimichi/micro_ga
class MGA
attr_accessor :population, :generations, :mutation_rate, :cross_over_rate, :current_generation, :popsize
require 'digest'

attr_accessor :population, :generations, :mutation_rate, :cross_over_rate, :current_generation, :popsize, :scores
def initialize args = {}
@popsize = args[:popsize] || 30 #Number of members (genomes) in the population
@gene_length = args[:gene_length] || 10 #Number of bit (genes) in a genome
Expand All @@ -11,6 +13,7 @@ def initialize args = {}
@fitness_function = args[:fitness] || Proc.new{|genome| genome.inject{|i,j| i+j} } #init fitness function or use simple max ones
@current_generation = 0
@scores = {}
@cache_fitness = args[:cache_fitness] || false
end

def evolve generations = @generations
Expand All @@ -29,14 +32,44 @@ def pos_mutate n
n + (rand - 0.5) #plus or minus small value. || (n-1).abs #for binary mutation; 1 -> 0, 0 -> 1
end
#Score a genome with the user-supplied fitness Proc (called with the genome
#and the current generation). When @cache_fitness is enabled, results are
#memoised in @scores keyed by the genome array, so an identical genome is
#only ever evaluated once - the whole point of the caching feature.
def fitness genome
  return @fitness_function.call(genome, @current_generation) unless @cache_fitness #caching off; evaluate every time
  @scores[genome] = @fitness_function.call(genome, @current_generation) unless @scores[genome] #update cache if value not present
  @scores[genome] #return cached value
end

#Population ranked best-first: sort ascending by fitness, then flip so the
#highest-scoring member sits at the front.
def ordered_population
  ranked = population.sort_by { |member| fitness(member) }
  ranked.reverse
end

#Fittest member of the current population (head of the ranked list).
def best
  ordered_population[0]
end

end

=begin
def cache_test
f = Proc.new{|genome| print'.';sleep(0.05); genome.inject{|i,j| i+j} }
pop = Array.new(30){ Array.new(10){ 0 } }
g1 = MGA.new(:cache_fitness => false, :generations => 5000, :fitness => f)
g2 = MGA.new(:cache_fitness => true, :generations => 5000, :fitness => f)
g1.population = pop
g2.population = pop
ave1 = g1.population.map{|g| g1.fitness g}.inject{|i,j| i+j} / g1.population.size
ave2 = g2.population.map{|g| g2.fitness g}.inject{|i,j| i+j} / g2.population.size
puts [ave1, ave2].inspect
t1_1 = Time.now;g1.evolve; t1_2 = Time.now;
t2_1 = Time.now;g2.evolve; t2_2 = Time.now;
t1 = t1_2 - t1_1
t2 = t2_2 - t2_1
ave1 = g1.population.map{|g| g1.fitness g}.inject{|i,j| i+j} / g1.population.size
ave2 = g2.population.map{|g| g2.fitness g}.inject{|i,j| i+j} / g2.population.size
puts [ave1, ave2].inspect
puts [t1, t2].inspect
end
=end
42 changes: 19 additions & 23 deletions player.rb
Expand Up @@ -3,21 +3,17 @@
class Player
#Read the evolved genome from disk and use it to wire up the warrior's
#neural-net brain. The stripped diff left both the pre- and post-change
#lines here, so `nodes` was assigned twice and the brain built twice;
#only the post-change lines are kept.
def initialize
  genome = File.open("./genome", "r"){|f| f.readlines}.join.split(",").map{|s| s.to_f} #Read genome from file.
  nodes = {:in => 16, :inner => 6, :out => 8} #3layernodes = {:in => 15, :inner => 8, :inner2 => 8, :out => 5} || #1layernodes = {:in => 15, :out => 5}
  @brain = Brains::R2D2.new(nodes, genome) #Initialize warriors brain (neural net)
end

def play_turn(warrior)
@previous_health ||= 20

#Sense world and present as an array of inputs for NN
inputs = input_array_for(warrior)

#send inputs to neural network and interpret its output as :action and :impulse
action, impulse = @brain.act_on(inputs)
puts [inputs, action, impulse].inspect
inputs = input_array_for(warrior) #Sense world and present as an array of inputs for NN
action, impulse = @brain.act_on(inputs) #send inputs to neural network and interpret its output as :action and :impulse
puts [inputs, action, impulse].inspect #whats on its mind?

#send 'impulse' and 'action' from brain to warrior. done inside rescue as brain may request actions the body can't yet do, like rest! in the eariler levels.
#send 'action' and impulse from brain to warrior. done inside rescue as brain may request actions the body can't yet do, like rest! in the eariler levels.
#no need to program which actions are allowed, evolution will work it out for itself. Yes creationists, this shit actually works!
#Once evolved the brain will 'know' what its body is capable of and the rescue should not be needed.
begin
Expand All @@ -30,16 +26,18 @@ def play_turn(warrior)

#sense the world and return info as an array of inputs for the NN
def input_array_for warrior
dirs = [:left, :forward, :right, :backward]
things = [:wall, :enemy, :captive]#, :stairs, :ticking, :golem]
dirs = [:left, :forward, :right, :backward] #directions in which things can be
things = [:wall, :enemy, :captive] #type of things there can be
vis_scale = [0, 0.6, 0.3] #used to scale the values returned by :look.

if warrior.respond_to?(:feel)
inputs = things.map do |thing|
dirs.map do |dir|
v = (warrior.feel(dir).send("#{thing}?").eql?(true) ? 1 : 0)
if warrior.respond_to?(:look)
look = warrior.look(dir)
if warrior.respond_to?(:feel)
can_look = warrior.respond_to?(:look)
inputs = things.map do |thing| #for each of the things
dirs.map do |dir| #in each of the directions
v = (warrior.feel(dir).send("#{thing}?").eql?(true) ? 1 : 0) #test if that thing is there, returning 1 for true else 0
if can_look #if warrior can also look
look = warrior.look(dir) #look in direction
#reduce to a single val from given 3 ie [0,1,1] => [0, 0.6, 0.3] => [0.6]
v = v + look.map{|l| (l.send("#{thing}?").eql?(true) ? 1 : 0) * vis_scale[look.index(l)] }.max
end
v
Expand All @@ -48,9 +46,8 @@ def input_array_for warrior
else
#in the first level the warrior has less sensory input than a sea sponge. No sensory input means no neural activity.
#So when warrior does not respond to :feel it 'imagines' that its in an empty corridor!
inputs = [1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0]
inputs = [1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0] #inputs for empty corridor.
end

#give the NN sense of whether it is armed or not.
inputs << (warrior.respond_to?(:shoot!) ? 1 : 0)

Expand All @@ -59,8 +56,7 @@ def input_array_for warrior
w_health = warrior.respond_to?(:health) ? warrior.health : 20
inputs << (1 - 1.0/20 * w_health).round(2)
inputs << ((@previous_health > w_health) ? 1 : 0) #sense of health dropping
inputs << 1 #representational bias. yeah, I should prob explain this! its REALLY important!

inputs.flatten
inputs << 1 #representational bias. yeah, I should prob explain this! its REALLY important!
inputs.flatten #return array of values.
end
end

0 comments on commit a65cdf1

Please sign in to comment.