
Commit

trivial code cleanup
5712249898 committed Nov 11, 2017
1 parent dd13d90 commit c09e200
Showing 4 changed files with 19 additions and 24 deletions.
7 changes: 1 addition & 6 deletions genome.py
@@ -70,17 +70,12 @@ def mutate_one_gene(self):
 gene_to_mutate = random.choice( list(self.all_possible_genes.keys()) )

 # And then let's mutate one of the genes.
-# We need to make sure that this actually creates mutation!!!!!
+# Make sure that this actually creates mutation
 current_value = self.geneparam[gene_to_mutate]
 possible_choices = copy.deepcopy(self.all_possible_genes[gene_to_mutate])

-#print(self.all_possible_genes[gene_to_mutate])
-#print(current_value)
-
 possible_choices.remove(current_value)

-#print(possible_choices)
-
 self.geneparam[gene_to_mutate] = random.choice( possible_choices )

 self.update_hash()
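
For readers skimming the diff: the mutation is guaranteed to change the gene because the current value is removed from the candidate list before sampling. A minimal standalone sketch of that pattern (hypothetical one-gene table, not the repository's full class):

import copy
import random

# Hypothetical gene table, for illustration only.
all_possible_genes = {'activation': ['relu', 'elu', 'tanh', 'sigmoid']}
geneparam = {'activation': 'relu'}

gene_to_mutate = random.choice(list(all_possible_genes.keys()))

# Copy the candidates, then drop the current value so random.choice
# cannot re-select it: the chosen gene always actually changes.
possible_choices = copy.deepcopy(all_possible_genes[gene_to_mutate])
possible_choices.remove(geneparam[gene_to_mutate])

geneparam[gene_to_mutate] = random.choice(possible_choices)
assert geneparam['activation'] != 'relu'
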
1 change: 0 additions & 1 deletion idgen.py
@@ -14,7 +14,6 @@ def __init__(self):

 def get_next_ID(self):

-#id_to_return = self.currentID
 self.currentID += 1

 return self.currentID
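
For context, a minimal reconstruction of the generator around this method (a sketch: the class name and the starting value of 0 are assumptions, since only get_next_ID is visible in the diff):

class IDgen:
    """Sequential ID generator; assumes the counter starts at 0."""

    def __init__(self):
        self.currentID = 0  # assumed initial value

    def get_next_ID(self):
        # Increment before returning, so the first ID handed out is 1.
        self.currentID += 1
        return self.currentID

gen = IDgen()
print(gen.get_next_ID())  # 1
print(gen.get_next_ID())  # 2
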
28 changes: 14 additions & 14 deletions main.py
@@ -124,35 +124,35 @@ def main():
 dataset = 'mnist_cnn'
 elif (ds == 3):
 dataset = 'cifar10_mlp'
-else:
+elif (ds == 4):
 dataset = 'cifar10_cnn'
+else:
+dataset = 'mnist_mlp'

print("***Dataset:", dataset)

#dataset = 'mnist_mlp' # 'cifar10' 'mnist_mlp' 'mnist_cnn'

 if dataset == 'mnist_cnn':
-generations = 5 # Number of times to evolve the population.
+generations = 8 # Number of times to evolve the population.
 all_possible_genes = {
-'nb_neurons': [32, 64, 128],
-'nb_layers': [1, 2, 3],
-'activation': ['relu', 'elu', 'tanh', 'sigmoid'],
+'nb_neurons': [16, 32, 64, 128],
+'nb_layers': [1, 2, 3, 4, 5],
+'activation': ['relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid', 'softplus', 'linear'],
 'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad', 'adadelta', 'adamax', 'nadam']
 }
 elif dataset == 'mnist_mlp':
-generations = 3 # Number of times to evolve the population.
+generations = 8 # Number of times to evolve the population.
 all_possible_genes = {
 'nb_neurons': [64, 128], #, 256, 512, 768, 1024],
-'nb_layers': [1, 2, 3, 4],
-'activation': ['relu', 'elu', 'tanh', 'sigmoid'],
+'nb_layers': [1, 2, 3, 4, 5],
+'activation': ['relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid', 'softplus', 'linear'],
 'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad', 'adadelta', 'adamax', 'nadam']
 }
 elif dataset == 'cifar10_mlp':
 generations = 8 # Number of times to evolve the population.
 all_possible_genes = {
 'nb_neurons': [64, 128, 256, 512, 768, 1024],
-'nb_layers': [1, 2, 3, 4],
-'activation': ['relu', 'elu', 'tanh', 'sigmoid'],
+'nb_layers': [1, 2, 3, 4, 5],
+'activation': ['relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid', 'softplus', 'linear'],
 'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad', 'adadelta', 'adamax', 'nadam']
 }
 elif dataset == 'cifar10_cnn':
@@ -167,8 +167,8 @@ def main():
 generations = 8 # Number of times to evolve the population.
 all_possible_genes = {
 'nb_neurons': [64, 128, 256, 512, 768, 1024],
-'nb_layers': [1, 2, 3, 4],
-'activation': ['relu', 'elu', 'tanh', 'sigmoid'],
+'nb_layers': [1, 2, 3, 4, 5],
+'activation': ['relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid', 'softplus', 'linear'],
 'optimizer': ['rmsprop', 'adam', 'sgd', 'adagrad', 'adadelta', 'adamax', 'nadam']
 }
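
Taken together, the widened gene lists enlarge the search space noticeably. A quick size check for the cifar10_cnn genome above (plain arithmetic, not repository code):

# 6 neuron counts x 5 depths x 7 activations x 7 optimizers = 1470 genomes
nb_neurons = [64, 128, 256, 512, 768, 1024]
nb_layers = [1, 2, 3, 4, 5]
activation = ['relu', 'elu', 'tanh', 'sigmoid', 'hard_sigmoid', 'softplus', 'linear']
optimizer = ['rmsprop', 'adam', 'sgd', 'adagrad', 'adadelta', 'adamax', 'nadam']

print(len(nb_neurons) * len(nb_layers) * len(activation) * len(optimizer))  # 1470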

7 changes: 4 additions & 3 deletions train.py
@@ -32,7 +32,7 @@ def get_cifar10_mlp():
 # Set defaults.
 nb_classes = 10 #dataset dependent
 batch_size = 64
-epochs = 1 #will mostly terminate earlier
+epochs = 4
 input_shape = (3072,) #because it's RGB

 # Get the data.
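
The 3072 in input_shape is CIFAR-10's 32x32 RGB images flattened into one vector, which is what the "because it's RGB" comment is getting at:

# CIFAR-10 images are 32 x 32 pixels with 3 colour channels;
# flattening them for the MLP gives a 3072-dimensional input.
height, width, channels = 32, 32, 3
assert height * width * channels == 3072  # matches input_shape above
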
@@ -84,8 +84,8 @@ def get_mnist_mlp():
 """Retrieve the MNIST dataset and process the data."""
 # Set defaults.
 nb_classes = 10 #dataset dependent
-batch_size = 128*12*8 # all we use mlp for is super-crude debugging.
-epochs = 1 #will mostly terminate earlier
+batch_size = 64
+epochs = 4
 input_shape = (784,)

 # Get the data.
@@ -227,6 +227,7 @@ def compile_model_cnn(geneparam, nb_classes, input_shape):
 model.add(Dense(nb_classes, activation = 'softmax'))

 #BAYESIAN CONVOLUTIONAL NEURAL NETWORKS WITH BERNOULLI APPROXIMATE VARIATIONAL INFERENCE
+#need to read this paper

 model.compile(loss='categorical_crossentropy',
 optimizer=optimizer,
