
Commit

In the middle of writing the resultor.
Ragav Venkatesan committed Jan 23, 2017
1 parent 2dc937a commit ce45ce5
Showing 18 changed files with 103 additions and 15 deletions.
Binary file removed docs/source/pantry/sample_visualizations/predict.pdf
Binary file removed docs/source/pantry/sample_visualizations/test.pdf
Binary file removed docs/source/pantry/sample_visualizations/train.pdf
(Remaining changed binary files are not shown.)
6 changes: 3 additions & 3 deletions pantry/tutorials/lenet.py
@@ -33,7 +33,7 @@ def lenet5 ( dataset= None, verbose = 1 ):
"frequency" : 1,
"sample_size": 32,
"rgb_filters": False,
"debug_functions" : False,
"debug_functions" : True,
"debug_layers": False, # Since we are on steroids this time, print everything.
"id" : 'main'
}
@@ -169,7 +169,7 @@ def lenet_maxout ( dataset= None, verbose = 1 ):
"frequency" : 1,
"sample_size": 32,
"rgb_filters": True,
"debug_functions" : False,
"debug_functions" : True,
"debug_layers": False, # Since we are on steroids this time, print everything.
"id" : 'main'
}
@@ -298,6 +298,6 @@ def lenet_maxout ( dataset= None, verbose = 1 ):
dataset = data.dataset_location()

lenet5 ( dataset, verbose = 2 )
-   lenet_maxout (dataset, verbose = 2)
+   #lenet_maxout (dataset, verbose = 2)


24 changes: 24 additions & 0 deletions yann/modules/resultor.py
@@ -19,6 +19,9 @@ class resultor(module):
"costs" : "<cost_file_name>.txt",
"confusion" : "<confusion_file_name>.txt",
"network" : "<network_save_file_name>.pkl"
"learning_rate" : "<learning_rate_file_name>.txt"
"momentum" : <momentum_file_name>.txt
"visualize" : <bool>
"id" : id of the resultor
}
@@ -52,6 +55,11 @@ def __init__( self, resultor_init_args, verbose = 1):
                self.confusion_file = value
            elif item == "network":
                self.network_file = value
            elif item == "learning_rate":
                self.learning_rate = value
            elif item == "momentum":
                self.momentum = value


if not hasattr(self, 'root'): raise Exception('root variable has not been provided. \
Without a root folder, no save can be performed')
@@ -62,5 +70,21 @@ def __init__( self, resultor_init_args, verbose = 1):

        if verbose >= 3:
            print "... Resultor is initialized"

    def process_results( self,
                         cost,
                         lr,
                         mom,
                         verbose = 2):
        """
        This method will print the results and also write them down in the appropriate files.

        Args:
            cost: cost, a float.
            lr: learning rate, a float.
            mom: momentum, a float.
        """




88 changes: 76 additions & 12 deletions yann/network.py
@@ -417,22 +417,22 @@ def add_module (self, type, params, verbose = 2):

        # input parameter `visualizer` is used
        if type == 'visualizer':
-           self.add_visualizer(visualizer_params = params, verbose = verbose)
+           self._add_visualizer(visualizer_params = params, verbose = verbose)

        # input parameter `optimizer` is used
        elif type == 'optimizer':
-           self.add_optimizer(optimizer_params = params, verbose = verbose)
+           self._add_optimizer(optimizer_params = params, verbose = verbose)

        elif type == 'datastream':
-           self.add_datastream(dataset_params = params, verbose = verbose)
+           self._add_datastream(dataset_params = params, verbose = verbose)

        elif type == 'resultor':
-           self.add_resultor(resultor_params = params, verbose = verbose)
+           self._add_resultor(resultor_params = params, verbose = verbose)

        else:
            raise Exception ('No module called ' + type)

-   def add_resultor(self, resultor_params, verbose = 2):
+   def _add_resultor(self, resultor_params = None, verbose = 2):
"""
This function is used to add a resultor to the network.
Expand All @@ -441,16 +441,47 @@ def add_resultor(self, resultor_params, verbose = 2):
Refer to the network or resultor class for details.
verbose: Similar to what is found in the rest of the toolbox.
"""
        if resultor_params is None:
            resultor_params = {}

        if not "id" in resultor_params.keys():
            id = len(self.resultor) + 1
            resultor_params["id"] = id
        else:
            id = resultor_params['id']

        if not "root" in resultor_params.keys():
            resultor_params["root"] = "."

        if not "results" in resultor_params.keys():
            resultor_params["results"] = "results.txt"

        if not "errors" in resultor_params.keys():
            resultor_params["errors"] = "errors.txt"

        if not "costs" in resultor_params.keys():
            resultor_params["costs"] = "costs.txt"

        if not "confusion" in resultor_params.keys():
            resultor_params["confusion"] = "confusion.txt"

        if not "network" in resultor_params.keys():
            resultor_params["network"] = "network.pkl"

        if not "learning_rate" in resultor_params.keys():
            resultor_params["learning_rate"] = "learning_rate.txt"

        if not "momentum" in resultor_params.keys():
            resultor_params["momentum"] = "momentum.txt"

        if not "visualize" in resultor_params.keys():
            resultor_params["visualize"] = True

        from yann.modules.resultor import resultor
        self.resultor[id] = resultor ( resultor_init_args = resultor_params, verbose = verbose )
        self.last_resultor_created = id

-   def add_visualizer(self, visualizer_params, verbose = 2):
+   def _add_visualizer(self, visualizer_params, verbose = 2):
        """
        This function is used to add a visualizer to the network.
@@ -469,7 +500,7 @@ def add_visualizer(self, visualizer_params, verbose = 2):
                                               verbose = verbose )
        self.last_visualizer_created = id

-   def add_optimizer(self, optimizer_params, verbose = 2):
+   def _add_optimizer(self, optimizer_params, verbose = 2):
        """
        This function is used to add an optimizer to the network.
@@ -487,7 +518,7 @@ def add_optimizer(self, optimizer_params, verbose = 2):
        self.optimizer[id] = optimizer ( optimizer_init_args = optimizer_params, verbose = verbose )
        self.last_optimizer_created = id

-   def add_datastream(self, dataset_params, verbose = 2):
+   def _add_datastream(self, dataset_params, verbose = 2):
        """
        This function is used to add a datastream to the network.
@@ -1797,6 +1828,16 @@ def visualize(self, epoch = 0, verbose =2 ):
        self.visualize_activities(epoch = epoch, verbose = verbose)
        self.visualize_filters(epoch = epoch, verbose = verbose)

    def _cook_resultor (self, resultor = None, verbose = 2):
        """
        This is an internal function that cooks a resultor.

        Args:
            resultor: the resultor module to cook.
            verbose: as always
        """
        if verbose >= 3:
            print "... Resultor is cooked"

def cook(self, verbose = 2, **kwargs):
"""
This function builds the backprop network, and makes the trainer, tester and validator
@@ -1866,6 +1907,26 @@ def cook(self, verbose = 2, **kwargs):
else:
params = params

        if not 'resultor' in kwargs.keys():
            resultor = None
        else:
            resultor = kwargs['resultor']

        if resultor is None:
            if self.last_resultor_created is None:
                if verbose >= 3:
                    print '... No resultor setup, creating a default one.'
                self.add_module( type = 'resultor', verbose = verbose )
            else:
                if verbose >= 3:
                    print "... resultor not provided, assuming " + str(self.last_resultor_created)
            resultor = self.last_resultor_created
        else:
            if not resultor in self.resultor.keys():
                raise Exception ("Resultor " + str(resultor) + " not found.")
        self.cooked_resultor = self.resultor[resultor]


        if generator is None and classifier is None:
            if verbose >= 3:
                print "... assuming classifier because it is not specified what network we are \
@@ -1947,13 +2008,12 @@ def cook(self, verbose = 2, **kwargs):
        self.cooked_visualizer = self.visualizer[visualizer]
        self._cook_visualizer(verbose = verbose) # always cook visualizer last.
        self.visualize (epoch = 0, verbose = verbose)

        # Cook Resultor.
        self._cook_resultor(resultor = self.cooked_resultor, verbose = verbose)

    def print_status (self, epoch , verbose = 2):
        """
        This function prints the cost of the current epoch, learning rate and momentum of the
-       network at the moment.
+       network at the moment. This also calls the resultor to process results.

        Todo:
            This needs to go to the visualizer.
@@ -1976,7 +2036,11 @@ def print_status (self, epoch , verbose = 2):
print "... Learning Rate : " + str(self.learning_rate.get_value(borrow=\
self.borrow))
print "... Momentum : " + str(self.current_momentum(epoch))


self.cooked_resultor.process_results(cost = self.cost[-1],
lr = self.learning_rate.get_value(borrow=self.borrow),
mom = self.current_momentum(epoch),
verbose = verbose)


    def _print_layer (self, id, prefix = " ", nest = True, last = True):
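
Taken together, these changes let a resultor be attached like any other module and cooked with the
rest of the network. Below is a hedged usage sketch, not taken from the commit: the parameter
values are just the defaults that _add_resultor fills in when a key is omitted, and the datastream
and layer setup that a real run needs is elided.

from yann.network import network

net = network()
net.add_module ( type = 'resultor',
                 params = {
                     "root"          : ".",
                     "results"       : "results.txt",
                     "costs"         : "costs.txt",
                     "learning_rate" : "learning_rate.txt",
                     "momentum"      : "momentum.txt",
                     "id"            : 'main'
                 },
                 verbose = 2 )
# ... add a datastream and layers as usual, then cook. cook() now also cooks the
# resultor, and print_status() forwards cost, learning rate and momentum to
# resultor.process_results() whenever it is called.
net.cook ( verbose = 2 )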
