
Release 1.1.8 fix

Wicker25 committed May 23, 2012
1 parent 921d75a commit 00ef1e995b3c3d5899a8c4e4fb9cf8abc698bd1f
Showing with 23 additions and 23 deletions.
  1. +16 −16 tools/src/gym.cpp
  2. +7 −7 tools/src/ocr.cpp
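The commit is a pure API rename: the Trainer and Network methods move from PascalCase to camelCase (SetReportFun → setReportFun, SetParameters → setParams, TrainOnFile → trainOnFile, Run → run, Save → save) and the Algorithms namespace becomes algorithms. A minimal sketch of calling code against the renamed interface follows; the serotonina.hpp header, the train_and_save helper, the sample size and the file names are assumptions for illustration only, while the method names, template arguments and argument order mirror the hunks below.

#include <vector>
#include <serotonina.hpp>   // assumed umbrella header for the library

using namespace serotonina;

// Hypothetical helper: train an existing network on a file and save it,
// using the camelCase interface introduced by this commit. The Trainer
// constructor taking a Network pointer follows the usage in gym.cpp.
void train_and_save( Network *net, const char *train_file, const char *out_file )
{
    Trainer trainer( net );

    // Formerly SetParameters(): decrease/increase factors for Rprop
    trainer.setParams( 0.5, 1.2 );

    // Formerly TrainOnFile< Algorithms::Rprop >(): desired error,
    // maximum epochs and report frequency, as in the hunks below
    trainer.trainOnFile< algorithms::Rprop >( train_file, 0.00001, 200, 1 );

    // Formerly Run(): feed one (illustrative) sample and read the outputs
    T_Precision sample[2] = { 1.0, 0.0 };
    const std::vector< T_Precision > out = net->run( &sample[0] );
    (void) out;

    // Formerly Save(): write the trained network to disk
    net->save( out_file );
}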
tools/src/gym.cpp
@@ -788,8 +788,8 @@ Gym::startTraining() {
Trainer trainer( this->neural_network );
// Set the training report callback
- trainer.SetReportFun( Gym::static_update_plot );
- trainer.SetReportFunData( (void *) this );
+ trainer.setReportFun( Gym::static_update_plot );
+ trainer.setReportFunData( (void *) this );
// Check which training algorithm to use for the neural network
switch ( train_algorithm ) {
@@ -804,10 +804,10 @@ Gym::startTraining() {
const T_Precision momentum = (T_Precision) this->momentum_input->value();
// Set the learning parameters
- trainer.SetParameters( eps, momentum );
+ trainer.setParams( eps, momentum );
// Train the neural network
- trainer.TrainOnFile< Algorithms::Batch >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::Batch >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -822,10 +822,10 @@ Gym::startTraining() {
const T_Precision decrease_factor = (T_Precision) this->decrease_factor_input->value();
// Set the learning parameters
- trainer.SetParameters( decrease_factor, increase_factor );
+ trainer.setParams( decrease_factor, increase_factor );
// Train the neural network
- trainer.TrainOnFile< Algorithms::Rprop >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::Rprop >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -840,10 +840,10 @@ Gym::startTraining() {
const T_Precision decrease_factor = (T_Precision) this->decrease_factor_input->value();
// Set the learning parameters
- trainer.SetParameters( decrease_factor, increase_factor );
+ trainer.setParams( decrease_factor, increase_factor );
// Train the neural network
- trainer.TrainOnFile< Algorithms::RpropPlus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::RpropPlus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -858,10 +858,10 @@ Gym::startTraining() {
const T_Precision decrease_factor = (T_Precision) this->decrease_factor_input->value();
// Set the learning parameters
- trainer.SetParameters( decrease_factor, increase_factor );
+ trainer.setParams( decrease_factor, increase_factor );
// Train the neural network
- trainer.TrainOnFile< Algorithms::RpropMinus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::RpropMinus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -876,10 +876,10 @@ Gym::startTraining() {
const T_Precision decrease_factor = (T_Precision) this->decrease_factor_input->value();
// Set the learning parameters
- trainer.SetParameters( decrease_factor, increase_factor );
+ trainer.setParams( decrease_factor, increase_factor );
// Train the neural network
- trainer.TrainOnFile< Algorithms::IRpropPlus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::IRpropPlus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -894,10 +894,10 @@ Gym::startTraining() {
const T_Precision decrease_factor = (T_Precision) this->decrease_factor_input->value();
// Set the learning parameters
- trainer.SetParameters( decrease_factor, increase_factor );
+ trainer.setParams( decrease_factor, increase_factor );
// Train the neural network
- trainer.TrainOnFile< Algorithms::IRpropMinus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
+ trainer.trainOnFile< algorithms::IRpropMinus >( this->train_set_path.c_str(), desired_error, max_epochs, report_frequency );
break;
}
@@ -963,7 +963,7 @@ Gym::updatePlot( Network *network, size_t epochs, time_t elapsed_time,
}
// Run the neural network on the sample's inputs
- const std::vector< T_Precision > out = this->neural_network->Run( &inputs_sample[0] );
+ const std::vector< T_Precision > out = this->neural_network->run( &inputs_sample[0] );
// Store the network's outputs
for ( j = 0; j < this->output_size; j++ ) {
@@ -1048,7 +1048,7 @@ Gym::saveNeuralNetwork( const char *path ) {
if ( this->neural_network != NULL ) {
// Save the neural network to a file
- this->neural_network->Save( path );
+ this->neural_network->save( path );
}
}
tools/src/ocr.cpp
@@ -357,27 +357,27 @@ Ocr::TrainNetwork( int choice ) {
do {
// Check whether the desired error has not been reached yet
- if ( trainer.GetError() > 0.00001 ) {
+ if ( trainer.getError() > 0.00001 ) {
// Progress log
printf( "Retrying to learn the letter '%c'...\n", 'A' + (char) choice );
// Reinitialize the synaptic weights before the next training run
- trainer.InitWeights();
+ trainer.initWeights();
}
// Train the neural network
- trainer.SetParameters( 0.5, 1.2 );
- trainer.TrainOnFile< Algorithms::Rprop>( "data/character/train/character.train", 0.00001, 200, 1 );
+ trainer.setParams( 0.5, 1.2 );
+ trainer.trainOnFile< algorithms::Rprop>( "data/character/train/character.train", 0.00001, 200, 1 );
// Loop until the desired error is reached
- } while ( trainer.GetError() > 0.00001 );
+ } while ( trainer.getError() > 0.00001 );
// Build the destination path
snprintf( path, 100, "data/character/%c.net", 'a' + (char) choice );
// Save the network to the destination file
- this->neural_network[choice]->Save( path );
+ this->neural_network[choice]->save( path );
}
void
@@ -659,7 +659,7 @@ Ocr::RecognitionCharacter() {
for ( i = 0; i < _OCR_CHARACTER_NUM_; i++ ) {
// Look for the best match
- if ( ( new_check = this->neural_network[i]->Run( input )[0]) > check ) {
+ if ( ( new_check = this->neural_network[i]->run( input )[0]) > check ) {
// Store the new best match
check = new_check;

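In ocr.cpp the per-character training loop keeps its structure and only lower-cases the calls. A sketch of that retrain-until-threshold pattern with the renamed API is below; getError(), initWeights(), setParams() and trainOnFile() are the calls touched by this commit, while the train_until helper name is hypothetical and the threshold, epoch limit and Rprop parameters simply mirror the Ocr::TrainNetwork hunk above.

#include <serotonina.hpp>   // assumed umbrella header, as above

using namespace serotonina;

// Hypothetical helper mirroring Ocr::TrainNetwork: retrain from fresh
// weights until the error drops below the desired threshold.
void train_until( Trainer &trainer, const char *train_file )
{
    const double desired_error = 0.00001;

    do {
        // If the previous pass missed the target, restart from new weights
        if ( trainer.getError() > desired_error ) {
            trainer.initWeights();
        }

        // Same Rprop parameters and limits as in the hunk above
        trainer.setParams( 0.5, 1.2 );
        trainer.trainOnFile< algorithms::Rprop >( train_file, desired_error, 200, 1 );

    // Keep looping until the desired error is reached
    } while ( trainer.getError() > desired_error );
}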