Skip to content

Examples of scripts

Gustavo Rosa edited this page Aug 29, 2016 · 1 revision

The LibDEV package contains a directory LibDEV/examples, in which you can find some useful examples to work with our deep learning techniques + optimization algorithms.

It is really simple to work with any algorithm, as we have already included all example source codes in this package. For instance, we will show an example of how to work with RBM.c.

RBM.c Usage

Usage: RBM <P1> <P2> <P3> <P4> <P5> <P6> <P7> <P8> <P9> <P10>

P1: training dataset in the OPF file format
P2: testing dataset in the OPF file format
P3: output results file name
P4: cross-validation iteration number
P5: search space configuration file
P6: output best parameters file name
P7: number of epochs
P8: batch size
P9: number of iterations for Contrastive Divergence
P10: training method (1 - CD | 2 - PCD | 3 - FPCD)

If you wish to work with another optimization technique, just look for the optimization directives and change them to your desired technique. We have tagged in bold all the needed directives in the following example.

#include "dev.h"

/* Optimizes a Bernoulli RBM's meta-parameters with <b>PSO</b>, then trains an
 * RBM with the best parameters found and reports reconstruction errors on the
 * training and testing sets. To use another optimization technique, replace
 * the bold-tagged directives (<b>_PSO_</b> and run<b>PSO</b>) accordingly. */
int main(int argc, char **argv){
    if(argc != 11){
        fprintf(stderr,"\nUsage: RBM <training set> <testing set> <output results file name> <cross-validation iteration number> \
                <search space configuration file> <output best parameters file name> <n_epochs> <batch_size> \
                <number of iterations for Contrastive Divergence> <1 - CD | 2 - PCD | 3 - FPCD>");
        exit(-1);
    }
    
    SearchSpace *s = NULL;
    int i;
    int iteration = atoi(argv[4]), n_epochs = atoi(argv[7]), batch_size = atoi(argv[8]), n_gibbs_sampling = atoi(argv[9]), op = atoi(argv[10]);
    int n_hidden_units;
    /* errorTrain/errorTest initialized so they are never read uninitialized */
    double *eta_bound = NULL, errorTrain = 0.0, errorTest = 0.0;
    FILE *f = NULL;
    Subgraph *Train = NULL, *Test = NULL;
    Dataset *DatasetTrain = NULL, *DatasetTest = NULL;
    RBM *m = NULL;
    
    /* Validate the training method before any expensive work: an op outside
     * 1..3 would otherwise skip every case of the switch below and leave the
     * training error meaningless. */
    if(op < 1 || op > 3){
        fprintf(stderr,"\nInvalid training method %d: use 1 - CD | 2 - PCD | 3 - FPCD.\n", op);
        exit(-1);
    }
    
    Train = ReadSubgraph(argv[1]);
    Test = ReadSubgraph(argv[2]);
    DatasetTrain = Subgraph2Dataset(Train);
    DatasetTest = Subgraph2Dataset(Test);
    
    s = ReadSearchSpaceFromFile(argv[5], <b>_PSO_</b>);
    
    /* Learning-rate bounds (dimension 1 of the search space) are passed
     * separately so the RBM can keep eta within [eta_min, eta_max]. */
    eta_bound = (double *)calloc(2, sizeof(double));
    if(!eta_bound){
        fprintf(stderr,"\nUnable to allocate memory for eta_bound.\n");
        exit(-1);
    }
    eta_bound[0] = s->LB[1];
    eta_bound[1] = s->UB[1];
    
    fprintf(stderr,"\nInitializing search space ... ");
    InitializeSearchSpace(s, <b>_PSO_</b>);
    fprintf(stderr,"\nOk\n");
    
    fprintf(stderr,"\nRunning <b>PSO</b> ... ");
    run<b>PSO</b>(s, BernoulliRBM, Train, op, n_epochs, batch_size, n_gibbs_sampling, eta_bound);
    
    /* Rebuild an RBM from the best position found (s->g):
     * g[0] = number of hidden units, g[1] = eta, g[2] = lambda, g[3] = alpha. */
    fprintf(stderr,"\n\nRunning RBM with best parameters on training set ... ");
    n_hidden_units = (int)s->g[0];
    m = CreateRBM(Train->nfeats, n_hidden_units, Train->nlabels);
    m->eta = s->g[1];
    m->lambda = s->g[2];
    m->alpha = s->g[3];
    m->eta_min = eta_bound[0];
    m->eta_max = eta_bound[1];

    InitializeWeights(m);
    InitializeLabelWeights(m);    
    InitializeBias4HiddenUnits(m);
    InitializeBias4VisibleUnitsWithRandomValues(m);
    InitializeBias4LabelUnits(m);

    switch (op){
        case 1:
            /* Plain CD always uses a single Gibbs sampling step */
            errorTrain = BernoulliRBMTrainingbyContrastiveDivergence(DatasetTrain, m, n_epochs, 1, batch_size);
        break;
        case 2:
            errorTrain = BernoulliRBMTrainingbyPersistentContrastiveDivergence(DatasetTrain, m, n_epochs, n_gibbs_sampling, batch_size);
        break;
        case 3:
            errorTrain = BernoulliRBMTrainingbyFastPersistentContrastiveDivergence(DatasetTrain, m, n_epochs, n_gibbs_sampling, batch_size);
        break;
        default:
            /* unreachable: op was validated right after parsing */
        break;
    }
    
    fprintf(stderr,"\n\nRunning RBM for reconstruction on testing set ... ");
    errorTest = BernoulliRBMReconstruction(DatasetTest, m);
    fprintf(stderr,"\nOK\n");
    
    fprintf(stderr,"\nTraining error: %lf\nTesting error: %lf\n", errorTrain, errorTest);

    fprintf(stderr, "\nSaving outputs ... ");
    f = fopen(argv[3], "a");
    if(!f){
        fprintf(stderr,"\nUnable to open output results file %s.\n", argv[3]);
        exit(-1);
    }
    fprintf(f,"%d %lf %lf\n", iteration, errorTrain, errorTest);
    fclose(f);
    
    f = fopen(argv[6], "a");
    if(!f){
        fprintf(stderr,"\nUnable to open best parameters file %s.\n", argv[6]);
        exit(-1);
    }
    fprintf(f,"%d ", s->n);
    for(i = 0; i < s->n; i++)
        fprintf(f, "%lf ", s->g[i]);
    fprintf(f, "\n");
    fclose(f);
    fprintf(stderr, "Ok!\n");
        
    free(eta_bound);
    DestroySearchSpace(&s, <b>_PSO_</b>);
    DestroyDataset(&DatasetTrain);
    DestroyDataset(&DatasetTest);
    DestroySubgraph(&Train);
    DestroySubgraph(&Test);
    DestroyRBM(&m);
    
    return 0;
}
Clone this wiki locally