Skip to content
Permalink
Browse files

Added an option "state_prior_log10nize" to the dnnconf configuration file, controlling whether loaded state priors are converted to log10 scale (previously this conversion was always applied)

  (branch information not captured)
nitslp-ri committed May 26, 2019
1 parent 8c60cb8 commit ff91593b8babcb87fa6067d7315d374ffe1978ca
@@ -241,6 +241,7 @@ typedef struct __jconf_am__ {
char *output_bfile; /* b vector file for output layer */
char *priorfile; /* state prior file */
float prior_factor; /* state prior factor */
boolean prior_factor_log10nize; /* TRUE when requires log10nize state priors */
int batchsize; /* batch size */
int num_threads; /* number of threads */
} dnn;
@@ -175,6 +175,7 @@ jconf_set_default_values_am(JCONF_AM *j)
j->dnn.output_bfile = NULL;
j->dnn.priorfile = NULL;
j->dnn.prior_factor = 1.0;
j->dnn.prior_factor_log10nize = TRUE;
j->dnn.batchsize = 1;
j->dnn.num_threads = 2;
}
@@ -570,6 +570,7 @@ j_load_am(Recog *recog, JCONF_AM *amconf)
amconf->dnn.output_bfile,
amconf->dnn.priorfile,
amconf->dnn.prior_factor,
amconf->dnn.prior_factor_log10nize,
amconf->dnn.batchsize,
amconf->dnn.num_threads) == FALSE) {
jlog("ERROR: m_fusion: failed to initialize DNN\n");
@@ -407,6 +407,11 @@ print_engine_info(Recog *recog)
jlog(" # of hidden layers = %d\n", am->dnn->hnum);
jlog(" hidden layer dim. = %d\n", am->dnn->hiddennodenum);
jlog(" state prior factor = %f\n", am->dnn->prior_factor);
if (am->config->dnn.prior_factor_log10nize) {
jlog(" state prior log10nize = on\n");
} else {
jlog(" state prior log10nize = off\n");
}
jlog(" batch size = %d\n", am->dnn->batch_size);
jlog(" number of threads = %d\n", am->dnn->num_threads);
}
@@ -669,7 +669,18 @@ dnn_config_file_parse(char *filename, JCONF_AM *am, Jconf *jconf)
else if (strmatch(pp, "output_B")) am->dnn.output_bfile = filepath(v, cdir);
else if (strmatch(pp, "state_prior")) am->dnn.priorfile = filepath(v, cdir);
else if (strmatch(pp, "state_prior_factor")) am->dnn.prior_factor = atof(v);
else if (strmatch(pp, "batch_size")) am->dnn.batchsize = atoi(v);
else if (strmatch(pp, "state_prior_log10nize")) {
if (strmatch(v, "yes") || strmatch(v, "true")) {
am->dnn.prior_factor_log10nize = TRUE;
} else if (strmatch(v, "no") || strmatch(v, "false")) {
am->dnn.prior_factor_log10nize = FALSE;
} else {
jlog("ERROR: dnn_config_file_parse: value of state_prior_log10nize must be \"true\" or \"false\"\n");
if (cdir) free(cdir);
fclose(fp);
return FALSE;
}
} else if (strmatch(pp, "batch_size")) am->dnn.batchsize = atoi(v);
else if (strmatch(pp, "num_threads")) am->dnn.num_threads = atoi(v);
else {
jlog("ERROR: dnn_config_file_parse: unknown spec: %s %s\n", pp, v);
@@ -235,7 +235,7 @@ int check_avail_simd();
DNNData *dnn_new();
void dnn_clear(DNNData *dnn);
void dnn_free(DNNData *dnn);
boolean dnn_setup(DNNData *dnn, int veclen, int contextlen, int inputnodes, int outputnodes, int hiddennodes, int hiddenlayernum, char **wfile, char **bfile, char *output_wfile, char *output_bfile, char *priorfile, float prior_factor, int batchsize, int num_threads);
boolean dnn_setup(DNNData *dnn, int veclen, int contextlen, int inputnodes, int outputnodes, int hiddennodes, int hiddenlayernum, char **wfile, char **bfile, char *output_wfile, char *output_bfile, char *priorfile, float prior_factor, boolean state_prior_log10nize, int batchsize, int num_threads);
void dnn_calc_outprob(HMMWork *wrk);

/* calc_dnn_*.c */
@@ -461,12 +461,12 @@ void dnn_clear(DNNData *dnn)
int i;

if (dnn->h) {
dnn_layer_clear(&(dnn->o));
for (i = 0; i < dnn->hnum; i++) {
dnn_layer_clear(&(dnn->h[i]));
}
free(dnn->h);
}
dnn_layer_clear(&(dnn->o));
if (dnn->state_prior) free(dnn->state_prior);
for (i = 0; i < dnn->hnum; i++) {
if (dnn->work[i]) {
@@ -513,7 +513,7 @@ sub1(float *dst, float *src, float *w, float *b, int out, int in, float *fstore)
/************************************************************************/

/* initialize dnn */
boolean dnn_setup(DNNData *dnn, int veclen, int contextlen, int inputnodes, int outputnodes, int hiddennodes, int hiddenlayernum, char **wfile, char **bfile, char *output_wfile, char *output_bfile, char *priorfile, float prior_factor, int batchsize, int num_threads)
boolean dnn_setup(DNNData *dnn, int veclen, int contextlen, int inputnodes, int outputnodes, int hiddennodes, int hiddenlayernum, char **wfile, char **bfile, char *output_wfile, char *output_bfile, char *priorfile, float prior_factor, boolean state_prior_log10nize, int batchsize, int num_threads)
{
int i;

@@ -601,8 +601,10 @@ boolean dnn_setup(DNNData *dnn, int veclen, int contextlen, int inputnodes, int
return FALSE;
}
dnn->state_prior[id] = val * prior_factor;
// log10-nize prior
dnn->state_prior[id] = log10(dnn->state_prior[id]);
if (state_prior_log10nize) {
// log10-nize prior
dnn->state_prior[id] = log10(dnn->state_prior[id]);
}
}
fclose(fp);
jlog("Stat: dnn_init: state prior loaded: %s\n", priorfile);

0 comments on commit ff91593

Please sign in to comment.
You can’t perform that action at this time.