diff --git a/egs/ami/s5b/RESULTS_ihm b/egs/ami/s5b/RESULTS_ihm index 44234fc3fd9..25a60d24cfb 100644 --- a/egs/ami/s5b/RESULTS_ihm +++ b/egs/ami/s5b/RESULTS_ihm @@ -40,7 +40,6 @@ %WER 24.0 | 13098 94470 | 79.4 12.1 8.5 3.4 24.0 57.1 | -0.153 | exp/ihm/nnet3_cleaned/tdnn_sp/decode_dev/ascore_12/dev_hires.ctm.filt.sys %WER 25.5 | 12643 89984 | 77.7 14.2 8.2 3.2 25.5 56.4 | -0.139 | exp/ihm/nnet3_cleaned/tdnn_sp/decode_eval/ascore_11/eval_hires.ctm.filt.sys - # local/nnet3/run_tdnn.sh --mic ihm --train-set train --gmm tri3 --nnet3-affix "" # nnet3 xent TDNN without data cleaning [cleaning makes very small and # inconsistent difference on this dat] @@ -55,17 +54,21 @@ %WER 22.4 | 12643 89977 | 80.3 12.5 7.2 2.7 22.4 53.6 | -0.503 | exp/ihm/nnet3_cleaned/lstm_bidirectional_sp/decode_eval/ascore_10/eval_hires.ctm.filt.sys ############################################ - -# local/chain/run_tdnn.sh --mic ihm --stage 12 & -# cleanup + chain TDNN model -# for d in exp/ihm/chain_cleaned/tdnn_sp_bi/decode_*; do grep Sum $d/*sc*/*ys | utils/best_wer.sh; done -%WER 22.5 | 13098 94490 | 80.6 10.8 8.6 3.1 22.5 55.0 | 0.072 | exp/ihm/chain_cleaned/tdnn_sp_bi/decode_dev/ascore_10/dev_hires.ctm.filt.sys -%WER 22.5 | 12643 89978 | 80.3 12.5 7.2 2.7 22.5 53.1 | 0.149 | exp/ihm/chain_cleaned/tdnn_sp_bi/decode_eval/ascore_10/eval_hires.ctm.filt.sys - +# cleanup + chain TDNN model. +# local/chain/run_tdnn.sh --mic ihm --stage 4 & +# for d in exp/ihm/chain_cleaned/tdnn1d_sp_bi/decode_*; do grep Sum $d/*sc*/*ys | utils/best_wer.sh; done +%WER 21.7 | 13098 94488 | 81.1 10.4 8.4 2.8 21.7 54.4 | 0.096 | exp/ihm/chain_cleaned/tdnn1d_sp_bi/decode_dev/ascore_10/dev_hires.ctm.filt.sys +%WER 22.1 | 12643 89979 | 80.5 12.1 7.4 2.6 22.1 52.8 | 0.185 | exp/ihm/chain_cleaned/tdnn1d_sp_bi/decode_eval/ascore_10/eval_hires.ctm.filt.sys + +# cleanup + chain TDNN model. Uses LDA instead of PCA for ivector features. 
+# local/chain/tuning/run_tdnn_1b.sh --mic ihm --stage 4 & +# for d in exp/ihm/chain_cleaned/tdnn1b_sp_bi/decode_*; do grep Sum $d/*sc*/*ys | utils/best_wer.sh; done +%WER 22.0 | 13098 94488 | 80.8 10.2 9.0 2.8 22.0 54.7 | 0.102 | exp/ihm/chain_cleaned/tdnn1b_sp_bi/decode_dev/ascore_10/dev_hires.ctm.filt.sys +%WER 22.2 | 12643 89968 | 80.3 12.1 7.6 2.6 22.2 52.9 | 0.170 | exp/ihm/chain_cleaned/tdnn1b_sp_bi/decode_eval/ascore_10/eval_hires.ctm.filt.sys # local/chain/run_tdnn.sh --mic ihm --train-set train --gmm tri3 --nnet3-affix "" --stage 12 # chain TDNN model without cleanup [note: cleanup helps very little on this IHM data.] -for d in exp/ihm/chain/tdnn_sp_bi/decode_*; do grep Sum $d/*sc*/*ys | utils/best_wer.sh; done +# for d in exp/ihm/chain/tdnn_sp_bi/decode_*; do grep Sum $d/*sc*/*ys | utils/best_wer.sh; done %WER 22.4 | 13098 94476 | 80.4 10.4 9.2 2.8 22.4 54.6 | 0.069 | exp/ihm/chain/tdnn_sp_bi/decode_dev/ascore_10/dev_hires.ctm.filt.sys %WER 22.5 | 12643 89974 | 80.0 12.1 7.9 2.6 22.5 52.8 | 0.157 | exp/ihm/chain/tdnn_sp_bi/decode_eval/ascore_10/eval_hires.ctm.filt.sys diff --git a/egs/ami/s5b/local/chain/run_tdnn.sh b/egs/ami/s5b/local/chain/run_tdnn.sh index 61f8f499182..e1adaa9346d 120000 --- a/egs/ami/s5b/local/chain/run_tdnn.sh +++ b/egs/ami/s5b/local/chain/run_tdnn.sh @@ -1 +1 @@ -tuning/run_tdnn_1b.sh \ No newline at end of file +tuning/run_tdnn_1d.sh \ No newline at end of file diff --git a/egs/ami/s5b/local/chain/tuning/run_tdnn_1d.sh b/egs/ami/s5b/local/chain/tuning/run_tdnn_1d.sh new file mode 100755 index 00000000000..a9f228cb55d --- /dev/null +++ b/egs/ami/s5b/local/chain/tuning/run_tdnn_1d.sh @@ -0,0 +1,269 @@ +#!/bin/bash + +# same as 1b but uses PCA instead of +# LDA features for the ivector extractor. 
+ +# Results on 03/27/2017: +# local/chain/compare_wer_general.sh ihm tdnn1b_sp_bi tdnn1d_sp_bi +# System tdnn1b_sp_bi tdnn1d_sp_bi +# WER on dev 22.0 21.9 +# WER on eval 22.2 22.3 +# Final train prob -0.0813472 -0.0807054 +# Final valid prob -0.132032 -0.133564 +# Final train prob (xent) -1.41543 -1.41951 +# Final valid prob (xent) -1.62316 -1.63021 + +set -e -o pipefail +# First the options that are passed through to run_ivector_common.sh +# (some of which are also used in this script directly). +stage=0 +mic=ihm +nj=30 +min_seg_len=1.55 +use_ihm_ali=false +train_set=train_cleaned +gmm=tri3_cleaned # the gmm for the target data +ihm_gmm=tri3 # the gmm for the IHM system (if --use-ihm-ali true). +num_threads_ubm=32 +ivector_transform_type=pca +nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned + +# The rest are configs specific to this script. Most of the parameters +# are just hardcoded at this level, in the commands below. +train_stage=-10 +tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration. +tdnn_affix=1d #affix for TDNN directory, e.g. "a" or "b", in case we change the configuration. +common_egs_dir= # you can set this to use previously dumped egs. + +# End configuration section. +echo "$0 $@" # Print the command line for logging + +. ./cmd.sh +. ./path.sh +. ./utils/parse_options.sh + + +if ! cuda-compiled; then + cat <data/lang_chain/topo + fi +fi + +if [ $stage -le 13 ]; then + # Get the alignments as lattices (gives the chain training more freedom). + # use the same num-jobs as the alignments + steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \ + data/lang $gmm_dir $lat_dir + rm $lat_dir/fsts.*.gz # save space +fi + +if [ $stage -le 14 ]; then + # Build a tree using our new topology. We know we have alignments for the + # speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use + # those. 
+ if [ -f $tree_dir/final.mdl ]; then + echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it." + exit 1; + fi + steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \ + --context-opts "--context-width=2 --central-position=1" \ + --leftmost-questions-truncate -1 \ + --cmd "$train_cmd" 4200 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir +fi + +xent_regularize=0.1 + +if [ $stage -le 15 ]; then + echo "$0: creating neural net configs using the xconfig parser"; + + num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}') + learning_rate_factor=$(echo "print 0.5/$xent_regularize" | python) + + mkdir -p $dir/configs + cat < $dir/configs/network.xconfig + input dim=100 name=ivector + input dim=40 name=input + + # please note that it is important to have input layer with the name=input + # as the layer immediately preceding the fixed-affine-layer to enable + # the use of short notation for the descriptor + fixed-affine-layer name=lda input=Append(-1,0,1,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat + + # the first splicing is moved before the lda layer, so no splicing here + relu-renorm-layer name=tdnn1 dim=450 + relu-renorm-layer name=tdnn2 input=Append(-1,0,1) dim=450 + relu-renorm-layer name=tdnn3 input=Append(-1,0,1) dim=450 + relu-renorm-layer name=tdnn4 input=Append(-3,0,3) dim=450 + relu-renorm-layer name=tdnn5 input=Append(-3,0,3) dim=450 + relu-renorm-layer name=tdnn6 input=Append(-3,0,3) dim=450 + relu-renorm-layer name=tdnn7 input=Append(-3,0,3) dim=450 + + ## adding the layers for chain branch + relu-renorm-layer name=prefinal-chain input=tdnn7 dim=450 target-rms=0.5 + output-layer name=output include-log-softmax=false dim=$num_targets max-change=1.5 + + # adding the layers for xent branch + # This block prints the configs for a separate output that will be + # trained with a cross-entropy objective in the 'chain' models... 
this + # has the effect of regularizing the hidden parts of the model. we use + # 0.5 / args.xent_regularize as the learning rate factor- the factor of + # 0.5 / args.xent_regularize is suitable as it means the xent + # final-layer learns at a rate independent of the regularization + # constant; and the 0.5 was tuned so as to make the relative progress + # similar in the xent and regular final layers. + relu-renorm-layer name=prefinal-xent input=tdnn7 dim=450 target-rms=0.5 + output-layer name=output-xent dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5 + +EOF + + steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/ +fi + +if [ $stage -le 16 ]; then + if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5b/$dir/egs/storage $dir/egs/storage + fi + + touch $dir/egs/.nodelete # keep egs around when that run dies. 
+ + steps/nnet3/chain/train.py --stage $train_stage \ + --cmd "$decode_cmd" \ + --feat.online-ivector-dir $train_ivector_dir \ + --feat.cmvn-opts "--norm-means=false --norm-vars=false" \ + --chain.xent-regularize $xent_regularize \ + --chain.leaky-hmm-coefficient 0.1 \ + --chain.l2-regularize 0.00005 \ + --chain.apply-deriv-weights false \ + --chain.lm-opts="--num-extra-lm-states=2000" \ + --egs.dir "$common_egs_dir" \ + --egs.opts "--frames-overlap-per-eg 0" \ + --egs.chunk-width 150 \ + --trainer.num-chunk-per-minibatch 128 \ + --trainer.frames-per-iter 1500000 \ + --trainer.num-epochs 4 \ + --trainer.optimization.num-jobs-initial 2 \ + --trainer.optimization.num-jobs-final 12 \ + --trainer.optimization.initial-effective-lrate 0.001 \ + --trainer.optimization.final-effective-lrate 0.0001 \ + --trainer.max-param-change 2.0 \ + --cleanup.remove-egs true \ + --feat-dir $train_data_dir \ + --tree-dir $tree_dir \ + --lat-dir $lat_dir \ + --dir $dir +fi + + +graph_dir=$dir/graph_${LM} +if [ $stage -le 17 ]; then + # Note: it might appear that this data/lang_chain directory is mismatched, and it is as + # far as the 'topo' is concerned, but this script doesn't read the 'topo' from + # the lang directory. 
+ utils/mkgraph.sh --self-loop-scale 1.0 data/lang_${LM} $dir $graph_dir +fi + +if [ $stage -le 18 ]; then + rm $dir/.error 2>/dev/null || true + for decode_set in dev eval; do + ( + steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \ + --nj $nj --cmd "$decode_cmd" \ + --online-ivector-dir exp/$mic/nnet3${nnet3_affix}/ivectors_${decode_set}_hires \ + --scoring-opts "--min-lmwt 5 " \ + $graph_dir data/$mic/${decode_set}_hires $dir/decode_${decode_set} || exit 1; + ) || touch $dir/.error & + done + wait + if [ -f $dir/.error ]; then + echo "$0: something went wrong in decoding" + exit 1 + fi +fi +exit 0 diff --git a/egs/ami/s5b/local/nnet3/run_ivector_common.sh b/egs/ami/s5b/local/nnet3/run_ivector_common.sh index bccbb42494c..860009c5ef5 100755 --- a/egs/ami/s5b/local/nnet3/run_ivector_common.sh +++ b/egs/ami/s5b/local/nnet3/run_ivector_common.sh @@ -17,8 +17,8 @@ train_set=train # you might set this to e.g. train_cleaned. gmm=tri3 # This specifies a GMM-dir from the features of the type you're training the system on; # it should contain alignments for 'train_set'. - num_threads_ubm=32 +ivector_transform_type=lda nnet3_affix=_cleaned # affix for exp/$mic/nnet3 directory to put iVector stuff in, so it # becomes exp/$mic/nnet3_cleaned or whatever. @@ -30,7 +30,7 @@ nnet3_affix=_cleaned # affix for exp/$mic/nnet3 directory to put iVector stu gmmdir=exp/${mic}/${gmm} -for f in data/${mic}/${train_set}/feats.scp ${gmmdir}/final.mdl; do +for f in data/${mic}/${train_set}/feats.scp ; do if [ ! -f $f ]; then echo "$0: expected file $f to exist" exit 1 @@ -110,20 +110,36 @@ if [ $stage -le 4 ]; then echo "$0: warning: number of feats $n1 != $n2, if these are very different it could be bad." fi - echo "$0: training a system on the hires data for its LDA+MLLT transform, in order to produce the diagonal GMM." - if [ -e exp/$mic/nnet3${nnet3_affix}/tri5/final.mdl ]; then - # we don't want to overwrite old stuff, ask the user to delete it. 
- echo "$0: exp/$mic/nnet3${nnet3_affix}/tri5/final.mdl already exists: " - echo " ... please delete and then rerun, or use a later --stage option." - exit 1; - fi - steps/train_lda_mllt.sh --cmd "$train_cmd" --num-iters 7 --mllt-iters "2 4 6" \ - --splice-opts "--left-context=3 --right-context=3" \ - 3000 10000 $temp_data_root/${train_set}_hires data/lang \ - $gmmdir exp/$mic/nnet3${nnet3_affix}/tri5 + case $ivector_transform_type in + lda) + if [ ! -f ${gmmdir}/final.mdl ]; then + echo "$0: expected file ${gmmdir}/final.mdl to exist" + exit 1; + fi + echo "$0: training a system on the hires data for its LDA+MLLT transform, in order to produce the diagonal GMM." + if [ -e exp/$mic/nnet3${nnet3_affix}/tri5/final.mdl ]; then + # we don't want to overwrite old stuff, ask the user to delete it. + echo "$0: exp/$mic/nnet3${nnet3_affix}/tri5/final.mdl already exists: " + echo " ... please delete and then rerun, or use a later --stage option." + exit 1; + fi + steps/train_lda_mllt.sh --cmd "$train_cmd" --num-iters 7 --mllt-iters "2 4 6" \ + --splice-opts "--left-context=3 --right-context=3" \ + 3000 10000 $temp_data_root/${train_set}_hires data/lang \ + $gmmdir exp/$mic/nnet3${nnet3_affix}/tri5 + ;; + pca) + echo "$0: computing a PCA transform from the hires data." + steps/online/nnet2/get_pca_transform.sh --cmd "$train_cmd" \ + --splice-opts "--left-context=3 --right-context=3" \ + --max-utts 10000 --subsample 2 \ + $temp_data_root/${train_set}_hires \ + exp/$mic/nnet3${nnet3_affix}/tri5 + ;; + *) echo "$0: invalid iVector transform type $ivector_transform_type" && exit 1; + esac fi - if [ $stage -le 5 ]; then echo "$0: computing a subset of data to train the diagonal UBM." 
diff --git a/egs/babel/s5d/conf/common.fullLP b/egs/babel/s5d/conf/common.fullLP index d203908d3e0..05dea74beb0 100644 --- a/egs/babel/s5d/conf/common.fullLP +++ b/egs/babel/s5d/conf/common.fullLP @@ -35,10 +35,10 @@ babel_type=full use_pitch=true -lmwt_plp_extra_opts=( --min-lmwt 8 --max-lmwt 18 ) +lmwt_plp_extra_opts=( --min-lmwt 9 --max-lmwt 13 ) lmwt_bnf_extra_opts=( --min-lmwt 15 --max-lmwt 22 ) lmwt_dnn_extra_opts=( --min-lmwt 10 --max-lmwt 15 ) -lmwt_chain_extra_opts=( --min-lmwt 4 --max-lmwt 22 ) +lmwt_chain_extra_opts=( --min-lmwt 9 --max-lmwt 13 ) dnn_beam=16.0 dnn_lat_beam=8.5 diff --git a/egs/babel/s5d/conf/common_vars.sh b/egs/babel/s5d/conf/common_vars.sh index 4a48d2577a8..3d81a3fcc6c 100644 --- a/egs/babel/s5d/conf/common_vars.sh +++ b/egs/babel/s5d/conf/common_vars.sh @@ -12,6 +12,7 @@ cer=0 #Declaring here to make the definition inside the language conf files more # transparent and nice +declare -A train_kwlists declare -A dev10h_kwlists declare -A dev2h_kwlists declare -A evalpart1_kwlists diff --git a/egs/babel/s5d/conf/lang/104-pashto-fullLP.official.conf b/egs/babel/s5d/conf/lang/104-pashto-fullLP.official.conf index 08f849b7605..af1bbb132f7 100644 --- a/egs/babel/s5d/conf/lang/104-pashto-fullLP.official.conf +++ b/egs/babel/s5d/conf/lang/104-pashto-fullLP.official.conf @@ -3,7 +3,7 @@ #speech corpora files location train_data_dir=/export/babel/data/104-pashto/release-current/conversational/training -train_data_list=/export/babel/data/splits/Pashto_Babel104/train.FullLP.list +train_data_list=./conf/lists/104-pashto/training.list train_nj=32 #RADICAL DEV2H data files @@ -22,7 +22,7 @@ dev2h_nj=18 #Official DEV data files dev10h_data_dir=/export/babel/data/104-pashto/release-current/conversational/dev -dev10h_data_list=/export/babel/data/splits/Pashto_Babel104/dev.list +dev10h_data_list=./conf/lists/104-pashto/dev.list dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel104b-v0.4bY_conv-dev/IARPA-babel104b-v0.4bY_conv-dev.stm 
dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel104b-v0.4bY_conv-dev.ecf.xml dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel104b-v0.4bY_conv-dev/IARPA-babel104b-v0.4bY_conv-dev.mitllfa3.rttm diff --git a/egs/babel/s5d/conf/lang/105-turkish-fullLP.official.conf b/egs/babel/s5d/conf/lang/105-turkish-fullLP.official.conf index 6889cb7eb37..d6ae1007ac9 100644 --- a/egs/babel/s5d/conf/lang/105-turkish-fullLP.official.conf +++ b/egs/babel/s5d/conf/lang/105-turkish-fullLP.official.conf @@ -5,7 +5,14 @@ #speech corpora files location train_data_dir=/export/babel/data/105-turkish/release-current-b/conversational/training train_data_list=/export/babel/data/splits/Turkish_Babel105/train.fullLP.list -train_nj=32 +#train_nj=32 +train_ecf_file=./data/train/ecf.train.xml +train_rttm_file=./exp/tri5/rttm +train_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel105b-v0.4_conv-dev.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel105b-v0.4_conv-dev.kwlist2.xml +) +train_nj=64 #RADICAL DEV data files dev2h_data_dir=/export/babel/data/105-turkish/release-current-b/conversational/dev diff --git a/egs/babel/s5d/conf/lang/305-guarani.FLP.official.conf b/egs/babel/s5d/conf/lang/305-guarani.FLP.official.conf index 233cd81fffb..b1dd7f5b4f5 100644 --- a/egs/babel/s5d/conf/lang/305-guarani.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/305-guarani.FLP.official.conf @@ -11,11 +11,12 @@ train_nj=32 #Radical reduced DEV corpora files location dev2h_data_dir=/export/babel/data/305-guarani/IARPA-babel305b-v1.0b-build/BABEL_OP3_305/conversational/dev dev2h_data_list=./conf/lists/305-guarani//dev.2h.list -dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.mitllfa3.rttm -dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.scoring.ecf.xml 
-dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.stm +dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.mitllfa3.rttm +dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.scoring.ecf.xml +dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev.kwlist3.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -24,11 +25,12 @@ dev2h_subset_ecf=true #Official DEV corpora files location dev10h_data_dir=/export/babel/data/305-guarani/IARPA-babel305b-v1.0b-build/BABEL_OP3_305/conversational/dev dev10h_data_list=./conf/lists/305-guarani//dev.list -dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.mitllfa3.rttm -dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.scoring.ecf.xml -dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.stm +dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.mitllfa3.rttm +dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.scoring.ecf.xml +dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.stm dev10h_kwlists=( - 
[kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0a_conv-dev/IARPA-babel305b-v1.0a_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev/IARPA-babel305b-v1.0c_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel305b-v1.0c_conv-dev.kwlist3.xml ) # dev10h_kwlists dev10h_nj=32 diff --git a/egs/babel/s5d/conf/lang/306-igbo.FLP.official.conf b/egs/babel/s5d/conf/lang/306-igbo.FLP.official.conf index 87f82da6b49..15a0264de61 100644 --- a/egs/babel/s5d/conf/lang/306-igbo.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/306-igbo.FLP.official.conf @@ -15,7 +15,8 @@ dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-de dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.scoring.ecf.xml dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev.kwlist3.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -28,7 +29,8 @@ dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-d dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.scoring.ecf.xml dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.stm dev10h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev/IARPA-babel306b-v2.0c_conv-dev.annot.kwlist.xml + 
[eval]=/export/babel/data/scoring/IndusDB/IARPA-babel306b-v2.0c_conv-dev.kwlist3.xml ) # dev10h_kwlists dev10h_nj=32 diff --git a/egs/babel/s5d/conf/lang/307-amharic.FLP.official.conf b/egs/babel/s5d/conf/lang/307-amharic.FLP.official.conf index 9668bd14e6b..8ae1b53eb2b 100644 --- a/egs/babel/s5d/conf/lang/307-amharic.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/307-amharic.FLP.official.conf @@ -15,7 +15,8 @@ dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-de dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.scoring.ecf.xml dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev.kwlist4.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -28,7 +29,8 @@ dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-d dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.scoring.ecf.xml dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.stm dev10h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev/IARPA-babel307b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel307b-v1.0b_conv-dev.kwlist4.xml ) # dev10h_kwlists dev10h_nj=32 @@ -42,5 +44,9 @@ unsup_nj=32 
lexicon_file=/export/babel/data/307-amharic/IARPA-babel307b-v1.0b-build/BABEL_OP3_307/conversational/reference_materials/lexicon.txt lexiconFlags="--romanized --oov " +extlex_proxy_phone_beam=5 +extlex_proxy_phone_nbest=300 +extlex_proxy_beam=-1 +extlex_proxy_nbest=-1 diff --git a/egs/babel/s5d/conf/lang/401-mongolian.FLP.official.conf b/egs/babel/s5d/conf/lang/401-mongolian.FLP.official.conf index 902ded164d2..aac78e77a80 100644 --- a/egs/babel/s5d/conf/lang/401-mongolian.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/401-mongolian.FLP.official.conf @@ -15,7 +15,8 @@ dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-de dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.scoring.ecf.xml dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev.kwlist3.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -28,7 +29,8 @@ dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-d dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.scoring.ecf.xml dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.stm dev10h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev/IARPA-babel401b-v2.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel401b-v2.0b_conv-dev.kwlist3.xml 
) # dev10h_kwlists dev10h_nj=32 diff --git a/egs/babel/s5d/conf/lang/402-javanese.FLP.official.conf b/egs/babel/s5d/conf/lang/402-javanese.FLP.official.conf index 0f176dc9396..d0f86207484 100644 --- a/egs/babel/s5d/conf/lang/402-javanese.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/402-javanese.FLP.official.conf @@ -15,7 +15,8 @@ dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-de dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.scoring.ecf.xml dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev.kwlist3.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -28,9 +29,8 @@ dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-d dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.scoring.ecf.xml dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.stm dev10h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.annot.kwlist.xml - [kwlist3]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.annot.kwlist3.xml - + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev/IARPA-babel402b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel402b-v1.0b_conv-dev.kwlist3.xml ) # dev10h_kwlists dev10h_nj=32 diff --git a/egs/babel/s5d/conf/lang/403-dholuo.FLP.official.conf 
b/egs/babel/s5d/conf/lang/403-dholuo.FLP.official.conf index 6dc95d74304..9096a21fdc4 100644 --- a/egs/babel/s5d/conf/lang/403-dholuo.FLP.official.conf +++ b/egs/babel/s5d/conf/lang/403-dholuo.FLP.official.conf @@ -15,7 +15,8 @@ dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-de dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.scoring.ecf.xml dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.stm dev2h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev.kwlist4.xml ) # dev2h_kwlists dev2h_nj=16 dev2h_subset_ecf=true @@ -28,7 +29,8 @@ dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-d dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.scoring.ecf.xml dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.stm dev10h_kwlists=( - [kwlist]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.annot.kwlist.xml + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev/IARPA-babel403b-v1.0b_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel403b-v1.0b_conv-dev.kwlist4.xml ) # dev10h_kwlists dev10h_nj=32 diff --git a/egs/babel/s5d/conf/lang/404-georgian.FLP.official.conf b/egs/babel/s5d/conf/lang/404-georgian.FLP.official.conf new file mode 100644 index 00000000000..4c36a8878fd --- /dev/null +++ b/egs/babel/s5d/conf/lang/404-georgian.FLP.official.conf @@ -0,0 +1,78 @@ +# include common settings for fullLP systems. 
+. conf/common.fullLP || exit 1; + + +#speech corpora files location +train_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/training +train_data_list=./conf/lists/404-georgian//training.list +train_nj=32 + + +#Radical reduced DEV corpora files location +dev2h_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/dev +dev2h_data_list=./conf/lists/404-georgian//dev.2h.list +dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.mitllfa3.rttm +dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.scoring.ecf.xml +dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.stm +dev2h_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev.kwlist3.xml +) # dev2h_kwlists +dev2h_nj=16 +dev2h_subset_ecf=true + + +#Official DEV corpora files location +dev10h_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/dev +dev10h_data_list=./conf/lists/404-georgian//dev.list +dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.mitllfa3.rttm +dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.scoring.ecf.xml +dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.stm +dev10h_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev.kwlist3.xml +) # dev10h_kwlists +dev10h_nj=32 + + +#Official EVAL period 
evaluation data files +eval_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/eval +eval_data_list=./conf/lists/404-georgian//eval.list +eval_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-eval.ecf.xml +eval_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev.kwlist3.xml +) # eval_kwlists +eval_nj=32 + + +#Shadow data files +shadow_data_dir=( + /export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/dev + /export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/eval +) # shadow_data_dir +shadow_data_list=( + ./conf/lists/404-georgian//dev.list + ./conf/lists/404-georgian//eval.list +) # shadow_data_dir +shadow_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.scoring.ecf.xml +shadow_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.mitllfa3.rttm +shadow_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.stm +shadow_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev.kwlist3.xml +) # shadow_kwlists +shadow_nj=32 + + +#Unsupervised dataset for FullLP condition +unsup_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/untranscribed-training +unsup_data_list=./conf/lists/404-georgian//untranscribed-training.list +unsup_nj=32 + + +lexicon_file= +lexiconFlags="--romanized --oov " + + + diff --git a/egs/babel/s5d/conf/lang/404-georgian.LLP.official.conf b/egs/babel/s5d/conf/lang/404-georgian.LLP.official.conf new file 
mode 100644 index 00000000000..570bcab68ec --- /dev/null +++ b/egs/babel/s5d/conf/lang/404-georgian.LLP.official.conf @@ -0,0 +1,54 @@ +# include common settings for fullLP systems. +. conf/common.limitedLP || exit 1; + + +#speech corpora files location +train_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/training +train_data_list=./conf/lists/404-georgian//sub-train.list +train_nj=32 + + +#Radical reduced DEV corpora files location +dev2h_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/dev +dev2h_data_list=./conf/lists/404-georgian//dev.2h.list +dev2h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.mitllfa3.rttm +dev2h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.scoring.ecf.xml +dev2h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.stm +dev2h_kwlists=( + [dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist3.xml +) # dev2h_kwlists +dev2h_nj=16 +dev2h_subset_ecf=true + + +#Official DEV corpora files location +dev10h_data_dir=/export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/dev +dev10h_data_list=./conf/lists/404-georgian//dev.list +dev10h_rttm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.mitllfa3.rttm +dev10h_ecf_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.scoring.ecf.xml +dev10h_stm_file=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.stm +dev10h_kwlists=( + 
[dev]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist.xml + [eval]=/export/babel/data/scoring/IndusDB/IARPA-babel404b-v1.0a_conv-dev/IARPA-babel404b-v1.0a_conv-dev.annot.kwlist3.xml +) # dev10h_kwlists +dev10h_nj=32 + + +#Unsupervised dataset for LimitedLP condition +unsup_data_list=( + ./conf/lists/404-georgian//untranscribed-training.list + ./conf/lists/404-georgian//sub-train.untranscribed.list +) # unsup_data_list +unsup_data_dir=( + /export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/untranscribed-training + /export/babel/data//404-georgian/IARPA-babel404b-v1.0a-build/BABEL_OP3_404/conversational/training +) # unsup_data_dir +unsup_nj=32 + + +lexicon_file= +lexiconFlags="--romanized --oov " + + + diff --git a/egs/babel/s5d/conf/lists/404-georgian/dev.2h.list b/egs/babel/s5d/conf/lists/404-georgian/dev.2h.list new file mode 100644 index 00000000000..a823552044c --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/dev.2h.list @@ -0,0 +1,124 @@ +BABEL_OP3_404_10184_20141107_212406_inLine +BABEL_OP3_404_10184_20141107_212406_outLine +BABEL_OP3_404_12851_20141013_024620_inLine +BABEL_OP3_404_12851_20141013_024620_outLine +BABEL_OP3_404_16184_20141020_233508_inLine +BABEL_OP3_404_16184_20141020_233508_outLine +BABEL_OP3_404_17165_20141117_063008_inLine +BABEL_OP3_404_17165_20141117_063008_outLine +BABEL_OP3_404_17472_20141201_023731_inLine +BABEL_OP3_404_17472_20141201_023731_outLine +BABEL_OP3_404_18380_20141118_001754_inLine +BABEL_OP3_404_18380_20141118_001754_outLine +BABEL_OP3_404_18939_20141009_063127_inLine +BABEL_OP3_404_18939_20141009_063127_outLine +BABEL_OP3_404_22446_20141013_062554_inLine +BABEL_OP3_404_22446_20141013_062554_outLine +BABEL_OP3_404_22466_20141018_193129_inLine +BABEL_OP3_404_22466_20141018_193129_outLine +BABEL_OP3_404_22494_20141127_221208_inLine +BABEL_OP3_404_22494_20141127_221208_outLine +BABEL_OP3_404_22494_20141127_222057_inLine 
+BABEL_OP3_404_22494_20141127_222057_outLine +BABEL_OP3_404_23239_20141127_054155_inLine +BABEL_OP3_404_23239_20141127_054155_outLine +BABEL_OP3_404_24253_20150513_212152_inLine +BABEL_OP3_404_24253_20150513_212152_outLine +BABEL_OP3_404_24779_20150620_032949_inLine +BABEL_OP3_404_24779_20150620_032949_outLine +BABEL_OP3_404_26074_20141120_050650_inLine +BABEL_OP3_404_26074_20141120_050650_outLine +BABEL_OP3_404_28419_20141028_024104_inLine +BABEL_OP3_404_28419_20141028_024104_outLine +BABEL_OP3_404_33476_20141114_205102_inLine +BABEL_OP3_404_33476_20141114_205102_outLine +BABEL_OP3_404_34564_20141211_015413_inLine +BABEL_OP3_404_34564_20141211_015413_outLine +BABEL_OP3_404_35467_20141020_054030_inLine +BABEL_OP3_404_35467_20141020_054030_outLine +BABEL_OP3_404_38431_20141130_190122_inLine +BABEL_OP3_404_38431_20141130_190122_outLine +BABEL_OP3_404_41592_20141117_033328_inLine +BABEL_OP3_404_41592_20141117_033328_outLine +BABEL_OP3_404_41741_20141019_015552_inLine +BABEL_OP3_404_41741_20141019_015552_outLine +BABEL_OP3_404_42231_20141130_013425_inLine +BABEL_OP3_404_42231_20141130_013425_outLine +BABEL_OP3_404_42231_20141130_014628_inLine +BABEL_OP3_404_42231_20141130_014628_outLine +BABEL_OP3_404_42600_20141029_174857_inLine +BABEL_OP3_404_42600_20141029_174857_outLine +BABEL_OP3_404_44619_20141028_234639_inLine +BABEL_OP3_404_44619_20141028_234639_outLine +BABEL_OP3_404_46535_20150216_024618_inLine +BABEL_OP3_404_46535_20150216_024618_outLine +BABEL_OP3_404_46757_20141123_021510_inLine +BABEL_OP3_404_46757_20141123_021510_outLine +BABEL_OP3_404_47487_20141030_235808_inLine +BABEL_OP3_404_47487_20141030_235808_outLine +BABEL_OP3_404_47866_20150526_162411_inLine +BABEL_OP3_404_47866_20150526_162411_outLine +BABEL_OP3_404_47959_20141026_214447_inLine +BABEL_OP3_404_47959_20141026_214447_outLine +BABEL_OP3_404_51955_20141024_012212_inLine +BABEL_OP3_404_51955_20141024_012212_outLine +BABEL_OP3_404_51968_20141117_023015_inLine 
+BABEL_OP3_404_51968_20141117_023015_outLine +BABEL_OP3_404_52804_20141023_174815_inLine +BABEL_OP3_404_52804_20141023_174815_outLine +BABEL_OP3_404_54567_20141119_040337_inLine +BABEL_OP3_404_54567_20141119_040337_outLine +BABEL_OP3_404_56677_20141201_065523_inLine +BABEL_OP3_404_56677_20141201_065523_outLine +BABEL_OP3_404_56826_20141201_042429_inLine +BABEL_OP3_404_56826_20141201_042429_outLine +BABEL_OP3_404_58047_20141110_215330_inLine +BABEL_OP3_404_58047_20141110_215330_outLine +BABEL_OP3_404_58313_20141119_234202_inLine +BABEL_OP3_404_58313_20141119_234202_outLine +BABEL_OP3_404_59549_20141102_190355_inLine +BABEL_OP3_404_59549_20141102_190355_outLine +BABEL_OP3_404_60307_20150625_022621_inLine +BABEL_OP3_404_60307_20150625_022621_outLine +BABEL_OP3_404_61040_20141211_011552_inLine +BABEL_OP3_404_61040_20141211_011552_outLine +BABEL_OP3_404_61190_20141029_013447_inLine +BABEL_OP3_404_61190_20141029_013447_outLine +BABEL_OP3_404_64638_20141130_205157_inLine +BABEL_OP3_404_64638_20141130_205157_outLine +BABEL_OP3_404_66472_20141107_204602_inLine +BABEL_OP3_404_66472_20141107_204602_outLine +BABEL_OP3_404_66519_20141031_015751_inLine +BABEL_OP3_404_66519_20141031_015751_outLine +BABEL_OP3_404_67794_20141103_023323_inLine +BABEL_OP3_404_67794_20141103_023323_outLine +BABEL_OP3_404_73696_20150618_060036_inLine +BABEL_OP3_404_73696_20150618_060036_outLine +BABEL_OP3_404_73757_20141117_025704_inLine +BABEL_OP3_404_73757_20141117_025704_outLine +BABEL_OP3_404_74121_20141120_020705_inLine +BABEL_OP3_404_74121_20141120_020705_outLine +BABEL_OP3_404_80781_20141104_212234_inLine +BABEL_OP3_404_80781_20141104_212234_outLine +BABEL_OP3_404_80881_20141010_222135_inLine +BABEL_OP3_404_80881_20141010_222135_outLine +BABEL_OP3_404_81424_20141123_000421_inLine +BABEL_OP3_404_81424_20141123_000421_outLine +BABEL_OP3_404_87298_20141025_213601_inLine +BABEL_OP3_404_87298_20141025_213601_outLine +BABEL_OP3_404_87313_20141119_014632_inLine 
+BABEL_OP3_404_87313_20141119_014632_outLine +BABEL_OP3_404_87796_20141120_065537_inLine +BABEL_OP3_404_87796_20141120_065537_outLine +BABEL_OP3_404_87884_20141128_211555_inLine +BABEL_OP3_404_87884_20141128_211555_outLine +BABEL_OP3_404_88776_20141006_193621_inLine +BABEL_OP3_404_88776_20141006_193621_outLine +BABEL_OP3_404_91760_20150609_033824_inLine +BABEL_OP3_404_91760_20150609_033824_outLine +BABEL_OP3_404_91930_20150522_034521_inLine +BABEL_OP3_404_91930_20150522_034521_outLine +BABEL_OP3_404_92740_20141126_025242_inLine +BABEL_OP3_404_92740_20141126_025242_outLine +BABEL_OP3_404_97376_20141126_024552_inLine +BABEL_OP3_404_97376_20141126_024552_outLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/dev.list b/egs/babel/s5d/conf/lists/404-georgian/dev.list new file mode 100644 index 00000000000..a823552044c --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/dev.list @@ -0,0 +1,124 @@ +BABEL_OP3_404_10184_20141107_212406_inLine +BABEL_OP3_404_10184_20141107_212406_outLine +BABEL_OP3_404_12851_20141013_024620_inLine +BABEL_OP3_404_12851_20141013_024620_outLine +BABEL_OP3_404_16184_20141020_233508_inLine +BABEL_OP3_404_16184_20141020_233508_outLine +BABEL_OP3_404_17165_20141117_063008_inLine +BABEL_OP3_404_17165_20141117_063008_outLine +BABEL_OP3_404_17472_20141201_023731_inLine +BABEL_OP3_404_17472_20141201_023731_outLine +BABEL_OP3_404_18380_20141118_001754_inLine +BABEL_OP3_404_18380_20141118_001754_outLine +BABEL_OP3_404_18939_20141009_063127_inLine +BABEL_OP3_404_18939_20141009_063127_outLine +BABEL_OP3_404_22446_20141013_062554_inLine +BABEL_OP3_404_22446_20141013_062554_outLine +BABEL_OP3_404_22466_20141018_193129_inLine +BABEL_OP3_404_22466_20141018_193129_outLine +BABEL_OP3_404_22494_20141127_221208_inLine +BABEL_OP3_404_22494_20141127_221208_outLine +BABEL_OP3_404_22494_20141127_222057_inLine +BABEL_OP3_404_22494_20141127_222057_outLine +BABEL_OP3_404_23239_20141127_054155_inLine +BABEL_OP3_404_23239_20141127_054155_outLine 
+BABEL_OP3_404_24253_20150513_212152_inLine +BABEL_OP3_404_24253_20150513_212152_outLine +BABEL_OP3_404_24779_20150620_032949_inLine +BABEL_OP3_404_24779_20150620_032949_outLine +BABEL_OP3_404_26074_20141120_050650_inLine +BABEL_OP3_404_26074_20141120_050650_outLine +BABEL_OP3_404_28419_20141028_024104_inLine +BABEL_OP3_404_28419_20141028_024104_outLine +BABEL_OP3_404_33476_20141114_205102_inLine +BABEL_OP3_404_33476_20141114_205102_outLine +BABEL_OP3_404_34564_20141211_015413_inLine +BABEL_OP3_404_34564_20141211_015413_outLine +BABEL_OP3_404_35467_20141020_054030_inLine +BABEL_OP3_404_35467_20141020_054030_outLine +BABEL_OP3_404_38431_20141130_190122_inLine +BABEL_OP3_404_38431_20141130_190122_outLine +BABEL_OP3_404_41592_20141117_033328_inLine +BABEL_OP3_404_41592_20141117_033328_outLine +BABEL_OP3_404_41741_20141019_015552_inLine +BABEL_OP3_404_41741_20141019_015552_outLine +BABEL_OP3_404_42231_20141130_013425_inLine +BABEL_OP3_404_42231_20141130_013425_outLine +BABEL_OP3_404_42231_20141130_014628_inLine +BABEL_OP3_404_42231_20141130_014628_outLine +BABEL_OP3_404_42600_20141029_174857_inLine +BABEL_OP3_404_42600_20141029_174857_outLine +BABEL_OP3_404_44619_20141028_234639_inLine +BABEL_OP3_404_44619_20141028_234639_outLine +BABEL_OP3_404_46535_20150216_024618_inLine +BABEL_OP3_404_46535_20150216_024618_outLine +BABEL_OP3_404_46757_20141123_021510_inLine +BABEL_OP3_404_46757_20141123_021510_outLine +BABEL_OP3_404_47487_20141030_235808_inLine +BABEL_OP3_404_47487_20141030_235808_outLine +BABEL_OP3_404_47866_20150526_162411_inLine +BABEL_OP3_404_47866_20150526_162411_outLine +BABEL_OP3_404_47959_20141026_214447_inLine +BABEL_OP3_404_47959_20141026_214447_outLine +BABEL_OP3_404_51955_20141024_012212_inLine +BABEL_OP3_404_51955_20141024_012212_outLine +BABEL_OP3_404_51968_20141117_023015_inLine +BABEL_OP3_404_51968_20141117_023015_outLine +BABEL_OP3_404_52804_20141023_174815_inLine +BABEL_OP3_404_52804_20141023_174815_outLine 
+BABEL_OP3_404_54567_20141119_040337_inLine +BABEL_OP3_404_54567_20141119_040337_outLine +BABEL_OP3_404_56677_20141201_065523_inLine +BABEL_OP3_404_56677_20141201_065523_outLine +BABEL_OP3_404_56826_20141201_042429_inLine +BABEL_OP3_404_56826_20141201_042429_outLine +BABEL_OP3_404_58047_20141110_215330_inLine +BABEL_OP3_404_58047_20141110_215330_outLine +BABEL_OP3_404_58313_20141119_234202_inLine +BABEL_OP3_404_58313_20141119_234202_outLine +BABEL_OP3_404_59549_20141102_190355_inLine +BABEL_OP3_404_59549_20141102_190355_outLine +BABEL_OP3_404_60307_20150625_022621_inLine +BABEL_OP3_404_60307_20150625_022621_outLine +BABEL_OP3_404_61040_20141211_011552_inLine +BABEL_OP3_404_61040_20141211_011552_outLine +BABEL_OP3_404_61190_20141029_013447_inLine +BABEL_OP3_404_61190_20141029_013447_outLine +BABEL_OP3_404_64638_20141130_205157_inLine +BABEL_OP3_404_64638_20141130_205157_outLine +BABEL_OP3_404_66472_20141107_204602_inLine +BABEL_OP3_404_66472_20141107_204602_outLine +BABEL_OP3_404_66519_20141031_015751_inLine +BABEL_OP3_404_66519_20141031_015751_outLine +BABEL_OP3_404_67794_20141103_023323_inLine +BABEL_OP3_404_67794_20141103_023323_outLine +BABEL_OP3_404_73696_20150618_060036_inLine +BABEL_OP3_404_73696_20150618_060036_outLine +BABEL_OP3_404_73757_20141117_025704_inLine +BABEL_OP3_404_73757_20141117_025704_outLine +BABEL_OP3_404_74121_20141120_020705_inLine +BABEL_OP3_404_74121_20141120_020705_outLine +BABEL_OP3_404_80781_20141104_212234_inLine +BABEL_OP3_404_80781_20141104_212234_outLine +BABEL_OP3_404_80881_20141010_222135_inLine +BABEL_OP3_404_80881_20141010_222135_outLine +BABEL_OP3_404_81424_20141123_000421_inLine +BABEL_OP3_404_81424_20141123_000421_outLine +BABEL_OP3_404_87298_20141025_213601_inLine +BABEL_OP3_404_87298_20141025_213601_outLine +BABEL_OP3_404_87313_20141119_014632_inLine +BABEL_OP3_404_87313_20141119_014632_outLine +BABEL_OP3_404_87796_20141120_065537_inLine +BABEL_OP3_404_87796_20141120_065537_outLine 
+BABEL_OP3_404_87884_20141128_211555_inLine +BABEL_OP3_404_87884_20141128_211555_outLine +BABEL_OP3_404_88776_20141006_193621_inLine +BABEL_OP3_404_88776_20141006_193621_outLine +BABEL_OP3_404_91760_20150609_033824_inLine +BABEL_OP3_404_91760_20150609_033824_outLine +BABEL_OP3_404_91930_20150522_034521_inLine +BABEL_OP3_404_91930_20150522_034521_outLine +BABEL_OP3_404_92740_20141126_025242_inLine +BABEL_OP3_404_92740_20141126_025242_outLine +BABEL_OP3_404_97376_20141126_024552_inLine +BABEL_OP3_404_97376_20141126_024552_outLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/eval.list b/egs/babel/s5d/conf/lists/404-georgian/eval.list new file mode 100644 index 00000000000..d197b90ee2f --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/eval.list @@ -0,0 +1,956 @@ +BABEL_OP3_404_10036_20141030_200515_inLine +BABEL_OP3_404_10036_20141030_200515_outLine +BABEL_OP3_404_10188_20141021_043537_inLine +BABEL_OP3_404_10188_20141021_043537_outLine +BABEL_OP3_404_10319_20141015_010220_inLine +BABEL_OP3_404_10319_20141015_010220_outLine +BABEL_OP3_404_10319_20141015_011118_inLine +BABEL_OP3_404_10319_20141015_011118_outLine +BABEL_OP3_404_10482_20141130_013900_inLine +BABEL_OP3_404_10482_20141130_013900_outLine +BABEL_OP3_404_10524_20150518_002415_inLine +BABEL_OP3_404_10524_20150518_002415_outLine +BABEL_OP3_404_10901_20141120_172058_inLine +BABEL_OP3_404_10901_20141120_172058_outLine +BABEL_OP3_404_10966_20141027_000701_inLine +BABEL_OP3_404_10966_20141027_000701_outLine +BABEL_OP3_404_11419_20150212_050835_inLine +BABEL_OP3_404_11419_20150212_050835_outLine +BABEL_OP3_404_11419_20150212_051550_inLine +BABEL_OP3_404_11419_20150212_051550_outLine +BABEL_OP3_404_11581_20141110_223927_inLine +BABEL_OP3_404_11581_20141110_223927_outLine +BABEL_OP3_404_11797_20141019_195244_inLine +BABEL_OP3_404_11797_20141019_195244_outLine +BABEL_OP3_404_12321_20141211_055837_inLine +BABEL_OP3_404_12321_20141211_055837_outLine +BABEL_OP3_404_13040_20141024_004921_inLine 
+BABEL_OP3_404_13040_20141024_004921_outLine +BABEL_OP3_404_13427_20141107_220103_inLine +BABEL_OP3_404_13427_20141107_220103_outLine +BABEL_OP3_404_13483_20141128_002800_inLine +BABEL_OP3_404_13483_20141128_002800_outLine +BABEL_OP3_404_13490_20141118_023408_inLine +BABEL_OP3_404_13490_20141118_023408_outLine +BABEL_OP3_404_13561_20141115_003843_inLine +BABEL_OP3_404_13561_20141115_003843_outLine +BABEL_OP3_404_13586_20141106_180057_inLine +BABEL_OP3_404_13586_20141106_180057_outLine +BABEL_OP3_404_13744_20141021_043037_inLine +BABEL_OP3_404_13744_20141021_043037_outLine +BABEL_OP3_404_13792_20141011_010111_inLine +BABEL_OP3_404_13792_20141011_010111_outLine +BABEL_OP3_404_14097_20150211_010746_inLine +BABEL_OP3_404_14097_20150211_010746_outLine +BABEL_OP3_404_14179_20141201_063636_inLine +BABEL_OP3_404_14179_20141201_063636_outLine +BABEL_OP3_404_14228_20141130_062059_inLine +BABEL_OP3_404_14228_20141130_062059_outLine +BABEL_OP3_404_14560_20141201_073709_inLine +BABEL_OP3_404_14560_20141201_073709_outLine +BABEL_OP3_404_14719_20141201_014614_inLine +BABEL_OP3_404_14719_20141201_014614_outLine +BABEL_OP3_404_14725_20141013_005356_inLine +BABEL_OP3_404_14725_20141013_005356_outLine +BABEL_OP3_404_15163_20141115_035641_inLine +BABEL_OP3_404_15163_20141115_035641_outLine +BABEL_OP3_404_15322_20150512_231817_inLine +BABEL_OP3_404_15322_20150512_231817_outLine +BABEL_OP3_404_15324_20141120_031528_inLine +BABEL_OP3_404_15324_20141120_031528_outLine +BABEL_OP3_404_15702_20141129_051812_inLine +BABEL_OP3_404_15702_20141129_051812_outLine +BABEL_OP3_404_15730_20141021_055606_inLine +BABEL_OP3_404_15730_20141021_055606_outLine +BABEL_OP3_404_15926_20141124_004339_inLine +BABEL_OP3_404_15926_20141124_004339_outLine +BABEL_OP3_404_15926_20141124_005513_inLine +BABEL_OP3_404_15926_20141124_005513_outLine +BABEL_OP3_404_16056_20141009_005123_inLine +BABEL_OP3_404_16056_20141009_005123_outLine +BABEL_OP3_404_16787_20141120_174312_inLine 
+BABEL_OP3_404_16787_20141120_174312_outLine +BABEL_OP3_404_16800_20141212_184132_inLine +BABEL_OP3_404_16800_20141212_184132_outLine +BABEL_OP3_404_16800_20141212_185849_inLine +BABEL_OP3_404_16800_20141212_185849_outLine +BABEL_OP3_404_16886_20141117_002313_inLine +BABEL_OP3_404_16886_20141117_002313_outLine +BABEL_OP3_404_16886_20141117_003801_inLine +BABEL_OP3_404_16886_20141117_003801_outLine +BABEL_OP3_404_16924_20141201_020122_inLine +BABEL_OP3_404_16924_20141201_020122_outLine +BABEL_OP3_404_16938_20141118_045730_inLine +BABEL_OP3_404_16938_20141118_045730_outLine +BABEL_OP3_404_17032_20141128_030249_inLine +BABEL_OP3_404_17032_20141128_030249_outLine +BABEL_OP3_404_17440_20141127_041844_inLine +BABEL_OP3_404_17440_20141127_041844_outLine +BABEL_OP3_404_17496_20141130_022805_inLine +BABEL_OP3_404_17496_20141130_022805_outLine +BABEL_OP3_404_17751_20150611_030539_inLine +BABEL_OP3_404_17751_20150611_030539_outLine +BABEL_OP3_404_17881_20150524_231317_inLine +BABEL_OP3_404_17881_20150524_231317_outLine +BABEL_OP3_404_17914_20150526_054931_inLine +BABEL_OP3_404_17914_20150526_054931_outLine +BABEL_OP3_404_18280_20150213_011322_inLine +BABEL_OP3_404_18280_20150213_011322_outLine +BABEL_OP3_404_18370_20150210_194727_inLine +BABEL_OP3_404_18370_20150210_194727_outLine +BABEL_OP3_404_18924_20141110_211055_inLine +BABEL_OP3_404_18924_20141110_211055_outLine +BABEL_OP3_404_19101_20141113_042102_inLine +BABEL_OP3_404_19101_20141113_042102_outLine +BABEL_OP3_404_19545_20141107_223152_inLine +BABEL_OP3_404_19545_20141107_223152_outLine +BABEL_OP3_404_19621_20141201_041129_inLine +BABEL_OP3_404_19621_20141201_041129_outLine +BABEL_OP3_404_19672_20141124_015046_inLine +BABEL_OP3_404_19672_20141124_015046_outLine +BABEL_OP3_404_19722_20141006_033717_inLine +BABEL_OP3_404_19722_20141006_033717_outLine +BABEL_OP3_404_19782_20141201_231608_inLine +BABEL_OP3_404_19782_20141201_231608_outLine +BABEL_OP3_404_19818_20141124_044516_inLine 
+BABEL_OP3_404_19818_20141124_044516_outLine +BABEL_OP3_404_20367_20150618_055644_inLine +BABEL_OP3_404_20367_20150618_055644_outLine +BABEL_OP3_404_20682_20141211_044056_inLine +BABEL_OP3_404_20682_20141211_044056_outLine +BABEL_OP3_404_20682_20141211_045257_inLine +BABEL_OP3_404_20682_20141211_045257_outLine +BABEL_OP3_404_20738_20150503_191409_inLine +BABEL_OP3_404_20738_20150503_191409_outLine +BABEL_OP3_404_20768_20141207_081305_inLine +BABEL_OP3_404_20768_20141207_081305_outLine +BABEL_OP3_404_20800_20141022_192312_inLine +BABEL_OP3_404_20800_20141022_192312_outLine +BABEL_OP3_404_20916_20141006_192451_inLine +BABEL_OP3_404_20916_20141006_192451_outLine +BABEL_OP3_404_21029_20141105_033902_inLine +BABEL_OP3_404_21029_20141105_033902_outLine +BABEL_OP3_404_21206_20141024_194128_inLine +BABEL_OP3_404_21206_20141024_194128_outLine +BABEL_OP3_404_21624_20150525_034841_inLine +BABEL_OP3_404_21624_20150525_034841_outLine +BABEL_OP3_404_21794_20141115_220258_inLine +BABEL_OP3_404_21794_20141115_220258_outLine +BABEL_OP3_404_22021_20150217_213437_inLine +BABEL_OP3_404_22021_20150217_213437_outLine +BABEL_OP3_404_22021_20150220_194248_inLine +BABEL_OP3_404_22021_20150220_194248_outLine +BABEL_OP3_404_22034_20150211_165126_inLine +BABEL_OP3_404_22034_20150211_165126_outLine +BABEL_OP3_404_22170_20150528_002541_inLine +BABEL_OP3_404_22170_20150528_002541_outLine +BABEL_OP3_404_22216_20141020_051333_inLine +BABEL_OP3_404_22216_20141020_051333_outLine +BABEL_OP3_404_22321_20141019_214812_inLine +BABEL_OP3_404_22321_20141019_214812_outLine +BABEL_OP3_404_22612_20141201_080517_inLine +BABEL_OP3_404_22612_20141201_080517_outLine +BABEL_OP3_404_22641_20141021_165119_inLine +BABEL_OP3_404_22641_20141021_165119_outLine +BABEL_OP3_404_22965_20141101_192617_inLine +BABEL_OP3_404_22965_20141101_192617_outLine +BABEL_OP3_404_23006_20141026_211155_inLine +BABEL_OP3_404_23006_20141026_211155_outLine +BABEL_OP3_404_23092_20141129_005335_inLine 
+BABEL_OP3_404_23092_20141129_005335_outLine +BABEL_OP3_404_23153_20141118_015224_inLine +BABEL_OP3_404_23153_20141118_015224_outLine +BABEL_OP3_404_23628_20141027_170345_inLine +BABEL_OP3_404_23628_20141027_170345_outLine +BABEL_OP3_404_24017_20141211_021947_inLine +BABEL_OP3_404_24017_20141211_021947_outLine +BABEL_OP3_404_24290_20150515_164252_inLine +BABEL_OP3_404_24290_20150515_164252_outLine +BABEL_OP3_404_24569_20141130_214924_inLine +BABEL_OP3_404_24569_20141130_214924_outLine +BABEL_OP3_404_24605_20141013_043620_inLine +BABEL_OP3_404_24605_20141013_043620_outLine +BABEL_OP3_404_25698_20150611_021501_inLine +BABEL_OP3_404_25698_20150611_021501_outLine +BABEL_OP3_404_25767_20141009_211814_inLine +BABEL_OP3_404_25767_20141009_211814_outLine +BABEL_OP3_404_26206_20141128_031139_inLine +BABEL_OP3_404_26206_20141128_031139_outLine +BABEL_OP3_404_26999_20141130_004320_inLine +BABEL_OP3_404_26999_20141130_004320_outLine +BABEL_OP3_404_27082_20141119_041436_inLine +BABEL_OP3_404_27082_20141119_041436_outLine +BABEL_OP3_404_27125_20141007_032335_inLine +BABEL_OP3_404_27125_20141007_032335_outLine +BABEL_OP3_404_27478_20150514_205232_inLine +BABEL_OP3_404_27478_20150514_205232_outLine +BABEL_OP3_404_28422_20141124_055809_inLine +BABEL_OP3_404_28422_20141124_055809_outLine +BABEL_OP3_404_28606_20141127_011719_inLine +BABEL_OP3_404_28606_20141127_011719_outLine +BABEL_OP3_404_28775_20141028_193907_inLine +BABEL_OP3_404_28775_20141028_193907_outLine +BABEL_OP3_404_29023_20141024_225827_inLine +BABEL_OP3_404_29023_20141024_225827_outLine +BABEL_OP3_404_29072_20141128_023212_inLine +BABEL_OP3_404_29072_20141128_023212_outLine +BABEL_OP3_404_29135_20141022_182050_inLine +BABEL_OP3_404_29135_20141022_182050_outLine +BABEL_OP3_404_29168_20141023_013832_inLine +BABEL_OP3_404_29168_20141023_013832_outLine +BABEL_OP3_404_29352_20150618_035033_inLine +BABEL_OP3_404_29352_20150618_035033_outLine +BABEL_OP3_404_29352_20150618_041025_inLine 
+BABEL_OP3_404_29352_20150618_041025_outLine +BABEL_OP3_404_29685_20141103_223309_inLine +BABEL_OP3_404_29685_20141103_223309_outLine +BABEL_OP3_404_29765_20150616_155830_inLine +BABEL_OP3_404_29765_20150616_155830_outLine +BABEL_OP3_404_30013_20141127_211853_inLine +BABEL_OP3_404_30013_20141127_211853_outLine +BABEL_OP3_404_30058_20150514_024957_inLine +BABEL_OP3_404_30058_20150514_024957_outLine +BABEL_OP3_404_30180_20141118_011806_inLine +BABEL_OP3_404_30180_20141118_011806_outLine +BABEL_OP3_404_30253_20141201_051926_inLine +BABEL_OP3_404_30253_20141201_051926_outLine +BABEL_OP3_404_30395_20141106_185545_inLine +BABEL_OP3_404_30395_20141106_185545_outLine +BABEL_OP3_404_31039_20150217_050120_inLine +BABEL_OP3_404_31039_20150217_050120_outLine +BABEL_OP3_404_31039_20150217_051317_inLine +BABEL_OP3_404_31039_20150217_051317_outLine +BABEL_OP3_404_31074_20150121_022649_inLine +BABEL_OP3_404_31074_20150121_022649_outLine +BABEL_OP3_404_31184_20141118_183536_inLine +BABEL_OP3_404_31184_20141118_183536_outLine +BABEL_OP3_404_31490_20141022_200135_inLine +BABEL_OP3_404_31490_20141022_200135_outLine +BABEL_OP3_404_31583_20141130_004731_inLine +BABEL_OP3_404_31583_20141130_004731_outLine +BABEL_OP3_404_31628_20141202_000346_inLine +BABEL_OP3_404_31628_20141202_000346_outLine +BABEL_OP3_404_32097_20141006_221638_inLine +BABEL_OP3_404_32097_20141006_221638_outLine +BABEL_OP3_404_32244_20150609_043200_inLine +BABEL_OP3_404_32244_20150609_043200_outLine +BABEL_OP3_404_32301_20141126_204138_inLine +BABEL_OP3_404_32301_20141126_204138_outLine +BABEL_OP3_404_33111_20150528_004829_inLine +BABEL_OP3_404_33111_20150528_004829_outLine +BABEL_OP3_404_33251_20141119_205146_inLine +BABEL_OP3_404_33251_20141119_205146_outLine +BABEL_OP3_404_33273_20141105_213401_inLine +BABEL_OP3_404_33273_20141105_213401_outLine +BABEL_OP3_404_33497_20141119_051436_inLine +BABEL_OP3_404_33497_20141119_051436_outLine +BABEL_OP3_404_33635_20141106_005750_inLine 
+BABEL_OP3_404_33635_20141106_005750_outLine +BABEL_OP3_404_33672_20141014_004055_inLine +BABEL_OP3_404_33672_20141014_004055_outLine +BABEL_OP3_404_33672_20141014_005233_inLine +BABEL_OP3_404_33672_20141014_005233_outLine +BABEL_OP3_404_33951_20141119_072531_inLine +BABEL_OP3_404_33951_20141119_072531_outLine +BABEL_OP3_404_34197_20141018_201528_inLine +BABEL_OP3_404_34197_20141018_201528_outLine +BABEL_OP3_404_34336_20141027_211535_inLine +BABEL_OP3_404_34336_20141027_211535_outLine +BABEL_OP3_404_34477_20141027_184645_inLine +BABEL_OP3_404_34477_20141027_184645_outLine +BABEL_OP3_404_34903_20141124_020719_inLine +BABEL_OP3_404_34903_20141124_020719_outLine +BABEL_OP3_404_35139_20141023_224322_inLine +BABEL_OP3_404_35139_20141023_224322_outLine +BABEL_OP3_404_35202_20141128_053756_inLine +BABEL_OP3_404_35202_20141128_053756_outLine +BABEL_OP3_404_35885_20150518_015426_inLine +BABEL_OP3_404_35885_20150518_015426_outLine +BABEL_OP3_404_36293_20141006_004659_inLine +BABEL_OP3_404_36293_20141006_004659_outLine +BABEL_OP3_404_36341_20141021_045218_inLine +BABEL_OP3_404_36341_20141021_045218_outLine +BABEL_OP3_404_36669_20141116_050542_inLine +BABEL_OP3_404_36669_20141116_050542_outLine +BABEL_OP3_404_36894_20141009_013557_inLine +BABEL_OP3_404_36894_20141009_013557_outLine +BABEL_OP3_404_36990_20141117_041052_inLine +BABEL_OP3_404_36990_20141117_041052_outLine +BABEL_OP3_404_37068_20150212_050250_inLine +BABEL_OP3_404_37068_20150212_050250_outLine +BABEL_OP3_404_37285_20141128_060822_inLine +BABEL_OP3_404_37285_20141128_060822_outLine +BABEL_OP3_404_37684_20150211_031551_inLine +BABEL_OP3_404_37684_20150211_031551_outLine +BABEL_OP3_404_38076_20141129_030136_inLine +BABEL_OP3_404_38076_20141129_030136_outLine +BABEL_OP3_404_38689_20141128_235841_inLine +BABEL_OP3_404_38689_20141128_235841_outLine +BABEL_OP3_404_38741_20141028_190310_inLine +BABEL_OP3_404_38741_20141028_190310_outLine +BABEL_OP3_404_38750_20141130_052516_inLine 
+BABEL_OP3_404_38750_20141130_052516_outLine +BABEL_OP3_404_38878_20141118_224023_inLine +BABEL_OP3_404_38878_20141118_224023_outLine +BABEL_OP3_404_39006_20150617_032943_inLine +BABEL_OP3_404_39006_20150617_032943_outLine +BABEL_OP3_404_39159_20141021_033733_inLine +BABEL_OP3_404_39159_20141021_033733_outLine +BABEL_OP3_404_39848_20141113_234103_inLine +BABEL_OP3_404_39848_20141113_234103_outLine +BABEL_OP3_404_40565_20141126_191549_inLine +BABEL_OP3_404_40565_20141126_191549_outLine +BABEL_OP3_404_41038_20141201_070557_inLine +BABEL_OP3_404_41038_20141201_070557_outLine +BABEL_OP3_404_41174_20141117_033354_inLine +BABEL_OP3_404_41174_20141117_033354_outLine +BABEL_OP3_404_41442_20141201_065524_inLine +BABEL_OP3_404_41442_20141201_065524_outLine +BABEL_OP3_404_41469_20141015_041032_inLine +BABEL_OP3_404_41469_20141015_041032_outLine +BABEL_OP3_404_41493_20141007_192601_inLine +BABEL_OP3_404_41493_20141007_192601_outLine +BABEL_OP3_404_41618_20141114_232533_inLine +BABEL_OP3_404_41618_20141114_232533_outLine +BABEL_OP3_404_41890_20150516_214915_inLine +BABEL_OP3_404_41890_20150516_214915_outLine +BABEL_OP3_404_42146_20150524_225524_inLine +BABEL_OP3_404_42146_20150524_225524_outLine +BABEL_OP3_404_42434_20141101_015900_inLine +BABEL_OP3_404_42434_20141101_015900_outLine +BABEL_OP3_404_42718_20150514_042601_inLine +BABEL_OP3_404_42718_20150514_042601_outLine +BABEL_OP3_404_42771_20141119_032738_inLine +BABEL_OP3_404_42771_20141119_032738_outLine +BABEL_OP3_404_42942_20141105_231330_inLine +BABEL_OP3_404_42942_20141105_231330_outLine +BABEL_OP3_404_42991_20141201_174138_inLine +BABEL_OP3_404_42991_20141201_174138_outLine +BABEL_OP3_404_43115_20150518_051249_inLine +BABEL_OP3_404_43115_20150518_051249_outLine +BABEL_OP3_404_43285_20141127_224948_inLine +BABEL_OP3_404_43285_20141127_224948_outLine +BABEL_OP3_404_43286_20141011_233252_inLine +BABEL_OP3_404_43286_20141011_233252_outLine +BABEL_OP3_404_43646_20141011_031534_inLine 
+BABEL_OP3_404_43646_20141011_031534_outLine +BABEL_OP3_404_43784_20141101_215816_inLine +BABEL_OP3_404_43784_20141101_215816_outLine +BABEL_OP3_404_43784_20141101_220445_inLine +BABEL_OP3_404_43784_20141101_220445_outLine +BABEL_OP3_404_43784_20141101_222312_inLine +BABEL_OP3_404_43784_20141101_222312_outLine +BABEL_OP3_404_43788_20141125_190621_inLine +BABEL_OP3_404_43788_20141125_190621_outLine +BABEL_OP3_404_43920_20141128_232903_inLine +BABEL_OP3_404_43920_20141128_232903_outLine +BABEL_OP3_404_44255_20150525_073716_inLine +BABEL_OP3_404_44255_20150525_073716_outLine +BABEL_OP3_404_44420_20141025_211032_inLine +BABEL_OP3_404_44420_20141025_211032_outLine +BABEL_OP3_404_44531_20150527_015805_inLine +BABEL_OP3_404_44531_20150527_015805_outLine +BABEL_OP3_404_44709_20141126_024811_inLine +BABEL_OP3_404_44709_20141126_024811_outLine +BABEL_OP3_404_44868_20141123_032254_inLine +BABEL_OP3_404_44868_20141123_032254_outLine +BABEL_OP3_404_45642_20141011_233950_inLine +BABEL_OP3_404_45642_20141011_233950_outLine +BABEL_OP3_404_45770_20141009_185730_inLine +BABEL_OP3_404_45770_20141009_185730_outLine +BABEL_OP3_404_45777_20141028_195713_inLine +BABEL_OP3_404_45777_20141028_195713_outLine +BABEL_OP3_404_45843_20141124_042608_inLine +BABEL_OP3_404_45843_20141124_042608_outLine +BABEL_OP3_404_46008_20150525_024936_inLine +BABEL_OP3_404_46008_20150525_024936_outLine +BABEL_OP3_404_46261_20141117_200301_inLine +BABEL_OP3_404_46261_20141117_200301_outLine +BABEL_OP3_404_46389_20150216_043700_inLine +BABEL_OP3_404_46389_20150216_043700_outLine +BABEL_OP3_404_46558_20141020_013256_inLine +BABEL_OP3_404_46558_20141020_013256_outLine +BABEL_OP3_404_46589_20141126_010932_inLine +BABEL_OP3_404_46589_20141126_010932_outLine +BABEL_OP3_404_46702_20141021_004925_inLine +BABEL_OP3_404_46702_20141021_004925_outLine +BABEL_OP3_404_47110_20150211_041423_inLine +BABEL_OP3_404_47110_20150211_041423_outLine +BABEL_OP3_404_47186_20141130_032126_inLine 
+BABEL_OP3_404_47186_20141130_032126_outLine +BABEL_OP3_404_47215_20141016_012848_inLine +BABEL_OP3_404_47215_20141016_012848_outLine +BABEL_OP3_404_47283_20141105_063730_inLine +BABEL_OP3_404_47283_20141105_063730_outLine +BABEL_OP3_404_47451_20141201_044107_inLine +BABEL_OP3_404_47451_20141201_044107_outLine +BABEL_OP3_404_47451_20141201_045923_inLine +BABEL_OP3_404_47451_20141201_045923_outLine +BABEL_OP3_404_47878_20141115_030044_inLine +BABEL_OP3_404_47878_20141115_030044_outLine +BABEL_OP3_404_48789_20141130_013950_inLine +BABEL_OP3_404_48789_20141130_013950_outLine +BABEL_OP3_404_49001_20141102_054949_inLine +BABEL_OP3_404_49001_20141102_054949_outLine +BABEL_OP3_404_49216_20141023_021720_inLine +BABEL_OP3_404_49216_20141023_021720_outLine +BABEL_OP3_404_49287_20141201_003931_inLine +BABEL_OP3_404_49287_20141201_003931_outLine +BABEL_OP3_404_49502_20141012_055001_inLine +BABEL_OP3_404_49502_20141012_055001_outLine +BABEL_OP3_404_49637_20141006_052951_inLine +BABEL_OP3_404_49637_20141006_052951_outLine +BABEL_OP3_404_50090_20141119_215921_inLine +BABEL_OP3_404_50090_20141119_215921_outLine +BABEL_OP3_404_50427_20141108_184045_inLine +BABEL_OP3_404_50427_20141108_184045_outLine +BABEL_OP3_404_50630_20141123_224108_inLine +BABEL_OP3_404_50630_20141123_224108_outLine +BABEL_OP3_404_50681_20141119_074034_inLine +BABEL_OP3_404_50681_20141119_074034_outLine +BABEL_OP3_404_50726_20141021_005526_inLine +BABEL_OP3_404_50726_20141021_005526_outLine +BABEL_OP3_404_50958_20141118_184358_inLine +BABEL_OP3_404_50958_20141118_184358_outLine +BABEL_OP3_404_50958_20141118_185604_inLine +BABEL_OP3_404_50958_20141118_185604_outLine +BABEL_OP3_404_50962_20141107_060744_inLine +BABEL_OP3_404_50962_20141107_060744_outLine +BABEL_OP3_404_51407_20141117_062029_inLine +BABEL_OP3_404_51407_20141117_062029_outLine +BABEL_OP3_404_51611_20141022_024919_inLine +BABEL_OP3_404_51611_20141022_024919_outLine +BABEL_OP3_404_51819_20141126_211917_inLine 
+BABEL_OP3_404_51819_20141126_211917_outLine +BABEL_OP3_404_52272_20141006_031940_inLine +BABEL_OP3_404_52272_20141006_031940_outLine +BABEL_OP3_404_52438_20141104_034612_inLine +BABEL_OP3_404_52438_20141104_034612_outLine +BABEL_OP3_404_52442_20141109_004908_inLine +BABEL_OP3_404_52442_20141109_004908_outLine +BABEL_OP3_404_52614_20150503_200805_inLine +BABEL_OP3_404_52614_20150503_200805_outLine +BABEL_OP3_404_52694_20141121_043410_inLine +BABEL_OP3_404_52694_20141121_043410_outLine +BABEL_OP3_404_52717_20141014_234034_inLine +BABEL_OP3_404_52717_20141014_234034_outLine +BABEL_OP3_404_52818_20141130_231525_inLine +BABEL_OP3_404_52818_20141130_231525_outLine +BABEL_OP3_404_52932_20141101_234724_inLine +BABEL_OP3_404_52932_20141101_234724_outLine +BABEL_OP3_404_53419_20141201_030819_inLine +BABEL_OP3_404_53419_20141201_030819_outLine +BABEL_OP3_404_53842_20141119_044935_inLine +BABEL_OP3_404_53842_20141119_044935_outLine +BABEL_OP3_404_54074_20141129_060147_inLine +BABEL_OP3_404_54074_20141129_060147_outLine +BABEL_OP3_404_54162_20141119_032442_inLine +BABEL_OP3_404_54162_20141119_032442_outLine +BABEL_OP3_404_54390_20141028_230702_inLine +BABEL_OP3_404_54390_20141028_230702_outLine +BABEL_OP3_404_54530_20141130_011651_inLine +BABEL_OP3_404_54530_20141130_011651_outLine +BABEL_OP3_404_54697_20141201_053854_inLine +BABEL_OP3_404_54697_20141201_053854_outLine +BABEL_OP3_404_54953_20141115_022411_inLine +BABEL_OP3_404_54953_20141115_022411_outLine +BABEL_OP3_404_55742_20141102_071943_inLine +BABEL_OP3_404_55742_20141102_071943_outLine +BABEL_OP3_404_55818_20141014_062259_inLine +BABEL_OP3_404_55818_20141014_062259_outLine +BABEL_OP3_404_55950_20150502_234657_inLine +BABEL_OP3_404_55950_20150502_234657_outLine +BABEL_OP3_404_55968_20141009_231223_inLine +BABEL_OP3_404_55968_20141009_231223_outLine +BABEL_OP3_404_56090_20141019_172050_inLine +BABEL_OP3_404_56090_20141019_172050_outLine +BABEL_OP3_404_56198_20141103_031752_inLine 
+BABEL_OP3_404_56198_20141103_031752_outLine +BABEL_OP3_404_56307_20141201_210608_inLine +BABEL_OP3_404_56307_20141201_210608_outLine +BABEL_OP3_404_56370_20141010_013542_inLine +BABEL_OP3_404_56370_20141010_013542_outLine +BABEL_OP3_404_56429_20141024_003551_inLine +BABEL_OP3_404_56429_20141024_003551_outLine +BABEL_OP3_404_56523_20141114_215534_inLine +BABEL_OP3_404_56523_20141114_215534_outLine +BABEL_OP3_404_56720_20141129_182808_inLine +BABEL_OP3_404_56720_20141129_182808_outLine +BABEL_OP3_404_56720_20141129_183649_inLine +BABEL_OP3_404_56720_20141129_183649_outLine +BABEL_OP3_404_57093_20141118_034107_inLine +BABEL_OP3_404_57093_20141118_034107_outLine +BABEL_OP3_404_57116_20141008_023139_inLine +BABEL_OP3_404_57116_20141008_023139_outLine +BABEL_OP3_404_57529_20141201_050129_inLine +BABEL_OP3_404_57529_20141201_050129_outLine +BABEL_OP3_404_57548_20141119_194430_inLine +BABEL_OP3_404_57548_20141119_194430_outLine +BABEL_OP3_404_57609_20141117_063904_inLine +BABEL_OP3_404_57609_20141117_063904_outLine +BABEL_OP3_404_57609_20141119_223552_inLine +BABEL_OP3_404_57609_20141119_223552_outLine +BABEL_OP3_404_57922_20141119_172249_inLine +BABEL_OP3_404_57922_20141119_172249_outLine +BABEL_OP3_404_57935_20141122_233816_inLine +BABEL_OP3_404_57935_20141122_233816_outLine +BABEL_OP3_404_58107_20141107_223929_inLine +BABEL_OP3_404_58107_20141107_223929_outLine +BABEL_OP3_404_58145_20141120_014653_inLine +BABEL_OP3_404_58145_20141120_014653_outLine +BABEL_OP3_404_58489_20141201_035927_inLine +BABEL_OP3_404_58489_20141201_035927_outLine +BABEL_OP3_404_58717_20141106_221300_inLine +BABEL_OP3_404_58717_20141106_221300_outLine +BABEL_OP3_404_58734_20141019_223233_inLine +BABEL_OP3_404_58734_20141019_223233_outLine +BABEL_OP3_404_58815_20141129_230108_inLine +BABEL_OP3_404_58815_20141129_230108_outLine +BABEL_OP3_404_58821_20141128_224222_inLine +BABEL_OP3_404_58821_20141128_224222_outLine +BABEL_OP3_404_58850_20141116_234915_inLine 
+BABEL_OP3_404_58850_20141116_234915_outLine +BABEL_OP3_404_58926_20141105_025457_inLine +BABEL_OP3_404_58926_20141105_025457_outLine +BABEL_OP3_404_59163_20150212_233430_inLine +BABEL_OP3_404_59163_20150212_233430_outLine +BABEL_OP3_404_59291_20141129_223855_inLine +BABEL_OP3_404_59291_20141129_223855_outLine +BABEL_OP3_404_59509_20141120_010036_inLine +BABEL_OP3_404_59509_20141120_010036_outLine +BABEL_OP3_404_59747_20141020_002625_inLine +BABEL_OP3_404_59747_20141020_002625_outLine +BABEL_OP3_404_59928_20141107_063850_inLine +BABEL_OP3_404_59928_20141107_063850_outLine +BABEL_OP3_404_59993_20141102_204023_inLine +BABEL_OP3_404_59993_20141102_204023_outLine +BABEL_OP3_404_60115_20141123_045055_inLine +BABEL_OP3_404_60115_20141123_045055_outLine +BABEL_OP3_404_60418_20141201_012853_inLine +BABEL_OP3_404_60418_20141201_012853_outLine +BABEL_OP3_404_60538_20141010_000421_inLine +BABEL_OP3_404_60538_20141010_000421_outLine +BABEL_OP3_404_60661_20141023_185331_inLine +BABEL_OP3_404_60661_20141023_185331_outLine +BABEL_OP3_404_60830_20141119_050849_inLine +BABEL_OP3_404_60830_20141119_050849_outLine +BABEL_OP3_404_60836_20141026_014449_inLine +BABEL_OP3_404_60836_20141026_014449_outLine +BABEL_OP3_404_61011_20141022_235244_inLine +BABEL_OP3_404_61011_20141022_235244_outLine +BABEL_OP3_404_61357_20141118_052326_inLine +BABEL_OP3_404_61357_20141118_052326_outLine +BABEL_OP3_404_61731_20141026_185743_inLine +BABEL_OP3_404_61731_20141026_185743_outLine +BABEL_OP3_404_62014_20141120_021455_inLine +BABEL_OP3_404_62014_20141120_021455_outLine +BABEL_OP3_404_62177_20150503_025324_inLine +BABEL_OP3_404_62177_20150503_025324_outLine +BABEL_OP3_404_62200_20141115_024033_inLine +BABEL_OP3_404_62200_20141115_024033_outLine +BABEL_OP3_404_62289_20150526_045908_inLine +BABEL_OP3_404_62289_20150526_045908_outLine +BABEL_OP3_404_62430_20150526_181036_inLine +BABEL_OP3_404_62430_20150526_181036_outLine +BABEL_OP3_404_62434_20141019_201121_inLine 
+BABEL_OP3_404_62434_20141019_201121_outLine +BABEL_OP3_404_62656_20150119_185511_inLine +BABEL_OP3_404_62656_20150119_185511_outLine +BABEL_OP3_404_62800_20141020_020318_inLine +BABEL_OP3_404_62800_20141020_020318_outLine +BABEL_OP3_404_62835_20141119_043323_inLine +BABEL_OP3_404_62835_20141119_043323_outLine +BABEL_OP3_404_62976_20141119_061748_inLine +BABEL_OP3_404_62976_20141119_061748_outLine +BABEL_OP3_404_63307_20141119_192444_inLine +BABEL_OP3_404_63307_20141119_192444_outLine +BABEL_OP3_404_63445_20141021_013007_inLine +BABEL_OP3_404_63445_20141021_013007_outLine +BABEL_OP3_404_63523_20150512_050203_inLine +BABEL_OP3_404_63523_20150512_050203_outLine +BABEL_OP3_404_63604_20141011_021042_inLine +BABEL_OP3_404_63604_20141011_021042_outLine +BABEL_OP3_404_63787_20141010_225937_inLine +BABEL_OP3_404_63787_20141010_225937_outLine +BABEL_OP3_404_63938_20150526_052814_inLine +BABEL_OP3_404_63938_20150526_052814_outLine +BABEL_OP3_404_64350_20141022_195842_inLine +BABEL_OP3_404_64350_20141022_195842_outLine +BABEL_OP3_404_64398_20141126_031756_inLine +BABEL_OP3_404_64398_20141126_031756_outLine +BABEL_OP3_404_64902_20150522_041540_inLine +BABEL_OP3_404_64902_20150522_041540_outLine +BABEL_OP3_404_65064_20141127_003631_inLine +BABEL_OP3_404_65064_20141127_003631_outLine +BABEL_OP3_404_65077_20141015_025834_inLine +BABEL_OP3_404_65077_20141015_025834_outLine +BABEL_OP3_404_65466_20150524_182317_inLine +BABEL_OP3_404_65466_20150524_182317_outLine +BABEL_OP3_404_65477_20141115_020305_inLine +BABEL_OP3_404_65477_20141115_020305_outLine +BABEL_OP3_404_65692_20141117_074414_inLine +BABEL_OP3_404_65692_20141117_074414_outLine +BABEL_OP3_404_65723_20141102_051040_inLine +BABEL_OP3_404_65723_20141102_051040_outLine +BABEL_OP3_404_65882_20141024_191236_inLine +BABEL_OP3_404_65882_20141024_191236_outLine +BABEL_OP3_404_66001_20141006_015944_inLine +BABEL_OP3_404_66001_20141006_015944_outLine +BABEL_OP3_404_66026_20141130_061639_inLine 
+BABEL_OP3_404_66026_20141130_061639_outLine +BABEL_OP3_404_66350_20150212_043953_inLine +BABEL_OP3_404_66350_20150212_043953_outLine +BABEL_OP3_404_66959_20141130_212725_inLine +BABEL_OP3_404_66959_20141130_212725_outLine +BABEL_OP3_404_66975_20150119_001417_inLine +BABEL_OP3_404_66975_20150119_001417_outLine +BABEL_OP3_404_67066_20150611_043029_inLine +BABEL_OP3_404_67066_20150611_043029_outLine +BABEL_OP3_404_67283_20141008_234315_inLine +BABEL_OP3_404_67283_20141008_234315_outLine +BABEL_OP3_404_67373_20141106_191525_inLine +BABEL_OP3_404_67373_20141106_191525_outLine +BABEL_OP3_404_67373_20141106_192955_inLine +BABEL_OP3_404_67373_20141106_192955_outLine +BABEL_OP3_404_67622_20141021_002234_inLine +BABEL_OP3_404_67622_20141021_002234_outLine +BABEL_OP3_404_67659_20141101_010904_inLine +BABEL_OP3_404_67659_20141101_010904_outLine +BABEL_OP3_404_67964_20150515_011635_inLine +BABEL_OP3_404_67964_20150515_011635_outLine +BABEL_OP3_404_68040_20141118_235516_inLine +BABEL_OP3_404_68040_20141118_235516_outLine +BABEL_OP3_404_68748_20141123_003226_inLine +BABEL_OP3_404_68748_20141123_003226_outLine +BABEL_OP3_404_68854_20150512_025452_inLine +BABEL_OP3_404_68854_20150512_025452_outLine +BABEL_OP3_404_68924_20141119_025325_inLine +BABEL_OP3_404_68924_20141119_025325_outLine +BABEL_OP3_404_69992_20141014_035441_inLine +BABEL_OP3_404_69992_20141014_035441_outLine +BABEL_OP3_404_70110_20141020_043016_inLine +BABEL_OP3_404_70110_20141020_043016_outLine +BABEL_OP3_404_70251_20141009_221726_inLine +BABEL_OP3_404_70251_20141009_221726_outLine +BABEL_OP3_404_70293_20150118_220441_inLine +BABEL_OP3_404_70293_20150118_220441_outLine +BABEL_OP3_404_70343_20141126_030147_inLine +BABEL_OP3_404_70343_20141126_030147_outLine +BABEL_OP3_404_70386_20141029_002717_inLine +BABEL_OP3_404_70386_20141029_002717_outLine +BABEL_OP3_404_70452_20141028_031043_inLine +BABEL_OP3_404_70452_20141028_031043_outLine +BABEL_OP3_404_70601_20141103_194852_inLine 
+BABEL_OP3_404_70601_20141103_194852_outLine +BABEL_OP3_404_71704_20141021_001821_inLine +BABEL_OP3_404_71704_20141021_001821_outLine +BABEL_OP3_404_71704_20141021_002603_inLine +BABEL_OP3_404_71704_20141021_002603_outLine +BABEL_OP3_404_72007_20141201_045843_inLine +BABEL_OP3_404_72007_20141201_045843_outLine +BABEL_OP3_404_72040_20141103_035957_inLine +BABEL_OP3_404_72040_20141103_035957_outLine +BABEL_OP3_404_72040_20141103_042101_inLine +BABEL_OP3_404_72040_20141103_042101_outLine +BABEL_OP3_404_72110_20141128_013317_inLine +BABEL_OP3_404_72110_20141128_013317_outLine +BABEL_OP3_404_72324_20141201_013717_inLine +BABEL_OP3_404_72324_20141201_013717_outLine +BABEL_OP3_404_72654_20141110_003307_inLine +BABEL_OP3_404_72654_20141110_003307_outLine +BABEL_OP3_404_73042_20141022_163748_inLine +BABEL_OP3_404_73042_20141022_163748_outLine +BABEL_OP3_404_73301_20141101_210322_inLine +BABEL_OP3_404_73301_20141101_210322_outLine +BABEL_OP3_404_73446_20150513_002217_inLine +BABEL_OP3_404_73446_20150513_002217_outLine +BABEL_OP3_404_73511_20141129_045420_inLine +BABEL_OP3_404_73511_20141129_045420_outLine +BABEL_OP3_404_73549_20150619_204148_inLine +BABEL_OP3_404_73549_20150619_204148_outLine +BABEL_OP3_404_73591_20141018_022404_inLine +BABEL_OP3_404_73591_20141018_022404_outLine +BABEL_OP3_404_73622_20141016_060513_inLine +BABEL_OP3_404_73622_20141016_060513_outLine +BABEL_OP3_404_73814_20141120_180559_inLine +BABEL_OP3_404_73814_20141120_180559_outLine +BABEL_OP3_404_74226_20141130_235823_inLine +BABEL_OP3_404_74226_20141130_235823_outLine +BABEL_OP3_404_74253_20141201_231036_inLine +BABEL_OP3_404_74253_20141201_231036_outLine +BABEL_OP3_404_74280_20141010_230433_inLine +BABEL_OP3_404_74280_20141010_230433_outLine +BABEL_OP3_404_74667_20141114_221123_inLine +BABEL_OP3_404_74667_20141114_221123_outLine +BABEL_OP3_404_74886_20141022_200909_inLine +BABEL_OP3_404_74886_20141022_200909_outLine +BABEL_OP3_404_74921_20141124_030609_inLine 
+BABEL_OP3_404_74921_20141124_030609_outLine +BABEL_OP3_404_75223_20141012_224637_inLine +BABEL_OP3_404_75223_20141012_224637_outLine +BABEL_OP3_404_75342_20141130_193132_inLine +BABEL_OP3_404_75342_20141130_193132_outLine +BABEL_OP3_404_75930_20150206_063407_inLine +BABEL_OP3_404_75930_20150206_063407_outLine +BABEL_OP3_404_75993_20141102_192754_inLine +BABEL_OP3_404_75993_20141102_192754_outLine +BABEL_OP3_404_76155_20141118_052757_inLine +BABEL_OP3_404_76155_20141118_052757_outLine +BABEL_OP3_404_76218_20141119_232010_inLine +BABEL_OP3_404_76218_20141119_232010_outLine +BABEL_OP3_404_76499_20141117_005535_inLine +BABEL_OP3_404_76499_20141117_005535_outLine +BABEL_OP3_404_76756_20141120_014151_inLine +BABEL_OP3_404_76756_20141120_014151_outLine +BABEL_OP3_404_77033_20150503_233304_inLine +BABEL_OP3_404_77033_20150503_233304_outLine +BABEL_OP3_404_77112_20141105_062419_inLine +BABEL_OP3_404_77112_20141105_062419_outLine +BABEL_OP3_404_77139_20141022_022951_inLine +BABEL_OP3_404_77139_20141022_022951_outLine +BABEL_OP3_404_77744_20141103_034001_inLine +BABEL_OP3_404_77744_20141103_034001_outLine +BABEL_OP3_404_78116_20141128_231322_inLine +BABEL_OP3_404_78116_20141128_231322_outLine +BABEL_OP3_404_78194_20141019_052949_inLine +BABEL_OP3_404_78194_20141019_052949_outLine +BABEL_OP3_404_78398_20141022_235403_inLine +BABEL_OP3_404_78398_20141022_235403_outLine +BABEL_OP3_404_78544_20141130_192658_inLine +BABEL_OP3_404_78544_20141130_192658_outLine +BABEL_OP3_404_78604_20141022_164244_inLine +BABEL_OP3_404_78604_20141022_164244_outLine +BABEL_OP3_404_78630_20141025_220904_inLine +BABEL_OP3_404_78630_20141025_220904_outLine +BABEL_OP3_404_78743_20141202_001451_inLine +BABEL_OP3_404_78743_20141202_001451_outLine +BABEL_OP3_404_78943_20141025_004503_inLine +BABEL_OP3_404_78943_20141025_004503_outLine +BABEL_OP3_404_79028_20150213_002817_inLine +BABEL_OP3_404_79028_20150213_002817_outLine +BABEL_OP3_404_79107_20150614_013139_inLine 
+BABEL_OP3_404_79107_20150614_013139_outLine +BABEL_OP3_404_79129_20141110_183305_inLine +BABEL_OP3_404_79129_20141110_183305_outLine +BABEL_OP3_404_79367_20141008_232735_inLine +BABEL_OP3_404_79367_20141008_232735_outLine +BABEL_OP3_404_79451_20141031_025601_inLine +BABEL_OP3_404_79451_20141031_025601_outLine +BABEL_OP3_404_79995_20141201_013108_inLine +BABEL_OP3_404_79995_20141201_013108_outLine +BABEL_OP3_404_80622_20141119_054644_inLine +BABEL_OP3_404_80622_20141119_054644_outLine +BABEL_OP3_404_80721_20141201_013404_inLine +BABEL_OP3_404_80721_20141201_013404_outLine +BABEL_OP3_404_81287_20141130_024232_inLine +BABEL_OP3_404_81287_20141130_024232_outLine +BABEL_OP3_404_81392_20141130_022613_inLine +BABEL_OP3_404_81392_20141130_022613_outLine +BABEL_OP3_404_81392_20141130_023326_inLine +BABEL_OP3_404_81392_20141130_023326_outLine +BABEL_OP3_404_81404_20141104_055546_inLine +BABEL_OP3_404_81404_20141104_055546_outLine +BABEL_OP3_404_81433_20141119_073031_inLine +BABEL_OP3_404_81433_20141119_073031_outLine +BABEL_OP3_404_81435_20141128_235050_inLine +BABEL_OP3_404_81435_20141128_235050_outLine +BABEL_OP3_404_81622_20141129_212937_inLine +BABEL_OP3_404_81622_20141129_212937_outLine +BABEL_OP3_404_81810_20141126_051528_inLine +BABEL_OP3_404_81810_20141126_051528_outLine +BABEL_OP3_404_82030_20150517_193420_inLine +BABEL_OP3_404_82030_20150517_193420_outLine +BABEL_OP3_404_82035_20141119_063429_inLine +BABEL_OP3_404_82035_20141119_063429_outLine +BABEL_OP3_404_82138_20141116_234338_inLine +BABEL_OP3_404_82138_20141116_234338_outLine +BABEL_OP3_404_82140_20141117_021927_inLine +BABEL_OP3_404_82140_20141117_021927_outLine +BABEL_OP3_404_82145_20150502_232707_inLine +BABEL_OP3_404_82145_20150502_232707_outLine +BABEL_OP3_404_82391_20141128_063323_inLine +BABEL_OP3_404_82391_20141128_063323_outLine +BABEL_OP3_404_82496_20141009_062659_inLine +BABEL_OP3_404_82496_20141009_062659_outLine +BABEL_OP3_404_82622_20141008_042910_inLine 
+BABEL_OP3_404_82622_20141008_042910_outLine +BABEL_OP3_404_82904_20150523_231750_inLine +BABEL_OP3_404_82904_20150523_231750_outLine +BABEL_OP3_404_83455_20141112_000643_inLine +BABEL_OP3_404_83455_20141112_000643_outLine +BABEL_OP3_404_83783_20141115_005815_inLine +BABEL_OP3_404_83783_20141115_005815_outLine +BABEL_OP3_404_83935_20141201_214527_inLine +BABEL_OP3_404_83935_20141201_214527_outLine +BABEL_OP3_404_84327_20141130_185722_inLine +BABEL_OP3_404_84327_20141130_185722_outLine +BABEL_OP3_404_84408_20141105_182756_inLine +BABEL_OP3_404_84408_20141105_182756_outLine +BABEL_OP3_404_84469_20141130_030156_inLine +BABEL_OP3_404_84469_20141130_030156_outLine +BABEL_OP3_404_84547_20141022_025230_inLine +BABEL_OP3_404_84547_20141022_025230_outLine +BABEL_OP3_404_84605_20141026_234127_inLine +BABEL_OP3_404_84605_20141026_234127_outLine +BABEL_OP3_404_84611_20141024_005352_inLine +BABEL_OP3_404_84611_20141024_005352_outLine +BABEL_OP3_404_84768_20141012_183416_inLine +BABEL_OP3_404_84768_20141012_183416_outLine +BABEL_OP3_404_84823_20141201_061552_inLine +BABEL_OP3_404_84823_20141201_061552_outLine +BABEL_OP3_404_84936_20141130_025359_inLine +BABEL_OP3_404_84936_20141130_025359_outLine +BABEL_OP3_404_85647_20141111_231451_inLine +BABEL_OP3_404_85647_20141111_231451_outLine +BABEL_OP3_404_86321_20141127_025302_inLine +BABEL_OP3_404_86321_20141127_025302_outLine +BABEL_OP3_404_86433_20141201_005203_inLine +BABEL_OP3_404_86433_20141201_005203_outLine +BABEL_OP3_404_86433_20141201_010208_inLine +BABEL_OP3_404_86433_20141201_010208_outLine +BABEL_OP3_404_86433_20141201_011757_inLine +BABEL_OP3_404_86433_20141201_011757_outLine +BABEL_OP3_404_86467_20141019_022847_inLine +BABEL_OP3_404_86467_20141019_022847_outLine +BABEL_OP3_404_86467_20141019_024243_inLine +BABEL_OP3_404_86467_20141019_024243_outLine +BABEL_OP3_404_86557_20141021_041027_inLine +BABEL_OP3_404_86557_20141021_041027_outLine +BABEL_OP3_404_86676_20141125_223657_inLine 
+BABEL_OP3_404_86676_20141125_223657_outLine +BABEL_OP3_404_86952_20141008_194318_inLine +BABEL_OP3_404_86952_20141008_194318_outLine +BABEL_OP3_404_87073_20141007_223759_inLine +BABEL_OP3_404_87073_20141007_223759_outLine +BABEL_OP3_404_87280_20141201_232519_inLine +BABEL_OP3_404_87280_20141201_232519_outLine +BABEL_OP3_404_87693_20141105_002311_inLine +BABEL_OP3_404_87693_20141105_002311_outLine +BABEL_OP3_404_88601_20141115_021916_inLine +BABEL_OP3_404_88601_20141115_021916_outLine +BABEL_OP3_404_88601_20141115_024632_inLine +BABEL_OP3_404_88601_20141115_024632_outLine +BABEL_OP3_404_88686_20141019_023828_inLine +BABEL_OP3_404_88686_20141019_023828_outLine +BABEL_OP3_404_88925_20141201_043633_inLine +BABEL_OP3_404_88925_20141201_043633_outLine +BABEL_OP3_404_88982_20141106_212556_inLine +BABEL_OP3_404_88982_20141106_212556_outLine +BABEL_OP3_404_89358_20141119_055634_inLine +BABEL_OP3_404_89358_20141119_055634_outLine +BABEL_OP3_404_89695_20141115_212119_inLine +BABEL_OP3_404_89695_20141115_212119_outLine +BABEL_OP3_404_89794_20141130_055655_inLine +BABEL_OP3_404_89794_20141130_055655_outLine +BABEL_OP3_404_89877_20141120_061055_inLine +BABEL_OP3_404_89877_20141120_061055_outLine +BABEL_OP3_404_90417_20150611_052409_inLine +BABEL_OP3_404_90417_20150611_052409_outLine +BABEL_OP3_404_90737_20141116_233627_inLine +BABEL_OP3_404_90737_20141116_233627_outLine +BABEL_OP3_404_90739_20141116_034352_inLine +BABEL_OP3_404_90739_20141116_034352_outLine +BABEL_OP3_404_90777_20141115_012657_inLine +BABEL_OP3_404_90777_20141115_012657_outLine +BABEL_OP3_404_90935_20141104_195620_inLine +BABEL_OP3_404_90935_20141104_195620_outLine +BABEL_OP3_404_91080_20141119_062453_inLine +BABEL_OP3_404_91080_20141119_062453_outLine +BABEL_OP3_404_91125_20141010_234127_inLine +BABEL_OP3_404_91125_20141010_234127_outLine +BABEL_OP3_404_91336_20141110_011202_inLine +BABEL_OP3_404_91336_20141110_011202_outLine +BABEL_OP3_404_92065_20141201_041019_inLine 
+BABEL_OP3_404_92065_20141201_041019_outLine +BABEL_OP3_404_92077_20150610_053919_inLine +BABEL_OP3_404_92077_20150610_053919_outLine +BABEL_OP3_404_92459_20141026_000227_inLine +BABEL_OP3_404_92459_20141026_000227_outLine +BABEL_OP3_404_92459_20141026_000839_inLine +BABEL_OP3_404_92459_20141026_000839_outLine +BABEL_OP3_404_92509_20141020_034921_inLine +BABEL_OP3_404_92509_20141020_034921_outLine +BABEL_OP3_404_92527_20141115_024550_inLine +BABEL_OP3_404_92527_20141115_024550_outLine +BABEL_OP3_404_92809_20141009_080406_inLine +BABEL_OP3_404_92809_20141009_080406_outLine +BABEL_OP3_404_92886_20141103_032433_inLine +BABEL_OP3_404_92886_20141103_032433_outLine +BABEL_OP3_404_92941_20141027_175733_inLine +BABEL_OP3_404_92941_20141027_175733_outLine +BABEL_OP3_404_92941_20141027_180356_inLine +BABEL_OP3_404_92941_20141027_180356_outLine +BABEL_OP3_404_93224_20141119_210156_inLine +BABEL_OP3_404_93224_20141119_210156_outLine +BABEL_OP3_404_93411_20141119_193212_inLine +BABEL_OP3_404_93411_20141119_193212_outLine +BABEL_OP3_404_93861_20141111_181324_inLine +BABEL_OP3_404_93861_20141111_181324_outLine +BABEL_OP3_404_93946_20141129_015946_inLine +BABEL_OP3_404_93946_20141129_015946_outLine +BABEL_OP3_404_93964_20141111_213251_inLine +BABEL_OP3_404_93964_20141111_213251_outLine +BABEL_OP3_404_94141_20150516_175827_inLine +BABEL_OP3_404_94141_20150516_175827_outLine +BABEL_OP3_404_94253_20141029_184039_inLine +BABEL_OP3_404_94253_20141029_184039_outLine +BABEL_OP3_404_94409_20141117_003829_inLine +BABEL_OP3_404_94409_20141117_003829_outLine +BABEL_OP3_404_94666_20141119_231115_inLine +BABEL_OP3_404_94666_20141119_231115_outLine +BABEL_OP3_404_94745_20141201_033432_inLine +BABEL_OP3_404_94745_20141201_033432_outLine +BABEL_OP3_404_94923_20141116_230334_inLine +BABEL_OP3_404_94923_20141116_230334_outLine +BABEL_OP3_404_94978_20150528_024921_inLine +BABEL_OP3_404_94978_20150528_024921_outLine +BABEL_OP3_404_95294_20141129_062228_inLine 
+BABEL_OP3_404_95294_20141129_062228_outLine +BABEL_OP3_404_95467_20150612_031400_inLine +BABEL_OP3_404_95467_20150612_031400_outLine +BABEL_OP3_404_95490_20141021_050016_inLine +BABEL_OP3_404_95490_20141021_050016_outLine +BABEL_OP3_404_95663_20141022_043520_inLine +BABEL_OP3_404_95663_20141022_043520_outLine +BABEL_OP3_404_95670_20141019_224431_inLine +BABEL_OP3_404_95670_20141019_224431_outLine +BABEL_OP3_404_95677_20150220_205948_inLine +BABEL_OP3_404_95677_20150220_205948_outLine +BABEL_OP3_404_95942_20150514_235402_inLine +BABEL_OP3_404_95942_20150514_235402_outLine +BABEL_OP3_404_96088_20150524_191148_inLine +BABEL_OP3_404_96088_20150524_191148_outLine +BABEL_OP3_404_96190_20141107_040725_inLine +BABEL_OP3_404_96190_20141107_040725_outLine +BABEL_OP3_404_96405_20141026_045704_inLine +BABEL_OP3_404_96405_20141026_045704_outLine +BABEL_OP3_404_96820_20141109_204448_inLine +BABEL_OP3_404_96820_20141109_204448_outLine +BABEL_OP3_404_96842_20150610_040559_inLine +BABEL_OP3_404_96842_20150610_040559_outLine +BABEL_OP3_404_96910_20141026_195400_inLine +BABEL_OP3_404_96910_20141026_195400_outLine +BABEL_OP3_404_96934_20141025_223703_inLine +BABEL_OP3_404_96934_20141025_223703_outLine +BABEL_OP3_404_96934_20141025_225156_inLine +BABEL_OP3_404_96934_20141025_225156_outLine +BABEL_OP3_404_96985_20141013_053332_inLine +BABEL_OP3_404_96985_20141013_053332_outLine +BABEL_OP3_404_97363_20141120_034843_inLine +BABEL_OP3_404_97363_20141120_034843_outLine +BABEL_OP3_404_97570_20141120_050344_inLine +BABEL_OP3_404_97570_20141120_050344_outLine +BABEL_OP3_404_98311_20141022_042555_inLine +BABEL_OP3_404_98311_20141022_042555_outLine +BABEL_OP3_404_98356_20141123_013523_inLine +BABEL_OP3_404_98356_20141123_013523_outLine +BABEL_OP3_404_98390_20141014_024134_inLine +BABEL_OP3_404_98390_20141014_024134_outLine +BABEL_OP3_404_98565_20150217_195949_inLine +BABEL_OP3_404_98565_20150217_195949_outLine +BABEL_OP3_404_98580_20141130_022138_inLine 
+BABEL_OP3_404_98580_20141130_022138_outLine +BABEL_OP3_404_98909_20141027_032903_inLine +BABEL_OP3_404_98909_20141027_032903_outLine +BABEL_OP3_404_99516_20141019_071828_inLine +BABEL_OP3_404_99516_20141019_071828_outLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/sub-train.list b/egs/babel/s5d/conf/lists/404-georgian/sub-train.list new file mode 100644 index 00000000000..a042ee569ef --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/sub-train.list @@ -0,0 +1,124 @@ +BABEL_OP3_404_11663_20141118_032146_inLine +BABEL_OP3_404_11663_20141118_032146_outLine +BABEL_OP3_404_12242_20141028_021853_inLine +BABEL_OP3_404_12242_20141028_021853_outLine +BABEL_OP3_404_13178_20141129_192909_inLine +BABEL_OP3_404_13178_20141129_192909_outLine +BABEL_OP3_404_14137_20141025_202817_inLine +BABEL_OP3_404_14137_20141025_202817_outLine +BABEL_OP3_404_14875_20141026_230227_inLine +BABEL_OP3_404_14875_20141026_230227_outLine +BABEL_OP3_404_15869_20150218_225936_inLine +BABEL_OP3_404_15869_20150218_225936_outLine +BABEL_OP3_404_17113_20150611_050102_inLine +BABEL_OP3_404_17113_20150611_050102_outLine +BABEL_OP3_404_23505_20141021_032033_inLine +BABEL_OP3_404_23505_20141021_032033_outLine +BABEL_OP3_404_24470_20141111_184651_inLine +BABEL_OP3_404_24470_20141111_184651_outLine +BABEL_OP3_404_24470_20141111_190229_inLine +BABEL_OP3_404_24470_20141111_190229_outLine +BABEL_OP3_404_24679_20141018_015615_inLine +BABEL_OP3_404_24679_20141018_015615_outLine +BABEL_OP3_404_26388_20141026_014207_inLine +BABEL_OP3_404_26388_20141026_014207_outLine +BABEL_OP3_404_27042_20141201_215107_inLine +BABEL_OP3_404_27042_20141201_215107_outLine +BABEL_OP3_404_28538_20141119_005526_inLine +BABEL_OP3_404_28538_20141119_005526_outLine +BABEL_OP3_404_29208_20141106_013309_inLine +BABEL_OP3_404_29208_20141106_013309_outLine +BABEL_OP3_404_30461_20150620_020316_inLine +BABEL_OP3_404_30461_20150620_020316_outLine +BABEL_OP3_404_31979_20141106_000523_inLine +BABEL_OP3_404_31979_20141106_000523_outLine 
+BABEL_OP3_404_31992_20141014_221817_inLine +BABEL_OP3_404_31992_20141014_221817_outLine +BABEL_OP3_404_37064_20141102_063308_inLine +BABEL_OP3_404_37064_20141102_063308_outLine +BABEL_OP3_404_37281_20141119_053453_inLine +BABEL_OP3_404_37281_20141119_053453_outLine +BABEL_OP3_404_37853_20150602_030625_inLine +BABEL_OP3_404_37853_20150602_030625_outLine +BABEL_OP3_404_40713_20141028_221207_inLine +BABEL_OP3_404_40713_20141028_221207_outLine +BABEL_OP3_404_41680_20141012_040411_inLine +BABEL_OP3_404_41680_20141012_040411_outLine +BABEL_OP3_404_41920_20141008_040539_inLine +BABEL_OP3_404_41920_20141008_040539_outLine +BABEL_OP3_404_42877_20150212_052937_inLine +BABEL_OP3_404_42877_20150212_052937_outLine +BABEL_OP3_404_45121_20150609_055234_inLine +BABEL_OP3_404_45121_20150609_055234_outLine +BABEL_OP3_404_46169_20141130_224339_inLine +BABEL_OP3_404_46169_20141130_224339_outLine +BABEL_OP3_404_46625_20141011_040505_inLine +BABEL_OP3_404_46625_20141011_040505_outLine +BABEL_OP3_404_46681_20141021_040451_inLine +BABEL_OP3_404_46681_20141021_040451_outLine +BABEL_OP3_404_47270_20150512_053415_inLine +BABEL_OP3_404_47270_20150512_053415_outLine +BABEL_OP3_404_48844_20141020_065414_inLine +BABEL_OP3_404_48844_20141020_065414_outLine +BABEL_OP3_404_49768_20141026_022902_inLine +BABEL_OP3_404_49768_20141026_022902_outLine +BABEL_OP3_404_50175_20141021_025726_inLine +BABEL_OP3_404_50175_20141021_025726_outLine +BABEL_OP3_404_52301_20141009_051739_inLine +BABEL_OP3_404_52301_20141009_051739_outLine +BABEL_OP3_404_52301_20141009_054049_inLine +BABEL_OP3_404_52301_20141009_054049_outLine +BABEL_OP3_404_52490_20141016_020323_inLine +BABEL_OP3_404_52490_20141016_020323_outLine +BABEL_OP3_404_56213_20141201_000837_inLine +BABEL_OP3_404_56213_20141201_000837_outLine +BABEL_OP3_404_58103_20141030_002209_inLine +BABEL_OP3_404_58103_20141030_002209_outLine +BABEL_OP3_404_59078_20141111_004941_inLine +BABEL_OP3_404_59078_20141111_004941_outLine 
+BABEL_OP3_404_61225_20141009_174003_inLine +BABEL_OP3_404_61225_20141009_174003_outLine +BABEL_OP3_404_63220_20141127_033605_inLine +BABEL_OP3_404_63220_20141127_033605_outLine +BABEL_OP3_404_64494_20141026_203549_inLine +BABEL_OP3_404_64494_20141026_203549_outLine +BABEL_OP3_404_64768_20141027_201818_inLine +BABEL_OP3_404_64768_20141027_201818_outLine +BABEL_OP3_404_66916_20141022_000731_inLine +BABEL_OP3_404_66916_20141022_000731_outLine +BABEL_OP3_404_67401_20141109_211809_inLine +BABEL_OP3_404_67401_20141109_211809_outLine +BABEL_OP3_404_68059_20141109_052011_inLine +BABEL_OP3_404_68059_20141109_052011_outLine +BABEL_OP3_404_68068_20141201_054518_inLine +BABEL_OP3_404_68068_20141201_054518_outLine +BABEL_OP3_404_68384_20141130_035214_inLine +BABEL_OP3_404_68384_20141130_035214_outLine +BABEL_OP3_404_68627_20141105_190511_inLine +BABEL_OP3_404_68627_20141105_190511_outLine +BABEL_OP3_404_72844_20141007_033837_inLine +BABEL_OP3_404_72844_20141007_033837_outLine +BABEL_OP3_404_73837_20141026_191037_inLine +BABEL_OP3_404_73837_20141026_191037_outLine +BABEL_OP3_404_78511_20141201_003606_inLine +BABEL_OP3_404_78511_20141201_003606_outLine +BABEL_OP3_404_79139_20141117_054733_inLine +BABEL_OP3_404_79139_20141117_054733_outLine +BABEL_OP3_404_81971_20141022_025641_inLine +BABEL_OP3_404_81971_20141022_025641_outLine +BABEL_OP3_404_83062_20150523_220236_inLine +BABEL_OP3_404_83062_20150523_220236_outLine +BABEL_OP3_404_83775_20141030_230742_inLine +BABEL_OP3_404_83775_20141030_230742_outLine +BABEL_OP3_404_84339_20150502_014143_inLine +BABEL_OP3_404_84339_20150502_014143_outLine +BABEL_OP3_404_86191_20141027_013544_inLine +BABEL_OP3_404_86191_20141027_013544_outLine +BABEL_OP3_404_86888_20141119_022459_inLine +BABEL_OP3_404_86888_20141119_022459_outLine +BABEL_OP3_404_95966_20141129_060246_inLine +BABEL_OP3_404_95966_20141129_060246_outLine +BABEL_OP3_404_97461_20141118_230730_inLine +BABEL_OP3_404_97461_20141118_230730_outLine 
+BABEL_OP3_404_99487_20141021_053024_inLine +BABEL_OP3_404_99487_20141021_053024_outLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/sub-train.untranscribed.list b/egs/babel/s5d/conf/lists/404-georgian/sub-train.untranscribed.list new file mode 100644 index 00000000000..32d863a65ad --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/sub-train.untranscribed.list @@ -0,0 +1,929 @@ +BABEL_OP3_404_10019_20141101_191932_inLine +BABEL_OP3_404_10019_20141101_191932_outLine +BABEL_OP3_404_10058_20150526_034808_inLine +BABEL_OP3_404_10411_20150611_172027_inLine +BABEL_OP3_404_10411_20150611_172027_outLine +BABEL_OP3_404_10416_20141117_064700_inLine +BABEL_OP3_404_10416_20141117_064700_outLine +BABEL_OP3_404_10647_20150514_001106_inLine +BABEL_OP3_404_10647_20150514_001106_outLine +BABEL_OP3_404_10938_20141030_023413_inLine +BABEL_OP3_404_10938_20141030_023413_outLine +BABEL_OP3_404_10974_20141119_205506_inLine +BABEL_OP3_404_10974_20141119_205506_outLine +BABEL_OP3_404_11352_20150513_002642_inLine +BABEL_OP3_404_11352_20150513_002642_outLine +BABEL_OP3_404_11673_20141023_035438_inLine +BABEL_OP3_404_11673_20141023_035438_outLine +BABEL_OP3_404_11681_20141107_190101_inLine +BABEL_OP3_404_11681_20141107_190101_outLine +BABEL_OP3_404_11859_20150611_041737_inLine +BABEL_OP3_404_11859_20150611_041737_outLine +BABEL_OP3_404_12220_20141116_205911_inLine +BABEL_OP3_404_12220_20141116_205911_outLine +BABEL_OP3_404_12609_20150524_172934_inLine +BABEL_OP3_404_12609_20150524_172934_outLine +BABEL_OP3_404_13030_20141101_200709_inLine +BABEL_OP3_404_13030_20141101_200709_outLine +BABEL_OP3_404_13126_20150524_221540_inLine +BABEL_OP3_404_13126_20150524_221540_outLine +BABEL_OP3_404_13324_20141022_200257_inLine +BABEL_OP3_404_13324_20141022_200257_outLine +BABEL_OP3_404_13664_20141012_013523_inLine +BABEL_OP3_404_13664_20141012_013523_outLine +BABEL_OP3_404_13709_20150512_015216_inLine +BABEL_OP3_404_13709_20150512_015216_outLine +BABEL_OP3_404_14158_20141130_030130_inLine 
+BABEL_OP3_404_14158_20141130_030130_outLine +BABEL_OP3_404_14229_20141029_200136_inLine +BABEL_OP3_404_14229_20141029_200136_outLine +BABEL_OP3_404_14237_20141006_171921_inLine +BABEL_OP3_404_14237_20141006_171921_outLine +BABEL_OP3_404_14440_20141127_213106_inLine +BABEL_OP3_404_14440_20141127_213106_outLine +BABEL_OP3_404_14807_20141110_231934_inLine +BABEL_OP3_404_14807_20141110_231934_outLine +BABEL_OP3_404_14899_20141022_202217_inLine +BABEL_OP3_404_14899_20141022_202217_outLine +BABEL_OP3_404_14929_20141129_192841_inLine +BABEL_OP3_404_14929_20141129_192841_outLine +BABEL_OP3_404_15024_20141118_234824_inLine +BABEL_OP3_404_15024_20141118_234824_outLine +BABEL_OP3_404_15042_20150506_232829_inLine +BABEL_OP3_404_15042_20150506_232829_outLine +BABEL_OP3_404_15382_20141130_213942_inLine +BABEL_OP3_404_15382_20141130_213942_outLine +BABEL_OP3_404_15535_20141129_021659_inLine +BABEL_OP3_404_15535_20141129_021659_outLine +BABEL_OP3_404_15638_20141127_220502_outLine +BABEL_OP3_404_15848_20141006_231138_inLine +BABEL_OP3_404_15848_20141006_231138_outLine +BABEL_OP3_404_15902_20141020_173105_outLine +BABEL_OP3_404_16149_20141010_173548_inLine +BABEL_OP3_404_16149_20141010_173548_outLine +BABEL_OP3_404_16467_20141130_014316_inLine +BABEL_OP3_404_16467_20141130_014316_outLine +BABEL_OP3_404_16467_20141130_015010_inLine +BABEL_OP3_404_16467_20141130_015010_outLine +BABEL_OP3_404_16475_20141116_052010_outLine +BABEL_OP3_404_16601_20141201_041704_inLine +BABEL_OP3_404_16601_20141201_041704_outLine +BABEL_OP3_404_17280_20141103_190330_inLine +BABEL_OP3_404_17280_20141103_190330_outLine +BABEL_OP3_404_17320_20150524_213213_inLine +BABEL_OP3_404_17320_20150524_213213_outLine +BABEL_OP3_404_17420_20150503_201902_inLine +BABEL_OP3_404_17420_20150503_201902_outLine +BABEL_OP3_404_17420_20150527_025815_inLine +BABEL_OP3_404_17420_20150527_025815_outLine +BABEL_OP3_404_17420_20150527_034621_inLine +BABEL_OP3_404_17420_20150527_034621_outLine 
+BABEL_OP3_404_17520_20141113_032534_inLine +BABEL_OP3_404_17567_20141117_182919_inLine +BABEL_OP3_404_17567_20141117_182919_outLine +BABEL_OP3_404_17573_20141129_035040_inLine +BABEL_OP3_404_17573_20141129_035040_outLine +BABEL_OP3_404_17615_20141201_025917_inLine +BABEL_OP3_404_17615_20141201_025917_outLine +BABEL_OP3_404_17890_20141128_040046_inLine +BABEL_OP3_404_17890_20141128_040046_outLine +BABEL_OP3_404_17923_20141022_231429_outLine +BABEL_OP3_404_18118_20150503_165936_inLine +BABEL_OP3_404_18118_20150503_165936_outLine +BABEL_OP3_404_18291_20150611_062705_outLine +BABEL_OP3_404_18291_20150611_063700_outLine +BABEL_OP3_404_18766_20150610_064349_inLine +BABEL_OP3_404_19120_20150525_014657_inLine +BABEL_OP3_404_19120_20150525_014657_outLine +BABEL_OP3_404_19120_20150525_015635_inLine +BABEL_OP3_404_19120_20150525_015635_outLine +BABEL_OP3_404_19134_20141120_053128_inLine +BABEL_OP3_404_19134_20141120_053128_outLine +BABEL_OP3_404_19703_20141027_004315_inLine +BABEL_OP3_404_19703_20141027_004315_outLine +BABEL_OP3_404_19877_20150506_202237_outLine +BABEL_OP3_404_20133_20141010_195231_inLine +BABEL_OP3_404_20133_20141010_195231_outLine +BABEL_OP3_404_20454_20150218_171143_inLine +BABEL_OP3_404_20454_20150218_171143_outLine +BABEL_OP3_404_20985_20141126_183236_inLine +BABEL_OP3_404_20985_20141126_183236_outLine +BABEL_OP3_404_21004_20141201_035831_inLine +BABEL_OP3_404_21004_20141201_035831_outLine +BABEL_OP3_404_21159_20150615_021612_inLine +BABEL_OP3_404_21435_20150523_030702_inLine +BABEL_OP3_404_21435_20150523_030702_outLine +BABEL_OP3_404_21581_20141101_011021_inLine +BABEL_OP3_404_21581_20141101_011021_outLine +BABEL_OP3_404_21807_20141112_225225_outLine +BABEL_OP3_404_22280_20141111_020522_inLine +BABEL_OP3_404_22280_20141111_020522_outLine +BABEL_OP3_404_22591_20150217_220714_inLine +BABEL_OP3_404_23046_20141031_030755_inLine +BABEL_OP3_404_23046_20141031_030755_outLine +BABEL_OP3_404_23731_20141130_033602_inLine 
+BABEL_OP3_404_23731_20141130_033602_outLine +BABEL_OP3_404_23980_20141106_225951_inLine +BABEL_OP3_404_23980_20141106_225951_outLine +BABEL_OP3_404_24209_20150212_224614_inLine +BABEL_OP3_404_24239_20150517_203015_inLine +BABEL_OP3_404_24270_20141111_012902_inLine +BABEL_OP3_404_24270_20141111_012902_outLine +BABEL_OP3_404_24323_20141117_020615_outLine +BABEL_OP3_404_24501_20150522_030231_inLine +BABEL_OP3_404_24532_20141007_211325_inLine +BABEL_OP3_404_24532_20141007_211325_outLine +BABEL_OP3_404_24586_20150524_190657_inLine +BABEL_OP3_404_24586_20150524_190657_outLine +BABEL_OP3_404_24589_20141031_020641_inLine +BABEL_OP3_404_24589_20141031_020641_outLine +BABEL_OP3_404_24590_20141116_230233_inLine +BABEL_OP3_404_24590_20141116_230233_outLine +BABEL_OP3_404_24982_20141102_021352_inLine +BABEL_OP3_404_24982_20141102_021352_outLine +BABEL_OP3_404_25068_20150206_022730_outLine +BABEL_OP3_404_25085_20150611_040906_inLine +BABEL_OP3_404_25085_20150611_040906_outLine +BABEL_OP3_404_25412_20141120_031532_inLine +BABEL_OP3_404_25412_20141120_031532_outLine +BABEL_OP3_404_25496_20150613_034126_inLine +BABEL_OP3_404_25496_20150613_034126_outLine +BABEL_OP3_404_26398_20150527_032152_inLine +BABEL_OP3_404_26398_20150527_032152_outLine +BABEL_OP3_404_26478_20150617_004029_inLine +BABEL_OP3_404_26478_20150617_004029_outLine +BABEL_OP3_404_26836_20141102_024528_inLine +BABEL_OP3_404_26836_20141102_024528_outLine +BABEL_OP3_404_27203_20141119_185720_inLine +BABEL_OP3_404_27203_20141119_185720_outLine +BABEL_OP3_404_27203_20141119_191138_inLine +BABEL_OP3_404_27203_20141119_191138_outLine +BABEL_OP3_404_27590_20141128_051454_inLine +BABEL_OP3_404_28280_20150619_024509_inLine +BABEL_OP3_404_28280_20150619_024509_outLine +BABEL_OP3_404_28280_20150619_025848_inLine +BABEL_OP3_404_28280_20150619_025848_outLine +BABEL_OP3_404_28303_20141028_182204_inLine +BABEL_OP3_404_28303_20141028_182204_outLine +BABEL_OP3_404_28522_20141124_222758_inLine 
+BABEL_OP3_404_28522_20141124_222758_outLine +BABEL_OP3_404_28600_20141201_223206_inLine +BABEL_OP3_404_28600_20141201_223206_outLine +BABEL_OP3_404_28871_20141019_181913_inLine +BABEL_OP3_404_28871_20141019_181913_outLine +BABEL_OP3_404_28945_20141104_060349_outLine +BABEL_OP3_404_29039_20141128_035839_inLine +BABEL_OP3_404_29039_20141128_035839_outLine +BABEL_OP3_404_29076_20141109_215142_inLine +BABEL_OP3_404_29076_20141109_215142_outLine +BABEL_OP3_404_29230_20150611_051340_inLine +BABEL_OP3_404_29230_20150611_051340_outLine +BABEL_OP3_404_29439_20150524_201524_inLine +BABEL_OP3_404_29439_20150524_201524_outLine +BABEL_OP3_404_30098_20150610_150504_inLine +BABEL_OP3_404_30098_20150610_150504_outLine +BABEL_OP3_404_30432_20141126_052839_inLine +BABEL_OP3_404_30432_20141126_052839_outLine +BABEL_OP3_404_30497_20150525_194737_inLine +BABEL_OP3_404_30497_20150525_194737_outLine +BABEL_OP3_404_30645_20141019_220859_inLine +BABEL_OP3_404_30653_20150514_014515_inLine +BABEL_OP3_404_31267_20150615_011004_outLine +BABEL_OP3_404_31484_20141122_232804_inLine +BABEL_OP3_404_31484_20141122_232804_outLine +BABEL_OP3_404_31624_20141105_214349_inLine +BABEL_OP3_404_31624_20141105_214349_outLine +BABEL_OP3_404_31919_20150526_220911_inLine +BABEL_OP3_404_31919_20150526_220911_outLine +BABEL_OP3_404_32122_20141115_022841_inLine +BABEL_OP3_404_32122_20141115_022841_outLine +BABEL_OP3_404_32287_20150210_060823_inLine +BABEL_OP3_404_32287_20150210_060823_outLine +BABEL_OP3_404_32630_20150609_012137_inLine +BABEL_OP3_404_32630_20150609_012137_outLine +BABEL_OP3_404_32708_20141106_032826_inLine +BABEL_OP3_404_32708_20141106_032826_outLine +BABEL_OP3_404_32727_20141128_203500_inLine +BABEL_OP3_404_32727_20141128_203500_outLine +BABEL_OP3_404_32727_20141128_204751_inLine +BABEL_OP3_404_32727_20141128_204751_outLine +BABEL_OP3_404_32959_20141201_005331_inLine +BABEL_OP3_404_32959_20141201_005331_outLine +BABEL_OP3_404_32998_20141112_054111_inLine 
+BABEL_OP3_404_33355_20141019_032024_inLine +BABEL_OP3_404_33355_20141019_032024_outLine +BABEL_OP3_404_33355_20141019_034109_inLine +BABEL_OP3_404_33355_20141019_034109_outLine +BABEL_OP3_404_33704_20141207_073436_inLine +BABEL_OP3_404_33704_20141207_073436_outLine +BABEL_OP3_404_34328_20141119_054513_outLine +BABEL_OP3_404_34328_20141119_055432_outLine +BABEL_OP3_404_34679_20141102_052808_inLine +BABEL_OP3_404_34679_20141102_052808_outLine +BABEL_OP3_404_34688_20141009_073303_inLine +BABEL_OP3_404_34688_20141009_073303_outLine +BABEL_OP3_404_34811_20141109_001009_inLine +BABEL_OP3_404_34811_20141109_001009_outLine +BABEL_OP3_404_34899_20150611_060602_outLine +BABEL_OP3_404_35008_20141201_023042_inLine +BABEL_OP3_404_35008_20141201_023042_outLine +BABEL_OP3_404_35143_20141130_181111_inLine +BABEL_OP3_404_35143_20141130_181111_outLine +BABEL_OP3_404_35181_20150526_211416_inLine +BABEL_OP3_404_35181_20150526_211416_outLine +BABEL_OP3_404_35706_20150523_015900_inLine +BABEL_OP3_404_35706_20150523_015900_outLine +BABEL_OP3_404_35786_20150604_015518_inLine +BABEL_OP3_404_35786_20150604_015518_outLine +BABEL_OP3_404_36017_20150528_192934_inLine +BABEL_OP3_404_36017_20150528_192934_outLine +BABEL_OP3_404_36039_20150526_230125_inLine +BABEL_OP3_404_36039_20150526_230125_outLine +BABEL_OP3_404_36059_20150601_023254_inLine +BABEL_OP3_404_36059_20150601_023254_outLine +BABEL_OP3_404_36059_20150601_033346_inLine +BABEL_OP3_404_36059_20150601_033346_outLine +BABEL_OP3_404_36147_20150211_013803_outLine +BABEL_OP3_404_36219_20141104_012216_inLine +BABEL_OP3_404_36219_20141104_012216_outLine +BABEL_OP3_404_36642_20150610_161207_inLine +BABEL_OP3_404_36642_20150610_161207_outLine +BABEL_OP3_404_37290_20141115_050457_inLine +BABEL_OP3_404_37290_20141115_050457_outLine +BABEL_OP3_404_37598_20141119_045926_inLine +BABEL_OP3_404_37598_20141119_045926_outLine +BABEL_OP3_404_37682_20141101_221445_inLine +BABEL_OP3_404_37682_20141101_221445_outLine 
+BABEL_OP3_404_38125_20150526_233108_inLine +BABEL_OP3_404_38125_20150526_233108_outLine +BABEL_OP3_404_38323_20150615_021843_inLine +BABEL_OP3_404_38340_20141103_231545_inLine +BABEL_OP3_404_38340_20141103_231545_outLine +BABEL_OP3_404_38554_20141010_224451_inLine +BABEL_OP3_404_38554_20141010_224451_outLine +BABEL_OP3_404_38588_20141118_163844_inLine +BABEL_OP3_404_38588_20141118_163844_outLine +BABEL_OP3_404_38664_20141030_175135_inLine +BABEL_OP3_404_38664_20141030_175135_outLine +BABEL_OP3_404_38979_20150503_202406_outLine +BABEL_OP3_404_39099_20150511_053646_outLine +BABEL_OP3_404_39307_20141022_200554_inLine +BABEL_OP3_404_39307_20141022_201758_inLine +BABEL_OP3_404_39426_20150527_181901_outLine +BABEL_OP3_404_39744_20141023_002710_inLine +BABEL_OP3_404_39893_20150611_034149_inLine +BABEL_OP3_404_39920_20150503_205354_outLine +BABEL_OP3_404_40557_20141127_200639_inLine +BABEL_OP3_404_40557_20141127_200639_outLine +BABEL_OP3_404_40939_20150210_212748_inLine +BABEL_OP3_404_40939_20150210_212748_outLine +BABEL_OP3_404_41097_20141129_055801_inLine +BABEL_OP3_404_41097_20141129_055801_outLine +BABEL_OP3_404_41100_20141021_022126_inLine +BABEL_OP3_404_41100_20141021_022126_outLine +BABEL_OP3_404_41272_20150503_232941_inLine +BABEL_OP3_404_41334_20150617_041322_inLine +BABEL_OP3_404_41400_20150515_021408_inLine +BABEL_OP3_404_41609_20141009_013405_inLine +BABEL_OP3_404_41609_20141009_013405_outLine +BABEL_OP3_404_41692_20150604_005657_inLine +BABEL_OP3_404_41692_20150604_005657_outLine +BABEL_OP3_404_41745_20141114_235452_inLine +BABEL_OP3_404_41745_20141114_235452_outLine +BABEL_OP3_404_41958_20141029_212755_inLine +BABEL_OP3_404_41958_20141029_212755_outLine +BABEL_OP3_404_42155_20141127_055149_inLine +BABEL_OP3_404_42619_20141130_012456_outLine +BABEL_OP3_404_42834_20141125_004837_inLine +BABEL_OP3_404_42834_20141125_004837_outLine +BABEL_OP3_404_42883_20150604_035732_inLine +BABEL_OP3_404_42883_20150604_035732_outLine +BABEL_OP3_404_43368_20141031_010629_inLine 
+BABEL_OP3_404_43368_20141031_010629_outLine +BABEL_OP3_404_43388_20141114_212210_inLine +BABEL_OP3_404_43388_20141114_214120_inLine +BABEL_OP3_404_43588_20150517_233637_inLine +BABEL_OP3_404_43789_20141120_011327_outLine +BABEL_OP3_404_44114_20150614_012319_inLine +BABEL_OP3_404_44114_20150614_012319_outLine +BABEL_OP3_404_44309_20150525_022635_inLine +BABEL_OP3_404_44309_20150525_022635_outLine +BABEL_OP3_404_44477_20141201_180604_inLine +BABEL_OP3_404_44477_20141201_180604_outLine +BABEL_OP3_404_44478_20150512_225118_inLine +BABEL_OP3_404_44847_20141130_221248_inLine +BABEL_OP3_404_44847_20141130_221248_outLine +BABEL_OP3_404_45106_20141119_050859_inLine +BABEL_OP3_404_45106_20141119_050859_outLine +BABEL_OP3_404_45374_20150122_014830_outLine +BABEL_OP3_404_45374_20150122_015920_outLine +BABEL_OP3_404_45459_20150525_020410_inLine +BABEL_OP3_404_45459_20150525_020410_outLine +BABEL_OP3_404_45560_20141012_030417_inLine +BABEL_OP3_404_45560_20141012_030417_outLine +BABEL_OP3_404_45699_20150205_021829_inLine +BABEL_OP3_404_45851_20150514_155157_inLine +BABEL_OP3_404_45851_20150514_155157_outLine +BABEL_OP3_404_45908_20150515_004218_outLine +BABEL_OP3_404_46268_20141019_032022_inLine +BABEL_OP3_404_46268_20141019_032022_outLine +BABEL_OP3_404_46310_20141015_051100_inLine +BABEL_OP3_404_46310_20141015_051100_outLine +BABEL_OP3_404_46315_20141129_012912_inLine +BABEL_OP3_404_46315_20141129_012912_outLine +BABEL_OP3_404_46550_20141105_072519_inLine +BABEL_OP3_404_46550_20141105_072519_outLine +BABEL_OP3_404_46688_20141015_211329_inLine +BABEL_OP3_404_46688_20141015_211329_outLine +BABEL_OP3_404_46712_20141027_224004_inLine +BABEL_OP3_404_46712_20141027_224004_outLine +BABEL_OP3_404_46881_20141012_020055_inLine +BABEL_OP3_404_46881_20141012_020055_outLine +BABEL_OP3_404_46974_20141128_055136_inLine +BABEL_OP3_404_46974_20141128_055136_outLine +BABEL_OP3_404_46976_20141107_183806_inLine +BABEL_OP3_404_46976_20141107_183806_outLine 
+BABEL_OP3_404_47156_20150625_025324_inLine +BABEL_OP3_404_47156_20150625_025324_outLine +BABEL_OP3_404_47802_20141110_200430_inLine +BABEL_OP3_404_47802_20141110_200430_outLine +BABEL_OP3_404_47823_20141201_044425_inLine +BABEL_OP3_404_47823_20141201_044425_outLine +BABEL_OP3_404_48016_20150615_000741_inLine +BABEL_OP3_404_48016_20150615_000741_outLine +BABEL_OP3_404_48243_20141023_200903_inLine +BABEL_OP3_404_48243_20141023_200903_outLine +BABEL_OP3_404_48610_20141013_011505_inLine +BABEL_OP3_404_48610_20141013_012904_inLine +BABEL_OP3_404_48663_20150512_202837_inLine +BABEL_OP3_404_48663_20150512_202837_outLine +BABEL_OP3_404_49197_20141117_024730_inLine +BABEL_OP3_404_49197_20141117_024730_outLine +BABEL_OP3_404_49306_20150524_003356_inLine +BABEL_OP3_404_49306_20150524_003356_outLine +BABEL_OP3_404_49630_20141128_020114_inLine +BABEL_OP3_404_49630_20141128_020114_outLine +BABEL_OP3_404_49767_20150613_050113_inLine +BABEL_OP3_404_49767_20150613_050113_outLine +BABEL_OP3_404_49775_20141011_005306_inLine +BABEL_OP3_404_49775_20141011_005306_outLine +BABEL_OP3_404_49902_20141101_175534_inLine +BABEL_OP3_404_49902_20141101_175534_outLine +BABEL_OP3_404_49907_20141103_050534_inLine +BABEL_OP3_404_49907_20141103_050534_outLine +BABEL_OP3_404_49945_20150610_154709_inLine +BABEL_OP3_404_50601_20141127_032527_inLine +BABEL_OP3_404_50601_20141127_032527_outLine +BABEL_OP3_404_50745_20150513_162805_inLine +BABEL_OP3_404_50745_20150513_162805_outLine +BABEL_OP3_404_50779_20141115_012852_inLine +BABEL_OP3_404_50779_20141115_012852_outLine +BABEL_OP3_404_50810_20141007_234432_inLine +BABEL_OP3_404_50810_20141007_234432_outLine +BABEL_OP3_404_51015_20141123_193824_inLine +BABEL_OP3_404_51015_20141123_193824_outLine +BABEL_OP3_404_51414_20150604_001601_inLine +BABEL_OP3_404_51414_20150604_001601_outLine +BABEL_OP3_404_51484_20141202_000325_inLine +BABEL_OP3_404_51484_20141202_000325_outLine +BABEL_OP3_404_51701_20150620_010924_outLine 
+BABEL_OP3_404_52070_20150620_014422_outLine +BABEL_OP3_404_52070_20150620_020559_outLine +BABEL_OP3_404_52246_20141118_035022_inLine +BABEL_OP3_404_52246_20141118_035022_outLine +BABEL_OP3_404_52246_20141118_040850_inLine +BABEL_OP3_404_52246_20141118_040850_outLine +BABEL_OP3_404_52404_20141125_004855_inLine +BABEL_OP3_404_52404_20141125_004855_outLine +BABEL_OP3_404_52725_20150522_222730_inLine +BABEL_OP3_404_52725_20150522_222730_outLine +BABEL_OP3_404_53063_20141201_005237_inLine +BABEL_OP3_404_53063_20141201_005237_outLine +BABEL_OP3_404_53072_20150518_015132_inLine +BABEL_OP3_404_53415_20150503_225920_inLine +BABEL_OP3_404_53415_20150503_225920_outLine +BABEL_OP3_404_53492_20150525_055025_inLine +BABEL_OP3_404_53492_20150525_055025_outLine +BABEL_OP3_404_53665_20150526_004549_inLine +BABEL_OP3_404_53917_20150503_205456_outLine +BABEL_OP3_404_53957_20141201_051933_inLine +BABEL_OP3_404_54104_20141008_214620_inLine +BABEL_OP3_404_54104_20141008_214620_outLine +BABEL_OP3_404_54160_20141009_180704_inLine +BABEL_OP3_404_54160_20141009_180704_outLine +BABEL_OP3_404_54160_20141009_184719_inLine +BABEL_OP3_404_54160_20141009_184719_outLine +BABEL_OP3_404_54160_20141009_185557_inLine +BABEL_OP3_404_54160_20141009_185557_outLine +BABEL_OP3_404_54405_20141117_054820_inLine +BABEL_OP3_404_54405_20141117_054820_outLine +BABEL_OP3_404_54477_20141211_033627_inLine +BABEL_OP3_404_54477_20141211_033627_outLine +BABEL_OP3_404_54744_20141015_012011_inLine +BABEL_OP3_404_54744_20141015_012011_outLine +BABEL_OP3_404_55013_20150525_222257_inLine +BABEL_OP3_404_55013_20150525_222257_outLine +BABEL_OP3_404_55259_20141029_225631_inLine +BABEL_OP3_404_55259_20141029_225631_outLine +BABEL_OP3_404_55267_20141130_212756_inLine +BABEL_OP3_404_55349_20150523_031602_inLine +BABEL_OP3_404_55349_20150523_031602_outLine +BABEL_OP3_404_56019_20150502_020750_inLine +BABEL_OP3_404_56019_20150502_020750_outLine +BABEL_OP3_404_56076_20150516_164959_inLine 
+BABEL_OP3_404_56076_20150516_164959_outLine +BABEL_OP3_404_56331_20150526_020747_inLine +BABEL_OP3_404_56331_20150526_020747_outLine +BABEL_OP3_404_56743_20141114_223719_inLine +BABEL_OP3_404_56743_20141114_223719_outLine +BABEL_OP3_404_57065_20141201_002920_inLine +BABEL_OP3_404_57219_20150618_045613_inLine +BABEL_OP3_404_57219_20150618_045613_outLine +BABEL_OP3_404_57464_20150523_224617_inLine +BABEL_OP3_404_57542_20150526_233832_inLine +BABEL_OP3_404_57542_20150526_233832_outLine +BABEL_OP3_404_57542_20150526_235003_inLine +BABEL_OP3_404_57542_20150526_235003_outLine +BABEL_OP3_404_57654_20141023_235628_inLine +BABEL_OP3_404_57654_20141023_235628_outLine +BABEL_OP3_404_57678_20141104_023128_inLine +BABEL_OP3_404_57678_20141104_023128_outLine +BABEL_OP3_404_57919_20150127_041057_inLine +BABEL_OP3_404_57919_20150127_041057_outLine +BABEL_OP3_404_58006_20150526_024205_inLine +BABEL_OP3_404_58006_20150526_024205_outLine +BABEL_OP3_404_58026_20150615_004130_inLine +BABEL_OP3_404_58026_20150615_004130_outLine +BABEL_OP3_404_58915_20150611_034220_outLine +BABEL_OP3_404_59262_20141130_212633_inLine +BABEL_OP3_404_59262_20141130_212633_outLine +BABEL_OP3_404_59307_20150504_003405_inLine +BABEL_OP3_404_59307_20150504_003405_outLine +BABEL_OP3_404_59720_20141029_204612_inLine +BABEL_OP3_404_59720_20141029_204612_outLine +BABEL_OP3_404_59864_20150602_014458_inLine +BABEL_OP3_404_60026_20141008_051633_inLine +BABEL_OP3_404_60026_20141008_051633_outLine +BABEL_OP3_404_60299_20150611_040929_inLine +BABEL_OP3_404_60310_20141130_231532_inLine +BABEL_OP3_404_60310_20141130_231532_outLine +BABEL_OP3_404_60352_20141201_060712_inLine +BABEL_OP3_404_60352_20141201_060712_outLine +BABEL_OP3_404_60352_20141201_061821_inLine +BABEL_OP3_404_60352_20141201_061821_outLine +BABEL_OP3_404_60458_20150609_021527_inLine +BABEL_OP3_404_60458_20150609_021527_outLine +BABEL_OP3_404_60474_20141029_182816_inLine +BABEL_OP3_404_60474_20141029_182816_outLine 
+BABEL_OP3_404_60477_20150613_223056_inLine +BABEL_OP3_404_60477_20150613_224002_inLine +BABEL_OP3_404_60498_20150606_022221_inLine +BABEL_OP3_404_60498_20150606_022221_outLine +BABEL_OP3_404_60626_20141028_212539_inLine +BABEL_OP3_404_60626_20141028_212539_outLine +BABEL_OP3_404_60706_20141020_215729_inLine +BABEL_OP3_404_60706_20141020_215729_outLine +BABEL_OP3_404_61167_20141030_222711_inLine +BABEL_OP3_404_61167_20141030_222711_outLine +BABEL_OP3_404_61219_20141025_193634_inLine +BABEL_OP3_404_61219_20141025_193634_outLine +BABEL_OP3_404_61678_20141019_201928_inLine +BABEL_OP3_404_61678_20141019_201928_outLine +BABEL_OP3_404_61873_20141108_214852_inLine +BABEL_OP3_404_61873_20141108_214852_outLine +BABEL_OP3_404_61888_20150504_171019_inLine +BABEL_OP3_404_61971_20150525_020101_outLine +BABEL_OP3_404_62155_20150522_032307_inLine +BABEL_OP3_404_62155_20150522_032307_outLine +BABEL_OP3_404_62286_20141105_204359_inLine +BABEL_OP3_404_62286_20141105_204359_outLine +BABEL_OP3_404_62360_20150517_033230_inLine +BABEL_OP3_404_62360_20150517_033230_outLine +BABEL_OP3_404_62456_20141108_202333_inLine +BABEL_OP3_404_62456_20141108_202333_outLine +BABEL_OP3_404_62714_20150522_011337_inLine +BABEL_OP3_404_62714_20150522_011337_outLine +BABEL_OP3_404_62724_20141130_200827_inLine +BABEL_OP3_404_62724_20141130_200827_outLine +BABEL_OP3_404_62734_20141029_221513_inLine +BABEL_OP3_404_62734_20141029_221513_outLine +BABEL_OP3_404_62852_20141013_054854_outLine +BABEL_OP3_404_63081_20141021_032233_inLine +BABEL_OP3_404_63081_20141021_032233_outLine +BABEL_OP3_404_63081_20141021_033457_inLine +BABEL_OP3_404_63081_20141021_033457_outLine +BABEL_OP3_404_63084_20141130_221452_inLine +BABEL_OP3_404_63084_20141130_221452_outLine +BABEL_OP3_404_63425_20141126_054504_inLine +BABEL_OP3_404_63481_20141020_221014_outLine +BABEL_OP3_404_63481_20141020_224225_outLine +BABEL_OP3_404_63670_20141130_050318_inLine +BABEL_OP3_404_63670_20141130_050318_outLine 
+BABEL_OP3_404_63757_20141111_180721_inLine +BABEL_OP3_404_63757_20141111_180721_outLine +BABEL_OP3_404_63906_20150525_050310_inLine +BABEL_OP3_404_63906_20150525_050310_outLine +BABEL_OP3_404_63999_20150610_041309_inLine +BABEL_OP3_404_64014_20150503_032745_inLine +BABEL_OP3_404_64014_20150503_032745_outLine +BABEL_OP3_404_64722_20150514_034208_outLine +BABEL_OP3_404_64759_20141014_044027_inLine +BABEL_OP3_404_64759_20141014_045519_inLine +BABEL_OP3_404_64796_20141022_055826_inLine +BABEL_OP3_404_64870_20141108_192546_inLine +BABEL_OP3_404_64870_20141108_192546_outLine +BABEL_OP3_404_65561_20141124_060558_inLine +BABEL_OP3_404_65561_20141124_060558_outLine +BABEL_OP3_404_65640_20150528_211835_inLine +BABEL_OP3_404_65640_20150528_211835_outLine +BABEL_OP3_404_66045_20141117_035937_inLine +BABEL_OP3_404_66045_20141117_035937_outLine +BABEL_OP3_404_66177_20150503_202932_inLine +BABEL_OP3_404_66177_20150503_202932_outLine +BABEL_OP3_404_66822_20141117_020953_inLine +BABEL_OP3_404_66822_20141117_020953_outLine +BABEL_OP3_404_66967_20141008_202611_inLine +BABEL_OP3_404_66967_20141008_202611_outLine +BABEL_OP3_404_67152_20150503_201836_inLine +BABEL_OP3_404_67152_20150503_201836_outLine +BABEL_OP3_404_67304_20150211_054416_inLine +BABEL_OP3_404_67304_20150211_054416_outLine +BABEL_OP3_404_67552_20141126_011955_inLine +BABEL_OP3_404_67552_20141126_011955_outLine +BABEL_OP3_404_67842_20141104_051753_inLine +BABEL_OP3_404_67842_20141104_051753_outLine +BABEL_OP3_404_68244_20141119_065540_inLine +BABEL_OP3_404_68244_20141119_065540_outLine +BABEL_OP3_404_68306_20141126_180315_inLine +BABEL_OP3_404_68306_20141126_180315_outLine +BABEL_OP3_404_68385_20141017_031005_inLine +BABEL_OP3_404_68385_20141017_031005_outLine +BABEL_OP3_404_68823_20150212_041147_inLine +BABEL_OP3_404_68823_20150212_041147_outLine +BABEL_OP3_404_69096_20150512_165126_inLine +BABEL_OP3_404_69096_20150512_165126_outLine +BABEL_OP3_404_69107_20141120_010459_inLine 
+BABEL_OP3_404_69107_20141120_010459_outLine +BABEL_OP3_404_69153_20141130_221412_inLine +BABEL_OP3_404_69153_20141130_221412_outLine +BABEL_OP3_404_69153_20141130_222842_inLine +BABEL_OP3_404_69153_20141130_222842_outLine +BABEL_OP3_404_69474_20141128_051323_outLine +BABEL_OP3_404_69574_20141006_023156_inLine +BABEL_OP3_404_69574_20141006_023156_outLine +BABEL_OP3_404_69578_20141117_003921_inLine +BABEL_OP3_404_69578_20141117_003921_outLine +BABEL_OP3_404_69633_20141129_051648_inLine +BABEL_OP3_404_69633_20141129_051648_outLine +BABEL_OP3_404_69636_20141126_061322_inLine +BABEL_OP3_404_69636_20141126_061322_outLine +BABEL_OP3_404_69885_20150503_011226_inLine +BABEL_OP3_404_69885_20150503_011226_outLine +BABEL_OP3_404_69937_20150620_015912_inLine +BABEL_OP3_404_69964_20150524_015556_inLine +BABEL_OP3_404_69964_20150524_015556_outLine +BABEL_OP3_404_69982_20150625_035440_outLine +BABEL_OP3_404_70121_20141104_202610_inLine +BABEL_OP3_404_70121_20141104_202610_outLine +BABEL_OP3_404_70221_20141124_052004_inLine +BABEL_OP3_404_70221_20141124_052004_outLine +BABEL_OP3_404_70282_20141111_000251_inLine +BABEL_OP3_404_70282_20141111_000251_outLine +BABEL_OP3_404_70460_20150527_015340_inLine +BABEL_OP3_404_70460_20150527_015340_outLine +BABEL_OP3_404_70526_20150501_015444_inLine +BABEL_OP3_404_70526_20150501_015444_outLine +BABEL_OP3_404_70713_20150527_013058_inLine +BABEL_OP3_404_70713_20150527_013058_outLine +BABEL_OP3_404_70794_20141021_185105_inLine +BABEL_OP3_404_70794_20141021_185105_outLine +BABEL_OP3_404_71189_20150523_005918_inLine +BABEL_OP3_404_71189_20150523_005918_outLine +BABEL_OP3_404_71263_20141119_234747_inLine +BABEL_OP3_404_71263_20141119_234747_outLine +BABEL_OP3_404_71278_20150211_052730_inLine +BABEL_OP3_404_71278_20150211_052730_outLine +BABEL_OP3_404_71278_20150211_054040_inLine +BABEL_OP3_404_71278_20150211_054040_outLine +BABEL_OP3_404_71333_20141102_023503_inLine +BABEL_OP3_404_71333_20141102_023503_outLine 
+BABEL_OP3_404_71401_20150206_070446_inLine +BABEL_OP3_404_71401_20150206_070446_outLine +BABEL_OP3_404_71404_20141023_215509_inLine +BABEL_OP3_404_71404_20141023_215509_outLine +BABEL_OP3_404_71460_20150206_015309_outLine +BABEL_OP3_404_71559_20141210_220929_outLine +BABEL_OP3_404_71566_20141130_035713_inLine +BABEL_OP3_404_71566_20141130_035713_outLine +BABEL_OP3_404_71566_20141130_040359_inLine +BABEL_OP3_404_71566_20141130_040359_outLine +BABEL_OP3_404_71780_20141105_055543_inLine +BABEL_OP3_404_71780_20141105_055543_outLine +BABEL_OP3_404_72319_20150502_041426_inLine +BABEL_OP3_404_72319_20150502_041426_outLine +BABEL_OP3_404_72733_20150515_044419_inLine +BABEL_OP3_404_72733_20150515_044419_outLine +BABEL_OP3_404_73072_20141012_012029_inLine +BABEL_OP3_404_73072_20141012_012029_outLine +BABEL_OP3_404_73119_20141026_232203_inLine +BABEL_OP3_404_73119_20141026_232203_outLine +BABEL_OP3_404_73258_20141117_010123_inLine +BABEL_OP3_404_73258_20141117_010123_outLine +BABEL_OP3_404_73485_20150512_234636_inLine +BABEL_OP3_404_73485_20150512_234636_outLine +BABEL_OP3_404_73964_20150512_205010_inLine +BABEL_OP3_404_73964_20150512_205010_outLine +BABEL_OP3_404_74641_20141108_223951_inLine +BABEL_OP3_404_74641_20141108_223951_outLine +BABEL_OP3_404_74728_20150503_042547_inLine +BABEL_OP3_404_74728_20150503_042547_outLine +BABEL_OP3_404_74799_20141109_222638_inLine +BABEL_OP3_404_74799_20141109_222638_outLine +BABEL_OP3_404_75465_20141129_223330_outLine +BABEL_OP3_404_75869_20150527_230650_inLine +BABEL_OP3_404_75869_20150527_230650_outLine +BABEL_OP3_404_75975_20150127_051140_outLine +BABEL_OP3_404_76126_20141201_202238_inLine +BABEL_OP3_404_76126_20141201_202238_outLine +BABEL_OP3_404_76238_20141129_223455_inLine +BABEL_OP3_404_76238_20141129_223455_outLine +BABEL_OP3_404_76372_20150601_014341_inLine +BABEL_OP3_404_76372_20150601_014341_outLine +BABEL_OP3_404_76437_20141019_202715_inLine +BABEL_OP3_404_76437_20141019_202715_outLine 
+BABEL_OP3_404_76444_20141127_032124_inLine +BABEL_OP3_404_76444_20141127_032124_outLine +BABEL_OP3_404_76482_20150618_063131_outLine +BABEL_OP3_404_76683_20141110_191551_inLine +BABEL_OP3_404_76683_20141110_191551_outLine +BABEL_OP3_404_76837_20150124_222250_outLine +BABEL_OP3_404_76970_20150625_191722_inLine +BABEL_OP3_404_77126_20141022_202348_inLine +BABEL_OP3_404_77126_20141022_202348_outLine +BABEL_OP3_404_77146_20141019_060916_inLine +BABEL_OP3_404_77242_20150612_024655_inLine +BABEL_OP3_404_77391_20141026_222314_inLine +BABEL_OP3_404_77391_20141026_222314_outLine +BABEL_OP3_404_77427_20141030_192713_inLine +BABEL_OP3_404_77427_20141030_192713_outLine +BABEL_OP3_404_77567_20141021_021210_inLine +BABEL_OP3_404_77567_20141021_021210_outLine +BABEL_OP3_404_77730_20141014_201059_inLine +BABEL_OP3_404_77730_20141014_201059_outLine +BABEL_OP3_404_77803_20141020_030844_inLine +BABEL_OP3_404_77803_20141020_030844_outLine +BABEL_OP3_404_77990_20141024_215822_inLine +BABEL_OP3_404_77990_20141024_215822_outLine +BABEL_OP3_404_78016_20141029_233059_inLine +BABEL_OP3_404_78016_20141029_233059_outLine +BABEL_OP3_404_78254_20141025_202742_inLine +BABEL_OP3_404_78254_20141025_202742_outLine +BABEL_OP3_404_78254_20141025_204922_inLine +BABEL_OP3_404_78254_20141025_204922_outLine +BABEL_OP3_404_78454_20141115_043455_inLine +BABEL_OP3_404_78749_20150620_025728_inLine +BABEL_OP3_404_78749_20150620_025728_outLine +BABEL_OP3_404_78976_20141025_183704_inLine +BABEL_OP3_404_78976_20141025_183704_outLine +BABEL_OP3_404_79190_20141108_232204_inLine +BABEL_OP3_404_79190_20141108_232204_outLine +BABEL_OP3_404_79590_20141129_025808_outLine +BABEL_OP3_404_79751_20141101_232250_inLine +BABEL_OP3_404_79751_20141101_232250_outLine +BABEL_OP3_404_79820_20141104_045340_inLine +BABEL_OP3_404_79820_20141104_045340_outLine +BABEL_OP3_404_79858_20141015_200446_inLine +BABEL_OP3_404_79898_20150620_022648_inLine +BABEL_OP3_404_79898_20150620_022648_outLine 
+BABEL_OP3_404_79898_20150620_024014_inLine +BABEL_OP3_404_79898_20150620_024014_outLine +BABEL_OP3_404_80069_20150614_233606_inLine +BABEL_OP3_404_80069_20150614_233606_outLine +BABEL_OP3_404_80306_20141119_003833_inLine +BABEL_OP3_404_80306_20141119_003833_outLine +BABEL_OP3_404_80306_20141119_005121_inLine +BABEL_OP3_404_80306_20141119_005121_outLine +BABEL_OP3_404_80439_20141026_005410_inLine +BABEL_OP3_404_80439_20141026_005410_outLine +BABEL_OP3_404_80559_20141022_010255_inLine +BABEL_OP3_404_80655_20150525_221544_inLine +BABEL_OP3_404_80655_20150525_221544_outLine +BABEL_OP3_404_80897_20141119_233718_inLine +BABEL_OP3_404_80897_20141119_233718_outLine +BABEL_OP3_404_81149_20150525_003741_inLine +BABEL_OP3_404_81149_20150525_003741_outLine +BABEL_OP3_404_81213_20141102_205052_inLine +BABEL_OP3_404_81213_20141102_205052_outLine +BABEL_OP3_404_81229_20141117_041745_inLine +BABEL_OP3_404_81229_20141117_041745_outLine +BABEL_OP3_404_81427_20141030_015136_inLine +BABEL_OP3_404_81427_20141030_015136_outLine +BABEL_OP3_404_81854_20150610_060437_inLine +BABEL_OP3_404_82089_20141117_045302_inLine +BABEL_OP3_404_82089_20141117_045302_outLine +BABEL_OP3_404_82303_20150614_024236_inLine +BABEL_OP3_404_82303_20150614_024236_outLine +BABEL_OP3_404_82473_20141026_060037_inLine +BABEL_OP3_404_82473_20141026_060037_outLine +BABEL_OP3_404_82626_20150615_014517_inLine +BABEL_OP3_404_82637_20141021_010105_inLine +BABEL_OP3_404_82637_20141021_010105_outLine +BABEL_OP3_404_82742_20141201_234306_inLine +BABEL_OP3_404_82742_20141201_234306_outLine +BABEL_OP3_404_82863_20141119_044230_inLine +BABEL_OP3_404_82863_20141119_044230_outLine +BABEL_OP3_404_83238_20141119_180953_inLine +BABEL_OP3_404_83238_20141119_180953_outLine +BABEL_OP3_404_83366_20141120_192208_inLine +BABEL_OP3_404_83366_20141120_192208_outLine +BABEL_OP3_404_83651_20141102_170912_inLine +BABEL_OP3_404_83651_20141102_170912_outLine +BABEL_OP3_404_83771_20150604_012300_outLine 
+BABEL_OP3_404_83851_20141028_203735_inLine +BABEL_OP3_404_83851_20141028_203735_outLine +BABEL_OP3_404_83929_20141018_184023_inLine +BABEL_OP3_404_83929_20141018_184023_outLine +BABEL_OP3_404_83974_20150617_022055_inLine +BABEL_OP3_404_84055_20150504_002015_inLine +BABEL_OP3_404_84055_20150504_002015_outLine +BABEL_OP3_404_84061_20141030_205021_inLine +BABEL_OP3_404_84061_20141030_205021_outLine +BABEL_OP3_404_84125_20141018_023340_inLine +BABEL_OP3_404_84125_20141018_023340_outLine +BABEL_OP3_404_84458_20141130_053628_outLine +BABEL_OP3_404_84815_20141127_011952_inLine +BABEL_OP3_404_84815_20141127_013345_inLine +BABEL_OP3_404_85047_20141117_014630_inLine +BABEL_OP3_404_85047_20141117_014630_outLine +BABEL_OP3_404_85048_20141127_023704_inLine +BABEL_OP3_404_85048_20141127_023704_outLine +BABEL_OP3_404_85254_20150620_035606_inLine +BABEL_OP3_404_85254_20150620_035606_outLine +BABEL_OP3_404_85322_20141008_235518_inLine +BABEL_OP3_404_85322_20141008_235518_outLine +BABEL_OP3_404_85340_20141103_022707_inLine +BABEL_OP3_404_85340_20141103_022707_outLine +BABEL_OP3_404_85651_20141211_032650_inLine +BABEL_OP3_404_85651_20141211_032650_outLine +BABEL_OP3_404_86472_20141201_011325_inLine +BABEL_OP3_404_86472_20141201_011325_outLine +BABEL_OP3_404_86597_20150612_170328_inLine +BABEL_OP3_404_86597_20150612_170328_outLine +BABEL_OP3_404_86635_20141127_204158_inLine +BABEL_OP3_404_86635_20141127_204158_outLine +BABEL_OP3_404_86722_20141029_192140_inLine +BABEL_OP3_404_86722_20141029_192140_outLine +BABEL_OP3_404_87074_20141105_190107_outLine +BABEL_OP3_404_87470_20141114_214639_inLine +BABEL_OP3_404_87470_20141114_214639_outLine +BABEL_OP3_404_87629_20141127_020403_inLine +BABEL_OP3_404_87629_20141127_020403_outLine +BABEL_OP3_404_87777_20141127_040747_inLine +BABEL_OP3_404_87777_20141127_040747_outLine +BABEL_OP3_404_87871_20141201_023608_inLine +BABEL_OP3_404_87871_20141201_023608_outLine +BABEL_OP3_404_87921_20141201_023029_inLine 
+BABEL_OP3_404_87921_20141201_023029_outLine +BABEL_OP3_404_88260_20141103_234824_inLine +BABEL_OP3_404_88260_20141103_234824_outLine +BABEL_OP3_404_88445_20141119_043713_inLine +BABEL_OP3_404_88445_20141119_043713_outLine +BABEL_OP3_404_88661_20141127_025208_inLine +BABEL_OP3_404_88661_20141127_025208_outLine +BABEL_OP3_404_88669_20141119_000147_inLine +BABEL_OP3_404_88669_20141119_000147_outLine +BABEL_OP3_404_88783_20141201_045305_inLine +BABEL_OP3_404_88783_20141201_045305_outLine +BABEL_OP3_404_88873_20141028_190127_inLine +BABEL_OP3_404_88873_20141028_190127_outLine +BABEL_OP3_404_89045_20141022_193202_inLine +BABEL_OP3_404_89045_20141022_193202_outLine +BABEL_OP3_404_89330_20150616_002908_inLine +BABEL_OP3_404_89330_20150616_002908_outLine +BABEL_OP3_404_89372_20141010_000950_inLine +BABEL_OP3_404_89372_20141010_000950_outLine +BABEL_OP3_404_89650_20150220_222402_inLine +BABEL_OP3_404_89650_20150220_222402_outLine +BABEL_OP3_404_89650_20150220_224606_inLine +BABEL_OP3_404_89650_20150220_224606_outLine +BABEL_OP3_404_89665_20141103_202723_inLine +BABEL_OP3_404_89665_20141103_202723_outLine +BABEL_OP3_404_89943_20141105_211847_outLine +BABEL_OP3_404_90347_20141119_012016_inLine +BABEL_OP3_404_90347_20141119_012016_outLine +BABEL_OP3_404_90760_20150611_151739_inLine +BABEL_OP3_404_90760_20150611_151739_outLine +BABEL_OP3_404_90832_20150616_012728_inLine +BABEL_OP3_404_90832_20150616_012728_outLine +BABEL_OP3_404_90930_20150119_021352_inLine +BABEL_OP3_404_90930_20150119_021352_outLine +BABEL_OP3_404_91383_20150618_035815_inLine +BABEL_OP3_404_91463_20141116_023036_inLine +BABEL_OP3_404_91463_20141116_023036_outLine +BABEL_OP3_404_91475_20150614_034536_inLine +BABEL_OP3_404_91581_20141129_045608_inLine +BABEL_OP3_404_91581_20141129_045608_outLine +BABEL_OP3_404_91581_20141129_050730_inLine +BABEL_OP3_404_91581_20141129_050730_outLine +BABEL_OP3_404_91593_20150611_021825_inLine +BABEL_OP3_404_91593_20150611_021825_outLine 
+BABEL_OP3_404_91825_20141009_181224_inLine +BABEL_OP3_404_91825_20141009_181224_outLine +BABEL_OP3_404_91825_20141009_183843_inLine +BABEL_OP3_404_91825_20141009_183843_outLine +BABEL_OP3_404_91884_20150503_022858_inLine +BABEL_OP3_404_91884_20150503_022858_outLine +BABEL_OP3_404_91888_20150512_191012_inLine +BABEL_OP3_404_91888_20150512_191012_outLine +BABEL_OP3_404_91891_20141129_005825_inLine +BABEL_OP3_404_91891_20141129_005825_outLine +BABEL_OP3_404_91944_20141022_021002_inLine +BABEL_OP3_404_91971_20150217_041455_inLine +BABEL_OP3_404_91971_20150217_041455_outLine +BABEL_OP3_404_91977_20141122_230420_outLine +BABEL_OP3_404_92176_20141119_195614_inLine +BABEL_OP3_404_92176_20141119_195614_outLine +BABEL_OP3_404_92281_20150625_185123_inLine +BABEL_OP3_404_92698_20141117_072302_inLine +BABEL_OP3_404_92698_20141117_072302_outLine +BABEL_OP3_404_92736_20141201_011442_inLine +BABEL_OP3_404_92736_20141201_011442_outLine +BABEL_OP3_404_92757_20150525_200048_inLine +BABEL_OP3_404_92757_20150525_200048_outLine +BABEL_OP3_404_92792_20150503_182854_outLine +BABEL_OP3_404_92792_20150525_025523_outLine +BABEL_OP3_404_92942_20141120_022830_inLine +BABEL_OP3_404_92942_20141120_022830_outLine +BABEL_OP3_404_93007_20150615_051230_inLine +BABEL_OP3_404_93007_20150615_051230_outLine +BABEL_OP3_404_93858_20150611_043732_inLine +BABEL_OP3_404_94002_20141119_015307_inLine +BABEL_OP3_404_94002_20141119_015307_outLine +BABEL_OP3_404_94025_20141129_180207_inLine +BABEL_OP3_404_94025_20141129_180207_outLine +BABEL_OP3_404_94333_20141020_024439_outLine +BABEL_OP3_404_94487_20150518_005132_outLine +BABEL_OP3_404_94869_20141007_194254_inLine +BABEL_OP3_404_94869_20141007_194254_outLine +BABEL_OP3_404_95077_20141201_055702_outLine +BABEL_OP3_404_95269_20141105_221810_inLine +BABEL_OP3_404_95269_20141105_221810_outLine +BABEL_OP3_404_95338_20150610_211203_inLine +BABEL_OP3_404_95338_20150610_211203_outLine +BABEL_OP3_404_95399_20141119_001023_inLine 
+BABEL_OP3_404_95399_20141119_001023_outLine +BABEL_OP3_404_95583_20141019_010741_inLine +BABEL_OP3_404_95583_20141019_010741_outLine +BABEL_OP3_404_96059_20150524_042224_outLine +BABEL_OP3_404_96205_20141119_033053_inLine +BABEL_OP3_404_96205_20141119_033053_outLine +BABEL_OP3_404_96205_20141119_034909_inLine +BABEL_OP3_404_96205_20141119_034909_outLine +BABEL_OP3_404_96247_20150526_202623_outLine +BABEL_OP3_404_96376_20150503_033706_inLine +BABEL_OP3_404_96376_20150503_033706_outLine +BABEL_OP3_404_96504_20141103_031329_inLine +BABEL_OP3_404_96504_20141103_031329_outLine +BABEL_OP3_404_96690_20141117_053054_inLine +BABEL_OP3_404_96690_20141117_053054_outLine +BABEL_OP3_404_96808_20150609_034129_inLine +BABEL_OP3_404_97097_20150601_042649_outLine +BABEL_OP3_404_97136_20150528_011250_inLine +BABEL_OP3_404_97136_20150528_011250_outLine +BABEL_OP3_404_97557_20141119_230718_inLine +BABEL_OP3_404_97557_20141119_230718_outLine +BABEL_OP3_404_97588_20141018_234016_inLine +BABEL_OP3_404_97588_20141018_234016_outLine +BABEL_OP3_404_97588_20141018_235425_inLine +BABEL_OP3_404_97588_20141018_235425_outLine +BABEL_OP3_404_97896_20141116_221329_inLine +BABEL_OP3_404_97896_20141116_221329_outLine +BABEL_OP3_404_97911_20150613_195820_outLine +BABEL_OP3_404_97988_20141201_030306_inLine +BABEL_OP3_404_97988_20141201_030306_outLine +BABEL_OP3_404_98165_20141030_214051_inLine +BABEL_OP3_404_98165_20141030_214051_outLine +BABEL_OP3_404_98192_20150617_021906_outLine +BABEL_OP3_404_98489_20141102_002030_inLine +BABEL_OP3_404_98489_20141102_004054_inLine +BABEL_OP3_404_98678_20150528_021605_inLine +BABEL_OP3_404_98678_20150528_023029_inLine +BABEL_OP3_404_98888_20141113_212715_inLine +BABEL_OP3_404_98888_20141113_212715_outLine +BABEL_OP3_404_99202_20141108_210814_inLine +BABEL_OP3_404_99202_20141108_210814_outLine +BABEL_OP3_404_99289_20150521_220314_inLine +BABEL_OP3_404_99289_20150521_220314_outLine +BABEL_OP3_404_99289_20150521_222144_inLine 
+BABEL_OP3_404_99289_20150521_222144_outLine +BABEL_OP3_404_99594_20141105_194545_inLine +BABEL_OP3_404_99594_20141105_194545_outLine +BABEL_OP3_404_99718_20141019_051850_inLine +BABEL_OP3_404_99718_20141019_051850_outLine +BABEL_OP3_404_99718_20141019_053305_inLine +BABEL_OP3_404_99718_20141019_053305_outLine +BABEL_OP3_404_99732_20141130_232553_inLine +BABEL_OP3_404_99732_20141130_232553_outLine +BABEL_OP3_404_99813_20141120_025129_inLine +BABEL_OP3_404_99813_20141120_025129_outLine +BABEL_OP3_404_99920_20141022_052026_inLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/training.list b/egs/babel/s5d/conf/lists/404-georgian/training.list new file mode 100644 index 00000000000..efc0afb8219 --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/training.list @@ -0,0 +1,518 @@ +BABEL_OP3_404_10019_20141101_191932_inLine +BABEL_OP3_404_10019_20141101_191932_outLine +BABEL_OP3_404_10416_20141117_064700_inLine +BABEL_OP3_404_10416_20141117_064700_outLine +BABEL_OP3_404_10647_20150514_001106_inLine +BABEL_OP3_404_10647_20150514_001106_outLine +BABEL_OP3_404_10974_20141119_205506_inLine +BABEL_OP3_404_10974_20141119_205506_outLine +BABEL_OP3_404_11663_20141118_032146_inLine +BABEL_OP3_404_11663_20141118_032146_outLine +BABEL_OP3_404_11673_20141023_035438_inLine +BABEL_OP3_404_11673_20141023_035438_outLine +BABEL_OP3_404_11681_20141107_190101_inLine +BABEL_OP3_404_11681_20141107_190101_outLine +BABEL_OP3_404_12242_20141028_021853_inLine +BABEL_OP3_404_12242_20141028_021853_outLine +BABEL_OP3_404_13030_20141101_200709_inLine +BABEL_OP3_404_13030_20141101_200709_outLine +BABEL_OP3_404_13178_20141129_192909_inLine +BABEL_OP3_404_13178_20141129_192909_outLine +BABEL_OP3_404_13324_20141022_200257_inLine +BABEL_OP3_404_13324_20141022_200257_outLine +BABEL_OP3_404_13664_20141012_013523_inLine +BABEL_OP3_404_13664_20141012_013523_outLine +BABEL_OP3_404_13709_20150512_015216_inLine +BABEL_OP3_404_13709_20150512_015216_outLine +BABEL_OP3_404_14137_20141025_202817_inLine 
+BABEL_OP3_404_14137_20141025_202817_outLine +BABEL_OP3_404_14229_20141029_200136_inLine +BABEL_OP3_404_14229_20141029_200136_outLine +BABEL_OP3_404_14237_20141006_171921_inLine +BABEL_OP3_404_14237_20141006_171921_outLine +BABEL_OP3_404_14440_20141127_213106_inLine +BABEL_OP3_404_14440_20141127_213106_outLine +BABEL_OP3_404_14807_20141110_231934_inLine +BABEL_OP3_404_14807_20141110_231934_outLine +BABEL_OP3_404_14875_20141026_230227_inLine +BABEL_OP3_404_14875_20141026_230227_outLine +BABEL_OP3_404_14899_20141022_202217_inLine +BABEL_OP3_404_14899_20141022_202217_outLine +BABEL_OP3_404_14929_20141129_192841_inLine +BABEL_OP3_404_14929_20141129_192841_outLine +BABEL_OP3_404_15382_20141130_213942_inLine +BABEL_OP3_404_15382_20141130_213942_outLine +BABEL_OP3_404_15848_20141006_231138_inLine +BABEL_OP3_404_15848_20141006_231138_outLine +BABEL_OP3_404_15869_20150218_225936_inLine +BABEL_OP3_404_15869_20150218_225936_outLine +BABEL_OP3_404_16149_20141010_173548_inLine +BABEL_OP3_404_16149_20141010_173548_outLine +BABEL_OP3_404_16467_20141130_014316_inLine +BABEL_OP3_404_16467_20141130_014316_outLine +BABEL_OP3_404_16467_20141130_015010_inLine +BABEL_OP3_404_16467_20141130_015010_outLine +BABEL_OP3_404_17113_20150611_050102_inLine +BABEL_OP3_404_17113_20150611_050102_outLine +BABEL_OP3_404_17280_20141103_190330_inLine +BABEL_OP3_404_17280_20141103_190330_outLine +BABEL_OP3_404_17615_20141201_025917_inLine +BABEL_OP3_404_17615_20141201_025917_outLine +BABEL_OP3_404_19134_20141120_053128_inLine +BABEL_OP3_404_19134_20141120_053128_outLine +BABEL_OP3_404_19703_20141027_004315_inLine +BABEL_OP3_404_19703_20141027_004315_outLine +BABEL_OP3_404_20133_20141010_195231_inLine +BABEL_OP3_404_20133_20141010_195231_outLine +BABEL_OP3_404_20985_20141126_183236_inLine +BABEL_OP3_404_20985_20141126_183236_outLine +BABEL_OP3_404_21004_20141201_035831_inLine +BABEL_OP3_404_21004_20141201_035831_outLine +BABEL_OP3_404_22280_20141111_020522_inLine 
+BABEL_OP3_404_22280_20141111_020522_outLine +BABEL_OP3_404_23046_20141031_030755_inLine +BABEL_OP3_404_23046_20141031_030755_outLine +BABEL_OP3_404_23505_20141021_032033_inLine +BABEL_OP3_404_23505_20141021_032033_outLine +BABEL_OP3_404_23731_20141130_033602_inLine +BABEL_OP3_404_23731_20141130_033602_outLine +BABEL_OP3_404_23980_20141106_225951_inLine +BABEL_OP3_404_23980_20141106_225951_outLine +BABEL_OP3_404_24270_20141111_012902_inLine +BABEL_OP3_404_24270_20141111_012902_outLine +BABEL_OP3_404_24470_20141111_184651_inLine +BABEL_OP3_404_24470_20141111_184651_outLine +BABEL_OP3_404_24470_20141111_190229_inLine +BABEL_OP3_404_24470_20141111_190229_outLine +BABEL_OP3_404_24532_20141007_211325_inLine +BABEL_OP3_404_24532_20141007_211325_outLine +BABEL_OP3_404_24589_20141031_020641_inLine +BABEL_OP3_404_24589_20141031_020641_outLine +BABEL_OP3_404_24679_20141018_015615_inLine +BABEL_OP3_404_24679_20141018_015615_outLine +BABEL_OP3_404_24982_20141102_021352_inLine +BABEL_OP3_404_24982_20141102_021352_outLine +BABEL_OP3_404_26388_20141026_014207_inLine +BABEL_OP3_404_26388_20141026_014207_outLine +BABEL_OP3_404_27042_20141201_215107_inLine +BABEL_OP3_404_27042_20141201_215107_outLine +BABEL_OP3_404_28303_20141028_182204_inLine +BABEL_OP3_404_28303_20141028_182204_outLine +BABEL_OP3_404_28522_20141124_222758_inLine +BABEL_OP3_404_28522_20141124_222758_outLine +BABEL_OP3_404_28538_20141119_005526_inLine +BABEL_OP3_404_28538_20141119_005526_outLine +BABEL_OP3_404_28871_20141019_181913_inLine +BABEL_OP3_404_28871_20141019_181913_outLine +BABEL_OP3_404_29039_20141128_035839_inLine +BABEL_OP3_404_29039_20141128_035839_outLine +BABEL_OP3_404_29208_20141106_013309_inLine +BABEL_OP3_404_29208_20141106_013309_outLine +BABEL_OP3_404_30098_20150610_150504_inLine +BABEL_OP3_404_30098_20150610_150504_outLine +BABEL_OP3_404_30432_20141126_052839_inLine +BABEL_OP3_404_30432_20141126_052839_outLine +BABEL_OP3_404_30461_20150620_020316_inLine 
+BABEL_OP3_404_30461_20150620_020316_outLine +BABEL_OP3_404_31624_20141105_214349_inLine +BABEL_OP3_404_31624_20141105_214349_outLine +BABEL_OP3_404_31979_20141106_000523_inLine +BABEL_OP3_404_31979_20141106_000523_outLine +BABEL_OP3_404_31992_20141014_221817_inLine +BABEL_OP3_404_31992_20141014_221817_outLine +BABEL_OP3_404_32122_20141115_022841_inLine +BABEL_OP3_404_32122_20141115_022841_outLine +BABEL_OP3_404_32287_20150210_060823_inLine +BABEL_OP3_404_32287_20150210_060823_outLine +BABEL_OP3_404_32708_20141106_032826_inLine +BABEL_OP3_404_32708_20141106_032826_outLine +BABEL_OP3_404_32727_20141128_203500_inLine +BABEL_OP3_404_32727_20141128_203500_outLine +BABEL_OP3_404_32727_20141128_204751_inLine +BABEL_OP3_404_32727_20141128_204751_outLine +BABEL_OP3_404_33355_20141019_032024_inLine +BABEL_OP3_404_33355_20141019_032024_outLine +BABEL_OP3_404_33355_20141019_034109_inLine +BABEL_OP3_404_33355_20141019_034109_outLine +BABEL_OP3_404_33704_20141207_073436_inLine +BABEL_OP3_404_33704_20141207_073436_outLine +BABEL_OP3_404_34679_20141102_052808_inLine +BABEL_OP3_404_34679_20141102_052808_outLine +BABEL_OP3_404_34688_20141009_073303_inLine +BABEL_OP3_404_34688_20141009_073303_outLine +BABEL_OP3_404_35143_20141130_181111_inLine +BABEL_OP3_404_35143_20141130_181111_outLine +BABEL_OP3_404_37064_20141102_063308_inLine +BABEL_OP3_404_37064_20141102_063308_outLine +BABEL_OP3_404_37281_20141119_053453_inLine +BABEL_OP3_404_37281_20141119_053453_outLine +BABEL_OP3_404_37598_20141119_045926_inLine +BABEL_OP3_404_37598_20141119_045926_outLine +BABEL_OP3_404_37682_20141101_221445_inLine +BABEL_OP3_404_37682_20141101_221445_outLine +BABEL_OP3_404_37853_20150602_030625_inLine +BABEL_OP3_404_37853_20150602_030625_outLine +BABEL_OP3_404_38588_20141118_163844_inLine +BABEL_OP3_404_38588_20141118_163844_outLine +BABEL_OP3_404_40557_20141127_200639_inLine +BABEL_OP3_404_40557_20141127_200639_outLine +BABEL_OP3_404_40713_20141028_221207_inLine 
+BABEL_OP3_404_40713_20141028_221207_outLine +BABEL_OP3_404_40939_20150210_212748_inLine +BABEL_OP3_404_40939_20150210_212748_outLine +BABEL_OP3_404_41100_20141021_022126_inLine +BABEL_OP3_404_41100_20141021_022126_outLine +BABEL_OP3_404_41609_20141009_013405_inLine +BABEL_OP3_404_41609_20141009_013405_outLine +BABEL_OP3_404_41680_20141012_040411_inLine +BABEL_OP3_404_41680_20141012_040411_outLine +BABEL_OP3_404_41920_20141008_040539_inLine +BABEL_OP3_404_41920_20141008_040539_outLine +BABEL_OP3_404_41958_20141029_212755_inLine +BABEL_OP3_404_41958_20141029_212755_outLine +BABEL_OP3_404_42877_20150212_052937_inLine +BABEL_OP3_404_42877_20150212_052937_outLine +BABEL_OP3_404_43368_20141031_010629_inLine +BABEL_OP3_404_43368_20141031_010629_outLine +BABEL_OP3_404_44114_20150614_012319_inLine +BABEL_OP3_404_44114_20150614_012319_outLine +BABEL_OP3_404_44477_20141201_180604_inLine +BABEL_OP3_404_44477_20141201_180604_outLine +BABEL_OP3_404_44847_20141130_221248_inLine +BABEL_OP3_404_44847_20141130_221248_outLine +BABEL_OP3_404_45121_20150609_055234_inLine +BABEL_OP3_404_45121_20150609_055234_outLine +BABEL_OP3_404_45560_20141012_030417_inLine +BABEL_OP3_404_45560_20141012_030417_outLine +BABEL_OP3_404_46169_20141130_224339_inLine +BABEL_OP3_404_46169_20141130_224339_outLine +BABEL_OP3_404_46268_20141019_032022_inLine +BABEL_OP3_404_46268_20141019_032022_outLine +BABEL_OP3_404_46550_20141105_072519_inLine +BABEL_OP3_404_46550_20141105_072519_outLine +BABEL_OP3_404_46625_20141011_040505_inLine +BABEL_OP3_404_46625_20141011_040505_outLine +BABEL_OP3_404_46681_20141021_040451_inLine +BABEL_OP3_404_46681_20141021_040451_outLine +BABEL_OP3_404_46881_20141012_020055_inLine +BABEL_OP3_404_46881_20141012_020055_outLine +BABEL_OP3_404_46976_20141107_183806_inLine +BABEL_OP3_404_46976_20141107_183806_outLine +BABEL_OP3_404_47270_20150512_053415_inLine +BABEL_OP3_404_47270_20150512_053415_outLine +BABEL_OP3_404_47802_20141110_200430_inLine 
+BABEL_OP3_404_47802_20141110_200430_outLine +BABEL_OP3_404_48243_20141023_200903_inLine +BABEL_OP3_404_48243_20141023_200903_outLine +BABEL_OP3_404_48844_20141020_065414_inLine +BABEL_OP3_404_48844_20141020_065414_outLine +BABEL_OP3_404_49197_20141117_024730_inLine +BABEL_OP3_404_49197_20141117_024730_outLine +BABEL_OP3_404_49768_20141026_022902_inLine +BABEL_OP3_404_49768_20141026_022902_outLine +BABEL_OP3_404_49902_20141101_175534_inLine +BABEL_OP3_404_49902_20141101_175534_outLine +BABEL_OP3_404_49907_20141103_050534_inLine +BABEL_OP3_404_49907_20141103_050534_outLine +BABEL_OP3_404_50175_20141021_025726_inLine +BABEL_OP3_404_50175_20141021_025726_outLine +BABEL_OP3_404_50745_20150513_162805_inLine +BABEL_OP3_404_50745_20150513_162805_outLine +BABEL_OP3_404_51015_20141123_193824_inLine +BABEL_OP3_404_51015_20141123_193824_outLine +BABEL_OP3_404_52246_20141118_035022_inLine +BABEL_OP3_404_52246_20141118_035022_outLine +BABEL_OP3_404_52246_20141118_040850_inLine +BABEL_OP3_404_52246_20141118_040850_outLine +BABEL_OP3_404_52301_20141009_051739_inLine +BABEL_OP3_404_52301_20141009_051739_outLine +BABEL_OP3_404_52301_20141009_054049_inLine +BABEL_OP3_404_52301_20141009_054049_outLine +BABEL_OP3_404_52490_20141016_020323_inLine +BABEL_OP3_404_52490_20141016_020323_outLine +BABEL_OP3_404_52725_20150522_222730_inLine +BABEL_OP3_404_52725_20150522_222730_outLine +BABEL_OP3_404_54104_20141008_214620_inLine +BABEL_OP3_404_54104_20141008_214620_outLine +BABEL_OP3_404_54160_20141009_180704_inLine +BABEL_OP3_404_54160_20141009_180704_outLine +BABEL_OP3_404_54160_20141009_184719_inLine +BABEL_OP3_404_54160_20141009_184719_outLine +BABEL_OP3_404_54160_20141009_185557_inLine +BABEL_OP3_404_54160_20141009_185557_outLine +BABEL_OP3_404_54405_20141117_054820_inLine +BABEL_OP3_404_54405_20141117_054820_outLine +BABEL_OP3_404_54744_20141015_012011_inLine +BABEL_OP3_404_54744_20141015_012011_outLine +BABEL_OP3_404_55259_20141029_225631_inLine 
+BABEL_OP3_404_55259_20141029_225631_outLine +BABEL_OP3_404_56213_20141201_000837_inLine +BABEL_OP3_404_56213_20141201_000837_outLine +BABEL_OP3_404_57654_20141023_235628_inLine +BABEL_OP3_404_57654_20141023_235628_outLine +BABEL_OP3_404_57678_20141104_023128_inLine +BABEL_OP3_404_57678_20141104_023128_outLine +BABEL_OP3_404_57919_20150127_041057_inLine +BABEL_OP3_404_57919_20150127_041057_outLine +BABEL_OP3_404_58103_20141030_002209_inLine +BABEL_OP3_404_58103_20141030_002209_outLine +BABEL_OP3_404_59078_20141111_004941_inLine +BABEL_OP3_404_59078_20141111_004941_outLine +BABEL_OP3_404_59262_20141130_212633_inLine +BABEL_OP3_404_59262_20141130_212633_outLine +BABEL_OP3_404_59720_20141029_204612_inLine +BABEL_OP3_404_59720_20141029_204612_outLine +BABEL_OP3_404_60026_20141008_051633_inLine +BABEL_OP3_404_60026_20141008_051633_outLine +BABEL_OP3_404_60474_20141029_182816_inLine +BABEL_OP3_404_60474_20141029_182816_outLine +BABEL_OP3_404_60626_20141028_212539_inLine +BABEL_OP3_404_60626_20141028_212539_outLine +BABEL_OP3_404_61167_20141030_222711_inLine +BABEL_OP3_404_61167_20141030_222711_outLine +BABEL_OP3_404_61219_20141025_193634_inLine +BABEL_OP3_404_61219_20141025_193634_outLine +BABEL_OP3_404_61225_20141009_174003_inLine +BABEL_OP3_404_61225_20141009_174003_outLine +BABEL_OP3_404_61678_20141019_201928_inLine +BABEL_OP3_404_61678_20141019_201928_outLine +BABEL_OP3_404_61873_20141108_214852_inLine +BABEL_OP3_404_61873_20141108_214852_outLine +BABEL_OP3_404_62155_20150522_032307_inLine +BABEL_OP3_404_62155_20150522_032307_outLine +BABEL_OP3_404_62286_20141105_204359_inLine +BABEL_OP3_404_62286_20141105_204359_outLine +BABEL_OP3_404_62456_20141108_202333_inLine +BABEL_OP3_404_62456_20141108_202333_outLine +BABEL_OP3_404_62714_20150522_011337_inLine +BABEL_OP3_404_62714_20150522_011337_outLine +BABEL_OP3_404_62734_20141029_221513_inLine +BABEL_OP3_404_62734_20141029_221513_outLine +BABEL_OP3_404_63081_20141021_032233_inLine 
+BABEL_OP3_404_63081_20141021_032233_outLine +BABEL_OP3_404_63081_20141021_033457_inLine +BABEL_OP3_404_63081_20141021_033457_outLine +BABEL_OP3_404_63084_20141130_221452_inLine +BABEL_OP3_404_63084_20141130_221452_outLine +BABEL_OP3_404_63220_20141127_033605_inLine +BABEL_OP3_404_63220_20141127_033605_outLine +BABEL_OP3_404_63757_20141111_180721_inLine +BABEL_OP3_404_63757_20141111_180721_outLine +BABEL_OP3_404_64494_20141026_203549_inLine +BABEL_OP3_404_64494_20141026_203549_outLine +BABEL_OP3_404_64768_20141027_201818_inLine +BABEL_OP3_404_64768_20141027_201818_outLine +BABEL_OP3_404_64870_20141108_192546_inLine +BABEL_OP3_404_64870_20141108_192546_outLine +BABEL_OP3_404_66045_20141117_035937_inLine +BABEL_OP3_404_66045_20141117_035937_outLine +BABEL_OP3_404_66177_20150503_202932_inLine +BABEL_OP3_404_66177_20150503_202932_outLine +BABEL_OP3_404_66822_20141117_020953_inLine +BABEL_OP3_404_66822_20141117_020953_outLine +BABEL_OP3_404_66916_20141022_000731_inLine +BABEL_OP3_404_66916_20141022_000731_outLine +BABEL_OP3_404_67401_20141109_211809_inLine +BABEL_OP3_404_67401_20141109_211809_outLine +BABEL_OP3_404_67842_20141104_051753_inLine +BABEL_OP3_404_67842_20141104_051753_outLine +BABEL_OP3_404_68059_20141109_052011_inLine +BABEL_OP3_404_68059_20141109_052011_outLine +BABEL_OP3_404_68068_20141201_054518_inLine +BABEL_OP3_404_68068_20141201_054518_outLine +BABEL_OP3_404_68244_20141119_065540_inLine +BABEL_OP3_404_68244_20141119_065540_outLine +BABEL_OP3_404_68384_20141130_035214_inLine +BABEL_OP3_404_68384_20141130_035214_outLine +BABEL_OP3_404_68385_20141017_031005_inLine +BABEL_OP3_404_68385_20141017_031005_outLine +BABEL_OP3_404_68627_20141105_190511_inLine +BABEL_OP3_404_68627_20141105_190511_outLine +BABEL_OP3_404_68823_20150212_041147_inLine +BABEL_OP3_404_68823_20150212_041147_outLine +BABEL_OP3_404_69107_20141120_010459_inLine +BABEL_OP3_404_69107_20141120_010459_outLine +BABEL_OP3_404_69574_20141006_023156_inLine 
+BABEL_OP3_404_69574_20141006_023156_outLine +BABEL_OP3_404_69578_20141117_003921_inLine +BABEL_OP3_404_69578_20141117_003921_outLine +BABEL_OP3_404_70121_20141104_202610_inLine +BABEL_OP3_404_70121_20141104_202610_outLine +BABEL_OP3_404_70282_20141111_000251_inLine +BABEL_OP3_404_70282_20141111_000251_outLine +BABEL_OP3_404_70794_20141021_185105_inLine +BABEL_OP3_404_70794_20141021_185105_outLine +BABEL_OP3_404_71263_20141119_234747_inLine +BABEL_OP3_404_71263_20141119_234747_outLine +BABEL_OP3_404_71401_20150206_070446_inLine +BABEL_OP3_404_71401_20150206_070446_outLine +BABEL_OP3_404_71404_20141023_215509_inLine +BABEL_OP3_404_71404_20141023_215509_outLine +BABEL_OP3_404_71566_20141130_035713_inLine +BABEL_OP3_404_71566_20141130_035713_outLine +BABEL_OP3_404_71566_20141130_040359_inLine +BABEL_OP3_404_71566_20141130_040359_outLine +BABEL_OP3_404_72844_20141007_033837_inLine +BABEL_OP3_404_72844_20141007_033837_outLine +BABEL_OP3_404_73119_20141026_232203_inLine +BABEL_OP3_404_73119_20141026_232203_outLine +BABEL_OP3_404_73485_20150512_234636_inLine +BABEL_OP3_404_73485_20150512_234636_outLine +BABEL_OP3_404_73837_20141026_191037_inLine +BABEL_OP3_404_73837_20141026_191037_outLine +BABEL_OP3_404_74641_20141108_223951_inLine +BABEL_OP3_404_74641_20141108_223951_outLine +BABEL_OP3_404_74799_20141109_222638_inLine +BABEL_OP3_404_74799_20141109_222638_outLine +BABEL_OP3_404_75869_20150527_230650_inLine +BABEL_OP3_404_75869_20150527_230650_outLine +BABEL_OP3_404_76437_20141019_202715_inLine +BABEL_OP3_404_76437_20141019_202715_outLine +BABEL_OP3_404_77126_20141022_202348_inLine +BABEL_OP3_404_77126_20141022_202348_outLine +BABEL_OP3_404_77391_20141026_222314_inLine +BABEL_OP3_404_77391_20141026_222314_outLine +BABEL_OP3_404_77427_20141030_192713_inLine +BABEL_OP3_404_77427_20141030_192713_outLine +BABEL_OP3_404_77730_20141014_201059_inLine +BABEL_OP3_404_77730_20141014_201059_outLine +BABEL_OP3_404_77990_20141024_215822_inLine 
+BABEL_OP3_404_77990_20141024_215822_outLine +BABEL_OP3_404_78016_20141029_233059_inLine +BABEL_OP3_404_78016_20141029_233059_outLine +BABEL_OP3_404_78254_20141025_202742_inLine +BABEL_OP3_404_78254_20141025_202742_outLine +BABEL_OP3_404_78254_20141025_204922_inLine +BABEL_OP3_404_78254_20141025_204922_outLine +BABEL_OP3_404_78511_20141201_003606_inLine +BABEL_OP3_404_78511_20141201_003606_outLine +BABEL_OP3_404_78976_20141025_183704_inLine +BABEL_OP3_404_78976_20141025_183704_outLine +BABEL_OP3_404_79139_20141117_054733_inLine +BABEL_OP3_404_79139_20141117_054733_outLine +BABEL_OP3_404_79751_20141101_232250_inLine +BABEL_OP3_404_79751_20141101_232250_outLine +BABEL_OP3_404_80439_20141026_005410_inLine +BABEL_OP3_404_80439_20141026_005410_outLine +BABEL_OP3_404_81213_20141102_205052_inLine +BABEL_OP3_404_81213_20141102_205052_outLine +BABEL_OP3_404_81229_20141117_041745_inLine +BABEL_OP3_404_81229_20141117_041745_outLine +BABEL_OP3_404_81971_20141022_025641_inLine +BABEL_OP3_404_81971_20141022_025641_outLine +BABEL_OP3_404_82089_20141117_045302_inLine +BABEL_OP3_404_82089_20141117_045302_outLine +BABEL_OP3_404_82303_20150614_024236_inLine +BABEL_OP3_404_82303_20150614_024236_outLine +BABEL_OP3_404_82473_20141026_060037_inLine +BABEL_OP3_404_82473_20141026_060037_outLine +BABEL_OP3_404_82637_20141021_010105_inLine +BABEL_OP3_404_82637_20141021_010105_outLine +BABEL_OP3_404_82742_20141201_234306_inLine +BABEL_OP3_404_82742_20141201_234306_outLine +BABEL_OP3_404_83062_20150523_220236_inLine +BABEL_OP3_404_83062_20150523_220236_outLine +BABEL_OP3_404_83238_20141119_180953_inLine +BABEL_OP3_404_83238_20141119_180953_outLine +BABEL_OP3_404_83366_20141120_192208_inLine +BABEL_OP3_404_83366_20141120_192208_outLine +BABEL_OP3_404_83775_20141030_230742_inLine +BABEL_OP3_404_83775_20141030_230742_outLine +BABEL_OP3_404_83851_20141028_203735_inLine +BABEL_OP3_404_83851_20141028_203735_outLine +BABEL_OP3_404_83929_20141018_184023_inLine 
+BABEL_OP3_404_83929_20141018_184023_outLine +BABEL_OP3_404_84055_20150504_002015_inLine +BABEL_OP3_404_84055_20150504_002015_outLine +BABEL_OP3_404_84061_20141030_205021_inLine +BABEL_OP3_404_84061_20141030_205021_outLine +BABEL_OP3_404_84339_20150502_014143_inLine +BABEL_OP3_404_84339_20150502_014143_outLine +BABEL_OP3_404_85048_20141127_023704_inLine +BABEL_OP3_404_85048_20141127_023704_outLine +BABEL_OP3_404_85254_20150620_035606_inLine +BABEL_OP3_404_85254_20150620_035606_outLine +BABEL_OP3_404_85322_20141008_235518_inLine +BABEL_OP3_404_85322_20141008_235518_outLine +BABEL_OP3_404_85651_20141211_032650_inLine +BABEL_OP3_404_85651_20141211_032650_outLine +BABEL_OP3_404_86191_20141027_013544_inLine +BABEL_OP3_404_86191_20141027_013544_outLine +BABEL_OP3_404_86472_20141201_011325_inLine +BABEL_OP3_404_86472_20141201_011325_outLine +BABEL_OP3_404_86635_20141127_204158_inLine +BABEL_OP3_404_86635_20141127_204158_outLine +BABEL_OP3_404_86722_20141029_192140_inLine +BABEL_OP3_404_86722_20141029_192140_outLine +BABEL_OP3_404_86888_20141119_022459_inLine +BABEL_OP3_404_86888_20141119_022459_outLine +BABEL_OP3_404_87470_20141114_214639_inLine +BABEL_OP3_404_87470_20141114_214639_outLine +BABEL_OP3_404_87629_20141127_020403_inLine +BABEL_OP3_404_87629_20141127_020403_outLine +BABEL_OP3_404_88260_20141103_234824_inLine +BABEL_OP3_404_88260_20141103_234824_outLine +BABEL_OP3_404_88445_20141119_043713_inLine +BABEL_OP3_404_88445_20141119_043713_outLine +BABEL_OP3_404_88661_20141127_025208_inLine +BABEL_OP3_404_88661_20141127_025208_outLine +BABEL_OP3_404_88669_20141119_000147_inLine +BABEL_OP3_404_88669_20141119_000147_outLine +BABEL_OP3_404_88783_20141201_045305_inLine +BABEL_OP3_404_88783_20141201_045305_outLine +BABEL_OP3_404_89045_20141022_193202_inLine +BABEL_OP3_404_89045_20141022_193202_outLine +BABEL_OP3_404_89372_20141010_000950_inLine +BABEL_OP3_404_89372_20141010_000950_outLine +BABEL_OP3_404_89650_20150220_222402_inLine 
+BABEL_OP3_404_89650_20150220_222402_outLine +BABEL_OP3_404_89650_20150220_224606_inLine +BABEL_OP3_404_89650_20150220_224606_outLine +BABEL_OP3_404_89665_20141103_202723_inLine +BABEL_OP3_404_89665_20141103_202723_outLine +BABEL_OP3_404_90930_20150119_021352_inLine +BABEL_OP3_404_90930_20150119_021352_outLine +BABEL_OP3_404_91463_20141116_023036_inLine +BABEL_OP3_404_91463_20141116_023036_outLine +BABEL_OP3_404_91825_20141009_181224_inLine +BABEL_OP3_404_91825_20141009_181224_outLine +BABEL_OP3_404_91825_20141009_183843_inLine +BABEL_OP3_404_91825_20141009_183843_outLine +BABEL_OP3_404_91971_20150217_041455_inLine +BABEL_OP3_404_91971_20150217_041455_outLine +BABEL_OP3_404_92698_20141117_072302_inLine +BABEL_OP3_404_92698_20141117_072302_outLine +BABEL_OP3_404_92736_20141201_011442_inLine +BABEL_OP3_404_92736_20141201_011442_outLine +BABEL_OP3_404_94025_20141129_180207_inLine +BABEL_OP3_404_94025_20141129_180207_outLine +BABEL_OP3_404_94869_20141007_194254_inLine +BABEL_OP3_404_94869_20141007_194254_outLine +BABEL_OP3_404_95966_20141129_060246_inLine +BABEL_OP3_404_95966_20141129_060246_outLine +BABEL_OP3_404_96376_20150503_033706_inLine +BABEL_OP3_404_96376_20150503_033706_outLine +BABEL_OP3_404_96504_20141103_031329_inLine +BABEL_OP3_404_96504_20141103_031329_outLine +BABEL_OP3_404_97461_20141118_230730_inLine +BABEL_OP3_404_97461_20141118_230730_outLine +BABEL_OP3_404_97557_20141119_230718_inLine +BABEL_OP3_404_97557_20141119_230718_outLine +BABEL_OP3_404_97588_20141018_234016_inLine +BABEL_OP3_404_97588_20141018_234016_outLine +BABEL_OP3_404_97588_20141018_235425_inLine +BABEL_OP3_404_97588_20141018_235425_outLine +BABEL_OP3_404_97896_20141116_221329_inLine +BABEL_OP3_404_97896_20141116_221329_outLine +BABEL_OP3_404_97988_20141201_030306_inLine +BABEL_OP3_404_97988_20141201_030306_outLine +BABEL_OP3_404_98888_20141113_212715_inLine +BABEL_OP3_404_98888_20141113_212715_outLine +BABEL_OP3_404_99202_20141108_210814_inLine 
+BABEL_OP3_404_99202_20141108_210814_outLine +BABEL_OP3_404_99487_20141021_053024_inLine +BABEL_OP3_404_99487_20141021_053024_outLine +BABEL_OP3_404_99594_20141105_194545_inLine +BABEL_OP3_404_99594_20141105_194545_outLine +BABEL_OP3_404_99813_20141120_025129_inLine +BABEL_OP3_404_99813_20141120_025129_outLine diff --git a/egs/babel/s5d/conf/lists/404-georgian/untranscribed-training.list b/egs/babel/s5d/conf/lists/404-georgian/untranscribed-training.list new file mode 100644 index 00000000000..8d6682cc789 --- /dev/null +++ b/egs/babel/s5d/conf/lists/404-georgian/untranscribed-training.list @@ -0,0 +1,535 @@ +BABEL_OP3_404_10058_20150526_034808_inLine +BABEL_OP3_404_10411_20150611_172027_inLine +BABEL_OP3_404_10411_20150611_172027_outLine +BABEL_OP3_404_10938_20141030_023413_inLine +BABEL_OP3_404_10938_20141030_023413_outLine +BABEL_OP3_404_11352_20150513_002642_inLine +BABEL_OP3_404_11352_20150513_002642_outLine +BABEL_OP3_404_11859_20150611_041737_inLine +BABEL_OP3_404_11859_20150611_041737_outLine +BABEL_OP3_404_12220_20141116_205911_inLine +BABEL_OP3_404_12220_20141116_205911_outLine +BABEL_OP3_404_12609_20150524_172934_inLine +BABEL_OP3_404_12609_20150524_172934_outLine +BABEL_OP3_404_13126_20150524_221540_inLine +BABEL_OP3_404_13126_20150524_221540_outLine +BABEL_OP3_404_14158_20141130_030130_inLine +BABEL_OP3_404_14158_20141130_030130_outLine +BABEL_OP3_404_15024_20141118_234824_inLine +BABEL_OP3_404_15024_20141118_234824_outLine +BABEL_OP3_404_15042_20150506_232829_inLine +BABEL_OP3_404_15042_20150506_232829_outLine +BABEL_OP3_404_15535_20141129_021659_inLine +BABEL_OP3_404_15535_20141129_021659_outLine +BABEL_OP3_404_15638_20141127_220502_outLine +BABEL_OP3_404_15902_20141020_173105_outLine +BABEL_OP3_404_16475_20141116_052010_outLine +BABEL_OP3_404_16601_20141201_041704_inLine +BABEL_OP3_404_16601_20141201_041704_outLine +BABEL_OP3_404_17320_20150524_213213_inLine +BABEL_OP3_404_17320_20150524_213213_outLine +BABEL_OP3_404_17420_20150503_201902_inLine 
+BABEL_OP3_404_17420_20150503_201902_outLine +BABEL_OP3_404_17420_20150527_025815_inLine +BABEL_OP3_404_17420_20150527_025815_outLine +BABEL_OP3_404_17420_20150527_034621_inLine +BABEL_OP3_404_17420_20150527_034621_outLine +BABEL_OP3_404_17520_20141113_032534_inLine +BABEL_OP3_404_17567_20141117_182919_inLine +BABEL_OP3_404_17567_20141117_182919_outLine +BABEL_OP3_404_17573_20141129_035040_inLine +BABEL_OP3_404_17573_20141129_035040_outLine +BABEL_OP3_404_17890_20141128_040046_inLine +BABEL_OP3_404_17890_20141128_040046_outLine +BABEL_OP3_404_17923_20141022_231429_outLine +BABEL_OP3_404_18118_20150503_165936_inLine +BABEL_OP3_404_18118_20150503_165936_outLine +BABEL_OP3_404_18291_20150611_062705_outLine +BABEL_OP3_404_18291_20150611_063700_outLine +BABEL_OP3_404_18766_20150610_064349_inLine +BABEL_OP3_404_19120_20150525_014657_inLine +BABEL_OP3_404_19120_20150525_014657_outLine +BABEL_OP3_404_19120_20150525_015635_inLine +BABEL_OP3_404_19120_20150525_015635_outLine +BABEL_OP3_404_19877_20150506_202237_outLine +BABEL_OP3_404_20454_20150218_171143_inLine +BABEL_OP3_404_20454_20150218_171143_outLine +BABEL_OP3_404_21159_20150615_021612_inLine +BABEL_OP3_404_21435_20150523_030702_inLine +BABEL_OP3_404_21435_20150523_030702_outLine +BABEL_OP3_404_21581_20141101_011021_inLine +BABEL_OP3_404_21581_20141101_011021_outLine +BABEL_OP3_404_21807_20141112_225225_outLine +BABEL_OP3_404_22591_20150217_220714_inLine +BABEL_OP3_404_24209_20150212_224614_inLine +BABEL_OP3_404_24239_20150517_203015_inLine +BABEL_OP3_404_24323_20141117_020615_outLine +BABEL_OP3_404_24501_20150522_030231_inLine +BABEL_OP3_404_24586_20150524_190657_inLine +BABEL_OP3_404_24586_20150524_190657_outLine +BABEL_OP3_404_24590_20141116_230233_inLine +BABEL_OP3_404_24590_20141116_230233_outLine +BABEL_OP3_404_25068_20150206_022730_outLine +BABEL_OP3_404_25085_20150611_040906_inLine +BABEL_OP3_404_25085_20150611_040906_outLine +BABEL_OP3_404_25412_20141120_031532_inLine 
+BABEL_OP3_404_25412_20141120_031532_outLine +BABEL_OP3_404_25496_20150613_034126_inLine +BABEL_OP3_404_25496_20150613_034126_outLine +BABEL_OP3_404_26398_20150527_032152_inLine +BABEL_OP3_404_26398_20150527_032152_outLine +BABEL_OP3_404_26478_20150617_004029_inLine +BABEL_OP3_404_26478_20150617_004029_outLine +BABEL_OP3_404_26836_20141102_024528_inLine +BABEL_OP3_404_26836_20141102_024528_outLine +BABEL_OP3_404_27203_20141119_185720_inLine +BABEL_OP3_404_27203_20141119_185720_outLine +BABEL_OP3_404_27203_20141119_191138_inLine +BABEL_OP3_404_27203_20141119_191138_outLine +BABEL_OP3_404_27590_20141128_051454_inLine +BABEL_OP3_404_28280_20150619_024509_inLine +BABEL_OP3_404_28280_20150619_024509_outLine +BABEL_OP3_404_28280_20150619_025848_inLine +BABEL_OP3_404_28280_20150619_025848_outLine +BABEL_OP3_404_28600_20141201_223206_inLine +BABEL_OP3_404_28600_20141201_223206_outLine +BABEL_OP3_404_28945_20141104_060349_outLine +BABEL_OP3_404_29076_20141109_215142_inLine +BABEL_OP3_404_29076_20141109_215142_outLine +BABEL_OP3_404_29230_20150611_051340_inLine +BABEL_OP3_404_29230_20150611_051340_outLine +BABEL_OP3_404_29439_20150524_201524_inLine +BABEL_OP3_404_29439_20150524_201524_outLine +BABEL_OP3_404_30497_20150525_194737_inLine +BABEL_OP3_404_30497_20150525_194737_outLine +BABEL_OP3_404_30645_20141019_220859_inLine +BABEL_OP3_404_30653_20150514_014515_inLine +BABEL_OP3_404_31267_20150615_011004_outLine +BABEL_OP3_404_31484_20141122_232804_inLine +BABEL_OP3_404_31484_20141122_232804_outLine +BABEL_OP3_404_31919_20150526_220911_inLine +BABEL_OP3_404_31919_20150526_220911_outLine +BABEL_OP3_404_32630_20150609_012137_inLine +BABEL_OP3_404_32630_20150609_012137_outLine +BABEL_OP3_404_32959_20141201_005331_inLine +BABEL_OP3_404_32959_20141201_005331_outLine +BABEL_OP3_404_32998_20141112_054111_inLine +BABEL_OP3_404_34328_20141119_054513_outLine +BABEL_OP3_404_34328_20141119_055432_outLine +BABEL_OP3_404_34811_20141109_001009_inLine 
+BABEL_OP3_404_34811_20141109_001009_outLine +BABEL_OP3_404_34899_20150611_060602_outLine +BABEL_OP3_404_35008_20141201_023042_inLine +BABEL_OP3_404_35008_20141201_023042_outLine +BABEL_OP3_404_35181_20150526_211416_inLine +BABEL_OP3_404_35181_20150526_211416_outLine +BABEL_OP3_404_35706_20150523_015900_inLine +BABEL_OP3_404_35706_20150523_015900_outLine +BABEL_OP3_404_35786_20150604_015518_inLine +BABEL_OP3_404_35786_20150604_015518_outLine +BABEL_OP3_404_36017_20150528_192934_inLine +BABEL_OP3_404_36017_20150528_192934_outLine +BABEL_OP3_404_36039_20150526_230125_inLine +BABEL_OP3_404_36039_20150526_230125_outLine +BABEL_OP3_404_36059_20150601_023254_inLine +BABEL_OP3_404_36059_20150601_023254_outLine +BABEL_OP3_404_36059_20150601_033346_inLine +BABEL_OP3_404_36059_20150601_033346_outLine +BABEL_OP3_404_36147_20150211_013803_outLine +BABEL_OP3_404_36219_20141104_012216_inLine +BABEL_OP3_404_36219_20141104_012216_outLine +BABEL_OP3_404_36642_20150610_161207_inLine +BABEL_OP3_404_36642_20150610_161207_outLine +BABEL_OP3_404_37290_20141115_050457_inLine +BABEL_OP3_404_37290_20141115_050457_outLine +BABEL_OP3_404_38125_20150526_233108_inLine +BABEL_OP3_404_38125_20150526_233108_outLine +BABEL_OP3_404_38323_20150615_021843_inLine +BABEL_OP3_404_38340_20141103_231545_inLine +BABEL_OP3_404_38340_20141103_231545_outLine +BABEL_OP3_404_38554_20141010_224451_inLine +BABEL_OP3_404_38554_20141010_224451_outLine +BABEL_OP3_404_38664_20141030_175135_inLine +BABEL_OP3_404_38664_20141030_175135_outLine +BABEL_OP3_404_38979_20150503_202406_outLine +BABEL_OP3_404_39099_20150511_053646_outLine +BABEL_OP3_404_39307_20141022_200554_inLine +BABEL_OP3_404_39307_20141022_201758_inLine +BABEL_OP3_404_39426_20150527_181901_outLine +BABEL_OP3_404_39744_20141023_002710_inLine +BABEL_OP3_404_39893_20150611_034149_inLine +BABEL_OP3_404_39920_20150503_205354_outLine +BABEL_OP3_404_41097_20141129_055801_inLine +BABEL_OP3_404_41097_20141129_055801_outLine 
+BABEL_OP3_404_41272_20150503_232941_inLine +BABEL_OP3_404_41334_20150617_041322_inLine +BABEL_OP3_404_41400_20150515_021408_inLine +BABEL_OP3_404_41692_20150604_005657_inLine +BABEL_OP3_404_41692_20150604_005657_outLine +BABEL_OP3_404_41745_20141114_235452_inLine +BABEL_OP3_404_41745_20141114_235452_outLine +BABEL_OP3_404_42155_20141127_055149_inLine +BABEL_OP3_404_42619_20141130_012456_outLine +BABEL_OP3_404_42834_20141125_004837_inLine +BABEL_OP3_404_42834_20141125_004837_outLine +BABEL_OP3_404_42883_20150604_035732_inLine +BABEL_OP3_404_42883_20150604_035732_outLine +BABEL_OP3_404_43388_20141114_212210_inLine +BABEL_OP3_404_43388_20141114_214120_inLine +BABEL_OP3_404_43588_20150517_233637_inLine +BABEL_OP3_404_43789_20141120_011327_outLine +BABEL_OP3_404_44309_20150525_022635_inLine +BABEL_OP3_404_44309_20150525_022635_outLine +BABEL_OP3_404_44478_20150512_225118_inLine +BABEL_OP3_404_45106_20141119_050859_inLine +BABEL_OP3_404_45106_20141119_050859_outLine +BABEL_OP3_404_45374_20150122_014830_outLine +BABEL_OP3_404_45374_20150122_015920_outLine +BABEL_OP3_404_45459_20150525_020410_inLine +BABEL_OP3_404_45459_20150525_020410_outLine +BABEL_OP3_404_45699_20150205_021829_inLine +BABEL_OP3_404_45851_20150514_155157_inLine +BABEL_OP3_404_45851_20150514_155157_outLine +BABEL_OP3_404_45908_20150515_004218_outLine +BABEL_OP3_404_46310_20141015_051100_inLine +BABEL_OP3_404_46310_20141015_051100_outLine +BABEL_OP3_404_46315_20141129_012912_inLine +BABEL_OP3_404_46315_20141129_012912_outLine +BABEL_OP3_404_46688_20141015_211329_inLine +BABEL_OP3_404_46688_20141015_211329_outLine +BABEL_OP3_404_46712_20141027_224004_inLine +BABEL_OP3_404_46712_20141027_224004_outLine +BABEL_OP3_404_46974_20141128_055136_inLine +BABEL_OP3_404_46974_20141128_055136_outLine +BABEL_OP3_404_47156_20150625_025324_inLine +BABEL_OP3_404_47156_20150625_025324_outLine +BABEL_OP3_404_47823_20141201_044425_inLine +BABEL_OP3_404_47823_20141201_044425_outLine +BABEL_OP3_404_48016_20150615_000741_inLine 
+BABEL_OP3_404_48016_20150615_000741_outLine +BABEL_OP3_404_48610_20141013_011505_inLine +BABEL_OP3_404_48610_20141013_012904_inLine +BABEL_OP3_404_48663_20150512_202837_inLine +BABEL_OP3_404_48663_20150512_202837_outLine +BABEL_OP3_404_49306_20150524_003356_inLine +BABEL_OP3_404_49306_20150524_003356_outLine +BABEL_OP3_404_49630_20141128_020114_inLine +BABEL_OP3_404_49630_20141128_020114_outLine +BABEL_OP3_404_49767_20150613_050113_inLine +BABEL_OP3_404_49767_20150613_050113_outLine +BABEL_OP3_404_49775_20141011_005306_inLine +BABEL_OP3_404_49775_20141011_005306_outLine +BABEL_OP3_404_49945_20150610_154709_inLine +BABEL_OP3_404_50601_20141127_032527_inLine +BABEL_OP3_404_50601_20141127_032527_outLine +BABEL_OP3_404_50779_20141115_012852_inLine +BABEL_OP3_404_50779_20141115_012852_outLine +BABEL_OP3_404_50810_20141007_234432_inLine +BABEL_OP3_404_50810_20141007_234432_outLine +BABEL_OP3_404_51414_20150604_001601_inLine +BABEL_OP3_404_51414_20150604_001601_outLine +BABEL_OP3_404_51484_20141202_000325_inLine +BABEL_OP3_404_51484_20141202_000325_outLine +BABEL_OP3_404_51701_20150620_010924_outLine +BABEL_OP3_404_52070_20150620_014422_outLine +BABEL_OP3_404_52070_20150620_020559_outLine +BABEL_OP3_404_52404_20141125_004855_inLine +BABEL_OP3_404_52404_20141125_004855_outLine +BABEL_OP3_404_53063_20141201_005237_inLine +BABEL_OP3_404_53063_20141201_005237_outLine +BABEL_OP3_404_53072_20150518_015132_inLine +BABEL_OP3_404_53415_20150503_225920_inLine +BABEL_OP3_404_53415_20150503_225920_outLine +BABEL_OP3_404_53492_20150525_055025_inLine +BABEL_OP3_404_53492_20150525_055025_outLine +BABEL_OP3_404_53665_20150526_004549_inLine +BABEL_OP3_404_53917_20150503_205456_outLine +BABEL_OP3_404_53957_20141201_051933_inLine +BABEL_OP3_404_54477_20141211_033627_inLine +BABEL_OP3_404_54477_20141211_033627_outLine +BABEL_OP3_404_55013_20150525_222257_inLine +BABEL_OP3_404_55013_20150525_222257_outLine +BABEL_OP3_404_55267_20141130_212756_inLine 
+BABEL_OP3_404_55349_20150523_031602_inLine +BABEL_OP3_404_55349_20150523_031602_outLine +BABEL_OP3_404_56019_20150502_020750_inLine +BABEL_OP3_404_56019_20150502_020750_outLine +BABEL_OP3_404_56076_20150516_164959_inLine +BABEL_OP3_404_56076_20150516_164959_outLine +BABEL_OP3_404_56331_20150526_020747_inLine +BABEL_OP3_404_56331_20150526_020747_outLine +BABEL_OP3_404_56743_20141114_223719_inLine +BABEL_OP3_404_56743_20141114_223719_outLine +BABEL_OP3_404_57065_20141201_002920_inLine +BABEL_OP3_404_57219_20150618_045613_inLine +BABEL_OP3_404_57219_20150618_045613_outLine +BABEL_OP3_404_57464_20150523_224617_inLine +BABEL_OP3_404_57542_20150526_233832_inLine +BABEL_OP3_404_57542_20150526_233832_outLine +BABEL_OP3_404_57542_20150526_235003_inLine +BABEL_OP3_404_57542_20150526_235003_outLine +BABEL_OP3_404_58006_20150526_024205_inLine +BABEL_OP3_404_58006_20150526_024205_outLine +BABEL_OP3_404_58026_20150615_004130_inLine +BABEL_OP3_404_58026_20150615_004130_outLine +BABEL_OP3_404_58915_20150611_034220_outLine +BABEL_OP3_404_59307_20150504_003405_inLine +BABEL_OP3_404_59307_20150504_003405_outLine +BABEL_OP3_404_59864_20150602_014458_inLine +BABEL_OP3_404_60299_20150611_040929_inLine +BABEL_OP3_404_60310_20141130_231532_inLine +BABEL_OP3_404_60310_20141130_231532_outLine +BABEL_OP3_404_60352_20141201_060712_inLine +BABEL_OP3_404_60352_20141201_060712_outLine +BABEL_OP3_404_60352_20141201_061821_inLine +BABEL_OP3_404_60352_20141201_061821_outLine +BABEL_OP3_404_60458_20150609_021527_inLine +BABEL_OP3_404_60458_20150609_021527_outLine +BABEL_OP3_404_60477_20150613_223056_inLine +BABEL_OP3_404_60477_20150613_224002_inLine +BABEL_OP3_404_60498_20150606_022221_inLine +BABEL_OP3_404_60498_20150606_022221_outLine +BABEL_OP3_404_60706_20141020_215729_inLine +BABEL_OP3_404_60706_20141020_215729_outLine +BABEL_OP3_404_61888_20150504_171019_inLine +BABEL_OP3_404_61971_20150525_020101_outLine +BABEL_OP3_404_62360_20150517_033230_inLine +BABEL_OP3_404_62360_20150517_033230_outLine 
+BABEL_OP3_404_62724_20141130_200827_inLine +BABEL_OP3_404_62724_20141130_200827_outLine +BABEL_OP3_404_62852_20141013_054854_outLine +BABEL_OP3_404_63425_20141126_054504_inLine +BABEL_OP3_404_63481_20141020_221014_outLine +BABEL_OP3_404_63481_20141020_224225_outLine +BABEL_OP3_404_63670_20141130_050318_inLine +BABEL_OP3_404_63670_20141130_050318_outLine +BABEL_OP3_404_63906_20150525_050310_inLine +BABEL_OP3_404_63906_20150525_050310_outLine +BABEL_OP3_404_63999_20150610_041309_inLine +BABEL_OP3_404_64014_20150503_032745_inLine +BABEL_OP3_404_64014_20150503_032745_outLine +BABEL_OP3_404_64722_20150514_034208_outLine +BABEL_OP3_404_64759_20141014_044027_inLine +BABEL_OP3_404_64759_20141014_045519_inLine +BABEL_OP3_404_64796_20141022_055826_inLine +BABEL_OP3_404_65561_20141124_060558_inLine +BABEL_OP3_404_65561_20141124_060558_outLine +BABEL_OP3_404_65640_20150528_211835_inLine +BABEL_OP3_404_65640_20150528_211835_outLine +BABEL_OP3_404_66967_20141008_202611_inLine +BABEL_OP3_404_66967_20141008_202611_outLine +BABEL_OP3_404_67152_20150503_201836_inLine +BABEL_OP3_404_67152_20150503_201836_outLine +BABEL_OP3_404_67304_20150211_054416_inLine +BABEL_OP3_404_67304_20150211_054416_outLine +BABEL_OP3_404_67552_20141126_011955_inLine +BABEL_OP3_404_67552_20141126_011955_outLine +BABEL_OP3_404_68306_20141126_180315_inLine +BABEL_OP3_404_68306_20141126_180315_outLine +BABEL_OP3_404_69096_20150512_165126_inLine +BABEL_OP3_404_69096_20150512_165126_outLine +BABEL_OP3_404_69153_20141130_221412_inLine +BABEL_OP3_404_69153_20141130_221412_outLine +BABEL_OP3_404_69153_20141130_222842_inLine +BABEL_OP3_404_69153_20141130_222842_outLine +BABEL_OP3_404_69474_20141128_051323_outLine +BABEL_OP3_404_69633_20141129_051648_inLine +BABEL_OP3_404_69633_20141129_051648_outLine +BABEL_OP3_404_69636_20141126_061322_inLine +BABEL_OP3_404_69636_20141126_061322_outLine +BABEL_OP3_404_69885_20150503_011226_inLine +BABEL_OP3_404_69885_20150503_011226_outLine 
+BABEL_OP3_404_69937_20150620_015912_inLine +BABEL_OP3_404_69964_20150524_015556_inLine +BABEL_OP3_404_69964_20150524_015556_outLine +BABEL_OP3_404_69982_20150625_035440_outLine +BABEL_OP3_404_70221_20141124_052004_inLine +BABEL_OP3_404_70221_20141124_052004_outLine +BABEL_OP3_404_70460_20150527_015340_inLine +BABEL_OP3_404_70460_20150527_015340_outLine +BABEL_OP3_404_70526_20150501_015444_inLine +BABEL_OP3_404_70526_20150501_015444_outLine +BABEL_OP3_404_70713_20150527_013058_inLine +BABEL_OP3_404_70713_20150527_013058_outLine +BABEL_OP3_404_71189_20150523_005918_inLine +BABEL_OP3_404_71189_20150523_005918_outLine +BABEL_OP3_404_71278_20150211_052730_inLine +BABEL_OP3_404_71278_20150211_052730_outLine +BABEL_OP3_404_71278_20150211_054040_inLine +BABEL_OP3_404_71278_20150211_054040_outLine +BABEL_OP3_404_71333_20141102_023503_inLine +BABEL_OP3_404_71333_20141102_023503_outLine +BABEL_OP3_404_71460_20150206_015309_outLine +BABEL_OP3_404_71559_20141210_220929_outLine +BABEL_OP3_404_71780_20141105_055543_inLine +BABEL_OP3_404_71780_20141105_055543_outLine +BABEL_OP3_404_72319_20150502_041426_inLine +BABEL_OP3_404_72319_20150502_041426_outLine +BABEL_OP3_404_72733_20150515_044419_inLine +BABEL_OP3_404_72733_20150515_044419_outLine +BABEL_OP3_404_73072_20141012_012029_inLine +BABEL_OP3_404_73072_20141012_012029_outLine +BABEL_OP3_404_73258_20141117_010123_inLine +BABEL_OP3_404_73258_20141117_010123_outLine +BABEL_OP3_404_73964_20150512_205010_inLine +BABEL_OP3_404_73964_20150512_205010_outLine +BABEL_OP3_404_74728_20150503_042547_inLine +BABEL_OP3_404_74728_20150503_042547_outLine +BABEL_OP3_404_75465_20141129_223330_outLine +BABEL_OP3_404_75975_20150127_051140_outLine +BABEL_OP3_404_76126_20141201_202238_inLine +BABEL_OP3_404_76126_20141201_202238_outLine +BABEL_OP3_404_76238_20141129_223455_inLine +BABEL_OP3_404_76238_20141129_223455_outLine +BABEL_OP3_404_76372_20150601_014341_inLine +BABEL_OP3_404_76372_20150601_014341_outLine 
+BABEL_OP3_404_76444_20141127_032124_inLine +BABEL_OP3_404_76444_20141127_032124_outLine +BABEL_OP3_404_76482_20150618_063131_outLine +BABEL_OP3_404_76683_20141110_191551_inLine +BABEL_OP3_404_76683_20141110_191551_outLine +BABEL_OP3_404_76837_20150124_222250_outLine +BABEL_OP3_404_76970_20150625_191722_inLine +BABEL_OP3_404_77146_20141019_060916_inLine +BABEL_OP3_404_77242_20150612_024655_inLine +BABEL_OP3_404_77567_20141021_021210_inLine +BABEL_OP3_404_77567_20141021_021210_outLine +BABEL_OP3_404_77803_20141020_030844_inLine +BABEL_OP3_404_77803_20141020_030844_outLine +BABEL_OP3_404_78454_20141115_043455_inLine +BABEL_OP3_404_78749_20150620_025728_inLine +BABEL_OP3_404_78749_20150620_025728_outLine +BABEL_OP3_404_79190_20141108_232204_inLine +BABEL_OP3_404_79190_20141108_232204_outLine +BABEL_OP3_404_79590_20141129_025808_outLine +BABEL_OP3_404_79820_20141104_045340_inLine +BABEL_OP3_404_79820_20141104_045340_outLine +BABEL_OP3_404_79858_20141015_200446_inLine +BABEL_OP3_404_79898_20150620_022648_inLine +BABEL_OP3_404_79898_20150620_022648_outLine +BABEL_OP3_404_79898_20150620_024014_inLine +BABEL_OP3_404_79898_20150620_024014_outLine +BABEL_OP3_404_80069_20150614_233606_inLine +BABEL_OP3_404_80069_20150614_233606_outLine +BABEL_OP3_404_80306_20141119_003833_inLine +BABEL_OP3_404_80306_20141119_003833_outLine +BABEL_OP3_404_80306_20141119_005121_inLine +BABEL_OP3_404_80306_20141119_005121_outLine +BABEL_OP3_404_80559_20141022_010255_inLine +BABEL_OP3_404_80655_20150525_221544_inLine +BABEL_OP3_404_80655_20150525_221544_outLine +BABEL_OP3_404_80897_20141119_233718_inLine +BABEL_OP3_404_80897_20141119_233718_outLine +BABEL_OP3_404_81149_20150525_003741_inLine +BABEL_OP3_404_81149_20150525_003741_outLine +BABEL_OP3_404_81427_20141030_015136_inLine +BABEL_OP3_404_81427_20141030_015136_outLine +BABEL_OP3_404_81854_20150610_060437_inLine +BABEL_OP3_404_82626_20150615_014517_inLine +BABEL_OP3_404_82863_20141119_044230_inLine +BABEL_OP3_404_82863_20141119_044230_outLine 
+BABEL_OP3_404_83651_20141102_170912_inLine +BABEL_OP3_404_83651_20141102_170912_outLine +BABEL_OP3_404_83771_20150604_012300_outLine +BABEL_OP3_404_83974_20150617_022055_inLine +BABEL_OP3_404_84125_20141018_023340_inLine +BABEL_OP3_404_84125_20141018_023340_outLine +BABEL_OP3_404_84458_20141130_053628_outLine +BABEL_OP3_404_84815_20141127_011952_inLine +BABEL_OP3_404_84815_20141127_013345_inLine +BABEL_OP3_404_85047_20141117_014630_inLine +BABEL_OP3_404_85047_20141117_014630_outLine +BABEL_OP3_404_85340_20141103_022707_inLine +BABEL_OP3_404_85340_20141103_022707_outLine +BABEL_OP3_404_86597_20150612_170328_inLine +BABEL_OP3_404_86597_20150612_170328_outLine +BABEL_OP3_404_87074_20141105_190107_outLine +BABEL_OP3_404_87777_20141127_040747_inLine +BABEL_OP3_404_87777_20141127_040747_outLine +BABEL_OP3_404_87871_20141201_023608_inLine +BABEL_OP3_404_87871_20141201_023608_outLine +BABEL_OP3_404_87921_20141201_023029_inLine +BABEL_OP3_404_87921_20141201_023029_outLine +BABEL_OP3_404_88873_20141028_190127_inLine +BABEL_OP3_404_88873_20141028_190127_outLine +BABEL_OP3_404_89330_20150616_002908_inLine +BABEL_OP3_404_89330_20150616_002908_outLine +BABEL_OP3_404_89943_20141105_211847_outLine +BABEL_OP3_404_90347_20141119_012016_inLine +BABEL_OP3_404_90347_20141119_012016_outLine +BABEL_OP3_404_90760_20150611_151739_inLine +BABEL_OP3_404_90760_20150611_151739_outLine +BABEL_OP3_404_90832_20150616_012728_inLine +BABEL_OP3_404_90832_20150616_012728_outLine +BABEL_OP3_404_91383_20150618_035815_inLine +BABEL_OP3_404_91475_20150614_034536_inLine +BABEL_OP3_404_91581_20141129_045608_inLine +BABEL_OP3_404_91581_20141129_045608_outLine +BABEL_OP3_404_91581_20141129_050730_inLine +BABEL_OP3_404_91581_20141129_050730_outLine +BABEL_OP3_404_91593_20150611_021825_inLine +BABEL_OP3_404_91593_20150611_021825_outLine +BABEL_OP3_404_91884_20150503_022858_inLine +BABEL_OP3_404_91884_20150503_022858_outLine +BABEL_OP3_404_91888_20150512_191012_inLine 
+BABEL_OP3_404_91888_20150512_191012_outLine +BABEL_OP3_404_91891_20141129_005825_inLine +BABEL_OP3_404_91891_20141129_005825_outLine +BABEL_OP3_404_91944_20141022_021002_inLine +BABEL_OP3_404_91977_20141122_230420_outLine +BABEL_OP3_404_92176_20141119_195614_inLine +BABEL_OP3_404_92176_20141119_195614_outLine +BABEL_OP3_404_92281_20150625_185123_inLine +BABEL_OP3_404_92757_20150525_200048_inLine +BABEL_OP3_404_92757_20150525_200048_outLine +BABEL_OP3_404_92792_20150503_182854_outLine +BABEL_OP3_404_92792_20150525_025523_outLine +BABEL_OP3_404_92942_20141120_022830_inLine +BABEL_OP3_404_92942_20141120_022830_outLine +BABEL_OP3_404_93007_20150615_051230_inLine +BABEL_OP3_404_93007_20150615_051230_outLine +BABEL_OP3_404_93858_20150611_043732_inLine +BABEL_OP3_404_94002_20141119_015307_inLine +BABEL_OP3_404_94002_20141119_015307_outLine +BABEL_OP3_404_94333_20141020_024439_outLine +BABEL_OP3_404_94487_20150518_005132_outLine +BABEL_OP3_404_95077_20141201_055702_outLine +BABEL_OP3_404_95269_20141105_221810_inLine +BABEL_OP3_404_95269_20141105_221810_outLine +BABEL_OP3_404_95338_20150610_211203_inLine +BABEL_OP3_404_95338_20150610_211203_outLine +BABEL_OP3_404_95399_20141119_001023_inLine +BABEL_OP3_404_95399_20141119_001023_outLine +BABEL_OP3_404_95583_20141019_010741_inLine +BABEL_OP3_404_95583_20141019_010741_outLine +BABEL_OP3_404_96059_20150524_042224_outLine +BABEL_OP3_404_96205_20141119_033053_inLine +BABEL_OP3_404_96205_20141119_033053_outLine +BABEL_OP3_404_96205_20141119_034909_inLine +BABEL_OP3_404_96205_20141119_034909_outLine +BABEL_OP3_404_96247_20150526_202623_outLine +BABEL_OP3_404_96690_20141117_053054_inLine +BABEL_OP3_404_96690_20141117_053054_outLine +BABEL_OP3_404_96808_20150609_034129_inLine +BABEL_OP3_404_97097_20150601_042649_outLine +BABEL_OP3_404_97136_20150528_011250_inLine +BABEL_OP3_404_97136_20150528_011250_outLine +BABEL_OP3_404_97911_20150613_195820_outLine +BABEL_OP3_404_98165_20141030_214051_inLine 
+BABEL_OP3_404_98165_20141030_214051_outLine +BABEL_OP3_404_98192_20150617_021906_outLine +BABEL_OP3_404_98489_20141102_002030_inLine +BABEL_OP3_404_98489_20141102_004054_inLine +BABEL_OP3_404_98678_20150528_021605_inLine +BABEL_OP3_404_98678_20150528_023029_inLine +BABEL_OP3_404_99289_20150521_220314_inLine +BABEL_OP3_404_99289_20150521_220314_outLine +BABEL_OP3_404_99289_20150521_222144_inLine +BABEL_OP3_404_99289_20150521_222144_outLine +BABEL_OP3_404_99718_20141019_051850_inLine +BABEL_OP3_404_99718_20141019_051850_outLine +BABEL_OP3_404_99718_20141019_053305_inLine +BABEL_OP3_404_99718_20141019_053305_outLine +BABEL_OP3_404_99732_20141130_232553_inLine +BABEL_OP3_404_99732_20141130_232553_outLine +BABEL_OP3_404_99920_20141022_052026_inLine diff --git a/egs/babel/s5d/local/arpa2G.sh b/egs/babel/s5d/local/arpa2G.sh index 40c269fbb22..887b393b459 100755 --- a/egs/babel/s5d/local/arpa2G.sh +++ b/egs/babel/s5d/local/arpa2G.sh @@ -85,7 +85,8 @@ if [ ! -z "$oov_prob_file" ]; then print "$log10prob $word\n"; } }} print STDERR "Ceilinged $ceilinged unk-probs\n";' \ - $oov_prob_file $min_prob $unk_fraction | gzip -c > $destdir/lm_tmp.gz + $oov_prob_file $min_prob $unk_fraction | \ + ngram -unk -lm - -write-lm $destdir/lm_tmp.gz lmfile=$destdir/lm_tmp.gz fi diff --git a/egs/babel/s5d/local/chain/run_blstm.sh b/egs/babel/s5d/local/chain/run_blstm.sh index 6d13c55fc7d..f098604d04a 100755 --- a/egs/babel/s5d/local/chain/run_blstm.sh +++ b/egs/babel/s5d/local/chain/run_blstm.sh @@ -136,7 +136,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. 
diff --git a/egs/babel/s5d/local/chain/run_blstm_bab1.sh b/egs/babel/s5d/local/chain/run_blstm_bab1.sh index ba8da0e14bc..95c7e9f28aa 100755 --- a/egs/babel/s5d/local/chain/run_blstm_bab1.sh +++ b/egs/babel/s5d/local/chain/run_blstm_bab1.sh @@ -136,7 +136,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_blstm_bab2.sh b/egs/babel/s5d/local/chain/run_blstm_bab2.sh index f5d698e262c..a6dd4cb9566 100755 --- a/egs/babel/s5d/local/chain/run_blstm_bab2.sh +++ b/egs/babel/s5d/local/chain/run_blstm_bab2.sh @@ -136,7 +136,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_blstm_bab3.sh b/egs/babel/s5d/local/chain/run_blstm_bab3.sh index 7ad51204c6f..52f085f8942 100755 --- a/egs/babel/s5d/local/chain/run_blstm_bab3.sh +++ b/egs/babel/s5d/local/chain/run_blstm_bab3.sh @@ -136,7 +136,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! 
-d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_blstm_bab4.sh b/egs/babel/s5d/local/chain/run_blstm_bab4.sh index 72aaeb8778f..47704e80ae4 100755 --- a/egs/babel/s5d/local/chain/run_blstm_bab4.sh +++ b/egs/babel/s5d/local/chain/run_blstm_bab4.sh @@ -135,7 +135,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_blstm_bab5.sh b/egs/babel/s5d/local/chain/run_blstm_bab5.sh index 1bae225022e..73c6a4089ed 100755 --- a/egs/babel/s5d/local/chain/run_blstm_bab5.sh +++ b/egs/babel/s5d/local/chain/run_blstm_bab5.sh @@ -135,7 +135,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. 
diff --git a/egs/babel/s5d/local/chain/run_blstm_xconfig.sh b/egs/babel/s5d/local/chain/run_blstm_xconfig.sh new file mode 100755 index 00000000000..27e1a571ad0 --- /dev/null +++ b/egs/babel/s5d/local/chain/run_blstm_xconfig.sh @@ -0,0 +1,206 @@ +#!/bin/bash + + +# by default, with cleanup: +# local/chain/run_blstm.sh +# %WER 46.8 | 19252 60586 | 57.6 28.5 13.8 4.5 46.8 31.7 | -0.643 | exp/chain_cleaned/blstm_sp_bi/decode_dev10h.pem/score_8/penalty_0.25/dev10h.pem.ctm.sys + +set -e -o pipefail + +# First the options that are passed through to run_ivector_common.sh +# (some of which are also used in this script directly). +stage=17 +nj=30 +min_seg_len=1.55 +train_set=train_cleaned +gmm=tri5_cleaned # the gmm for the target data +langdir=data/langp/tri5_ali +num_threads_ubm=12 +nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned + +# The rest are configs specific to this script. Most of the parameters +# are just hardcoded at this level, in the commands below. +train_stage=-10 +tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration. +blstm_affix=_xconfig #affix for TDNN directory, e.g. "a" or "b", in case we change the configuration. +common_egs_dir=exp/chain_cleaned/blstm_sp_bi/egs # you can set this to use previously dumped egs. +common_egs_dir= # you can set this to use previously dumped egs. + +# End configuration section. +echo "$0 $@" # Print the command line for logging + +. ./cmd.sh +. ./path.sh +. ./utils/parse_options.sh + + +if ! cuda-compiled; then + cat <data/lang_chain/topo + fi +fi + +if [ $stage -le 15 ]; then + # Get the alignments as lattices (gives the chain training more freedom). + # use the same num-jobs as the alignments + steps/align_fmllr_lats.sh --nj 100 --cmd "$train_cmd" ${lores_train_data_dir} \ + $langdir $gmm_dir $lat_dir + rm $lat_dir/fsts.*.gz # save space +fi + +if [ $stage -le 16 ]; then + # Build a tree using our new topology. 
We know we have alignments for the + # speed-perturbed data (local/nnet3/run_ivector_common.sh made them), so use + # those. + if [ -f $tree_dir/final.mdl ]; then + echo "$0: $tree_dir/final.mdl already exists, refusing to overwrite it." + exit 1; + fi + steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \ + --context-opts "--context-width=2 --central-position=1" \ + --leftmost-questions-truncate -1 \ + --cmd "$train_cmd" 4000 ${lores_train_data_dir} data/lang_chain $ali_dir $tree_dir +fi + +if [ $stage -le 17 ]; then + mkdir -p $dir + + #echo "$0: creating neural net configs"; + #steps/nnet3/lstm/make_configs.py \ + # --self-repair-scale-nonlinearity 0.00001 \ + # --self-repair-scale-clipgradient 1.0 \ + # $dir/configs || exit 1; + echo "$0: creating neural net configs using the xconfig parser"; + + label_delay=0 + xent_regularize=0.1 + num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}') + learning_rate_factor=$(echo "print 0.5/$xent_regularize" | python) + + + mkdir -p $dir/configs + cat < $dir/configs/network.xconfig + input dim=100 name=ivector + input dim=40 name=input + # please note that it is important to have input layer with the name=input + # as the layer immediately preceding the fixed-affine-layer to enable + # the use of short notation for the descriptor + fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat + # check steps/libs/nnet3/xconfig/lstm.py for the other options and defaults + lstmp-layer name=blstm1-forward input=lda cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 + lstmp-layer name=blstm1-backward input=lda cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=3 + lstmp-layer name=blstm2-forward input=Append(blstm1-forward, blstm1-backward) cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 + lstmp-layer name=blstm2-backward 
input=Append(blstm1-forward, blstm1-backward) cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=3 + lstmp-layer name=blstm3-forward input=Append(blstm2-forward, blstm2-backward) cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=-3 + lstmp-layer name=blstm3-backward input=Append(blstm2-forward, blstm2-backward) cell-dim=512 recurrent-projection-dim=128 non-recurrent-projection-dim=128 delay=3 + ## adding the layers for chain branch + output-layer name=output input=Append(blstm3-forward, blstm3-backward) output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5 + # adding the layers for xent branch + # This block prints the configs for a separate output that will be + # trained with a cross-entropy objective in the 'chain' models... this + # has the effect of regularizing the hidden parts of the model. we use + # 0.5 / args.xent_regularize as the learning rate factor- the factor of + # 0.5 / args.xent_regularize is suitable as it means the xent + # final-layer learns at a rate independent of the regularization + # constant; and the 0.5 was tuned so as to make the relative progress + # similar in the xent and regular final layers. + output-layer name=output-xent input=Append(blstm3-forward, blstm3-backward) output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5 +EOF + steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/ + +fi + +if [ $stage -le 18 ]; then + if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage + fi + [ ! -d $dir/egs ] && mkdir -p $dir/egs/ + touch $dir/egs/.nodelete # keep egs around when that run dies. 
+ + steps/nnet3/chain/train.py --stage $train_stage \ + --cmd "$decode_cmd" \ + --feat.online-ivector-dir $train_ivector_dir \ + --feat.cmvn-opts "--norm-means=false --norm-vars=false" \ + --chain.xent-regularize 0.1 \ + --chain.leaky-hmm-coefficient 0.1 \ + --chain.l2-regularize 0.00005 \ + --chain.apply-deriv-weights false \ + --chain.lm-opts="--num-extra-lm-states=2000" \ + --egs.dir "$common_egs_dir" \ + --egs.opts "--frames-overlap-per-eg 0" \ + --egs.chunk-width 150 \ + --trainer.num-chunk-per-minibatch 128 \ + --trainer.frames-per-iter 1500000 \ + --trainer.num-epochs 4 \ + --trainer.optimization.num-jobs-initial 2 \ + --trainer.optimization.num-jobs-final 12 \ + --trainer.optimization.initial-effective-lrate 0.001 \ + --trainer.optimization.final-effective-lrate 0.0001 \ + --trainer.max-param-change 2.0 \ + --cleanup.remove-egs true \ + --feat-dir $train_data_dir \ + --tree-dir $tree_dir \ + --lat-dir $lat_dir \ + --dir $dir +fi + + + +if [ $stage -le 19 ]; then + # Note: it might appear that this data/lang_chain directory is mismatched, and it is as + # far as the 'topo' is concerned, but this script doesn't read the 'topo' from + # the lang directory. + utils/mkgraph.sh --left-biphone --self-loop-scale 1.0 data/langp_test $dir $dir/graph +fi + +exit 0 diff --git a/egs/babel/s5d/local/chain/run_ivector_common.sh b/egs/babel/s5d/local/chain/run_ivector_common.sh index 7354d59465b..696fd14b45f 100755 --- a/egs/babel/s5d/local/chain/run_ivector_common.sh +++ b/egs/babel/s5d/local/chain/run_ivector_common.sh @@ -71,7 +71,8 @@ if [ $stage -le 2 ]; then utils/copy_data_dir.sh data/${train_set}_sp data/${train_set}_sp_hires mfccdir=data/${train_set}_sp_hires/data if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! 
-d $mfccdir/storage ]; then - utils/create_split_dir.pl /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5/$mfccdir/storage $mfccdir/storage + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$mfccdir/storage $mfccdir/storage fi # do volume-perturbation on the training data prior to extracting hires @@ -171,7 +172,8 @@ if [ $stage -le 7 ]; then # valid for the non-'max2' data, the utterance list is the same. ivectordir=exp/nnet3${nnet3_affix}/ivectors_${train_set}_sp_hires_comb if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $ivectordir/storage ]; then - utils/create_split_dir.pl /export/b{15,16,17,18}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5/$ivectordir/storage $ivectordir/storage + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$ivectordir/storage $ivectordir/storage fi # We extract iVectors on the speed-perturbed training data after combining # short segments, which will be what we train the system on. With diff --git a/egs/babel/s5d/local/chain/run_tdnn.sh b/egs/babel/s5d/local/chain/run_tdnn.sh index 3ce53fa9292..2d9b6db75b7 100755 --- a/egs/babel/s5d/local/chain/run_tdnn.sh +++ b/egs/babel/s5d/local/chain/run_tdnn.sh @@ -133,7 +133,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. 
diff --git a/egs/babel/s5d/local/chain/run_tdnn_bab1.sh b/egs/babel/s5d/local/chain/run_tdnn_bab1.sh index db82c0f358a..0fa4020977c 100755 --- a/egs/babel/s5d/local/chain/run_tdnn_bab1.sh +++ b/egs/babel/s5d/local/chain/run_tdnn_bab1.sh @@ -133,7 +133,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_tdnn_bab2.sh b/egs/babel/s5d/local/chain/run_tdnn_bab2.sh index 51387901683..ea9d5959c75 100755 --- a/egs/babel/s5d/local/chain/run_tdnn_bab2.sh +++ b/egs/babel/s5d/local/chain/run_tdnn_bab2.sh @@ -133,7 +133,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. 
diff --git a/egs/babel/s5d/local/chain/run_tdnn_bab3.sh b/egs/babel/s5d/local/chain/run_tdnn_bab3.sh index 098c3de0482..2973a2c9f02 100755 --- a/egs/babel/s5d/local/chain/run_tdnn_bab3.sh +++ b/egs/babel/s5d/local/chain/run_tdnn_bab3.sh @@ -3,7 +3,6 @@ # by default, with cleanup: # local/chain/run_tdnn.sh - # %WER 46.7 | 19252 60586 | 57.4 26.4 16.2 4.0 46.7 31.6 | -0.469 | exp/chain_cleaned/tdnnbab3_sp_bi/decode_dev10h.pem/score_9/penalty_0.0/dev10h.pem.ctm.sys set -e -o pipefail @@ -134,7 +133,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. diff --git a/egs/babel/s5d/local/chain/run_tdnn_bab4.sh b/egs/babel/s5d/local/chain/run_tdnn_bab4.sh index 5831cfc28f0..bd2eba9cb8b 100755 --- a/egs/babel/s5d/local/chain/run_tdnn_bab4.sh +++ b/egs/babel/s5d/local/chain/run_tdnn_bab4.sh @@ -133,7 +133,7 @@ fi if [ $stage -le 18 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{5,6,7,8}/$USER/kaldi-data/egs/ami-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi [ ! -d $dir/egs ] && mkdir -p $dir/egs/ touch $dir/egs/.nodelete # keep egs around when that run dies. 
diff --git a/egs/babel/s5d/local/chain/run_tdnn_lstm_1e.sh b/egs/babel/s5d/local/chain/run_tdnn_lstm_1e.sh new file mode 100755 index 00000000000..ec8366492d7 --- /dev/null +++ b/egs/babel/s5d/local/chain/run_tdnn_lstm_1e.sh @@ -0,0 +1,227 @@ +#!/bin/bash + +# From egs/swbdrun_tdnn_lstm_1e.sh + +set -e -o pipefail -u + +# configs for 'chain' +stage=0 +nj=30 +min_seg_len=1.55 +train_set=train_cleaned +gmm=tri5_cleaned # the gmm for the target data +langdir=data/langp/tri5_ali +num_threads_ubm=12 +nnet3_affix=_cleaned # cleanup affix for nnet3 and chain dirs, e.g. _cleaned + +# The rest are configs specific to this script. Most of the parameters +# are just hardcoded at this level, in the commands below. +train_stage=-10 +tree_affix= # affix for tree directory, e.g. "a" or "b", in case we change the configuration. +blstm_affix=bab1 #affix for TDNN directory, e.g. "a" or "b", in case we change the configuration. +train_stage=-10 +get_egs_stage=-10 +speed_perturb=true +dir=exp/chain/tdnn_lstm_1e # Note: _sp will get added to this if $speed_perturb == true. +decode_iter= +decode_nj=50 + +# training options +xent_regularize=0.01 +self_repair_scale=0.00001 +label_delay=5 + +chunk_left_context=40 +chunk_right_context=0 +# we'll put chunk-left-context-initial=0 and chunk-right-context-final=0 +# directly without variables. +frames_per_chunk=140,100,160 + +# (non-looped) decoding options +frames_per_chunk_primary=$(echo $frames_per_chunk | cut -d, -f1) +extra_left_context=50 +extra_right_context=0 +# we'll put extra-left-context-initial=0 and extra-right-context-final=0 +# directly without variables. + + +remove_egs=false +common_egs_dir=exp/chain_cleaned/blstm_sp_bi/egs # you can set this to use previously dumped egs. + +# End configuration section. +echo "$0 $@" # Print the command line for logging + +. ./cmd.sh +. ./path.sh +. ./utils/parse_options.sh + + +if ! cuda-compiled; then + cat <$lang/topo +fi + +if [ $stage -le 11 ]; then + # Build a tree using our new topology. 
+ steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \ + --context-opts "--context-width=2 --central-position=1" \ + --cmd "$train_cmd" 7000 data/$train_set $lang $ali_dir $tree_dir +fi + +if [ $stage -le 12 ]; then + echo "$0: creating neural net configs using the xconfig parser"; + + num_targets=$(tree-info $tree_dir/tree |grep num-pdfs|awk '{print $2}') + [ -z $num_targets ] && { echo "$0: error getting num-targets"; exit 1; } + learning_rate_factor=$(echo "print 0.5/$xent_regularize" | python) + + lstm_opts="decay-time=20" + + mkdir -p $dir/configs + cat < $dir/configs/network.xconfig + input dim=100 name=ivector + input dim=40 name=input + + # please note that it is important to have input layer with the name=input + # as the layer immediately preceding the fixed-affine-layer to enable + # the use of short notation for the descriptor + fixed-affine-layer name=lda input=Append(-2,-1,0,1,2,ReplaceIndex(ivector, t, 0)) affine-transform-file=$dir/configs/lda.mat + + # the first splicing is moved before the lda layer, so no splicing here + relu-renorm-layer name=tdnn1 dim=1024 + relu-renorm-layer name=tdnn2 input=Append(-1,0,1) dim=1024 + relu-renorm-layer name=tdnn3 input=Append(-1,0,1) dim=1024 + + # check steps/libs/nnet3/xconfig/lstm.py for the other options and defaults + fast-lstmp-layer name=fastlstm1 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts + relu-renorm-layer name=tdnn4 input=Append(-3,0,3) dim=1024 + relu-renorm-layer name=tdnn5 input=Append(-3,0,3) dim=1024 + fast-lstmp-layer name=fastlstm2 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts + relu-renorm-layer name=tdnn6 input=Append(-3,0,3) dim=1024 + relu-renorm-layer name=tdnn7 input=Append(-3,0,3) dim=1024 + fast-lstmp-layer name=fastlstm3 cell-dim=1024 recurrent-projection-dim=256 non-recurrent-projection-dim=256 delay=-3 $lstm_opts + + ## adding the layers for chain branch + output-layer 
name=output input=fastlstm3 output-delay=$label_delay include-log-softmax=false dim=$num_targets max-change=1.5 + + # adding the layers for xent branch + # This block prints the configs for a separate output that will be + # trained with a cross-entropy objective in the 'chain' models... this + # has the effect of regularizing the hidden parts of the model. we use + # 0.5 / args.xent_regularize as the learning rate factor- the factor of + # 0.5 / args.xent_regularize is suitable as it means the xent + # final-layer learns at a rate independent of the regularization + # constant; and the 0.5 was tuned so as to make the relative progress + # similar in the xent and regular final layers. + output-layer name=output-xent input=fastlstm3 output-delay=$label_delay dim=$num_targets learning-rate-factor=$learning_rate_factor max-change=1.5 + +EOF + steps/nnet3/xconfig_to_configs.py --xconfig-file $dir/configs/network.xconfig --config-dir $dir/configs/ +fi + +if [ $stage -le 13 ]; then + if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage + fi + [ ! -d $dir/egs ] && mkdir -p $dir/egs/ + touch $dir/egs/.nodelete # keep egs around when that run dies. 
+ + # (an alternative minibatch setting once tried: --trainer.num-chunk-per-minibatch 128,64) + steps/nnet3/chain/train.py --stage $train_stage \ + --cmd "$decode_cmd" \ + --feat.online-ivector-dir $train_ivector_dir \ + --feat.cmvn-opts "--norm-means=false --norm-vars=false" \ + --chain.xent-regularize $xent_regularize \ + --chain.leaky-hmm-coefficient 0.1 \ + --chain.l2-regularize 0.00005 \ + --chain.apply-deriv-weights false \ + --chain.lm-opts="--num-extra-lm-states=2000" \ + --egs.dir "$common_egs_dir" \ + --egs.opts "--frames-overlap-per-eg 0" \ + --trainer.num-chunk-per-minibatch 128 \ + --trainer.frames-per-iter 1500000 \ + --trainer.num-epochs 4 \ + --trainer.optimization.num-jobs-initial 2 \ + --trainer.optimization.num-jobs-final 6 \ + --trainer.optimization.shrink-value 0.99 \ + --trainer.optimization.initial-effective-lrate 0.001 \ + --trainer.optimization.final-effective-lrate 0.0001 \ + --trainer.max-param-change 2.0 \ + --cleanup.remove-egs $remove_egs \ + --feat-dir data/${train_set}_hires \ + --trainer.optimization.momentum 0.0 \ + --trainer.deriv-truncate-margin 8 \ + --egs.stage $get_egs_stage \ + --egs.chunk-width $frames_per_chunk \ + --egs.chunk-left-context $chunk_left_context \ + --egs.chunk-right-context $chunk_right_context \ + --egs.chunk-left-context-initial 0 \ + --egs.chunk-right-context-final 0 \ + --tree-dir $tree_dir \ + --lat-dir $lat_dir \ + --dir $dir +fi + +if [ $stage -le 14 ]; then + # Note: it might appear that this $lang directory is mismatched, and it is as + # far as the 'topo' is concerned, but this script doesn't read the 'topo' from + # the lang directory. + utils/mkgraph.sh --self-loop-scale 1.0 data/lang_sw1_tg $dir $dir/graph_sw1_tg +fi + +exit 0 diff --git a/egs/babel/s5d/local/check_tools.sh b/egs/babel/s5d/local/check_tools.sh index ca8800def41..2c96f8445d1 100755 --- a/egs/babel/s5d/local/check_tools.sh +++ b/egs/babel/s5d/local/check_tools.sh @@ -18,20 +18,20 @@ [ -f ./path.sh ] && . 
./path.sh sph2pipe=`command -v sph2pipe 2>/dev/null` \ - || { echo >&2 "sph2pipe not found on PATH. Did you run make in the $KALDI_ROOT/tools directory?"; return 1; } + || { echo >&2 "sph2pipe not found on PATH. Did you run make in the $KALDI_ROOT/tools directory?"; exit 1; } srilm=`command -v ngram 2>/dev/null` \ - || { echo >&2 "srilm not found on PATH. Please use the script $KALDI_ROOT/tools/extras/install_srilm.sh"; return 1; } + || { echo >&2 "srilm not found on PATH. Please use the script $KALDI_ROOT/tools/extras/install_srilm.sh"; exit 1; } sox=`command -v sox 2>/dev/null` \ - || { echo >&2 "sox not found on PATH. Please install it manually (you will need version 14.4.0 and higher)."; return 1; } + || { echo >&2 "sox not found on PATH. Please install it manually (you will need version 14.4.0 and higher)."; exit 1; } # If sox is found on path, check if the version is correct if [ ! -z "$sox" ]; then sox_version=`$sox --version 2>&1| head -1 | sed -e 's?.*: ??' -e 's?.* ??'` if [[ ! $sox_version =~ v14.4.* ]]; then echo "Unsupported sox version $sox_version found on path. You will need version v14.4.0 and higher." - return 1 + exit 1 fi fi diff --git a/egs/babel/s5d/local/extend_lexicon.sh b/egs/babel/s5d/local/extend_lexicon.sh index c930b1729e0..41b244f110b 100755 --- a/egs/babel/s5d/local/extend_lexicon.sh +++ b/egs/babel/s5d/local/extend_lexicon.sh @@ -148,20 +148,10 @@ cp $input_lexicon $toplevel_dir/input_lexicon.txt # just to have a record of wh loc=`which ngram-count`; if [ -z $loc ]; then - if uname -a | grep 64 >/dev/null; then # some kind of 64 bit... - sdir=`pwd`/../../../tools/srilm/bin/i686-m64 - else - sdir=`pwd`/../../../tools/srilm/bin/i686 - fi - if [ -f $sdir/ngram-count ]; then - echo Using SRILM tools from $sdir - export PATH=$PATH:$sdir - else - echo You appear to not have SRILM tools installed, either on your path, - echo or installed in $sdir. See tools/install_srilm.sh for installation - echo instructions. 
- exit 1 - fi + echo You appear to not have SRILM tools installed, either on your path, + echo or installed in $sdir. See tools/install_srilm.sh for installation + echo instructions. + exit 1 fi @@ -231,10 +221,9 @@ if [ $stage -le -3 ]; then echo "$0: using SRILM to train syllable LM" - ngram-count -lm $dir/3gram.kn022.gz -kndiscount1 -gt1min 0 -kndiscount2 -gt2min 2 -kndiscount3 -gt3min 2 -order 3 -text $dir/syllable_text.txt -sort - + ngram-count -lm $dir/3gram.me.gz -maxent -maxent-convert-to-arpa -kndiscount1 -gt1min 0 -kndiscount2 -gt2min 2 -kndiscount3 -gt3min 2 -order 3 -text $dir/syllable_text.txt -sort rm $dir/lm.gz 2>/dev/null - ln -s 3gram.kn022.gz $dir/lm.gz + ln -s 3gram.me.gz $dir/lm.gz fi diff --git a/egs/babel/s5d/local/generate_confusion_matrix.sh b/egs/babel/s5d/local/generate_confusion_matrix.sh index 48263e729de..fb602cf0957 100755 --- a/egs/babel/s5d/local/generate_confusion_matrix.sh +++ b/egs/babel/s5d/local/generate_confusion_matrix.sh @@ -61,7 +61,7 @@ fi mkdir -p $wdir/log cat $data/phones.txt | sed 's/_[B|E|I|S]//g' |\ - sed 's/_[%|"]//g' | sed 's/_[0-9]\+//g' > $wdir/phones.txt + sed 's/_[%|"]//g' | sed 's/_[0-9]\+//g' | sed 's/_[^ ]*//g' > $wdir/phones.txt echo "Converting alignments to phone sequences..." $cmd JOB=1:$nj $wdir/log/ali_to_phones.JOB.log \ @@ -81,7 +81,8 @@ for i in `seq 1 $nj` ; do done echo "Converting statistics..." 
-cat $confusion_files | cut -f 2- -d ' ' | sed 's/ *; */\n/g'| sort | uniq -c | \ +cat $confusion_files | cut -f 2- -d ' ' | sed 's/ *; */\n/g' | \ + sed 's/ *$//g' | sed 's/^ *//g' | sort | uniq -c | \ grep -v -E '|||SIL' | \ perl -ane ' die unless scalar @F == 3; diff --git a/egs/babel/s5d/local/lexicon/make_unicode_lexicon.py b/egs/babel/s5d/local/lexicon/make_unicode_lexicon.py index b6d4b9ab944..3670ba755bc 100755 --- a/egs/babel/s5d/local/lexicon/make_unicode_lexicon.py +++ b/egs/babel/s5d/local/lexicon/make_unicode_lexicon.py @@ -27,7 +27,7 @@ def main(): unicode_transcription = baseform2unicode(baseforms) encoded_transcription, table = encode(unicode_transcription, args.tag_percentage, - log=args.verbose) + log=args.log) write_table(table, args.lex_out) # Extract dictionary of nonspeech pronunciations @@ -59,7 +59,7 @@ def parse_input(): Parse commandline input. ''' if len(sys.argv[1:]) == 0: - print("Usage: ./make_unicode_lexicon.py [opts] lex_in lex_out") + print("Usage: ./make_unicode_lexicon.py [opts] lex_in lex_out [log]") sys.exit(1) parser = argparse.ArgumentParser() @@ -67,7 +67,9 @@ def parse_input(): "paired with a baseform. 
1 word per line with the " "baseform separated by a tab") parser.add_argument("lex_out", help="Path of output output " - "graphemc lexicon") + "graphemic lexicon") + parser.add_argument("log", nargs='?', default=None, + help="Directory in which the logs will be stored"); parser.add_argument("-F", "--fmt", help="Format of input word list", action="store", default="word_list") parser.add_argument("-T", "--tag_percentage", help="Percentage of least" @@ -246,12 +248,11 @@ def encode(unicode_transcription, tag_percentage, log=False): graph_counts = graph_counts_dict # Print grapheme counts to histogram - if log: + if log is not None: graph_counts_sorted = sorted(graph_counts, reverse=True, key=graph_counts.get) - if not os.path.exists("lex_log"): - os.makedirs("lex_log") - with codecs.open("lex_log/grapheme_histogram.txt", "w", "utf-8") as fp: + logfile = "{}/grapheme_histogram.txt".format(log) + with codecs.open(logfile, "w", "utf-8") as fp: fp.write("Graphemes (Count Threshold = %.6f)\n" % count_thresh) for g in graph_counts_sorted: weight = ("-" * int(np.ceil(500.0 * graph_counts[g])) + diff --git a/egs/babel/s5d/local/nist_eval/create_new_language_configs.LLP.sh b/egs/babel/s5d/local/nist_eval/create_new_language_configs.LLP.sh index 2ffb73810e3..be6aa5c2b40 100755 --- a/egs/babel/s5d/local/nist_eval/create_new_language_configs.LLP.sh +++ b/egs/babel/s5d/local/nist_eval/create_new_language_configs.LLP.sh @@ -4,15 +4,16 @@ # Begin configuration section. language="201-haitian" +corpus=/export/babel/data/ +indus=/export/babel/data/scoring/IndusDB # End configuration section . ./utils/parse_options.sh set -e -o pipefail set -o nounset # Treat unset variables as an error -corpus=/export/babel/data/$language +corpus=$corpus/$language lists=./conf/lists/$language/ -indus=/export/babel/data/scoring/IndusDB corpusdir=$(find $corpus -maxdepth 1 -name "*-build" -type d) || exit 1 [ -z "$corpusdir" ] && "Corpus directory for $language not found!" 
&& exit 1 diff --git a/egs/babel/s5d/local/nnet3/run_blstm.sh b/egs/babel/s5d/local/nnet3/run_blstm.sh index 6833baa0d72..fcf7fb8947d 100755 --- a/egs/babel/s5d/local/nnet3/run_blstm.sh +++ b/egs/babel/s5d/local/nnet3/run_blstm.sh @@ -5,7 +5,7 @@ cell_dim=512 rp_dim=128 nrp_dim=128 affix=bidirectional -multicondition=true +multicondition=false common_egs_dir= num_epochs=8 diff --git a/egs/babel/s5d/local/nnet3/run_ivector_common.sh b/egs/babel/s5d/local/nnet3/run_ivector_common.sh index bfe66d13f76..7313230a7ee 100755 --- a/egs/babel/s5d/local/nnet3/run_ivector_common.sh +++ b/egs/babel/s5d/local/nnet3/run_ivector_common.sh @@ -60,8 +60,8 @@ fi if [ $stage -le 3 ]; then mfccdir=mfcc_hires if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then - date=$(date +'%m_%d_%H_%M') - utils/create_split_dir.pl /export/b0{1,2,3,4}/$USER/kaldi-data/egs/swbd-$date/s5b/$mfccdir/storage $mfccdir/storage + utils/create_split_dir.pl \ + /export/b0{5,6,7,8}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$mfccdir/storage $mfccdir/storage fi # the 100k_nodup directory is copied seperately, as diff --git a/egs/babel/s5d/local/nnet3/run_ivector_multicondition_common.sh b/egs/babel/s5d/local/nnet3/run_ivector_multicondition_common.sh index 8d3973e65bc..c3a6e1c0952 100755 --- a/egs/babel/s5d/local/nnet3/run_ivector_multicondition_common.sh +++ b/egs/babel/s5d/local/nnet3/run_ivector_multicondition_common.sh @@ -70,8 +70,8 @@ fi if [ $stage -le 3 ]; then mfccdir=mfcc_hires if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! 
-d $mfccdir/storage ]; then - date=$(date +'%m_%d_%H_%M') - utils/create_split_dir.pl /export/b0{1,2,3,4}/$USER/kaldi-data/egs/swbd-$date/s5b/$mfccdir/storage $mfccdir/storage + utils/create_split_dir.pl \ + /export/b0{1,2,3,4}/$USER/kaldi-data/egs/kaldi-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$mfccdir/storage $mfccdir/storage fi # the 100k_nodup directory is copied seperately, as @@ -151,8 +151,8 @@ train_set=train_sp_mc if [ $stage -le 7 ]; then mfccdir=mfcc_reverb if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $mfccdir/storage ]; then - date=$(date +'%m_%d_%H_%M') - utils/create_split_dir.pl /export/b0{1,2,3,4}/$USER/kaldi-data/egs/babel_reverb-$date/s5/$mfccdir/storage $mfccdir/storage + utils/create_split_dir.pl \ + /export/b0{1,2,3,4}/$USER/kaldi-data/egs/babel_reverb-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$mfccdir/storage $mfccdir/storage fi for data_dir in $train_set; do utils/copy_data_dir.sh data/$data_dir data/${data_dir}_hires diff --git a/egs/babel/s5d/local/nnet3/run_lstm.sh b/egs/babel/s5d/local/nnet3/run_lstm.sh index 8105cfda387..f7d06501569 100755 --- a/egs/babel/s5d/local/nnet3/run_lstm.sh +++ b/egs/babel/s5d/local/nnet3/run_lstm.sh @@ -121,7 +121,7 @@ fi if [ $stage -le 13 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! 
-d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{3,4,5,6}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{3,4,5,6}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi steps/nnet3/train_rnn.py --stage=$train_stage \ @@ -136,7 +136,6 @@ if [ $stage -le 13 ]; then --trainer.optimization.final-effective-lrate=$final_effective_lrate \ --trainer.optimization.shrink-value 0.99 \ --trainer.rnn.num-chunk-per-minibatch=$num_chunk_per_minibatch \ - --trainer.optimization.cv-minibatch-size 128 \ --trainer.optimization.momentum=$momentum \ --egs.chunk-width=$chunk_width \ --egs.chunk-left-context=$chunk_left_context \ diff --git a/egs/babel/s5d/local/nnet3/run_lstm_realigned.sh b/egs/babel/s5d/local/nnet3/run_lstm_realigned.sh index acd65e9114e..2448b1b17ff 100755 --- a/egs/babel/s5d/local/nnet3/run_lstm_realigned.sh +++ b/egs/babel/s5d/local/nnet3/run_lstm_realigned.sh @@ -114,7 +114,7 @@ fi if [ $stage -le 3 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{3,4,5,6}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{3,4,5,6}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi steps/nnet3/train_rnn.py --stage=$train_stage \ diff --git a/egs/babel/s5d/local/nnet3/run_tdnn.sh b/egs/babel/s5d/local/nnet3/run_tdnn.sh index 8899e363dd9..2a663486bcb 100755 --- a/egs/babel/s5d/local/nnet3/run_tdnn.sh +++ b/egs/babel/s5d/local/nnet3/run_tdnn.sh @@ -60,7 +60,7 @@ local/nnet3/run_ivector_common.sh --stage $stage \ if [ $stage -le 9 ]; then if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! 
-d $dir/egs/storage ]; then utils/create_split_dir.pl \ - /export/b0{3,4,5,6}/$USER/kaldi-data/egs/swbd-$(date +'%m_%d_%H_%M')/s5/$dir/egs/storage $dir/egs/storage + /export/b0{3,4,5,6}/$USER/kaldi-data/egs/babel-$(date +'%m_%d_%H_%M')/s5d/$RANDOM/$dir/egs/storage $dir/egs/storage fi steps/nnet3/train_tdnn.sh --stage $train_stage \ diff --git a/egs/babel/s5d/local/reestimate_langp.sh b/egs/babel/s5d/local/reestimate_langp.sh index 059fba52043..ae70b6a8f46 100755 --- a/egs/babel/s5d/local/reestimate_langp.sh +++ b/egs/babel/s5d/local/reestimate_langp.sh @@ -29,5 +29,6 @@ utils/dict_dir_add_pronprobs.sh --max-normalize true $idict \ $amdir/pron_bigram_counts_nowb.txt $odict utils/prepare_lang.sh --phone-symbol-table $langdir/phones.txt \ + --share-silence-phones true \ $odict "$unk" $olocallang $olang diff --git a/egs/babel/s5d/local/run_kws_stt_task2.sh b/egs/babel/s5d/local/run_kws_stt_task2.sh index 6007baa1756..9c10bfe6da5 100755 --- a/egs/babel/s5d/local/run_kws_stt_task2.sh +++ b/egs/babel/s5d/local/run_kws_stt_task2.sh @@ -71,14 +71,26 @@ fi if ! $skip_kws ; then [ ! -f $data_dir/extra_kws_tasks ] && exit 0 - syll_data_dir=$(echo $data_dir | perl -pe 's/\.(pem|seg)$/.syll.$1/g' ) + idata=$(basename $data_dir) + idir=$(dirname $data_dir) + + idataset=${idata%%.*} + idatatype=${idata#*.} + + if [ "$idata" == "$idataset" ]; then + syll_data_dir=$idir/${idataset}.syll + phn_data_dir=$idir/${idataset}.phn + else + syll_data_dir=$idir/${idataset}.syll.${idatatype} + phn_data_dir=$idir/${idataset}.phn.${idatatype} + fi + if [ -d ${syll_data_dir} ] && [ ! -f ${decode_dir}/syllabs/.done ] ; then local/syllab/lattice_word2syll.sh --cmd "$cmd --mem 8G" \ $data_dir $lang_dir ${lang_dir}.syll $decode_dir ${decode_dir}/syllabs touch ${decode_dir}/syllabs/.done fi - phn_data_dir=$(echo $data_dir | perl -pe 's/\.(pem|seg)$/.phn.$1/g' ) if [ -d ${phn_data_dir} ] && [ ! 
-f ${decode_dir}/phones/.done ] ; then local/syllab/lattice_word2syll.sh --cmd "$cmd --mem 8G" \ $data_dir $lang_dir ${lang_dir}.phn $decode_dir ${decode_dir}/phones diff --git a/egs/babel/s5d/local/search/run_phn_search.sh b/egs/babel/s5d/local/search/run_phn_search.sh index 44587699a38..e4dba529b3d 100755 --- a/egs/babel/s5d/local/search/run_phn_search.sh +++ b/egs/babel/s5d/local/search/run_phn_search.sh @@ -29,7 +29,11 @@ dataset=${dir%%.*} datatype=${dir#*.} lang=data/lang.phn -data=data/${dataset}.phn.${datatype} +if [ "$dir" == "$dataset" ]; then + data=data/${dataset}.phn +else + data=data/${dataset}.phn.${datatype} +fi set +o nounset eval kwsets=${!dataset_kwlists[@]} @@ -76,7 +80,7 @@ if [ $stage -le 2 ] ; then ${data}/kwset_${set}/tmp.4 # and finally, replace the categories by the word-level categories - cp data/$dir/kwset_${set}/categories $data/kwset_${set}/categories + cp data/${dir}/kwset_${set}/categories $data/kwset_${set}/categories done fi diff --git a/egs/babel/s5d/local/search/run_search.sh b/egs/babel/s5d/local/search/run_search.sh index 2cb40cabb59..1fbdb071123 100755 --- a/egs/babel/s5d/local/search/run_search.sh +++ b/egs/babel/s5d/local/search/run_search.sh @@ -67,8 +67,11 @@ if [ $stage -le 2 ] ; then #-- data/dev10h.pem/${set}_oov_kws/tmp/L1.lex data/dev10h.pem/kwset_${set}/tmp.3 if [ -d data/local/extend ]; then echo "Detected extended lexicon system..." 
- local/search/compile_proxy_keywords.sh --cmd "$decode_cmd --mem 12G" --filter "OOV=1&&Characters>2"\ - --beam 5 --nbest 50 --nj 64 --phone-beam 5 --phone-nbest 300 --confusion-matrix exp/conf_matrix/confusions.txt \ + local/search/compile_proxy_keywords.sh --filter "OOV=1&&Characters>2"\ + --cmd "$decode_cmd --mem 24G --max-jobs-run 64" --nj 128 \ + --beam $extlex_proxy_beam --nbest $extlex_proxy_nbest \ + --phone-beam $extlex_proxy_phone_beam --phone-nbest $extlex_proxy_phone_nbest\ + --confusion-matrix exp/conf_matrix/confusions.txt \ data/$dir/kwset_${set} data/lang data/local/lexiconp.txt exp/g2p \ data/$dir/kwset_${set}/tmp.4 else diff --git a/egs/babel/s5d/local/search/run_syll_search.sh b/egs/babel/s5d/local/search/run_syll_search.sh index eb48d836e77..41a925ce13a 100755 --- a/egs/babel/s5d/local/search/run_syll_search.sh +++ b/egs/babel/s5d/local/search/run_syll_search.sh @@ -29,7 +29,11 @@ dataset=${dir%%.*} datatype=${dir#*.} lang=data/lang.syll -data=data/${dataset}.syll.${datatype} +if [ "$dir" == "$dataset" ]; then + data=data/${dataset}.syll +else + data=data/${dataset}.syll.${datatype} +fi set +o nounset eval kwsets=${!dataset_kwlists[@]} diff --git a/egs/babel/s5d/local/search/search.sh b/egs/babel/s5d/local/search/search.sh index 200a49d8e86..6a5b2d35a97 100755 --- a/egs/babel/s5d/local/search/search.sh +++ b/egs/babel/s5d/local/search/search.sh @@ -26,6 +26,7 @@ silence_word= # specify this if you did to in kws_setup.sh, it's more accurate. strict=false duptime=0.6 ntrue_scale=1.0 +frame_subsampling_factor=1 nbest=-1 max_silence_frames=50 # End configuration section. 
diff --git a/egs/babel/s5d/local/syllab/lattice_word2syll.sh b/egs/babel/s5d/local/syllab/lattice_word2syll.sh index b81bf9d18d4..63e9114875d 100755 --- a/egs/babel/s5d/local/syllab/lattice_word2syll.sh +++ b/egs/babel/s5d/local/syllab/lattice_word2syll.sh @@ -26,7 +26,7 @@ mkdir -p $output/log if [ -f $olang/lex.words2syllabs.fst ] ; then fstinvert $olang/lex.words2syllabs.fst | fstreverse | \ - fstminimize | fstreverse > $output/L.fst + fstminimize --allow_nondet | fstreverse > $output/L.fst $cmd JOB=1:$nj $output/log/convert.JOB.log \ lattice-push --push-strings ark:"gunzip -c $input/lat.JOB.gz|" ark:- \| \ diff --git a/egs/babel/s5d/local/syllab/run_phones.sh b/egs/babel/s5d/local/syllab/run_phones.sh index 6f3c7be4cef..7c4a13c61f9 100755 --- a/egs/babel/s5d/local/syllab/run_phones.sh +++ b/egs/babel/s5d/local/syllab/run_phones.sh @@ -21,10 +21,20 @@ if [ $# -ne 1 ] ; then fi idir=$1 + +if [ ! -d "$idir" ] ; then + echo "The directory $idir does not exist" + exit 1 +fi + idata=${idir##*/} -odata=${idata%%.*}.phn.${idata#*.} +if [ "$idata" == ${idata%%.*} ]; then + odata=${idata%%.*}.phn +else + odata=${idata%%.*}.phn.${idata#*.} +fi if [ $stage -le -1 ] ; then local/syllab/generate_phone_lang.sh \ diff --git a/egs/babel/s5d/local/syllab/run_syllabs.sh b/egs/babel/s5d/local/syllab/run_syllabs.sh index a2ec82f3033..7366ac9ad35 100755 --- a/egs/babel/s5d/local/syllab/run_syllabs.sh +++ b/egs/babel/s5d/local/syllab/run_syllabs.sh @@ -21,10 +21,19 @@ if [ $# -ne 1 ] ; then fi idir=$1 -idata=${idir##*/} +if [ ! 
-d "$idir" ] ; then + echo "The directory $idir does not exist" + exit 1 +fi + +idata=${idir##*/} -odata=${idata%%.*}.syll.${idata#*.} +if [ "$idata" == ${idata%%.*} ]; then + odata=${idata%%.*}.syll +else + odata=${idata%%.*}.syll.${idata#*.} +fi if [ $stage -le -1 ] ; then local/syllab/generate_syllable_lang.sh \ @@ -45,7 +54,7 @@ if [ $stage -le -1 ] ; then local/arpa2G.sh data/srilm.syll/lm.gz data/lang.syll/ data/lang.syll/ fi -if [ $stage -le 0 ] && [ -f "$idir/text" ] ; then +if [ $stage -le 0 ] && [ -f "$idir/text" ]; then #Create dev10h.syll.pem dir steps/align_fmllr.sh \ --boost-silence $boost_sil --nj $train_nj --cmd "$train_cmd" \ diff --git a/egs/babel/s5d/run-1-main-unicode-extend-lex.sh b/egs/babel/s5d/run-1-main-unicode-extend-lex.sh new file mode 100755 index 00000000000..f9de3e8e947 --- /dev/null +++ b/egs/babel/s5d/run-1-main-unicode-extend-lex.sh @@ -0,0 +1,209 @@ +#!/bin/bash + +# Parameters for extended lexicon. +extend_lexicon=true +unk_fraction_boost=1.0 +num_sent_gen=12000000 +num_prons=1000000 +morfessor=true +tag_percentage=0.1 +denlats_only=false + +[ ! -f ./lang.conf ] && echo 'Language configuration does not exist! Use the configurations in conf/lang/* as a startup' && exit 1 +[ ! -f ./conf/common_vars.sh ] && echo 'the file conf/common_vars.sh does not exist!' && exit 1 + +. conf/common_vars.sh || exit 1; +. ./lang.conf || exit 1; + +[ -f local.conf ] && . ./local.conf + +. ./utils/parse_options.sh + +set -e #Exit on non-zero return code from any command +set -o pipefail #Exit if any of the commands in the pipeline will + #return non-zero return code +#set -u #Fail on an undefined variable + +lexicon=data/local/lexicon.txt +if $extend_lexicon; then + lexicon=data/local/lexiconp.txt +fi + +./local/check_tools.sh || exit 1 + +#Preparing dev2h and train directories +if [ ! 
-f data/raw_train_data/.done ]; then + echo --------------------------------------------------------------------- + echo "Subsetting the TRAIN set" + echo --------------------------------------------------------------------- + + local/make_corpus_subset.sh "$train_data_dir" "$train_data_list" ./data/raw_train_data + train_data_dir=`readlink -f ./data/raw_train_data` + touch data/raw_train_data/.done +fi +nj_max=`cat $train_data_list | wc -l` +if [[ "$nj_max" -lt "$train_nj" ]] ; then + echo "The maximum reasonable number of jobs is $nj_max (you have $train_nj)! (The training and decoding process has file-granularity)" + exit 1; + train_nj=$nj_max +fi +train_data_dir=`readlink -f ./data/raw_train_data` + +if [ ! -d data/raw_dev2h_data ]; then + echo --------------------------------------------------------------------- + echo "Subsetting the DEV2H set" + echo --------------------------------------------------------------------- + local/make_corpus_subset.sh "$dev2h_data_dir" "$dev2h_data_list" ./data/raw_dev2h_data || exit 1 +fi + +if [ ! -d data/raw_dev10h_data ]; then + echo --------------------------------------------------------------------- + echo "Subsetting the DEV10H set" + echo --------------------------------------------------------------------- + local/make_corpus_subset.sh "$dev10h_data_dir" "$dev10h_data_list" ./data/raw_dev10h_data || exit 1 +fi + +# Move data/dev2h preparation forward so we can get data/dev2h/text for +# diagnostic purpose when extending the lexicon. +if [[ ! 
-f data/dev2h/wav.scp || data/dev2h/wav.scp -ot ./data/raw_dev2h_data/audio ]]; then + echo --------------------------------------------------------------------- + echo "Preparing dev2h data lists in data/dev2h on" `date` + echo --------------------------------------------------------------------- + mkdir -p data/dev2h + local/prepare_acoustic_training_data.pl \ + --fragmentMarkers \-\*\~ \ + `pwd`/data/raw_dev2h_data data/dev2h > data/dev2h/skipped_utts.log || exit 1 +fi + +if [[ ! -f data/dev2h/glm || data/dev2h/glm -ot "$glmFile" ]]; then + echo --------------------------------------------------------------------- + echo "Preparing dev2h stm files in data/dev2h on" `date` + echo --------------------------------------------------------------------- + if [ -z $dev2h_stm_file ]; then + echo "WARNING: You should define the variable stm_file pointing to the IndusDB stm" + echo "WARNING: Doing that, it will give you scoring close to the NIST scoring. " + local/prepare_stm.pl --fragmentMarkers \-\*\~ data/dev2h || exit 1 + else + local/augment_original_stm.pl $dev2h_stm_file data/dev2h || exit 1 + fi + [ ! -z $glmFile ] && cp $glmFile data/dev2h/glm + +fi + +mkdir -p data/local +if [[ ! -f $lexicon || $lexicon -ot "$lexicon_file" ]]; then + echo --------------------------------------------------------------------- + echo "Preparing lexicon in data/local on" `date` + echo --------------------------------------------------------------------- + + local/lexicon/make_word_list.py $train_data_dir/filelist.list $train_data_dir/transcription data/local/word_list.txt + echo -e " SIL\n \n \n " > data/local/nonspeech.txt + echo -e " " > data/local/extraspeech.txt + + fmt="word_list" + if $morfessor; then + fmt="morfessor" + morfessor-train --encoding=utf_8 --traindata-list -f"-_" -s data/local/morfessor.bin \ + data/local/word_list.txt + morfessor-segment --encoding=utf_8 --output-format-separator '.' 
--viterbi-maxlen 3 \ + -l data/local/morfessor.bin <(cut -d' ' -f2 data/local/word_list.txt) \ + | sed 's/\.[\_\-]\././g' > data/local/segments + cut -d' ' data/local/word_list.txt -f2 | paste -d' ' - data/local/segments > data/local/word_list_tmp.txt + mv data/local/word_list_tmp.txt data/local/word_list.txt + fi + + local/lexicon/make_unicode_lexicon.py --tag_percentage $tag_percentage --fmt $fmt \ + --nonspeech data/local/nonspeech.txt --extraspeech data/local/extraspeech.txt \ + --verbose data/local/word_list.txt data/local/lexicon.txt data/local/ + local/prepare_unicode_lexicon.py --nonspeech data/local/nonspeech.txt \ + --extraspeech data/local/extraspeech.txt data/local/lexicon_table.txt data/local + cp data/local/lexicon.txt data/local/filtered_lexicon.txt + if $extend_lexicon; then + # Extend the original lexicon. + # Will create the files data/local/extend/{lexiconp.txt,oov2prob}. + mv data/local/lexicon.txt data/local/lexicon_orig.txt + local/extend_lexicon.sh --cmd "$train_cmd" --cleanup false \ + --num-sent-gen $num_sent_gen --num-prons $num_prons \ + data/local/lexicon_orig.txt data/local/extend data/dev2h/text + cp data/local/extend/lexiconp.txt data/local/ + fi +fi + +mkdir -p data/lang +if [[ ! -f data/lang/L.fst || data/lang/L.fst -ot $lexicon ]]; then + echo --------------------------------------------------------------------- + echo "Creating L.fst etc in data/lang on" `date` + echo --------------------------------------------------------------------- + utils/prepare_lang.sh \ + --share-silence-phones true \ + data/local $oovSymbol data/local/tmp.lang data/lang +fi + +if [[ ! 
-f data/train/wav.scp || data/train/wav.scp -ot "$train_data_dir" ]]; then + echo --------------------------------------------------------------------- + echo "Preparing acoustic training lists in data/train on" `date` + echo --------------------------------------------------------------------- + mkdir -p data/train + local/prepare_acoustic_training_data.pl \ + --vocab $lexicon --fragmentMarkers \-\*\~ \ + $train_data_dir data/train > data/train/skipped_utts.log +fi + +if [[ ! -f data/srilm/lm.gz || data/srilm/lm.gz -ot data/train/text ]]; then + echo --------------------------------------------------------------------- + echo "Training SRILM language models on" `date` + echo --------------------------------------------------------------------- + # If extending the lexicon, use "--words-file data/local/lexicon_orig.txt" so + # that the LM is trained just on the vocab that appears in the text. Will add + # in the OOVs later. + words_file_param=() + if $extend_lexicon; then + words_file_param=(--words-file data/local/lexicon_orig.txt) + fi + local/train_lms_srilm.sh --oov-symbol "$oovSymbol"\ + "${words_file_param[@]}" \ + --train-text data/train/text data data/srilm +fi + +if [[ ! 
-f data/lang/G.fst || data/lang/G.fst -ot data/srilm/lm.gz ||\ + ( -f data/local/extend/oov2prob &&\ + data/lang/G.fst -ot data/local/extend/oov2prob ) ]]; then + echo --------------------------------------------------------------------- + echo "Creating G.fst on " `date` + echo --------------------------------------------------------------------- + extend_lexicon_param=() + if $extend_lexicon; then + [ -f data/local/extend/original_oov_rates ] || exit 1; + unk_fraction=`cat data/local/extend/original_oov_rates |\ + grep "token" | awk -v x=$unk_fraction_boost '{print $NF/100.0*x}'` + extend_lexicon_param=(--cleanup false --unk-fraction $unk_fraction \ + --oov-prob-file data/local/extend/oov2prob) + fi + local/arpa2G.sh ${extend_lexicon_param[@]} \ + data/srilm/lm.gz data/lang data/lang +fi + +echo --------------------------------------------------------------------- +echo "Starting plp feature extraction for data/train in plp on" `date` +echo --------------------------------------------------------------------- + +if [ ! -f data/train/.plp.done ]; then + if $use_pitch; then + steps/make_plp_pitch.sh --cmd "$train_cmd" --nj $train_nj data/train exp/make_plp_pitch/train plp + else + steps/make_plp.sh --cmd "$train_cmd" --nj $train_nj data/train exp/make_plp/train plp + fi + utils/fix_data_dir.sh data/train + steps/compute_cmvn_stats.sh data/train exp/make_plp/train plp + utils/fix_data_dir.sh data/train + touch data/train/.plp.done +fi + +touch data/.extlex +mkdir -p exp + +echo ------------------------------------------------------------------------- +echo "Extended lexicon finished on" `date`. 
Now running script run-1-main.sh +echo ------------------------------------------------------------------------- +./run-1-main-unicode.sh --denlats-only "$denlats_only" +exit 0 diff --git a/egs/babel/s5d/run-1-main-unicode.sh b/egs/babel/s5d/run-1-main-unicode.sh index e3fb2486c83..acd2693cbef 100755 --- a/egs/babel/s5d/run-1-main-unicode.sh +++ b/egs/babel/s5d/run-1-main-unicode.sh @@ -80,7 +80,7 @@ if [[ ! -f $lexicon || $lexicon -ot "$lexicon_file" ]]; then local/lexicon/make_unicode_lexicon.py --tag_percentage $tag_percentage --fmt $fmt \ --nonspeech data/local/nonspeech.txt --extraspeech data/local/extraspeech.txt \ - --verbose data/local/word_list.txt data/local/lexicon.txt + --verbose data/local/word_list.txt data/local/lexicon.txt data/local/ local/prepare_unicode_lexicon.py --nonspeech data/local/nonspeech.txt \ --extraspeech data/local/extraspeech.txt data/local/lexicon_table.txt data/local cp data/local/lexicon.txt data/local/filtered_lexicon.txt diff --git a/egs/babel/s5d/run-4-anydecode.sh b/egs/babel/s5d/run-4-anydecode.sh index 083ac7e9879..8ac0fde2621 100755 --- a/egs/babel/s5d/run-4-anydecode.sh +++ b/egs/babel/s5d/run-4-anydecode.sh @@ -26,7 +26,7 @@ extra_left_context=40 extra_right_context=40 frames_per_chunk=20 -echo "run-4-test.sh $@" +echo "$0 $@" . utils/parse_options.sh @@ -61,7 +61,9 @@ dataset_type=${dir%%.*} #By default, we want the script to accept how the dataset should be handled, #i.e. 
of what kind is the dataset if [ -z ${kind} ] ; then - if [ "$dataset_type" == "dev2h" ] || [ "$dataset_type" == "dev10h" ]; then + if [ "$dataset_type" == "dev2h" ] || \ + [ "$dataset_type" == "dev10h" ] || \ + [ "$dataset_type" == "train" ]; then dataset_kind=supervised else dataset_kind=unsupervised @@ -96,11 +98,24 @@ if [ -z $my_data_dir ] || [ -z $my_data_list ] ; then exit 1 fi +if [ "$dataset_type" == "train" ] ; then + local/ali_to_rttm.sh --cmd "$decode_cmd" data/train data/langp_test exp/tri5_ali + bash -x local/qbe/wav_to_ecf.sh data/train/wav.scp > data/train/ecf.train.xml + train_rttm_file=./exp/tri5_ali/rttm + train_ecf_file=./data/train/ecf.train.xml +fi + + eval my_stm_file=\$${dataset_type}_stm_file eval my_ecf_file=\$${dataset_type}_ecf_file eval my_rttm_file=\$${dataset_type}_rttm_file eval my_nj=\$${dataset_type}_nj #for shadow, this will be re-set when appropriate +echo "my_stm_file=$my_stm_file" +echo "my_ecf_file=$my_ecf_file" +echo "my_rttm_file=$my_rttm_file" +echo "my_nj=$my_nj" + if [ -z "$my_nj" ]; then echo >&2 "You didn't specify the number of jobs -- variable \"${dataset_type}_nj\" not defined." exit 1 @@ -214,7 +229,8 @@ if [ ! -f $dataset_dir/.done ] ; then . ./local/datasets/supervised_seg.sh || exit 1 elif [ "$dataset_segments" == "uem" ]; then . ./local/datasets/supervised_uem.sh || exit 1 - elif [ "$dataset_segments" == "pem" ]; then + elif [ "$dataset_segments" == "train" ] ||\ + [ "$dataset_segments" == "pem" ]; then . ./local/datasets/supervised_pem.sh || exit 1 else echo "Unknown type of the dataset: \"$dataset_segments\"!"; @@ -294,29 +310,31 @@ echo --------------------------------------------------------------------- echo "Preparing kws data files in ${dataset_dir} on" `date` echo --------------------------------------------------------------------- lang=data/lang -if ! $skip_kws ; then - if $extra_kws ; then - L1_lex=data/local/lexiconp.txt - . ./local/datasets/extra_kws.sh || exit 1 - fi - if $vocab_kws ; then - . 
./local/datasets/vocab_kws.sh || exit 1 - fi - if [ ! -f data/lang.phn/G.fst ] ; then - ./local/syllab/run_phones.sh --stage -2 ${dataset_dir} - else - ./local/syllab/run_phones.sh ${dataset_dir} - fi +if [ ! -f data/dev10h.pem/.done.kws.dev ] ; then + if ! $skip_kws ; then + if $extra_kws ; then + L1_lex=data/local/lexiconp.txt + . ./local/datasets/extra_kws.sh || exit 1 + fi + if $vocab_kws ; then + . ./local/datasets/vocab_kws.sh || exit 1 + fi + if [ ! -f data/lang.phn/G.fst ] ; then + ./local/syllab/run_phones.sh --stage -2 ${dataset_dir} + else + ./local/syllab/run_phones.sh ${dataset_dir} + fi - if [ ! -f data/lang.syll/G.fst ] ; then - ./local/syllab/run_syllabs.sh --stage -2 ${dataset_dir} - else - ./local/syllab/run_syllabs.sh ${dataset_dir} - fi + if [ ! -f data/lang.syll/G.fst ] ; then + ./local/syllab/run_syllabs.sh --stage -2 ${dataset_dir} + else + ./local/syllab/run_syllabs.sh ${dataset_dir} + fi - ./local/search/run_search.sh --dir ${dataset_dir##*/} - ./local/search/run_phn_search.sh --dir ${dataset_dir##*/} - ./local/search/run_syll_search.sh --dir ${dataset_dir##*/} + ./local/search/run_search.sh --dir ${dataset_dir##*/} + ./local/search/run_phn_search.sh --dir ${dataset_dir##*/} + ./local/search/run_syll_search.sh --dir ${dataset_dir##*/} + fi fi if $data_only ; then @@ -379,72 +397,6 @@ if $tri5_only; then exit 0 fi -#################################################################### -## SGMM2 decoding -## We Include the SGMM_MMI inside this, as we might only have the DNN systems -## trained and not PLP system. The DNN systems build only on the top of tri5 stage -#################################################################### -if [ -f exp/sgmm5/.done ]; then - decode=exp/sgmm5/decode_fmllr_${dataset_id} - if [ ! 
-f $decode/.done ]; then - echo --------------------------------------------------------------------- - echo "Spawning $decode on" `date` - echo --------------------------------------------------------------------- - utils/mkgraph.sh \ - data/langp_test exp/sgmm5 exp/sgmm5/graph |tee exp/sgmm5/mkgraph.log - - mkdir -p $decode - steps/decode_sgmm2.sh --skip-scoring true --use-fmllr true --nj $my_nj \ - --cmd "$decode_cmd" --transform-dir exp/tri5/decode_${dataset_id} "${decode_extra_opts[@]}"\ - exp/sgmm5/graph ${dataset_dir} $decode |tee $decode/decode.log - touch $decode/.done - - if ! $fast_path ; then - local/run_kws_stt_task2.sh --cer $cer --max-states $max_states \ - --skip-scoring $skip_scoring --extra-kws $extra_kws --wip $wip \ - --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ - "${lmwt_plp_extra_opts[@]}" \ - ${dataset_dir} data/langp_test exp/sgmm5/decode_fmllr_${dataset_id} - fi - fi - - #################################################################### - ## - ## SGMM_MMI rescoring - ## - #################################################################### - - for iter in 1 2 3 4; do - # Decode SGMM+MMI (via rescoring). - decode=exp/sgmm5_mmi_b0.1/decode_fmllr_${dataset_id}_it$iter - if [ -x exp/sgmm5_mmi_b0.1 ] && [ ! -f $decode/.done ]; then - - mkdir -p $decode - steps/decode_sgmm2_rescore.sh --skip-scoring true \ - --cmd "$decode_cmd" --iter $iter --transform-dir exp/tri5/decode_${dataset_id} \ - data/langp_test ${dataset_dir} exp/sgmm5/decode_fmllr_${dataset_id} $decode | tee ${decode}/decode.log - - touch $decode/.done - fi - done - - #We are done -- all lattices has been generated. We have to - #a)Run MBR decoding - #b)Run KW search - for iter in 1 2 3 4; do - # Decode SGMM+MMI (via rescoring). 
- decode=exp/sgmm5_mmi_b0.1/decode_fmllr_${dataset_id}_it$iter - if [ -f $decode/.done ]; then - local/run_kws_stt_task2.sh --cer $cer --max-states $max_states \ - --skip-scoring $skip_scoring --extra-kws $extra_kws --wip $wip \ - --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ - "${lmwt_plp_extra_opts[@]}" \ - ${dataset_dir} data/langp_test $decode - fi - done -fi - - #################################################################### ## @@ -476,10 +428,13 @@ fi ## nnet3 model decoding ## #################################################################### -if [ -f exp/nnet3/lstm_bidirectional_sp/.done ]; then +if [ -f exp/nnet3/lstm_bidirectional_sp/final.mdl ]; then decode=exp/nnet3/lstm_bidirectional_sp/decode_${dataset_id} rnn_opts=" --extra-left-context 40 --extra-right-context 40 --frames-per-chunk 20 " decode_script=steps/nnet3/decode.sh + my_nj_backup=$my_nj + echo "Modifying the number of jobs as this is an RNN and decoding can be extremely slow." + my_nj=`cat ${dataset_dir}_hires/spk2utt|wc -l` if [ ! 
-f $decode/.done ]; then mkdir -p $decode $decode_script --nj $my_nj --cmd "$decode_cmd" $rnn_opts \ @@ -496,9 +451,11 @@ if [ -f exp/nnet3/lstm_bidirectional_sp/.done ]; then --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ "${lmwt_dnn_extra_opts[@]}" \ ${dataset_dir} data/langp_test $decode + + my_nj=$my_nj_backup fi -if [ -f exp/nnet3/lstm_realigned_bidirectional_sp//.done ]; then +if [ -f exp/nnet3/lstm_realigned_bidirectional_sp/final.mdl ]; then decode=exp/nnet3/lstm_realigned_bidirectional_sp//decode_${dataset_id} rnn_opts=" --extra-left-context 40 --extra-right-context 40 --frames-per-chunk 20 " decode_script=steps/nnet3/decode.sh @@ -519,7 +476,7 @@ if [ -f exp/nnet3/lstm_realigned_bidirectional_sp//.done ]; then "${lmwt_dnn_extra_opts[@]}" \ ${dataset_dir} data/langp_test $decode fi -if [ -f exp/nnet3/lstm_sp/.done ]; then +if [ -f exp/nnet3/lstm_sp/final.mdl ]; then decode=exp/nnet3/lstm_sp/decode_${dataset_id} rnn_opts=" --extra-left-context 40 --extra-right-context 0 --frames-per-chunk 20 " decode_script=steps/nnet3/decode.sh @@ -541,7 +498,7 @@ if [ -f exp/nnet3/lstm_sp/.done ]; then ${dataset_dir} data/langp_test $decode fi -if [ -f exp/$nnet3_model/.done ]; then +if [ -f exp/$nnet3_model/final.mdl ]; then decode=exp/$nnet3_model/decode_${dataset_id} rnn_opts= decode_script=steps/nnet3/decode.sh @@ -583,6 +540,7 @@ if [ -f exp/$chain_model/final.mdl ]; then touch exp/nnet3$parent_dir_suffix/ivectors_${dataset_id}/.done fi + my_nj_backup=$my_nj rnn_opts= if [ "$is_rnn" == "true" ]; then rnn_opts=" --extra-left-context $extra_left_context --extra-right-context $extra_right_context --frames-per-chunk $frames_per_chunk " @@ -608,6 +566,7 @@ if [ -f exp/$chain_model/final.mdl ]; then --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ "${lmwt_chain_extra_opts[@]}" \ ${dataset_dir} data/langp_test $decode + my_nj=$my_nj_backup else echo "no chain model exp/$chain_model" fi @@ -720,5 +679,72 @@ for dnn in tri6_nnet_semi_supervised 
tri6_nnet_semi_supervised2 \ ${dataset_dir} data/langp_test $decode fi done + +#################################################################### +## SGMM2 decoding +## We Include the SGMM_MMI inside this, as we might only have the DNN systems +## trained and not PLP system. The DNN systems build only on the top of tri5 stage +#################################################################### +if [ -f exp/sgmm5/.done ]; then + decode=exp/sgmm5/decode_fmllr_${dataset_id} + if [ ! -f $decode/.done ]; then + echo --------------------------------------------------------------------- + echo "Spawning $decode on" `date` + echo --------------------------------------------------------------------- + utils/mkgraph.sh \ + data/langp_test exp/sgmm5 exp/sgmm5/graph |tee exp/sgmm5/mkgraph.log + + mkdir -p $decode + steps/decode_sgmm2.sh --skip-scoring true --use-fmllr true --nj $my_nj \ + --cmd "$decode_cmd" --transform-dir exp/tri5/decode_${dataset_id} "${decode_extra_opts[@]}"\ + exp/sgmm5/graph ${dataset_dir} $decode |tee $decode/decode.log + touch $decode/.done + + if ! $fast_path ; then + local/run_kws_stt_task2.sh --cer $cer --max-states $max_states \ + --skip-scoring $skip_scoring --extra-kws $extra_kws --wip $wip \ + --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ + "${lmwt_plp_extra_opts[@]}" \ + ${dataset_dir} data/langp_test exp/sgmm5/decode_fmllr_${dataset_id} + fi + fi + + #################################################################### + ## + ## SGMM_MMI rescoring + ## + #################################################################### + + for iter in 1 2 3 4; do + # Decode SGMM+MMI (via rescoring). + decode=exp/sgmm5_mmi_b0.1/decode_fmllr_${dataset_id}_it$iter + if [ -x exp/sgmm5_mmi_b0.1 ] && [ ! 
-f $decode/.done ]; then + + mkdir -p $decode + steps/decode_sgmm2_rescore.sh --skip-scoring true \ + --cmd "$decode_cmd" --iter $iter --transform-dir exp/tri5/decode_${dataset_id} \ + data/langp_test ${dataset_dir} exp/sgmm5/decode_fmllr_${dataset_id} $decode | tee ${decode}/decode.log + + touch $decode/.done + fi + done + + #We are done -- all lattices have been generated. We have to + #a)Run MBR decoding + #b)Run KW search + for iter in 1 2 3 4; do + # Decode SGMM+MMI (via rescoring). + decode=exp/sgmm5_mmi_b0.1/decode_fmllr_${dataset_id}_it$iter + if [ -f $decode/.done ]; then + local/run_kws_stt_task2.sh --cer $cer --max-states $max_states \ + --skip-scoring $skip_scoring --extra-kws $extra_kws --wip $wip \ + --cmd "$decode_cmd" --skip-kws $skip_kws --skip-stt $skip_stt \ + "${lmwt_plp_extra_opts[@]}" \ + ${dataset_dir} data/langp_test $decode + fi + done +fi + + echo "Everything looking good...." exit 0 diff --git a/egs/wsj/s5/run.sh b/egs/wsj/s5/run.sh index 4d505f5da3a..de0c96fe387 100755 --- a/egs/wsj/s5/run.sh +++ b/egs/wsj/s5/run.sh @@ -123,7 +123,7 @@ if [ $stage -le 2 ]; then for data in dev93 eval92; do nspk=$(wc -l 
matrix-sum --binary=false scp:$data/cmvn.scp - >$dir/global_cmvn.stats 2>/dev/null; then + echo "$0: Error summing cmvn stats" + exit 1 +fi + +feats="ark,s,cs:utils/subset_scp.pl --quiet $max_utts $data/feats.scp | apply-cmvn-online $online_cmvn_opts $dir/global_cmvn.stats scp:- ark:- | splice-feats $splice_opts ark:- ark:- | subsample-feats --n=$subsample ark:- ark:- |" + +if [ $stage -le 0 ]; then + $cmd $dir/log/pca_est.log \ + est-pca --dim=$dim --normalize-variance=$normalize_variance \ + --normalize-mean=$normalize_mean "$feats" $dir/final.mat || exit 1; +fi + +echo "Done estimating PCA transform in $dir" + +exit 0 diff --git a/egs/wsj/s5/steps/online/nnet2/train_diag_ubm.sh b/egs/wsj/s5/steps/online/nnet2/train_diag_ubm.sh index 22250ae9ee3..80a023fed8a 100755 --- a/egs/wsj/s5/steps/online/nnet2/train_diag_ubm.sh +++ b/egs/wsj/s5/steps/online/nnet2/train_diag_ubm.sh @@ -10,15 +10,15 @@ # This script was modified from ../../sre08/v1/sid/train_diag_ubm.sh. It trains # a diagonal UBM on top of features processed with apply-cmvn-online and then -# transformed with an LDA+MLLT matrix (obtained from the source directory). -# This script does not use the trained model from the source directory to -# initialize the diagonal GMM; instead, we initialize the GMM using +# transformed with an LDA+MLLT or PCA matrix (obtained from the source +# directory). This script does not use the trained model from the source +# directory to initialize the diagonal GMM; instead, we initialize the GMM using # gmm-global-init-from-feats, which sets the means to random data points and # then does some iterations of E-M in memory. After the in-memory -# initialization we train for a few iterations in parallel. -# Note that there is a slight mismatch in that the source LDA+MLLT matrix -# (final.mat) will have been estimated using standard CMVN, and we're using -# online CMVN. We don't think this will have much effect. +# initialization we train for a few iterations in parallel. 
Note that if an +# LDA+MLLT transform matrix is used, there will be a slight mismatch in that the +# source LDA+MLLT matrix (final.mat) will have been estimated using standard +# CMVN, and we're using online CMVN. We don't think this will have much effect. # Begin configuration section. @@ -58,7 +58,7 @@ if [ $# != 4 ]; then echo " --stage # stage to do partial re-run from." echo " --num-gselect # Number of Gaussians per frame to" echo " # limit computation to, for speed" - echo " --subsample # In main E-M phase, use every n" + echo " --subsample # In main E-M phase, use every n" echo " # frames (a speedup)" echo " --num-frames # Maximum num-frames to keep in memory" echo " # for model initialization" @@ -89,6 +89,15 @@ for f in $data/feats.scp "$online_cmvn_config" $srcdir/splice_opts $srcdir/final [ ! -f "$f" ] && echo "$0: expecting file $f to exist" && exit 1 done +if [ -d "$dir" ]; then + bak_dir=$(mktemp -d ${dir}/backup.XXX); + echo "$0: Directory $dir already exists. Backing up diagonal UBM in ${bak_dir}"; + for f in $dir/final.mat $dir/final.dubm $dir/online_cmvn.conf $dir/global_cmvn.stats; do + [ -f "$f" ] && mv $f ${bak_dir}/ + done + [ -d "$dir/log" ] && mv $dir/log ${bak_dir}/ +fi + splice_opts=$(cat $srcdir/splice_opts) cp $srcdir/splice_opts $dir/ || exit 1; cp $srcdir/final.mat $dir/ || exit 1; @@ -146,10 +155,16 @@ for x in `seq 0 $[$num_iters-1]`; do $cmd $dir/log/update.$x.log \ gmm-global-est $opt --min-gaussian-weight=$min_gaussian_weight $dir/$x.dubm "gmm-global-sum-accs - $dir/$x.*.acc|" \ $dir/$[$x+1].dubm || exit 1; - rm $dir/$x.*.acc $dir/$x.dubm + + if $cleanup; then + rm $dir/$x.*.acc $dir/$x.dubm + fi fi done -rm $dir/gselect.*.gz +if $cleanup; then + rm $dir/gselect.*.gz +fi + mv $dir/$num_iters.dubm $dir/final.dubm || exit 1; exit 0; diff --git a/egs/wsj/s5/steps/online/nnet2/train_ivector_extractor.sh b/egs/wsj/s5/steps/online/nnet2/train_ivector_extractor.sh index 67845b01c8a..5dbda1780f4 100755 --- 
a/egs/wsj/s5/steps/online/nnet2/train_ivector_extractor.sh +++ b/egs/wsj/s5/steps/online/nnet2/train_ivector_extractor.sh @@ -21,7 +21,7 @@ # - Set num_threads to the minimum of (4, or how many virtual cores your machine has). # (because of needing to lock various global quantities, the program can't # use many more than 4 threads with good CPU utilization). -# - Set num_processes to the number of virtual cores on each machine you have, divided by +# - Set num_processes to the number of virtual cores on each machine you have, divided by # num_threads. E.g. 4, if you have 16 virtual cores. If you're on a shared queue # that's busy with other people's jobs, it may be wise to set it to rather less # than this maximum though, or your jobs won't get scheduled. And if memory is @@ -32,8 +32,8 @@ # may want more jobs, though. # Begin configuration section. -nj=10 # this is the number of separate queue jobs we run, but each one - # contains num_processes sub-jobs.. the real number of threads we +nj=10 # this is the number of separate queue jobs we run, but each one + # contains num_processes sub-jobs.. the real number of threads we # run is nj * num_processes * num_threads, and the number of # separate pieces of data is nj * num_processes. num_threads=4 @@ -88,6 +88,17 @@ for f in $srcdir/final.dubm $srcdir/final.mat $srcdir/global_cmvn.stats $srcdir/ [ ! -f $f ] && echo "No such file $f" && exit 1; done + +if [ -d "$dir" ]; then + bak_dir=$(mktemp -d ${dir}/backup.XXX); + echo "$0: Directory $dir already exists. Backing up iVector extractor in ${bak_dir}"; + for f in $dir/final.ie $dir/*.ie $dir/final.mat $dir/final.dubm \ + $dir/online_cmvn.conf $dir/global_cmvn.stats; do + [ -f "$f" ] && mv $f ${bak_dir}/ + done + [ -d "$dir/log" ] && mv $dir/log ${bak_dir}/ +fi + # Set various variables. 
mkdir -p $dir/log nj_full=$[$nj*$num_processes] @@ -105,7 +116,6 @@ gmm_feats="ark,s,cs:apply-cmvn-online --config=$dir/online_cmvn.conf $dir/global feats="ark,s,cs:splice-feats $splice_opts scp:$sdata/JOB/feats.scp ark:- | transform-feats $dir/final.mat ark:- ark:- | subsample-feats --n=$subsample ark:- ark:- |" - # Initialize the i-vector extractor using the input GMM, which is converted to # full because that's what the i-vector extractor expects. Note: we have to do # --use-weights=false to disable regression of the log weights on the ivector, @@ -115,7 +125,7 @@ if [ $stage -le -2 ]; then $cmd $dir/log/init.log \ ivector-extractor-init --ivector-dim=$ivector_dim --use-weights=false \ "gmm-global-to-fgmm $dir/final.dubm -|" $dir/0.ie || exit 1 -fi +fi # Do Gaussian selection and posterior extracion @@ -168,20 +178,23 @@ while [ $x -lt $num_iters ]; do # each accumulation process uses, since we # can be sure the queue will support this many. # - # The parallel-opts was either specified by + # The parallel-opts was either specified by # the user or we computed it correctly in # tge previous stages $cmd --num-threads $[$num_threads*$num_processes] $dir/log/update.$x.log \ ivector-extractor-est --num-threads=$nt $dir/$x.ie $dir/acc.$x $dir/$[$x+1].ie || exit 1; rm $dir/acc.$x.* if $cleanup; then - rm $dir/acc.$x - # rm $dir/$x.ie + rm $dir/acc.$x $dir/$x.ie fi fi x=$[$x+1] done +if $cleanup; then + rm $dir/post.*.gz +fi + rm $dir/final.ie 2>/dev/null ln -s $x.ie $dir/final.ie diff --git a/egs/wsj/s5/steps/train_lda_mllt.sh b/egs/wsj/s5/steps/train_lda_mllt.sh index 8b5e19ec8d1..363df34a3cd 100755 --- a/egs/wsj/s5/steps/train_lda_mllt.sh +++ b/egs/wsj/s5/steps/train_lda_mllt.sh @@ -95,7 +95,7 @@ feats="$splicedfeats transform-feats $dir/0.mat ark:- ark:- |" if [ $stage -le -5 ]; then if [ -z "$use_lda_mat" ]; then - echo "Accumulating LDA statistics." + echo "$0: Accumulating LDA statistics." 
rm $dir/lda.*.acc 2>/dev/null $cmd JOB=1:$nj $dir/log/lda_acc.JOB.log \ ali-to-post "ark:gunzip -c $alidir/ali.JOB.gz|" ark:- \| \ @@ -106,11 +106,11 @@ if [ $stage -le -5 ]; then 2>$dir/log/lda_est.log || exit 1; rm $dir/lda.*.acc else - echo "Using supplied LDA matrix $use_lda_mat" + echo "$0: Using supplied LDA matrix $use_lda_mat" cp $use_lda_mat $dir/0.mat || exit 1; [ ! -z "$mllt_iters" ] && \ - echo "Warning: using supplied LDA matrix $use_lda_mat but we will do MLLT," && \ - echo "which you might not want; to disable MLLT, specify --mllt-iters ''" && \ + echo "$0: Warning: using supplied LDA matrix $use_lda_mat but we will do MLLT," && \ + echo " which you might not want; to disable MLLT, specify --mllt-iters ''" && \ sleep 5 fi fi @@ -118,12 +118,12 @@ fi cur_lda_iter=0 if [ $stage -le -4 ] && $train_tree; then - echo "Accumulating tree stats" + echo "$0: Accumulating tree stats" $cmd JOB=1:$nj $dir/log/acc_tree.JOB.log \ acc-tree-stats $context_opts \ --ci-phones=$ciphonelist $alidir/final.mdl "$feats" \ "ark:gunzip -c $alidir/ali.JOB.gz|" $dir/JOB.treeacc || exit 1; - [ `ls $dir/*.treeacc | wc -w` -ne "$nj" ] && echo "Wrong #tree-accs" && exit 1; + [ `ls $dir/*.treeacc | wc -w` -ne "$nj" ] && echo "$0: Wrong #tree-accs" && exit 1; $cmd $dir/log/sum_tree_acc.log \ sum-tree-stats $dir/treeacc $dir/*.treeacc || exit 1; rm $dir/*.treeacc @@ -131,7 +131,7 @@ fi if [ $stage -le -3 ] && $train_tree; then - echo "Getting questions for tree clustering." + echo "$0: Getting questions for tree clustering." # preparing questions, roots file... 
cluster-phones $context_opts $dir/treeacc $lang/phones/sets.int \ $dir/questions.int 2> $dir/log/questions.log || exit 1; @@ -139,7 +139,7 @@ if [ $stage -le -3 ] && $train_tree; then compile-questions $context_opts $lang/topo $dir/questions.int \ $dir/questions.qst 2>$dir/log/compile_questions.log || exit 1; - echo "Building the tree" + echo "$0: Building the tree" $cmd $dir/log/build_tree.log \ build-tree $context_opts --verbose=1 --max-leaves=$numleaves \ --cluster-thresh=$cluster_thresh $dir/treeacc $lang/phones/roots.int \ @@ -164,14 +164,14 @@ fi if [ $stage -le -1 ]; then # Convert the alignments. - echo "Converting alignments from $alidir to use current tree" + echo "$0: Converting alignments from $alidir to use current tree" $cmd JOB=1:$nj $dir/log/convert.JOB.log \ convert-ali $alidir/final.mdl $dir/1.mdl $dir/tree \ "ark:gunzip -c $alidir/ali.JOB.gz|" "ark:|gzip -c >$dir/ali.JOB.gz" || exit 1; fi if [ $stage -le 0 ] && [ "$realign_iters" != "" ]; then - echo "Compiling graphs of transcripts" + echo "$0: Compiling graphs of transcripts" $cmd JOB=1:$nj $dir/log/compile_graphs.JOB.log \ compile-train-graphs --read-disambig-syms=$lang/phones/disambig.int $dir/tree $dir/1.mdl $lang/L.fst \ "ark:utils/sym2int.pl --map-oov $oov -f 2- $lang/words.txt < $data/split$nj/JOB/text |" \ @@ -192,7 +192,7 @@ while [ $x -lt $num_iters ]; do fi if echo $mllt_iters | grep -w $x >/dev/null; then if [ $stage -le $x ]; then - echo "Estimating MLLT" + echo "$0: Estimating MLLT" $cmd JOB=1:$nj $dir/log/macc.$x.JOB.log \ ali-to-post "ark:gunzip -c $dir/ali.JOB.gz|" ark:- \| \ weight-silence-post 0.0 $silphonelist $dir/$x.mdl ark:- ark:- \| \ @@ -233,6 +233,6 @@ utils/summarize_warnings.pl $dir/log steps/info/gmm_dir_info.pl $dir -echo "Done training system with LDA+MLLT features in $dir" +echo "$0: Done training system with LDA+MLLT features in $dir" exit 0 diff --git a/egs/wsj/s5/utils/validate_data_dir.sh b/egs/wsj/s5/utils/validate_data_dir.sh index 58e51a75aef..7e93b0f8400 
100755 --- a/egs/wsj/s5/utils/validate_data_dir.sh +++ b/egs/wsj/s5/utils/validate_data_dir.sh @@ -22,6 +22,8 @@ done if [ $# -ne 1 ]; then echo "Usage: $0 [--no-feats] [--no-text] [--no-wav] " + echo "The --no-xxx options mean that the script does not require " + echo "xxx.scp to be present, but it will check it if it is present." echo "e.g.: $0 data/train" exit 1; fi diff --git a/src/base/get_version.sh b/src/base/get_version.sh index 4829391ac44..d6c6c975a4d 100755 --- a/src/base/get_version.sh +++ b/src/base/get_version.sh @@ -54,20 +54,20 @@ elif [ "$(git rev-parse --is-inside-work-tree 2>/dev/null)" != true ]; then echo "$0: Using the version number \"$version\" specified in src/.version." else # Figure out patch number. - version_commit=$(git log -1 --pretty=oneline ../.version | cut -f 1 -d ' ') - patch_number=$(git rev-list ${version_commit}..HEAD | wc -l) + version_commit=$(git log -1 --pretty=oneline ../.version | awk '{print $1}') + patch_number=$(git rev-list ${version_commit}..HEAD | wc -l | awk '{print $1}') version="$version.$patch_number" # Check for uncommitted changes in src/. - uncommitted_changes=$(git diff-index HEAD -- .. | wc -l) + uncommitted_changes=$(git diff-index HEAD -- .. | wc -l | awk '{print $1}') if [ $uncommitted_changes -gt 0 ]; then # Add suffix ~N if there are N files in src/ with uncommitted changes version="$version~$uncommitted_changes" fi # Figure out HEAD commit SHA-1. 
- head_commit=$(git log -1 --pretty=oneline | cut -f 1 -d ' ') - head_commit_short=$(git log -1 --oneline --abbrev=4 | cut -f 1 -d ' ') + head_commit=$(git log -1 --pretty=oneline | awk '{print $1}') + head_commit_short=$(git log -1 --oneline --abbrev=4 | awk '{print $1}') version="$version-${head_commit_short}" fi diff --git a/src/bin/ali-to-phones.cc b/src/bin/ali-to-phones.cc index b370dbc7f18..2a76000cfae 100644 --- a/src/bin/ali-to-phones.cc +++ b/src/bin/ali-to-phones.cc @@ -35,7 +35,7 @@ int main(int argc, char *argv[]) { "Usage: ali-to-phones [options] " "\n" "e.g.: \n" - " ali-to-phones 1.mdl ark:1.ali ark:phones.tra\n" + " ali-to-phones 1.mdl ark:1.ali ark:-\n" "or:\n" " ali-to-phones --ctm-output 1.mdl ark:1.ali 1.ctm\n" "See also: show-alignments lattice-align-phones\n"; diff --git a/src/bin/align-equal.cc b/src/bin/align-equal.cc index 3d35ee33daa..a3bc40dc236 100644 --- a/src/bin/align-equal.cc +++ b/src/bin/align-equal.cc @@ -36,10 +36,13 @@ int main(int argc, char *argv[]) { using fst::VectorFst; using fst::StdArc; - const char *usage = "Write equally spaced alignments of utterances (to get training started)\n" - "Usage: align-equal \n" + const char *usage = "Write equally spaced alignments of utterances " + "(to get training started)\n" + "Usage: align-equal " + " \n" "e.g.: \n" - " align-equal 1.tree 1.mdl lex.fst scp:train.scp ark:train.tra ark:equal.ali\n"; + " align-equal 1.tree 1.mdl lex.fst scp:train.scp " + "'ark:sym2int.pl -f 2- words.txt text|' ark:equal.ali\n"; ParseOptions po(usage); std::string disambig_rxfilename; diff --git a/src/bin/compile-train-graphs.cc b/src/bin/compile-train-graphs.cc index 6636ef88878..874d079376e 100644 --- a/src/bin/compile-train-graphs.cc +++ b/src/bin/compile-train-graphs.cc @@ -37,9 +37,11 @@ int main(int argc, char *argv[]) { const char *usage = "Creates training graphs (without transition-probabilities, by default)\n" "\n" - "Usage: compile-train-graphs [options] \n" + "Usage: compile-train-graphs 
[options] " + " \n" "e.g.: \n" - " compile-train-graphs tree 1.mdl lex.fst ark:train.tra ark:graphs.fsts\n"; + " compile-train-graphs tree 1.mdl lex.fst " + "'ark:sym2int.pl -f 2- words.txt text|' ark:graphs.fsts\n"; ParseOptions po(usage); TrainingGraphCompilerOptions gopts; diff --git a/src/bin/phones-to-prons.cc b/src/bin/phones-to-prons.cc index 6e3cf7a4651..0d7ab12c232 100644 --- a/src/bin/phones-to-prons.cc +++ b/src/bin/phones-to-prons.cc @@ -80,7 +80,8 @@ int main(int argc, char *argv[]) { " \n" "e.g.: \n" " ali-to-phones 1.mdl ark:1.ali ark:- | \\\n" - " phones-to-prons L_align.fst 46 47 ark:- 1.tra ark:1.prons\n"; + " phones-to-prons L_align.fst 46 47 ark:- " + "'ark:sym2int.pl -f 2- words.txt text|' ark:1.prons\n"; ParseOptions po(usage); po.Read(argc, argv); diff --git a/src/bin/prons-to-wordali.cc b/src/bin/prons-to-wordali.cc index 8e89d7cc644..a6331043500 100644 --- a/src/bin/prons-to-wordali.cc +++ b/src/bin/prons-to-wordali.cc @@ -52,8 +52,8 @@ int main(int argc, char *argv[]) { " \n" "e.g.: \n" " ali-to-phones 1.mdl ark:1.ali ark:- | \\\n" - " phones-to-prons L_align.fst 46 47 ark:- 1.tra ark:- | \\\n" - " prons-to-wordali ark:- \\\n" + " phones-to-prons L_align.fst 46 47 ark:- 'ark:sym2int.pl -f 2- words.txt text|' \\\n" + " ark:- | prons-to-wordali ark:- \\\n" " \"ark:ali-to-phones --write-lengths 1.mdl ark:1.ali ark:-|\" ark:1.wali\n"; ParseOptions po(usage); diff --git a/src/chain/chain-training.cc b/src/chain/chain-training.cc index 1bf0201fbfa..53de69a0e07 100644 --- a/src/chain/chain-training.cc +++ b/src/chain/chain-training.cc @@ -30,7 +30,7 @@ void ComputeChainObjfAndDeriv(const ChainTrainingOptions &opts, const Supervision &supervision, const CuMatrixBase &nnet_output, BaseFloat *objf, - BaseFloat *l2_term, + BaseFloat *l2_term, BaseFloat *weight, CuMatrixBase *nnet_output_deriv, CuMatrixBase *xent_output_deriv) { @@ -86,7 +86,7 @@ void ComputeChainObjfAndDeriv(const ChainTrainingOptions &opts, // for different frames of the sequences. 
As expected, they are // smaller towards the edges of the sequences (due to the penalization // of 'incorrect' pdf-ids. - if (GetVerboseLevel() >= 1) { + if (GetVerboseLevel() >= 1 && nnet_output_deriv != NULL) { int32 tot_frames = nnet_output_deriv->NumRows(), frames_per_sequence = supervision.frames_per_sequence, num_sequences = supervision.num_sequences; diff --git a/src/cudamatrix/cu-allocator.h b/src/cudamatrix/cu-allocator.h index b10601b8245..c6500e95559 100644 --- a/src/cudamatrix/cu-allocator.h +++ b/src/cudamatrix/cu-allocator.h @@ -141,7 +141,7 @@ class CuMemoryAllocator { // be a multiple of 4, and num_rows will frequently be a multiple of // powers of 2 also. We need to shift right and add so that there will be // some action in the lower-order bits. - size_t operator () (const std::pair &p) const { + size_t operator () (const std::pair &p) const noexcept { size_t temp = p.first + 1867 * p.second; return temp + (temp >> 2) + (temp >> 8); } @@ -206,7 +206,7 @@ class CuMemoryAllocator { }; struct PointerHasher { - size_t operator() (const void *arg) const { + size_t operator() (const void *arg) const noexcept { // the last few bits tend to be very predictable, for alignment reasons (CUDA // allocation may align on 256 byte or 512 byte boundaries or something similar). 
size_t temp = reinterpret_cast(arg); diff --git a/src/cudamatrix/cu-kernels-ansi.h b/src/cudamatrix/cu-kernels-ansi.h index a69246a339a..444da38dd30 100644 --- a/src/cudamatrix/cu-kernels-ansi.h +++ b/src/cudamatrix/cu-kernels-ansi.h @@ -30,6 +30,12 @@ #if HAVE_CUDA == 1 extern "C" { +void cudaD_add_col_sum_mat(int Gr, int Bl, double* result, const double* mat, + const MatrixDim d, const double alpha, + const double beta); +void cudaF_add_col_sum_mat(int Gr, int Bl, float* result, const float* mat, + const MatrixDim d, const float alpha, + const float beta); void cudaD_add_cols(dim3 Gr, dim3 Bl, double* dst, const double* src, const MatrixIndexT_cuda* reorder, MatrixDim dst_dim, int src_stride); diff --git a/src/cudamatrix/cu-kernels.cu b/src/cudamatrix/cu-kernels.cu index d4b247ffaa7..60800d9568d 100644 --- a/src/cudamatrix/cu-kernels.cu +++ b/src/cudamatrix/cu-kernels.cu @@ -1220,7 +1220,7 @@ static void _equal_element_mask(const Real *mat1, const Real *mat2, Real *mask, } enum EnumTransformReduce { - SUM, MAX, MIN, LINFNORM, L2NORM, L1NORM, L0NORM, LPNORM + SUMAB, SUM, MAX, MIN, LINFNORM, L2NORM, L1NORM, L0NORM, LPNORM }; template @@ -1243,6 +1243,35 @@ struct TransReduceOp { } }; +template +struct TransReduceOp { + const Real alpha_; + const Real beta_; + TransReduceOp(const Real& a, const Real& b) : + alpha_(a), beta_(b) { + } + __forceinline__ + __device__ Real InitValue() const { + return Real(0); + } + __forceinline__ + __device__ Real Transform(const Real& x) const { + return x; + } + __forceinline__ + __device__ Real Reduce(const Real& a, const Real& b) const { + return a + b; + } + __forceinline__ + __device__ Real PostReduce(const Real& x, const Real& output) const { + if (beta_ == Real(0)) { + return alpha_ * x; + } else { + return alpha_ * x + beta_ * output; + } + } +}; + template struct TransReduceOp { __forceinline__ @@ -3570,6 +3599,12 @@ void cudaF_sum_mat_cols(int Gr, int Bl, float* result, const float* mat, 
_transform_reduce_mat_cols<<>>(result,mat,d, TransReduceOp()); } +void cudaF_add_col_sum_mat(int Gr, int Bl, float* result, const float* mat, + const MatrixDim d, const float alpha, + const float beta) { + _transform_reduce_mat_cols<<>>(result, mat, d, + TransReduceOp(alpha, beta)); +} void cudaF_replace_value(int Gr, int Bl, float *v, int dim, float orig, float changed) { @@ -4225,6 +4260,12 @@ void cudaD_sum_mat_cols(int Gr, int Bl, double* result, const double* mat, _transform_reduce_mat_cols<<>>(result,mat,d, TransReduceOp()); } +void cudaD_add_col_sum_mat(int Gr, int Bl, double* result, const double* mat, + const MatrixDim d, const double alpha, + const double beta) { + _transform_reduce_mat_cols<<>>(result, mat, d, + TransReduceOp(alpha, beta)); +} void cudaD_replace_value(int Gr, int Bl, double *v, int dim, double orig, double changed) { diff --git a/src/cudamatrix/cu-kernels.h b/src/cudamatrix/cu-kernels.h index 87aaf096570..77352b5925f 100644 --- a/src/cudamatrix/cu-kernels.h +++ b/src/cudamatrix/cu-kernels.h @@ -38,6 +38,16 @@ namespace kaldi { +inline void cuda_add_col_sum_mat(int Gr, int Bl, double* result, + const double* mat, const MatrixDim d, + const double alpha, const double beta) { + cudaD_add_col_sum_mat(Gr, Bl, result, mat, d, alpha, beta); +} +inline void cuda_add_col_sum_mat(int Gr, int Bl, float* result, + const float* mat, const MatrixDim d, + const float alpha, const float beta) { + cudaF_add_col_sum_mat(Gr, Bl, result, mat, d, alpha, beta); +} inline void cuda_add_cols(dim3 Gr, dim3 Bl, double* dst, const double* src, const MatrixIndexT_cuda* reorder, MatrixDim dst_dim, int src_stride) { diff --git a/src/cudamatrix/cu-vector.cc b/src/cudamatrix/cu-vector.cc index e6aa72249f7..b825b9c0a6e 100644 --- a/src/cudamatrix/cu-vector.cc +++ b/src/cudamatrix/cu-vector.cc @@ -1173,19 +1173,25 @@ void CuVectorBase::AddRowSumMat(Real alpha, const CuMatrixBase &mat, } - template -void CuVectorBase::AddColSumMat(Real alpha, - const CuMatrixBase &mat, 
+void CuVectorBase::AddColSumMat(Real alpha, const CuMatrixBase &mat, Real beta) { - KALDI_ASSERT(mat.NumRows() == Dim()); - - CuVector ones(mat.NumCols()); - ones.Set(1.0); - this->AddMatVec(alpha, mat, kNoTrans, ones, beta); -} +#if HAVE_CUDA == 1 + if (CuDevice::Instantiate().Enabled()) { + Timer tim; + KALDI_ASSERT(mat.NumRows() == Dim()); + cuda_add_col_sum_mat(mat.NumRows(), CU1DBLOCK, Data(), mat.Data(), + mat.Dim(), alpha, beta); + CU_SAFE_CALL(cudaGetLastError()); + CuDevice::Instantiate().AccuProfile(__func__, tim.Elapsed()); + } else +#endif + { + Vec().AddColSumMat(alpha, mat.Mat(), beta); + } +} template void CuVectorBase::InvertElements() { diff --git a/src/doc/Kaldi.pptx b/src/doc/Kaldi.pptx old mode 100755 new mode 100644 diff --git a/src/doc/KaldiMatrix.pptx b/src/doc/KaldiMatrix.pptx old mode 100755 new mode 100644 diff --git a/src/doc/KaldiModels.pptx b/src/doc/KaldiModels.pptx old mode 100755 new mode 100644 diff --git a/src/doc/KaldiScripts.pptx b/src/doc/KaldiScripts.pptx old mode 100755 new mode 100644 diff --git a/src/doc/README b/src/doc/README old mode 100755 new mode 100644 diff --git a/src/doc/dnn3_scripts_context.dox b/src/doc/dnn3_scripts_context.dox index 43ee0d40260..884e8c79f51 100644 --- a/src/doc/dnn3_scripts_context.dox +++ b/src/doc/dnn3_scripts_context.dox @@ -49,7 +49,7 @@ namespace nnet3 { compute this output without seeing a range of input frames. For example, it may be impossible to compute the output without seeing the range of 't' values from t = 150 through t = 157. In this case (glossing over details), - we'd say that the network has a \b left-context of 3 and a \b right-context of 4. + we'd say that the network has a \b left-context of 4 and a \b right-context of 3. The actual computation of the context is a bit more complex as it has to take into account special cases like where, say, the behavior for odd and even 't' values is different (c.f. 
Round() descriptors in diff --git a/src/doc/examples.dox b/src/doc/examples.dox old mode 100755 new mode 100644 diff --git a/src/doc/transform.dox b/src/doc/transform.dox index 6d487722124..dfeaf6f66d5 100644 --- a/src/doc/transform.dox +++ b/src/doc/transform.dox @@ -31,7 +31,7 @@ namespace kaldi { relate to the commonalities: - \ref transform_apply - \ref transform_perspk - - \ref transform_utt2spk + - \ref transform_utt2spk - \ref transform_compose - \ref transform_weight @@ -49,8 +49,8 @@ namespace kaldi { We next discuss regression class trees and transforms that use them: - \ref transform_regtree - - + + \section transform_apply Applying global linear or affine feature transforms In the case of feature-space transforms and projections that are global, @@ -59,22 +59,22 @@ namespace kaldi { projection is represented as a matrix by which we will left-multiply a feature vector, so the transformed feature is \f$ A x \f$. An affine transform or projection is represented the same way, but we imagine a 1 has been appended to the - feature vector, so the transformed feature is + feature vector, so the transformed feature is \f$ W \left[ \begin{array}{c} x \\ 1 \end{array} \right] \f$ where \f$ W = \left[ A ; b \right] \f$, with A and b being the linear transform and the constant offset. Note that this convention differs from some of the literature, where the 1 may appear as - the first dimension rather than the last. + the first dimension rather than the last. Global transforms and projections are generally written as a type Matrix to a single file, and speaker or utterance-specific transforms or projections are stored in a table of such matrices (see \ref io_sec_tables) - indexed by speaker-id or utterance-id. + indexed by speaker-id or utterance-id. Transforms may be applied to features using the program transform-feats. 
Its syntax is \verbatim transform-feats -\endverbatim +\endverbatim where is an rspecifier, is an wspecifier, and may be an rxfilename or an rspecifier (see \ref io_sec_specifiers and \ref io_sec_xfilename). The program will work out whether the transform @@ -83,14 +83,14 @@ namespace kaldi { This program is typically used as part of a pipe. A typical example is: \verbatim - feats="ark:splice-feats scp:data/train.scp ark:- | + feats="ark:splice-feats scp:data/train.scp ark:- | transform-feats $dir/0.mat ark:- ark:-|" some-program some-args "$feats" some-other-args ... \endverbatim Here, the file 0.mat contains a single matrix. An example of applying speaker-specific transforms is: \verbatim - feats="ark:add-deltas scp:data/train.scp ark:- | + feats="ark:add-deltas scp:data/train.scp ark:- | transform-feats --utt2spk=ark:data/train.utt2spk ark:$dir/0.trans ark:- ark:-|" some-program some-args "$feats" some-other-args ... \endverbatim @@ -98,33 +98,33 @@ A per-utterance example would be as above but removing the --utt2spk option. In this example, the archive file 0.trans would contain transforms (e.g. CMLLR transforms) indexed by speaker-id, and the file data/train.utt2spk would have lines of the form "utt-id spk-id" (see next section for more explanation). -The program transform-feats does not care how the transformation matrix was +The program transform-feats does not care how the transformation matrix was estimated, it just applies it to the features. After it has been through all the features it prints out the average per-frame log determinant. This can be useful when comparing objective functions (this log determinant would have to be added to the per-frame likelihood printed out by programs like gmm-align, gmm-acc-stats, or gmm-decode-kaldi). If the linear part A of the transformation (i.e. 
ignoring the offset term) is not square, -then the program will instead print out the per-frame average of +then the program will instead print out the per-frame average of \f$ \frac{1}{2} \mathbf{logdet} (A A^T) \f$. It refers to this as the pseudo-log-determinant. -This is useful in checking convergence of MLLT estimation where the transformation matrix +This is useful in checking convergence of MLLT estimation where the transformation matrix being applied is the MLLT matrix times an LDA matrix. \section transform_perspk Speaker-independent versus per-speaker versus per-utterance adaptation Programs that estimate transforms are generally set up to do a particular kind of adaptation, i.e. speaker-independent versus (speaker- or utterance-specific). For example, LDA -and MLLT/STC transforms are speaker-independent but fMLLR transforms are speaker- or +and MLLT/STC transforms are speaker-independent but fMLLR transforms are speaker- or utterance-specific. Programs that estimate speaker- or utterance-specific transforms will work in per-utterance mode by default, but in per-speaker mode if the --spk2utt -option is supplied (see below). +option is supplied (see below). One program that can accept either speaker-independent or speaker- or utterance-specific transforms is transform-feats. This program detects whether the first argument (the transform) is an rxfilename (see \ref io_sec_xfilename) or an rspecifier (see \ref io_sec_specifiers). If the former, it treats it as a speaker-independent transform (e.g. a file containing a single matrix). -If the latter, there are two choices. If no --utt2spk option is provided, +If the latter, there are two choices. If no --utt2spk option is provided, it treats the transform as a table of matrices indexed by utterance id. 
If an --utt2spk option is provided (utt2spk is a table of strings indexed by utterance that contains the string-valued speaker id), then the transforms are assumed to be indexed by speaker id, and the table @@ -133,13 +133,13 @@ provided to the --utt2spk option is used to map each utterance to a speaker id. \section transform_utt2spk Utterance-to-speaker and speaker-to-utterance maps At this point we give a general overview of the --utt2spk and --spk2utt options. - These options are accepted by programs that deal with transformations; they are used when + These options are accepted by programs that deal with transformations; they are used when you are doing per-speaker (as opposed to per-utterance) adaptation. Typically programs that process already-created transforms will need the --utt2spk - option and programs that create the transforms will need the --spk2utt option. + option and programs that create the transforms will need the --spk2utt option. A typical case is that there will be a file called some-directory/utt2spk that looks like: -\verbatim +\verbatim spk1utt1 spk1 spk1utt2 spk1 spk2utt1 spk2 @@ -148,11 +148,11 @@ spk2utt2 spk2 \endverbatim where these strings are just examples, they stand for generic speaker and utterance identifiers; and there will be a file called some-directory/spk2utt that looks like: -\verbatim +\verbatim spk1 spk1utt1 spk1utt2 spk2 spk2utt1 spk2utt2 ... -\endverbatim +\endverbatim and you will supply options that look like --utt2spk=ark:some-directory/utt2spk or --spk2utt=ark:some-directory/spk2utt. The 'ark:' prefix is necessary because these files are given as rspecifiers by the Table code, and are interpreted as archives @@ -177,7 +177,7 @@ spk2 spk2utt1 spk2utt2 for more discussion of this issue. \section transform_compose Composing transforms - + Another program that accepts generic transforms is the program compose-transforms. 
The general syntax is "compose-transforms a b c", and it performs the multiplication c = a b (although this involves a little more than matrix multiplication if a is affine). @@ -197,7 +197,7 @@ spk2 spk2utt1 spk2utt2 feats="ark:splice-feats scp:data/train.scp ark:- | transform-feats 0.mat ark:- ark:- | transform-feats ark:1.trans ark:- ark:- |" - ... + ... \endverbatim In general, the transforms a and b that are the inputs to compose-transforms may be either speaker-independent transforms or speaker- or utterance-specific @@ -208,11 +208,11 @@ spk2 spk2utt1 spk2utt2 represent either tables or normal files (i.e. either {r,w}specifiers or {r,w}xfilenames), subject to consistency requirements. - If a is an affine transform, in order to perform the composition correctly, compose-transforms + If a is an affine transform, in order to perform the composition correctly, compose-transforms needs to know whether b is affine or linear (it does not know this because it does not have access to the dimension of the features that are transformed by b). This is controlled by the option --b-is-affine (bool, default false). - If b is affine but you forget to set this option and a is affine, compose-transforms + If b is affine but you forget to set this option and a is affine, compose-transforms will treat b as a linear transform from dimension (the real input feature dimension) plus one, and will output a transform whose input dimension is (the real input feature dimension) plus two. There is no way for "transform-feats" to interpret this when it is to be applied to features, @@ -225,7 +225,7 @@ Eliminating silence frames can be helpful when estimating speaker adaptive transforms such as CMLLR. This even appears to be true when using a multi-class approach with a regression tree (for which, see \ref transform_regtree). The way we implement this is by weighting down the posteriors associated with -silence phones. 
This takes place as a modification to the \ref hmm_post +silence phones. This takes place as a modification to the \ref hmm_post "state-level posteriors". An extract of a bash shell script that does this is below (this script is discussed in more detail in \ref transform_cmllr_global): \verbatim @@ -249,7 +249,7 @@ class LdaEstimate { void Accumulate(const VectorBase &data, int32 class_id, BaseFloat weight=1.0); }; -\endverbatim +\endverbatim The program acc-lda accumulates LDA statistics using the acoustic states (i.e. pdf-ids) as the classes. It requires the transition model in order to map the alignments (expressed in terms of transition-ids) to pdf-ids. However, it is not limited to a particular type of acoustic model. @@ -262,16 +262,16 @@ when using LDA as an initialization for HLDA. \section transform_splice Frame splicing -Frame splicing (e.g. splicing nine consecutive frames together) is typically done +Frame splicing (e.g. splicing nine consecutive frames together) is typically done to the raw MFCC features prior to LDA. The program splice-feats does this. A typical line from a script that uses this is the following: \verbatim feats="ark:splice-feats scp:data/train.scp ark:- | transform-feats $dir/0.mat ark:- ark:-|" \endverbatim -and the "feats" variable would later be used as an rspecifier (c.f. \ref io_sec_specifiers) +and the "feats" variable would later be used as an rspecifier (c.f. \ref io_sec_specifiers) by some program that needs to read features. In this example we don't specify the number of frames to splice -together because we are using the defaults (--left-context=4, --right-context=4, or +together because we are using the defaults (--left-context=4, --right-context=4, or 9 frames in total). 
\section transform_delta Delta feature computation @@ -279,7 +279,7 @@ together because we are using the defaults (--left-context=4, --right-context=4, Computation of delta features is done by the program add-deltas, which uses the function ComputeDeltas. The delta feature computation has the same default setup as HTK's, i.e. to compute the first delta feature we multiply by the features -by a sliding window of values [ -2, 1, 0, 1, 2 ], and then normalize by +by a sliding window of values [ -2, -1, 0, 1, 2 ], and then normalize by dividing by (2^2 + 1^2 + 0^2 + 1^2 + 2^2 = 10). The second delta feature is computed by applying the same approach to the first delta feature. The number of frames of context on each side is controlled by --delta-window (default: 2) @@ -311,9 +311,9 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" case they need to be defined slightly differently for the accepted and rejected dimensions. Suppose the original feature dimension is D and the - reduced feature dimension is K. + reduced feature dimension is K. Let us forget the iteration superscript r, and use subscript j for state and - m for Gaussian mixture. + m for Gaussian mixture. For accepted dimensions (\f$0 \leq i < K\f$), the statistics are: \f[ \mathbf{G}^{(i)} = \sum_{t,j,m} \gamma_{jm}(t) \frac{1}{ \sigma^2_{jm}(i) } (\mu_{jm} - \mathbf{x}(t)) (\mu_{jm} - \mathbf{x}(t))^T @@ -333,13 +333,13 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" same, so in the code we only store statistics for K+1 rather than D dimensions. 
Also, it is convenient for the program that accumulates the statistics to only have - access to the K-dimensional model, so during HLDA accumulation we accumulate + access to the K-dimensional model, so during HLDA accumulation we accumulate statistics sufficient to estimate the K-dimensional means \f$\mu_{jm}\f$, and insead of - G we accumulate the following statistics: for accepted dimensions (\f$0 \leq i < K\f$), + G we accumulate the following statistics: for accepted dimensions (\f$0 \leq i < K\f$), \f[ \mathbf{S}^{(i)} = \sum_{t,j,m} \gamma_{jm}(t) \frac{1}{ \sigma^2_{jm}(i) } \mathbf{x}(t) \mathbf{x}(t)^T \f] - and for rejected dimensions \f$K \leq i < D\f$ + and for rejected dimensions \f$K \leq i < D\f$ \f[ \mathbf{S}^{(i)} = \sum_{t,j,m} \gamma_{jm}(t) \mathbf{x}(t) \mathbf{x}(t)^T , \f] @@ -350,13 +350,13 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" \f] and for \f$K \leq i < D\f$, \f[ - \mathbf{G}^{(i)} = \mathbf{S}^{(i)} - \beta \mu \mu^T, + \mathbf{G}^{(i)} = \mathbf{S}^{(i)} - \beta \mu \mu^T, \f] where \f$ \beta = \sum_{j,m} \gamma_{jm} \f$ is the total count and \f$\mu = \frac{1}{\beta} \sum_{j,m} \mu_{j,m}\f$ is the global feature mean. After computing the transform from the G statistics using the same computation as MLLT, we output the transform, and we also use the first K rows of the transform to project the means into dimension K and write out the transformed model. - + The computation described here is fairly slow; it is \f$ O(K^3) \f$ on each frame, and K is fairly large (e.g. 117). This is the price we pay for compact statistics; if we stored full mean and variance statistics, the per-frame computation would be \f$O(K^2)\f$. @@ -366,14 +366,14 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" the frames. If this option is activated, we need to store two separate versions of the sufficient statistics for the means. 
One version of the mean statistics, accumulated on the subset, is only used in the HLDA computation, and - corresponds to the quantities \f$\gamma_{jm}\f$ and \f$\mu_{jm}\f$ in the formulas above. + corresponds to the quantities \f$\gamma_{jm}\f$ and \f$\mu_{jm}\f$ in the formulas above. The other version of the mean statistics is accumulated on all the training data - and is used to write out the transformed model. - + and is used to write out the transformed model. + The overall HLDA estimation process is as follows (see rm_recipe_2/scripts/train_tri2j.sh): - First initialize it with LDA (we store both the reduced dimension matrix and the full matrix). - - Start model-building and training process. On certain (non-consecutive) + - Start model-building and training process. On certain (non-consecutive) iterations where we have decided to do the HLDA update, do the following: - Accumulate HLDA statistics (S, plus statistics for the full-dimensional means). The program that accumulates these (gmm-acc-hlda) needs the model, the un-transformed features, @@ -384,14 +384,14 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" transformation matrix which it needs to start the optimization and to correctly report auxiliary function changes. It outputs the new transform (both full and reduced dimension), and the model with newly estimated and transformed means. - + \section transform_mllt Global Semi-tied Covariance (STC) / Maximum Likelihood Linear Transform (MLLT) estimation Global STC/MLLT is a square feature-transformation matrix. For more details, - see "Semi-tied Covariance Matrices for Hidden Markov Models", by Mark Gales, + see "Semi-tied Covariance Matrices for Hidden Markov Models", by Mark Gales, IEEE Transactions on Speech and Audio Processing, vol. 7, 1999, pages 272-281. 
Viewing it as a feature-space transform, the objective function is the average - per-frame log-likelihood of the transformed features given the model, plus the + per-frame log-likelihood of the transformed features given the model, plus the log determinant of the transform. The means of the model are also rotated by transform in the update phase. The sufficient statistics are the following, for \f$ 0 \leq i < D \f$ where D is the feature dimension: @@ -399,9 +399,9 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" \mathbf{G}^{(i)} = \sum_{t,j,m} \gamma_{jm}(t) \frac{1}{ \sigma^2_{jm}(i) } (\mu_{jm} - \mathbf{x}(t)) (\mu_{jm} - \mathbf{x}(t))^T \f] See the reference, Equations (22) and (23) for the update equations. These are - basically a simplified form of the diagonal row-by-row Constrained MLLR/fMLLR update + basically a simplified form of the diagonal row-by-row Constrained MLLR/fMLLR update equations, where the first-order term of the quadratic equation disappears. Note that - our implementation differs from that reference by using a column of the inverse of the matrix + our implementation differs from that reference by using a column of the inverse of the matrix rather than the cofactor, since multiplying by the determinant does not make a difference to the result and could potentially cause problems with floating-point underflow or overflow. @@ -411,9 +411,9 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" - Estimate the LDA transformation matrix (we only need the first rows of this, not the full matrix). Call this matrix \f$\mathbf{M}\f$. - - Start a normal model building process, always using features transformed with \f$\mathbf{M}\f$. + - Start a normal model building process, always using features transformed with \f$\mathbf{M}\f$. 
At certain selected iterations (where we will update the MLLT matrix), we do the following: - - Accumulate MLLT statistics in the current fully-transformed space + - Accumulate MLLT statistics in the current fully-transformed space (i.e., on top of features transformed with \f$\mathbf{M}\f$). For efficiency we do this using a subset of the training data. - Do the MLLT update; let this produce a square matrix \f$\mathbf{T}\f$. @@ -423,34 +423,34 @@ feats="ark:add-deltas --print-args=false scp:data/train.scp ark:- |" The programs involved in MLLT estimation are gmm-acc-mllt and est-mllt. We also need the programs gmm-transform-means (to transform the Gaussian means using \f$\mathbf{T}\f$), and compose-transforms (to do the multiplication \f$\mathbf{M} \leftarrow \mathbf{T} \mathbf{M} \f$). - + \section transform_cmllr_global Global CMLLR/fMLLR transforms Constrained Maximum Likelihood Linear Regression (CMLLR), also known as feature-space MLLR (fMLLR), is an affine feature transform of the form \f$ \mathbf{x} \rightarrow \mathbf{A} \mathbf{x} + \mathbf{b} \f$, - which we write in the form \f$ \mathbf{x} \rightarrow \mathbf{W} \mathbf{x}^+ \f$, where + which we write in the form \f$ \mathbf{x} \rightarrow \mathbf{W} \mathbf{x}^+ \f$, where \f$\mathbf{x}^+ = \left[\begin{array}{c} \mathbf{x} \\ 1 \end{array} \right]\f$ is the feature with - a 1 appended. Note that this differs from some of the literature where the 1 comes first. + a 1 appended. Note that this differs from some of the literature where the 1 comes first. For a review paper that explains CMLLR and the estimation techniques we use, see "Maximum likelihood linear transformations for HMM-based speech recognition" by Mark Gales, - Computer Speech and Language Vol. 12, pages 75-98. + Computer Speech and Language Vol. 12, pages 75-98. 
The sufficient statistics we store are: \f[ \mathbf{K} = \sum_{t,j,m} \gamma_{j,m}(t) \Sigma_{jm}^{-1} \mu_{jm} \mathbf{x}(t)^+ \f] where \f$\Sigma_{jm}^{-1}\f$ is the inverse covariance matrix, and for \f$0 \leq i < D \f$ where D is the feature dimension, - \f[ \mathbf{G}^{(i)} = \sum_{t,j,m} \gamma_{j,m}(t) \frac{1}{\sigma^2_{j,m}(i)} \mathbf{x}(t)^+ \left.\mathbf{x}(t)^+\right.^T \f] + \f[ \mathbf{G}^{(i)} = \sum_{t,j,m} \gamma_{j,m}(t) \frac{1}{\sigma^2_{j,m}(i)} \mathbf{x}(t)^+ \left.\mathbf{x}(t)^+\right.^T \f] Our estimation scheme is the standard one, see Appendix B of the reference (in particular section B.1, "Direct method over rows"). We differ by using a column of the inverse in place of the cofactor row, i.e. ignoring the factor of the determinant, as it does not affect the result and causes danger of numerical underflow or overflow. - Estimation of global Constrained MLLR (CMLLR) transforms is done by the + Estimation of global Constrained MLLR (CMLLR) transforms is done by the class FmllrDiagGmmAccs, - and by the program gmm-est-fmllr (also see gmm-est-fmllr-gpost). The syntax + and by the program gmm-est-fmllr (also see gmm-est-fmllr-gpost). The syntax of gmm-est-fmllr is: \verbatim gmm-est-fmllr [options] \ @@ -486,27 +486,27 @@ feats="ark:add-deltas --print-args=false scp:data/test.scp ark:- | gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ --word-symbol-table=data/words.txt $model $graphdir/HCLG.fst \ "$feats" ark,t:$dir/test.tra ark,t:$dir/test.ali 2>$dir/decode.log -\endverbatim +\endverbatim \section transform_lvtln Linear VTLN (LVTLN) In recent years, there have been a number of papers that describe implementations of Vocal Tract Length Normalization (VTLN) that - work out a linear feature transform corresponding to each VTLN + work out a linear feature transform corresponding to each VTLN warp factor. See, for example, ``Using VTLN for broadcast news transcription'', by D. Y. Kim, S. Umesh, M. J. F. Gales, T. Hain and P. C. 
Woodland, ICSLP 2004. - + We implement a method in this general category using the class LinearVtln, and programs such as gmm-init-lvtln, gmm-train-lvtln-special, and gmm-est-lvtln-trans. The LinearVtln object essentially stores a set of linear feature transforms, one for each warp factor. Let these linear feature transform matrices be \f[\mathbf{A}^{(i)}, 0\leq i < N, \f] - where for instance we might have \f$N\f$=31, corresponding to 31 different warp - factors. We will describe below how we obtain these matrices below. + where for instance we might have \f$N\f$=31, corresponding to 31 different warp + factors. We will describe below how we obtain these matrices below. The way the speaker-specific transform is estimated is as follows. First, we require some kind of model and a corresponding alignment. In the - example scripts we do this either with a small monophone model, or with + example scripts we do this either with a small monophone model, or with a full triphone model. From this model and alignment, and using the original, unwarped features, we compute the conventional statistics for estimating CMLLR. When computing the LVTLN transform, what we do is take each matrix @@ -514,33 +514,33 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ maximizes the CMLLR auxiliary function for the transform \f$\mathbf{W} = \left[ \mathbf{A}^{(i)} \, ; \, \mathbf{b} \right]\f$. This value of \f$\mathbf{W}\f$ that gave the best auxiliary function value - (i.e. maximizing over i) becomes the transform for that speaker. Since we + (i.e. maximizing over i) becomes the transform for that speaker. Since we are estimating a mean offset here, we are essentially combining a kind of model-based cepstral mean normalization (or alternatively an offset-only form of CMLLR) with VTLN warping implemented - as a linear transform. This avoids us having to implement mean normalization + as a linear transform. This avoids us having to implement mean normalization as a separate step. 
We next describe how we estimate the matrices \f$\mathbf{A}^{(i)}\f$. We don't do this in the same way as described in the referenced paper; our method is simpler (and easier to justify). Here we describe our computation for a particular warp factor; in the current scripts we have 31 distinct warp - factors ranging from 0.85, 0.86, ..., 1.15. + factors ranging from 0.85, 0.86, ..., 1.15. We take a subset of feature data (e.g. several tens of utterances), and for this subset we compute both the original and transformed features, where the transformed features are computed using a conventional VLTN computation - (see \ref feat_vtln). - Call the original and transformed features \f$\mathbf{x}(t)\f$ and \f$\mathbf{y}(t)\f$ respectively, + (see \ref feat_vtln). + Call the original and transformed features \f$\mathbf{x}(t)\f$ and \f$\mathbf{y}(t)\f$ respectively, where \f$t\f$ will range over the frames of the selected utterances. We compute the affine transform that maps \f$\mathbf{x}\f$ to \f$\mathbf{y}\f$ in a least-squares - sense, i.e. if \f$\mathbf{y}' = \mathbf{A} \mathbf{x} + \mathbf{b}\f$, + sense, i.e. if \f$\mathbf{y}' = \mathbf{A} \mathbf{x} + \mathbf{b}\f$, we compute \f$\mathbf{A}\f$ and \f$\mathbf{b}\f$ that minimizes the sum-of-squares difference \f$\sum_t (\mathbf{y}'(t) - \mathbf{y}(t) )^T (\mathbf{y}'(t) - \mathbf{y}(t) )\f$. Then we normalize the diagonal variance as follows: we compute the variance of the original features as \f$\mathbf{\Sigma}^{(x)}\f$ and of the linearly transformed features as \f$\mathbf{\Sigma}^{(y')}\f$, and for each dimension index d we multiply the - d'th row of \f$\mathbf{A}\f$ by - \f$\sqrt{ \frac{\mathbf{\Sigma}^{(x)}_{d,d}}{\mathbf{\Sigma}^{(y')}_{d,d}}}\f$. + d'th row of \f$\mathbf{A}\f$ by + \f$\sqrt{ \frac{\mathbf{\Sigma}^{(x)}_{d,d}}{\mathbf{\Sigma}^{(y')}_{d,d}}}\f$. The resulting matrix will become \f$\mathbf{A}^{(i)}\f$ for some value of i. 
The command-line tools support the option to ignore the log determinant term @@ -579,8 +579,8 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ are speaker-specific; other quantities (i.e. \f$\mathbf{A}\f$ and \f$\mathbf{B}\f$) are global and shared across all speakers. - The most important factor in this equation is the middle one, - with the exponential function in it. + The most important factor in this equation is the middle one, + with the exponential function in it. The factor \f$\mathbf{D}_s\f$ gives us the ability to combine model-based mean and optionally variance normalization (i.e. offset-only or diagonal-only CMLLR) @@ -596,7 +596,7 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ there would be no point to this technique as the other quantities in the equation would add no degrees of freedom. The tools support three kinds of constraints on \f$\mathbf{D}_s\f$: it may be of the form - \f$[ {\mathbf I} \, \;\, {\mathbf 0} ]\f$ (no adaptation), or + \f$[ {\mathbf I} \, \;\, {\mathbf 0} ]\f$ (no adaptation), or \f$[ {\mathbf I} \, \;\, {\mathbf m} ]\f$ (offset only), or \f$[ {\mathrm{diag}}( {\mathbf d} ) \, \;\, {\mathbf m} ]\f$ (diagonal CMLLR); this is controlled by the --normalize-type options to the command-line tools. @@ -613,9 +613,9 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ if we were to warp by a factor f and then a factor g, this should be the same as warping by the combined factor fg. Let l = log(f) and m = log(g). Then we achieve this - property via the identity + property via the identity \f[ \exp( l \mathbf{A} ) \exp( m \mathbf{A}) = \exp( (l+m) \mathbf{A} ) . \f] - + The ET computation for a particular speaker is as follows; this assumes we are given \f$\mathbf{A}\f$ and \f$\mathbf{B}\f$. We accumulate conventional CMLLR sufficient statistics for the speaker. 
In the update phase we iteratively optimize @@ -636,9 +636,9 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ \f$\mathbf{B}\f$, or the model. - If updating \f$\mathbf{A}\f$, we do this given fixed values of \f$t_s\f$ and \f$\mathbf{D}_s\f$. The update is not guaranteed to - converge, but converges rapidly in practice; it's based on a + converge, but converges rapidly in practice; it's based on a quadratic "weak-sense auxiliary function" - where the quadratic term is obtained using a first-order truncation + where the quadratic term is obtained using a first-order truncation of the Taylor series expansion of the matrix exponential function. After updating \f$\mathbf{A}\f$, we modify \f$\mathbf{B}\f$ in order to renormalize the \f$t_s\f$ to zero; this involves premultiplying @@ -646,11 +646,11 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ value of \f$t_s\f$. - If updating \f$\mathbf{B}\f$, this is also done using fixed values of - \f$t_s\f$ and \f$\mathbf{D}_s\f$, and the update is similar to MLLT + \f$t_s\f$ and \f$\mathbf{D}_s\f$, and the update is similar to MLLT (a.k.a. global STC). For purposes of the accumulation and update, we imagine we are estimating an MLLT matrix just to the left of \f$\mathbf{A}\f$, i.e. some matrix - \f$\mathbf{C} \in \Re^{D\times D}\f$; let us define + \f$\mathbf{C} \in \Re^{D\times D}\f$; let us define \f$\mathbf{C}^+ = \left[ \begin{array}{cc} \mathbf{C} & 0 \\ 0 & 1 \end{array} \right]\f$. The transform will be \f$\mathbf{W}_s = \mathbf{D}_s \mathbf{C}^+ \exp ( t_s \mathbf{A} ) \mathbf{B}\f$. @@ -660,24 +660,24 @@ gmm-decode-faster --beam=30.0 --acoustic-scale=0.08333 \ \f$\exp ( t_s \mathbf{A} ) \mathbf{B}\f$ as a feature-space transform (i.e. as part of the features). 
After estimating \f$\mathbf{C}\f$, we will use the identity \f[ - \mathbf{C}^+ \exp ( t_s \mathbf{A} ) = \exp ( t_s \mathbf{C}^+ \mathbf{A} \left.\mathbf{C}^+\right.^{-1} ) \mathbf{C}^+ + \mathbf{C}^+ \exp ( t_s \mathbf{A} ) = \exp ( t_s \mathbf{C}^+ \mathbf{A} \left.\mathbf{C}^+\right.^{-1} ) \mathbf{C}^+ \f] so the update becomes: \f[ \mathbf{A} \leftarrow \mathbf{C}^+ \mathbf{A} \left.\mathbf{C}^+\right.^{-1} , \ \ \mathbf{B} \leftarrow \mathbf{C}^+ \mathbf{B} . \f] At this point we need to transform the model means with the matrix - \f$\mathbf{C}\f$. The reader might question how this interacts with the + \f$\mathbf{C}\f$. The reader might question how this interacts with the fact that for estimating \f$\mathbf{C}\f$, we viewed the quantity \f$\mathbf{D}_s\f$ as a model-space transform. If \f$\mathbf{D}_s\f$ only - contains a mean offset, we can still prove that the auxiliary function + contains a mean offset, we can still prove that the auxiliary function would increase, except we would have to change the offsets appropriately (this is not necessary to do explicitly, as we will re-estimate them on - the next iteration anyway). However, if \f$\mathbf{D}_s\f$ has non-unit - diagonal (i.e. is diagonal not offset CMLLR), this re-estimation process - is not guaranteed to improve the likelihood; the tools will print a warning + the next iteration anyway). However, if \f$\mathbf{D}_s\f$ has non-unit + diagonal (i.e. is diagonal not offset CMLLR), this re-estimation process + is not guaranteed to improve the likelihood; the tools will print a warning in this case. In order to avoid encountering this case, our scripts - train in a mode where \f$\mathbf{D}_s\f$ is an offset-only transform; but + train in a mode where \f$\mathbf{D}_s\f$ is an offset-only transform; but in test time we allow \f$\mathbf{D}_s\f$ to be a diagonal CMLLR transform, which seems to give slightly better results than the offset-only case. @@ -704,7 +704,7 @@ expanded features). 
For very fast operation, it is possible to apply these approaches using a very tiny model with a phone-based language model, and some of our example scripts demonstrate this. There is also the capability in the feature extraction code to subtract the mean on a per-utterance basis (the ---subtract-mean option to compute-mfcc-feats and compute-plp-feats). +--subtract-mean option to compute-mfcc-feats and compute-plp-feats). In order to support per-utterance and per-speaker mean and variance normalization we provide the programs compute-cmvn-stats and apply-cmvn. The program diff --git a/src/feat/feature-window.h b/src/feat/feature-window.h index 287f1bf01f6..bbb24fd8988 100644 --- a/src/feat/feature-window.h +++ b/src/feat/feature-window.h @@ -76,7 +76,8 @@ struct FrameExtractionOptions { opts->Register("blackman-coeff", &blackman_coeff, "Constant coefficient for generalized Blackman window."); opts->Register("round-to-power-of-two", &round_to_power_of_two, - "If true, round window size to power of two."); + "If true, round window size to power of two by zero-padding " + "input to FFT."); opts->Register("snip-edges", &snip_edges, "If true, end effects will be handled by outputting only frames that " "completely fit in the file, and the number of frames depends on the " diff --git a/src/featbin/extract-rows.cc b/src/featbin/extract-rows.cc index e14f9cc0e82..e4e2a927e6b 100644 --- a/src/featbin/extract-rows.cc +++ b/src/featbin/extract-rows.cc @@ -149,7 +149,7 @@ int main(int argc, char *argv[]) { KALDI_LOG << "Processed " << num_done << " segments successfully; " << "errors on " << num_err; - return (num_done > 0); + return (num_done > 0 ? 
0 : 1); } catch(const std::exception &e) { std::cerr << e.what(); return -1; diff --git a/src/gmmbin/gmm-align-compiled.cc b/src/gmmbin/gmm-align-compiled.cc index 85ac3fd27a7..c3aadcc7ec9 100644 --- a/src/gmmbin/gmm-align-compiled.cc +++ b/src/gmmbin/gmm-align-compiled.cc @@ -44,8 +44,8 @@ int main(int argc, char *argv[]) { "e.g.: \n" " gmm-align-compiled 1.mdl ark:graphs.fsts scp:train.scp ark:1.ali\n" "or:\n" - " compile-train-graphs tree 1.mdl lex.fst ark:train.tra b, ark:- | \\\n" - " gmm-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; + " compile-train-graphs tree 1.mdl lex.fst 'ark:sym2int.pl -f 2- words.txt text|' \\\n" + " ark:- | gmm-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; ParseOptions po(usage); AlignConfig align_config; diff --git a/src/gmmbin/gmm-align.cc b/src/gmmbin/gmm-align.cc index 7ef5f9c8dab..c9c2fde11f6 100644 --- a/src/gmmbin/gmm-align.cc +++ b/src/gmmbin/gmm-align.cc @@ -39,9 +39,11 @@ int main(int argc, char *argv[]) { const char *usage = "Align features given [GMM-based] models.\n" - "Usage: gmm-align [options] tree-in model-in lexicon-fst-in feature-rspecifier transcriptions-rspecifier alignments-wspecifier\n" + "Usage: gmm-align [options] tree-in model-in lexicon-fst-in feature-rspecifier " + "transcriptions-rspecifier alignments-wspecifier\n" "e.g.: \n" - " gmm-align tree 1.mdl lex.fst scp:train.scp ark:train.tra ark:1.ali\n"; + " gmm-align tree 1.mdl lex.fst scp:train.scp " + "'ark:sym2int.pl -f 2- words.txt text|' ark:1.ali\n"; ParseOptions po(usage); AlignConfig align_config; BaseFloat acoustic_scale = 1.0; diff --git a/src/latbin/lattice-best-path.cc b/src/latbin/lattice-best-path.cc index dc25fb351c6..ce9016d750c 100644 --- a/src/latbin/lattice-best-path.cc +++ b/src/latbin/lattice-best-path.cc @@ -38,7 +38,7 @@ int main(int argc, char *argv[]) { "Note: if you want output as FSTs, use lattice-1best; if you want output\n" "with acoustic and LM scores, use lattice-1best | nbest-to-linear\n" "Usage: 
lattice-best-path [options] [ [ ] ]\n" - " e.g.: lattice-best-path --acoustic-scale=0.1 ark:1.lats ark:1.tra ark:1.ali\n"; + " e.g.: lattice-best-path --acoustic-scale=0.1 ark:1.lats 'ark,t:|int2sym.pl -f 2- words.txt > text' ark:1.ali\n"; ParseOptions po(usage); BaseFloat acoustic_scale = 1.0; diff --git a/src/latbin/lattice-mbr-decode.cc b/src/latbin/lattice-mbr-decode.cc index 465f4e35fbd..fba5daa4dd8 100644 --- a/src/latbin/lattice-mbr-decode.cc +++ b/src/latbin/lattice-mbr-decode.cc @@ -43,8 +43,8 @@ int main(int argc, char *argv[]) { "Usage: lattice-mbr-decode [options] lattice-rspecifier " "transcriptions-wspecifier [ bayes-risk-wspecifier " "[ sausage-stats-wspecifier [ times-wspecifier] ] ] \n" - " e.g.: lattice-mbr-decode --acoustic-scale=0.1 ark:1.lats ark:1.tra " - "ark:/dev/null ark:1.sau\n"; + " e.g.: lattice-mbr-decode --acoustic-scale=0.1 ark:1.lats " + "'ark,t:|int2sym.pl -f 2- words.txt > text' ark:/dev/null ark:1.sau\n"; ParseOptions po(usage); BaseFloat acoustic_scale = 1.0; diff --git a/src/latbin/linear-to-nbest.cc b/src/latbin/linear-to-nbest.cc index fd025f382b6..a1864d0d14a 100644 --- a/src/latbin/linear-to-nbest.cc +++ b/src/latbin/linear-to-nbest.cc @@ -67,7 +67,8 @@ int main(int argc, char *argv[]) { "\n" "Note: if the rspecifiers for lm-cost or ac-cost are the empty string,\n" "these value will default to zero.\n" - " e.g.: linear-to-nbest ark:1.ali ark:1.tra ark:1.lmscore ark:1.acscore " + " e.g.: linear-to-nbest ark:1.ali 'ark:sym2int.pl -f 2- words.txt text|' " + "ark:1.lmscore ark:1.acscore " "ark:1.nbest\n"; ParseOptions po(usage); diff --git a/src/latbin/nbest-to-linear.cc b/src/latbin/nbest-to-linear.cc index 6b3fe5e1d01..d63c380133a 100644 --- a/src/latbin/nbest-to-linear.cc +++ b/src/latbin/nbest-to-linear.cc @@ -39,7 +39,7 @@ int main(int argc, char *argv[]) { "Usage: nbest-to-linear [options] " "[ [ []]]\n" " e.g.: lattice-to-nbest --n=10 ark:1.lats ark:- | \\\n" - " nbest-to-linear ark:1.lats ark,t:1.ali ark,t:1.tra\n"; + " 
nbest-to-linear ark:1.lats ark,t:1.ali 'ark,t:|int2sym.pl -f 2- words.txt > text'\n"; ParseOptions po(usage); diff --git a/src/lmbin/arpa2fst.cc b/src/lmbin/arpa2fst.cc old mode 100755 new mode 100644 diff --git a/src/matrix/Matrix.vcxproj b/src/matrix/Matrix.vcxproj old mode 100755 new mode 100644 diff --git a/src/nnet/nnet-parametric-relu.h b/src/nnet/nnet-parametric-relu.h old mode 100755 new mode 100644 diff --git a/src/nnet2bin/nnet-align-compiled.cc b/src/nnet2bin/nnet-align-compiled.cc index 60045eb7cce..8f5537c26c7 100644 --- a/src/nnet2bin/nnet-align-compiled.cc +++ b/src/nnet2bin/nnet-align-compiled.cc @@ -40,12 +40,13 @@ int main(int argc, char *argv[]) { const char *usage = "Align features given neural-net-based model\n" - "Usage: nnet-align-compiled [options] model-in graphs-rspecifier feature-rspecifier alignments-wspecifier\n" + "Usage: nnet-align-compiled [options] model-in graphs-rspecifier " + "feature-rspecifier alignments-wspecifier\n" "e.g.: \n" " nnet-align-compiled 1.mdl ark:graphs.fsts scp:train.scp ark:1.ali\n" "or:\n" - " compile-train-graphs tree 1.mdl lex.fst ark:train.tra b, ark:- | \\\n" - " nnet-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; + " compile-train-graphs tree 1.mdl lex.fst 'ark:sym2int.pl -f 2- words.txt text|' \\\n" + " ark:- | nnet-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; ParseOptions po(usage); AlignConfig align_config; diff --git a/src/nnet3/nnet-chain-example.cc b/src/nnet3/nnet-chain-example.cc index 005107a097c..351312fb952 100644 --- a/src/nnet3/nnet-chain-example.cc +++ b/src/nnet3/nnet-chain-example.cc @@ -401,7 +401,7 @@ void ShiftChainExampleTimes(int32 frame_shift, size_t NnetChainExampleStructureHasher::operator () ( - const NnetChainExample &eg) const { + const NnetChainExample &eg) const noexcept { // these numbers were chosen at random from a list of primes. 
NnetIoStructureHasher io_hasher; size_t size = eg.inputs.size(), ans = size * 35099; diff --git a/src/nnet3/nnet-chain-example.h b/src/nnet3/nnet-chain-example.h index 7a024f3bfcd..2718af746b2 100644 --- a/src/nnet3/nnet-chain-example.h +++ b/src/nnet3/nnet-chain-example.h @@ -135,9 +135,9 @@ struct NnetChainExample { /// without looking at the value of the features. It will be used in combining /// egs into batches of all similar structure. struct NnetChainExampleStructureHasher { - size_t operator () (const NnetChainExample &eg) const; + size_t operator () (const NnetChainExample &eg) const noexcept; // We also provide a version of this that works from pointers. - size_t operator () (const NnetChainExample *eg) const { + size_t operator () (const NnetChainExample *eg) const noexcept { return (*this)(*eg); } }; diff --git a/src/nnet3/nnet-chain-training.cc b/src/nnet3/nnet-chain-training.cc index c3ae3ae0336..5fe28e8142b 100644 --- a/src/nnet3/nnet-chain-training.cc +++ b/src/nnet3/nnet-chain-training.cc @@ -231,7 +231,7 @@ void NnetChainTrainer::UpdateParamsWithMaxChange() { } bool NnetChainTrainer::PrintTotalStats() const { - unordered_map::const_iterator + unordered_map::const_iterator iter = objf_info_.begin(), end = objf_info_.end(); bool ans = false; diff --git a/src/nnet3/nnet-common.cc b/src/nnet3/nnet-common.cc index 906217c3561..6c4fc0f7a1c 100644 --- a/src/nnet3/nnet-common.cc +++ b/src/nnet3/nnet-common.cc @@ -342,14 +342,14 @@ void ReadCindexVector(std::istream &is, bool binary, } } -size_t IndexHasher::operator () (const Index &index) const { +size_t IndexHasher::operator () (const Index &index) const noexcept { // The numbers that appear below were chosen arbitrarily from a list of primes return index.n + 1619 * index.t + 15649 * index.x; } -size_t CindexHasher::operator () (const Cindex &cindex) const { +size_t CindexHasher::operator () (const Cindex &cindex) const noexcept { // The numbers that appear below were chosen arbitrarily from a list of 
primes return cindex.first + 1619 * cindex.second.n + @@ -359,7 +359,7 @@ size_t CindexHasher::operator () (const Cindex &cindex) const { } size_t CindexVectorHasher::operator () ( - const std::vector &cindex_vector) const { + const std::vector &cindex_vector) const noexcept { // this is an arbitrarily chosen prime. size_t kPrime = 23539, ans = 0; std::vector::const_iterator iter = cindex_vector.begin(), @@ -371,7 +371,7 @@ size_t CindexVectorHasher::operator () ( } size_t IndexVectorHasher::operator () ( - const std::vector &index_vector) const { + const std::vector &index_vector) const noexcept { size_t n1 = 15, n2 = 10; // n1 and n2 are used to extract only a subset of // elements to hash; this makes the hasher faster by // skipping over more elements. Setting n1 large or diff --git a/src/nnet3/nnet-common.h b/src/nnet3/nnet-common.h index 9134e2545de..3f80645ec22 100644 --- a/src/nnet3/nnet-common.h +++ b/src/nnet3/nnet-common.h @@ -106,21 +106,21 @@ void ReadIndexVector(std::istream &is, bool binary, typedef std::pair Cindex; struct IndexHasher { - size_t operator () (const Index &cindex) const; + size_t operator () (const Index &cindex) const noexcept; }; struct CindexHasher { - size_t operator () (const Cindex &cindex) const; + size_t operator () (const Cindex &cindex) const noexcept; }; struct CindexVectorHasher { - size_t operator () (const std::vector &cindex_vector) const; + size_t operator () (const std::vector &cindex_vector) const noexcept; }; // Note: because IndexVectorHasher is used in some things where we really need // it to be fast, it doesn't look at all the indexes, just most of them. 
struct IndexVectorHasher { - size_t operator () (const std::vector &index_vector) const; + size_t operator () (const std::vector &index_vector) const noexcept; }; diff --git a/src/nnet3/nnet-computation.cc b/src/nnet3/nnet-computation.cc index 5be1b7def94..ec1214279ff 100644 --- a/src/nnet3/nnet-computation.cc +++ b/src/nnet3/nnet-computation.cc @@ -1140,7 +1140,7 @@ void NnetComputation::GetWholeSubmatrices( } size_t IoSpecificationHasher::operator () ( - const IoSpecification &io_spec) const { + const IoSpecification &io_spec) const noexcept { StringHasher string_hasher; IndexVectorHasher indexes_hasher; // 4261 was chosen at random from a list of primes. diff --git a/src/nnet3/nnet-computation.h b/src/nnet3/nnet-computation.h index c7972da2102..623e136dd43 100644 --- a/src/nnet3/nnet-computation.h +++ b/src/nnet3/nnet-computation.h @@ -100,7 +100,7 @@ struct IoSpecification { }; struct IoSpecificationHasher { - size_t operator () (const IoSpecification &io_spec) const; + size_t operator () (const IoSpecification &io_spec) const noexcept; }; diff --git a/src/nnet3/nnet-discriminative-example.cc b/src/nnet3/nnet-discriminative-example.cc index aa7eb48ea04..61a9669fb76 100644 --- a/src/nnet3/nnet-discriminative-example.cc +++ b/src/nnet3/nnet-discriminative-example.cc @@ -396,7 +396,7 @@ void ShiftDiscriminativeExampleTimes(int32 frame_shift, } size_t NnetDiscriminativeExampleStructureHasher::operator () ( - const NnetDiscriminativeExample &eg) const { + const NnetDiscriminativeExample &eg) const noexcept { // these numbers were chosen at random from a list of primes. 
NnetIoStructureHasher io_hasher; size_t size = eg.inputs.size(), ans = size * 35099; diff --git a/src/nnet3/nnet-discriminative-example.h b/src/nnet3/nnet-discriminative-example.h index ba1cac7ffbe..c0ea446552e 100644 --- a/src/nnet3/nnet-discriminative-example.h +++ b/src/nnet3/nnet-discriminative-example.h @@ -134,9 +134,9 @@ struct NnetDiscriminativeExample { /// without looking at the value of the features. It will be used in combining /// egs into batches of all similar structure. struct NnetDiscriminativeExampleStructureHasher { - size_t operator () (const NnetDiscriminativeExample &eg) const; + size_t operator () (const NnetDiscriminativeExample &eg) const noexcept; // We also provide a version of this that works from pointers. - size_t operator () (const NnetDiscriminativeExample *eg) const { + size_t operator () (const NnetDiscriminativeExample *eg) const noexcept { return (*this)(*eg); } }; diff --git a/src/nnet3/nnet-discriminative-training.cc b/src/nnet3/nnet-discriminative-training.cc index fb4b7db8c3c..0a436b69f8c 100644 --- a/src/nnet3/nnet-discriminative-training.cc +++ b/src/nnet3/nnet-discriminative-training.cc @@ -189,7 +189,8 @@ void NnetDiscriminativeTrainer::ProcessOutputs(const NnetDiscriminativeExample & bool NnetDiscriminativeTrainer::PrintTotalStats() const { - unordered_map::const_iterator + unordered_map::const_iterator iter = objf_info_.begin(), end = objf_info_.end(); bool ans = false; diff --git a/src/nnet3/nnet-example.cc b/src/nnet3/nnet-example.cc index 3e87ebba3f5..c011f2a0b8a 100644 --- a/src/nnet3/nnet-example.cc +++ b/src/nnet3/nnet-example.cc @@ -124,7 +124,7 @@ void NnetExample::Compress() { size_t NnetIoStructureHasher::operator () ( - const NnetIo &io) const { + const NnetIo &io) const noexcept { StringHasher string_hasher; IndexVectorHasher indexes_hasher; @@ -147,7 +147,7 @@ bool NnetIoStructureCompare::operator () ( size_t NnetExampleStructureHasher::operator () ( - const NnetExample &eg) const { + const NnetExample 
&eg) const noexcept { // these numbers were chosen at random from a list of primes. NnetIoStructureHasher io_hasher; size_t size = eg.io.size(), ans = size * 35099; diff --git a/src/nnet3/nnet-example.h b/src/nnet3/nnet-example.h index f08754a2bd3..347894e958c 100644 --- a/src/nnet3/nnet-example.h +++ b/src/nnet3/nnet-example.h @@ -79,7 +79,7 @@ struct NnetIo { /// (name, indexes, feature dimension) without looking at the value of features. /// It will be used in combining egs into batches of all similar structure. struct NnetIoStructureHasher { - size_t operator () (const NnetIo &a) const; + size_t operator () (const NnetIo &a) const noexcept; }; /// This comparison object compares just the structural aspects of the NnetIo /// object (name, indexes, feature dimension) without looking at the value of @@ -130,9 +130,11 @@ struct NnetExample { /// deal with differently-ordered, but otherwise identical, egs in practice so /// we don't bother making the hashing function independent of this order. struct NnetExampleStructureHasher { - size_t operator () (const NnetExample &eg) const; + size_t operator () (const NnetExample &eg) const noexcept; // We also provide a version of this that works from pointers. - size_t operator () (const NnetExample *eg) const { return (*this)(*eg); } + size_t operator () (const NnetExample *eg) const noexcept { + return (*this)(*eg); + } }; diff --git a/src/nnet3/nnet-optimize-utils.cc b/src/nnet3/nnet-optimize-utils.cc index adcd5fe22f0..60ec93f3f18 100644 --- a/src/nnet3/nnet-optimize-utils.cc +++ b/src/nnet3/nnet-optimize-utils.cc @@ -188,7 +188,7 @@ class ComputationRenumberer { struct SubMatrixHasher { SubMatrixHasher() { } - size_t operator () (const NnetComputation::SubMatrixInfo &submat) const { + size_t operator () (const NnetComputation::SubMatrixInfo &submat) const noexcept { // these numbers are arbitrarily chosen primes. 
return submat.matrix_index + 19553 * submat.row_offset + diff --git a/src/nnet3/nnet-optimize.cc b/src/nnet3/nnet-optimize.cc index 30b5f57feb7..abafedc2f2d 100644 --- a/src/nnet3/nnet-optimize.cc +++ b/src/nnet3/nnet-optimize.cc @@ -533,7 +533,8 @@ void Optimize(const NnetOptimizeOptions &config, // ComputationRequests are distinguished by the names and indexes // of inputs and outputs -size_t ComputationRequestHasher::operator() (const ComputationRequest *cr) const { +size_t ComputationRequestHasher::operator() ( + const ComputationRequest *cr) const noexcept { size_t ans = 0; size_t p1 = 4111, p2 = 26951; IoSpecificationHasher io_hasher; @@ -862,7 +863,7 @@ void FixGotoOutputReordering(const Nnet &nnet, FixGotoLabel(computation); // make sure the destination label of the goto statement was // correct. int32 goto_command_index = -1; - for (int32 c = computation->commands.size(); c >= 0; c--) + for (int32 c = computation->commands.size() - 1; c >= 0; c--) if (computation->commands[c].command_type == kGotoLabel) goto_command_index = c; KALDI_ASSERT(goto_command_index > 0); diff --git a/src/nnet3/nnet-optimize.h b/src/nnet3/nnet-optimize.h index 538dde2bbc1..cb14060996b 100644 --- a/src/nnet3/nnet-optimize.h +++ b/src/nnet3/nnet-optimize.h @@ -168,7 +168,7 @@ void Optimize(const NnetOptimizeOptions &config, // ComputationRequest to hash code by looking at input // and output IoSpecifications vectors. 
struct ComputationRequestHasher { - size_t operator()(const ComputationRequest *cr) const; + size_t operator()(const ComputationRequest *cr) const noexcept; }; // Equality function for ComputationRequest pointer diff --git a/src/nnet3/nnet-simple-component.cc b/src/nnet3/nnet-simple-component.cc index f05623c65b3..8bbe76840da 100644 --- a/src/nnet3/nnet-simple-component.cc +++ b/src/nnet3/nnet-simple-component.cc @@ -2798,11 +2798,19 @@ void NaturalGradientAffineComponent::ZeroStats() { } void NaturalGradientAffineComponent::Scale(BaseFloat scale) { - update_count_ *= scale; - max_change_scale_stats_ *= scale; - active_scaling_count_ *= scale; - linear_params_.Scale(scale); - bias_params_.Scale(scale); + if (scale == 0.0) { + update_count_ = 0.0; + max_change_scale_stats_ = 0.0; + active_scaling_count_ = 0.0; + linear_params_.SetZero(); + bias_params_.SetZero(); + } else { + update_count_ *= scale; + max_change_scale_stats_ *= scale; + active_scaling_count_ *= scale; + linear_params_.Scale(scale); + bias_params_.Scale(scale); + } } void NaturalGradientAffineComponent::Add(BaseFloat alpha, const Component &other_in) { diff --git a/src/nnet3/nnet-training.cc b/src/nnet3/nnet-training.cc index 6bac172b5bd..2a081920738 100644 --- a/src/nnet3/nnet-training.cc +++ b/src/nnet3/nnet-training.cc @@ -180,7 +180,7 @@ void NnetTrainer::UpdateParamsWithMaxChange() { } bool NnetTrainer::PrintTotalStats() const { - unordered_map::const_iterator + unordered_map::const_iterator iter = objf_info_.begin(), end = objf_info_.end(); bool ans = false; diff --git a/src/nnet3/nnet-utils.h b/src/nnet3/nnet-utils.h index 766b0ed1798..921f1f1901d 100644 --- a/src/nnet3/nnet-utils.h +++ b/src/nnet3/nnet-utils.h @@ -81,7 +81,7 @@ std::string PrintVectorPerUpdatableComponent(const Nnet &nnet, /// This function returns true if the nnet has the following properties: /// It has an output called "output" (other outputs are allowed but may be -/// ignored). +/// ignored). 
/// It has an input called "input", and possibly an extra input called /// "ivector", but no other inputs. /// There are probably some other properties that we really ought to @@ -160,8 +160,8 @@ void ConvertRepeatedToBlockAffine(Nnet *nnet); /// Info() function (we need this in the CTC code). std::string NnetInfo(const Nnet &nnet); -/// This function sets the dropout proportion in all dropout component to -/// dropout_proportion value. +/// This function sets the dropout proportion in all dropout components to +/// the value 'dropout_proportion' void SetDropoutProportion(BaseFloat dropout_proportion, Nnet *nnet); /// This function finds a list of components that are never used, and outputs diff --git a/src/nnet3bin/nnet3-align-compiled.cc b/src/nnet3bin/nnet3-align-compiled.cc index bab5d16f370..84a5f38b4ee 100644 --- a/src/nnet3bin/nnet3-align-compiled.cc +++ b/src/nnet3bin/nnet3-align-compiled.cc @@ -47,8 +47,8 @@ int main(int argc, char *argv[]) { "e.g.: \n" " nnet3-align-compiled 1.mdl ark:graphs.fsts scp:train.scp ark:1.ali\n" "or:\n" - " compile-train-graphs tree 1.mdl lex.fst ark:train.tra b, ark:- | \\\n" - " nnet3-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; + " compile-train-graphs tree 1.mdl lex.fst 'ark:sym2int.pl -f 2- words.txt text|' \\\n" + " ark:- | nnet3-align-compiled 1.mdl ark:- scp:train.scp t, ark:1.ali\n"; ParseOptions po(usage); AlignConfig align_config; diff --git a/src/onlinebin/online-wav-gmm-decode-faster.cc b/src/onlinebin/online-wav-gmm-decode-faster.cc index e5d54b80db5..fe7c6d6b974 100644 --- a/src/onlinebin/online-wav-gmm-decode-faster.cc +++ b/src/onlinebin/online-wav-gmm-decode-faster.cc @@ -41,7 +41,7 @@ int main(int argc, char *argv[]) { const char *usage = "Reads in wav file(s) and simulates online decoding.\n" - "Writes .tra and .ali files for WER computation. Utterance " + "Writes integerized-text and .ali files for WER computation. 
Utterance " "segmentation is done on-the-fly.\n" "Feature splicing/LDA transform is used, if the optional(last) argument " "is given.\n" diff --git a/src/util/stl-utils.h b/src/util/stl-utils.h index 95ca0b03c5a..a1506f557a7 100644 --- a/src/util/stl-utils.h +++ b/src/util/stl-utils.h @@ -216,7 +216,7 @@ void CopyVectorToVector(const std::vector &vec_in, std::vector *vec_out) { /// A hashing function-object for vectors. template struct VectorHasher { // hashing function for vector. - size_t operator()(const std::vector &x) const { + size_t operator()(const std::vector &x) const noexcept { size_t ans = 0; typename std::vector::const_iterator iter = x.begin(), end = x.end(); for (; iter != end; ++iter) { @@ -235,7 +235,7 @@ struct VectorHasher { // hashing function for vector. /// A hashing function-object for pairs of ints template struct PairHasher { // hashing function for pair - size_t operator()(const std::pair &x) const { + size_t operator()(const std::pair &x) const noexcept { // 7853 was chosen at random from a list of primes. return x.first + x.second * 7853; } @@ -248,7 +248,7 @@ struct PairHasher { // hashing function for pair /// A hashing function object for strings. 
struct StringHasher { // hashing function for std::string - size_t operator()(const std::string &str) const { + size_t operator()(const std::string &str) const noexcept { size_t ans = 0, len = str.length(); const char *c = str.c_str(), *end = c + len; for (; c != end; c++) { diff --git a/tools/extras/check_dependencies.sh b/tools/extras/check_dependencies.sh index 3c26fd53e82..43579334c89 100755 --- a/tools/extras/check_dependencies.sh +++ b/tools/extras/check_dependencies.sh @@ -181,14 +181,6 @@ if which grep >&/dev/null && pwd | grep -E 'JOB|LMWT' >/dev/null; then status=1; fi -if [ -f /usr/lib64/libfst.so.1 ] || [ -f /usr/local/include/fst.h ] || \ - [ -f /usr/include/fst/fst.h ] || [ -f /usr/local/bin/fstinfo ]; then - echo "*** $0: Kaldi cannot be installed (for now) if you have OpenFst" - echo "*** installed in system space (version mismatches, etc.)" - echo "*** Please try to uninstall it." - status=1 -fi - if ! $printed && [ $status -eq 0 ]; then echo "$0: all OK." fi