diff --git a/egs/babel/s5b/conf/common.fullLP b/egs/babel/s5b/conf/common.fullLP
index b8a11e2eb..6e3befa4f 100644
--- a/egs/babel/s5b/conf/common.fullLP
+++ b/egs/babel/s5b/conf/common.fullLP
@@ -1,14 +1,14 @@
 # DNN hybrid system training parameters
-dnn_num_hidden_layers=5
-dnn_input_dim=5000
-dnn_output_dim=500
+dnn_num_hidden_layers=4
+dnn_input_dim=4000
+dnn_output_dim=400
 dnn_minibatch_size=128
 dnn_init_learning_rate=0.008
 dnn_final_learning_rate=0.0008
 dnn_max_change=10
 dnn_num_jobs=8
 dnn_num_threads=1
-dnn_mixup=5000
+dnn_mixup=12000
 dnn_parallel_opts="-l gpu=1,hostname=g*"
 
 bnf_every_nth_frame=2 # take every 2nd frame.
@@ -18,7 +18,10 @@ use_pitch=false
 
 lmwt_plp_extra_opts=( --min-lmwt 8 --max-lmwt 12 )
 lmwt_bnf_extra_opts=( --min-lmwt 13 --max-lmwt 18 )
-lmwt_dnn_extra_opts=( --min-lmwt 8 --max-lmwt 12 )
+lmwt_dnn_extra_opts=( --min-lmwt 9 --max-lmwt 13 )
+
+dnn_beam=16.0
+dnn_lat_beam=8.5
 
 icu_opt=(--use-icu true --icu-transform Any-Lower)
 
diff --git a/egs/babel/s5b/conf/common.limitedLP b/egs/babel/s5b/conf/common.limitedLP
index d1efe8ef7..a34bb2a2c 100644
--- a/egs/babel/s5b/conf/common.limitedLP
+++ b/egs/babel/s5b/conf/common.limitedLP
@@ -18,7 +18,10 @@ use_pitch=false
 
 lmwt_plp_extra_opts=( --min-lmwt 8 --max-lmwt 12 )
 lmwt_bnf_extra_opts=( --min-lmwt 13 --max-lmwt 18 )
-lmwt_dnn_extra_opts=( --min-lmwt 8 --max-lmwt 12 )
+lmwt_dnn_extra_opts=( --min-lmwt 9 --max-lmwt 13 )
+
+dnn_beam=16.0
+dnn_lat_beam=8.5
 
 icu_opt=(--use-icu true --icu-transform Any-Lower)
 
diff --git a/egs/babel/s5b/run-5-anydecode.sh b/egs/babel/s5b/run-5-anydecode.sh
index 5ea95cbc3..22109f9bb 100755
--- a/egs/babel/s5b/run-5-anydecode.sh
+++ b/egs/babel/s5b/run-5-anydecode.sh
@@ -332,6 +332,7 @@ if [ -f exp/tri6_nnet/.done ]; then
   if [ ! -f $decode/.done ]; then
     mkdir -p $decode
     steps/nnet2/decode.sh --cmd "$decode_cmd" --nj $my_nj \
+      --beam $dnn_beam --lat-beam $dnn_lat_beam \
       --skip-scoring true "${decode_extra_opts[@]}" \
       --transform-dir exp/tri5/decode_${dirid} \
       exp/tri5/graph ${datadir} $decode |tee $decode/decode.log