merge with master branch code
This commit is contained in:
Коммит
4b12277e0a
10
CNTK.sln
10
CNTK.sln
|
@ -20,7 +20,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CNTK", "MachineLearning\CNT
|
|||
{E6646FFE-3588-4276-8A15-8D65C22711C1} = {E6646FFE-3588-4276-8A15-8D65C22711C1}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Unit Tests", "Unit Tests", "{D45DF403-6781-444E-B654-A96868C5BE68}"
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{D45DF403-6781-444E-B654-A96868C5BE68}"
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CNTKMathTest", "Math\CNTKMathTest\CNTKMathTest.vcxproj", "{6CEE834A-8104-46A8-8902-64C81BD7928F}"
|
||||
EndProject
|
||||
|
@ -155,6 +155,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "figures", "figures", "{889C
|
|||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Other", "Other", "{39E42C4B-A078-4CA4-9D92-B883D8129601}"
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CheckInSuites", "CheckInSuites\CheckInSuites.vcxproj", "{DBB3C106-B0B4-4059-8477-C89528CEC1B0}"
|
||||
ProjectSection(ProjectDependencies) = postProject
|
||||
{E6F26F9A-FF64-4F0A-B749-CD309EE357EE} = {E6F26F9A-FF64-4F0A-B749-CD309EE357EE}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|x64 = Debug|x64
|
||||
|
@ -214,6 +219,8 @@ Global
|
|||
{D667AF32-028A-4A5D-BE19-F46776F0F6B2}.Debug|x64.Build.0 = Debug|x64
|
||||
{D667AF32-028A-4A5D-BE19-F46776F0F6B2}.Release|x64.ActiveCfg = Release|x64
|
||||
{D667AF32-028A-4A5D-BE19-F46776F0F6B2}.Release|x64.Build.0 = Release|x64
|
||||
{DBB3C106-B0B4-4059-8477-C89528CEC1B0}.Debug|x64.ActiveCfg = Debug|x64
|
||||
{DBB3C106-B0B4-4059-8477-C89528CEC1B0}.Release|x64.ActiveCfg = Release|x64
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
|
@ -226,6 +233,7 @@ Global
|
|||
{6CEE834A-8104-46A8-8902-64C81BD7928F} = {D45DF403-6781-444E-B654-A96868C5BE68}
|
||||
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976} = {D45DF403-6781-444E-B654-A96868C5BE68}
|
||||
{0F30EBCF-09F3-4EED-BF54-4214BCE53FEC} = {D45DF403-6781-444E-B654-A96868C5BE68}
|
||||
{DBB3C106-B0B4-4059-8477-C89528CEC1B0} = {D45DF403-6781-444E-B654-A96868C5BE68}
|
||||
{E6646FFE-3588-4276-8A15-8D65C22711C1} = {33EBFE78-A1A8-4961-8938-92A271941F94}
|
||||
{1D5787D4-52E4-45DB-951B-82F220EE0C6A} = {33EBFE78-A1A8-4961-8938-92A271941F94}
|
||||
{62836DC1-DF77-4B98-BF2D-45C943B7DDC6} = {33EBFE78-A1A8-4961-8938-92A271941F94}
|
||||
|
|
|
@ -1,10 +1,8 @@
|
|||
|
||||
These scripts are similar to those in the TIMIT folder of the ExampleSetups except they use much fewer files (100 utterances) and fewer minibatches. See the README.txt file there for more details about these configurations.
|
||||
|
||||
The globals_cpu.config and globals_gpu.config differ only in which device they use and where the results are stored.
|
||||
|
||||
To test on CPU:
|
||||
cntk configFile=globals_cpu.config+<DesiredConfigFile>
|
||||
CNTK.exe WorkDir=... ExpDir=... LibDir=... ScpDir=... configFile=globals.config+select_cpu.config+<DesiredConfigFile>
|
||||
|
||||
To test on GPU:
|
||||
cntk configFile=globals_gpu.config+<DesiredConfigFile>
|
||||
CNTK.exe WorkDir=... ExpDir=... LibDir=... ScpDir=... configFile=globals.config+select_gpu.config+<DesiredConfigFile>
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
stderr=$ExpDir$\EvalSimpleNetwork\log\log
|
||||
command=TIMIT_EvalSimple
|
||||
command=TIMIT_EvalSimpleNetwork
|
||||
|
||||
precision=float
|
||||
|
||||
TIMIT_EvalSimple=[
|
||||
TIMIT_EvalSimpleNetwork=[
|
||||
action=eval
|
||||
|
||||
modelPath=$ExpDir$\TrainSimpleNetwork\model\cntkSpeech.dnn
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
stderr=$ExpDir$\TrainNDLNetwork\log\log
|
||||
command=TIMIT_TrainNDL
|
||||
command=TIMIT_TrainNDLNetwork
|
||||
|
||||
precision=float
|
||||
|
||||
#######################################
|
||||
# TRAINING CONFIG (NDL, Fixed LR) #
|
||||
#######################################
|
||||
TIMIT_TrainNDL=[
|
||||
TIMIT_TrainNDLNetwork=[
|
||||
action=train
|
||||
|
||||
modelPath=$ExpDir$\TrainNDLNetwork\model\cntkSpeech.dnn
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
stderr=$ExpDir$\TrainSimpleNetwork\log\log
|
||||
command=TIMIT_TrainSimple
|
||||
command=TIMIT_TrainSimpleNetwork
|
||||
|
||||
precision=float
|
||||
|
||||
|
@ -7,7 +7,7 @@ precision=float
|
|||
# TRAINING CONFIG (Simple, Fixed LR) #
|
||||
#######################################
|
||||
|
||||
TIMIT_TrainSimple=[
|
||||
TIMIT_TrainSimpleNetwork=[
|
||||
action=train
|
||||
|
||||
modelPath=$ExpDir$\TrainSimpleNetwork\model\cntkSpeech.dnn
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
# globals.config - config for ASR check-in tests
|
||||
|
||||
# the following variables must be passed on the command line to CNTK:
|
||||
# - WorkDir e.g. ..\..\..\ExampleSetups\ASR\TIMIT # assumes current directory is location of this config file; override on cmd line if not correct
|
||||
# - ExpDir e.g. ..\test_out\cpu
|
||||
# - LibDir e.g. $WorkDir$\lib
|
||||
# - ScpDir e.g. $LibDir$\scp
|
||||
|
||||
# data paths
|
||||
FBankScpShort=$ScpDir$\TIMIT.train.scp.fbank.fullpath.100
|
||||
MfccScpShort=$ScpDir$\TIMIT.train.scp.mfcc.fullpath.100
|
||||
FBankScpShortTest=$ScpDir$\TIMIT.core.scp.fbank.fullpath.20
|
||||
LogLikeScpShortTest=$ScpDir$\TIMIT.core.scp.scaledloglike.fullpath.20
|
||||
BNeckScpShortTest=$ScpDir$\TIMIT.core.scp.bottleneck.fullpath.20
|
||||
|
||||
MlfDir=$LibDir$\mlf
|
||||
|
||||
# config paths & parameters
|
||||
ConfigDir=$WorkDir$\config
|
||||
NdlDir=$ConfigDir$
|
||||
MelDir=$ConfigDir$
|
||||
MaxNumEpochs=5
|
|
@ -1,15 +0,0 @@
|
|||
WorkDir=..\..\..\ExampleSetups\ASR\TIMIT # assumes current directory is location of this config file; override on cmd line if not correct
|
||||
LibDir=$WorkDir$\lib
|
||||
ScpDir=$LibDir$\scp
|
||||
FBankScpShort=$ScpDir$\TIMIT.train.scp.fbank.fullpath.100
|
||||
MfccScpShort=$ScpDir$\TIMIT.train.scp.mfcc.fullpath.100
|
||||
FBankScpShortTest=$ScpDir$\TIMIT.core.scp.fbank.fullpath.20
|
||||
LogLikeScpShortTest=$ScpDir$\TIMIT.core.scp.scaledloglike.fullpath.20
|
||||
BNeckScpShortTest=$ScpDir$\TIMIT.core.scp.bottleneck.fullpath.20
|
||||
MlfDir=$LibDir$\mlf
|
||||
ConfigDir=$WorkDir$\config
|
||||
NdlDir=$ConfigDir$
|
||||
MelDir=$ConfigDir$
|
||||
MaxNumEpochs=5
|
||||
ExpDir=D:\temp\cntk\TIMIT\CheckInSuites\CPU
|
||||
DeviceNumber=-1
|
|
@ -1,15 +0,0 @@
|
|||
WorkDir=..\..\..\ExampleSetups\ASR\TIMIT # assumes current directory is location of this config file; override on cmd line if not correct
|
||||
LibDir=$WorkDir$\lib
|
||||
ScpDir=$LibDir$\scp
|
||||
FBankScpShort=$ScpDir$\TIMIT.train.scp.fbank.fullpath.100
|
||||
MfccScpShort=$ScpDir$\TIMIT.train.scp.mfcc.fullpath.100
|
||||
FBankScpShortTest=$ScpDir$\TIMIT.core.scp.fbank.fullpath.20
|
||||
LogLikeScpShortTest=$ScpDir$\TIMIT.core.scp.scaledloglike.fullpath.20
|
||||
BNeckScpShortTest=$ScpDir$\TIMIT.core.scp.bottleneck.fullpath.20
|
||||
MlfDir=$LibDir$\mlf
|
||||
ConfigDir=$WorkDir$\config
|
||||
NdlDir=$ConfigDir$
|
||||
MelDir=$ConfigDir$
|
||||
MaxNumEpochs=5
|
||||
ExpDir=D:\temp\cntk\TIMIT\CheckInSuites\GPU
|
||||
DeviceNumber=0
|
|
@ -1,15 +1,37 @@
|
|||
set cnpath=d:\gitroot\cntk\CNTKSolution\x64\Release
|
||||
set proc=%1
|
||||
echo on
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_TrainSimpleNetwork.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_TrainNDLNetwork.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_TrainAutoEncoder.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_TrainMultiInput.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_TrainMultiTask.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_EvalSimpleNetwork.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_WriteScaledLogLike.config
|
||||
%cnpath%\cntk configFile=globals_%proc%.config+TIMIT_WriteBottleneck.config
|
||||
::: first argument is CPU or GPU
|
||||
set PROC=%1
|
||||
::: second argument is buildconfig (Release or Debug)
|
||||
set BUILD=%2
|
||||
echo running ASR test on %PROC%
|
||||
|
||||
::: the CNTK executable is found relative to this BAT file
|
||||
set THIS=%~dp0
|
||||
set ROOT=%THIS%..\..\..
|
||||
|
||||
set CNTK=%ROOT%\x64\%2\CNTK.exe
|
||||
|
||||
::: directories we pass in to CNTK config
|
||||
|
||||
::: example setups are here
|
||||
set WorkDir=%ROOT%\ExampleSetups\ASR\TIMIT
|
||||
set ExpDir=%THIS%..\test_out
|
||||
set LibDir=%WorkDir%\lib
|
||||
set ScpDir=%LibDir%\scp
|
||||
|
||||
::: run all tests
|
||||
::: TODO: fix the log path, it seems it cannot be passed to CNTK currently on the command line
|
||||
for %%t in (TrainSimpleNetwork TrainNDLNetwork TrainAutoEncoder TrainMultiInput TrainMultiTask EvalSimpleNetwork WriteScaledLogLike WriteBottleneck) do (
|
||||
echo ------
|
||||
echo running test TIMIT_%%t.config logging to %ExpDir%\%%t\log\log_TIMIT_%%t.log
|
||||
%CNTK% WorkDir=%WorkDir% ExpDir=%ExpDir% LibDir=%LibDir% ScpDir=%ScpDir% configFile=%THIS%globals.config+%THIS%select_%PROC%.config+%THIS%TIMIT_%%t.config
|
||||
if ERRORLEVEL 1 (
|
||||
echo CNTK FAILED:
|
||||
findstr /I EXCEPTION %ExpDir%\%%t\log\log_TIMIT_%%t.log
|
||||
) else (
|
||||
echo CNTK OUTPUT:
|
||||
findstr /I Finished %ExpDir%\%%t\log\log_TIMIT_%%t.log
|
||||
findstr /I EXCEPTION %ExpDir%\%%t\log\log_TIMIT_%%t.log
|
||||
echo REFERENCE:
|
||||
findstr /I Finished %THIS%..\%PROC%\%%t.output
|
||||
)
|
||||
)
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
DeviceNumber=-1
|
|
@ -0,0 +1 @@
|
|||
DeviceNumber=0
|
|
@ -0,0 +1,195 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\add_layer.mel" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\ae.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\classify.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\create_1layer.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\default_macros.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\globals.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\lstm.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_fbank_mfcc.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_senones_dr.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\PAC-RNN.ndl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\README.txt" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_AdaptLearnRate.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_EvalSimpleNetwork.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainAutoEncoder.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainLSTM.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiInput.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiTask.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainNDLNetwork.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainSimpleNetwork.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainWithPreTrain.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteBottleneck.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteScaledLogLike.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\LSTM_ExpectedOutputs.txt" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_DNN.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_LSTM.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.bigram.arpa" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.dnn_map" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.lookahead" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.tfsa" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.transitions" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\DNN_ExpectedOutputs.txt" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\LSTM_ExpectedOutputs.txt" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_DNN.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_LSTM.config" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\cf\CF.fbank24_zda" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_dr_mlf.pl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_mlf_cntk.pl" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_dr.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_dr.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.statelist" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_dr.mlf.cntk" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath.20" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath.20" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath.20" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn.20" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.100" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn.100" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath" />
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath.100" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ExpectedResults.log" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\global.config" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.config" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.nce.config.txt" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlmconfig.txt" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\perplexity.nce100.lr0.1.txt" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.test.cntk.100.txt" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.train.cntk.100.txt" />
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.valid.cntk.100.txt" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\ExpectedOutputs.txt" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\fnnlm.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\global.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\rnnlm.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\ExpectedOutputs.txt" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\fnnlm.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\global.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.gpu.config" />
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\rnnlm.ndl" />
|
||||
<None Include="..\ExampleSetups\NDLExamples.ndl" />
|
||||
<None Include="..\ExampleSetups\SLU\conlleval.pl" />
|
||||
<None Include="..\ExampleSetups\SLU\feat.txt" />
|
||||
<None Include="..\ExampleSetups\SLU\globals.config" />
|
||||
<None Include="..\ExampleSetups\SLU\lbl.txt" />
|
||||
<None Include="..\ExampleSetups\SLU\lstmNDL.txt" />
|
||||
<None Include="..\ExampleSetups\SLU\README.txt" />
|
||||
<None Include="..\ExampleSetups\SLU\rnnlu.config" />
|
||||
<None Include="..\ExampleSetups\SLU\rnnluModelEditor.txt" />
|
||||
<None Include="..\ExampleSetups\SLU\score.sh" />
|
||||
<None Include="ASR\config\globals.config" />
|
||||
<None Include="ASR\config\README.txt" />
|
||||
<None Include="ASR\config\runall.bat" />
|
||||
<None Include="ASR\config\select_cpu.config" />
|
||||
<None Include="ASR\config\select_gpu.config" />
|
||||
<None Include="ASR\config\TIMIT_EvalSimpleNetwork.config" />
|
||||
<None Include="ASR\config\TIMIT_TrainAutoEncoder.config" />
|
||||
<None Include="ASR\config\TIMIT_TrainMultiInput.config" />
|
||||
<None Include="ASR\config\TIMIT_TrainMultiTask.config" />
|
||||
<None Include="ASR\config\TIMIT_TrainNDLNetwork.config" />
|
||||
<None Include="ASR\config\TIMIT_TrainSimpleNetwork.config" />
|
||||
<None Include="ASR\config\TIMIT_WriteBottleneck.config" />
|
||||
<None Include="ASR\config\TIMIT_WriteScaledLogLike.config" />
|
||||
<None Include="ASR\CPU\EvalSimpleNetwork.output" />
|
||||
<None Include="ASR\CPU\TrainAutoEncoder.output" />
|
||||
<None Include="ASR\CPU\TrainMultiInput.output" />
|
||||
<None Include="ASR\CPU\TrainMultiTask.output" />
|
||||
<None Include="ASR\CPU\TrainNDLNetwork.output" />
|
||||
<None Include="ASR\CPU\TrainSimpleNetwork.output" />
|
||||
<None Include="ASR\GPU\EvalSimpleNetwork.output" />
|
||||
<None Include="ASR\GPU\TrainAutoEncoder.output" />
|
||||
<None Include="ASR\GPU\TrainMultiInput.output" />
|
||||
<None Include="ASR\GPU\TrainMultiTask.output" />
|
||||
<None Include="ASR\GPU\TrainNDLNetwork.output" />
|
||||
<None Include="ASR\GPU\TrainSimpleNetwork.output" />
|
||||
<None Include="LM\RNNLM\CPU\ExpectedOutputs.txt" />
|
||||
<None Include="LM\RNNLM\CPU\global.config" />
|
||||
<None Include="LM\RNNLM\CPU\rnnlm.config" />
|
||||
<None Include="LM\RNNLM\GPU\ExpectedOutputs.txt" />
|
||||
<None Include="LM\RNNLM\GPU\rnnlm.config" />
|
||||
<None Include="MNIST\CPU\expectedresults.txt" />
|
||||
<None Include="MNIST\DefaultMacros.ndl" />
|
||||
<None Include="MNIST\Example.ndl" />
|
||||
<None Include="MNIST\GPU\expectedresults.txt" />
|
||||
<None Include="MNIST\mnistCheckIn.config" />
|
||||
<None Include="MNIST\mnistlabels.txt" />
|
||||
<None Include="runall.bat" />
|
||||
<None Include="SLU\atis.dev.IOB.simple" />
|
||||
<None Include="SLU\atis.test.apos.pred.pos.head.IOB.simple" />
|
||||
<None Include="SLU\atis.train.apos.pred.pos.head.IOB.simple" />
|
||||
<None Include="SLU\Expected.log" />
|
||||
<None Include="SLU\globals.config" />
|
||||
<None Include="SLU\input.txt" />
|
||||
<None Include="SLU\inputmap.txt" />
|
||||
<None Include="SLU\lstmNDL.txt" />
|
||||
<None Include="SLU\output.txt" />
|
||||
<None Include="SLU\README.txt" />
|
||||
<None Include="SLU\rnnlu.config" />
|
||||
<None Include="SLU\rnnlu.ndl.config" />
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{DBB3C106-B0B4-4059-8477-C89528CEC1B0}</ProjectGuid>
|
||||
<Keyword>MakeFileProj</Keyword>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Makefile</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Makefile</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<NMakePreprocessorDefinitions>WIN32;_DEBUG;$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
|
||||
<NMakeBuildCommandLine>runall.bat $(Configuration)</NMakeBuildCommandLine>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<NMakePreprocessorDefinitions>WIN32;NDEBUG;$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
|
||||
<NMakeBuildCommandLine>runall.bat $(Configuration)</NMakeBuildCommandLine>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup>
|
||||
</ItemDefinitionGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
|
@ -0,0 +1,523 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup>
|
||||
<Filter Include="CheckInSuites">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0000}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\ASR">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0001}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\ASR\config">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0002}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\ASR\CPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0003}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\ASR\GPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0004}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\LM">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0005}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\LM\RNNLM">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0006}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\LM\RNNLM\CPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0007}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\LM\RNNLM\GPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0008}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\MNIST">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0009}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\MNIST\CPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0010}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\MNIST\GPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0011}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="CheckInSuites\SLU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0012}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0013}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0014}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0015}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\config">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0016}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\CPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0017}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\decoding">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0018}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\GPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0019}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\lib">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0020}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\lib\cf">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0021}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\lib\mlf">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0022}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\ASR\TIMIT\lib\scp">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0023}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\LM">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0024}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\LM\LSTMLM">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0025}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\LM\RNNLM">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0026}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\LM\RNNLM\CPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0027}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\LM\RNNLM\GPU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0028}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="ExampleSetups\SLU">
|
||||
<UniqueIdentifier>{39d7aaf4-beea-43e1-8bc3-1c2a442f0029}</UniqueIdentifier>
|
||||
</Filter>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\add_layer.mel">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\ae.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\classify.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\create_1layer.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\default_macros.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\globals.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\lstm.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_fbank_mfcc.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_senones_dr.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\PAC-RNN.ndl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\README.txt">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_AdaptLearnRate.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_EvalSimpleNetwork.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainAutoEncoder.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainLSTM.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiInput.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiTask.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainNDLNetwork.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainSimpleNetwork.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainWithPreTrain.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteBottleneck.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteScaledLogLike.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\config</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\LSTM_ExpectedOutputs.txt">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_DNN.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_LSTM.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.bigram.arpa">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\decoding</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.dnn_map">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\decoding</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.lookahead">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\decoding</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.tfsa">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\decoding</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.transitions">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\decoding</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\DNN_ExpectedOutputs.txt">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\LSTM_ExpectedOutputs.txt">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_DNN.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_LSTM.config">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\cf\CF.fbank24_zda">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\cf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_dr_mlf.pl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_mlf_cntk.pl">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_dr.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_dr.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.statelist">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_dr.mlf.cntk">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath.20">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath.20">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath.20">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn.20">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.100">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn.100">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath.100">
|
||||
<Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ExpectedResults.log">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\global.config">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.config">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.nce.config.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlmconfig.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\perplexity.nce100.lr0.1.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.test.cntk.100.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.train.cntk.100.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.valid.cntk.100.txt">
|
||||
<Filter>ExampleSetups\LM\LSTMLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\ExpectedOutputs.txt">
|
||||
<Filter>ExampleSetups\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\fnnlm.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\global.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\CPU\rnnlm.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\ExpectedOutputs.txt">
|
||||
<Filter>ExampleSetups\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\fnnlm.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\global.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.gpu.config">
|
||||
<Filter>ExampleSetups\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\LM\RNNLM\rnnlm.ndl">
|
||||
<Filter>ExampleSetups\LM\RNNLM</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\NDLExamples.ndl">
|
||||
<Filter>ExampleSetups</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\conlleval.pl">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\feat.txt">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\globals.config">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\lbl.txt">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\lstmNDL.txt">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\README.txt">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\rnnlu.config">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\rnnluModelEditor.txt">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="..\ExampleSetups\SLU\score.sh">
|
||||
<Filter>ExampleSetups\SLU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\README.txt">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\runall.bat">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_EvalSimpleNetwork.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_TrainAutoEncoder.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_TrainMultiInput.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_TrainMultiTask.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_TrainNDLNetwork.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_TrainSimpleNetwork.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_WriteBottleneck.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\TIMIT_WriteScaledLogLike.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\EvalSimpleNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\TrainAutoEncoder.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\TrainMultiInput.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\TrainMultiTask.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\TrainNDLNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\CPU\TrainSimpleNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\CPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\EvalSimpleNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\TrainAutoEncoder.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\TrainMultiInput.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\TrainMultiTask.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\TrainNDLNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\GPU\TrainSimpleNetwork.output">
|
||||
<Filter>CheckInSuites\ASR\GPU</Filter>
|
||||
</None>
|
||||
<None Include="LM\RNNLM\CPU\ExpectedOutputs.txt">
|
||||
<Filter>CheckInSuites\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="LM\RNNLM\CPU\global.config">
|
||||
<Filter>CheckInSuites\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="LM\RNNLM\CPU\rnnlm.config">
|
||||
<Filter>CheckInSuites\LM\RNNLM\CPU</Filter>
|
||||
</None>
|
||||
<None Include="LM\RNNLM\GPU\ExpectedOutputs.txt">
|
||||
<Filter>CheckInSuites\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="LM\RNNLM\GPU\rnnlm.config">
|
||||
<Filter>CheckInSuites\LM\RNNLM\GPU</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\CPU\expectedresults.txt">
|
||||
<Filter>CheckInSuites\MNIST\CPU</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\DefaultMacros.ndl">
|
||||
<Filter>CheckInSuites\MNIST</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\Example.ndl">
|
||||
<Filter>CheckInSuites\MNIST</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\GPU\expectedresults.txt">
|
||||
<Filter>CheckInSuites\MNIST\GPU</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\mnistCheckIn.config">
|
||||
<Filter>CheckInSuites\MNIST</Filter>
|
||||
</None>
|
||||
<None Include="MNIST\mnistlabels.txt">
|
||||
<Filter>CheckInSuites\MNIST</Filter>
|
||||
</None>
|
||||
<None Include="SLU\atis.dev.IOB.simple">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\atis.test.apos.pred.pos.head.IOB.simple">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\atis.train.apos.pred.pos.head.IOB.simple">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\Expected.log">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\globals.config">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\input.txt">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\inputmap.txt">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\lstmNDL.txt">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\output.txt">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\README.txt">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\rnnlu.config">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="SLU\rnnlu.ndl.config">
|
||||
<Filter>CheckInSuites\SLU</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\globals.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\select_cpu.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="ASR\config\select_gpu.config">
|
||||
<Filter>CheckInSuites\ASR\config</Filter>
|
||||
</None>
|
||||
<None Include="runall.bat">
|
||||
<Filter>CheckInSuites</Filter>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@ -0,0 +1,22 @@
|
|||
::: this runs all tests in this folder
|
||||
::: BUGBUG: so far only the ASR tests have updated pathnames etc., the others are non-functional stubs here that need to be updated
|
||||
::: TODO: find a good solution for specifying directories for data that we cannot distribute with CNTK ourselves.
|
||||
|
||||
set BUILD=%1
|
||||
|
||||
set THIS=%~dp0
|
||||
|
||||
::: ASR tests
|
||||
::: BUGBUG: We do not get to see stdout from CNTK, only from the BAT files.
|
||||
( %THIS%\ASR\config\runall.bat cpu %BUILD% ) 2>&1
|
||||
( %THIS%\ASR\config\runall.bat gpu %BUILD% ) 2>&1
|
||||
|
||||
::: LM tests
|
||||
::: TODO: provide BAT file
|
||||
|
||||
::: MNIST
|
||||
::: TODO: provide BAT file
|
||||
|
||||
::: SLU
|
||||
::: TODO: update paths
|
||||
C:\dev\cntk3\CNTKSolution\x64\Release\cntk configFile=globals.config+rnnlu.config
|
|
@ -6,9 +6,12 @@
|
|||
|
||||
// This file requires the NVML library. Unfortunately, this library does not install an environment variable for locating it.
|
||||
// On Windows, the SDK gets installed to "c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml" (/include, /lib).
|
||||
// On Linux, you need to install the deployment kit from https://developer.nvidia.com/gpu-deployment-kit and
|
||||
// set NVML_INCLUDE = /the path you installed deployment kit/usr/include/nvidia/gdk
|
||||
|
||||
// From the SDK documentation:
|
||||
// "The NVML library can be found at: %ProgramW6432%\"NVIDIA Corporation"\NVSMI\ on Windows, but will not be added to the path. To dynamically link to NVML, add this path to the PATH environmental variable. To dynamically load NVML, call LoadLibrary with this path."
|
||||
// "On Linux the NVML library will be found on the standard library path. For 64-bit Linux, both the 32-bit and 64-bit NVML libraries will be installed."
|
||||
// "On Linux the NVML library will be found on the standard library path. For 64-bit Linux, both the 32-bit and 64-bit NVML libraries will be installed.
|
||||
|
||||
#define _CRT_SECURE_NO_WARNINGS // "secure" CRT not available on all platforms --add this at the top of all CPP files that give "function or variable may be unsafe" warnings
|
||||
#include "Platform.h"
|
||||
|
@ -17,8 +20,8 @@
|
|||
#ifndef CPUONLY
|
||||
#pragma comment (lib, "cudart.lib")
|
||||
#include <cuda_runtime.h>
|
||||
#include <nvml.h> // note: expected at "c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include" (Windows)
|
||||
#pragma comment (lib, "nvml.lib") // note: expected at "c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib" (Windows)
|
||||
#include <nvml.h> // note: expected at "c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include" (Windows) and /the path you installed deployment kit/usr/include/nvidia/gdk (Linux)
|
||||
#pragma comment (lib, "nvml.lib") // note: expected at "c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib" (Windows) and /the path you installed deployment kit/usr/include/nvidia/gdk (Linux)
|
||||
#include <vector>
|
||||
#endif
|
||||
#include "CommonMatrix.h" // for CPUDEVICE and AUTOPLACEMATRIX
|
||||
|
|
|
@ -41,11 +41,14 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
std::string::size_type ParseKeyValue(const std::string& token, std::string::size_type pos, ConfigParameters& dict);
|
||||
|
||||
// ConfigValue - value of one configuration parameter
|
||||
// Parses from string to resultant value on assignment
|
||||
// Parses from string to resultant value on assignment. Basically a ConfigValue is a std::string with type casts to convert it to numeric types, boolean, etc.,
|
||||
// by simply type-casting it or assigning it to a value of the desired type.
|
||||
// ConfigParameters::ConfigDictionary is a collection of names ConfigValues, which know which collection they belong to (their 'parent').
|
||||
// Often, they get constructed on the fly and passed around by value, e.g. in modified form or when falling back to a default value, without being added to the collection.
|
||||
class ConfigValue : public std::string
|
||||
{
|
||||
std::string m_configName; // name of this configuration, e.g. for error messages, optional
|
||||
const ConfigParameters* m_parent; // keep track of parent pointer
|
||||
std::string m_configName; // name of this configuration, e.g. for error messages, optional
|
||||
const ConfigParameters* m_parent; // we belong to this collection of ConfigValues
|
||||
public:
|
||||
std::string Name() const
|
||||
{return m_configName;}
|
||||
|
@ -232,19 +235,30 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
ConfigParser& operator=(const ConfigParser& configParser) = default;
|
||||
|
||||
public:
|
||||
// FindBraces - find matching braces in a string starting at the current position
|
||||
// FindBraces - parser helper function to find matching braces in a string starting at the current position (any leading whitespace must have been consumed).
|
||||
// Basically this tokenizes an entire bracketed section into a single token.
|
||||
// This function assumes that all the underlying languages involved have matching braces.
|
||||
// Braces matched by this function are:
|
||||
// - [ ... ]
|
||||
// - { ... }
|
||||
// - ( ... )
|
||||
// - " ... " (yes)
|
||||
// str - string to search
|
||||
// tokenStart - start location in the string to search
|
||||
// returns: position of last brace, -1 if no braces at current position
|
||||
// returns: character position of matching closing brace, string::npos if no brace present at start position
|
||||
// BUGBUG: This seems to only work for one kind of braces at a time. Nested other braces are not understood. Also, braces in strings are not protected. [fseide]
|
||||
static std::string::size_type FindBraces(const std::string& str, std::string::size_type tokenStart)
|
||||
{
|
||||
static const std::string openBraces = OPENBRACES; // open braces and quote
|
||||
static const std::string openBraces = OPENBRACES; // open braces and quote
|
||||
static const std::string closingBraces = CLOSINGBRACES; // close braces and quote
|
||||
const auto len = str.length();
|
||||
if (tokenStart >= len) // start is outside (or rather, at end of string): no brace here
|
||||
return npos;
|
||||
auto braceFound = openBraces.find(str[tokenStart]);
|
||||
auto len = str.length();
|
||||
if (braceFound == npos || tokenStart >= len)
|
||||
if (braceFound == npos) // no brace present at tokenStart
|
||||
return npos;
|
||||
|
||||
// string begins with a brace--find the closing brace, while correctly handling nested braces
|
||||
std::vector<std::string::size_type> bracesFound;
|
||||
std::string::size_type current, opening;
|
||||
current = opening = tokenStart; //str.find_first_of(openBraces, tokenStart);
|
||||
|
@ -287,7 +301,19 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return current;
|
||||
}
|
||||
|
||||
// Parse - Parse the string; segment string by top-level a=b expressions and call (virtual) ParseValue() on them.
|
||||
// ParseValue - virtual function to parse a "token" as tokenized by Parse() below.
|
||||
// Parse() calls into ParseValue() which is a virtual function that implements how an assignment token is to be processed.
|
||||
virtual std::string::size_type ParseValue(const std::string& stringParse, std::string::size_type tokenStart, std::string::size_type tokenEnd) = 0;
|
||||
|
||||
// Parse - Break a string into "records" and pass each to a user-specified function, where
|
||||
// - record separator is newline and an optional record separator character (such as semicolon)
|
||||
// - leading and trailing white space is trimmed from records
|
||||
// - nested blocks (braces, string literals) are honored: record separators inside braces or quotes are ignored
|
||||
// In the simplest case, "records" are lines of text, e.g. the lines of a configuration file. Any further parsing of these lines, e.g. of the form a=b, is up to the user-specified ParseValue()).
|
||||
// The above is subject to some special behaviors:
|
||||
// - records that are complete brace expressions themselves are flattened, e.g. a ; [ x ; [ u ; v ] ] ; b emits the tokens "a", "x", "[ u ; v ]", and "b"
|
||||
// This is meant for the case where the entire string is a brace expression (TODO: is that true? [fseide]).
|
||||
// - the separator character can be changed inside a brace expression by appending the different separator right after the brace, e.g. [- a - b] will separate using '-' instead of ';'. TODO: document what this is used for.
|
||||
// This function is used at lots of places for various purposes.
|
||||
// - (ConfigParameters from file) config-file parsing passes in expressions of the type a1=b1 \n a2=b2 \n ..., creates a ConfigDictionary entry for each top-level a=b expression, where b can be a block in braces
|
||||
// - (ConfigParameters) right-hand side that is an array of parameters [ a1=b1; a2=b2 ...], with surrounding braces
|
||||
|
@ -298,84 +324,88 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// - more to be added
|
||||
// stringParse - string to parse
|
||||
// pos - postion to start parsing at
|
||||
void Parse(const std::string& stringParse, std::string::size_type pos=0)
|
||||
// m_separator - extra separator character between tokens, typically ';' (in addition to comma and newline)
|
||||
void Parse(const std::string& stringParse, std::string::size_type pos = 0)
|
||||
{
|
||||
// list of possible custom separators
|
||||
const std::string customSeperators = "`~!@$%^&*_-+|:;,?.";
|
||||
std::string seps = ",\r\n"; // default separators
|
||||
// add braces and current separator to the separators list so we skip them
|
||||
seps += m_separator;
|
||||
std::string sepsBraces(seps);
|
||||
sepsBraces += OPENBRACES;
|
||||
// set of record separator characters
|
||||
std::string seps = ",\r\n"; // default separators
|
||||
seps += m_separator; // and one extra caller-specified one (typically ';'). Note that this gets temporarily changed inside content level, see below.
|
||||
// set that includes both record separators and all open-brace characters
|
||||
std::string sepsBraces = seps + OPENBRACES; // OPENBRACES includes anything that requires a closing, including "
|
||||
|
||||
// Establish string and get the first token:
|
||||
auto tokenEnd = pos;
|
||||
auto totalLength = stringParse.length();
|
||||
// set up for token loop
|
||||
auto tokenEnd = pos; // current token's end
|
||||
const auto totalLength = stringParse.length();
|
||||
auto braceEnd = totalLength;
|
||||
bool contentLevel = false; // content level (not surrounding braces)
|
||||
bool contentLevel = false; // are we inside content? (=an outer level of braces)
|
||||
|
||||
do
|
||||
do // loop over tokens
|
||||
{
|
||||
// consume separators (newline, comma, semicolon)
|
||||
auto tokenStart = stringParse.find_first_not_of(seps, tokenEnd);
|
||||
if (tokenStart==npos) // no more tokens
|
||||
break;
|
||||
// skip any leading spaces
|
||||
// consume any leading spaces
|
||||
tokenStart = stringParse.find_first_not_of(" \t", tokenStart);
|
||||
if (tokenStart == npos)
|
||||
break;
|
||||
|
||||
auto braceEndFound = FindBraces(stringParse, tokenStart);
|
||||
// lex one token--this determines 'tokenEnd' (we already got 'tokenStart')
|
||||
|
||||
// First check whether we are in a braced condition (including ").
|
||||
const auto braceEndFound = FindBraces(stringParse, tokenStart);
|
||||
bool quoteFound = false;
|
||||
|
||||
if (braceEndFound != npos && tokenStart+1 < totalLength)
|
||||
if (braceEndFound != npos) // opening braces found
|
||||
{
|
||||
if (!contentLevel)
|
||||
// consume one level of braces right here, enter "content level" mode
|
||||
if (!contentLevel && tokenStart + 1 < totalLength/*[fseide] why is this test necessary?*/)
|
||||
{
|
||||
tokenStart++; // skip the brace
|
||||
tokenStart++; // consume the opening brace
|
||||
// check for custom separator character
|
||||
// If the opening brace is immediately followed by any of the customSeparators, change m_separator (inside seps) to that character.
|
||||
const static std::string customSeperators = "`~!@$%^&*_-+|:;,?."; // TODO: document what this is for, where it is used [fseide]
|
||||
if (customSeperators.find(stringParse[tokenStart]) != npos)
|
||||
{
|
||||
char separator = stringParse[tokenStart];
|
||||
seps[seps.length()-1] = separator;
|
||||
seps[seps.length()-1] = separator; // this was m_separator; on content level, we change it to a custom separator (it gets changed back when we exit content level)
|
||||
sepsBraces = seps + OPENBRACES;
|
||||
tokenStart++; // skip the separator
|
||||
tokenStart++; // consume the separator
|
||||
}
|
||||
braceEnd = braceEndFound;
|
||||
tokenEnd = tokenStart;
|
||||
contentLevel = true; // now at content level
|
||||
continue;
|
||||
continue; // this sort of "recursively" calls ourselves with contentLevel flag set. [fseide] does this make sense for strings??
|
||||
}
|
||||
}
|
||||
|
||||
// content level braces, just find the end of the braces
|
||||
if (braceEndFound != npos)
|
||||
{
|
||||
if (stringParse[braceEndFound] == '"')
|
||||
// content level braces: just find the end of the braces, and that's our token
|
||||
if (stringParse[braceEndFound] == '"') // special case for strings
|
||||
{ // for quoted string we skip the quotes
|
||||
tokenStart++;
|
||||
tokenEnd = braceEndFound;
|
||||
quoteFound = true;
|
||||
tokenEnd = braceEndFound; // position of closing "
|
||||
quoteFound = true; // tells code below to consume the closing "
|
||||
}
|
||||
else
|
||||
else // a regular brace: the entire brace expression becomes the token, including the braces themselves
|
||||
{
|
||||
tokenEnd = braceEndFound+1; // tokenEnd is one past the character we want
|
||||
}
|
||||
}
|
||||
else
|
||||
else // not braces
|
||||
{
|
||||
// find the end of the token
|
||||
tokenEnd = stringParse.find_first_of(sepsBraces, tokenStart);
|
||||
|
||||
// now look for contained braces before the next break
|
||||
if (tokenEnd != npos)
|
||||
braceEndFound = FindBraces(stringParse, tokenEnd);
|
||||
// found an embedded brace, go to matching end brace to end token
|
||||
if (braceEndFound != npos)
|
||||
{
|
||||
tokenEnd = braceEndFound+1; // token includes the closing brace
|
||||
const auto braceEndFound = FindBraces(stringParse, tokenEnd);
|
||||
// found an embedded brace, extend token to the end of the braces
|
||||
if (braceEndFound != npos)
|
||||
{
|
||||
tokenEnd = braceEndFound+1; // token includes the closing brace
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (tokenEnd==npos || tokenEnd > braceEnd) // no more separators
|
||||
{ // use the length of the string as the boundary
|
||||
tokenEnd = braceEnd;
|
||||
|
@ -383,20 +413,23 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
break;
|
||||
}
|
||||
}
|
||||
// token has been determined to range from tokenStart to tokenEnd
|
||||
|
||||
// now parse the value
|
||||
// now parse the value in a caller-specific fashion (through a virtual call into our host class)
|
||||
if (tokenEnd > tokenStart)
|
||||
{
|
||||
tokenEnd = ParseValue(stringParse, tokenStart, tokenEnd);
|
||||
}
|
||||
|
||||
// prepare to advance to next token
|
||||
// if we hit the end of a brace block, move past the ending brace and reset
|
||||
if (tokenEnd == braceEnd)
|
||||
{
|
||||
tokenEnd++;
|
||||
tokenEnd++; // consume closing brace
|
||||
braceEnd = totalLength;
|
||||
seps[seps.length()-1] = m_separator; // restore default separator
|
||||
sepsBraces = seps + OPENBRACES;
|
||||
contentLevel = false;
|
||||
contentLevel = false; // pop out of content level
|
||||
}
|
||||
if (quoteFound)
|
||||
{ // skip the closing quote
|
||||
|
@ -414,7 +447,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// If there is no comment, simply return the original 'configString'
|
||||
// If there is a comment, remove the part of 'configString' corresponding to the comment
|
||||
// Note that midline comments need to be preceded by whitespace, otherwise they are not treated as comments.
|
||||
std::string StripComments(const std::string &configLine) const
|
||||
static std::string StripComments(const std::string &configLine)
|
||||
{
|
||||
std::string::size_type pos = configLine.find_first_not_of(" \t");
|
||||
|
||||
|
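A small illustration of the comment rule documented above, written as a standalone sketch (assumed behavior, not the actual CNTK member function): '#' begins a comment only at the start of a line or when preceded by whitespace, so a '#' glued to other characters is kept.

    #include <cassert>
    #include <string>

    // Stand-in for the rule described above; not the real StripComments.
    static std::string StripCommentsSketch(const std::string& line)
    {
        for (std::string::size_type i = 0; i < line.size(); i++)
            if (line[i] == '#' && (i == 0 || line[i - 1] == ' ' || line[i - 1] == '\t'))
                return line.substr(0, i);
        return line;
    }

    int main()
    {
        assert(StripCommentsSketch("lr=0.1 # learning rate") == "lr=0.1 ");
        assert(StripCommentsSketch("tag=a#b") == "tag=a#b"); // '#' not preceded by whitespace: kept
        assert(StripCommentsSketch("# whole-line comment").empty());
        return 0;
    }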
@ -435,7 +468,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return (chPrev == ' ' || chPrev == '\t') ? configLine.substr(pos, midLineCommentPos - pos) : configLine;
|
||||
}
|
||||
|
||||
virtual std::string::size_type ParseValue(const std::string& stringParse, std::string::size_type tokenStart, std::string::size_type tokenEnd) = 0;
|
||||
std::string ReadConfigFile(const std::string &filePath);
|
||||
std::string ReadConfigFile(const std::wstring &filePath);
|
||||
std::string ReadConfigFiles(const std::string &filePaths);
|
||||
|
|
|
@ -89,7 +89,7 @@ public:
|
|||
retries++;
|
||||
}
|
||||
}
|
||||
fprintf (stderr, "randomordering: %zu retries for %zu elements (%.1f%%) to ensure window condition\n", retries, map.size(), 100.0 * retries / map.size());
|
||||
fprintf(stderr, "randomordering: %lu retries for %lu elements (%.1f%%) to ensure window condition\n", (unsigned long)retries, (unsigned long)map.size(), 100.0 * retries / map.size());
|
||||
// ensure the window condition
|
||||
foreach_index (t, map) assert ((size_t) t <= map[t] + randomizationrange/2 && map[t] < (size_t) t + randomizationrange/2);
|
||||
#if 0 // and a live check since I don't trust myself here yet
|
||||
|
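The change above, and the analogous ones in later hunks, follows a common portability idiom: compilers without C99 '%zu' support cannot print size_t directly, so the value is cast to unsigned long and printed with '%lu'. A minimal sketch of the idiom with hypothetical values:

    #include <cstddef>
    #include <cstdio>

    int main()
    {
        size_t retries = 42, elements = 1000; // hypothetical values
        // Cast size_t explicitly to the type named in the format string.
        fprintf(stderr, "randomordering-style message: %lu retries for %lu elements (%.1f%%)\n",
                (unsigned long)retries, (unsigned long)elements, 100.0 * retries / elements);
        return 0;
    }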
|
|
@ -442,7 +442,7 @@ public:
|
|||
long TickDelta = TickStop - TickStart;
|
||||
|
||||
if (m_traceLevel > 2)
|
||||
fprintf(stderr, "\n%zu ms, %d numbers parsed\n\n", TickDelta, m_totalNumbersConverted );
|
||||
fprintf(stderr, "\n%l ms, %d numbers parsed\n\n", TickDelta, m_totalNumbersConverted );
|
||||
return lineCount;
|
||||
}
|
||||
|
||||
|
@ -602,7 +602,7 @@ public:
|
|||
long TickDelta = TickStop - TickStart;
|
||||
|
||||
if (m_traceLevel > 2)
|
||||
fprintf(stderr, "\n%zu ms, %d numbers parsed\n\n", TickDelta, m_totalNumbersConverted );
|
||||
fprintf(stderr, "\n%l ms, %d numbers parsed\n\n", TickDelta, m_totalNumbersConverted );
|
||||
return lineCount;
|
||||
}
|
||||
|
||||
|
|
|
@ -1386,6 +1386,7 @@ void BatchSequenceReader<ElemType>::Init(const ConfigParameters& readerConfig)
|
|||
ConfigParameters featureConfig = readerConfig(m_featuresName,"");
|
||||
ConfigParameters labelConfig[2] = {readerConfig(m_labelsName[0],""),readerConfig(m_labelsName[1],"")};
|
||||
string mode = featureConfig("mode","class");//class, softmax, nce
|
||||
std::transform(mode.begin(), mode.end(), mode.begin(), ::tolower);
|
||||
|
||||
if (mode == "nce")
|
||||
{
|
||||
|
@ -1397,6 +1398,8 @@ void BatchSequenceReader<ElemType>::Init(const ConfigParameters& readerConfig)
|
|||
readerMode = ReaderMode::Softmax;
|
||||
else if (mode == "class")
|
||||
readerMode = ReaderMode::Class;
|
||||
else
|
||||
LogicError("unsupported format %s", mode.c_str());
|
||||
|
||||
/// read unk symbol
|
||||
mUnk = readerConfig("unk", "<unk>");
|
||||
|
|
|
@ -40,7 +40,8 @@ enum ReaderMode
|
|||
Softmax = 0, // no labels to worry about
|
||||
Class = 1, // category labels, creates mapping tables
|
||||
NCE = 2, // sentence mapping (predicts next word)
|
||||
None = 3, // some other type of label
|
||||
Unnormalize = 3,
|
||||
None = 4, // some other type of label
|
||||
};
|
||||
|
||||
template <typename Count>
|
||||
|
|
|
@ -548,7 +548,7 @@ void UCIFastReader<ElemType>::SetupEpoch()
|
|||
if (m_totalSamples == 0)
|
||||
{
|
||||
if (m_traceLevel > 0)
|
||||
fprintf(stderr, "starting at epoch %zu counting lines to determine record count\n", m_epoch);
|
||||
fprintf(stderr, "starting at epoch %lu counting lines to determine record count\n", (unsigned long)m_epoch);
|
||||
m_parser.SetParseMode(ParseLineCount);
|
||||
m_totalSamples = m_parser.Parse(size_t(-1), NULL, NULL);
|
||||
m_parser.SetParseMode(ParseNormal);
|
||||
|
@ -556,14 +556,14 @@ void UCIFastReader<ElemType>::SetupEpoch()
|
|||
m_mbStartSample = 0;
|
||||
UpdateDataVariables(0); // update all the variables since we read to the end...
|
||||
if (m_traceLevel > 0)
|
||||
fprintf(stderr, "\n %zu records found\n", m_totalSamples);
|
||||
fprintf(stderr, "\n %lu records found\n", (unsigned long)m_totalSamples);
|
||||
}
|
||||
|
||||
// make sure we are in the correct location for mid-dataset epochs
|
||||
size_t mbStartSample = m_epoch * m_epochSize;
|
||||
|
||||
size_t fileRecord = m_totalSamples?mbStartSample % m_totalSamples:0;
|
||||
fprintf(stderr, "starting epoch %zu at record count %zu, and file position %zu\n", m_epoch, mbStartSample, fileRecord);
|
||||
fprintf(stderr, "starting epoch %lu at record count %lu, and file position %lu\n", (unsigned long)m_epoch, (unsigned long)mbStartSample, (unsigned long)fileRecord);
|
||||
size_t currentFileRecord = m_mbStartSample % m_totalSamples;
|
||||
|
||||
// reset the next read sample
|
||||
|
@ -604,7 +604,7 @@ void UCIFastReader<ElemType>::SetupEpoch()
|
|||
m_parser.SetFilePosition(0);
|
||||
currentFileRecord = 0;
|
||||
}
|
||||
fprintf(stderr, "reading from record %zu to %zu to be positioned properly for epoch\n", currentFileRecord, fileRecord);
|
||||
fprintf(stderr, "reading from record %lu to %lu to be positioned properly for epoch\n", (unsigned long)currentFileRecord, (unsigned long)fileRecord);
|
||||
m_parser.SetParseMode(ParseLineCount);
|
||||
m_parser.Parse(fileRecord-currentFileRecord, NULL, NULL);
|
||||
m_parser.SetParseMode(ParseNormal);
|
||||
|
|
The diff for this file is not shown because of its large size.
|
@ -82,7 +82,7 @@
|
|||
\papercolumns 1
|
||||
\papersides 1
|
||||
\paperpagestyle default
|
||||
\listings_params "basicstyle={\small},breaklines=true,frame=tb"
|
||||
\listings_params "basicstyle={\ttfamily \small},columns=fullflexible,breaklines=true,frame=tb"
|
||||
\tracking_changes false
|
||||
\output_changes false
|
||||
\html_math_output 0
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# configuration file for class based RNN training
|
||||
|
||||
# ppl=133.35
|
||||
ExpFolder=$ExpDir$
|
||||
ConfigFolder=$ConfigDir$
|
||||
DataFolder=$DataDir$
|
||||
|
@ -11,16 +11,27 @@ stderr=$ExpFolder$
|
|||
#command=test
|
||||
command=writeWordAndClassInfo:train:test
|
||||
#command=writeWordAndClassInfo
|
||||
|
||||
type=double
|
||||
|
||||
DEVICEID=-1
|
||||
numCPUThreads=4
|
||||
|
||||
VOCABSIZE=10000
|
||||
CLASSSIZE=50
|
||||
|
||||
TRAINFILE=ptb.train.cntk.txt
|
||||
VALIDFILE=ptb.valid.cntk.txt
|
||||
TESTFILE=ptb.test.cntk.txt
|
||||
|
||||
writeWordAndClassInfo=[
|
||||
action=writeWordAndClass
|
||||
inputFile=$DataFolder$\vocab.txt
|
||||
inputFile=$DataFolder$\$TRAINFILE$
|
||||
outputVocabFile=$DataFolder$\vocab.txt
|
||||
outputWord2Cls=$ExpFolder$\word2cls.txt
|
||||
outputCls2Index=$ExpFolder$\cls2idx.txt
|
||||
vocabSize=10000
|
||||
nbrClass=50
|
||||
vocabSize=$VOCABSIZE$
|
||||
nbrClass=$CLASSSIZE$
|
||||
cutoff=0
|
||||
printValues=true
|
||||
]
|
||||
|
||||
|
@ -34,10 +45,10 @@ dumpNodeInfo=[
|
|||
devtest=[action=devtest]
|
||||
|
||||
train=[
|
||||
action=trainRNN
|
||||
action=train
|
||||
minibatchSize=10
|
||||
traceLevel=1
|
||||
deviceId=Auto
|
||||
deviceId=$DEVICEID$
|
||||
epochSize=4430000
|
||||
# which is 886 * 5000
|
||||
recurrentLayer=1
|
||||
|
@ -55,23 +66,23 @@ train=[
|
|||
evalCriterion=classcrossentropywithsoftmax
|
||||
nodeType=Sigmoid
|
||||
initValueScale=6.0
|
||||
layerSizes=10000:200:10000
|
||||
layerSizes=$VOCABSIZE$:200:$VOCABSIZE$
|
||||
addPrior=false
|
||||
addDropoutNodes=false
|
||||
applyMeanVarNorm=false
|
||||
uniformInit=true;
|
||||
|
||||
# these are for the class information for class-based language modeling
|
||||
vocabSize=10000
|
||||
nbrClass=50
|
||||
vocabSize=$VOCABSIZE$
|
||||
nbrClass=$CLASSSIZE$
|
||||
]
|
||||
|
||||
# configuration file, base parameters
|
||||
SGD=[
|
||||
learningRatesPerSample=0.01
|
||||
learningRatesPerSample=0.1
|
||||
momentumPerMB=0
|
||||
gradientClippingWithTruncation=true
|
||||
clippingThresholdPerSample=10.0
|
||||
clippingThresholdPerSample=15.0
|
||||
maxEpochs=40
|
||||
unroll=false
|
||||
numMBsToShowResult=2000
|
||||
|
@ -86,7 +97,7 @@ train=[
|
|||
# auto learning rate adjustment
|
||||
autoAdjustLR=adjustafterepoch
|
||||
reduceLearnRateIfImproveLessThan=0.001
|
||||
continueReduce=true
|
||||
continueReduce=false
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
|
@ -120,9 +131,9 @@ train=[
|
|||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
#windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=10000
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\ptb.train.cntk.txt
|
||||
file=$DataFolder$\$TRAINFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
|
@ -143,7 +154,7 @@ train=[
|
|||
labelIn=[
|
||||
dim=1
|
||||
# vocabulary size
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
|
@ -174,7 +185,7 @@ train=[
|
|||
endSequence="O"
|
||||
|
||||
# vocabulary size
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
|
@ -218,9 +229,9 @@ train=[
|
|||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
#windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=10000
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\ptb.valid.cntk.txt
|
||||
file=$DataFolder$\$VALIDFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
|
@ -243,7 +254,7 @@ train=[
|
|||
dim=1
|
||||
|
||||
# vocabulary size
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
|
@ -273,7 +284,7 @@ train=[
|
|||
beginSequence="O"
|
||||
endSequence="O"
|
||||
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
|
@ -303,7 +314,7 @@ test=[
|
|||
minibatchSize=1
|
||||
# need to be small since models are updated for each minibatch
|
||||
traceLevel=1
|
||||
deviceId=Auto
|
||||
deviceId=$DEVICEID$
|
||||
epochSize=4430000
|
||||
# which is 886 * 5000
|
||||
recurrentLayer=1
|
||||
|
@ -335,9 +346,9 @@ test=[
|
|||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
# windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=10000
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\ptb.test.cntk.txt
|
||||
file=$DataFolder$\$TESTFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
|
@ -359,7 +370,7 @@ test=[
|
|||
dim=1
|
||||
|
||||
# vocabulary size
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
|
@ -390,7 +401,7 @@ test=[
|
|||
endSequence="O"
|
||||
|
||||
# vocabulary size
|
||||
labelDim=10000
|
||||
labelDim=$VOCABSIZE$
|
||||
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
|
@ -411,4 +422,4 @@ test=[
|
|||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
|
|
@ -0,0 +1,432 @@
|
|||
# configuration file for class based RNN training
|
||||
# final test PPL=122.54
|
||||
ExpFolder=$ExpDir$
|
||||
ConfigFolder=$ConfigDir$
|
||||
DataFolder=$DataDir$
|
||||
|
||||
stderr=$ExpFolder$
|
||||
numCPUThreads=4
|
||||
# command=dumpNodeInfo
|
||||
#command=train
|
||||
#command=test
|
||||
command=writeWordAndClassInfo:train:test
|
||||
command=train:test
|
||||
type=double
|
||||
|
||||
DEVICEID=-1
|
||||
|
||||
NOISE=100
|
||||
RATE=0.1
|
||||
VOCABSIZE=10000
|
||||
CLASSSIZE=50
|
||||
makeMode=true
|
||||
TRAINFILE=ptb.train.cntk.txt
|
||||
VALIDFILE=ptb.valid.cntk.txt
|
||||
TESTFILE=ptb.test.cntk.txt
|
||||
|
||||
#number of threads
|
||||
nthreads=4
|
||||
|
||||
writeWordAndClassInfo=[
|
||||
action=writeWordAndClass
|
||||
inputFile=$DataFolder$\$TRAINFILE$
|
||||
outputVocabFile=$DataFolder$\vocab.txt
|
||||
outputWord2Cls=$ExpFolder$\word2cls.txt
|
||||
outputCls2Index=$ExpFolder$\cls2idx.txt
|
||||
vocabSize=$VOCABSIZE$
|
||||
cutoff=0
|
||||
printValues=true
|
||||
]
|
||||
|
||||
dumpNodeInfo=[
|
||||
action=dumpnode
|
||||
modelPath=$ExpFolder$\modelRnnCNTK
|
||||
#nodeName=W0
|
||||
printValues=true
|
||||
]
|
||||
|
||||
devtest=[action=devtest]
|
||||
|
||||
train=[
|
||||
action=train
|
||||
minibatchSize=10
|
||||
traceLevel=1
|
||||
deviceId=$DEVICEID$
|
||||
epochSize=4430000
|
||||
# which is 886 * 5000
|
||||
recurrentLayer=1
|
||||
defaultHiddenActivity=0.1
|
||||
useValidation=true
|
||||
rnnType=NCELSTM
|
||||
#CLASSLSTM
|
||||
|
||||
# uncomment below and comment SimpleNetworkBuilder section to use NDL to train RNN LM
|
||||
# NDLNetworkBuilder=[
|
||||
# networkDescription=$ConfigFolder$\rnnlm.ndl
|
||||
# ]
|
||||
|
||||
SimpleNetworkBuilder=[
|
||||
trainingCriterion=NoiseContrastiveEstimationNode
|
||||
evalCriterion=NoiseContrastiveEstimationNode
|
||||
nodeType=Sigmoid
|
||||
initValueScale=6.0
|
||||
layerSizes=$VOCABSIZE$:200:$VOCABSIZE$
|
||||
addPrior=false
|
||||
addDropoutNodes=false
|
||||
applyMeanVarNorm=false
|
||||
uniformInit=true;
|
||||
|
||||
# these are for the class information for class-based language modeling
|
||||
vocabSize=$VOCABSIZE$
|
||||
#nbrClass=$CLASSSIZE$
|
||||
noise_number=$NOISE$
|
||||
]
|
||||
|
||||
# configuration file, base parameters
|
||||
SGD=[
|
||||
makeMode=true
|
||||
learningRatesPerSample=$RATE$
|
||||
momentumPerMB=0
|
||||
gradientClippingWithTruncation=true
|
||||
clippingThresholdPerSample=15.0
|
||||
maxEpochs=40
|
||||
unroll=false
|
||||
numMBsToShowResult=2000
|
||||
# gradUpdateType=AdaGrad
|
||||
gradUpdateType=None
|
||||
|
||||
modelPath=$ExpFolder$\modelRnnCNTK
|
||||
loadBestModel=true
|
||||
|
||||
# settings for Auto Adjust Learning Rate
|
||||
AutoAdjust=[
|
||||
# auto learning rate adjustment
|
||||
autoAdjustLR=adjustafterepoch
|
||||
reduceLearnRateIfImproveLessThan=0.001
|
||||
continueReduce=false
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
numMiniBatch4LRSearch=100
|
||||
numPrevLearnRates=5
|
||||
numBestSearchEpoch=1
|
||||
]
|
||||
|
||||
dropoutRate=0.0
|
||||
]
|
||||
|
||||
reader=[
|
||||
readerType=LMSequenceReader
|
||||
randomize=None
|
||||
nbruttsineachrecurrentiter=10
|
||||
|
||||
# word class info
|
||||
wordclass=$DataFolder$\vocab.txt
|
||||
noise_number=$NOISE$
|
||||
mode=nce
|
||||
# if writerType is set, we will cache to a binary file
|
||||
# if the binary file exists, we will use it instead of parsing this file
|
||||
# writerType=BinaryReader
|
||||
|
||||
#### write definition
|
||||
wfile=$ExpFolder$\sequenceSentence.bin
|
||||
#wsize - initial size of the file in MB
|
||||
# if calculated size would be bigger, that is used instead
|
||||
wsize=256
|
||||
|
||||
#wrecords - number of records we should allocate space for in the file
|
||||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
#windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\$TRAINFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
features=[
|
||||
# sentence has no features, so need to set dimension to zero
|
||||
dim=0
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
# sequence break table, list indexes into sequence records, so we know when a sequence starts/stops
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
#labels sections
|
||||
labelIn=[
|
||||
dim=1
|
||||
# vocabulary size
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
endSequence="</s>"
|
||||
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=11
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
#labels sections
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=NextWord
|
||||
beginSequence="O"
|
||||
endSequence="O"
|
||||
|
||||
# vocabulary size
|
||||
labelDim=$VOCABSIZE$
|
||||
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=3
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=3
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
cvReader=[
|
||||
# reader to use
|
||||
readerType=LMSequenceReader
|
||||
randomize=None
|
||||
mode=softmax
|
||||
# word class info
|
||||
wordclass=$DataFolder$\vocab.txt
|
||||
|
||||
# if writerType is set, we will cache to a binary file
|
||||
# if the binary file exists, we will use it instead of parsing this file
|
||||
# writerType=BinaryReader
|
||||
|
||||
#### write definition
|
||||
wfile=$ExpFolder$\sequenceSentence.valid.bin
|
||||
#wsize - initial size of the file in MB
|
||||
# if calculated size would be bigger, that is used instead
|
||||
wsize=256
|
||||
|
||||
#wrecords - number of records we should allocate space for in the file
|
||||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
#windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\$VALIDFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
features=[
|
||||
# sentence has no features, so need to set dimension to zero
|
||||
dim=0
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
# sequence break table, list indexes into sequence records, so we know when a sequence starts/stops
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
#labels sections
|
||||
# it should be the same as that in the training set
|
||||
labelIn=[
|
||||
dim=1
|
||||
|
||||
# vocabulary size
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
endSequence="</s>"
|
||||
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=11
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
#labels sections
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=NextWord
|
||||
beginSequence="O"
|
||||
endSequence="O"
|
||||
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=3
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=3
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
test=[
|
||||
action=eval
|
||||
|
||||
# corresponds to the number of words/characters to train in a minibatch
|
||||
minibatchSize=1
|
||||
# need to be small since models are updated for each minibatch
|
||||
traceLevel=1
|
||||
deviceId=$DEVICEID$
|
||||
epochSize=4430000
|
||||
# which is 886 * 5000
|
||||
recurrentLayer=1
|
||||
defaultHiddenActivity=0.1
|
||||
useValidation=true
|
||||
rnnType=NCELSTM
|
||||
|
||||
modelPath=$ExpFolder$\modelRnnCNTK
|
||||
|
||||
reader=[
|
||||
# reader to use
|
||||
readerType=LMSequenceReader
|
||||
randomize=None
|
||||
mode=softmax
|
||||
# word class info
|
||||
wordclass=$DataFolder$\vocab.txt
|
||||
|
||||
# if writerType is set, we will cache to a binary file
|
||||
# if the binary file exists, we will use it instead of parsing this file
|
||||
# writerType=BinaryReader
|
||||
|
||||
#### write definition
|
||||
wfile=$ExpFolder$\sequenceSentence.bin
|
||||
#wsize - initial size of the file in MB
|
||||
# if calculated size would be bigger, that is used instead
|
||||
wsize=256
|
||||
|
||||
# wrecords - number of records we should allocate space for in the file
|
||||
# files cannot be expanded, so this should be large enough. If known modify this element in config before creating file
|
||||
wrecords=1000
|
||||
# windowSize - number of records we should include in BinaryWriter window
|
||||
windowSize=$VOCABSIZE$
|
||||
|
||||
file=$DataFolder$\$TESTFILE$
|
||||
|
||||
#additional features sections
|
||||
#for now store as expanded category data (including label in)
|
||||
features=[
|
||||
# sentence has no features, so need to set dimension to zero
|
||||
dim=0
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
# sequence break table, list indexes into sequence records, so we know when a sequence starts/stops
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
### write definition
|
||||
sectionType=data
|
||||
]
|
||||
#labels sections
|
||||
labelIn=[
|
||||
dim=1
|
||||
|
||||
# vocabulary size
|
||||
labelDim=$VOCABSIZE$
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="</s>"
|
||||
endSequence="</s>"
|
||||
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=11
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
#labels sections
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=NextWord
|
||||
beginSequence="O"
|
||||
endSequence="O"
|
||||
|
||||
# vocabulary size
|
||||
labelDim=$VOCABSIZE$
|
||||
|
||||
labelMappingFile=$ExpFolder$\sentenceLabels.out.txt
|
||||
#### Write definition ####
|
||||
# sizeof(unsigned) which is the label index type
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
#redefine number of records for this section, since we don't need to save it for each data record
|
||||
wrecords=3
|
||||
#variable size so use an average string size
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=3
|
||||
#elementSize=sizeof(ElemType) is default
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
Binary file not shown.
|
@ -547,30 +547,30 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
string outputVocabFile = config("outputVocabFile");
|
||||
string outputCls2Index = config("outputCls2Index");
|
||||
size_t vocabSize = config("vocabSize");
|
||||
size_t nbrCls = config("nbrClass");
|
||||
int nbrCls = config("nbrClass", "0");
|
||||
int cutoff = config("cutoff", "1");
|
||||
|
||||
DEVICEID_TYPE deviceId = CPUDEVICE;
|
||||
Matrix<ElemType> wrd2cls(deviceId);
|
||||
Matrix<ElemType> cls2idx(deviceId);
|
||||
|
||||
FILE *fp = fopen(inputFile.c_str(), "rt");
|
||||
if (fp == nullptr)
|
||||
//FILE *fp = fopen(inputFile.c_str(), "rt");
|
||||
ifstream fp(inputFile.c_str());
|
||||
if (!fp)
|
||||
RuntimeError("inputFile cannot be read");
|
||||
|
||||
if (nbrCls > 0)
|
||||
cls2idx.Resize(nbrCls, 1);
|
||||
std::unordered_map<string, double> v_count;
|
||||
|
||||
/// get line
|
||||
char ch2[2048];
|
||||
string str;
|
||||
vector<string> vstr;
|
||||
long long prevClsIdx = -1;
|
||||
string token;
|
||||
while (fgets(ch2, 2048, fp) != nullptr)
|
||||
while (getline(fp, str))
|
||||
{
|
||||
str = ch2;
|
||||
str = trim(str);
|
||||
str.erase(0, str.find_first_not_of(' ')); //leading spaces
|
||||
str.erase(str.find_last_not_of(' ') + 1); //trailing spaces
|
||||
int sposition = str.find("</s> ");
|
||||
int eposition = str.find(" </s>");
|
||||
if (sposition == str.npos)
|
||||
|
@ -581,7 +581,7 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
for (int i = 1; i < vstr.size(); i++)
|
||||
v_count[vstr[i]]++;
|
||||
}
|
||||
fclose(fp);
|
||||
fp.close();
|
||||
|
||||
std::cerr << "no truncated vocabulary: " << v_count.size() << std::endl;
|
||||
|
||||
|
@ -602,7 +602,7 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
if (iter->second <= cutoff)
|
||||
wordCountLessCutoff--;
|
||||
if (wordCountLessCutoff <= 0)
|
||||
RuntimeError("no word remained after cutoff\n");
|
||||
throw std::runtime_error("no word remained after cutoff");
|
||||
|
||||
if (vocabSize > wordCountLessCutoff)
|
||||
{
|
||||
|
@ -644,11 +644,14 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
m_count.resize(removed.size());
|
||||
double total = 0;
|
||||
double dd = 0;
|
||||
|
||||
if (nbrCls > 0)
|
||||
{
|
||||
for (std::unordered_map<std::string, double>::iterator iter = removed.begin(); iter != removed.end(); iter++)
|
||||
total += iter->second;
|
||||
for (std::unordered_map<std::string, double>::iterator iter = removed.begin(); iter != removed.end(); iter++)
|
||||
dd += sqrt(iter->second / total);
|
||||
}
|
||||
|
||||
double df = 0;
|
||||
size_t class_id = 0;
|
||||
m_class.resize(p.size());
|
||||
|
@ -657,11 +660,14 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
{
|
||||
std::string word = p.top().first;
|
||||
double freq = p.top().second;
|
||||
if (nbrCls > 0)
|
||||
{
|
||||
df += sqrt(freq / total) / dd;
|
||||
if (df > 1)
|
||||
df = 1;
|
||||
if (df > 1.0 * (class_id + 1) / nbrCls && class_id < nbrCls)
|
||||
class_id++;
|
||||
}
|
||||
|
||||
size_t wid = m_words.size();
|
||||
bool inserted = m_index.insert(make_pair(word, wid)).second;
|
||||
|
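The loop above (continuing into the next hunk) assigns each word to a class so that classes cover roughly equal shares of the cumulative sqrt-frequency mass, a standard trick for class-based language models. A toy standalone sketch of that assignment with hypothetical counts, not the CNTK code:

    #include <cmath>
    #include <cstdio>
    #include <string>
    #include <utility>
    #include <vector>

    int main()
    {
        // Toy word counts in descending order (the tool derives these from the training text).
        std::vector<std::pair<std::string, double>> counts =
            { { "the", 500 }, { "of", 300 }, { "cat", 50 }, { "dog", 40 }, { "zebra", 5 } };
        const int nbrCls = 2;

        double total = 0, dd = 0;
        for (const auto& wc : counts) total += wc.second;
        for (const auto& wc : counts) dd += std::sqrt(wc.second / total);

        double df = 0;
        int class_id = 0;
        for (const auto& wc : counts)
        {
            df += std::sqrt(wc.second / total) / dd; // cumulative sqrt-frequency share in [0,1]
            if (df > 1.0) df = 1.0;
            if (df > 1.0 * (class_id + 1) / nbrCls && class_id < nbrCls - 1)
                class_id++; // advance to the next class once this class's share is filled
            printf("%-6s -> class %d\n", wc.first.c_str(), class_id);
        }
        return 0;
    }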
@ -669,40 +675,43 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
|
|||
m_words.push_back(word);
|
||||
|
||||
m_count[wid] = freq;
|
||||
if (nbrCls > 0)
|
||||
m_class[wid] = class_id;
|
||||
p.pop();
|
||||
}
|
||||
|
||||
std::ofstream ofvocab;
|
||||
ofvocab.open(outputVocabFile.c_str());
|
||||
for (size_t i = 0; i < m_index.size(); i++)
|
||||
{
|
||||
if (nbrCls > 0)
|
||||
wrd2cls(i, 0) = (ElemType)m_class[i];
|
||||
long long clsIdx = m_class[i];
|
||||
if (clsIdx != prevClsIdx)
|
||||
long long clsIdx = nbrCls > 0 ? m_class[i] : 0;
|
||||
if (nbrCls > 0 && clsIdx != prevClsIdx)
|
||||
{
|
||||
cls2idx(clsIdx, 0) = (ElemType)i; /// the left boundary of clsIdx
|
||||
prevClsIdx = m_class[i];
|
||||
}
|
||||
ofvocab << " " << i << "\t " << m_count[i] << "\t" << m_words[i] << "\t" << m_class[i] << std::endl;
|
||||
ofvocab << " " << i << "\t " << m_count[i] << "\t" << m_words[i] << "\t" << clsIdx << std::endl;
|
||||
}
|
||||
ofvocab.close();
|
||||
|
||||
if (nbrCls > 0)
|
||||
{
|
||||
/// write the outputs
|
||||
msra::files::make_intermediate_dirs(s2ws(outputWord2Cls));
|
||||
fp = fopen(outputWord2Cls.c_str(), "wt");
|
||||
if (fp == nullptr)
|
||||
ofstream ofp(outputWord2Cls.c_str());
|
||||
if (!ofp)
|
||||
RuntimeError("cannot write to %s", outputWord2Cls.c_str());
|
||||
|
||||
for (size_t r = 0; r < wrd2cls.GetNumRows(); r++)
|
||||
fprintf(fp, "%d\n", (int)wrd2cls(r, 0));
|
||||
fclose(fp);
|
||||
ofp << (int)wrd2cls(r, 0) << endl;
|
||||
ofp.close();
|
||||
|
||||
fp = fopen(outputCls2Index.c_str(), "wt");
|
||||
if (fp == nullptr)
|
||||
ofp.open(outputCls2Index.c_str());
|
||||
if (!ofp)
|
||||
RuntimeError("cannot write to %s", outputCls2Index.c_str());
|
||||
for (size_t r = 0; r < cls2idx.GetNumRows(); r++)
|
||||
fprintf(fp, "%d\n", (int)cls2idx(r, 0));
|
||||
fclose(fp);
|
||||
ofp << (int)cls2idx(r, 0) << endl;
|
||||
ofp.close();
|
||||
}
|
||||
}
|
||||
|
||||
template <typename ElemType>
|
||||
|
@ -1299,10 +1308,11 @@ int wmain(int argc, wchar_t* argv[])
|
|||
fprintf(fp, "successfully finished at %s on %s\n", TimeDateStamp().c_str(), GetHostName().c_str());
|
||||
fcloseOrDie(fp);
|
||||
}
|
||||
fprintf(stderr, "COMPLETED\n");
|
||||
}
|
||||
catch (const std::exception &err)
|
||||
{
|
||||
fprintf(stderr, "EXCEPTION occurred: %s", err.what());
|
||||
fprintf(stderr, "EXCEPTION occurred: %s\n", err.what());
|
||||
#ifdef _DEBUG
|
||||
DebugBreak();
|
||||
#endif
|
||||
|
|
|
@ -992,7 +992,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
trainSetDataReader->StartMinibatchLoop(m_mbSize[epochNumber], epochNumber, m_epochSize);
|
||||
|
||||
startReadMBTime=Timer::MilliSecondElapsed();
|
||||
startReadMBTime=Timer::MilliSecondElapsed();
|
||||
while (trainSetDataReader->GetMinibatch(inputMatrices))
|
||||
{
|
||||
#ifdef MPI_SUPPORT
|
||||
|
|
|
@ -516,17 +516,17 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (layerType == "perceptron")
|
||||
{
|
||||
fprintf(stderr, "DBN: Reading (%zu x %zu) perceptron\n", wts.GetNumRows(), wts.GetNumCols());
|
||||
fprintf(stderr, "DBN: Reading (%lu x %lu) perceptron\n", (unsigned long)wts.GetNumRows(), (unsigned long)wts.GetNumCols());
|
||||
output = m_net->Plus(m_net->Times(w, input, nameOfTimes), b, nameOfPlus);
|
||||
}
|
||||
else if (layerType == "rbmisalinearbernoulli" )
|
||||
{
|
||||
fprintf(stderr, "DBN: Reading (%zu x %zu) linear layer\n", wts.GetNumRows(), wts.GetNumCols());
|
||||
fprintf(stderr, "DBN: Reading (%lu x %lu) linear layer\n", (unsigned long)wts.GetNumRows(), (unsigned long)wts.GetNumCols());
|
||||
output = m_net->Plus(m_net->Times(w, input, nameOfTimes), b, nameOfPlus);
|
||||
}
|
||||
else // assume rbmbernoullibernoulli
|
||||
{
|
||||
fprintf(stderr, "DBN: Reading (%zu x %zu) non-linear layer\n", wts.GetNumRows(), wts.GetNumCols());
|
||||
fprintf(stderr, "DBN: Reading (%lu x %lu) non-linear layer\n", (unsigned long)wts.GetNumRows(), (unsigned long)wts.GetNumCols());
|
||||
output = ApplyNonlinearFunction(m_net->Plus(m_net->Times(w, input, nameOfTimes), b, nameOfPlus), i, nameOfH);
|
||||
if (m_addDropoutNodes)
|
||||
input = m_net->Dropout(output, L"Drop" + nameOfH);
|
||||
|
@ -651,24 +651,24 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
switch (m_trainCriterion)
|
||||
{
|
||||
case TrainingCriterion::CrossEntropyWithSoftmax:
|
||||
output = m_net->CrossEntropyWithSoftmax(label, tinput, (trainNodeName == L"")?L"CrossEntropyWithSoftmax":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::SquareError:
|
||||
output = m_net->SquareError(label, tinput, (trainNodeName == L"")?L"SquareError":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::CrossEntropyWithSoftmax:
|
||||
output = m_net->CrossEntropyWithSoftmax(label, tinput, (trainNodeName == L"")?L"CrossEntropyWithSoftmax":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::SquareError:
|
||||
output = m_net->SquareError(label, tinput, (trainNodeName == L"")?L"SquareError":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::CRF:
|
||||
assert(trans != nullptr);
|
||||
output = m_net->CRF(label, input, trans, (trainNodeName == L"") ? L"CRF" : trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::ClassCrossEntropyWithSoftmax:
|
||||
output = m_net->ClassCrossEntropyWithSoftmax(label, input, matrix, clspostprob, (trainNodeName == L"")?L"ClassCrossEntropyWithSoftmax":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::NCECrossEntropyWithSoftmax:
|
||||
output = m_net->NoiseContrastiveEstimation(label, input, matrix, clspostprob, (trainNodeName == L"") ? L"NoiseContrastiveEstimationNode" : trainNodeName);
|
||||
break;
|
||||
default:
|
||||
throw std::logic_error("Unsupported training criterion.");
|
||||
case TrainingCriterion::ClassCrossEntropyWithSoftmax:
|
||||
output = m_net->ClassCrossEntropyWithSoftmax(label, input, matrix, clspostprob, (trainNodeName == L"")?L"ClassCrossEntropyWithSoftmax":trainNodeName);
|
||||
break;
|
||||
case TrainingCriterion::NCECrossEntropyWithSoftmax:
|
||||
output = m_net->NoiseContrastiveEstimation(label, input, matrix, clspostprob, (trainNodeName == L"") ? L"NoiseContrastiveEstimationNode" : trainNodeName);
|
||||
break;
|
||||
default:
|
||||
throw std::logic_error("Unsupported training criterion.");
|
||||
}
|
||||
m_net->FinalCriterionNodes().push_back(output);
|
||||
|
||||
|
@ -690,14 +690,14 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
case EvalCriterion::SquareError:
|
||||
output = m_net->SquareError(label, tinput, (evalNodeName == L"")?L"SquareError":evalNodeName);
|
||||
break;
|
||||
case EvalCriterion::ErrorPrediction:
|
||||
case EvalCriterion::ErrorPrediction:
|
||||
output = m_net->ErrorPrediction(label, tinput, (evalNodeName == L"") ? L"ErrorPrediction" : evalNodeName);
|
||||
break;
|
||||
case EvalCriterion::CRF:
|
||||
assert(trans != nullptr);
|
||||
output = m_net->CRF(label, tinput, trans, (evalNodeName == L"") ? L"CRF" : evalNodeName);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
default:
|
||||
throw std::logic_error("Unsupported training criterion.");
|
||||
}
|
||||
}
|
||||
|
@ -797,7 +797,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
ElemType m_forgetGateInitVal;
|
||||
ElemType m_inputGateInitVal;
|
||||
ElemType m_outputGateInitVal;
|
||||
|
||||
|
||||
intargvector m_streamSizes; /// for multiple stream data
|
||||
intargvector m_lookupTabelOrderSizes; /// each stream has its own projection, so need to provide with the lookup table order size for each stream
|
||||
|
||||
|
|
|
@ -887,7 +887,8 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
MoveMatricesToDevice(deviceId);
|
||||
InitRecurrentNode();
|
||||
m_evalMode = xm_evalMode;
|
||||
}
|
||||
|
||||
}
|
||||
NCEEvalMode &EvalMode(){ return m_evalMode; }
|
||||
|
||||
virtual void SaveToFile(File& fstream) const
|
||||
|
@ -912,9 +913,9 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
: ComputationNode<ElemType>(deviceId), m_logSoftmax(deviceId),
|
||||
m_softMax(deviceId), m_grdToSoftMaxInput(deviceId), m_ncePrediction(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
LoadFromFile(fstream, modelVersion, deviceId);
|
||||
}
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
LoadFromFile(fstream, modelVersion, deviceId);
|
||||
}
|
||||
|
||||
virtual const std::wstring OperationName() const { return TypeName(); }
|
||||
static const std::wstring TypeName() { return L"NCEBasedCrossEntropyWithSoftmax"; }
|
||||
|
@ -971,8 +972,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
FunctionValues().SetValue(0);
|
||||
for (int i = 0; i < Inputs(0)->FunctionValues().GetNumCols(); i++)
|
||||
FunctionValues()(0, 0) -= m_logSoftmax(i, (size_t)Inputs(0)->FunctionValues()(0, i));
|
||||
ElemType val = FunctionValues()(0, 0);
|
||||
val *= 1;
|
||||
}
|
||||
else if (m_evalMode == NCEEvalMode::Unnormalized)
|
||||
{
|
||||
|
@ -1339,7 +1338,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
/// add the class log posterior probability
|
||||
try{
|
||||
Matrix<ElemType>::AddElementToElement(clsLogSoftmax, c_t, t, functionValues, 0, 0);
|
||||
Matrix<ElemType>::AddElementToElement(clsLogSoftmax, c_t, t, functionValues, 0, 0);
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
|
|
|
@ -3805,7 +3805,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
template<class ElemType>
|
||||
void CPUMatrix<ElemType>::AssignNCEUnnormalizedEval(const CPUMatrix<ElemType>& a,
|
||||
const CPUMatrix<ElemType>& b, CPUMatrix<ElemType>& c)
|
||||
const CPUMatrix<ElemType>& b, const CPUMatrix<ElemType>& bias, CPUMatrix<ElemType>& c)
|
||||
//this: samples+probs
|
||||
// a: hidden
|
||||
// b: embedding
|
||||
|
@ -3813,28 +3813,17 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// c: loglikelihood
|
||||
{
|
||||
ElemType log_likelihood = 0.0;
|
||||
size_t sample_size = this->GetNumRows() / 2;
|
||||
size_t batch_size = this->GetNumCols();
|
||||
size_t num_noise_samples = sample_size - 1;
|
||||
ElemType log_num_noise_samples = (ElemType)std::log(num_noise_samples);
|
||||
#pragma omp parallel for reduction(+:log_likelihood)
|
||||
for (int instance_id = 0; instance_id < batch_size; instance_id++)
|
||||
for (int sample_id = 0; sample_id < sample_size; sample_id++)
|
||||
{
|
||||
int sample =(int) (*this)(2 * sample_id, instance_id);
|
||||
ElemType prob = -(*this)(2 * sample_id + 1, instance_id);
|
||||
if (sample_id == 0)
|
||||
prob = -prob;
|
||||
double score = 0;// a[sample];
|
||||
for (int dim = 0; dim < b.GetNumCols(); dim++)
|
||||
score += a(sample, dim)* b(dim, instance_id);
|
||||
double score_noise = log_num_noise_samples + prob;
|
||||
double z = logadd(score, score_noise);
|
||||
double logprob = score - z;
|
||||
double logprob_noise = score_noise - z;
|
||||
log_likelihood += sample_id == 0 ? (ElemType)logprob : (ElemType)logprob_noise;
|
||||
{
|
||||
int sample = -(int)(*this)(0, instance_id);
|
||||
ElemType score = bias(sample, 0);
|
||||
for (int dim = 0; dim < b.GetNumRows(); dim++)
|
||||
score += b(dim, sample)* a(dim, instance_id);
|
||||
log_likelihood += score;
|
||||
}
|
||||
c(0, 0) = log_likelihood;
|
||||
c(0, 0) = -log_likelihood;
|
||||
}
|
||||
|
||||
//samples+prob gradient hidden embedding embedding/hidden
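In equation form, the rewritten AssignNCEUnnormalizedEval above computes, following the comments (a = hidden activations, b = embedding, bias = per-word bias; w_i is the word index stored, negated, in row 0 of this matrix for column i) roughly:

\[
s_i = \mathrm{bias}(w_i, 0) + \sum_{d} b(d, w_i)\, a(d, i),
\qquad
c(0,0) = -\sum_{i=1}^{\text{batch\_size}} s_i .
\]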
|
||||
|
|
|
@ -218,7 +218,8 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
void AssignNoiseContrastiveEstimation(const CPUMatrix<ElemType>& a, const CPUMatrix<ElemType>& b, const CPUMatrix<ElemType>& bias,
|
||||
size_t sampleCount, CPUMatrix<ElemType>& tmp, CPUMatrix<ElemType>& c);
|
||||
|
||||
void AssignNCEUnnormalizedEval(const CPUMatrix<ElemType>& a, const CPUMatrix<ElemType>& b, CPUMatrix<ElemType>& c);
|
||||
void AssignNCEUnnormalizedEval(const CPUMatrix<ElemType>& a,
|
||||
const CPUMatrix<ElemType>& b, const CPUMatrix<ElemType>& bias, CPUMatrix<ElemType>& c);
|
||||
|
||||
CPUMatrix<ElemType>& AssignNCEDerivative(const CPUMatrix<ElemType>& tmp, const CPUMatrix<ElemType>& a, const CPUMatrix<ElemType>& b, size_t inputIndex, CPUMatrix<ElemType>& c);
|
||||
|
||||
|
|
|
@ -743,7 +743,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (GetMatrixType() == MatrixType::DENSE && m_CPUMatrix != nullptr)
|
||||
{
|
||||
m_CPUSparseMatrix->Resize(GetNumRows(), GetNumCols());
|
||||
m_CPUSparseMatrix->Resize(GetNumRows(), GetNumCols(), 1, true, false);
|
||||
if (keepValues)
|
||||
CopyElementsFromDenseToSparse(*m_CPUMatrix, *m_CPUSparseMatrix);
|
||||
}
|
||||
|
@ -788,7 +788,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (keepValues)
|
||||
m_GPUSparseMatrix->SetValue(*m_GPUMatrix);
|
||||
else
|
||||
m_GPUSparseMatrix->Resize(m_GPUMatrix->GetNumRows(), m_GPUMatrix->GetNumCols());
|
||||
m_GPUSparseMatrix->Resize(m_GPUMatrix->GetNumRows(), m_GPUMatrix->GetNumCols(), 0, true, false);
|
||||
}
|
||||
|
||||
delete m_GPUMatrix;
|
||||
|
@ -979,16 +979,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (this == &deepCopyFrom)
|
||||
return;
|
||||
|
||||
if (this->GetDeviceId() != CPUDEVICE && (this->GetMatrixType() == MatrixType::SPARSE) &&
|
||||
deepCopyFrom.GetMatrixType() == MatrixType::SPARSE)
|
||||
{
|
||||
if (deepCopyFrom.GetDeviceId() == CPUDEVICE)
|
||||
this->m_GPUSparseMatrix->SetValue(*deepCopyFrom.m_CPUSparseMatrix);
|
||||
else
|
||||
this->m_GPUSparseMatrix->SetValue(*deepCopyFrom.m_GPUSparseMatrix);
|
||||
return;
|
||||
}
|
||||
|
||||
this->m_preferredDeviceId = deepCopyFrom.m_preferredDeviceId;
|
||||
DecideAndMoveToRightDevice(deepCopyFrom, *this);
|
||||
this->SwitchToMatrixType(deepCopyFrom.GetMatrixType(), format, false);
|
||||
|
@ -1234,14 +1224,14 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
if (numRows != GetNumRows() || numCols != GetNumCols())
|
||||
{
|
||||
DISPATCH_MATRIX_ON_FLAG(this,
|
||||
this,
|
||||
DISPATCH_MATRIX_ON_FLAG(this,
|
||||
this,
|
||||
m_CPUMatrix->Reshape(numRows, numCols),
|
||||
m_GPUMatrix->Reshape(numRows, numCols),
|
||||
NOT_IMPLEMENTED,
|
||||
NOT_IMPLEMENTED
|
||||
);
|
||||
}
|
||||
NOT_IMPLEMENTED,
|
||||
NOT_IMPLEMENTED
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
template<class ElemType>
|
||||
|
@ -3255,7 +3245,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
#define NUM_DEVICE_CHANGED_WARN 20
|
||||
m_numTimesDeviceChanged++;
|
||||
if (m_numTimesDeviceChanged == NUM_DEVICE_CHANGED_WARN)
|
||||
fprintf(stderr, "WARNING: The same matrix with dim [%d, %d] has been transferred between different devices for %d times.\n", GetNumRows(), GetNumCols(), NUM_DEVICE_CHANGED_WARN);
|
||||
fprintf(stderr, "WARNING: The same matrix with dim [%lu, %lu] has been transferred between different devices for %d times.\n", (unsigned long)GetNumRows(), (unsigned long)GetNumCols(), NUM_DEVICE_CHANGED_WARN);
|
||||
|
||||
if (m_matrixType == MatrixType::SPARSE)
|
||||
{
|
||||
|
@ -3560,14 +3550,14 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return *this;
|
||||
}
|
||||
template<class ElemType>
|
||||
Matrix<ElemType>& Matrix<ElemType>::AssignNceUnnormalizedEval(const Matrix<ElemType>& a, const Matrix<ElemType>& b, const Matrix<ElemType>& c)
|
||||
Matrix<ElemType>& Matrix<ElemType>::AssignNceUnnormalizedEval(const Matrix<ElemType>& a, const Matrix<ElemType>& b, const Matrix<ElemType>& c, const Matrix<ElemType>& bias)
|
||||
{
|
||||
if (a.GetMatrixType() != MatrixType::SPARSE)
|
||||
NOT_IMPLEMENTED;
|
||||
//if (a.GetMatrixType() != MatrixType::SPARSE)
|
||||
// NOT_IMPLEMENTED;
|
||||
|
||||
this->Resize(1, 1);
|
||||
if (this->GetDeviceId() < 0)
|
||||
a.m_CPUMatrix->AssignNCEUnnormalizedEval(*b.m_CPUMatrix, *c.m_CPUMatrix, *this->m_CPUMatrix);
|
||||
a.m_CPUMatrix->AssignNCEUnnormalizedEval(*b.m_CPUMatrix, *c.m_CPUMatrix, *bias.m_CPUMatrix, *this->m_CPUMatrix);
|
||||
else
|
||||
a.m_GPUMatrix->AssignNCEUnnormalizedEval(*b.m_GPUMatrix, *c.m_GPUMatrix, *this->m_GPUMatrix);
|
||||
return *this;
|
||||
|
@ -4250,7 +4240,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
template<class ElemType>
|
||||
bool Matrix<ElemType>::HasElement(const Matrix<ElemType>& a, const ElemType value)
|
||||
{
|
||||
|
@ -4369,7 +4359,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return *this;
|
||||
}
|
||||
|
||||
template<class ElemType>
|
||||
template<class ElemType>
|
||||
Matrix<ElemType>& Matrix<ElemType>::AssignElementProductOfWithShiftNeg(const Matrix<ElemType>& a, const Matrix<ElemType>& b, size_t shift, size_t negnumber)
|
||||
{
|
||||
if (a.IsEmpty() || b.IsEmpty())
|
||||
|
|
|
@ -148,7 +148,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
Matrix<ElemType>& AssignNCEDerivative(const Matrix<ElemType>& tmp, const Matrix<ElemType>& a, const Matrix<ElemType>& b, const Matrix<ElemType>& c, size_t inputIndex);
|
||||
|
||||
Matrix<ElemType>& AssignNceUnnormalizedEval(const Matrix<ElemType>& a, const Matrix<ElemType>& b, const Matrix<ElemType>& c);
|
||||
Matrix<ElemType>& AssignNceUnnormalizedEval(const Matrix<ElemType>& a, const Matrix<ElemType>& b, const Matrix<ElemType>& c, const Matrix<ElemType>& bias);
|
||||
|
||||
Matrix<ElemType> Transpose(); // This method doesn't change state of Matrix. It should be a const function
|
||||
Matrix<ElemType>& AssignTransposeOf (const Matrix<ElemType>& a);
|
||||
|
|
|
@ -724,22 +724,6 @@ namespace Microsoft {
|
|||
return (*this);
|
||||
}
|
||||
|
||||
template<class ElemType>
|
||||
ElemType GPUMatrix<ElemType>::LogAddSumOfElements() const { return -1; };
|
||||
|
||||
template<class ElemType> void GPUMatrix<ElemType>::RCRFBackwardCompute(
|
||||
const GPUMatrix<ElemType>& alpha, GPUMatrix<ElemType>& beta,
|
||||
const GPUMatrix<ElemType>& lbls,
|
||||
const GPUMatrix<ElemType>& pos_scores, const GPUMatrix<ElemType>& pair_scores, const int shift = 1) {};
|
||||
|
||||
template<class ElemType> void GPUMatrix<ElemType>::RCRFTransGrdCompute(const GPUMatrix<ElemType>& lbls,
|
||||
const GPUMatrix<ElemType>& alpha,
|
||||
const GPUMatrix<ElemType>& beta,
|
||||
const GPUMatrix<ElemType>& pair_scores,
|
||||
GPUMatrix<ElemType>& grd,
|
||||
const int startLbl, /// the time 0 start symbol in the output layer
|
||||
const int shift) {};
|
||||
|
||||
template<class ElemType>
|
||||
GPUMatrix<ElemType>& GPUMatrix<ElemType>::AssignElementProductOfWithShiftNeg(const GPUMatrix<ElemType>& a, const GPUMatrix<ElemType>& b, const size_t shift, const size_t nt)
|
||||
{
|
||||
|
|
|
@ -0,0 +1,141 @@
|
|||
<None Include="..\ExampleSetups\ASR\TIMIT\config\add_layer.mel"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\ae.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\classify.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\create_1layer.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\default_macros.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\globals.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\lstm.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_fbank_mfcc.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\mtl_senones_dr.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\PAC-RNN.ndl"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\README.txt"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_AdaptLearnRate.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_EvalSimpleNetwork.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainAutoEncoder.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainLSTM.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiInput.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainMultiTask.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainNDLNetwork.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainSimpleNetwork.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_TrainWithPreTrain.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteBottleneck.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\config\TIMIT_WriteScaledLogLike.config"><Filter>ExampleSetups\ASR\TIMIT\config</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\LSTM_ExpectedOutputs.txt"><Filter>ExampleSetups\ASR\TIMIT\CPU</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_DNN.config"><Filter>ExampleSetups\ASR\TIMIT\CPU</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\CPU\TIMIT_LSTM.config"><Filter>ExampleSetups\ASR\TIMIT\CPU</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.bigram.arpa"><Filter>ExampleSetups\ASR\TIMIT\decoding</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.dnn_map"><Filter>ExampleSetups\ASR\TIMIT\decoding</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.lookahead"><Filter>ExampleSetups\ASR\TIMIT\decoding</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.tfsa"><Filter>ExampleSetups\ASR\TIMIT\decoding</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\decoding\TIMIT.transitions"><Filter>ExampleSetups\ASR\TIMIT\decoding</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\DNN_ExpectedOutputs.txt"><Filter>ExampleSetups\ASR\TIMIT\GPU</Filter></None>
|
||||
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\LSTM_ExpectedOutputs.txt"><Filter>ExampleSetups\ASR\TIMIT\GPU</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_DNN.config"><Filter>ExampleSetups\ASR\TIMIT\GPU</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\GPU\TIMIT_LSTM.config"><Filter>ExampleSetups\ASR\TIMIT\GPU</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\cf\CF.fbank24_zda"><Filter>ExampleSetups\ASR\TIMIT\lib\cf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_dr_mlf.pl"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\make_mlf_cntk.pl"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_cistate.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.core.align_dr.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_cistate.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.dev.align_dr.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.statelist"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_cistate.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\mlf\TIMIT.train.align_dr.mlf.cntk"><Filter>ExampleSetups\ASR\TIMIT\lib\mlf</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.bottleneck.fullpath.20"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.fbank.fullpath.20"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.core.scp.scaledloglike.fullpath.20"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.dev.scp.fbank.fullpath.rnn.20"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.100"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.fbank.fullpath.rnn.100"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\ASR\TIMIT\lib\scp\TIMIT.train.scp.mfcc.fullpath.100"><Filter>ExampleSetups\ASR\TIMIT\lib\scp</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\ExpectedResults.log"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\global.config"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.config"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlm.gpu.nce.config.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\lstmlmconfig.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\perplexity.nce100.lr0.1.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.test.cntk.100.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.train.cntk.100.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\LSTMLM\ptb.valid.cntk.100.txt"><Filter>ExampleSetups\LM\LSTMLM</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\CPU\ExpectedOutputs.txt"><Filter>ExampleSetups\LM\RNNLM\CPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\CPU\fnnlm.config"><Filter>ExampleSetups\LM\RNNLM\CPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\CPU\global.config"><Filter>ExampleSetups\LM\RNNLM\CPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\CPU\rnnlm.config"><Filter>ExampleSetups\LM\RNNLM\CPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\GPU\ExpectedOutputs.txt"><Filter>ExampleSetups\LM\RNNLM\GPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\GPU\fnnlm.config"><Filter>ExampleSetups\LM\RNNLM\GPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\GPU\global.config"><Filter>ExampleSetups\LM\RNNLM\GPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.config"><Filter>ExampleSetups\LM\RNNLM\GPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\GPU\rnnlm.gpu.config"><Filter>ExampleSetups\LM\RNNLM\GPU</Filter></None>
<None Include="..\ExampleSetups\LM\RNNLM\rnnlm.ndl"><Filter>ExampleSetups\LM\RNNLM</Filter></None>
<None Include="..\ExampleSetups\NDLExamples.ndl"><Filter>ExampleSetups</Filter></None>
<None Include="..\ExampleSetups\SLU\conlleval.pl"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\feat.txt"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\globals.config"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\lbl.txt"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\lstmNDL.txt"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\README.txt"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\rnnlu.config"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\rnnluModelEditor.txt"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="..\ExampleSetups\SLU\score.sh"><Filter>ExampleSetups\SLU</Filter></None>
<None Include="ASR\config\globals_cpu.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\globals_gpu.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\me.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\README.txt"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\runall.bat"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_EvalSimpleNetwork.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_TrainAutoEncoder.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_TrainMultiInput.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_TrainMultiTask.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_TrainNDLNetwork.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_TrainSimpleNetwork.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_WriteBottleneck.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\config\TIMIT_WriteScaledLogLike.config"><Filter>CheckInSuites\ASR\config</Filter></None>
<None Include="ASR\CPU\EvalSimpleNetwork.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\CPU\TrainAutoEncoder.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\CPU\TrainMultiInput.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\CPU\TrainMultiTask.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\CPU\TrainNDLNetwork.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\CPU\TrainSimpleNetwork.output"><Filter>CheckInSuites\ASR\CPU</Filter></None>
<None Include="ASR\GPU\EvalSimpleNetwork.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="ASR\GPU\TrainAutoEncoder.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="ASR\GPU\TrainMultiInput.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="ASR\GPU\TrainMultiTask.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="ASR\GPU\TrainNDLNetwork.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="ASR\GPU\TrainSimpleNetwork.output"><Filter>CheckInSuites\ASR\GPU</Filter></None>
<None Include="LM\RNNLM\CPU\ExpectedOutputs.txt"><Filter>CheckInSuites\LM\RNNLM\CPU</Filter></None>
<None Include="LM\RNNLM\CPU\global.config"><Filter>CheckInSuites\LM\RNNLM\CPU</Filter></None>
<None Include="LM\RNNLM\CPU\rnnlm.config"><Filter>CheckInSuites\LM\RNNLM\CPU</Filter></None>
<None Include="LM\RNNLM\GPU\ExpectedOutputs.txt"><Filter>CheckInSuites\LM\RNNLM\GPU</Filter></None>
<None Include="LM\RNNLM\GPU\rnnlm.config"><Filter>CheckInSuites\LM\RNNLM\GPU</Filter></None>
<None Include="MNIST\CPU\expectedresults.txt"><Filter>CheckInSuites\MNIST\CPU</Filter></None>
<None Include="MNIST\DefaultMacros.ndl"><Filter>CheckInSuites\MNIST</Filter></None>
<None Include="MNIST\Example.ndl"><Filter>CheckInSuites\MNIST</Filter></None>
<None Include="MNIST\GPU\expectedresults.txt"><Filter>CheckInSuites\MNIST\GPU</Filter></None>
<None Include="MNIST\mnistCheckIn.config"><Filter>CheckInSuites\MNIST</Filter></None>
<None Include="MNIST\mnistlabels.txt"><Filter>CheckInSuites\MNIST</Filter></None>
<None Include="SLU\atis.dev.IOB.simple"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\atis.test.apos.pred.pos.head.IOB.simple"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\atis.train.apos.pred.pos.head.IOB.simple"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\Expected.log"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\globals.config"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\input.txt"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\inputmap.txt"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\lstmNDL.txt"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\output.txt"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\README.txt"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\rnnlu.config"><Filter>CheckInSuites\SLU</Filter></None>
<None Include="SLU\rnnlu.ndl.config"><Filter>CheckInSuites\SLU</Filter></None>