Use .cntk as default extension for CNTK configuration files

Mark Hillebrand 2016-01-29 17:12:13 +01:00
Parent 966357259f
Commit 22e8011025
142 changed files with 261 additions and 260 deletions

122
CNTK.sln
View file

@ -105,7 +105,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{4B
Tests\EndToEndTests\Speech\QuickE2E\baseline.gpu.txt = Tests\EndToEndTests\Speech\QuickE2E\baseline.gpu.txt
Tests\EndToEndTests\Speech\QuickE2E\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\QuickE2E\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\QuickE2E\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\QuickE2E\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\QuickE2E\cntk.config = Tests\EndToEndTests\Speech\QuickE2E\cntk.config
Tests\EndToEndTests\Speech\QuickE2E\cntk.cntk = Tests\EndToEndTests\Speech\QuickE2E\cntk.cntk
Tests\EndToEndTests\Speech\QuickE2E\run-test = Tests\EndToEndTests\Speech\QuickE2E\run-test
Tests\EndToEndTests\Speech\QuickE2E\testcases.yml = Tests\EndToEndTests\Speech\QuickE2E\testcases.yml
EndProjectSection
@ -121,7 +121,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{5F733BBA-F
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "LSTM", "LSTM", "{19EE975B-232D-49F0-94C7-6F1C6424FB53}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\LSTM\cntk.config = Tests\EndToEndTests\Speech\LSTM\cntk.config
Tests\EndToEndTests\Speech\LSTM\cntk.cntk = Tests\EndToEndTests\Speech\LSTM\cntk.cntk
Tests\EndToEndTests\Speech\LSTM\lstm.bs = Tests\EndToEndTests\Speech\LSTM\lstm.bs
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\macros.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\macros.txt
EndProjectSection
@ -139,7 +139,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "SGDLib", "Source\SGDLib\SGD
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelTraining", "ParallelTraining", "{5E666C53-2D82-49C9-9127-3FDDC321C741}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\ParallelTraining\SimpleMultiGPU.config = Tests\EndToEndTests\ParallelTraining\SimpleMultiGPU.config
Tests\EndToEndTests\ParallelTraining\SimpleMultiGPU.cntk = Tests\EndToEndTests\ParallelTraining\SimpleMultiGPU.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{6D1353D6-F196-466F-B886-F16D48759B20}"
@ -233,7 +233,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "FullUtterance", "FullUttera
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DNN", "DNN", "{6994C86D-A672-4254-824A-51F4DFEB807F}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\DNN\cntk.config = Tests\EndToEndTests\Speech\DNN\cntk.config
Tests\EndToEndTests\Speech\DNN\cntk.cntk = Tests\EndToEndTests\Speech\DNN\cntk.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Parallel1BitQuantization", "Parallel1BitQuantization", "{FD88A77E-A839-4513-AC5C-AD92447BE229}"
@ -263,7 +263,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DiscriminativePreTraining",
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.gpu.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.gpu.txt
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\cntk_dpt.config = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\cntk_dpt.config
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\cntk_dpt.cntk = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\cntk_dpt.cntk
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\dnn.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\dnn.txt
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\dnn_1layer.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\dnn_1layer.txt
Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\macros.txt = Tests\EndToEndTests\Speech\DNN\DiscriminativePreTraining\macros.txt
@ -290,7 +290,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SequenceTraining", "Sequenc
Tests\EndToEndTests\Speech\DNN\SequenceTraining\add_layer.mel = Tests\EndToEndTests\Speech\DNN\SequenceTraining\add_layer.mel
Tests\EndToEndTests\Speech\DNN\SequenceTraining\baseline.gpu.txt = Tests\EndToEndTests\Speech\DNN\SequenceTraining\baseline.gpu.txt
Tests\EndToEndTests\Speech\DNN\SequenceTraining\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\DNN\SequenceTraining\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\DNN\SequenceTraining\cntk_sequence.config = Tests\EndToEndTests\Speech\DNN\SequenceTraining\cntk_sequence.config
Tests\EndToEndTests\Speech\DNN\SequenceTraining\cntk_sequence.cntk = Tests\EndToEndTests\Speech\DNN\SequenceTraining\cntk_sequence.cntk
Tests\EndToEndTests\Speech\DNN\SequenceTraining\dnn.txt = Tests\EndToEndTests\Speech\DNN\SequenceTraining\dnn.txt
Tests\EndToEndTests\Speech\DNN\SequenceTraining\dnn_1layer.txt = Tests\EndToEndTests\Speech\DNN\SequenceTraining\dnn_1layer.txt
Tests\EndToEndTests\Speech\DNN\SequenceTraining\macros.txt = Tests\EndToEndTests\Speech\DNN\SequenceTraining\macros.txt
@ -316,7 +316,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{2A
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\cntk.config = Tests\EndToEndTests\Image\QuickE2E\cntk.config
Tests\EndToEndTests\Image\QuickE2E\cntk.cntk = Tests\EndToEndTests\Image\QuickE2E\cntk.cntk
Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl = Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl
Tests\EndToEndTests\Image\QuickE2E\Macros.ndl = Tests\EndToEndTests\Image\QuickE2E\Macros.ndl
Tests\EndToEndTests\Image\QuickE2E\run-test = Tests\EndToEndTests\Image\QuickE2E\run-test
@ -329,7 +329,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SVD", "SVD", "{669B6203-967
Tests\EndToEndTests\Speech\SVD\baseline.gpu.txt = Tests\EndToEndTests\Speech\SVD\baseline.gpu.txt
Tests\EndToEndTests\Speech\SVD\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\SVD\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\SVD\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\SVD\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\SVD\cntk.config = Tests\EndToEndTests\Speech\SVD\cntk.config
Tests\EndToEndTests\Speech\SVD\cntk.cntk = Tests\EndToEndTests\Speech\SVD\cntk.cntk
Tests\EndToEndTests\Speech\SVD\run-test = Tests\EndToEndTests\Speech\SVD\run-test
Tests\EndToEndTests\Speech\SVD\testcases.yml = Tests\EndToEndTests\Speech\SVD\testcases.yml
EndProjectSection
@ -340,7 +340,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{81AE01
Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\Simple\cntk.config = Tests\EndToEndTests\Speech\Simple\cntk.config
Tests\EndToEndTests\Speech\Simple\cntk.cntk = Tests\EndToEndTests\Speech\Simple\cntk.cntk
Tests\EndToEndTests\Speech\Simple\run-test = Tests\EndToEndTests\Speech\Simple\run-test
Tests\EndToEndTests\Speech\Simple\testcases.yml = Tests\EndToEndTests\Speech\Simple\testcases.yml
EndProjectSection
@ -443,13 +443,13 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "RNNLM", "RNNLM", "{811924DE
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CPU", "CPU", "{96012801-5187-4FAF-A54E-BF4B73C855F8}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\LM\RNNLM\CPU\global.config = Tests\EndToEndTests\LM\RNNLM\CPU\global.config
Tests\EndToEndTests\LM\RNNLM\CPU\rnnlm.config = Tests\EndToEndTests\LM\RNNLM\CPU\rnnlm.config
Tests\EndToEndTests\LM\RNNLM\CPU\global.cntk = Tests\EndToEndTests\LM\RNNLM\CPU\global.cntk
Tests\EndToEndTests\LM\RNNLM\CPU\rnnlm.cntk = Tests\EndToEndTests\LM\RNNLM\CPU\rnnlm.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "GPU", "GPU", "{2A1F0FB0-2304-4F35-87B3-66230C6E58F0}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\LM\RNNLM\GPU\rnnlm.config = Tests\EndToEndTests\LM\RNNLM\GPU\rnnlm.config
Tests\EndToEndTests\LM\RNNLM\GPU\rnnlm.cntk = Tests\EndToEndTests\LM\RNNLM\GPU\rnnlm.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SLU", "SLU", "{BFBC6BE1-C33E-4A80-B8F3-A33410EC00FC}"
@ -457,13 +457,13 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SLU", "SLU", "{BFBC6BE1-C33
Tests\EndToEndTests\SLU\atis.dev.IOB.simple = Tests\EndToEndTests\SLU\atis.dev.IOB.simple
Tests\EndToEndTests\SLU\atis.test.apos.pred.pos.head.IOB.simple = Tests\EndToEndTests\SLU\atis.test.apos.pred.pos.head.IOB.simple
Tests\EndToEndTests\SLU\atis.train.apos.pred.pos.head.IOB.simple = Tests\EndToEndTests\SLU\atis.train.apos.pred.pos.head.IOB.simple
Tests\EndToEndTests\SLU\globals.config = Tests\EndToEndTests\SLU\globals.config
Tests\EndToEndTests\SLU\globals.cntk = Tests\EndToEndTests\SLU\globals.cntk
Tests\EndToEndTests\SLU\input.txt = Tests\EndToEndTests\SLU\input.txt
Tests\EndToEndTests\SLU\inputmap.txt = Tests\EndToEndTests\SLU\inputmap.txt
Tests\EndToEndTests\SLU\lstmNDL.txt = Tests\EndToEndTests\SLU\lstmNDL.txt
Tests\EndToEndTests\SLU\README.txt = Tests\EndToEndTests\SLU\README.txt
Tests\EndToEndTests\SLU\rnnlu.config = Tests\EndToEndTests\SLU\rnnlu.config
Tests\EndToEndTests\SLU\rnnlu.ndl.config = Tests\EndToEndTests\SLU\rnnlu.ndl.config
Tests\EndToEndTests\SLU\rnnlu.cntk = Tests\EndToEndTests\SLU\rnnlu.cntk
Tests\EndToEndTests\SLU\rnnlu.ndl.cntk = Tests\EndToEndTests\SLU\rnnlu.ndl.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MNIST", "MNIST", "{FA33A61E-95C7-4049-8111-22058CE361A3}"
@ -480,27 +480,27 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AdditionalFiles", "Addition
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{6E5A252C-ACCE-42E0-9819-FF4DEF6D739E}"
ProjectSection(SolutionItems) = preProject
Examples\Image\MNIST\Config\01_OneHidden.config = Examples\Image\MNIST\Config\01_OneHidden.config
Examples\Image\MNIST\Config\01_OneHidden.cntk = Examples\Image\MNIST\Config\01_OneHidden.cntk
Examples\Image\MNIST\Config\01_OneHidden.ndl = Examples\Image\MNIST\Config\01_OneHidden.ndl
Examples\Image\MNIST\Config\02_Convolution.config = Examples\Image\MNIST\Config\02_Convolution.config
Examples\Image\MNIST\Config\02_Convolution.cntk = Examples\Image\MNIST\Config\02_Convolution.cntk
Examples\Image\MNIST\Config\02_Convolution.ndl = Examples\Image\MNIST\Config\02_Convolution.ndl
Examples\Image\MNIST\Config\03_ConvBatchNorm.config = Examples\Image\MNIST\Config\03_ConvBatchNorm.config
Examples\Image\MNIST\Config\03_ConvBatchNorm.cntk = Examples\Image\MNIST\Config\03_ConvBatchNorm.cntk
Examples\Image\MNIST\Config\03_ConvBatchNorm.ndl = Examples\Image\MNIST\Config\03_ConvBatchNorm.ndl
Examples\Image\MNIST\Config\Macros.ndl = Examples\Image\MNIST\Config\Macros.ndl
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CIFAR-10", "CIFAR-10", "{77125562-3BF2-45D2-9B73-72CA8E03C78C}"
ProjectSection(SolutionItems) = preProject
Examples\Image\Miscellaneous\CIFAR-10\01_Conv.config = Examples\Image\Miscellaneous\CIFAR-10\01_Conv.config
Examples\Image\Miscellaneous\CIFAR-10\01_Conv.cntk = Examples\Image\Miscellaneous\CIFAR-10\01_Conv.cntk
Examples\Image\Miscellaneous\CIFAR-10\01_Convolution.ndl = Examples\Image\Miscellaneous\CIFAR-10\01_Convolution.ndl
Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.config = Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.config
Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.cntk = Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.cntk
Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.mel = Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.mel
Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.ndl = Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv.ndl
Examples\Image\Miscellaneous\CIFAR-10\03_ProjWeightsGen.py = Examples\Image\Miscellaneous\CIFAR-10\03_ProjWeightsGen.py
Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.config = Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.config
Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.cntk = Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.cntk
Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.mel = Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.mel
Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.ndl = Examples\Image\Miscellaneous\CIFAR-10\03_ResNet.ndl
Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.config = Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.config
Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.cntk = Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.cntk
Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.ndl = Examples\Image\Miscellaneous\CIFAR-10\04_ResNet_56.ndl
Examples\Image\Miscellaneous\CIFAR-10\16to32.txt = Examples\Image\Miscellaneous\CIFAR-10\16to32.txt
Examples\Image\Miscellaneous\CIFAR-10\32to64.txt = Examples\Image\Miscellaneous\CIFAR-10\32to64.txt
@ -516,7 +516,7 @@ EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AlexNet", "AlexNet", "{D29DC402-98A3-40C7-B683-4CC84DEC5C18}"
ProjectSection(SolutionItems) = preProject
Examples\Image\Miscellaneous\ImageNet\AlexNet\add_top5_layer.mel = Examples\Image\Miscellaneous\ImageNet\AlexNet\add_top5_layer.mel
Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.config = Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.config
Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.cntk = Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.cntk
Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.ndl = Examples\Image\Miscellaneous\ImageNet\AlexNet\AlexNet.ndl
Examples\Image\Miscellaneous\ImageNet\AlexNet\Macros.ndl = Examples\Image\Miscellaneous\ImageNet\AlexNet\Macros.ndl
EndProjectSection
@ -526,11 +526,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "VGG", "VGG", "{BC0D6DFF-80C
Examples\Image\Miscellaneous\ImageNet\VGG\add_top5_layer.mel = Examples\Image\Miscellaneous\ImageNet\VGG\add_top5_layer.mel
Examples\Image\Miscellaneous\ImageNet\VGG\ImageNet1K_mean.xml = Examples\Image\Miscellaneous\ImageNet\VGG\ImageNet1K_mean.xml
Examples\Image\Miscellaneous\ImageNet\VGG\Macros.ndl = Examples\Image\Miscellaneous\ImageNet\VGG\Macros.ndl
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.config = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.config
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.cntk = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.cntk
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.ndl = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_A.ndl
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.config = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.config
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.cntk = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.cntk
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.ndl = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E.ndl
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.config = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.config
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.cntk = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.cntk
Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.ndl = Examples\Image\Miscellaneous\ImageNet\VGG\VGG_E_BN.ndl
EndProjectSection
EndProject
@ -548,8 +548,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NdlExamples", "NdlExamples"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{1E37CE40-556D-4693-B58C-F8D4CE349BB7}"
ProjectSection(SolutionItems) = preProject
Examples\Other\Simple2d\Config\Multigpu.config = Examples\Other\Simple2d\Config\Multigpu.config
Examples\Other\Simple2d\Config\Simple.config = Examples\Other\Simple2d\Config\Simple.config
Examples\Other\Simple2d\Config\Multigpu.cntk = Examples\Other\Simple2d\Config\Multigpu.cntk
Examples\Other\Simple2d\Config\Simple.cntk = Examples\Other\Simple2d\Config\Simple.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{BF1A621D-528B-4B84-AAFC-EF1455FC6830}"
@ -563,8 +563,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AN4", "AN4", "{EDA80B25-B18
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{B3E3AF4A-FEF5-46AB-A72A-19AF4F1FDD49}"
ProjectSection(SolutionItems) = preProject
Examples\Speech\AN4\Config\FeedForward.config = Examples\Speech\AN4\Config\FeedForward.config
Examples\Speech\AN4\Config\LSTM-NDL.config = Examples\Speech\AN4\Config\LSTM-NDL.config
Examples\Speech\AN4\Config\FeedForward.cntk = Examples\Speech\AN4\Config\FeedForward.cntk
Examples\Speech\AN4\Config\LSTM-NDL.cntk = Examples\Speech\AN4\Config\LSTM-NDL.cntk
Examples\Speech\AN4\Config\lstmp-3layer-opt.ndl = Examples\Speech\AN4\Config\lstmp-3layer-opt.ndl
EndProjectSection
EndProject
@ -587,16 +587,16 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "cntk_config", "cntk_config"
ProjectSection(SolutionItems) = preProject
Examples\Speech\Miscellaneous\AMI\cntk_config\40fbank.conf = Examples\Speech\Miscellaneous\AMI\cntk_config\40fbank.conf
Examples\Speech\Miscellaneous\AMI\cntk_config\80fbank.conf = Examples\Speech\Miscellaneous\AMI\cntk_config\80fbank.conf
Examples\Speech\Miscellaneous\AMI\cntk_config\Align.config = Examples\Speech\Miscellaneous\AMI\cntk_config\Align.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn.config
Examples\Speech\Miscellaneous\AMI\cntk_config\Align.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\Align.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn_smbr.mel = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_dnn_smbr.mel
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp_smbr.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp_smbr.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_smbr.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_smbr.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_write.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_write.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK_write.config = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK_write.config
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp_smbr.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_lstmp_smbr.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_smbr.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_smbr.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_write.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK2_write.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK_write.cntk = Examples\Speech\Miscellaneous\AMI\cntk_config\CNTK_write.cntk
Examples\Speech\Miscellaneous\AMI\cntk_config\default_macros.ndl = Examples\Speech\Miscellaneous\AMI\cntk_config\default_macros.ndl
Examples\Speech\Miscellaneous\AMI\cntk_config\dnn_3layer.ndl = Examples\Speech\Miscellaneous\AMI\cntk_config\dnn_3layer.ndl
Examples\Speech\Miscellaneous\AMI\cntk_config\dnn_6layer.ndl = Examples\Speech\Miscellaneous\AMI\cntk_config\dnn_6layer.ndl
@ -620,36 +620,36 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "config", "config", "{1C7D22
Examples\Speech\Miscellaneous\TIMIT\config\classify.ndl = Examples\Speech\Miscellaneous\TIMIT\config\classify.ndl
Examples\Speech\Miscellaneous\TIMIT\config\create_1layer.ndl = Examples\Speech\Miscellaneous\TIMIT\config\create_1layer.ndl
Examples\Speech\Miscellaneous\TIMIT\config\default_macros.ndl = Examples\Speech\Miscellaneous\TIMIT\config\default_macros.ndl
Examples\Speech\Miscellaneous\TIMIT\config\globals.config = Examples\Speech\Miscellaneous\TIMIT\config\globals.config
Examples\Speech\Miscellaneous\TIMIT\config\globals.cntk = Examples\Speech\Miscellaneous\TIMIT\config\globals.cntk
Examples\Speech\Miscellaneous\TIMIT\config\lstm.ndl = Examples\Speech\Miscellaneous\TIMIT\config\lstm.ndl
Examples\Speech\Miscellaneous\TIMIT\config\mtl_fbank_mfcc.ndl = Examples\Speech\Miscellaneous\TIMIT\config\mtl_fbank_mfcc.ndl
Examples\Speech\Miscellaneous\TIMIT\config\mtl_senones_dr.ndl = Examples\Speech\Miscellaneous\TIMIT\config\mtl_senones_dr.ndl
Examples\Speech\Miscellaneous\TIMIT\config\PAC-RNN.ndl = Examples\Speech\Miscellaneous\TIMIT\config\PAC-RNN.ndl
Examples\Speech\Miscellaneous\TIMIT\config\README.txt = Examples\Speech\Miscellaneous\TIMIT\config\README.txt
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_AdaptLearnRate.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_AdaptLearnRate.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_EvalSimpleNetwork.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_EvalSimpleNetwork.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainAutoEncoder.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainAutoEncoder.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainLSTM.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainLSTM.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiInput.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiInput.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiTask.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiTask.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainNDLNetwork.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainNDLNetwork.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainSimpleNetwork.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainSimpleNetwork.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainWithPreTrain.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainWithPreTrain.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteBottleneck.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteBottleneck.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteScaledLogLike.config = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteScaledLogLike.config
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_AdaptLearnRate.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_AdaptLearnRate.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_CrossValidateSimpleNetwork.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_EvalSimpleNetwork.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_EvalSimpleNetwork.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainAutoEncoder.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainAutoEncoder.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainLSTM.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainLSTM.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiInput.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiInput.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiTask.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainMultiTask.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainNDLNetwork.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainNDLNetwork.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainSimpleNetwork.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainSimpleNetwork.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainWithPreTrain.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_TrainWithPreTrain.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteBottleneck.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteBottleneck.cntk
Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteScaledLogLike.cntk = Examples\Speech\Miscellaneous\TIMIT\config\TIMIT_WriteScaledLogLike.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CPU", "CPU", "{5ED4F5DC-E016-4E10-BACD-6A760A0CDE89}"
ProjectSection(SolutionItems) = preProject
Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_DNN.config = Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_DNN.config
Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_LSTM.config = Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_LSTM.config
Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_DNN.cntk = Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_DNN.cntk
Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_LSTM.cntk = Examples\Speech\Miscellaneous\TIMIT\CPU\TIMIT_LSTM.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "GPU", "GPU", "{35CFD8E3-7206-4243-AB5C-AAF610109A5C}"
ProjectSection(SolutionItems) = preProject
Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_DNN.config = Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_DNN.config
Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_LSTM.config = Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_LSTM.config
Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_DNN.cntk = Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_DNN.cntk
Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_LSTM.cntk = Examples\Speech\Miscellaneous\TIMIT\GPU\TIMIT_LSTM.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PennTreebank", "PennTreebank", "{6F4125B5-220F-4FB7-B6C4-85A966A0268C}"
@ -659,14 +659,14 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PennTreebank", "PennTreeban
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{850008BC-36B0-4A0A-BD0C-B6D5C2184227}"
ProjectSection(SolutionItems) = preProject
Examples\Text\PennTreebank\Config\rnn.config = Examples\Text\PennTreebank\Config\rnn.config
Examples\Text\PennTreebank\Config\rnn.cntk = Examples\Text\PennTreebank\Config\rnn.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SLU", "SLU", "{E6DC3B7D-303D-4A54-B040-D8DCF8C56E17}"
ProjectSection(SolutionItems) = preProject
Examples\Text\Miscellaneous\SLU\lstmNDL.txt = Examples\Text\Miscellaneous\SLU\lstmNDL.txt
Examples\Text\Miscellaneous\SLU\README.txt = Examples\Text\Miscellaneous\SLU\README.txt
Examples\Text\Miscellaneous\SLU\rnnlu.config = Examples\Text\Miscellaneous\SLU\rnnlu.config
Examples\Text\Miscellaneous\SLU\rnnlu.cntk = Examples\Text\Miscellaneous\SLU\rnnlu.cntk
Examples\Text\Miscellaneous\SLU\rnnluModelEditor.txt = Examples\Text\Miscellaneous\SLU\rnnluModelEditor.txt
EndProjectSection
EndProject
@ -683,11 +683,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ResNet", "ResNet", "{06D2C6
Examples\Image\Miscellaneous\ImageNet\ResNet\create_eval_model.mel = Examples\Image\Miscellaneous\ImageNet\ResNet\create_eval_model.mel
Examples\Image\Miscellaneous\ImageNet\ResNet\Macros.ndl = Examples\Image\Miscellaneous\ImageNet\ResNet\Macros.ndl
Examples\Image\Miscellaneous\ImageNet\ResNet\ProjWeightsGen.py = Examples\Image\Miscellaneous\ImageNet\ResNet\ProjWeightsGen.py
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.config = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.config
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.cntk = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.cntk
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.ndl = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_152.ndl
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.config = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.config
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.cntk = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.cntk
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.ndl = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_34.ndl
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.config = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.config
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.cntk = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.cntk
Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.ndl = Examples\Image\Miscellaneous\ImageNet\ResNet\ResNet_50.ndl
EndProjectSection
EndProject

View file

@ -125,12 +125,12 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp.config
cntk configFile=yourExp.cntk
\end_layout
\end_inset
where yourExp.config is a CNTK configuration file, which typically contains
where yourExp.cntk is a CNTK configuration file, which typically contains
several command blocks.
A command block is a top level block of the configuration.
Each command block must specify what action to be carried out with related
@ -5668,7 +5668,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp.config
cntk configFile=yourExp.cntk
\end_layout
\end_inset
@ -5788,7 +5788,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp.config stderr="c:
cntk configFile=yourExp.cntk stderr="c:
\backslash
temp
\backslash
@ -5816,7 +5816,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp.config mnistTrain=[minibatchSize=256]
cntk configFile=yourExp.cntk mnistTrain=[minibatchSize=256]
\end_layout
\end_inset
@ -5831,7 +5831,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp.config mnistTrain=[reader=[file="mynewfile.txt"]]
cntk configFile=yourExp.cntk mnistTrain=[reader=[file="mynewfile.txt"]]
\end_layout
@ -5877,12 +5877,12 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp1.config+yourExp2.config
cntk configFile=yourExp1.cntk+yourExp2.cntk
\end_layout
\begin_layout Plain Layout
cntk configFile=yourExp1.config configFile=yourExp2.config
cntk configFile=yourExp1.cntk configFile=yourExp2.cntk
\end_layout
\end_inset
@ -5891,7 +5891,7 @@ cntk configFile=yourExp1.config configFile=yourExp2.config
\end_layout
\begin_layout Standard
If yourExp2.config only contains the string "mnistTrain=[reader=[file=mynewfile.tx
If yourExp2.cntk only contains the string "mnistTrain=[reader=[file=mynewfile.tx
t]]", then both of these commands would be equivalent to:
\end_layout
@ -5902,7 +5902,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp1.config mnistTrain=[reader=[file="mynewfile.txt"]]
cntk configFile=yourExp1.cntk mnistTrain=[reader=[file="mynewfile.txt"]]
\end_layout
@ -5926,7 +5926,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp1.config+yourExp2.config var1=value configFile=yourExp3.conf
cntk configFile=yourExp1.cntk+yourExp2.cntk var1=value configFile=yourExp3.conf
ig
\end_layout
@ -5943,7 +5943,7 @@ In addition being able to specify multiple configuration files at the command
include
\emph default
one configuration file within another.
For example, if the first line of yourExp2.config was
For example, if the first line of yourExp2.cntk was
\end_layout
\begin_layout Standard
@ -5953,7 +5953,7 @@ status open
\begin_layout Plain Layout
include=yourExp1.config
include=yourExp1.cntk
\end_layout
\end_inset
@ -5968,7 +5968,7 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp2.config
cntk configFile=yourExp2.cntk
\end_layout
\end_inset
@ -5983,12 +5983,12 @@ status open
\begin_layout Plain Layout
cntk configFile=yourExp1.config+yourExp2.config
cntk configFile=yourExp1.cntk+yourExp2.cntk
\end_layout
\end_inset
where in this latter case, yourExp2.config doesn't contain the
where in this latter case, yourExp2.cntk doesn't contain the
\emph on
include
\emph default
@ -6007,9 +6007,9 @@ included
Including a configuration file is equivalent to pasting the contents of
that file at the location of the include statement.
Include statements are resolved recursively (using a depth-first search),
meaning that if yourExpA.config includes yourExpB.config, and yourExpB.config
includes yourExpC.config, then the full chain will be resolved, and yourExpC.conf
ig will effectively be included in yourExpA.config.
meaning that if yourExpA.cntk includes yourExpB.cntk, and yourExpB.cntk
includes yourExpC.cntk, then the full chain will be resolved, and yourExpC.conf
ig will effectively be included in yourExpA.cntk.
If a configuration file is included multiple times (eg, 'A' includes 'B'
and 'C', and 'B' also includes 'C'), then it will effectively only be included
the first time it is encountered.
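To recap the mechanisms described above, a minimal sketch reusing the illustrative file names from the text:

cntk configFile=yourExp1.cntk+yourExp2.cntk                    # concatenate two configuration files
cntk configFile=yourExp1.cntk mnistTrain=[minibatchSize=256]   # append a parameter override on the command line
include=yourExp1.cntk                                          # as the first line of yourExp2.cntk, pastes that file's contents in place

As the text above shows, appending a small configuration file that contains only one parameter has the same effect as specifying that parameter directly on the command line.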

View file

@ -3920,7 +3920,7 @@ One of the important tasks in spoken language understanding is labeling
\backslash
SLU
\backslash
rnnlu.config.
rnnlu.cntk.
The data is ATIS, which consists of 944 unique words, including <unk>,
in the training/dev set.
Output has 127 dimension, each corresponding to a semantic tag in ATIS.

View file

@ -834,7 +834,7 @@ status open
\begin_layout Plain Layout
cntk configFile=Simple.config
cntk configFile=Simple.cntk
\end_layout
\end_inset
@ -853,7 +853,7 @@ status open
\begin_layout Plain Layout
cntk configFile=Simple.config
cntk configFile=Simple.cntk
\end_layout
\end_inset

View file

@ -44,12 +44,12 @@ or prefix the call to the cntk executable with the corresponding folder.
Run the example from the Image/MNIST/Data folder using:
`cntk configFile=../Config/01_OneHidden.config`
`cntk configFile=../Config/01_OneHidden.cntk`
or run from any folder and specify the Data folder as the `currentDirectory`,
e.g. running from the Image/MNIST folder using:
`cntk configFile=Config/01_OneHidden.config currentDirectory=Data`
`cntk configFile=Config/01_OneHidden.cntk currentDirectory=Data`
The output folder will be created inside Image/MNIST/.
@ -61,21 +61,21 @@ There are three config files and corresponding network description files in the
1. 01_OneHidden.ndl is a simple, one hidden layer network that produces 2.3% of error.
To run the sample, navigate to the Data folder and run the following command:
`cntk configFile=../Config/01_OneHidden.config`
`cntk configFile=../Config/01_OneHidden.cntk`
2. 02_Convolution.ndl is a more interesting convolutional network which has 2 convolutional and 2 max pooling layers.
The network produces 0.87% of error after training for about 2 minutes on GPU.
To run the sample, navigate to the Data folder and run the following command:
`cntk configFile=../Config/02_Convolution.config`
`cntk configFile=../Config/02_Convolution.cntk`
3. 03_ConvBatchNorm.ndl is almost identical to 02_Convolution.ndl
except that it uses batch normalization for the fully connected layer h1.
Note that batch normalization is implemented using just NDL (see Macros.ndl for details).
As a result, it uses fewer epochs (8 vs 15 in 02_Convolution) to achieve the same accuracy.
To run the sample, navigate to the Data folder and run the following command:
`cntk configFile=../Config/03_ConvBatchNorm.config`
`cntk configFile=../Config/03_ConvBatchNorm.cntk`
For more details, refer to .ndl and corresponding .config files.
For more details, refer to .ndl and corresponding .cntk files.
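Individual settings can also be overridden on the command line without editing the configs; a minimal sketch, mirroring the overrides the end-to-end tests use to cap training at a single short epoch:

`cntk configFile=../Config/01_OneHidden.cntk train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=100]]`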
### Additional files

View file

@ -17,12 +17,12 @@ https://code.google.com/p/cuda-convnet/source/browse/trunk/example-layers/layers
(main differences are usage of max pooling layers everywhere rather than mix of max and average pooling, as well as dropout in fully-connected layer).
The network produces 21% of error after training for about 3 minutes on GPU.
To run the sample, navigate to this folder and run the following command:
<path to CNTK executable> configFile=01_Conv.config configName=01_Conv
<path to CNTK executable> configFile=01_Conv.cntk configName=01_Conv
02_BatchNormConv.ndl is a convolutional network which uses batch normalization technique (http://arxiv.org/abs/1502.03167).
03_ResNet.ndl and 04_ResNet_56.ndl are very deep convolutional networks that use ResNet architecture and have 20 and 56 layers respectively (http://arxiv.org/abs/1512.03385).
With 03_ResNet.config you should get around 8.2% of error after training for about 50 minutes (see log files in the Output directory).
With 03_ResNet.cntk you should get around 8.2% of error after training for about 50 minutes (see log files in the Output directory).
For more details, refer to .ndl and corresponding .config files.
For more details, refer to .ndl and corresponding .cntk files.
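The deeper networks follow the same invocation pattern; a minimal sketch, assuming 03_ResNet.cntk accepts the same configName parameter as 01_Conv.cntk:

<path to CNTK executable> configFile=03_ResNet.cntk configName=03_ResNet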

View file

@ -1,5 +1,5 @@
# This is a configuration for parallel training of a simple feed-forward neural network using data-parallel SGD.
# The configuration is identical to the Simple.config except for the few additional parallelism related options.
# The configuration is identical to the Simple.cntk except for the few additional parallelism related options.
# Parameters can be overwritten on the command line
# for example: cntk configFile=myConfigFile RootDir=../..
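# Data-parallel training is normally launched through MPI; a minimal sketch, assuming an MPI launcher
# is installed and using this file's name from the test suite:
# mpiexec -n 2 cntk configFile=SimpleMultiGPU.cntk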

View file

@ -7,7 +7,7 @@
|Purpose |Showcase how to train a simple CNTK network (CPU and GPU) and how to use it for scoring (decoding)
|Network |SimpleNetworkBuilder, 2 hidden layers with 50 sigmoid nodes each, cross entropy with softmax
|Training |Stochastic gradient descent with momentum
|Comments |There are two config files: Simple.config uses a single CPU or GPU, Multigpu.config uses data-parallel SGD for training on multiple GPUs
|Comments |There are two config files: Simple.cntk uses a single CPU or GPU, Multigpu.cntk uses data-parallel SGD for training on multiple GPUs
## Running the example
@ -31,12 +31,12 @@ or prefix the call to the cntk executable with the corresponding folder.
Run the example from the Simple2d/Data folder using:
`cntk configFile=../Config/Simple.config`
`cntk configFile=../Config/Simple.cntk`
or run from any folder and specify the Data folder as the `currentDirectory`,
e.g. running from the Simple2d folder using:
`cntk configFile=Config/Simple.config currentDirectory=Data`
`cntk configFile=Config/Simple.cntk currentDirectory=Data`
The output folder will be created inside Simple2d/.
@ -51,7 +51,7 @@ __It is therefore recommended to generally use `ConfigDir` and `ModelDir` in all
To run on CPU set `deviceId = -1`, to run on GPU set deviceId to "auto" or a specific value >= 0.
Both config files are nearly identical.
Multigpu.config has some additional parameters for parallel training (see parallelTrain in the file).
Multigpu.cntk has some additional parameters for parallel training (see parallelTrain in the file).
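The device can also be selected on the command line instead of editing the file; a minimal sketch, assuming you run from the Simple2d/Data folder as above:

`cntk configFile=../Config/Simple.cntk deviceId=-1`

runs on CPU, while `deviceId="auto"` lets CNTK pick a device.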
Both files define the following three commands: train, test and output.
By default only train and test are executed:

View file

@ -16,7 +16,7 @@ See License.md in the root level folder of the CNTK repository for full license
|Purpose: |Showcase how to train feed forward and LSTM networks for speech data
|Network: |SimpleNetworkBuilder for 2-layer FF, NdlNetworkBuilder for 3-layer LSTM network
|Training: |Data-parallel 1-Bit SGD with automatic mini batch rescaling (FF)
|Comments: |There are two config files: FeedForward.config and LSTM-NDL.config for FF and LSTM training respectively
|Comments: |There are two config files: FeedForward.cntk and LSTM-NDL.cntk for FF and LSTM training respectively
## Running the example
@ -40,12 +40,12 @@ or prefix the call to the cntk executable with the corresponding folder.
Run the example from the Speech/Data folder using:
`cntk configFile=../Config/FeedForward.config`
`cntk configFile=../Config/FeedForward.cntk`
or run from any folder and specify the Data folder as the `currentDirectory`,
e.g. running from the Speech folder using:
`cntk configFile=Config/FeedForward.config currentDirectory=Data`
`cntk configFile=Config/FeedForward.cntk currentDirectory=Data`
The output folder will be created inside Speech/.
@ -59,9 +59,9 @@ These variables will be overwritten when running on the Philly cluster.
__It is therefore recommended to generally use `ConfigDir` and `ModelDir` in all config files.__
To run on CPU set `deviceId = -1`, to run on GPU set deviceId to "auto" or a specific value >= 0.
The FeedForward.config file uses the SimpleNetworkBuilder to create a 2-layer
The FeedForward.cntk file uses the SimpleNetworkBuilder to create a 2-layer
feed forward network with sigmoid nodes and a softmax layer.
The LSTM-NDL.config file uses the NdlNetworkBuilder and refers to the lstmp-3layer-opt.ndl file.
The LSTM-NDL.cntk file uses the NdlNetworkBuilder and refers to the lstmp-3layer-opt.ndl file.
In the ndl file an LSTM component is defined and used to create a 3-layer LSTM network with a softmax layer.
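The LSTM network is trained with the same command pattern; a minimal sketch, run from the Speech/Data folder like the FeedForward example above:

`cntk configFile=../Config/LSTM-NDL.cntk`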
Both configurations only define and execute a single training task:

View file

@ -61,7 +61,7 @@ scripts/train_nnet.sh --num-threads 1 --device 0 --cmd "$cuda_cmd" \
--feat-dim $feat_dim --cntk-train-opts "$cntk_train_opts" \
--learning-rate "0.1:1" --momentum "0:0.9" \
--max-epochs 50 --minibatch-size 256 --evaluate-period 100 \
--cntk-config cntk_config/CNTK2_dnn.config \
--cntk-config cntk_config/CNTK2_dnn.cntk \
--default-macros cntk_config/default_macros.ndl \
--model-ndl cntk_config/dnn_6layer.ndl \
data/sdm1/train_120fbank exp/sdm1/tri3a_ali exp_cntk/sdm1/dnn
@ -70,7 +70,7 @@ scripts/train_nnet.sh --num-threads 1 --device 0 --cmd "$cuda_cmd" \
device=-1
scripts/align.sh --num-threads 1 --nj 60 --cmd "$decode_cmd" \
--feat-dim $feat_dim --device $device \
--cntk-config cntk_config/Align.config \
--cntk-config cntk_config/Align.cntk \
data/sdm1/train_120fbank data/lang \
exp_cntk/sdm1/dnn exp_cntk/sdm1/dnn_ali
@ -92,7 +92,7 @@ scripts/train_nnet.sh --num-threads 1 --device 0 --cmd "$cuda_cmd" \
--learning-rate "0.2:1" --momentum "0:0.9" \
--max-epochs 50 --minibatch-size 20 --evaluate-period 100 \
--clipping-per-sample 0.05 --l2-reg-weight 0.00001 --dropout-rate "0.1*5:0.8"\
--cntk-config cntk_config/CNTK2_lstmp.config \
--cntk-config cntk_config/CNTK2_lstmp.cntk \
--default-macros cntk_config/default_macros.ndl \
--model-ndl cntk_config/lstmp-3layer-highway-dropout.ndl \
data/sdm1/train_80fbank exp_cntk/sdm1/dnn_ali exp_cntk/sdm1/hlstmp
@ -110,7 +110,7 @@ device=-1
alidir=${srcdir}_ali
scripts/align.sh --num-threads 1 --nj 60 --cmd "$decode_cmd" \
--feat-dim $feat_dim --device $device \
--cntk-config cntk_config/Align.config \
--cntk-config cntk_config/Align.cntk \
$data data/lang $srcdir $alidir || exit 1;
# Denominator lattices.
@ -118,7 +118,7 @@ device=-1
denlatdir=${srcdir}_denlats
scripts/make_denlats.sh --num-threads 1 --nj 20 --sub-split 60 \
--feat-dim $feat_dim --cmd "$decode_cmd" --acwt $acwt \
--device $device --cntk-config cntk_config/Align.config \
--device $device --cntk-config cntk_config/Align.cntk \
--ngram-order 2 \
$data data/lang $srcdir $denlatdir || exit 1;
@ -134,7 +134,7 @@ scripts/train_nnet_sequence.sh --num-threads 1 --cmd "$cuda_cmd" --momentum 0.9\
--minibatch-size 20 --cntk-train-opts "$cntk_train_opts" \
--clipping-per-sample 0.05 --smooth-factor 0.1 \
--l2-reg-weight 0.00001 --one-silence-class false --dropout-rate 0 \
--cntk-config cntk_config/CNTK2_lstmp_smbr.config \
--cntk-config cntk_config/CNTK2_lstmp_smbr.cntk \
--model-mel cntk_config/lstmp-smbr.mel \
--model-ndl cntk_config/lstmp-3layer-highway-dropout.ndl \
--default-macros cntk_config/default_macros.ndl \

View file

@ -70,7 +70,7 @@ cp -L $cntk_model $dir || exit 1;
cp -L $kaldi_model $dir || exit 1;
mkdir $dir/configs
cp -f $cntk_config $dir/configs/Align.config
cp -f $cntk_config $dir/configs/Align.cntk
# Features to be fed to CNTK.
feats="scp:$sdata/JOB/feats.scp"

View file

@ -104,7 +104,7 @@ echo "$0 create cntk files"
for i in `seq 1 $labelDim` ; do echo $i ; done > $labelMapping
# echo $cn_gpu
# $cuda_cmd $dir/log/train_cntk.log \
$cn_gpu configFile=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/cntk_config/CNTK.config modelName=cntkSpeech.dnn DeviceNumber=0 ExpDir=$dir logFile=$logFile labelMapping=$labelMapping ndlfile=$ndlfile inputFeat=$inputFeat trainMLF=$trainMLF labelDim=$labelDim cvInputFeat=$cvInputFeat featDim=600 action=TrainDNN phnLabel=no phnDim=no phnMapping=no inputSCP=no outputSCP=no
$cn_gpu configFile=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/cntk_config/CNTK.cntk modelName=cntkSpeech.dnn DeviceNumber=0 ExpDir=$dir logFile=$logFile labelMapping=$labelMapping ndlfile=$ndlfile inputFeat=$inputFeat trainMLF=$trainMLF labelDim=$labelDim cvInputFeat=$cvInputFeat featDim=600 action=TrainDNN phnLabel=no phnDim=no phnMapping=no inputSCP=no outputSCP=no
fi
exit 0;
@ -121,7 +121,7 @@ if [ $stage -le 7 ]; then
cnmodel=$dir/cntkSpeech.dnn.16
graphdir=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/exp/ihm/tri4a/graph_ami_fsh.o3g.kn.pr1-7
class_frame_counts=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/exp/ihm/dnn5b_pretrain-dbn_dnn_realign/ali_train_pdf.counts
cntk_string="$cn_gpu configFile=cntk_config/CNTK_write.config DeviceNumber=1 modelName=$cnmodel labelDim=$labelDim featDim=$featDim action=$action"
cntk_string="$cn_gpu configFile=cntk_config/CNTK_write.cntk DeviceNumber=1 modelName=$cnmodel labelDim=$labelDim featDim=$featDim action=$action"
scripts/decode_cntk.sh --nj 1 --acwt 0.0833 --class-frame-counts "$class_frame_counts" \
$graphdir $data_fmllr/eval $dir/decode_ami_fsh.o3g.kn.pr1-7 "$cntk_string" || exit 1;
fi

View file

@ -14,7 +14,7 @@ minibatch_size=20
evaluate_period=100
cntk_train_opts=
cntk_config=cntk_config/CNTK2_lstmp.config
cntk_config=cntk_config/CNTK2_lstmp.cntk
default_macros=cntk_config/default_macros.ndl
model_ndl=cntk_config/lstmp-3layer.ndl
device=-1

View file

@ -24,7 +24,7 @@ evaluate_period=100
one_silence_class=true
cntk_train_opts=
cntk_config=cntk_config/CNTK2_lstmp_smbr.config
cntk_config=cntk_config/CNTK2_lstmp_smbr.cntk
default_macros=cntk_config/default_macros.ndl
model_ndl=cntk_config/lstmp-3layer.ndl
model_mel=cntk_config/lstmp-3layer-smbr.mel

View file

@ -43,7 +43,7 @@ fi
if [ $stage -le 1 ] ; then
### setup the configuration files for training CNTK models ###
cp cntk_config/CNTK2.config $expdir/CNTK2.config
cp cntk_config/CNTK2.cntk $expdir/CNTK2.cntk
cp cntk_config/default_macros.ndl $expdir/default_macros.ndl
cp cntk_config/dnn_6layer.ndl $expdir/dnn_6layer.ndl
ndlfile=$expdir/dnn_6layer.ndl
@ -68,7 +68,7 @@ cvInputLabels=${expdir}/cntk_valid.labels
EOF
## training command ##
$cn_gpu configFile=${expdir}/Base.config configFile=${expdir}/CNTK2.config DeviceNumber=0 action=TrainDNN ndlfile=$ndlfile
$cn_gpu configFile=${expdir}/Base.config configFile=${expdir}/CNTK2.cntk DeviceNumber=0 action=TrainDNN ndlfile=$ndlfile
echo "$0 successfuly finished.. $dir"
@ -77,7 +77,7 @@ fi
if [ $stage -le 2 ] ; then
config_write=cntk_config/CNTK2_write.config
config_write=cntk_config/CNTK2_write.cntk
cnmodel=$expdir/cntk.dnn.16
action=write
graphdir=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/exp/$mic/tri4a/graph_ami_fsh.o3g.kn.pr1-7

View file

@ -21,17 +21,17 @@ srcdir=${srcdir%/}
device=-1
alidir=${srcdir}_ali
mkdir -p $alidir/configs
cp -f cntk_config/Align.config $alidir/configs/Align.config
cp -f cntk_config/Align.cntk $alidir/configs/Align.cntk
scripts/align.sh --num-threads 1 --nj 30 --cmd "$decode_cmd" \
--feat-dim $feat_dim --device $device \
--cntk-config $alidir/configs/Align.config \
--cntk-config $alidir/configs/Align.cntk \
$data_fmllr/train data/lang $srcdir $alidir || exit 1;
# Denominator lattices.
device=-1
denlatdir=${srcdir}_denlats
mkdir -p $denlatdir/configs
cp -f cntk_config/Align.config $denlatdir/configs/Decode.config
cp -f cntk_config/Align.cntk $denlatdir/configs/Decode.config
scripts/make_denlats.sh --num-threads 1 --nj 20 --sub-split 15 \
--feat-dim $feat_dim --cmd "$decode_cmd" --acwt $acwt \
--device $device --cntk-config $denlatdir/configs/Decode.config \
@ -44,7 +44,7 @@ scripts/make_denlats.sh --num-threads 1 --nj 20 --sub-split 15 \
device=0
smbrdir=${srcdir}_smbr
mkdir -p $smbrdir/configs
cp -f cntk_config/CNTK2_smbr.config $smbrdir/configs/Train.config
cp -f cntk_config/CNTK2_smbr.cntk $smbrdir/configs/Train.config
cp -f cntk_config/dnn_6layer_smbr.ndl $smbrdir/configs/model.ndl
cp -f cntk_config/default_macros.ndl $smbrdir/configs/default_macros.ndl
scripts/train_sequence.sh --num-threads 1 --cmd "$cuda_cmd" \

View file

@ -105,7 +105,7 @@ echo "$0 create cntk files"
# You can either submit your jobs to your cluser, or run it in your local GPU machine
# $cuda_cmd $dir/log/train_cntk.log \
$cn_gpu configFile=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/cntk_config/CNTK.config modelName=cntkSpeech.dnn DeviceNumber=0 ExpDir=$dir logFile=$logFile labelMapping=$labelMapping ndlfile=$ndlfile inputFeat=$inputFeat trainMLF=$trainMLF labelDim=$labelDim cvInputFeat=$cvInputFeat featDim=600 action=TrainDNN phnLabel=no phnDim=no phnMapping=no inputSCP=no outputSCP=no
$cn_gpu configFile=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/cntk_config/CNTK.cntk modelName=cntkSpeech.dnn DeviceNumber=0 ExpDir=$dir logFile=$logFile labelMapping=$labelMapping ndlfile=$ndlfile inputFeat=$inputFeat trainMLF=$trainMLF labelDim=$labelDim cvInputFeat=$cvInputFeat featDim=600 action=TrainDNN phnLabel=no phnDim=no phnMapping=no inputSCP=no outputSCP=no
fi
if [ $stage -le 6 ] ; then
@ -120,7 +120,7 @@ if [ $stage -le 7 ]; then
cnmodel=$dir/cntkSpeech.dnn.17
graphdir=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/exp/$mic/tri4a/graph_ami_fsh.o3g.kn.pr1-7
class_frame_counts=/exports/work/inf_hcrc_cstr_nst/llu/ami/s5b/exp/ihm/dnn5b_pretrain-dbn_dnn_realign/ali_train_pdf.counts
cntk_string="$cn_gpu configFile=cntk_config/CNTK_write.config DeviceNumber=1 modelName=$cnmodel labelDim=$labelDim featDim=$featDim action=$action"
cntk_string="$cn_gpu configFile=cntk_config/CNTK_write.cntk DeviceNumber=1 modelName=$cnmodel labelDim=$labelDim featDim=$featDim action=$action"
scripts/decode_cntk.sh --nj 16 --cmd "$decode_cmd" --acwt 0.0833 --class-frame-counts "$class_frame_counts" \
$graphdir $data_fmllr/eval $dir/decode_ami_fsh.o3g.kn.pr1-7 "$cntk_string" || exit 1;
fi

View file

@ -13,5 +13,5 @@ $HTK/hcopy.exe -C $HCopyConfig -S $HCopyScript
CNdir=../../../
rm -f Models/TrainSimple.dnn*
time $CNdir/cn.exe configFile=TrainSimpleTimit.config
time $CNdir/cn.exe configFile=TrainSimpleTimit.cntk

View file

@ -7,38 +7,38 @@ Below is a brief description of the examples.
To Use:
=======
Modify the following files:
* globals.config in "configs" to reflect your current experimental setup)
* modify "DeviceNumber" in globals.config to specify CPU (<0) or GPU (>=0)
* globals.cntk in "configs" to reflect your current experimental setup
* modify "DeviceNumber" in globals.cntk to specify CPU (<0) or GPU (>=0)
* all SCP files (lists of files) in "lib/scp" to point to your feature files
Run the command line with both globals.config and the desired config, separated by a +
* for example: cntk configFile=globals.config+TIMIT_TrainSimpleNetwork.config
Run the command line with both globals.cntk and the desired config, separated by a +
* for example: cntk configFile=globals.cntk+TIMIT_TrainSimpleNetwork.cntk
* note that full paths to config files need to be provided if you are not inside the config directory
Path Definitions:
=================
* globals.config [defines paths to feature and label files and experiments]
* globals.cntk [defines paths to feature and label files and experiments]
Network Training Examples:
==========================
* TIMIT_TrainSimpleNetwork.config [train basic feedforward fully connected neural network]
* TIMIT_TrainNDLNetwork.config [train a neural network defined using NDL]
* TIMIT_TrainAdaptLearnRate.config [similar to simple network example, but learning rate adapted based on dev set]
* TIMIT_TrainAutoEncoder.config [train autoencoder with bottleneck layer]
* TIMIT_TrainWithPreTrain.config [pre-train using layerwise discriminative pre-training, then do full network training]
* TIMIT_TrainMultiTask.config [train with multi-task learning with joint prediciton of senone labels and dialect region]
* TIMIT_TrainMultiInput.config [train with 2 different inputs: fbank and mfcc]
* TIMIT_TrainLSTM.config [train single layer LSTM network]
* TIMIT_TrainSimpleNetwork.cntk [train basic feedforward fully connected neural network]
* TIMIT_TrainNDLNetwork.cntk [train a neural network defined using NDL]
* TIMIT_AdaptLearnRate.cntk [similar to simple network example, but learning rate adapted based on dev set]
* TIMIT_TrainAutoEncoder.cntk [train autoencoder with bottleneck layer]
* TIMIT_TrainWithPreTrain.cntk [pre-train using layerwise discriminative pre-training, then do full network training]
* TIMIT_TrainMultiTask.cntk [train with multi-task learning with joint prediction of senone labels and dialect region]
* TIMIT_TrainMultiInput.cntk [train with 2 different inputs: fbank and mfcc]
* TIMIT_TrainLSTM.cntk [train single layer LSTM network]
Network Evaluation Examples:
============================
* TIMIT_CrossValidateSimpleNetwork.config [evaluate the models at all or some epochs and report best performing model]
* TIMIT_EvalSimpleNetwork.config [evaluate a network]
* TIMIT_CrossValidateSimpleNetwork.cntk [evaluate the models at all or some epochs and report best performing model]
* TIMIT_EvalSimpleNetwork.cntk [evaluate a network]
Network Output Writing:
=======================
* TIMIT_WriteBottleneck.config [write bottleneck features from autoencoder model]
* TIMIT_WriteScaledLogLike.config [write scaled likelihoods from simple model]
* TIMIT_WriteBottleneck.cntk [write bottleneck features from autoencoder model]
* TIMIT_WriteScaledLogLike.cntk [write scaled likelihoods from simple model]
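All of the configs above are launched with the same globals.cntk + config pattern; a minimal sketch reusing the evaluation config listed above:

* cntk configFile=globals.cntk+TIMIT_EvalSimpleNetwork.cntk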
Network Description Language (NDL) & Model Editing Language (MEL) files for experiments:
=======================================================================================

View file

@ -7,16 +7,16 @@ Below is a brief description of the examples.
To Use:
=======
Modify the following files:
* globals.config in "configs" to reflect your current experimental setup)
* modify "DeviceNumber" in globals.config to specify CPU (<0) or GPU (>=0)
* globals.cntk in "configs" to reflect your current experimental setup
* modify "DeviceNumber" in globals.cntk to specify CPU (<0) or GPU (>=0)
* all SCP files (lists of files) in "lib/scp" to point to your feature files
Run the command line with both globals.config and the desired config, separated by a +
* for example: cntk configFile=globals.config+rnnlu.config
Run the command line with both globals.cntk and the desired config, separated by a +
* for example: cntk configFile=globals.cntk+rnnlu.cntk
* note that full paths to config files need to be provided if you are not inside the config directory
* for example
* C:\dev\cntk5\x64\release\CNTK.exe configFile=C:\dev\cntk5\ExampleSetups\SLU\globals.config+C:\dev\cntk5\ExampleSetups\SLU\rnnlu.config
* C:\dev\cntk5\x64\release\CNTK.exe configFile=C:\dev\cntk5\ExampleSetups\SLU\globals.cntk+C:\dev\cntk5\ExampleSetups\SLU\rnnlu.cntk
Scoring
* ./score.sh
@ -25,7 +25,7 @@ Scoring
Path Definitions:
=================
* globals.config [defines paths to feature and label files and experiments]
* globals.cntk [defines paths to feature and label files and experiments]
Check training loss
==========================
@ -38,7 +38,7 @@ Finished Epoch[1]: [Validation Set] Train Loss Per Sample = 0.2035009 EvalErr P
--------------------------------------------------------------
Network Training Examples:
==========================
* rnnlu.config
* rnnlu.cntk
# iter 10, learning rate 0.1
accuracy: 98.01%; precision: 93.75%; recall: 94.04%; FB1: 93.89

View file

@ -1,4 +1,4 @@
cn.exe configFile=global.config+rnnlm.gpu.config train=[SGD=[momentumPerMB=0.9]] ExpFolder=c:\exp\gpurnnlm
cn.exe configFile=global.cntk+rnnlm.gpu.cntk train=[SGD=[momentumPerMB=0.9]] ExpFolder=c:\exp\gpurnnlm
Training phase:
Finished Epoch[1]: [Training Set] Train Loss Per Sample = 6.168982 EvalErr Per Sample = 6.168982 Ave Learn Rate Per Sample = 0.009999999776 Epoch Time=535.896

View file

@ -39,12 +39,12 @@ or prefix the call to the cntk executable with the corresponding folder.
Run the example from the Text/Data folder using:
`cntk configFile=../Config/rnn.config`
`cntk configFile=../Config/rnn.cntk`
or run from any folder and specify the Data folder as the `currentDirectory`,
e.g. running from the Text folder using:
`cntk configFile=Config/rnn.config currentDirectory=Data`
`cntk configFile=Config/rnn.cntk currentDirectory=Data`
The output folder will be created inside Text/.
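As a further illustration of the same mechanism (paths assumed to follow the usual Examples/Text/PennTreebank layout), the run could also be launched from the repository root:
`cntk configFile=Examples/Text/PennTreebank/Config/rnn.cntk currentDirectory=Examples/Text/PennTreebank/Data`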

View file

@ -181,7 +181,7 @@ TIMIT_AddLayer = new EditAction [
sample
------
// This sample is a modification of the original TIMIT_TrainSimpleNetwork.config and TIMIT_TrainNDLNetwork.config.
// This sample is a modification of the original TIMIT_TrainSimpleNetwork.cntk and TIMIT_TrainNDLNetwork.cntk.
// The changes compared to the original syntax are called out in comments.
stderr = ExpDir + "\TrainSimpleNetwork\log\log" // before: $ExpDir$\TrainSimpleNetwork\log\log
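The same concatenation applies to any other path built from ExpDir; a hypothetical extra line (not part of the original sample), shown only to illustrate the syntax change:
modelPath = ExpDir + "\TrainSimpleNetwork\model\cntkSpeech.dnn" // before: $ExpDir$\TrainSimpleNetwork\model\cntkSpeech.dnn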

View file

@ -16,5 +16,5 @@ else
imageLayout=cudnn
fi
cntkrun 01_OneHidden.config "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=100]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?
cntkrun 01_OneHidden.cntk "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=100]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?
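For orientation, each bracketed override addresses a nested section of 01_OneHidden.cntk by name. A hedged sketch of the shape of the section being overridden (the parameter names come from the override string above; the surrounding structure is assumed, not copied from the actual config):
    train = [
        SGD = [
            maxEpochs = 1    # forced to 1 by the cntkrun line above
            epochSize = 100  # forced to 100 by the cntkrun line above
        ]
        reader = [
            file = "Train.txt"   # the test repoints this at $CleanDataDir/Train.txt
            randomize = none     # disabled for reproducible test runs
        ]
    ]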

View file

@ -16,4 +16,4 @@ else
imageLayout=cudnn
fi
cntkrun 02_Convolution.config "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=128]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?
cntkrun 02_Convolution.cntk "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=128]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?

View file

@ -16,5 +16,5 @@ else
imageLayout=cudnn
fi
cntkrun 03_ConvBatchNorm.config "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=128]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?
cntkrun 03_ConvBatchNorm.cntk "train=[reader=[file=$CleanDataDir/Train.txt]] test=[reader=[file=$CleanDataDir/Test.txt]] train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=128]] train=[reader=[randomize=none]] imageLayout=\"$imageLayout\"" || exit $?

View file

@ -5,5 +5,5 @@
ConfigDir=$TEST_DIR/../../../../../../Examples/Other/Simple2d/Config
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun Multigpu.config "Multigpu_Demo_Train=[SGD=[maxEpochs=1]] Multigpu_Demo_Train=[SGD=[epochSize=100]] Multigpu_Demo_Train=[reader=[randomize=none]]" || exit $?
cntkrun Multigpu.cntk "Multigpu_Demo_Train=[SGD=[maxEpochs=1]] Multigpu_Demo_Train=[SGD=[epochSize=100]] Multigpu_Demo_Train=[reader=[randomize=none]]" || exit $?

View file

@ -5,5 +5,5 @@
ConfigDir=$TEST_DIR/../../../../../../Examples/Other/Simple2d/Config
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun Simple.config "Simple_Demo_Train=[SGD=[maxEpochs=1]] Simple_Demo_Train=[SGD=[epochSize=100]] Simple_Demo_Train=[reader=[randomize=none]]" || exit $?
cntkrun Simple.cntk "Simple_Demo_Train=[SGD=[maxEpochs=1]] Simple_Demo_Train=[SGD=[epochSize=100]] Simple_Demo_Train=[reader=[randomize=none]]" || exit $?

View file

@ -5,5 +5,5 @@
ConfigDir=$TEST_DIR/../../../../../../Examples/Speech/AN4/Config
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun FeedForward.config "speechTrain=[SGD=[maxEpochs=1]] speechTrain=[SGD=[epochSize=2048]]" || exit $?
cntkrun FeedForward.cntk "speechTrain=[SGD=[maxEpochs=1]] speechTrain=[SGD=[epochSize=2048]]" || exit $?

View file

@ -5,5 +5,5 @@
ConfigDir=$TEST_DIR/../../../../../../Examples/Speech/AN4/Config
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun LSTM-NDL.config "speechTrain=[SGD=[maxEpochs=1]] speechTrain=[SGD=[epochSize=64]] parallelTrain=false" || exit $?
cntkrun LSTM-NDL.cntk "speechTrain=[SGD=[maxEpochs=1]] speechTrain=[SGD=[epochSize=64]] parallelTrain=false" || exit $?

View file

@ -5,5 +5,5 @@
ConfigDir=$TEST_DIR/../../../../../../Examples/Text/PennTreebank/Config
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun rnn.config "train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=2048]] trainFile=ptb.small.train.txt validFile=ptb.small.valid.txt testFile=ptb.small.test.txt confVocabSize=1000" || exit $?
cntkrun rnn.cntk "train=[SGD=[maxEpochs=1]] train=[SGD=[epochSize=2048]] trainFile=ptb.small.train.txt validFile=ptb.small.valid.txt testFile=ptb.small.test.txt confVocabSize=1000" || exit $?

View file

@ -3,10 +3,10 @@
. $TEST_ROOT_DIR/run-test-common
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun cntk.config || exit $?
cntkrun cntk.cntk || exit $?
echo === Deleting last epoch data
rm $TEST_RUN_DIR/models/*.dnn
echo ==== Re-running from checkpoint
DeleteExistingModels=0
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun cntk.config 'makeMode=true' || exit $?
cntkrun cntk.cntk 'makeMode=true' || exit $?
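(Note on the re-run: makeMode=true, as we understand the flag, puts CNTK in a make-like mode that resumes from the latest surviving checkpoint instead of retraining from scratch, so deleting the last epoch's model file and re-running exercises checkpoint recovery.)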

View file

@ -8,7 +8,7 @@ Instances=4
NumCPUThreads=$(threadsPerInstance $Instances)
# cntkmpirun <MPI args> <CNTK config file name> <additional CNTK args>
cntkmpirun "-n $Instances" SimpleMultiGPU.config "numCPUThreads=$NumCPUThreads precision=double SimpleMultiGPU=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=64]]]]"
cntkmpirun "-n $Instances" SimpleMultiGPU.cntk "numCPUThreads=$NumCPUThreads precision=double SimpleMultiGPU=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=64]]]]"
ExitCode=$?
sed 's/^/MPI Rank 0: /' $TEST_RUN_DIR/"$LogFileName"_SimpleMultiGPU.logrank0
sed 's/^/MPI Rank 1: /' $TEST_RUN_DIR/"$LogFileName"_SimpleMultiGPU.logrank1
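The override string drills into the ParallelTrain block of the SimpleMultiGPU command. A hedged sketch of the config section it adjusts (section names are taken from the override string; parallelizationMethod is assumed from CNTK's data-parallel SGD settings):
    SimpleMultiGPU = [
        SGD = [
            ParallelTrain = [
                parallelizationMethod = DataParallelSGD
                DataParallelSGD = [
                    gradientBits = 64   # effectively unquantized at double precision; the float run below uses 32
                ]
            ]
        ]
    ]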

View file

@ -8,7 +8,7 @@ Instances=4
NumCPUThreads=$(threadsPerInstance $Instances)
# cntkmpirun <MPI args> <CNTK config file name> <additional CNTK args>
cntkmpirun "-n $Instances" SimpleMultiGPU.config "numCPUThreads=$NumCPUThreads precision=float SimpleMultiGPU=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=32]]]]"
cntkmpirun "-n $Instances" SimpleMultiGPU.cntk "numCPUThreads=$NumCPUThreads precision=float SimpleMultiGPU=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=32]]]]"
ExitCode=$?
sed 's/^/MPI Rank 0: /' $TEST_RUN_DIR/"$LogFileName"_SimpleMultiGPU.logrank0
sed 's/^/MPI Rank 1: /' $TEST_RUN_DIR/"$LogFileName"_SimpleMultiGPU.logrank1

View file

@ -1,4 +1,4 @@
C:\dev\cntk3\CNTKSolution\x64\Release\cntk configFile=globals.config+rnnlu.config
C:\dev\cntk3\CNTKSolution\x64\Release\cntk configFile=globals.cntk+rnnlu.cntk
# expected results, a copy of which is kept at Expected.log:
@ -11,6 +11,6 @@ Finished Epoch[3]: [Training Set] Train Loss Per Sample = 3.6568716 EvalErr P
Finished Epoch[3]: [Validation Set] Train Loss Per Sample = 2.6959986 EvalErr Per Sample = 2.6959986
del /q c:\temp\exp\atis
C:\dev\cntk3\CNTKSolution\x64\Release\cntk configFile=globals.config+rnnlu.ndl.config
C:\dev\cntk3\CNTKSolution\x64\Release\cntk configFile=globals.cntk+rnnlu.ndl.cntk
# should have the same output as the simple network builder run above.

View file

@ -3,4 +3,4 @@
. $TEST_ROOT_DIR/run-test-common
# cntkrun <CNTK config file name> <additional CNTK args>
cntkrun cntk_dpt.config || exit $?
cntkrun cntk_dpt.cntk || exit $?

View file

@ -8,7 +8,7 @@ Instances=3
NumCPUThreads=$(threadsPerInstance $Instances)
# cntkmpirun <MPI args> <CNTK config file name> <additional CNTK args>
cntkmpirun "-n $Instances" cntk.config "numCPUThreads=$NumCPUThreads precision=double speechTrain=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=1]]]] speechTrain=[SGD=[ParallelTrain=[parallelizationStartEpoch=2]]]"
cntkmpirun "-n $Instances" cntk.cntk "numCPUThreads=$NumCPUThreads precision=double speechTrain=[SGD=[ParallelTrain=[DataParallelSGD=[gradientBits=1]]]] speechTrain=[SGD=[ParallelTrain=[parallelizationStartEpoch=2]]]"
ExitCode=$?
sed 's/^/MPI Rank 0: /' $TEST_RUN_DIR/"$LogFileName"_speechTrain.logrank0
sed 's/^/MPI Rank 1: /' $TEST_RUN_DIR/"$LogFileName"_speechTrain.logrank1

Some files were not shown because too many files changed in this diff.