Merge branch 'master' of https://github.com/Microsoft/CNTK into amitaga/cntkv2Library

Conflicts:
	CNTK.sln
Amit Agarwal 2016-06-12 09:14:03 -07:00
Parents 26c2006541 32869348c2
Commit e875771784
458 changed files: 793580 additions and 155349 deletions

CNTK.sln (367 changed lines)

@@ -121,6 +121,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{5F733BBA-F
Tests\EndToEndTests\Speech\Data\glob_0000.mlf = Tests\EndToEndTests\Speech\Data\glob_0000.mlf
Tests\EndToEndTests\Speech\Data\glob_0000.scp = Tests\EndToEndTests\Speech\Data\glob_0000.scp
Tests\EndToEndTests\Speech\Data\README.txt = Tests\EndToEndTests\Speech\Data\README.txt
Tests\EndToEndTests\Speech\Data\SimpleDataTest_cntk_text.txt = Tests\EndToEndTests\Speech\Data\SimpleDataTest_cntk_text.txt
Tests\EndToEndTests\Speech\Data\SimpleDataTrain_cntk_text.txt = Tests\EndToEndTests\Speech\Data\SimpleDataTrain_cntk_text.txt
Tests\EndToEndTests\Speech\Data\state.list = Tests\EndToEndTests\Speech\Data\state.list
EndProjectSection
EndProject
@@ -142,8 +144,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelTraining", "Paralle
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{6D1353D6-F196-466F-B886-F16D48759B20}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain.txt
Tests\EndToEndTests\ParallelTraining\Data\SimpleMapping.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleMapping.txt
Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain_cntk_text.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain_cntk_text.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NoQuantization", "NoQuantization", "{B6725C9F-A6D2-4269-9B74-7888A90F7884}"
@@ -297,22 +298,16 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Image", "Image", "{8071EF60
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{76F9323D-34A1-43A5-A594-C4798931FF21}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Image\Data\labelsmap.txt = Tests\EndToEndTests\Image\Data\labelsmap.txt
Tests\EndToEndTests\Image\Data\Test.txt = Tests\EndToEndTests\Image\Data\Test.txt
Tests\EndToEndTests\Image\Data\Train.txt = Tests\EndToEndTests\Image\Data\Train.txt
Tests\EndToEndTests\Image\Data\Test_cntk_text.txt = Tests\EndToEndTests\Image\Data\Test_cntk_text.txt
Tests\EndToEndTests\Image\Data\Train_cntk_text.txt = Tests\EndToEndTests\Image\Data\Train_cntk_text.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{2A884EB5-037C-481E-8170-BCDC8B3EDD93}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.txt
Tests\EndToEndTests\Image\QuickE2E\cntk.cntk = Tests\EndToEndTests\Image\QuickE2E\cntk.cntk
Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl = Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl
Tests\EndToEndTests\Image\QuickE2E\Macros.ndl = Tests\EndToEndTests\Image\QuickE2E\Macros.ndl
Tests\EndToEndTests\Image\QuickE2E\README.txt = Tests\EndToEndTests\Image\QuickE2E\README.txt
Tests\EndToEndTests\Image\QuickE2E\run-test = Tests\EndToEndTests\Image\QuickE2E\run-test
Tests\EndToEndTests\Image\QuickE2E\testcases.yml = Tests\EndToEndTests\Image\QuickE2E\testcases.yml
EndProjectSection
@@ -330,11 +325,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SVD", "SVD", "{669B6203-967
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{81AE014F-DD63-47C7-B6E2-DB1D2833DCD1}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\Simple\baseline.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\Simple\cntk.cntk = Tests\EndToEndTests\Speech\Simple\cntk.cntk
Tests\EndToEndTests\Speech\Simple\README.txt = Tests\EndToEndTests\Speech\Simple\README.txt
Tests\EndToEndTests\Speech\Simple\run-test = Tests\EndToEndTests\Speech\Simple\run-test
Tests\EndToEndTests\Speech\Simple\testcases.yml = Tests\EndToEndTests\Speech\Simple\testcases.yml
EndProjectSection
@@ -354,6 +350,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ReaderTests", "Tests\UnitTe
ProjectSection(ProjectDependencies) = postProject
{9BD0A711-0BBD-45B6-B81C-053F03C26CFB} = {9BD0A711-0BBD-45B6-B81C-053F03C26CFB}
{33D2FD22-DEF2-4507-A58A-368F641AEBE5} = {33D2FD22-DEF2-4507-A58A-368F641AEBE5}
{7B7A563D-AA8E-4660-A805-D50235A02120} = {7B7A563D-AA8E-4660-A805-D50235A02120}
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
{91973E60-A7BE-4C86-8FDB-59C88A0B3715} = {91973E60-A7BE-4C86-8FDB-59C88A0B3715}
@@ -366,6 +363,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalDll", "Source\EvalDll\E
{928ABD1B-4D3B-4017-AEF1-0FA1B4467513} = {928ABD1B-4D3B-4017-AEF1-0FA1B4467513}
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{EB2BE26F-6BD4-4274-971F-86D080779DD1} = {EB2BE26F-6BD4-4274-971F-86D080779DD1}
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
{EAD17188-072C-4726-B840-A769C36DAD1B} = {EAD17188-072C-4726-B840-A769C36DAD1B}
EndProjectSection
EndProject
@@ -425,12 +423,6 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "UCIFastReader", "Source\Rea
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalTest", "Tests\UnitTests\EvalTest\EvalTest.vcxproj", "{731312A8-6DA3-4841-AFCD-57520BA1BF8E}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MathPerformanceTests", "Tests\UnitTests\MathPerformanceTests\MathPerformanceTests.vcxproj", "{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
@@ -710,13 +702,14 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{439BE0E0-F
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MNIST", "MNIST", "{63C6816D-66BF-487E-B541-094142C8272B}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\run-test-common = Tests\EndToEndTests\Examples\Image\MNIST\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_Convolution", "02_Convolution", "{6F1D0CE1-0F18-4B4C-9581-1F2146C8D300}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\run-test = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\run-test
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\testcases.yml
EndProjectSection
@@ -725,6 +718,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_OneHidden", "01_OneHidde
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\run-test = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\run-test
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\testcases.yml
EndProjectSection
@@ -733,6 +727,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "03_ConvBatchNorm", "03_Conv
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\run-test = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\run-test
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml
EndProjectSection
@@ -743,28 +738,22 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple2d", "Simple2d", "{D4
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MultiGpu", "MultiGpu", "{C86A6572-DE7A-4EBB-ADD0-A6C4906D46A3}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\README.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\README.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\run-test = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\run-test
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\testcases.yml = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{E330CA6B-5954-4EBA-9C64-6058494E338A}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\README.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\README.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\run-test = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\run-test
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\testcases.yml = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\testcases.yml
EndProjectSection
@@ -817,7 +806,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CNTKTextFormatReader", "Sou
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ExperimentalHTKMLFReader", "Source\Readers\ExperimentalHTKMLFReader\ExperimentalHTKMLFReader.vcxproj", "{7B7A51ED-AA8E-4660-A805-D50235A02120}"
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "HTKDeserializers", "Source\Readers\HTKDeserializers\HTKDeserializers.vcxproj", "{7B7A51ED-AA8E-4660-A805-D50235A02120}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
@@ -892,11 +881,15 @@ EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{8629430A-821E-43BA-AEC5-8B2CF31A2A7A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CIFAR-10", "CIFAR-10", "{0141526B-F257-4574-8CBE-99634726FFCE}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\run-test-common = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_Convolution", "01_Convolution", "{58286327-6742-44C4-A34E-D2583419E55E}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml
EndProjectSection
@@ -905,6 +898,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_BatchNormConv", "02_Batc
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml
EndProjectSection
@@ -977,186 +971,6 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CompositeDataReader", "Sour
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CNTKTextFormatReader", "CNTKTextFormatReader", "{99FAAACE-C360-43CF-B706-20621F164484}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Examples", "Examples", "{629761D1-7A05-409A-B62B-FC1CCC0D6EED}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Image", "Image", "{D4302516-C77F-4FAF-82FB-18DB39F5A53B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelTraining", "ParallelTraining", "{06BE675D-80DD-419A-8E00-26953EF11F25}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\SimpleMultiGPU.cntk = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\SimpleMultiGPU.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Speech", "Speech", "{5642F047-490B-4ABD-8113-8563C872B39F}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Image", "Image", "{2B6CCAB6-A92A-483C-9FDB-8412FA4DC42F}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Other", "Other", "{225F5A3A-7CAF-4C71-9143-3AD2AC4D47A3}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MNIST", "MNIST", "{EBD36FD9-FE5B-420E-A572-DC6117300DB3}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\run-test-common = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{08D284FA-2914-4B35-A89C-896DBA2B4484}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CIFAR-10", "CIFAR-10", "{95FAC6A0-6AE7-4947-9DFD-498FE71311AD}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\run-test-common = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{A877E526-89C1-422E-9F90-4DDE84135A36}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\01_Conv.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\01_Conv.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\02_BatchNormConv.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\02_BatchNormConv.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\05_ConvLocal.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\Config\05_ConvLocal.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_Convolution", "01_Convolution", "{071D8449-D080-4141-869D-600CC3C2A0BE}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_BatchNormConv", "02_BatchNormConv", "{D3A74C52-BC74-4DA3-BE93-8F4241D54EE0}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "05_ConvLocal", "05_ConvLocal", "{EC466625-BC66-41DF-B55A-EB28AFABE24E}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_OneHidden", "01_OneHidden", "{34D578DB-0101-45C4-9DF0-37DE9AB87C65}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\01_OneHidden\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_Convolution", "02_Convolution", "{1FE04815-E02E-498C-B276-6D058D46D754}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\02_Convolution\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "03_ConvBatchNorm", "03_ConvBatchNorm", "{2A125ED5-9C8A-4BDF-A200-862104289608}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{E9207003-B860-4D57-B2CA-09AF52FF191F}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\01_OneHidden.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\01_OneHidden.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\01_OneHidden.ndl = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\01_OneHidden.ndl
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\02_Convolution.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\02_Convolution.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\02_Convolution.ndl = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\02_Convolution.ndl
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\03_ConvBatchNorm.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\03_ConvBatchNorm.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\03_ConvBatchNorm.ndl = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\03_ConvBatchNorm.ndl
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\Macros.ndl = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Image\MNIST\Config\Macros.ndl
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple2d", "Simple2d", "{50420947-E502-40B4-8739-2C0BADD93BEE}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MultiGpu", "MultiGpu", "{935E5A95-888D-4922-AB5A-E9C11D65E974}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\MultiGpu\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{773313DD-69DD-463F-ADC9-E8A902A5223C}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Simple\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{C8E2EF3B-CCBF-4BDD-8127-2252626FB22B}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Config\Multigpu.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Config\Multigpu.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Config\Simple.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Examples\Other\Simple2d\Config\Simple.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{A4F79A83-DE30-40FA-88F4-86304C89AC7F}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\Image_QuickE2E.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\Image_QuickE2E.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{CC47AF62-2558-455F-81CB-36901AF033B0}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\Speech_Simple.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\Speech_Simple.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NoQuantization", "NoQuantization", "{1BA5209D-3EB6-48E7-BE8A-0622315070C0}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{AA14A8DB-669D-447B-A97F-8B726BF30188}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\Data\SimpleDataTrain.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\Data\SimpleDataTrain.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SinglePrecision", "SinglePrecision", "{CA248859-AA91-47D6-AC05-3542AB27E290}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\run-test = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\run-test
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DoublePrecision", "DoublePrecision", "{8B6E9318-5ED0-49BF-945B-072E0D90A886}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\run-test = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\run-test
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SparseDSSM", "SparseDSSM", "{1FB54750-B668-4AC3-966F-ED504020AC06}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Text\SparseDSSM\baseline.cpu.txt = Tests\EndToEndTests\Text\SparseDSSM\baseline.cpu.txt
@@ -1237,17 +1051,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelBM", "ParallelBM",
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SequenceToSequence", "SequenceToSequence", "{A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{85A05261-41D0-41DF-80B5-ADB6ABB54632}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "G2P", "G2P", "{4AD12278-9705-4BBA-B2C3-D6D5856AADC3}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\Miscellaneous\G2P\G2P.cntk = Examples\SequenceToSequence\Miscellaneous\G2P\G2P.cntk
Examples\SequenceToSequence\Miscellaneous\G2P\README.txt = Examples\SequenceToSequence\Miscellaneous\G2P\README.txt
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CPPEvalClient", "Source\Extensibility\CPPEvalClient\CPPEvalClient.vcxproj", "{578D52A0-3928-4405-A016-F016E8B49031}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ExperimentalHtkmlfReader", "ExperimentalHtkmlfReader", "{977ECCB7-598D-4548-B95B-BACA9CC7D98B}"
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "HtkDeserializers", "HtkDeserializers", "{977ECCB7-598D-4548-B95B-BACA9CC7D98B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DNN", "DNN", "{1DBB2575-F5C8-43F4-B982-D05D6ADC2F9B}"
EndProject
@@ -1259,17 +1065,50 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SVD", "SVD", "{BA6A65C5-92A
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "FullUtterance", "FullUtterance", "{3BDF52CD-7F3C-42BC-AB78-CF5BBC5F4AB4}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.cpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.cpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.gpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.gpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\cntk.cntk = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\cntk.cntk
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\run-test = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\run-test
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\testcases.yml = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\testcases.yml
Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.cpu.txt = Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.cpu.txt
Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.gpu.txt = Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.gpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\cntk.cntk = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\cntk.cntk
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\run-test = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\run-test
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\testcases.yml = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Truncated", "Truncated", "{1141DC61-E014-4DEC-9157-F6B1FC055C7A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CMUDict", "CMUDict", "{EC7298E3-AAA9-4672-941F-0B342C494CB3}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\README.md = Examples\SequenceToSequence\CMUDict\README.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{ECED747C-86D7-4009-B2A9-0525FE5DF4EB}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\Config\G2P.cntk = Examples\SequenceToSequence\CMUDict\Config\G2P.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{25E144C1-0B7C-4CD4-811A-2E9F4943120D}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test.txt
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21 = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21.txt
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21 = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21.txt
Examples\SequenceToSequence\CMUDict\Data\README.txt = Examples\SequenceToSequence\CMUDict\Data\README.txt
Examples\SequenceToSequence\CMUDict\Data\ThirdPartyNotice.md = Examples\SequenceToSequence\CMUDict\Data\ThirdPartyNotice.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "05_ConvLocal", "05_ConvLocal", "{3F77BF79-E0D3-4D60-8685-5A449F164081}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalTests", "Tests\UnitTests\EvalTests\EvalTests.vcxproj", "{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
@@ -1277,6 +1116,12 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalTests", "Tests\UnitTest
{482999D1-B7E2-466E-9F8D-2119F93EAFD9} = {482999D1-B7E2-466E-9F8D-2119F93EAFD9}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CommandEval", "Tests\UnitTests\CommandEval\CommandEval.vcxproj", "{731312A8-6DA3-4841-AFCD-57520BA1BF8E}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CNTKv2LibraryDll", "Source\CNTKv2LibraryDll\CNTKv2LibraryDll.vcxproj", "{E5606ECE-48CA-4464-BB12-09D81D02B9EF}"
ProjectSection(ProjectDependencies) = postProject
{928ABD1B-4D3B-4017-AEF1-0FA1B4467513} = {928ABD1B-4D3B-4017-AEF1-0FA1B4467513}
@@ -1441,14 +1286,6 @@ Global
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release|x64.ActiveCfg = Release|x64
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release|x64.Build.0 = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.ActiveCfg = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.Build.0 = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.ActiveCfg = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.ActiveCfg = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.Build.0 = Release|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug|x64.ActiveCfg = Debug|x64
@@ -1561,6 +1398,14 @@ Global
{F4CC3AB2-0DB2-4281-929A-2E68E30F0F6E}.Release_CpuOnly|x64.Build.0 = Release|x64
{F4CC3AB2-0DB2-4281-929A-2E68E30F0F6E}.Release|x64.ActiveCfg = Release|x64
{F4CC3AB2-0DB2-4281-929A-2E68E30F0F6E}.Release|x64.Build.0 = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.ActiveCfg = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.Build.0 = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.ActiveCfg = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.ActiveCfg = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -1617,7 +1462,6 @@ Global
{62836DC1-DF77-4B98-BF2D-45C943B7DDC6} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{CE429AA2-3778-4619-8FD1-49BA3B81197B} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{E6646FFE-3588-4276-8A15-8D65C22711C1} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{731312A8-6DA3-4841-AFCD-57520BA1BF8E} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{6E565B48-1923-49CE-9787-9BBB9D96F4C5} = {D45DF403-6781-444E-B654-A96868C5BE68}
{3BF59CCE-D245-420A-9F17-73CE61E284C2} = {6E565B48-1923-49CE-9787-9BBB9D96F4C5}
@@ -1694,34 +1538,6 @@ Global
{181664AC-4C95-4798-A923-09B879215B33} = {8656B71D-E24C-4AC2-8BE4-C07B415A3E15}
{86883653-8A61-4038-81A0-2379FAE4200A} = {DD043083-71A4-409A-AA91-F9C548DCF7EC}
{7B7A563D-AA8E-4660-A805-D50235A02120} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{99FAAACE-C360-43CF-B706-20621F164484} = {6E565B48-1923-49CE-9787-9BBB9D96F4C5}
{629761D1-7A05-409A-B62B-FC1CCC0D6EED} = {99FAAACE-C360-43CF-B706-20621F164484}
{D4302516-C77F-4FAF-82FB-18DB39F5A53B} = {99FAAACE-C360-43CF-B706-20621F164484}
{06BE675D-80DD-419A-8E00-26953EF11F25} = {99FAAACE-C360-43CF-B706-20621F164484}
{5642F047-490B-4ABD-8113-8563C872B39F} = {99FAAACE-C360-43CF-B706-20621F164484}
{2B6CCAB6-A92A-483C-9FDB-8412FA4DC42F} = {629761D1-7A05-409A-B62B-FC1CCC0D6EED}
{225F5A3A-7CAF-4C71-9143-3AD2AC4D47A3} = {629761D1-7A05-409A-B62B-FC1CCC0D6EED}
{EBD36FD9-FE5B-420E-A572-DC6117300DB3} = {2B6CCAB6-A92A-483C-9FDB-8412FA4DC42F}
{08D284FA-2914-4B35-A89C-896DBA2B4484} = {2B6CCAB6-A92A-483C-9FDB-8412FA4DC42F}
{95FAC6A0-6AE7-4947-9DFD-498FE71311AD} = {08D284FA-2914-4B35-A89C-896DBA2B4484}
{A877E526-89C1-422E-9F90-4DDE84135A36} = {95FAC6A0-6AE7-4947-9DFD-498FE71311AD}
{071D8449-D080-4141-869D-600CC3C2A0BE} = {95FAC6A0-6AE7-4947-9DFD-498FE71311AD}
{D3A74C52-BC74-4DA3-BE93-8F4241D54EE0} = {95FAC6A0-6AE7-4947-9DFD-498FE71311AD}
{EC466625-BC66-41DF-B55A-EB28AFABE24E} = {95FAC6A0-6AE7-4947-9DFD-498FE71311AD}
{34D578DB-0101-45C4-9DF0-37DE9AB87C65} = {EBD36FD9-FE5B-420E-A572-DC6117300DB3}
{1FE04815-E02E-498C-B276-6D058D46D754} = {EBD36FD9-FE5B-420E-A572-DC6117300DB3}
{2A125ED5-9C8A-4BDF-A200-862104289608} = {EBD36FD9-FE5B-420E-A572-DC6117300DB3}
{E9207003-B860-4D57-B2CA-09AF52FF191F} = {EBD36FD9-FE5B-420E-A572-DC6117300DB3}
{50420947-E502-40B4-8739-2C0BADD93BEE} = {225F5A3A-7CAF-4C71-9143-3AD2AC4D47A3}
{935E5A95-888D-4922-AB5A-E9C11D65E974} = {50420947-E502-40B4-8739-2C0BADD93BEE}
{773313DD-69DD-463F-ADC9-E8A902A5223C} = {50420947-E502-40B4-8739-2C0BADD93BEE}
{C8E2EF3B-CCBF-4BDD-8127-2252626FB22B} = {50420947-E502-40B4-8739-2C0BADD93BEE}
{A4F79A83-DE30-40FA-88F4-86304C89AC7F} = {D4302516-C77F-4FAF-82FB-18DB39F5A53B}
{CC47AF62-2558-455F-81CB-36901AF033B0} = {5642F047-490B-4ABD-8113-8563C872B39F}
{1BA5209D-3EB6-48E7-BE8A-0622315070C0} = {06BE675D-80DD-419A-8E00-26953EF11F25}
{AA14A8DB-669D-447B-A97F-8B726BF30188} = {06BE675D-80DD-419A-8E00-26953EF11F25}
{CA248859-AA91-47D6-AC05-3542AB27E290} = {1BA5209D-3EB6-48E7-BE8A-0622315070C0}
{8B6E9318-5ED0-49BF-945B-072E0D90A886} = {1BA5209D-3EB6-48E7-BE8A-0622315070C0}
{1FB54750-B668-4AC3-966F-ED504020AC06} = {8656B71D-E24C-4AC2-8BE4-C07B415A3E15}
{3E9BD61F-1F0A-4966-BE17-803AEFD1DFA4} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{5560DDD4-1E6E-4F41-B9BD-F52A19DF0B31} = {6994C86D-A672-4254-824A-51F4DFEB807F}
@@ -1730,8 +1546,6 @@ Global
{4D6F731C-4A6D-4E21-AC3C-9E1F26E5547E} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{36C42845-0D48-4A46-9C67-2B593A80A09C} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26} = {47755F2E-D674-4175-9E38-8EA053455072}
{85A05261-41D0-41DF-80B5-ADB6ABB54632} = {A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}
{4AD12278-9705-4BBA-B2C3-D6D5856AADC3} = {85A05261-41D0-41DF-80B5-ADB6ABB54632}
{578D52A0-3928-4405-A016-F016E8B49031} = {60F87E25-BC87-4782-8E20-1621AAEBB113}
{977ECCB7-598D-4548-B95B-BACA9CC7D98B} = {C47CDAA5-6D6C-429E-BC89-7CA0F868FDC8}
{1DBB2575-F5C8-43F4-B982-D05D6ADC2F9B} = {977ECCB7-598D-4548-B95B-BACA9CC7D98B}
@@ -1740,8 +1554,13 @@ Global
{BA6A65C5-92A2-4040-ADC3-0727A45694F6} = {977ECCB7-598D-4548-B95B-BACA9CC7D98B}
{3BDF52CD-7F3C-42BC-AB78-CF5BBC5F4AB4} = {772A0DB3-4710-4281-8AA9-A9F1F7C543D3}
{1141DC61-E014-4DEC-9157-F6B1FC055C7A} = {772A0DB3-4710-4281-8AA9-A9F1F7C543D3}
{EC7298E3-AAA9-4672-941F-0B342C494CB3} = {A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}
{ECED747C-86D7-4009-B2A9-0525FE5DF4EB} = {EC7298E3-AAA9-4672-941F-0B342C494CB3}
{25E144C1-0B7C-4CD4-811A-2E9F4943120D} = {EC7298E3-AAA9-4672-941F-0B342C494CB3}
{3F77BF79-E0D3-4D60-8685-5A449F164081} = {0141526B-F257-4574-8CBE-99634726FFCE}
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{E5606ECE-48CA-4464-BB12-09D81D02B9EF} = {DD043083-71A4-409A-AA91-F9C548DCF7EC}
{F4CC3AB2-0DB2-4281-929A-2E68E30F0F6E} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{731312A8-6DA3-4841-AFCD-57520BA1BF8E} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
EndGlobalSection
EndGlobal

View file

@@ -50,17 +50,28 @@ def loadLabels(src, cimg):
os.remove(gzfname)
return res.reshape((cimg, 1))
def load(dataSrc, labelsSrc, cimg):
data = loadData(dataSrc, cimg)
labels = loadLabels(labelsSrc, cimg)
return np.hstack((data, labels))
def savetxt(filename, ndarray):
with open(filename, 'w') as f:
labels = map(' '.join, np.eye(10, dtype=np.uint).astype(str))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
trnData = loadData('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz', 60000)
trnLbl = loadLabels('http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
trn = np.hstack((trnLbl, trnData))
train = load('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
print 'Writing train text file...'
np.savetxt(r'./../Data/Train-28x28.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Train-28x28_cntk_text.txt', train)
print 'Done.'
testData = loadData('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz', 10000)
testLbl = loadLabels('http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
test = np.hstack((testLbl, testData))
test = load('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
print 'Writing test text file...'
np.savetxt(r'./../Data/Test-28x28.txt', test, fmt = '%u', delimiter='\t')
print 'Done.'
savetxt(r'./../Data/Test-28x28_cntk_text.txt', test)
print 'Done.'
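For reference, each line that savetxt above writes to the *_cntk_text.txt files pairs a one-hot 10-value |labels stream with the 784 raw pixel values in the |features stream; a hypothetical digit-5 sample would look roughly like this (pixel values abbreviated):
|labels 0 0 0 0 0 1 0 0 0 0 |features 0 0 0 124 253 ... 0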

View file

@ -1,3 +1,4 @@
import sys
import urllib.request
import gzip
import os
@ -49,16 +50,28 @@ def loadLabels(src, cimg):
return res.reshape((cimg, 1))
def load(dataSrc, labelsSrc, cimg):
data = loadData(dataSrc, cimg)
labels = loadLabels(labelsSrc, cimg)
return np.hstack((data, labels))
def savetxt(filename, ndarray):
with open(filename, 'w', encoding="ascii") as f:
labels = list(map(' '.join, np.eye(10, dtype=np.uint).astype(str)))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
trnData = loadData('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz', 60000)
trnLbl = loadLabels('http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
trn = np.hstack((trnLbl, trnData))
train = load('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
print ('Writing train text file...')
np.savetxt(r'./../Data/Train-28x28.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Train-28x28_cntk_text.txt', train)
print ('Done.')
testData = loadData('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz', 10000)
testLbl = loadLabels('http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
test = np.hstack((testLbl, testData))
test = load('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
print ('Writing test text file...')
np.savetxt(r'./../Data/Test-28x28.txt', test, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Test-28x28_cntk_text.txt', test)
print ('Done.')

View file

@ -2,31 +2,26 @@
# for example: cntk configFile=myConfigFile RootDir=../..
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder>
RootDir = ".."
rootDir = ".."
ConfigDir = "$RootDir$/Config"
DataDir = "$RootDir$/Data"
OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
configDir = "$rootDir$/Config"
dataDir = "$rootDir$/Data"
outputDir = "$rootDir$/Output"
modelDir = "$outputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# override the above as follows when running on CPU:
# deviceId = -1
command = train:test
precision = "float"
modelPath = "$ModelDir$/01_OneHidden"
ndlMacros = "$ConfigDir$/Macros.ndl"
modelPath = "$modelDir$/01_OneHidden"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/01_OneHidden_out"
traceLevel=1
numMBsToShowResult=500
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# uncomment the following line to write logs to a file
# stderr = "$outputDir$/01_OneHidden_out"
traceLevel = 1
numMBsToShowResult = 500
#######################################
# TRAINING CONFIG #
@ -35,37 +30,74 @@ initOnCPUOnly=true
train = [
action = "train"
# BrainScript version as described in Tutorial II.
# This is currently disabled. To run this, please remove the "_disabled" from "BrainScriptNetworkBuilder_disabled"
# and comment out the NDLNetworkBuilder below.
BrainScriptNetworkBuilder_disabled = [
# macros to include
include "Shared.bs"
featDim = 28 * 28 # number of pixels
labelDim = 10 # number of distinct labels
features = Input (featDim)
featScaled = Constant (1.0 / 256.0) .* features
labels = Input (labelDim)
hiddenDim = 200
# DNNSigmoidLayer and DNNLayer are defined in Shared.bs
h1 = DNNSigmoidLayer (featDim, hiddenDim, featScaled, 1)
z = DNNLayer (hiddenDim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax (labels, z)
errs = ErrorPrediction (labels, z)
# set top5Errs as an evaluation node to compute the top-5 error rate
# This is not marked tag="evaluation" since expensive during training.
# We explicitly select it as an output node in the "test" command.
top5Errs = ErrorPrediction (labels, z, topN=5)
# declare special nodes
featureNodes = (features)
labelNodes = (labels)
criterionNodes = (ce)
evaluationNodes = (errs)
outputNodes = (z)
]
# deprecated NDL version
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly = true
ndlMacros = "$configDir$/Macros.ndl"
networkDescription = "$ConfigDir$/01_OneHidden.ndl"
]
SGD = [
epochSize = 60000
minibatchSize = 32
learningRatesPerMB = 0.1
momentumPerMB = 0
learningRatesPerSample = 0.003125
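# (note: 0.003125 per sample presumably corresponds to the former learningRatesPerMB = 0.1 with minibatchSize = 32, i.e. 0.1 / 32 = 0.003125)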
momentumAsTimeConstant = 0
maxEpochs = 30
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See README.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
# See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]
#######################################
@ -74,22 +106,22 @@ train = [
test = [
action = "test"
minibatchSize = 16
minibatchSize = 1024
evalNodeNames = ce:errs:top5Errs
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]

View file

@ -20,17 +20,15 @@ DNN = [
# DNNSigmoidLayer and DNNLayer are defined in Macros.ndl
h1 = DNNSigmoidLayer(featDim, hiddenDim, featScaled, 1)
ol = DNNLayer(hiddenDim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
top5Errs = ErrorPrediction(labels, ol, Const(5), tag="eval") # only used in testing
# Special Nodes
# errTop1 can be used to compute, for example, top-5 error by changing Const(1) to Const(5).
errTop1 = ErrorPrediction(labels, ol, Const(1), tag="eval")
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

View file

@ -1,36 +1,27 @@
# Parameters can be overwritten on the command line
# for example: cntk configFile=myConfigFile RootDir=../..
# for example: cntk configFile=myConfigFile rootDir=../..
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder>
RootDir = ".."
rootDir = ".."
ConfigDir = "$RootDir$/Config"
DataDir = "$RootDir$/Data"
OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
configDir = "$rootDir$/Config"
dataDir = "$rootDir$/Data"
outputDir = "$rootDir$/Output"
modelDir = "$outputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# Override the above as follows when running on CPU:
# deviceId = -1
# Note: Compared to GPU, this runs very slowly.
command = train:test
precision = "float"
modelPath = "$ModelDir$/02_Convolution"
ndlMacros = "$ConfigDir$/Macros.ndl"
modelPath = "$modelDir$/02_Convolution"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/02_Convolution_out"
traceLevel=1
numMBsToShowResult=500
# Note: turn off prefetching; known to crash UCIFastReader occasionally.
prefetch=false
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# uncomment the following line to write logs to a file
# stderr = "$outputDir$/02_Convolution_out"
traceLevel = 1
numMBsToShowResult = 500
#######################################
# TRAINING CONFIG #
@ -40,34 +31,35 @@ train = [
action = "train"
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly = true
ndlMacros = "$configDir$/Macros.ndl"
networkDescription = "$ConfigDir$/02_Convolution.ndl"
]
SGD = [
epochSize = 60000
minibatchSize = 32
#learningRatesPerSample = 0.003125 # TODO
#momentumAsTimeConstant = 0
learningRatesPerMB = 0.1*5:0.3
momentumPerMB = 0*10:0.7
maxEpochs = 15
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See README.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -78,26 +70,20 @@ train = [
test = [
action = test
minibatchSize = 16
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/02_Convolution.ndl"
]
minibatchSize = 1024
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

View file

@ -58,13 +58,13 @@ DNN=[
ol = DNNLayer(h1Dim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
# Special Nodes
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

View file

@ -10,7 +10,6 @@ OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# Note: Batch normalization training on CPU is not yet implemented.
# When it is, override the above as follows when running on CPU:
# deviceId = -1
@ -19,7 +18,6 @@ command = train:test
precision = "float"
modelPath = "$ModelDir$/03_ConvBatchNorm"
ndlMacros = "$ConfigDir$/Macros.ndl"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/03_ConvBatchNorm_out"
@ -28,7 +26,6 @@ traceLevel=1
numMBsToShowResult=500
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
#######################################
# TRAINING CONFIG #
@ -38,6 +35,9 @@ train = [
action = "train"
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly=true
ndlMacros = "$ConfigDir$/Macros.ndl"
networkDescription = "$ConfigDir$/03_ConvBatchNorm.ndl"
]
@ -47,29 +47,24 @@ train = [
learningRatesPerMB = 0.5:0.1
momentumPerMB = 0.9
maxEpochs = 2
#batchNormalizationTimeConstant=0 # Set through NDL
batchNormalizationBlendTimeConstant=0:1#INF
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See README.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
# See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]
#######################################
@ -78,28 +73,22 @@ train = [
test = [
action = "test"
minibatchSize = 32
minibatchSize = 1024
modelPath=$ModelDir$/03_ConvBatchNorm
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/03_ConvBatchNorm.ndl"
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

View file

@ -64,13 +64,13 @@ DNN = [
ol = DNNLayer(h1Dim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
# Special Nodes
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

View file

@ -0,0 +1,81 @@
# Shared.bs -- macros shared by all MNIST examples
# linear layer (no non-linearity)
DNNLayer (inDim, outDim, x, parmScale) = [
W = Parameter (outDim, inDim, init="uniform", initValueScale=parmScale, initOnCPUOnly=true)
b = Parameter (outDim, 1, init="fixedValue", value=0)
z = W * x + b
].z
# sigmoid layer
DNNSigmoidLayer (inDim, outDim, x, parmScale) = Sigmoid (DNNLayer (inDim, outDim, x, parmScale))
# image sigmoid layer --differs from DNNSigmoidLayer in how dimensions are specified
DNNImageSigmoidLayer (inW, inH, inC, outDim, x, parmScale) = [
W = ImageParameter (outDim, inW, inH, inC, init="uniform", initValueScale=parmScale, initOnCPUOnly=true /* , imageLayout=$imageLayout$*/)
b = Parameter (outDim, 1, init="fixedValue", value=0)
t = Times(W, x)
z = Plus(t, b)
y = Sigmoid(z) # TODO: fix this for 02_
].y
# ReLU layer with batch normalization
# TODO: rename to DNN-
DnnBNReLULayer (inDim, outDim, x, wScale, bValue, scValue, bnTimeConst) = [
W = Parameter (outDim, inDim, init = "gaussian", initValueScale = wScale, initOnCPUOnly=true)
b = Parameter (outDim, 1, init = "fixedValue", value = bValue)
sc = Parameter (outDim, 1, init = "fixedValue", value = scValue)
m = Parameter (outDim, 1, init = "fixedValue", value = 0, learningRateMultiplier = 0)
isd = Parameter (outDim, 1, init = "fixedValue", value = 0, learningRateMultiplier = 0)
t = Times(W, x) # TODO: W * x
bn = BatchNormalization(t, sc, b, m, isd, eval = false, spatial = false, normalizationTimeConstant = bnTimeConst)
y = RectifiedLinear(bn)
].y
# macros to create parameters for convolution --TODO: rename to newConvX()
ConvW (outMap, inWCount, wScale) = Parameter (outMap, inWCount, init="uniform", initValueScale=wScale, initOnCPUOnly=true)
ConvB (outMap, bValue) = ImageParameter (1, 1, outMap, init="fixedValue", value=bValue /* , imageLayout=$imageLayout$*/)
# TODO: find out whether Conv2D is identical to -ND by now, then unify
Conv2D (w, inp, kW, kH, outMap, hStride, vStride) =
Convolution (w, inp, kW, kH, outMap, hStride, vStride, zeroPadding=true /* , imageLayout=$imageLayout$*/)
ConvND (w, inp, kW, kH, inMap, outMap, hStride, vStride) =
Convolution (w, inp, (kW:kH:inMap), mapCount=outMap, stride=(hStride:vStride:inMap), sharing=(true:true:true), autoPadding=(true:true:false), lowerPad=0, upperPad=0 /* , imageLayout=$imageLayout$*/)
Conv2DReLULayer (inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue) = [
w = ConvW (outMap, inWCount, wScale)
b = ConvB (outMap, bValue)
c = Conv2D (w, inp, kW, kH, outMap, hStride, vStride)
out = RectifiedLinear (c + b);
].out
ConvNDReLULayer (inp, kW, kH, inMap, inWCount, outMap, hStride, vStride, wScale, bValue) = [
w = ConvW (outMap, inWCount, wScale)
b = ConvB (outMap, bValue)
c = ConvND (w, inp, kW, kH, inMap, outMap, hStride, vStride)
out = RectifiedLinear (c + b);
].out
ConvBNLayerW (W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, bnTimeConst) = [ # TODO: delete if not needed
b = Parameter(outMap, 1, init="fixedValue", value=bValue)
sc = Parameter(outMap, 1, init="fixedValue", value=scValue)
m = Parameter(outMap, 1, init="fixedValue", value=0, learningRateMultiplier=0)
isd = Parameter(outMap, 1, init="fixedValue", value=0, learningRateMultiplier=0)
c = Convolution(W, inp, kW, kH, outMap, hStride, vStride, zeroPadding=true /* , imageLayout=$imageLayout$*/)
y = BatchNormalization(c, sc, b, m, isd, eval=false, spatial=true, normalizationTimeConstant=bnTimeConst /* , imageLayout=$imageLayout$*/)
].y
ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst) = [
W = LearnableParameter(outMap, inWCount, init=Gaussian, initValueScale=wScale, initOnCPUOnly=true)
c = ConvBNLayerW(W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, bnTimeConst)
].c
ConvBNReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst) = [
c = ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst)
y = RectifiedLinear(c)
].y
MaxNDPooling(inp, kW, kH, hStride, vStride) =
Pooling(inp, "max", (kW:kH:1), stride=(hStride:vStride:1), autoPadding=(true:true:false), lowerPad=0, upperPad=0 /* , imageLayout=$imageLayout$*/)

View file

@ -19,7 +19,7 @@ downloaded and converted by running the following command from the 'AdditionalFi
`python mnist_convert.py`
The script will download all required files and convert them to a CNTK-supported format.
The resulting files (Train-28x28.txt and Test-28x28.txt) will be stored in the 'Data' folder.
The resulting files (Train-28x28_cntk_text.txt and Test-28x28_cntk_text.txt) will be stored in the 'Data' folder.
In case you don't have Python installed, there are 2 options:
1. Download and install latest version of Python 2.7 from: https://www.python.org/downloads/

View file

@ -16,9 +16,6 @@ imageLayout = "cudnn"
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# Note: turn off prefetching; known to crash UCIFastReader occasionally.
prefetch = "false"
command = Train:Test
modelPath = "$ModelDir$/01_Convolution"
@ -45,20 +42,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
# Setting minibatchMode="full" is a temporary fix to prevent an exception until the reader is migrated to the new reader.
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See README.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -69,18 +64,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

View file

@ -16,9 +16,6 @@ imageLayout = "cudnn"
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# Note: turn off prefetching; known to crash UCIFastReader occasionally.
prefetch = "false"
command = Train:Test
stderr = "$OutputDir$/02_BatchNormConv"
@ -44,19 +41,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See README.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -68,18 +64,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = $DataDir$/labelsmap.txt
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

View file

@ -13,9 +13,6 @@ imageLayout = "cudnn"
# override the above as follows when running on CPU:
# deviceId = -1
# Note: turn off prefetching; known to crash UCIFastReader occasionally.
prefetch = "false"
command = Train:Test
modelPath = "$ModelDir$/05_ConvLocal"
@ -41,19 +38,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See README.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -64,18 +60,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

View file

@ -35,7 +35,8 @@ def readBatch(src, outFmt):
print ('Format not supported: ' + outFmt)
usage()
sys.exit(1)
return np.hstack((np.reshape(d['labels'], (len(d['labels']), 1)), feat))
res = np.hstack((feat, np.reshape(d['labels'], (len(d['labels']), 1))))
return res.astype(np.int)
def loadData(src, outFmt):
print ('Downloading ' + src)
@ -47,7 +48,7 @@ def loadData(src, outFmt):
tar.extractall()
print ('Done.')
print ('Preparing train set...')
trn = np.empty((0, NumFeat + 1))
trn = np.empty((0, NumFeat + 1), dtype=np.int)
for i in range(5):
batchName = './cifar-10-batches-py/data_batch_{0}'.format(i + 1)
trn = np.vstack((trn, readBatch(batchName, outFmt)))
@ -83,12 +84,22 @@ def parseCmdOpt(argv):
sys.exit(1)
return fmt
def savetxt(filename, ndarray):
with open(filename, 'w') as f:
labels = map(' '.join, np.eye(10, dtype=np.uint).astype(str))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
fmt = parseCmdOpt(sys.argv[1:])
trn, tst = loadData('http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz', fmt)
print ('Writing train text file...')
np.savetxt(r'./Train.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./Train_cntk_text.txt', trn)
print ('Done.')
print ('Writing test text file...')
np.savetxt(r'./Test.txt', tst, fmt = '%u', delimiter='\t')
savetxt(r'./Test_cntk_text.txt', tst)
print ('Done.')
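Note that readBatch now places the label in the last column (features first), which matches savetxt reading row[-1] as the class index. Each line of the resulting Train_cntk_text.txt / Test_cntk_text.txt would therefore look roughly like this hypothetical class-1 sample (the 3072 pixel values abbreviated):
|labels 0 1 0 0 0 0 0 0 0 0 |features 59 43 50 ... 84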

View file

@ -17,6 +17,7 @@ The following table contains results as well as links to pre-trained models that
| ResNet-18 | 29.57 | 10.41 | [Download](https://www.cntk.ai/resnet/ResNet_18.model)
| ResNet-34 | 27.31 | 8.97 | [Download](https://www.cntk.ai/resnet/ResNet_34.model)
| ResNet-50 | 24.74 | 7.56 | [Download](https://www.cntk.ai/resnet/ResNet_50.model)
| ResNet-152 | 22.57 | 6.44 | [Download](https://www.cntk.ai/resnet/ResNet_152.model)
## Notes
This work is an implementation of ResNets in CNTK. If you are interested in the original implementation of ResNet, follow [this link](https://github.com/KaimingHe/deep-residual-networks).

View file

@ -50,7 +50,7 @@ Multigpu_Demo_Train=[
SGD = [
# epochSize = 0 means epochSize is the size of the training set
epochSize = 0
minibatchSize = 25
minibatchSize = 25
learningRatesPerMB = 0.5:0.2*20:0.1
momentumPerMB = 0.9
dropoutRate = 0.0
@ -70,22 +70,17 @@ Multigpu_Demo_Train=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTrain.txt"
miniBatchMode = "partial"
randomize = "auto"
verbosity = 1
features = [
dim = 2 # two-dimensional input data
start = 0 # Start with first element on line
]
labels = [
start = 2 # Skip two elements
dim = 1 # One label dimension
labelDim = 2 # Two labels possible
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTrain_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -102,20 +97,17 @@ Multigpu_Demo_Test=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -131,20 +123,17 @@ Multigpu_Demo_Output=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]

View file

@ -41,9 +41,9 @@ Simple_Demo_Train = [
]
SGD = [
# epochSize = 0 means epochSize is the size of the training set
# epochSize = 0 means epochSize is the size of the training set
epochSize = 0
minibatchSize = 25
minibatchSize = 25
learningRatesPerMB = 0.5:0.2*20:0.1
momentumPerMB = 0.9
dropoutRate = 0.0
@ -52,22 +52,17 @@ Simple_Demo_Train = [
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTrain.txt"
miniBatchMode = "partial"
randomize = "auto"
verbosity = 1
features = [
dim = 2 # two-dimensional input data
start = 0 # Start with first element on line
]
labels = [
start = 2 # Skip two elements
dim = 1 # One label dimension
labelDim = 2 # Two labels possible
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTrain_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -84,20 +79,17 @@ Simple_Demo_Test = [
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -113,20 +105,17 @@ Simple_Demo_Output=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
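As a sketch of what the CNTKTextFormatReader expects here, each line of SimpleDataTrain_cntk_text.txt / SimpleDataTest_cntk_text.txt carries both dense streams declared above; a hypothetical class-1 sample could look like:
|features -1 -0.5 |labels 0 1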

View file

@ -1,603 +0,0 @@
-1 -1 1
-1 -0.99 1
-1 -0.98 1
-1 -0.97 1
-1 -0.96 1
-1 -0.95 1
-1 -0.94 1
-1 -0.93 1
-1 -0.92 1
-1 -0.91 1
-1 -0.9 1
-1 -0.89 1
-1 -0.88 1
-1 -0.87 1
-1 -0.86 1
-1 -0.85 1
-1 -0.84 1
-1 -0.83 1
-1 -0.82 1
-1 -0.81 1
-1 -0.8 1
-1 -0.79 1
-1 -0.78 1
-1 -0.77 1
-1 -0.76 1
-1 -0.75 1
-1 -0.74 1
-1 -0.73 1
-1 -0.72 1
-1 -0.71 1
-1 -0.7 1
-1 -0.69 1
-1 -0.68 1
-1 -0.67 1
-1 -0.66 1
-1 -0.65 1
-1 -0.64 1
-1 -0.63 1
-1 -0.62 1
-1 -0.61 1
-1 -0.6 1
-1 -0.59 1
-1 -0.58 1
-1 -0.57 1
-1 -0.56 1
-1 -0.55 1
-1 -0.54 1
-1 -0.53 1
-1 -0.52 1
-1 -0.51 1
-1 -0.5 1
-1 -0.49 1
-1 -0.48 1
-1 -0.47 1
-1 -0.46 1
-1 -0.45 1
-1 -0.44 1
-1 -0.43 1
-1 -0.42 1
-1 -0.41 1
-1 -0.4 1
-1 -0.39 1
-1 -0.38 1
-1 -0.37 1
-1 -0.36 1
-1 -0.35 1
-1 -0.34 1
-1 -0.33 1
-1 -0.32 1
-1 -0.31 1
-1 -0.3 1
-1 -0.29 1
-1 -0.28 1
-1 -0.27 1
-1 -0.26 1
-1 -0.25 1
-1 -0.24 1
-1 -0.23 1
-1 -0.22 1
-1 -0.21 1
-1 -0.2 1
-1 -0.19 1
-1 -0.18 1
-1 -0.17 1
-1 -0.16 1
-1 -0.15 1
-1 -0.14 1
-1 -0.13 1
-1 -0.12 1
-1 -0.11 1
-1 -0.1 1
-1 -0.09 0
-1 -0.08 0
-1 -0.07 0
-1 -0.06 0
-1 -0.05 0
-1 -0.04 0
-1 -0.03 0
-1 -0.02 0
-1 -0.01 0
-1 0 0
-1 0.01 0
-1 0.02 0
-1 0.03 0
-1 0.04 0
-1 0.05 0
-1 0.06 0
-1 0.07 0
-1 0.08 0
-1 0.09 0
-1 0.1 0
-1 0.11 0
-1 0.12 0
-1 0.13 0
-1 0.14 0
-1 0.15 0
-1 0.16 0
-1 0.17 0
-1 0.18 0
-1 0.19 0
-1 0.2 0
-1 0.21 0
-1 0.22 0
-1 0.23 0
-1 0.24 0
-1 0.25 0
-1 0.26 0
-1 0.27 0
-1 0.28 0
-1 0.29 0
-1 0.3 0
-1 0.31 0
-1 0.32 0
-1 0.33 0
-1 0.34 0
-1 0.35 0
-1 0.36 0
-1 0.37 0
-1 0.38 0
-1 0.39 0
-1 0.4 0
-1 0.41 0
-1 0.42 0
-1 0.43 0
-1 0.44 0
-1 0.45 0
-1 0.46 0
-1 0.47 0
-1 0.48 0
-1 0.49 0
-1 0.5 0
-1 0.51 0
-1 0.52 0
-1 0.53 0
-1 0.54 0
-1 0.55 0
-1 0.56 0
-1 0.57 0
-1 0.58 0
-1 0.59 0
-1 0.6 0
-1 0.61 0
-1 0.62 0
-1 0.63 0
-1 0.64 0
-1 0.65 0
-1 0.66 0
-1 0.67 0
-1 0.68 0
-1 0.69 0
-1 0.7 0
-1 0.71 0
-1 0.72 0
-1 0.73 0
-1 0.74 0
-1 0.75 0
-1 0.76 0
-1 0.77 0
-1 0.78 0
-1 0.79 0
-1 0.8 0
-1 0.81 0
-1 0.82 0
-1 0.83 0
-1 0.84 0
-1 0.85 0
-1 0.86 0
-1 0.87 0
-1 0.88 0
-1 0.89 0
-1 0.9 0
-1 0.91 0
-1 0.92 0
-1 0.93 0
-1 0.94 0
-1 0.95 0
-1 0.96 0
-1 0.97 0
-1 0.98 0
-1 0.99 0
-1 0 0
0 -1 1
0 -0.99 1
0 -0.98 1
0 -0.97 1
0 -0.96 1
0 -0.95 1
0 -0.94 1
0 -0.93 1
0 -0.92 1
0 -0.91 1
0 -0.9 1
0 -0.89 1
0 -0.88 1
0 -0.87 1
0 -0.86 1
0 -0.85 1
0 -0.84 1
0 -0.83 1
0 -0.82 1
0 -0.81 1
0 -0.8 1
0 -0.79 1
0 -0.78 1
0 -0.77 1
0 -0.76 1
0 -0.75 1
0 -0.74 1
0 -0.73 1
0 -0.72 1
0 -0.71 1
0 -0.7 1
0 -0.69 1
0 -0.68 1
0 -0.67 1
0 -0.66 1
0 -0.65 1
0 -0.64 1
0 -0.63 1
0 -0.62 1
0 -0.61 1
0 -0.6 1
0 -0.59 1
0 -0.58 1
0 -0.57 1
0 -0.56 1
0 -0.55 1
0 -0.54 1
0 -0.53 1
0 -0.52 1
0 -0.51 1
0 -0.5 1
0 -0.49 1
0 -0.48 1
0 -0.47 1
0 -0.46 1
0 -0.45 1
0 -0.44 1
0 -0.43 1
0 -0.42 1
0 -0.41 1
0 -0.4 1
0 -0.39 1
0 -0.38 1
0 -0.37 1
0 -0.36 1
0 -0.35 1
0 -0.34 1
0 -0.33 1
0 -0.32 1
0 -0.31 1
0 -0.3 1
0 -0.29 1
0 -0.28 1
0 -0.27 1
0 -0.26 1
0 -0.25 1
0 -0.24 1
0 -0.23 1
0 -0.22 1
0 -0.21 1
0 -0.2 1
0 -0.19 1
0 -0.18 1
0 -0.17 1
0 -0.16 1
0 -0.15 1
0 -0.14 1
0 -0.13 1
0 -0.12 1
0 -0.11 1
0 -0.1 1
0 -0.09 0
0 -0.08 0
0 -0.07 0
0 -0.06 0
0 -0.05 0
0 -0.04 0
0 -0.03 0
0 -0.02 0
0 -0.01 0
0 0 0
0 0.01 0
0 0.02 0
0 0.03 0
0 0.04 0
0 0.05 0
0 0.06 0
0 0.07 0
0 0.08 0
0 0.09 0
0 0.1 0
0 0.11 0
0 0.12 0
0 0.13 0
0 0.14 0
0 0.15 0
0 0.16 0
0 0.17 0
0 0.18 0
0 0.19 0
0 0.2 0
0 0.21 0
0 0.22 0
0 0.23 0
0 0.24 0
0 0.25 0
0 0.26 0
0 0.27 0
0 0.28 0
0 0.29 0
0 0.3 0
0 0.31 0
0 0.32 0
0 0.33 0
0 0.34 0
0 0.35 0
0 0.36 0
0 0.37 0
0 0.38 0
0 0.39 0
0 0.4 0
0 0.41 0
0 0.42 0
0 0.43 0
0 0.44 0
0 0.45 0
0 0.46 0
0 0.47 0
0 0.48 0
0 0.49 0
0 0.5 0
0 0.51 0
0 0.52 0
0 0.53 0
0 0.54 0
0 0.55 0
0 0.56 0
0 0.57 0
0 0.58 0
0 0.59 0
0 0.6 0
0 0.61 0
0 0.62 0
0 0.63 0
0 0.64 0
0 0.65 0
0 0.66 0
0 0.67 0
0 0.68 0
0 0.69 0
0 0.7 0
0 0.71 0
0 0.72 0
0 0.73 0
0 0.74 0
0 0.75 0
0 0.76 0
0 0.77 0
0 0.78 0
0 0.79 0
0 0.8 0
0 0.81 0
0 0.82 0
0 0.83 0
0 0.84 0
0 0.85 0
0 0.86 0
0 0.87 0
0 0.88 0
0 0.89 0
0 0.9 0
0 0.91 0
0 0.92 0
0 0.93 0
0 0.94 0
0 0.95 0
0 0.96 0
0 0.97 0
0 0.98 0
0 0.99 0
0 1 0
1 -1 1
1 -0.99 1
1 -0.98 1
1 -0.97 1
1 -0.96 1
1 -0.95 1
1 -0.94 1
1 -0.93 1
1 -0.92 1
1 -0.91 1
1 -0.9 1
1 -0.89 1
1 -0.88 1
1 -0.87 1
1 -0.86 1
1 -0.85 1
1 -0.84 1
1 -0.83 1
1 -0.82 1
1 -0.81 1
1 -0.8 1
1 -0.79 1
1 -0.78 1
1 -0.77 1
1 -0.76 1
1 -0.75 1
1 -0.74 1
1 -0.73 1
1 -0.72 1
1 -0.71 1
1 -0.7 1
1 -0.69 1
1 -0.68 1
1 -0.67 1
1 -0.66 1
1 -0.65 1
1 -0.64 1
1 -0.63 1
1 -0.62 1
1 -0.61 1
1 -0.6 1
1 -0.59 1
1 -0.58 1
1 -0.57 1
1 -0.56 1
1 -0.55 1
1 -0.54 1
1 -0.53 1
1 -0.52 1
1 -0.51 1
1 -0.5 1
1 -0.49 1
1 -0.48 1
1 -0.47 1
1 -0.46 1
1 -0.45 1
1 -0.44 1
1 -0.43 1
1 -0.42 1
1 -0.41 1
1 -0.4 1
1 -0.39 1
1 -0.38 1
1 -0.37 1
1 -0.36 1
1 -0.35 1
1 -0.34 1
1 -0.33 1
1 -0.32 1
1 -0.31 1
1 -0.3 1
1 -0.29 1
1 -0.28 1
1 -0.27 1
1 -0.26 1
1 -0.25 1
1 -0.24 1
1 -0.23 1
1 -0.22 1
1 -0.21 1
1 -0.2 1
1 -0.19 1
1 -0.18 1
1 -0.17 1
1 -0.16 1
1 -0.15 1
1 -0.14 1
1 -0.13 1
1 -0.12 1
1 -0.11 1
1 -0.1 1
1 -0.09 1
1 -0.08 1
1 -0.07 1
1 -0.06 1
1 -0.05 1
1 -0.04 1
1 -0.03 1
1 -0.02 1
1 -0.01 1
1 0 0
1 0.01 0
1 0.02 0
1 0.03 0
1 0.04 0
1 0.05 0
1 0.06 0
1 0.07 0
1 0.08 0
1 0.09 0
1 0.1 0
1 0.11 0
1 0.12 0
1 0.13 0
1 0.14 0
1 0.15 0
1 0.16 0
1 0.17 0
1 0.18 0
1 0.19 0
1 0.2 0
1 0.21 0
1 0.22 0
1 0.23 0
1 0.24 0
1 0.25 0
1 0.26 0
1 0.27 0
1 0.28 0
1 0.29 0
1 0.3 0
1 0.31 0
1 0.32 0
1 0.33 0
1 0.34 0
1 0.35 0
1 0.36 0
1 0.37 0
1 0.38 0
1 0.39 0
1 0.4 0
1 0.41 0
1 0.42 0
1 0.43 0
1 0.44 0
1 0.45 0
1 0.46 0
1 0.47 0
1 0.48 0
1 0.49 0
1 0.5 0
1 0.51 0
1 0.52 0
1 0.53 0
1 0.54 0
1 0.55 0
1 0.56 0
1 0.57 0
1 0.58 0
1 0.59 0
1 0.6 0
1 0.61 0
1 0.62 0
1 0.63 0
1 0.64 0
1 0.65 0
1 0.66 0
1 0.67 0
1 0.68 0
1 0.69 0
1 0.7 0
1 0.71 0
1 0.72 0
1 0.73 0
1 0.74 0
1 0.75 0
1 0.76 0
1 0.77 0
1 0.78 0
1 0.79 0
1 0.8 0
1 0.81 0
1 0.82 0
1 0.83 0
1 0.84 0
1 0.85 0
1 0.86 0
1 0.87 0
1 0.88 0
1 0.89 0
1 0.9 0
1 0.91 0
1 0.92 0
1 0.93 0
1 0.94 0
1 0.95 0
1 0.96 0
1 0.97 0
1 0.98 0
1 0.99 0
1 1 0

File diff not shown because of its large size.

View file

@ -1,6 +1,9 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# TODO: The new CMUDict data do not match Kaisheng Yao's paper setup (e.g. IH/AH).
# Track down & reconcile before committing this to master.
############################################################################
# G2P.cntk #
# #
@ -10,81 +13,85 @@
# directory defaults (if not overridden)
RunRootDir = "../.." # default if not overridden
RunRootDir = "../.." # default if not overridden
DataDir = "$RunRootDir$/Data"
CacheDir = "$DataDir$/cache" # (not used currently)
ExpRootDir = "$RunRootDir$"
OutDir = "$RunRootDir$/Out"
# command to execute
command = train
#command = write
#command = dump
makeMode = false
makeMode = false # set this to true to enable restarting from a checkpoint
traceLevel = 1
# experiment id
deviceId = 0 # set the GPU device here, or "auto" to auto-select; or override from the command line.
ExpId = g2p-1-$deviceId$ # choose a meaningful id here. This is used for unique directory and filenames.
#ExpId = g2p-1-0 # change to different id when decoding a different model
# directories
ExpDir = "$ExpRootDir$/$ExpId$"
ModelDir = "$ExpDir$/Models"
deviceId = 0 # set the GPU device here, or "auto" to auto-select; or override from the command line.
ExpId = g2p-01-$deviceId$ # choose a meaningful id here. This is used for unique directory and filenames.
#ExpId = g2p-01-0 # change to different id when decoding a different model
stderr = $ExpDir$/G2P
# model
precision = "float"
traceLevel = 1
modelPath = "$ModelDir$/G2P.dnn"
modelPath = "$OutDir$/$ExpId$/G2P.dnn"
stderr = "$OutDir$/$ExpId$/G2P"
# decoding config --used by the "write" command ("write" decodes and writes the result)
beamDepth = 3 # 0=predict; 1=greedy; >1=beam
decodeModel = 9
decodeModelPath = "$modelPath$.$decodeModel$" # note: epoch to decode is appended to the model path
decodeOutputPath = "$decodeModelPath$.$beamDepth$" # results are written next to the model, with beamDepth appended
# dump config --used by the "dump" command, for inspecting the model parameters
dumpModelPath = "$modelPath$.2" # put the epoch id here
# top-level model configuration
hiddenDim = 512
precision = "float"
maxLayer = 2
isBidirectional = false
# comment/uncomment this or the next block to switch between readers
# Note: Currently this configuration cannot reach the same result with CNTKTextFormatReader.
# This is being investigated. For now, please use the LMSequenceReader.
# --- begin uncomment for LMSequenceReader ---
readerType = "LMSequenceReader"
useCNTKTextFormatReader = false
inputVocabSize = 69
labelVocabSize = 69
mbSizes = 144:144:288*5:576
shareEmbeddings = true
fileExt = "joint"
fileExt = "txt"
# --- end uncomment ---
# --- begin uncomment for CNTKTextFormatReader ---
# Note: Currently this configuration cannot reach the same result with CNTKTextFormatReader.
# This is being investigated. For now, please use the LMSequenceReader.
#readerType = "CNTKTextFormatReader"
#useCNTKTextFormatReader = true
#inputVocabSize = 29 # 26 letters plus start, end, apostrophe
#labelVocabSize = 41 # 39 phonemes (~AX missing), plus start and end symbol (in index 0)
#mbSizes = 72:72:144:288 # new reader is based on max(stream lengths) instead of sum(stream lengths)
#shareEmbeddings = false
#fileExt = "ctf"
#fileExt = "bsf.ctf"
# --- end uncomment ---
# corpus
maxLength = 20 # 0 disables attention
isAutoEncoder=false
startSymbol = "<s>"
trainFile = "g014b2b.train-dev-20-21.bsf.$fileExt$"
validFile = "g014b2b.train-dev-1-21.bsf.$fileExt$"
testFile = "g014b2b.test.bsf.$fileExt$"
vocabFile = "g014b2b.wl"
startSymbol = "<s>" # (need to override the default which is </s>)
trainFile = "cmudict-0.7b.train-dev-20-21.$fileExt$"
validFile = "cmudict-0.7b.train-dev-1-21.$fileExt$"
testFile = "cmudict-0.7b.test.$fileExt$"
mappingFile = "cmudict-0.7b.mapping"
# some reader variables that occur multiple times
cntkReaderInputDef = [ rawInput = [ alias = "s" ; dim = $inputVocabSize$ ; format = "sparse" ] ; rawLabels = [ alias = "t" ; dim = $labelVocabSize$ ; format = "sparse" ] ]
lmSequenceReaderInputDef = [ dim = 0 ]
lmSequenceReaderInputLabelsDef = [ dim = 1 ; labelType = "category" ; labelDim = "$inputVocabSize$" ; labelMappingFile = "$DataDir$/$vocabFile$" ; beginSequence = "</s>" ; endSequence = "</s>" ]
cntkReaderInputDef = [ rawInput = [ alias = "s" ; dim = $inputVocabSize$ ; format = "sparse" ] ; rawLabels = [ alias = "t" ; dim = $labelVocabSize$ ; format = "sparse" ] ]
lmSequenceReaderInputDef = [ dim = 0 ]
lmSequenceReaderInputLabelsDef = [ dim = 1 ; labelType = "category" ; labelDim = "$inputVocabSize$" ; labelMappingFile = "$DataDir$/$mappingFile$" ; beginSequence = "</s>" ; endSequence = "</s>" ]
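# As a sketch (not taken from the actual data files), the CNTKTextFormatReader .ctf files would use the sparse aliases declared above, one token per line with hypothetical indices;
# in practice the letter (|s) and phoneme (|t) streams of a sequence need not align line by line:
# 0 |s 5:1 |t 12:1
# 0 |s 17:1 |t 3:1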
#######################################
# network definition #
@ -98,7 +105,7 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
inputVocabDim = $inputVocabSize$
labelVocabDim = $labelVocabSize$
isAutoencoder = $isAutoEncoder$ # input is only one sequence, meant to reproduce itself
isAutoencoder = false # input is only one sequence, meant to reproduce itself (not used for this task)
attentionSpan = $maxLength$ # attention window, must be large enough for largest input sequence. 0 to disable. Exactly 20 is needed for the g2p CMUDict task
useBidirectionalEncoder = $isBidirectional$ # bi-directional LSTM for encoder
@ -161,9 +168,10 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
isFirstLabel = BS.Loop.IsFirst (labelSequence)
#############################################################
# embeddings --as long as we cannot read multiple sequences, we got one embedding
# embeddings
#############################################################
# Note: when reading input and labels from a single text file, we share the token mapping and embedding.
# Note: Embeddings are linear. Should we use BatchNormalization?
# note: this is assumed to be applied transposed, hence the swapped dimensions. Actually--why? Still needed?
@ -183,24 +191,20 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# encoder (processes inputEmbedded)
#############################################################
# TODO: do not reverse our inputs; instead, if needed, use a backwards-running loop here
# Note: We reverse our input by running the recurrence from right to left.
encoderFunction = if useBidirectionalEncoder then BS.RNNs.RecurrentBirectionalLSTMPStack else BS.RNNs.RecurrentLSTMPStack
encoder = encoderFunction (encoderDims, cellDims=encoderDims, S(inputEmbedded), inputDim=inputEmbeddingDim,
previousHook=BS.RNNs.PreviousHC,
previousHook=if useBidirectionalEncoder then BS.RNNs.PreviousHC else BS.RNNs.NextHC,
enableSelfStabilization=useStabilizer)
encoderOutput = encoder[Length (encoderDims)-1]
# There are three ways of passing encoder state:
# 1. as initial state for decoder (Google style)
# 2. as side information for every decoder step (NYU style)
# 3. attention
# get the final encoder state for use as the initial state
# get the final encoder state for use as the initial state (not used with attention model)
# Since we run right-to-left, the final state is the first, not the last.
# For beam decoding, we will also inject a second dimension.
thoughtVector = [
h = ReshapeDimension (BS.Sequences.Last (encoderOutput.h), 1, (dim:1))
c = ReshapeDimension (BS.Sequences.Last (encoderOutput.c), 1, (dim:1))
h = ReshapeDimension (BS.Sequences.First (encoderOutput.h), 1, (dim:1))
c = ReshapeDimension (BS.Sequences.First (encoderOutput.c), 1, (dim:1))
dim = encoderOutput.dim
]
@ -253,6 +257,11 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# decoder
#############################################################
# There are three ways of passing encoder state:
# 1. as initial state for decoder (Google style)
# 2. as side information for every decoder step (NYU style)
# 3. attention
decoderInput = Pass (BS.Boolean.If (isFirstLabel, labelSentenceStartEmbeddedScattered, BS.Loop.Previous (decoderHistoryHook)))
decoderInputDim = labelEmbeddingDim
@ -304,12 +313,6 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# training criteria
#############################################################
#ce = Pass (ReduceLogSum (z) - ReduceSum (labelSequence .* z ), tag='criterion')
#errs = Pass (BS.Constants.One - ReduceSum (labelSequence .* Hardmax (z)), tag='evaluation')
#ce2 = Negate (ReduceSum (labelSequence .* LogSoftmax (z)), tag='evaluation')
#ce1 = CrossEntropyWithSoftmax (labelSequence, z, tag='evaluation') // this is the training objective
#errs = ErrorPrediction (labelSequence, z, tag='evaluation') // this also gets tracked
ce = Pass (ReduceLogSum (z) - TransposeTimes (labelSequence, z), tag='criterion')
errs = Pass (BS.Constants.One - TransposeTimes (labelSequence, Hardmax (z)), tag='evaluation')
@ -340,22 +343,17 @@ train = [
# BrainScriptNetworkBuilder is defined in outer scope
SGD = [
minibatchSize = 144:144:288:576
learningRatesPerSample = 0.007*2:0.0035
minibatchSize = $mbSizes$
learningRatesPerSample = 0.007*2:0.0035 # works well for LMSequenceReader config
momentumAsTimeConstant = 1100
gradientClippingWithTruncation = true # (as opposed to clipping the Frobenius norm of the matrix)
clippingThresholdPerSample = 2.3 # visibly impacts objectives, but not final result, so keep it for safety
maxEpochs = 50
numMBsToShowResult = 100
firstMBsToShowResult = 10
gradUpdateType = "none" # FSAdaGrad?
gradUpdateType = "none" # TODO: Try FSAdaGrad?
loadBestModel = false # true # broken for some models (rereading overwrites something that got set by validation)
# tracing (enable these for debugging)
#traceNodeNamesReal = labelsEmbedded:decoderInput:"decoder[0].lstmState._privateInnards.ht":z.Plus_left.Times_right.result:z:ce
#traceNodeNamesReal = labelsEmbedded:decoderInput:z:ce
#traceNodeNamesCategory = inputSequence.out:labelSequence
dropoutRate = 0.0
# settings for Auto Adjust Learning Rate
@ -461,7 +459,7 @@ write = [
format = [
type = "category"
transpose = false
labelMappingFile = "$DataDir$/$vocabFile$"
labelMappingFile = "$DataDir$/$mappingFile$"
]
minibatchSize = 8192 # choose this to be big enough for the longest sentence

View file

@ -0,0 +1,5 @@
...document source of corpus and post-processing
http://www.speech.cs.cmu.edu/cgi-bin/cmudict
File: http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b

View file

@ -0,0 +1,19 @@
The contents of this folder is based on or incorporates material from the projects listed below. Microsoft is not the original author of the Third Party Code. The original copyright notice and the license under which Microsoft received such Third Party Code, are set forth below. Such licenses and notices are provided for informational purposes only. Microsoft, not the third party, licenses the Third Party Code to you under the terms set forth in the EULA for the Microsoft Product. Microsoft reserves all rights not expressly granted under this agreement, whether by implication, estoppel or otherwise.
Provided for Informational Purposes Only
Carnegie Mellon University Pronouncing Dictionary
Copyright (C) 1993-2015 Carnegie Mellon University. All rights reserved.
BSD License
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ""AS IS"" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

File diff not shown because of its large size.

View file

@ -1,23 +0,0 @@
This example demonstrates the use of CNTK for letter-to-sound conversion using a
sequence-to-sequence model with attention.
The code supports a number of alternative configurations. As configured currently, it implements
* a 3-hidden layer unidirectional LSTM encoder network, all hidden dimensions are 512
* a 3-hidden layer unidirectional LSTM decoder network, all hidden dimensions are 512
* encoder state is passed to the decoder by means of attention, with projection dimension 128 and maximum input length of 20 tokens
* embedding disabled (the vocabulary is very small)
* beam decoder with beam width 3
This example uses the CMUDict as a corpus. The data or a conversion script will be included soon.
To Use:
=======
Modify the following in G2P.cntk:
* pathnames
* deviceId to specify CPU (-1) or GPU (>=0 or "auto")
Run:
* command line: cntk configFile=Examples/SequenceToSequence/Miscellaneous/G2P/G2P.cntk RunRootDir=g2p
* VS Debugger: configFile=$(SolutionDir)Examples/SequenceToSequence/Miscellaneous/G2P/G2P.cntk RunRootDir=$(SolutionDir)g2p

View file

@ -0,0 +1,20 @@
This example demonstrates the use of CNTK for grapheme-to-phoneme (letter-to-sound)
conversion using a sequence-to-sequence model with attention, using the CMUDict dictionary.
The code supports a number of alternative configurations. As configured currently, it implements
* a 3-hidden layer unidirectional LSTM encoder network, all hidden dimensions are 512
* a 3-hidden layer unidirectional LSTM decoder network, all hidden dimensions are 512
* encoder state is passed to the decoder by means of attention, with projection dimension 128 and maximum input length of 20 tokens
* embedding is disabled (because the 'vocabulary' of the task, letters and phonemes, is very small)
* beam decoder with beam width 3
## To Use
Modify the following in G2P.cntk as needed:
* pathnames
* deviceId to specify CPU (-1) or GPU (>=0 or "auto")
Run:
* command line: ``` cntk configFile=Examples/SequenceToSequence/CMUDict/Config/G2P.cntk RunRootDir=g2p```
* VS Debugger: ```configFile=$(SolutionDir)Examples/SequenceToSequence/CMUDict/Config/G2P.cntk RunRootDir=$(SolutionDir)Examples/SequenceToSequence/CMUDict```

View file

@ -54,18 +54,17 @@ TIMIT_TrainMultiInput=[
readerType=HTKMLFReader
readMethod=blockRandomize
# frameMode=true
readMethod=rollingWindow
miniBatchMode=Partial
randomize=Auto
verbosity=0
features1=[
dim=792
scpFile=$ScpDir$/TIMIT.train.scp.fbank.fullpath
scpFile=$ScpDir$/TIMIT.train.scp.fbank.fullpath.rnn
type=Real
]
features2=[
dim=39
scpFile=$ScpDir$/TIMIT.train.scp.mfcc.fullpath
scpFile=$ScpDir$/TIMIT.train.scp.mfcc.fullpath.rnn
type=Real
]
labels=[
@ -75,4 +74,4 @@ TIMIT_TrainMultiInput=[
labelType=Category
]
]
]
]

View file

@ -0,0 +1,192 @@
test-dr1-felc0-si1386.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si1386.fbank_zda[0,549]
test-dr1-felc0-si2016.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si2016.fbank_zda[0,337]
test-dr1-felc0-si756.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si756.fbank_zda[0,416]
test-dr1-felc0-sx126.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx126.fbank_zda[0,288]
test-dr1-felc0-sx216.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx216.fbank_zda[0,217]
test-dr1-felc0-sx306.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx306.fbank_zda[0,247]
test-dr1-felc0-sx36.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx36.fbank_zda[0,349]
test-dr1-felc0-sx396.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx396.fbank_zda[0,379]
test-dr1-mdab0-si1039.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1039.fbank_zda[0,391]
test-dr1-mdab0-si1669.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1669.fbank_zda[0,203]
test-dr1-mdab0-si2299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si2299.fbank_zda[0,257]
test-dr1-mdab0-sx139.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx139.fbank_zda[0,233]
test-dr1-mdab0-sx229.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx229.fbank_zda[0,128]
test-dr1-mdab0-sx319.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx319.fbank_zda[0,241]
test-dr1-mdab0-sx409.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx409.fbank_zda[0,285]
test-dr1-mdab0-sx49.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx49.fbank_zda[0,217]
test-dr1-mwbt0-si1553.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si1553.fbank_zda[0,473]
test-dr1-mwbt0-si2183.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si2183.fbank_zda[0,186]
test-dr1-mwbt0-si923.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si923.fbank_zda[0,293]
test-dr1-mwbt0-sx113.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx113.fbank_zda[0,330]
test-dr1-mwbt0-sx203.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx203.fbank_zda[0,310]
test-dr1-mwbt0-sx23.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx23.fbank_zda[0,314]
test-dr1-mwbt0-sx293.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx293.fbank_zda[0,359]
test-dr1-mwbt0-sx383.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx383.fbank_zda[0,387]
test-dr2-fpas0-si1272.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si1272.fbank_zda[0,183]
test-dr2-fpas0-si2204.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si2204.fbank_zda[0,383]
test-dr2-fpas0-si944.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si944.fbank_zda[0,233]
test-dr2-fpas0-sx134.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx134.fbank_zda[0,291]
test-dr2-fpas0-sx224.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx224.fbank_zda[0,158]
test-dr2-fpas0-sx314.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx314.fbank_zda[0,291]
test-dr2-fpas0-sx404.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx404.fbank_zda[0,275]
test-dr2-fpas0-sx44.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx44.fbank_zda[0,222]
test-dr2-mtas1-si1473.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si1473.fbank_zda[0,144]
test-dr2-mtas1-si2098.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si2098.fbank_zda[0,315]
test-dr2-mtas1-si838.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si838.fbank_zda[0,262]
test-dr2-mtas1-sx118.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx118.fbank_zda[0,279]
test-dr2-mtas1-sx208.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx208.fbank_zda[0,348]
test-dr2-mtas1-sx28.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx28.fbank_zda[0,245]
test-dr2-mtas1-sx298.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx298.fbank_zda[0,315]
test-dr2-mtas1-sx388.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx388.fbank_zda[0,401]
test-dr2-mwew0-si1361.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si1361.fbank_zda[0,252]
test-dr2-mwew0-si1991.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si1991.fbank_zda[0,400]
test-dr2-mwew0-si731.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si731.fbank_zda[0,197]
test-dr2-mwew0-sx101.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx101.fbank_zda[0,391]
test-dr2-mwew0-sx11.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx11.fbank_zda[0,165]
test-dr2-mwew0-sx191.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx191.fbank_zda[0,250]
test-dr2-mwew0-sx281.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx281.fbank_zda[0,331]
test-dr2-mwew0-sx371.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx371.fbank_zda[0,287]
test-dr3-fpkt0-si1538.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si1538.fbank_zda[0,310]
test-dr3-fpkt0-si2168.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si2168.fbank_zda[0,195]
test-dr3-fpkt0-si908.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si908.fbank_zda[0,224]
test-dr3-fpkt0-sx188.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx188.fbank_zda[0,219]
test-dr3-fpkt0-sx278.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx278.fbank_zda[0,318]
test-dr3-fpkt0-sx368.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx368.fbank_zda[0,306]
test-dr3-fpkt0-sx8.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx8.fbank_zda[0,279]
test-dr3-fpkt0-sx98.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx98.fbank_zda[0,182]
test-dr3-mjmp0-si1535.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si1535.fbank_zda[0,232]
test-dr3-mjmp0-si1791.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si1791.fbank_zda[0,465]
test-dr3-mjmp0-si905.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si905.fbank_zda[0,266]
test-dr3-mjmp0-sx185.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx185.fbank_zda[0,209]
test-dr3-mjmp0-sx275.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx275.fbank_zda[0,274]
test-dr3-mjmp0-sx365.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx365.fbank_zda[0,386]
test-dr3-mjmp0-sx5.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx5.fbank_zda[0,157]
test-dr3-mjmp0-sx95.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx95.fbank_zda[0,306]
test-dr3-mlnt0-si1574.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si1574.fbank_zda[0,475]
test-dr3-mlnt0-si1902.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si1902.fbank_zda[0,184]
test-dr3-mlnt0-si642.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si642.fbank_zda[0,406]
test-dr3-mlnt0-sx102.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx102.fbank_zda[0,364]
test-dr3-mlnt0-sx12.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx12.fbank_zda[0,229]
test-dr3-mlnt0-sx192.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx192.fbank_zda[0,230]
test-dr3-mlnt0-sx282.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx282.fbank_zda[0,316]
test-dr3-mlnt0-sx372.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx372.fbank_zda[0,308]
test-dr4-fjlm0-si1043.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si1043.fbank_zda[0,339]
test-dr4-fjlm0-si1673.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si1673.fbank_zda[0,313]
test-dr4-fjlm0-si2303.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si2303.fbank_zda[0,403]
test-dr4-fjlm0-sx143.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx143.fbank_zda[0,337]
test-dr4-fjlm0-sx233.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx233.fbank_zda[0,249]
test-dr4-fjlm0-sx323.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx323.fbank_zda[0,281]
test-dr4-fjlm0-sx413.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx413.fbank_zda[0,337]
test-dr4-fjlm0-sx53.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx53.fbank_zda[0,307]
test-dr4-mlll0-si1363.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si1363.fbank_zda[0,496]
test-dr4-mlll0-si1993.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si1993.fbank_zda[0,245]
test-dr4-mlll0-si733.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si733.fbank_zda[0,432]
test-dr4-mlll0-sx103.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx103.fbank_zda[0,231]
test-dr4-mlll0-sx13.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx13.fbank_zda[0,270]
test-dr4-mlll0-sx193.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx193.fbank_zda[0,346]
test-dr4-mlll0-sx283.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx283.fbank_zda[0,379]
test-dr4-mlll0-sx373.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx373.fbank_zda[0,277]
test-dr4-mtls0-si1370.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si1370.fbank_zda[0,407]
test-dr4-mtls0-si2000.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si2000.fbank_zda[0,284]
test-dr4-mtls0-si740.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si740.fbank_zda[0,215]
test-dr4-mtls0-sx110.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx110.fbank_zda[0,337]
test-dr4-mtls0-sx20.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx20.fbank_zda[0,298]
test-dr4-mtls0-sx200.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx200.fbank_zda[0,251]
test-dr4-mtls0-sx290.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx290.fbank_zda[0,316]
test-dr4-mtls0-sx380.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx380.fbank_zda[0,218]
test-dr5-fnlp0-si1308.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si1308.fbank_zda[0,575]
test-dr5-fnlp0-si1938.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si1938.fbank_zda[0,321]
test-dr5-fnlp0-si678.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si678.fbank_zda[0,202]
test-dr5-fnlp0-sx138.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx138.fbank_zda[0,359]
test-dr5-fnlp0-sx228.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx228.fbank_zda[0,226]
test-dr5-fnlp0-sx318.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx318.fbank_zda[0,370]
test-dr5-fnlp0-sx408.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx408.fbank_zda[0,307]
test-dr5-fnlp0-sx48.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx48.fbank_zda[0,347]
test-dr5-mbpm0-si1577.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si1577.fbank_zda[0,194]
test-dr5-mbpm0-si1584.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si1584.fbank_zda[0,377]
test-dr5-mbpm0-si947.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si947.fbank_zda[0,306]
test-dr5-mbpm0-sx137.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx137.fbank_zda[0,369]
test-dr5-mbpm0-sx227.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx227.fbank_zda[0,174]
test-dr5-mbpm0-sx317.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx317.fbank_zda[0,230]
test-dr5-mbpm0-sx407.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx407.fbank_zda[0,252]
test-dr5-mbpm0-sx47.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx47.fbank_zda[0,213]
test-dr5-mklt0-si1213.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si1213.fbank_zda[0,245]
test-dr5-mklt0-si1843.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si1843.fbank_zda[0,321]
test-dr5-mklt0-si583.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si583.fbank_zda[0,225]
test-dr5-mklt0-sx133.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx133.fbank_zda[0,261]
test-dr5-mklt0-sx223.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx223.fbank_zda[0,217]
test-dr5-mklt0-sx313.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx313.fbank_zda[0,319]
test-dr5-mklt0-sx403.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx403.fbank_zda[0,272]
test-dr5-mklt0-sx43.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx43.fbank_zda[0,199]
test-dr6-fmgd0-si1564.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si1564.fbank_zda[0,441]
test-dr6-fmgd0-si2194.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si2194.fbank_zda[0,469]
test-dr6-fmgd0-si934.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si934.fbank_zda[0,345]
test-dr6-fmgd0-sx124.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx124.fbank_zda[0,372]
test-dr6-fmgd0-sx214.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx214.fbank_zda[0,300]
test-dr6-fmgd0-sx304.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx304.fbank_zda[0,277]
test-dr6-fmgd0-sx34.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx34.fbank_zda[0,233]
test-dr6-fmgd0-sx394.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx394.fbank_zda[0,233]
test-dr6-mcmj0-si1094.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si1094.fbank_zda[0,373]
test-dr6-mcmj0-si464.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si464.fbank_zda[0,386]
test-dr6-mcmj0-si602.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si602.fbank_zda[0,386]
test-dr6-mcmj0-sx104.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx104.fbank_zda[0,195]
test-dr6-mcmj0-sx14.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx14.fbank_zda[0,291]
test-dr6-mcmj0-sx194.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx194.fbank_zda[0,192]
test-dr6-mcmj0-sx284.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx284.fbank_zda[0,362]
test-dr6-mcmj0-sx374.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx374.fbank_zda[0,222]
test-dr6-mjdh0-si1354.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si1354.fbank_zda[0,478]
test-dr6-mjdh0-si1984.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si1984.fbank_zda[0,130]
test-dr6-mjdh0-si724.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si724.fbank_zda[0,254]
test-dr6-mjdh0-sx184.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx184.fbank_zda[0,211]
test-dr6-mjdh0-sx274.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx274.fbank_zda[0,311]
test-dr6-mjdh0-sx364.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx364.fbank_zda[0,439]
test-dr6-mjdh0-sx4.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx4.fbank_zda[0,229]
test-dr6-mjdh0-sx94.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx94.fbank_zda[0,270]
test-dr7-fdhc0-si1559.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si1559.fbank_zda[0,337]
test-dr7-fdhc0-si2189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si2189.fbank_zda[0,222]
test-dr7-fdhc0-si929.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si929.fbank_zda[0,283]
test-dr7-fdhc0-sx119.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx119.fbank_zda[0,300]
test-dr7-fdhc0-sx209.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx209.fbank_zda[0,254]
test-dr7-fdhc0-sx29.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx29.fbank_zda[0,251]
test-dr7-fdhc0-sx299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx299.fbank_zda[0,318]
test-dr7-fdhc0-sx389.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx389.fbank_zda[0,243]
test-dr7-mgrt0-si1450.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si1450.fbank_zda[0,618]
test-dr7-mgrt0-si2080.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si2080.fbank_zda[0,228]
test-dr7-mgrt0-si820.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si820.fbank_zda[0,609]
test-dr7-mgrt0-sx10.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx10.fbank_zda[0,298]
test-dr7-mgrt0-sx100.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx100.fbank_zda[0,490]
test-dr7-mgrt0-sx190.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx190.fbank_zda[0,286]
test-dr7-mgrt0-sx280.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx280.fbank_zda[0,195]
test-dr7-mgrt0-sx370.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx370.fbank_zda[0,333]
test-dr7-mnjm0-si1580.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si1580.fbank_zda[0,297]
test-dr7-mnjm0-si2210.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si2210.fbank_zda[0,201]
test-dr7-mnjm0-si950.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si950.fbank_zda[0,544]
test-dr7-mnjm0-sx140.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx140.fbank_zda[0,287]
test-dr7-mnjm0-sx230.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx230.fbank_zda[0,352]
test-dr7-mnjm0-sx320.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx320.fbank_zda[0,378]
test-dr7-mnjm0-sx410.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx410.fbank_zda[0,272]
test-dr7-mnjm0-sx50.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx50.fbank_zda[0,337]
test-dr8-fmld0-si2185.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si2185.fbank_zda[0,355]
test-dr8-fmld0-si822.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si822.fbank_zda[0,362]
test-dr8-fmld0-si925.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si925.fbank_zda[0,209]
test-dr8-fmld0-sx115.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx115.fbank_zda[0,236]
test-dr8-fmld0-sx205.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx205.fbank_zda[0,304]
test-dr8-fmld0-sx25.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx25.fbank_zda[0,188]
test-dr8-fmld0-sx295.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx295.fbank_zda[0,307]
test-dr8-fmld0-sx385.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx385.fbank_zda[0,188]
test-dr8-mjln0-si1449.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si1449.fbank_zda[0,430]
test-dr8-mjln0-si2079.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si2079.fbank_zda[0,209]
test-dr8-mjln0-si819.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si819.fbank_zda[0,451]
test-dr8-mjln0-sx189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx189.fbank_zda[0,294]
test-dr8-mjln0-sx279.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx279.fbank_zda[0,356]
test-dr8-mjln0-sx369.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx369.fbank_zda[0,286]
test-dr8-mjln0-sx9.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx9.fbank_zda[0,246]
test-dr8-mjln0-sx99.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx99.fbank_zda[0,313]
test-dr8-mpam0-si1189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1189.fbank_zda[0,252]
test-dr8-mpam0-si1819.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1819.fbank_zda[0,283]
test-dr8-mpam0-si1961.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1961.fbank_zda[0,439]
test-dr8-mpam0-sx109.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx109.fbank_zda[0,291]
test-dr8-mpam0-sx19.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx19.fbank_zda[0,234]
test-dr8-mpam0-sx199.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx199.fbank_zda[0,349]
test-dr8-mpam0-sx289.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx289.fbank_zda[0,268]
test-dr8-mpam0-sx379.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx379.fbank_zda[0,249]

View File

@ -0,0 +1,20 @@
test-dr1-felc0-si1386.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si1386.fbank_zda[0,549]
test-dr1-felc0-si2016.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si2016.fbank_zda[0,337]
test-dr1-felc0-si756.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si756.fbank_zda[0,416]
test-dr1-felc0-sx126.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx126.fbank_zda[0,288]
test-dr1-felc0-sx216.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx216.fbank_zda[0,217]
test-dr1-felc0-sx306.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx306.fbank_zda[0,247]
test-dr1-felc0-sx36.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx36.fbank_zda[0,349]
test-dr1-felc0-sx396.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx396.fbank_zda[0,379]
test-dr1-mdab0-si1039.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1039.fbank_zda[0,391]
test-dr1-mdab0-si1669.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1669.fbank_zda[0,203]
test-dr1-mdab0-si2299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si2299.fbank_zda[0,257]
test-dr1-mdab0-sx139.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx139.fbank_zda[0,233]
test-dr1-mdab0-sx229.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx229.fbank_zda[0,128]
test-dr1-mdab0-sx319.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx319.fbank_zda[0,241]
test-dr1-mdab0-sx409.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx409.fbank_zda[0,285]
test-dr1-mdab0-sx49.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx49.fbank_zda[0,217]
test-dr1-mwbt0-si1553.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si1553.fbank_zda[0,473]
test-dr1-mwbt0-si2183.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si2183.fbank_zda[0,186]
test-dr1-mwbt0-si923.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si923.fbank_zda[0,293]
test-dr1-mwbt0-sx113.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx113.fbank_zda[0,330]

Diff is not shown because of its large size. Load Diff

View File

@ -0,0 +1,100 @@
train-dr1-fcjf0-si1027.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si1027.mfcc_zda[0,306]
train-dr1-fcjf0-si1657.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si1657.mfcc_zda[0,281]
train-dr1-fcjf0-si648.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si648.mfcc_zda[0,359]
train-dr1-fcjf0-sx127.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx127.mfcc_zda[0,151]
train-dr1-fcjf0-sx217.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx217.mfcc_zda[0,170]
train-dr1-fcjf0-sx307.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx307.mfcc_zda[0,142]
train-dr1-fcjf0-sx37.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx37.mfcc_zda[0,224]
train-dr1-fcjf0-sx397.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx397.mfcc_zda[0,242]
train-dr1-fdaw0-si1271.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si1271.mfcc_zda[0,483]
train-dr1-fdaw0-si1406.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si1406.mfcc_zda[0,251]
train-dr1-fdaw0-si2036.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si2036.mfcc_zda[0,476]
train-dr1-fdaw0-sx146.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx146.mfcc_zda[0,260]
train-dr1-fdaw0-sx236.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx236.mfcc_zda[0,248]
train-dr1-fdaw0-sx326.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx326.mfcc_zda[0,284]
train-dr1-fdaw0-sx416.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx416.mfcc_zda[0,258]
train-dr1-fdaw0-sx56.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx56.mfcc_zda[0,300]
train-dr1-fdml0-si1149.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si1149.mfcc_zda[0,359]
train-dr1-fdml0-si1779.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si1779.mfcc_zda[0,250]
train-dr1-fdml0-si2075.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si2075.mfcc_zda[0,157]
train-dr1-fdml0-sx159.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx159.mfcc_zda[0,284]
train-dr1-fdml0-sx249.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx249.mfcc_zda[0,197]
train-dr1-fdml0-sx339.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx339.mfcc_zda[0,253]
train-dr1-fdml0-sx429.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx429.mfcc_zda[0,273]
train-dr1-fdml0-sx69.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx69.mfcc_zda[0,254]
train-dr1-fecd0-si1418.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si1418.mfcc_zda[0,554]
train-dr1-fecd0-si2048.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si2048.mfcc_zda[0,257]
train-dr1-fecd0-si788.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si788.mfcc_zda[0,513]
train-dr1-fecd0-sx158.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx158.mfcc_zda[0,254]
train-dr1-fecd0-sx248.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx248.mfcc_zda[0,302]
train-dr1-fecd0-sx338.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx338.mfcc_zda[0,329]
train-dr1-fecd0-sx428.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx428.mfcc_zda[0,364]
train-dr1-fecd0-sx68.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx68.mfcc_zda[0,292]
train-dr1-fetb0-si1148.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si1148.mfcc_zda[0,254]
train-dr1-fetb0-si1778.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si1778.mfcc_zda[0,350]
train-dr1-fetb0-si518.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si518.mfcc_zda[0,345]
train-dr1-fetb0-sx158.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx158.mfcc_zda[0,221]
train-dr1-fetb0-sx248.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx248.mfcc_zda[0,261]
train-dr1-fetb0-sx338.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx338.mfcc_zda[0,316]
train-dr1-fetb0-sx428.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx428.mfcc_zda[0,340]
train-dr1-fetb0-sx68.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx68.mfcc_zda[0,265]
train-dr1-fjsp0-si1434.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si1434.mfcc_zda[0,435]
train-dr1-fjsp0-si1763.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si1763.mfcc_zda[0,121]
train-dr1-fjsp0-si804.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si804.mfcc_zda[0,247]
train-dr1-fjsp0-sx174.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx174.mfcc_zda[0,379]
train-dr1-fjsp0-sx264.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx264.mfcc_zda[0,286]
train-dr1-fjsp0-sx354.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx354.mfcc_zda[0,271]
train-dr1-fjsp0-sx444.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx444.mfcc_zda[0,286]
train-dr1-fjsp0-sx84.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx84.mfcc_zda[0,343]
train-dr1-fkfb0-si1608.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si1608.mfcc_zda[0,474]
train-dr1-fkfb0-si2238.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si2238.mfcc_zda[0,217]
train-dr1-fkfb0-si978.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si978.mfcc_zda[0,353]
train-dr1-fkfb0-sx168.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx168.mfcc_zda[0,232]
train-dr1-fkfb0-sx258.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx258.mfcc_zda[0,340]
train-dr1-fkfb0-sx348.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx348.mfcc_zda[0,368]
train-dr1-fkfb0-sx438.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx438.mfcc_zda[0,635]
train-dr1-fkfb0-sx78.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx78.mfcc_zda[0,244]
train-dr1-fmem0-si1377.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si1377.mfcc_zda[0,384]
train-dr1-fmem0-si2007.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si2007.mfcc_zda[0,352]
train-dr1-fmem0-si747.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si747.mfcc_zda[0,300]
train-dr1-fmem0-sx117.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx117.mfcc_zda[0,320]
train-dr1-fmem0-sx207.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx207.mfcc_zda[0,204]
train-dr1-fmem0-sx297.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx297.mfcc_zda[0,266]
train-dr1-fmem0-sx333.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx333.mfcc_zda[0,511]
train-dr1-fmem0-sx387.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx387.mfcc_zda[0,377]
train-dr1-fsah0-si1244.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si1244.mfcc_zda[0,351]
train-dr1-fsah0-si1874.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si1874.mfcc_zda[0,256]
train-dr1-fsah0-si614.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si614.mfcc_zda[0,465]
train-dr1-fsah0-sx164.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx164.mfcc_zda[0,290]
train-dr1-fsah0-sx327.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx327.mfcc_zda[0,497]
train-dr1-fsah0-sx344.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx344.mfcc_zda[0,202]
train-dr1-fsah0-sx434.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx434.mfcc_zda[0,261]
train-dr1-fsah0-sx74.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx74.mfcc_zda[0,263]
train-dr1-fsjk1-si1025.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si1025.mfcc_zda[0,309]
train-dr1-fsjk1-si2285.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si2285.mfcc_zda[0,196]
train-dr1-fsjk1-si696.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si696.mfcc_zda[0,350]
train-dr1-fsjk1-sx125.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx125.mfcc_zda[0,224]
train-dr1-fsjk1-sx215.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx215.mfcc_zda[0,297]
train-dr1-fsjk1-sx305.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx305.mfcc_zda[0,245]
train-dr1-fsjk1-sx35.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx35.mfcc_zda[0,231]
train-dr1-fsjk1-sx395.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx395.mfcc_zda[0,267]
train-dr1-fsma0-si1621.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si1621.mfcc_zda[0,155]
train-dr1-fsma0-si2251.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si2251.mfcc_zda[0,320]
train-dr1-fsma0-si991.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si991.mfcc_zda[0,446]
train-dr1-fsma0-sx181.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx181.mfcc_zda[0,354]
train-dr1-fsma0-sx271.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx271.mfcc_zda[0,277]
train-dr1-fsma0-sx361.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx361.mfcc_zda[0,261]
train-dr1-fsma0-sx451.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx451.mfcc_zda[0,478]
train-dr1-fsma0-sx91.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx91.mfcc_zda[0,270]
train-dr1-ftbr0-si1402.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si1402.mfcc_zda[0,447]
train-dr1-ftbr0-si2181.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si2181.mfcc_zda[0,243]
train-dr1-ftbr0-si921.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si921.mfcc_zda[0,414]
train-dr1-ftbr0-sx111.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx111.mfcc_zda[0,247]
train-dr1-ftbr0-sx201.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx201.mfcc_zda[0,296]
train-dr1-ftbr0-sx21.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx21.mfcc_zda[0,215]
train-dr1-ftbr0-sx291.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx291.mfcc_zda[0,243]
train-dr1-ftbr0-sx381.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx381.mfcc_zda[0,245]
train-dr1-fvfb0-si1032.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si1032.mfcc_zda[0,279]
train-dr1-fvfb0-si1510.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si1510.mfcc_zda[0,401]
train-dr1-fvfb0-si2292.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si2292.mfcc_zda[0,172]
train-dr1-fvfb0-sx132.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-sx132.mfcc_zda[0,246]

View File

@ -0,0 +1,118 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# 3-class classification with softmax - CNTK script -- BrainScript network description
# which commands to run
command = Train:Output:dumpNodeInfo:Test
# required...
modelPath = "Models/MC.dnn"   # where to write the model to
deviceId = -1                 # -1 means CPU; use 0 for your first GPU, 1 for the second etc.
dimension = 2                 # input data dimensions
labelDimension = 3
# training config
Train = [
    action = "train"
    # network description
    BrainScriptNetworkBuilder = [
        # sample and label dimensions
        SDim = $dimension$
        LDim = $labelDimension$
        features = Input (SDim)
        labels = Input (LDim)
        # parameters to learn
        b = Parameter (LDim, 1)
        w = Parameter (LDim, SDim)
        # operations
        z = w * features + b
        ce = CrossEntropyWithSoftmax (labels, z)
        errs = ErrorPrediction (labels, z)
        # root nodes
        featureNodes = (features)
        labelNodes = (labels)
        criterionNodes = (ce)
        evaluationNodes = (errs)
        outputNodes = (z)
    ]
    # configuration parameters of the SGD procedure
    SGD = [
        epochSize = 0                 # =0 means size of the training set
        minibatchSize = 25
        learningRatesPerSample = 0.04 # gradient contribution from each sample
        maxEpochs = 50
    ]
    # configuration of data reading
    reader = [
        readerType = "CNTKTextFormatReader"
        file = "Train-3Classes_cntk_text.txt"
        input = [
            features = [
                dim = $dimension$
                format = "dense"
            ]
            labels = [
                dim = $labelDimension$ # there are 3 different labels
                format = "dense"
            ]
        ]
    ]
]
# test
Test = [
    action = "test"
    reader = [
        readerType = "CNTKTextFormatReader"
        file = "Test-3Classes_cntk_text.txt"
        input = [
            features = [
                dim = $dimension$
                format = "dense"
            ]
            labels = [
                dim = $labelDimension$ # there are 3 different labels
                format = "dense"
            ]
        ]
    ]
]
# output the results
Output = [
    action = "write"
    reader = [
        readerType = "CNTKTextFormatReader"
        file = "Test-3Classes_cntk_text.txt"
        input = [
            features = [
                dim = $dimension$
                format = "dense"
            ]
            labels = [
                dim = $labelDimension$ # there are 3 different labels
                format = "dense"
            ]
        ]
    ]
    outputPath = "MC.txt"  # dump the output to this text file
]
# dump parameter values
DumpNodeInfo = [
    action = "dumpNode"
    printValues = true
]
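
For reference, the network defined in the Train block above is plain multinomial logistic regression: z = w * features + b fed into a softmax cross-entropy criterion, optimized with minibatch SGD over the CNTK-text-format samples in the data files below. A minimal NumPy sketch of the same forward pass and loss follows (illustrative only; the variable names are hypothetical and this script is not part of the checked-in configuration):

import numpy as np

SDim, LDim = 2, 3                       # dimension / labelDimension from the config above
rng = np.random.default_rng(0)
w = rng.standard_normal((LDim, SDim))   # corresponds to Parameter (LDim, SDim)
b = np.zeros((LDim, 1))                 # corresponds to Parameter (LDim, 1)

def forward(features):
    # z = w * features + b, as in the BrainScript network
    return w @ features + b

def cross_entropy_with_softmax(labels, z):
    # mean over minibatch columns of -sum(labels * log softmax(z))
    z = z - z.max(axis=0, keepdims=True)   # subtract max for numerical stability
    log_softmax = z - np.log(np.exp(z).sum(axis=0, keepdims=True))
    return -(labels * log_softmax).sum(axis=0).mean()

# one sample in the reader's layout, e.g. "|features 5.170871 -1.020567 |labels 0 0 1"
x = np.array([[5.170871], [-1.020567]])
y = np.array([[0.0], [0.0], [1.0]])
print(cross_entropy_with_softmax(y, forward(x)))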

View File

@ -1,2 +1,3 @@
0
1
2

View File

@ -0,0 +1,500 @@
|features 5.170871 -1.020567 |labels 0 0 1
|features 4.327523 -0.815642 |labels 0 0 1
|features 2.678905 0.434324 |labels 0 0 1
|features 7.064864 3.087723 |labels 0 1 0
|features 5.674250 -0.659369 |labels 0 0 1
|features 5.720961 0.614086 |labels 0 0 1
|features 0.817731 0.599577 |labels 1 0 0
|features 4.355816 3.459184 |labels 0 1 0
|features 3.818218 -0.994736 |labels 0 0 1
|features 0.312065 1.951552 |labels 1 0 0
|features 0.741122 -0.148256 |labels 1 0 0
|features 3.429964 -2.544096 |labels 0 0 1
|features 4.642347 1.367120 |labels 0 0 1
|features 4.643550 -0.253437 |labels 0 0 1
|features 2.792871 0.960124 |labels 0 1 0
|features 4.514423 2.196754 |labels 0 0 1
|features -0.666193 -0.032975 |labels 1 0 0
|features 4.810538 3.241911 |labels 0 1 0
|features 3.933512 2.325106 |labels 0 1 0
|features 3.298764 2.599413 |labels 0 1 0
|features 2.145218 0.159719 |labels 1 0 0
|features 5.267208 -0.921221 |labels 0 0 1
|features 1.806416 0.467180 |labels 1 0 0
|features 3.974628 -0.953991 |labels 0 0 1
|features 1.900733 0.460248 |labels 1 0 0
|features -0.507540 -1.202700 |labels 1 0 0
|features 5.731155 2.382330 |labels 0 1 0
|features 4.287938 -0.358895 |labels 0 0 1
|features 4.464944 1.045813 |labels 0 0 1
|features 6.135542 0.311033 |labels 0 0 1
|features 2.230624 3.899346 |labels 0 1 0
|features 1.115160 1.488620 |labels 1 0 0
|features 4.078272 0.648599 |labels 0 0 1
|features 0.946812 -0.586611 |labels 1 0 0
|features 5.043798 1.309346 |labels 0 0 1
|features 4.294848 2.816375 |labels 0 1 0
|features 1.889331 0.041037 |labels 1 0 0
|features 3.259375 2.802361 |labels 0 1 0
|features 1.428610 1.515919 |labels 1 0 0
|features 0.422380 1.107096 |labels 1 0 0
|features 4.158696 3.520900 |labels 0 1 0
|features 2.037615 0.476160 |labels 0 0 1
|features -0.080874 0.684507 |labels 1 0 0
|features 5.304552 3.594974 |labels 0 1 0
|features 4.243805 2.872649 |labels 0 1 0
|features 4.148123 2.440895 |labels 0 1 0
|features 4.693400 3.616597 |labels 0 1 0
|features 6.130004 1.775687 |labels 0 0 1
|features 4.617409 1.618245 |labels 0 0 1
|features 4.337935 3.433942 |labels 0 1 0
|features 1.253423 1.568702 |labels 1 0 0
|features 3.780373 2.457099 |labels 0 0 1
|features 5.223143 1.755579 |labels 0 0 1
|features 2.473481 1.418406 |labels 1 0 0
|features 1.703627 4.378155 |labels 0 1 0
|features 5.434560 4.717314 |labels 0 1 0
|features 5.363743 1.554633 |labels 0 0 1
|features 4.708409 0.266605 |labels 0 0 1
|features 3.527139 3.721405 |labels 0 1 0
|features 4.212410 0.964045 |labels 0 0 1
|features 4.047959 -0.190071 |labels 0 0 1
|features -0.534243 1.178627 |labels 1 0 0
|features 1.476536 -0.159029 |labels 1 0 0
|features 3.346440 2.657206 |labels 0 1 0
|features 3.510718 2.318246 |labels 0 1 0
|features 3.615193 4.605629 |labels 0 1 0
|features 4.832209 0.864395 |labels 0 0 1
|features 4.866461 4.196559 |labels 0 1 0
|features 5.322119 1.825393 |labels 0 0 1
|features 2.902831 -0.095737 |labels 0 0 1
|features 1.840045 2.876644 |labels 0 1 0
|features 0.517413 -0.129987 |labels 1 0 0
|features 2.407253 0.499569 |labels 1 0 0
|features 4.312469 1.582177 |labels 0 0 1
|features 4.995064 0.170178 |labels 0 0 1
|features 4.797494 4.573547 |labels 0 1 0
|features 4.130865 4.410381 |labels 0 1 0
|features 1.105535 1.654471 |labels 1 0 0
|features 5.692636 0.527186 |labels 0 0 1
|features 1.088462 -1.684232 |labels 1 0 0
|features 1.484067 -0.412501 |labels 1 0 0
|features 2.889689 3.690520 |labels 0 1 0
|features 0.511409 -0.004521 |labels 1 0 0
|features 0.947963 -0.651233 |labels 1 0 0
|features 4.363427 2.649857 |labels 0 1 0
|features 3.305984 -0.131774 |labels 0 0 1
|features 5.515684 2.063754 |labels 0 0 1
|features 3.897807 0.829441 |labels 0 0 1
|features 1.382318 0.452016 |labels 1 0 0
|features 2.830457 3.628353 |labels 0 1 0
|features 4.320996 -1.406747 |labels 0 0 1
|features 2.761719 1.222520 |labels 0 0 1
|features 5.451307 0.426850 |labels 0 0 1
|features -0.555044 0.155634 |labels 1 0 0
|features 2.176072 3.696026 |labels 1 0 0
|features 4.240790 3.729701 |labels 0 1 0
|features 3.022974 2.228043 |labels 0 0 1
|features 4.499028 3.498968 |labels 0 1 0
|features 3.882526 -0.643661 |labels 0 0 1
|features 0.416113 -0.179831 |labels 1 0 0
|features 3.748151 1.565968 |labels 0 0 1
|features 3.965157 -0.773285 |labels 0 0 1
|features 5.589582 5.855553 |labels 0 1 0
|features 4.295428 5.171385 |labels 0 1 0
|features 1.138826 0.131605 |labels 1 0 0
|features 4.559609 3.137678 |labels 0 1 0
|features 4.375586 -2.039131 |labels 0 0 1
|features 5.972429 2.140441 |labels 0 1 0
|features 3.785464 0.481651 |labels 0 0 1
|features 4.872444 2.856546 |labels 0 1 0
|features 4.735155 1.706737 |labels 0 0 1
|features 2.564409 1.229335 |labels 0 0 1
|features 4.638684 4.290509 |labels 0 1 0
|features 6.025844 1.859031 |labels 0 1 0
|features 2.991525 3.173522 |labels 0 1 0
|features 1.066838 -2.194139 |labels 1 0 0
|features 3.335534 -0.648720 |labels 0 0 1
|features 5.484550 0.563980 |labels 0 0 1
|features 1.304820 0.038009 |labels 1 0 0
|features 1.890283 -1.570453 |labels 1 0 0
|features 0.794851 1.305584 |labels 1 0 0
|features 3.429155 2.523924 |labels 0 1 0
|features 4.153625 -0.803432 |labels 0 0 1
|features 3.586901 2.625966 |labels 0 1 0
|features 4.372702 0.575025 |labels 0 0 1
|features 1.135865 1.035374 |labels 1 0 0
|features 4.796154 0.862651 |labels 0 0 1
|features 0.369248 2.115445 |labels 1 0 0
|features 3.516012 0.480955 |labels 0 0 1
|features 2.595974 -1.690054 |labels 1 0 0
|features 1.126398 1.824413 |labels 1 0 0
|features 4.077564 0.924271 |labels 0 0 1
|features 4.612824 4.989916 |labels 0 1 0
|features 1.546290 2.119071 |labels 1 0 0
|features 5.618829 2.105525 |labels 0 1 0
|features 3.055336 3.065747 |labels 0 1 0
|features 4.647198 1.542813 |labels 0 0 1
|features 5.091151 2.147099 |labels 0 1 0
|features 3.625071 0.311037 |labels 0 0 1
|features 2.307799 -1.495730 |labels 0 0 1
|features -0.753784 1.024504 |labels 1 0 0
|features 2.467414 0.894651 |labels 0 0 1
|features 2.356993 -1.348528 |labels 1 0 0
|features 2.001782 -0.836787 |labels 0 0 1
|features 2.925553 -0.828339 |labels 0 0 1
|features 4.416055 3.245529 |labels 0 1 0
|features 1.069252 0.725166 |labels 1 0 0
|features 1.294562 -0.197274 |labels 1 0 0
|features 4.477378 2.347177 |labels 0 1 0
|features 3.499737 4.875598 |labels 0 1 0
|features 1.839399 1.836997 |labels 0 1 0
|features 1.092580 2.312231 |labels 1 0 0
|features -0.751483 0.917030 |labels 1 0 0
|features 3.416649 0.959780 |labels 0 0 1
|features 6.272890 1.393839 |labels 0 0 1
|features 0.928108 -0.633631 |labels 1 0 0
|features -0.621571 -0.399249 |labels 1 0 0
|features 1.579312 0.683836 |labels 1 0 0
|features 4.344459 1.574135 |labels 0 1 0
|features 1.819022 0.194714 |labels 1 0 0
|features 3.237937 2.741034 |labels 0 1 0
|features 3.382908 1.775352 |labels 0 1 0
|features 2.903564 0.885940 |labels 0 0 1
|features 4.353755 3.957848 |labels 0 1 0
|features -0.748388 1.073738 |labels 1 0 0
|features 4.757949 3.555670 |labels 0 1 0
|features 4.343514 -0.300948 |labels 0 0 1
|features 1.439724 4.757091 |labels 0 1 0
|features 5.578366 4.132736 |labels 0 1 0
|features 0.227763 -2.450668 |labels 1 0 0
|features 6.338476 3.210632 |labels 0 1 0
|features 3.121639 3.286460 |labels 0 1 0
|features 5.352882 -0.806885 |labels 0 0 1
|features 3.903871 -0.319867 |labels 0 0 1
|features 0.787749 2.035582 |labels 1 0 0
|features 4.744425 0.515576 |labels 0 1 0
|features 4.864004 5.600483 |labels 0 1 0
|features 0.185025 0.221781 |labels 1 0 0
|features 0.111863 -0.153125 |labels 1 0 0
|features 2.884757 4.306858 |labels 0 1 0
|features 3.481979 2.288502 |labels 0 1 0
|features 2.205238 0.502377 |labels 1 0 0
|features 4.693934 4.432471 |labels 0 1 0
|features 3.791779 -0.057670 |labels 0 0 1
|features 1.143819 0.817160 |labels 1 0 0
|features 5.737469 3.878223 |labels 0 1 0
|features 4.741342 3.173934 |labels 0 1 0
|features 2.422221 -0.013868 |labels 0 0 1
|features 0.918625 -0.166115 |labels 1 0 0
|features 3.122271 1.424641 |labels 0 0 1
|features 3.462430 3.956410 |labels 0 1 0
|features 3.654001 2.247638 |labels 0 0 1
|features 3.540817 3.459332 |labels 0 1 0
|features 3.806689 1.696715 |labels 0 0 1
|features 6.119885 -0.400401 |labels 0 0 1
|features 3.911604 3.011729 |labels 0 1 0
|features 2.972734 3.194861 |labels 0 1 0
|features 3.006524 1.140043 |labels 0 0 1
|features 3.007322 2.892493 |labels 0 1 0
|features 4.802557 2.566478 |labels 0 1 0
|features 2.428262 0.396116 |labels 0 0 1
|features 4.030434 3.230760 |labels 0 1 0
|features 5.191659 4.383636 |labels 0 1 0
|features -1.045050 -0.785392 |labels 1 0 0
|features 4.768335 3.893780 |labels 0 1 0
|features 3.939898 2.448906 |labels 0 1 0
|features 3.972909 0.930766 |labels 0 0 1
|features -0.122396 0.163240 |labels 1 0 0
|features 1.012661 0.920772 |labels 1 0 0
|features 4.306722 -0.182788 |labels 0 0 1
|features 3.693470 3.431040 |labels 0 1 0
|features 3.330047 -0.328417 |labels 0 0 1
|features 5.276350 2.810753 |labels 0 1 0
|features -0.866210 -0.176071 |labels 1 0 0
|features 1.161681 0.068793 |labels 1 0 0
|features 4.017895 2.746144 |labels 0 1 0
|features 4.091524 1.494913 |labels 0 0 1
|features 1.086877 -0.661555 |labels 1 0 0
|features 1.096610 1.663877 |labels 1 0 0
|features 3.550485 -1.326387 |labels 0 0 1
|features 3.756918 -0.548134 |labels 0 0 1
|features -0.432257 1.933901 |labels 1 0 0
|features 3.073529 0.512542 |labels 0 0 1
|features 4.690409 0.643014 |labels 0 0 1
|features 1.433133 0.655506 |labels 1 0 0
|features -0.738960 0.091768 |labels 1 0 0
|features 5.770527 1.989314 |labels 0 1 0
|features 3.508179 3.551834 |labels 0 1 0
|features -0.553227 0.665805 |labels 1 0 0
|features 0.063303 1.515440 |labels 1 0 0
|features 4.006607 1.017800 |labels 0 0 1
|features 5.403834 2.985693 |labels 0 1 0
|features 1.250549 0.872075 |labels 1 0 0
|features 1.601300 0.877506 |labels 1 0 0
|features 1.555845 -0.750437 |labels 1 0 0
|features 5.502633 2.444373 |labels 0 1 0
|features 0.502594 2.871184 |labels 1 0 0
|features 3.941683 -0.265121 |labels 0 0 1
|features 1.609009 2.337289 |labels 0 0 1
|features 2.049371 -0.109907 |labels 1 0 0
|features 3.893968 1.147726 |labels 0 0 1
|features 4.181638 3.931917 |labels 0 1 0
|features 5.187949 -0.282560 |labels 0 0 1
|features 0.428814 -0.817809 |labels 1 0 0
|features 4.280497 0.693244 |labels 0 0 1
|features -0.115077 1.271914 |labels 1 0 0
|features 2.340634 3.088115 |labels 0 1 0
|features 5.672981 4.301189 |labels 0 1 0
|features 0.751783 0.215502 |labels 1 0 0
|features 4.884824 2.147961 |labels 0 1 0
|features 4.301601 0.356782 |labels 0 0 1
|features 3.703915 5.951633 |labels 0 1 0
|features 0.308308 0.637447 |labels 1 0 0
|features 3.588032 0.386808 |labels 0 0 1
|features 2.580274 0.968874 |labels 1 0 0
|features 1.931611 -0.374972 |labels 1 0 0
|features 2.027248 0.680231 |labels 1 0 0
|features 3.805778 3.970522 |labels 0 1 0
|features 5.393209 2.870595 |labels 0 1 0
|features 4.341932 1.219185 |labels 0 0 1
|features 3.948779 0.138642 |labels 0 0 1
|features 2.974210 4.078202 |labels 0 1 0
|features 3.466237 1.479102 |labels 0 1 0
|features 3.429529 0.164491 |labels 0 0 1
|features 0.895563 1.207858 |labels 1 0 0
|features 6.222346 2.587637 |labels 0 1 0
|features 3.781328 4.481887 |labels 0 1 0
|features 5.005423 4.339553 |labels 0 1 0
|features 3.342823 3.155918 |labels 0 1 0
|features 0.638335 0.422880 |labels 1 0 0
|features 6.084583 0.524755 |labels 0 0 1
|features 0.544146 -0.675320 |labels 1 0 0
|features 0.937815 -0.158842 |labels 1 0 0
|features 4.878075 3.646459 |labels 0 1 0
|features 4.394876 3.656452 |labels 0 1 0
|features 0.175527 1.051449 |labels 1 0 0
|features 2.068004 3.731130 |labels 0 1 0
|features 3.658874 0.274977 |labels 1 0 0
|features 4.654398 3.224570 |labels 0 1 0
|features 4.217887 -0.269042 |labels 0 0 1
|features 3.868590 0.902664 |labels 0 0 1
|features 4.463083 1.357407 |labels 0 0 1
|features 0.138550 0.810577 |labels 1 0 0
|features 4.721700 0.908698 |labels 0 0 1
|features 3.347677 1.272460 |labels 0 0 1
|features 0.108387 -0.575248 |labels 1 0 0
|features 5.097649 0.290083 |labels 0 0 1
|features 4.497906 0.541197 |labels 0 0 1
|features 3.507372 3.421968 |labels 0 1 0
|features 6.624134 3.603327 |labels 0 1 0
|features -0.587276 0.471565 |labels 1 0 0
|features 1.801156 -1.528190 |labels 1 0 0
|features 3.197821 -1.297906 |labels 0 0 1
|features 2.135339 1.138818 |labels 1 0 0
|features 6.062915 5.061124 |labels 0 1 0
|features 0.362993 -0.847628 |labels 1 0 0
|features 3.520253 2.923445 |labels 0 1 0
|features 3.839261 5.262581 |labels 0 1 0
|features 2.849768 1.676886 |labels 0 1 0
|features 1.543549 1.215295 |labels 1 0 0
|features 0.444973 0.453481 |labels 1 0 0
|features 4.937584 3.562562 |labels 0 1 0
|features 1.857639 0.191374 |labels 1 0 0
|features 4.335236 5.163506 |labels 0 1 0
|features 4.691276 1.898922 |labels 0 1 0
|features 5.904368 0.700075 |labels 0 0 1
|features 5.615622 1.168694 |labels 0 0 1
|features 3.518285 -1.463071 |labels 0 0 1
|features 5.260380 2.033894 |labels 0 0 1
|features 4.402485 -1.466749 |labels 0 0 1
|features 1.661792 2.199234 |labels 1 0 0
|features 3.428152 4.100739 |labels 0 1 0
|features 4.155874 1.180401 |labels 0 0 1
|features 3.560558 1.628193 |labels 0 0 1
|features 1.752789 0.167950 |labels 1 0 0
|features 2.989879 4.547287 |labels 0 1 0
|features 1.472915 1.807051 |labels 1 0 0
|features 3.956969 1.057726 |labels 0 0 1
|features -0.910892 -0.324420 |labels 1 0 0
|features 3.896150 -0.261758 |labels 0 0 1
|features 5.160224 3.879306 |labels 0 1 0
|features 0.508886 0.388303 |labels 1 0 0
|features 4.550465 2.808816 |labels 0 1 0
|features 4.219800 2.157565 |labels 0 0 1
|features 2.973732 -0.025899 |labels 0 0 1
|features 4.850215 2.057425 |labels 0 1 0
|features 4.281529 0.884518 |labels 0 0 1
|features 3.185110 3.578732 |labels 0 1 0
|features 3.550744 3.634269 |labels 0 1 0
|features 3.868571 -0.685954 |labels 0 0 1
|features 4.436760 2.882380 |labels 0 1 0
|features 1.576030 0.721975 |labels 1 0 0
|features 4.668262 0.805866 |labels 0 0 1
|features 3.715000 0.315345 |labels 0 0 1
|features 2.507644 3.547079 |labels 0 1 0
|features 0.894466 1.539280 |labels 1 0 0
|features 0.909938 1.464673 |labels 1 0 0
|features 4.112257 0.148428 |labels 0 0 1
|features 3.106928 1.113714 |labels 0 0 1
|features 1.288402 -0.701025 |labels 1 0 0
|features 3.877478 5.171910 |labels 0 1 0
|features 0.358146 1.337491 |labels 1 0 0
|features 3.112478 4.083398 |labels 0 1 0
|features -0.073140 -0.908321 |labels 1 0 0
|features 2.937791 3.262817 |labels 0 1 0
|features 2.835341 -0.358869 |labels 1 0 0
|features 0.454474 0.626554 |labels 1 0 0
|features 1.123346 0.271387 |labels 1 0 0
|features 3.839966 3.804344 |labels 0 1 0
|features 1.667118 1.689903 |labels 1 0 0
|features 3.290955 2.687145 |labels 0 1 0
|features 3.128941 3.792980 |labels 0 1 0
|features 5.029546 1.430181 |labels 0 0 1
|features 3.298832 3.179733 |labels 0 1 0
|features 3.688343 1.939487 |labels 0 0 1
|features -1.383809 -0.301854 |labels 1 0 0
|features 0.841377 -0.464041 |labels 1 0 0
|features 1.821239 0.271780 |labels 0 0 1
|features 3.014247 2.084152 |labels 0 1 0
|features 5.089541 1.644286 |labels 0 0 1
|features 2.348496 2.055794 |labels 0 0 1
|features 4.780683 0.638380 |labels 0 0 1
|features 1.087268 1.900109 |labels 1 0 0
|features 5.347040 4.701276 |labels 0 1 0
|features 4.871581 -0.863385 |labels 0 0 1
|features 1.192136 -1.018245 |labels 1 0 0
|features 2.876963 1.949194 |labels 0 0 1
|features 1.120886 -0.929929 |labels 1 0 0
|features 0.163176 -0.664774 |labels 1 0 0
|features 3.170573 2.324343 |labels 0 1 0
|features -0.939862 1.074156 |labels 1 0 0
|features 4.808481 4.376259 |labels 0 1 0
|features 2.881816 0.101940 |labels 0 0 1
|features 4.280627 5.079110 |labels 0 1 0
|features 1.342037 1.384894 |labels 1 0 0
|features 4.423114 2.026311 |labels 0 1 0
|features 1.956134 0.905019 |labels 0 0 1
|features 3.601798 1.118188 |labels 0 0 1
|features 2.368481 0.062807 |labels 0 0 1
|features 2.527163 2.606035 |labels 1 0 0
|features 4.159202 2.225367 |labels 0 0 1
|features 4.243090 -1.678570 |labels 0 0 1
|features 3.374625 0.916946 |labels 0 0 1
|features 4.836464 4.250067 |labels 0 1 0
|features 3.865716 0.733695 |labels 0 0 1
|features 4.538941 2.740055 |labels 0 1 0
|features -0.725037 1.923167 |labels 1 0 0
|features 4.348298 3.779133 |labels 0 1 0
|features 3.111166 3.210131 |labels 0 1 0
|features 1.983827 3.583649 |labels 0 1 0
|features 4.348485 0.524232 |labels 0 0 1
|features 0.113734 0.842453 |labels 1 0 0
|features 1.088557 0.351414 |labels 1 0 0
|features 4.140245 0.179361 |labels 0 0 1
|features 3.700337 4.401253 |labels 0 1 0
|features 3.689474 0.529192 |labels 0 0 1
|features 4.249344 2.108636 |labels 0 1 0
|features 2.345218 -1.911326 |labels 1 0 0
|features 5.250122 2.604543 |labels 0 1 0
|features 0.902275 1.307964 |labels 1 0 0
|features 3.449315 2.666064 |labels 0 1 0
|features -0.343801 0.123662 |labels 1 0 0
|features 4.443098 4.993161 |labels 0 1 0
|features 3.987001 1.344293 |labels 0 0 1
|features 3.783282 -0.936724 |labels 0 0 1
|features 1.958721 3.348016 |labels 0 1 0
|features 5.515478 3.471848 |labels 0 1 0
|features 1.584154 1.162307 |labels 1 0 0
|features 0.486644 -0.685806 |labels 1 0 0
|features 4.049450 3.728309 |labels 0 1 0
|features 0.577870 0.066874 |labels 1 0 0
|features 0.934053 0.998711 |labels 1 0 0
|features 0.933781 -0.644142 |labels 1 0 0
|features 1.740673 -2.701543 |labels 1 0 0
|features 5.983059 3.880100 |labels 0 1 0
|features -0.542726 1.520657 |labels 1 0 0
|features 1.820242 -0.127512 |labels 1 0 0
|features 3.869995 1.366239 |labels 0 0 1
|features 3.649993 3.875205 |labels 0 1 0
|features 4.326591 3.908440 |labels 0 1 0
|features -0.671845 2.292655 |labels 1 0 0
|features 4.614205 -0.065502 |labels 0 0 1
|features 5.337743 2.782974 |labels 0 1 0
|features 1.006830 0.502485 |labels 1 0 0
|features 0.646433 0.907665 |labels 1 0 0
|features 3.497074 1.102314 |labels 0 0 1
|features 2.714741 2.815370 |labels 0 1 0
|features 0.840907 -0.788417 |labels 1 0 0
|features 3.681667 3.963016 |labels 0 1 0
|features -0.413474 -1.050748 |labels 1 0 0
|features 5.095382 3.043376 |labels 0 1 0
|features 1.276484 1.763724 |labels 1 0 0
|features 1.093233 1.896627 |labels 1 0 0
|features 0.885230 0.195284 |labels 1 0 0
|features -1.116703 -0.083673 |labels 1 0 0
|features 2.768153 3.253628 |labels 0 1 0
|features 4.494142 0.705453 |labels 0 0 1
|features 1.824851 1.606847 |labels 1 0 0
|features 2.780254 4.475852 |labels 0 1 0
|features 2.187291 -1.004686 |labels 1 0 0
|features 4.102475 0.139971 |labels 0 0 1
|features 2.704021 0.546845 |labels 0 0 1
|features 2.153619 0.703613 |labels 0 0 1
|features 3.926857 -1.438502 |labels 0 0 1
|features 3.926749 0.703817 |labels 0 0 1
|features 2.200351 1.232015 |labels 1 0 0
|features 3.353178 2.328385 |labels 0 0 1
|features -0.503365 1.129519 |labels 1 0 0
|features -0.906905 0.719234 |labels 1 0 0
|features -0.306073 -0.079512 |labels 1 0 0
|features 3.548283 5.826111 |labels 0 1 0
|features 0.536668 0.896596 |labels 1 0 0
|features 3.568629 0.675687 |labels 0 0 1
|features 4.020288 2.491829 |labels 0 0 1
|features 4.877190 1.382423 |labels 0 0 1
|features 2.060446 0.728661 |labels 1 0 0
|features 4.380656 -0.377533 |labels 0 0 1
|features 3.095210 -0.249912 |labels 0 0 1
|features 5.433247 6.082839 |labels 0 1 0
|features 1.226953 1.803442 |labels 1 0 0
|features 1.726498 0.294119 |labels 1 0 0
|features 5.227722 -0.995057 |labels 0 0 1
|features 0.867838 0.103294 |labels 1 0 0
|features 0.191855 0.333970 |labels 1 0 0
|features 4.725049 5.049785 |labels 0 1 0
|features 1.648490 -0.236169 |labels 1 0 0
|features 4.723265 3.068310 |labels 0 0 1
|features 0.383477 0.799365 |labels 1 0 0
|features 2.648195 0.388766 |labels 0 0 1
|features 0.144110 0.829298 |labels 1 0 0
|features 0.580925 0.184520 |labels 1 0 0
|features 5.158404 -0.258398 |labels 0 0 1
|features 2.630052 3.650630 |labels 0 1 0
|features 4.654484 4.140236 |labels 0 1 0
|features 4.471095 2.304949 |labels 0 1 0
|features 1.210868 1.351212 |labels 1 0 0
|features 1.244951 -0.352608 |labels 1 0 0
|features 4.167235 2.325315 |labels 0 1 0
|features 0.749074 -0.227563 |labels 1 0 0
|features 4.469702 2.354917 |labels 0 1 0
|features 2.262076 2.361214 |labels 1 0 0
|features 0.364683 -1.094402 |labels 1 0 0
|features 4.004906 2.918029 |labels 0 1 0
|features 5.916473 2.902482 |labels 0 1 0
|features 3.883862 4.207980 |labels 0 1 0
|features 4.330138 0.662156 |labels 0 0 1
|features 2.197955 -1.037347 |labels 1 0 0
|features 2.564784 4.314187 |labels 0 1 0
|features 3.393377 4.268344 |labels 0 1 0
|features 5.234548 1.507413 |labels 0 0 1
|features 4.162258 3.859534 |labels 0 1 0
|features 3.073434 3.885747 |labels 0 1 0
|features 3.651390 -1.359382 |labels 0 0 1
|features 3.401908 0.200416 |labels 0 0 1
|features 3.765561 4.517499 |labels 0 1 0
|features 4.644557 1.919168 |labels 0 1 0
|features 3.382163 2.761240 |labels 0 0 1
|features 0.177373 0.845746 |labels 1 0 0
|features 1.367353 1.108394 |labels 1 0 0
|features 0.323228 0.872102 |labels 1 0 0

View File

@ -0,0 +1,500 @@
|features 3.854499 4.163941 |labels 1.000000
|features 1.058121 1.204858 |labels 0.000000
|features 1.870621 1.284107 |labels 0.000000
|features 1.134650 1.651822 |labels 0.000000
|features 5.420541 4.557660 |labels 1.000000
|features 6.042731 3.375708 |labels 1.000000
|features 5.667109 2.811728 |labels 1.000000
|features 0.232070 1.814821 |labels 0.000000
|features -0.647150 -1.612478 |labels 0.000000
|features 2.626172 5.321667 |labels 1.000000
|features 1.359751 2.056849 |labels 0.000000
|features 3.534476 6.011925 |labels 1.000000
|features 4.871508 2.245406 |labels 1.000000
|features 4.977201 6.092787 |labels 1.000000
|features 1.597508 2.110568 |labels 0.000000
|features 2.099170 0.073616 |labels 0.000000
|features 0.638281 -0.171881 |labels 0.000000
|features 4.606747 4.092115 |labels 1.000000
|features 5.168790 4.673153 |labels 1.000000
|features 5.084637 4.435160 |labels 1.000000
|features 3.379607 2.765107 |labels 1.000000
|features 3.992242 2.799751 |labels 1.000000
|features 1.807728 0.205914 |labels 0.000000
|features 1.946180 0.303569 |labels 0.000000
|features 0.218267 1.301271 |labels 0.000000
|features 4.932840 2.117177 |labels 1.000000
|features 3.739489 2.458558 |labels 1.000000
|features 1.597743 -2.192362 |labels 0.000000
|features 3.582005 3.350572 |labels 1.000000
|features 3.930642 5.733507 |labels 1.000000
|features 5.747863 3.739415 |labels 1.000000
|features -0.631374 2.314482 |labels 0.000000
|features 0.866484 0.363432 |labels 0.000000
|features 0.293501 0.347385 |labels 0.000000
|features 4.544393 4.699040 |labels 1.000000
|features -0.242005 0.926520 |labels 0.000000
|features 3.637198 5.238140 |labels 1.000000
|features -0.269463 1.525586 |labels 0.000000
|features 0.682529 -0.703649 |labels 0.000000
|features 3.562643 -0.126556 |labels 0.000000
|features 2.671530 3.729066 |labels 1.000000
|features 4.034716 3.458366 |labels 1.000000
|features 5.401503 3.117191 |labels 1.000000
|features 1.157177 1.183186 |labels 0.000000
|features 0.778963 1.394348 |labels 0.000000
|features 4.599715 2.297663 |labels 1.000000
|features 4.532568 4.568362 |labels 1.000000
|features 1.785478 -0.213185 |labels 0.000000
|features 4.617391 4.230360 |labels 1.000000
|features 5.672957 3.668370 |labels 1.000000
|features 4.267738 5.390780 |labels 1.000000
|features 0.707751 2.955391 |labels 0.000000
|features 0.791275 1.654795 |labels 0.000000
|features 1.760541 0.976920 |labels 0.000000
|features 4.543920 2.222765 |labels 1.000000
|features 4.515881 6.199021 |labels 1.000000
|features 3.645005 3.611395 |labels 1.000000
|features 0.965049 1.737265 |labels 0.000000
|features -1.779455 1.595554 |labels 0.000000
|features -0.484797 -0.559924 |labels 0.000000
|features 2.944180 4.429239 |labels 1.000000
|features 3.326649 4.412622 |labels 1.000000
|features 4.275101 2.143945 |labels 1.000000
|features 1.173035 0.641844 |labels 0.000000
|features 4.003884 3.176954 |labels 1.000000
|features 1.960240 -0.244709 |labels 0.000000
|features 0.320283 2.115552 |labels 0.000000
|features 2.303185 3.047043 |labels 1.000000
|features 0.993086 0.074009 |labels 0.000000
|features 5.599144 3.857344 |labels 1.000000
|features 5.325894 3.931000 |labels 1.000000
|features 2.840053 4.781688 |labels 1.000000
|features 4.142453 3.405830 |labels 1.000000
|features 1.084043 1.589581 |labels 0.000000
|features 2.795705 2.319276 |labels 1.000000
|features 1.980552 0.717780 |labels 0.000000
|features 1.875956 -0.571905 |labels 0.000000
|features 2.013802 1.694811 |labels 0.000000
|features 4.690795 2.183334 |labels 1.000000
|features 4.321816 1.876459 |labels 1.000000
|features 4.088717 4.394346 |labels 1.000000
|features 4.991936 4.299770 |labels 1.000000
|features 2.592315 4.783210 |labels 1.000000
|features 0.703270 2.541733 |labels 0.000000
|features 0.467768 -0.007592 |labels 0.000000
|features 1.694096 -0.570847 |labels 0.000000
|features 2.255603 0.663395 |labels 0.000000
|features 1.300394 1.518341 |labels 0.000000
|features 4.354786 4.501928 |labels 1.000000
|features 1.474162 0.603113 |labels 0.000000
|features 1.340782 0.637653 |labels 0.000000
|features -0.351240 0.501893 |labels 0.000000
|features 4.918587 5.366305 |labels 1.000000
|features 2.242199 -0.916682 |labels 0.000000
|features -0.161858 0.448384 |labels 0.000000
|features 1.659615 1.524191 |labels 0.000000
|features 3.072670 1.703225 |labels 0.000000
|features 0.003256 -0.306702 |labels 0.000000
|features -1.792094 1.193539 |labels 0.000000
|features 7.200298 3.962190 |labels 1.000000
|features 4.220305 4.190289 |labels 1.000000
|features 4.096599 3.264797 |labels 1.000000
|features -0.674145 0.751491 |labels 0.000000
|features 3.215213 4.549768 |labels 1.000000
|features 1.522988 3.311437 |labels 0.000000
|features 4.393445 1.822070 |labels 1.000000
|features 1.991048 1.429309 |labels 0.000000
|features 4.741012 3.169984 |labels 1.000000
|features 2.563678 1.798587 |labels 0.000000
|features 3.310656 3.600789 |labels 1.000000
|features 0.559119 -0.193984 |labels 0.000000
|features 3.182626 3.279566 |labels 1.000000
|features 0.145061 1.428861 |labels 0.000000
|features 5.748625 2.766672 |labels 1.000000
|features 1.612338 -0.441931 |labels 0.000000
|features 0.521950 0.355267 |labels 0.000000
|features 4.284910 3.874950 |labels 1.000000
|features 4.911425 3.054658 |labels 1.000000
|features 2.946163 0.502614 |labels 0.000000
|features 4.381390 2.600999 |labels 1.000000
|features 0.585791 -0.528432 |labels 0.000000
|features 1.329802 -0.076910 |labels 0.000000
|features 0.860040 1.153562 |labels 0.000000
|features 0.930515 -0.257435 |labels 0.000000
|features 2.775174 0.751338 |labels 0.000000
|features 2.429059 0.615483 |labels 0.000000
|features 2.546002 1.132210 |labels 0.000000
|features 5.059000 3.423829 |labels 1.000000
|features 1.303533 0.013015 |labels 0.000000
|features 2.160149 -0.400779 |labels 0.000000
|features 5.038046 3.027673 |labels 1.000000
|features 4.583471 5.379319 |labels 1.000000
|features 5.608845 2.082021 |labels 1.000000
|features 3.406426 3.326734 |labels 1.000000
|features 4.267102 3.866177 |labels 1.000000
|features 1.799669 0.489094 |labels 0.000000
|features 1.807634 2.029468 |labels 0.000000
|features 1.536463 1.053052 |labels 0.000000
|features 5.653295 3.369125 |labels 1.000000
|features 2.493326 0.794542 |labels 0.000000
|features 1.528977 0.961929 |labels 0.000000
|features 1.973016 0.696162 |labels 0.000000
|features 2.283974 0.198255 |labels 0.000000
|features 5.227293 4.395268 |labels 1.000000
|features 5.302484 4.021613 |labels 1.000000
|features 6.223076 4.537934 |labels 1.000000
|features 1.460204 -1.055539 |labels 0.000000
|features 2.985097 4.228990 |labels 1.000000
|features 1.685054 0.499576 |labels 0.000000
|features 0.521659 0.510605 |labels 0.000000
|features 1.891089 1.284388 |labels 0.000000
|features 4.620926 3.662371 |labels 1.000000
|features 1.613905 -0.770152 |labels 0.000000
|features 6.007418 4.755721 |labels 1.000000
|features 0.798078 -0.304557 |labels 0.000000
|features 5.242706 2.099872 |labels 1.000000
|features 1.518268 -0.858963 |labels 0.000000
|features 3.733642 4.244483 |labels 1.000000
|features 0.970367 -1.534686 |labels 0.000000
|features 1.334952 2.250191 |labels 0.000000
|features 2.252214 3.343515 |labels 1.000000
|features 3.982213 4.457969 |labels 1.000000
|features 5.086620 3.180442 |labels 1.000000
|features 0.005277 0.197319 |labels 0.000000
|features 2.999128 2.909942 |labels 1.000000
|features 2.412666 2.046286 |labels 0.000000
|features 2.044537 3.416533 |labels 1.000000
|features 2.650439 3.372171 |labels 1.000000
|features 2.480446 1.327368 |labels 0.000000
|features 4.824915 5.603495 |labels 1.000000
|features 0.759204 0.531043 |labels 0.000000
|features 1.965476 1.372763 |labels 0.000000
|features 1.000248 1.208139 |labels 0.000000
|features 1.979980 -0.446807 |labels 0.000000
|features 0.528053 1.178535 |labels 0.000000
|features 5.442396 3.969797 |labels 1.000000
|features -0.145691 1.375993 |labels 0.000000
|features 1.336725 -0.006089 |labels 0.000000
|features 5.291797 3.250537 |labels 1.000000
|features 4.286453 1.117735 |labels 1.000000
|features -0.928654 -0.925485 |labels 0.000000
|features 3.332391 2.603963 |labels 1.000000
|features 3.215562 4.756808 |labels 1.000000
|features 1.610967 0.830856 |labels 0.000000
|features 2.174433 3.501271 |labels 1.000000
|features 4.848584 4.251824 |labels 1.000000
|features 0.810184 1.152021 |labels 0.000000
|features 4.873924 4.517936 |labels 1.000000
|features 1.915303 1.649095 |labels 0.000000
|features 1.623343 -0.081105 |labels 0.000000
|features 1.944076 0.482732 |labels 0.000000
|features 2.442956 1.254540 |labels 0.000000
|features -1.002581 1.265333 |labels 0.000000
|features 0.959354 0.678516 |labels 0.000000
|features -0.478621 2.502554 |labels 0.000000
|features 3.357642 2.993470 |labels 1.000000
|features 5.741979 2.958477 |labels 1.000000
|features 4.474261 3.260622 |labels 1.000000
|features 3.587932 4.572091 |labels 1.000000
|features 1.274866 0.695311 |labels 0.000000
|features 4.557162 4.754880 |labels 1.000000
|features 0.557867 0.280893 |labels 0.000000
|features 1.832047 -2.162059 |labels 0.000000
|features 3.904049 5.257427 |labels 1.000000
|features 3.225019 3.845294 |labels 1.000000
|features 4.451218 4.125344 |labels 1.000000
|features 3.138143 2.869685 |labels 1.000000
|features 4.451703 3.430654 |labels 1.000000
|features 0.124060 1.422203 |labels 0.000000
|features 4.692774 5.156611 |labels 1.000000
|features 0.735314 0.375099 |labels 0.000000
|features 0.727577 1.158726 |labels 0.000000
|features 0.643469 0.283426 |labels 0.000000
|features 5.126834 1.929468 |labels 1.000000
|features -0.172361 2.982370 |labels 0.000000
|features 3.957745 1.561874 |labels 1.000000
|features 5.563733 3.417080 |labels 1.000000
|features 5.181533 1.465063 |labels 1.000000
|features 5.843654 5.040710 |labels 1.000000
|features 0.761570 0.171094 |labels 0.000000
|features 3.163795 3.940869 |labels 1.000000
|features 2.435362 1.047614 |labels 0.000000
|features 2.524330 3.602348 |labels 1.000000
|features 4.200838 3.267377 |labels 1.000000
|features 4.249560 2.926280 |labels 1.000000
|features 0.060257 0.295729 |labels 0.000000
|features 1.528257 1.651867 |labels 0.000000
|features 2.030978 1.566011 |labels 0.000000
|features 4.065243 4.375190 |labels 1.000000
|features 1.406204 0.238570 |labels 0.000000
|features 1.229776 1.186559 |labels 0.000000
|features 2.295681 1.883864 |labels 0.000000
|features 3.966570 4.293142 |labels 1.000000
|features 1.713323 0.534886 |labels 0.000000
|features 0.772032 -0.096214 |labels 0.000000
|features 3.392854 5.195064 |labels 1.000000
|features 5.063653 2.749764 |labels 1.000000
|features 1.410392 1.694554 |labels 0.000000
|features 0.540269 0.376759 |labels 0.000000
|features 4.103946 3.870140 |labels 1.000000
|features 5.132739 3.079176 |labels 1.000000
|features 2.524063 0.486934 |labels 0.000000
|features 0.046403 1.452778 |labels 0.000000
|features 1.705593 0.243750 |labels 0.000000
|features 1.621902 0.203138 |labels 0.000000
|features -0.420733 0.589060 |labels 0.000000
|features 2.887145 2.621849 |labels 1.000000
|features 5.545509 4.473069 |labels 1.000000
|features 0.326439 -0.162102 |labels 0.000000
|features 0.906097 -0.018566 |labels 0.000000
|features 3.398280 5.125843 |labels 1.000000
|features 0.833088 -0.808535 |labels 0.000000
|features 4.535285 4.133511 |labels 1.000000
|features 1.781705 4.123651 |labels 1.000000
|features 4.345894 3.355084 |labels 1.000000
|features 4.770073 3.007432 |labels 1.000000
|features 2.537267 3.813503 |labels 1.000000
|features 0.994347 2.567949 |labels 0.000000
|features 0.337262 -0.224479 |labels 0.000000
|features 4.936596 3.107819 |labels 1.000000
|features 2.177957 -0.544641 |labels 0.000000
|features 3.434811 2.806362 |labels 1.000000
|features 3.172973 4.378089 |labels 1.000000
|features 4.015349 3.000845 |labels 1.000000
|features 3.640748 3.917499 |labels 1.000000
|features 5.432434 4.092587 |labels 1.000000
|features 4.701984 4.063092 |labels 1.000000
|features 3.978015 3.584431 |labels 1.000000
|features 5.029923 2.346036 |labels 1.000000
|features 4.939017 3.209084 |labels 1.000000
|features 3.999592 2.747525 |labels 1.000000
|features 5.233483 4.877698 |labels 1.000000
|features 2.260049 1.023384 |labels 0.000000
|features -1.149943 1.257165 |labels 0.000000
|features -0.026270 0.468090 |labels 0.000000
|features 5.155107 4.620842 |labels 1.000000
|features 4.179414 4.807546 |labels 1.000000
|features 2.560286 0.526253 |labels 0.000000
|features 5.843334 1.439470 |labels 1.000000
|features 4.417442 4.483117 |labels 1.000000
|features 4.354138 4.496168 |labels 1.000000
|features 0.873730 2.230023 |labels 0.000000
|features 4.531298 4.944164 |labels 1.000000
|features 2.010164 -0.358403 |labels 0.000000
|features 1.165044 1.376602 |labels 0.000000
|features 1.451538 -0.197779 |labels 0.000000
|features -1.751961 0.210820 |labels 0.000000
|features 2.431281 3.878465 |labels 1.000000
|features 3.311168 3.697618 |labels 1.000000
|features 2.324742 -0.330745 |labels 0.000000
|features 1.447031 1.028776 |labels 0.000000
|features 0.711003 2.631227 |labels 0.000000
|features 4.872934 3.406132 |labels 1.000000
|features 2.419345 0.297983 |labels 0.000000
|features 0.437814 2.851194 |labels 0.000000
|features 3.105758 4.098041 |labels 1.000000
|features 5.310168 3.519401 |labels 1.000000
|features 1.218607 -1.505891 |labels 0.000000
|features 6.053827 2.848790 |labels 1.000000
|features 3.475758 3.352349 |labels 1.000000
|features 0.911730 -0.213069 |labels 0.000000
|features 1.255973 0.089677 |labels 0.000000
|features 4.152711 3.871858 |labels 1.000000
|features 3.003909 3.288998 |labels 1.000000
|features 0.291281 1.124965 |labels 0.000000
|features 2.155017 0.550642 |labels 0.000000
|features 3.494102 0.710991 |labels 0.000000
|features 4.376613 2.330150 |labels 1.000000
|features 4.707851 6.179972 |labels 1.000000
|features 0.614240 -0.243535 |labels 0.000000
|features 1.130049 0.870765 |labels 0.000000
|features 3.994615 2.855247 |labels 1.000000
|features 1.556420 0.106179 |labels 0.000000
|features 3.182309 5.121422 |labels 1.000000
|features 2.315933 0.418897 |labels 0.000000
|features 1.797904 0.633645 |labels 0.000000
|features 4.012446 3.887718 |labels 1.000000
|features 2.106849 3.776831 |labels 1.000000
|features 4.477828 3.989422 |labels 1.000000
|features 2.871290 4.610706 |labels 1.000000
|features 5.317459 5.621137 |labels 1.000000
|features 2.265963 -0.095395 |labels 0.000000
|features 2.963642 2.804267 |labels 1.000000
|features 5.859384 3.673343 |labels 1.000000
|features 6.365340 3.541960 |labels 1.000000
|features 1.450987 0.721751 |labels 0.000000
|features 4.641593 2.436289 |labels 1.000000
|features -0.126649 0.101750 |labels 0.000000
|features 1.835293 1.594895 |labels 0.000000
|features 2.121195 0.152643 |labels 0.000000
|features 1.881799 1.169974 |labels 0.000000
|features 2.421852 -0.089441 |labels 0.000000
|features 0.110206 -1.491046 |labels 0.000000
|features 6.200556 4.284843 |labels 1.000000
|features 3.545593 5.217408 |labels 1.000000
|features 3.365187 2.790974 |labels 1.000000
|features 6.493131 5.311132 |labels 1.000000
|features 0.800791 0.229630 |labels 0.000000
|features 4.975666 4.214251 |labels 1.000000
|features 1.562586 0.181976 |labels 0.000000
|features 0.899273 0.003180 |labels 0.000000
|features 6.064242 3.482802 |labels 1.000000
|features 1.777259 2.498596 |labels 0.000000
|features 5.479965 5.168898 |labels 1.000000
|features 4.671380 3.356556 |labels 1.000000
|features 1.730588 0.417775 |labels 0.000000
|features 2.463118 -0.305587 |labels 0.000000
|features 3.967679 0.361350 |labels 0.000000
|features 0.164925 -0.167591 |labels 0.000000
|features 4.777002 3.088492 |labels 1.000000
|features 2.049808 3.096552 |labels 0.000000
|features 1.416130 -1.043606 |labels 0.000000
|features 0.318913 -1.539956 |labels 0.000000
|features 6.004351 2.521442 |labels 1.000000
|features 2.969229 3.311301 |labels 1.000000
|features 0.879291 0.094171 |labels 0.000000
|features 5.290177 5.198102 |labels 1.000000
|features -0.305314 0.826116 |labels 0.000000
|features 2.091880 -1.176581 |labels 0.000000
|features 2.816867 2.875016 |labels 1.000000
|features 0.486424 -1.055319 |labels 0.000000
|features 3.012812 4.530291 |labels 1.000000
|features 1.137009 1.323397 |labels 0.000000
|features 0.088114 -0.353501 |labels 0.000000
|features 1.174005 0.188025 |labels 0.000000
|features 1.928114 1.398347 |labels 0.000000
|features 0.128505 1.430034 |labels 0.000000
|features 2.021187 0.577234 |labels 0.000000
|features 1.361335 0.394605 |labels 0.000000
|features 5.125811 4.221355 |labels 1.000000
|features 0.260733 1.758422 |labels 0.000000
|features 2.106970 0.305971 |labels 0.000000
|features 3.675850 5.051226 |labels 1.000000
|features 2.105405 0.240527 |labels 0.000000
|features 3.072167 3.130910 |labels 1.000000
|features 0.987479 0.036861 |labels 0.000000
|features -0.271382 0.094250 |labels 0.000000
|features 4.703495 2.620398 |labels 1.000000
|features 3.005831 2.220124 |labels 1.000000
|features 5.072896 1.477152 |labels 1.000000
|features 4.443991 3.679157 |labels 1.000000
|features 0.845034 0.419956 |labels 0.000000
|features 4.698964 3.109439 |labels 1.000000
|features 1.766144 0.595496 |labels 0.000000
|features 2.046076 0.433007 |labels 0.000000
|features 0.874663 1.010155 |labels 0.000000
|features 4.939031 5.340021 |labels 1.000000
|features 3.881158 3.072467 |labels 1.000000
|features 2.928763 4.160337 |labels 1.000000
|features 5.582289 4.805588 |labels 1.000000
|features 3.180992 3.459563 |labels 1.000000
|features -0.486820 -0.074926 |labels 0.000000
|features 4.091057 2.402846 |labels 1.000000
|features 4.915464 4.543850 |labels 1.000000
|features 1.492434 0.588755 |labels 0.000000
|features 2.594011 0.332043 |labels 0.000000
|features 0.317571 -0.525159 |labels 0.000000
|features 3.936029 4.312181 |labels 1.000000
|features 1.918811 -0.659594 |labels 0.000000
|features 2.657582 0.028525 |labels 0.000000
|features 4.637282 3.562483 |labels 1.000000
|features -0.097472 1.250080 |labels 0.000000
|features 1.340281 -1.399129 |labels 0.000000
|features 4.330372 3.140502 |labels 1.000000
|features 4.358103 3.760854 |labels 1.000000
|features 3.897352 4.806873 |labels 1.000000
|features 4.962704 4.692459 |labels 1.000000
|features 1.667918 -0.134096 |labels 0.000000
|features 4.929650 1.727842 |labels 1.000000
|features 2.434315 3.000448 |labels 1.000000
|features 1.179167 1.894836 |labels 0.000000
|features 0.190498 0.655592 |labels 0.000000
|features 3.408802 4.843020 |labels 1.000000
|features 4.497565 3.844998 |labels 1.000000
|features -0.501596 1.561013 |labels 0.000000
|features 4.158981 4.875362 |labels 1.000000
|features 4.017462 4.655003 |labels 1.000000
|features 3.319263 3.462037 |labels 1.000000
|features 2.635572 1.022114 |labels 0.000000
|features 2.638164 5.051437 |labels 1.000000
|features 4.875001 3.592322 |labels 1.000000
|features -0.276607 0.800369 |labels 0.000000
|features 4.351591 3.321136 |labels 1.000000
|features 3.699848 3.317014 |labels 1.000000
|features 4.947319 4.252134 |labels 1.000000
|features 4.146336 2.162761 |labels 1.000000
|features 5.231704 5.477804 |labels 1.000000
|features 3.302101 3.994218 |labels 1.000000
|features -0.249349 2.069960 |labels 0.000000
|features 4.705134 3.921461 |labels 1.000000
|features 4.652980 4.287917 |labels 1.000000
|features 3.937259 -0.334385 |labels 0.000000
|features 3.257619 2.758094 |labels 1.000000
|features 0.994191 3.135344 |labels 0.000000
|features 4.649768 2.123305 |labels 1.000000
|features 1.634135 0.241517 |labels 0.000000
|features 1.682542 2.057739 |labels 1.000000
|features 5.163117 4.467304 |labels 1.000000
|features 4.638594 4.141250 |labels 1.000000
|features 1.392605 0.635603 |labels 0.000000
|features 4.319784 2.965064 |labels 1.000000
|features 1.872466 1.566002 |labels 0.000000
|features 4.230714 5.179026 |labels 1.000000
|features 2.635294 3.470599 |labels 1.000000
|features 0.988464 0.943613 |labels 0.000000
|features 0.897546 0.129141 |labels 0.000000
|features 3.370731 2.019838 |labels 0.000000
|features 1.424812 0.081647 |labels 0.000000
|features 5.961444 3.372419 |labels 1.000000
|features 2.839070 0.926229 |labels 0.000000
|features 0.279132 1.607793 |labels 0.000000
|features 5.351031 3.693640 |labels 1.000000
|features 2.637437 1.951445 |labels 0.000000
|features -0.179258 0.349339 |labels 0.000000
|features 3.246295 1.013459 |labels 0.000000
|features 5.839643 4.556761 |labels 1.000000
|features 1.435225 0.937185 |labels 0.000000
|features 0.500440 0.348246 |labels 0.000000
|features 4.948782 4.994416 |labels 1.000000
|features 0.810541 0.456830 |labels 0.000000
|features 5.098827 4.142789 |labels 1.000000
|features 2.365307 0.729496 |labels 0.000000
|features -0.117730 0.891913 |labels 0.000000
|features 0.485735 0.513485 |labels 0.000000
|features 0.680270 1.486851 |labels 0.000000
|features 1.143053 0.227480 |labels 0.000000
|features 6.615446 4.561501 |labels 1.000000
|features 1.016051 1.862106 |labels 0.000000
|features 0.668177 -0.212610 |labels 0.000000
|features 2.906047 2.415627 |labels 1.000000
|features 5.576097 5.068683 |labels 1.000000
|features 1.315063 -0.040980 |labels 0.000000
|features 5.375285 3.306877 |labels 1.000000
|features 4.549934 3.805014 |labels 1.000000
|features 1.189238 0.661279 |labels 0.000000
|features 4.156567 3.280736 |labels 1.000000
|features 2.061355 1.090958 |labels 0.000000
|features 4.499387 3.640263 |labels 1.000000
|features 3.503883 1.015591 |labels 0.000000
|features 0.390200 -1.037188 |labels 0.000000
|features 2.922873 4.696711 |labels 1.000000
|features 1.803928 3.846808 |labels 1.000000
|features 0.907921 -2.139287 |labels 0.000000
|features 1.640739 0.592793 |labels 0.000000
|features 5.108193 3.194757 |labels 1.000000
|features 4.297873 4.034234 |labels 1.000000
|features 4.832678 4.073469 |labels 1.000000
|features 4.391764 3.557895 |labels 1.000000
|features 2.006343 0.836557 |labels 0.000000
|features 0.351400 1.534742 |labels 0.000000
|features 4.933823 2.937944 |labels 1.000000
|features 3.926482 2.073712 |labels 1.000000
|features 5.382385 4.818642 |labels 1.000000
|features 4.739010 3.213326 |labels 1.000000
|features 0.026227 0.177150 |labels 0.000000
|features 5.001353 3.300961 |labels 1.000000
|features 5.022782 2.921902 |labels 1.000000
|features 4.225051 4.534986 |labels 1.000000
|features 3.745148 -0.169000 |labels 0.000000
|features 5.891838 2.817417 |labels 1.000000

File diff is not shown because of its large size. Load diff

File diff is not shown because of its large size. Load diff

View file

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft. All rights reserved.
Licensed under the MIT license. See LICENSE file in the project root for full license information.
"""
import numpy as np
from sklearn.utils import shuffle
# number of dimensions
Dim = 2
# number of samples
N_train = 1000
N_test = 500
def generate(N, mean, cov, diff):
    #import ipdb;ipdb.set_trace()
    num_classes = len(diff)
    samples_per_class = int(N/2)
    X0 = np.random.multivariate_normal(mean, cov, samples_per_class)
    Y0 = np.zeros(samples_per_class)
    for ci, d in enumerate(diff):
        X1 = np.random.multivariate_normal(mean+d, cov, samples_per_class)
        Y1 = (ci+1)*np.ones(samples_per_class)
        X0 = np.concatenate((X0,X1))
        Y0 = np.concatenate((Y0,Y1))
    X, Y = shuffle(X0, Y0)
    return X,Y

def create_data_files(num_classes, diff, train_filename, test_filename, regression):
    print("Outputting %s and %s"%(train_filename, test_filename))
    mean = np.random.randn(num_classes)
    cov = np.eye(num_classes)
    for filename, N in [(train_filename, N_train), (test_filename, N_test)]:
        X, Y = generate(N, mean, cov, diff)
        # output in CNTK Text format
        with open(filename, "w") as dataset:
            num_labels = int((1 + np.amax(Y)))
            for i in range(N):
                dataset.write("|features ")
                for d in range(Dim):
                    dataset.write("%f " % X[i,d])
                if (regression):
                    dataset.write("|labels %f\n" % Y[i])
                else:
                    labels = ['0'] * num_labels
                    labels[int(Y[i])] = '1'
                    dataset.write("|labels %s\n" % " ".join(labels))

def main():
    # random seed (create the same data)
    np.random.seed(10)
    create_data_files(Dim, [3.0], "Train_cntk_text.txt", "Test_cntk_text.txt", True)
    create_data_files(Dim, [[3.0], [3.0, 0.0]], "Train-3Classes_cntk_text.txt", "Test-3Classes_cntk_text.txt", False)

if __name__ == '__main__':
    main()
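
For orientation, the sampling scheme implemented by generate() above amounts to a mixture of Gaussians with a shared identity covariance; a restatement in math (with μ the randomly drawn mean and d_c the class offsets taken from the diff argument):

```latex
x \mid (y = 0) \sim \mathcal{N}\!\left(\mu,\, I_{\mathrm{Dim}}\right), \qquad
x \mid (y = c) \sim \mathcal{N}\!\left(\mu + d_c,\, I_{\mathrm{Dim}}\right) \quad \text{for } c = 1, \dots, \mathrm{len}(\mathit{diff})
```

Each sample is then written as one CNTK-text-format line, `|features x_1 x_2 |labels y`, which is the same layout as the data file shown earlier in this commit.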

View file

@ -0,0 +1,116 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# logistic regression cntk script -- using network description language BrainScript
# which commands to run
command=Train:Output:DumpNodeInfo:Test
# required...
modelPath = "Models/LR_reg.dnn" # where to write the model to
deviceId = -1 # -1 means CPU; use 0 for your first GPU, 1 for the second etc.
dimension = 2 # input data dimensions
# training config
Train = [ # command=Train --> CNTK will look for a parameter named Train
action = "train" # execute CNTK's 'train' routine
# network description
BrainScriptNetworkBuilder = [
# sample and label dimensions
SDim = $dimension$
LDim = 1
features = Input (SDim)
labels = Input (LDim)
# parameters to learn
b = Parameter (LDim, 1) # bias
w = Parameter (LDim, SDim) # weights
# operations
p = Sigmoid (w * features + b)
lr = Logistic (labels, p)
err = SquareError (labels, p)
# root nodes
featureNodes = (features)
labelNodes = (labels)
criterionNodes = (lr)
evaluationNodes = (err)
outputNodes = (p)
]
# configuration parameters of the SGD procedure
SGD = [
epochSize = 0 # =0 means size of the training set
minibatchSize = 25
learningRatesPerSample = 0.04 # gradient contribution from each sample
maxEpochs = 50
]
# configuration of data reading
reader = [
readerType = "CNTKTextFormatReader"
file = "Train_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = 1
format = "dense"
]
]
]
]
# test
Test = [
action = "test"
reader = [
readerType = "CNTKTextFormatReader"
file = "Test_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = 1
format = "dense"
]
]
]
]
# output the results
Output = [
action = "write"
reader = [
readerType = "CNTKTextFormatReader"
file = "Test_cntk_text.txt"
input = [
features = [
dim = $dimension$ # $$ means variable substitution
format = "dense"
]
labels = [
dim = 1 # label has 1 dimension
format = "dense"
]
]
]
outputPath = "LR.txt" # dump the output to this text file
]
# dump parameter values
DumpNodeInfo = [
action = "dumpNode"
printValues = true
]
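
Read as plain math, the BrainScriptNetworkBuilder section above defines standard binary logistic regression. A sketch of the three root computations, assuming CNTK's Logistic node computes the usual binary cross-entropy:

```latex
p = \sigma(w x + b)                                              % Sigmoid (w * features + b)
\mathrm{lr} = -\,\bigl[\, y \log p + (1 - y) \log (1 - p) \,\bigr]   % Logistic (labels, p), training criterion
\mathrm{err} = (y - p)^2                                         % SquareError (labels, p), evaluation metric
```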

View file

@ -457,25 +457,25 @@ $(LIBDIR)/CompositeDataReader.so: $(COMPOSITEDATAREADER_OBJ) | $(CNTKMATH_LIB)
$(CXX) $(LDFLAGS) -shared $(patsubst %,-L%, $(LIBDIR) $(LIBPATH)) $(patsubst %,$(RPATH)%, $(ORIGINDIR) $(LIBPATH)) -o $@ $^ -l$(CNTKMATH)
########################################
# ExperimentalHTKMLFReader plugin
# HTKDeserializers plugin
########################################
EXPERIMENTALHTKMLFREADER_SRC =\
HTKDESERIALIZERS_SRC =\
$(SOURCEDIR)/Readers/HTKMLFReader/DataWriterLocal.cpp \
$(SOURCEDIR)/Readers/HTKMLFReader/HTKMLFWriter.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/ConfigHelper.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/Exports.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/HTKDataDeserializer.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/HTKMLFReader.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/MLFDataDeserializer.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/ConfigHelper.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/Exports.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/HTKDataDeserializer.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/HTKMLFReader.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/MLFDataDeserializer.cpp \
EXPERIMENTALHTKMLFREADER_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(EXPERIMENTALHTKMLFREADER_SRC))
HTKDESERIALIZERS_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(HTKDESERIALIZERS_SRC))
EXPERIMENTALHTKMLFREADER:=$(LIBDIR)/ExperimentalHTKMLFReader.so
ALL+=$(EXPERIMENTALHTKMLFREADER)
SRC+=$(EXPERIMENTALHTKMLFREADER_SRC)
HTKDESERIALIZERS:=$(LIBDIR)/HTKDeserializers.so
ALL+=$(HTKDESERIALIZERS)
SRC+=$(HTKDESERIALIZERS_SRC)
$(LIBDIR)/ExperimentalHTKMLFReader.so: $(EXPERIMENTALHTKMLFREADER_OBJ) | $(CNTKMATH_LIB)
$(LIBDIR)/HTKDeserializers.so: $(HTKDESERIALIZERS_OBJ) | $(CNTKMATH_LIB)
@echo $(SEPARATOR)
$(CXX) $(LDFLAGS) -shared $(patsubst %,-L%, $(LIBDIR) $(LIBPATH)) $(patsubst %,$(RPATH)%, $(ORIGINDIR) $(LIBPATH)) -o $@ $^ -l$(CNTKMATH)

View file

@ -1,19 +1,17 @@
# CNTK
## Latest news
*2016-06-10.* See CNTK v.1.5 binary release announcement in the official [Microsoft Research Blog](https://blogs.msdn.microsoft.com/msr_er/2016/06/10/microsoft-improves-programming-flexibility-of-its-ai-toolkit/)
*2016-06-08.* V 1.5 Binary release
CNTK v.1.5 binaries are on the [CNTK Releases page](https://github.com/Microsoft/CNTK/releases)
*2016-06-01.* An updated version of the network-description language has been made available under the new [BrainScript Network Builder](https://github.com/Microsoft/CNTK/wiki/BrainScript-Network-Builder), which features full expression parsing, recursive functions, and more.
*2016-05-19.* A 1-hour talk describing CNTK, how to use it, and how it works, has been posted at [Presentations](https://github.com/Microsoft/CNTK/wiki/Presentations).
*2016-05-16.* An example illustrating [Using CNTK with ResNet](https://github.com/Microsoft/CNTK/tree/master/Examples/Image/Miscellaneous/ImageNet/ResNet) is added to the codebase. The example contains some pre-trained models that can be used in various applications.
*2016-05-16.* CNTK Wiki now has [FAQ Page](https://github.com/Microsoft/CNTK/wiki/CNTK-FAQ)
*2016-05-05.* CNTK now supports *BlockMomentum* Stochastic Gradient Descent (SGD) algorithm.
See the details in the [Multiple GPUs and machines Wiki section](https://github.com/Microsoft/CNTK/wiki/Multiple-GPUs-and-machines)
*2016-05-03.* New transformations are implemented for **Image Reader**.
See the description in the [Image Reader Wiki section](https://github.com/Microsoft/CNTK/wiki/Image-reader)
*2016-04-25.* V 1.1 Binary release
CNTK v.1.1 binaries are on the [CNTK Releases page](https://github.com/Microsoft/CNTK/releases/tag/v1.1)
See [all news](https://github.com/Microsoft/CNTK/wiki/News).
## What is CNTK

View file

@ -20,7 +20,6 @@
#define let const auto
#endif
using namespace std;
using namespace Microsoft::MSR;
using namespace Microsoft::MSR::CNTK; // TODO: we should not have this in a header
@ -32,7 +31,7 @@ template <class ConfigRecordType, typename ElemType>
function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory(const ConfigRecordType& config);
template <class ConfigRecordType, typename ElemType>
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, vector<wstring>& outputNodeNamesVector);
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, const std::wstring& outputNodeNameConfig, std::vector<std::wstring>& outputNodeNamesVector);
// training (TrainActions.cpp)
template <class ConfigRecordType, typename ElemType>

View file

@ -46,7 +46,7 @@ using namespace Microsoft::MSR::CNTK;
template <typename ElemType>
static void DoEvalBase(const ConfigParameters& config, IDataReader& reader)
{
DEVICEID_TYPE deviceId = DeviceFromConfig(config);
//DEVICEID_TYPE deviceId = DeviceFromConfig(config);
ConfigArray minibatchSize = config(L"minibatchSize", "40960");
size_t epochSize = config(L"epochSize", "0");
if (epochSize == 0)
@ -58,26 +58,23 @@ static void DoEvalBase(const ConfigParameters& config, IDataReader& reader)
int traceLevel = config(L"traceLevel", "0");
size_t numMBsToShowResult = config(L"numMBsToShowResult", "100");
size_t firstMBsToShowResult = config(L"firstMBsToShowResult", "0");
size_t maxSamplesInRAM = config(L"maxSamplesInRAM", (size_t)SIZE_MAX);
size_t numSubminiBatches = config(L"numSubminibatches", (size_t)1);
bool enableDistributedMBReading = config(L"distributedMBReading", false);
ConfigArray evalNodeNames = config(L"evalNodeNames", "");
vector<wstring> evalNodeNamesVector;
for (int i = 0; i < evalNodeNames.size(); ++i)
{
evalNodeNamesVector.push_back(evalNodeNames[i]);
}
auto net = ComputationNetwork::CreateFromFile<ElemType>(deviceId, modelPath);
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"evalNodeNames", evalNodeNamesVector);
// set tracing flags
net->EnableNodeTracing(config(L"traceNodeNamesReal", ConfigParameters::Array(stringargvector())),
config(L"traceNodeNamesCategory", ConfigParameters::Array(stringargvector())),
config(L"traceNodeNamesSparse", ConfigParameters::Array(stringargvector())));
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult,
firstMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
eval.Evaluate(&reader, evalNodeNamesVector, mbSize[0], epochSize);
}
@ -124,6 +121,7 @@ void DoCrossValidate(const ConfigParameters& config)
int traceLevel = config(L"traceLevel", "0");
size_t numMBsToShowResult = config(L"numMBsToShowResult", "100");
size_t firstMBsToShowResult = config(L"firstMBsToShowResult", "0");
size_t maxSamplesInRAM = config(L"maxSamplesInRAM", (size_t)SIZE_MAX);
size_t numSubminiBatches = config(L"numSubminibatches", (size_t)1);
@ -160,8 +158,10 @@ void DoCrossValidate(const ConfigParameters& config)
cvModels.push_back(cvModelPath);
auto net = ComputationNetwork::CreateFromFile<ElemType>(deviceId, cvModelPath);
// BUGBUG: ^^ Should use GetModelFromConfig()
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult,
firstMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
fprintf(stderr, "Model %ls --> \n", cvModelPath.c_str());
auto evalErrors = eval.Evaluate(&cvDataReader, evalNodeNamesVector, mbSize[0], epochSize);
@ -213,8 +213,6 @@ template <typename ElemType>
void DoWriteOutput(const ConfigParameters& config)
{
ConfigParameters readerConfig(config(L"reader"));
// Why?
//readerConfig.Insert("traceLevel", config(L"traceLevel", "0"));
readerConfig.Insert("randomize", "None"); // we don't want randomization when output results
DataReader testDataReader(readerConfig);
@ -230,7 +228,7 @@ void DoWriteOutput(const ConfigParameters& config)
vector<wstring> outputNodeNamesVector;
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, outputNodeNamesVector);
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"outputNodeNames", outputNodeNamesVector);
// set tracing flags
net->EnableNodeTracing(config(L"traceNodeNamesReal", ConfigParameters::Array(stringargvector())),

View file

@ -158,12 +158,12 @@ bool CheckFunction(std::string& p_nodeType, bool* allowUndeterminedVariable)
else if (EqualInsensitive(nodeType, OperationNameOf(CRFNode), L"CRF")) ret = true;
#endif
else if (EqualInsensitive(nodeType, OperationNameOf(ClassBasedCrossEntropyWithSoftmaxNode), L"CBCEWithSM")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparisonEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparisonGreaterEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparisonGreaterNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparisonLessEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparsionLessNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ComparisonNotEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(EqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(GreaterEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(GreaterNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(LessEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(LessNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(NotEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ClipNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ConvolutionNode), L"Convolve")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(PoolingNode))) ret = true;

View file

@ -142,11 +142,11 @@ static void PatchOutputNodes(const ComputationNetworkPtr& net, const ConfigArray
}
template <class ConfigRecordType, typename ElemType>
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, vector<wstring>& outputNodeNamesVector)
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, const wstring& outputNodeNamesConfig, vector<wstring>& outputNodeNamesVector)
{
DEVICEID_TYPE deviceId = DeviceFromConfig(config);
ConfigArray outputNodeNames = config(L"outputNodeNames", ConfigArray(""));
ConfigArray outputNodeNames = config(outputNodeNamesConfig.c_str(), ConfigArray(""));
ComputationNetworkPtr net;
@ -185,5 +185,5 @@ template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<Script
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ScriptableObjects::IConfigRecord, double>(const ScriptableObjects::IConfigRecord& config);
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ConfigParameters, float>(const ConfigParameters& config);
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ConfigParameters, double>(const ConfigParameters& config);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, float>(const ConfigParameters& config, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, double>(const ConfigParameters& config, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, float> (const ConfigParameters& config, const wstring&, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, double>(const ConfigParameters& config, const wstring&, vector<wstring>& outputNodeNamesVector);

View file

@ -59,7 +59,10 @@ template <class C>
shared_ptr<C> CreateObject(const ConfigParameters& config, const wchar_t* id)
{
ConfigParameters readerConfig(config(id));
readerConfig.Insert("traceLevel", config(L"traceLevel", "0")); // TODO: fix this by adding it to all config blocks. Easy to fix in BS as 'config with [ traceLevel = 0 ]'.
if (!readerConfig.ExistsCurrent("traceLevel")) // do not overwrite "traceLevel" if it's already present
{
readerConfig.Insert("traceLevel", config(L"traceLevel", "0")); // TODO: fix this by adding it to all config blocks. Easy to fix in BS as 'config with [ traceLevel = 0 ]'.
}
return make_shared<C>(readerConfig); // old CNTK config specifies a dictionary which then must be explicitly instantiated
}

View file

@ -381,6 +381,7 @@ private:
}
// find a file either at given location or traverse include paths
// TODO: also allow ... syntax, where ... refers to the directory of the enclosing file
static wstring FindSourceFile(const wstring& path, const vector<wstring>& includePaths)
{
if (File::Exists(path))

View file

@ -56,8 +56,9 @@ CNTK2 = [
// 1. Inputs
// Changes: dims -> shape
DynamicAxis(tag='') = new ComputationNode [ operation = 'DynamicAxis' ; /*plus the function args*/ ]
# TODO: Is it a good idea to default to "feature"?
Input(shape, dynamicAxis='', tag='feature') = new ComputationNode [ operation = 'InputValue' ; shape = new TensorShape [ /*shape*/ ] ; isImage = false /*plus the function args*/ ]
// 2. Variables and constants
// Changes: ParameterTensor -> _Parameter; "dims" -> "shape"
// Python API:
@ -67,7 +68,7 @@ CNTK2 = [
// TODO: The API for Parameter is different in current 2.0 design, getting a constant as input for the initial values.
// This needs to be fixed to follow the way the Constant() is exposed in Python
// Making this an internal node with "_" until we agree on the final interface:
_Parameter(shape, value = 0, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*shape */ ] /*plus the function args*/ ]
_Parameter(shape, value = 0, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*shape */ ] /*plus the function args*/ ]
// 3. Shape operations
// Changes: NewReshape -> Reshape, input -> _, dims -> shape
@ -85,6 +86,16 @@ CNTK2 = [
].out
else new ComputationNode [ operation = 'Slice' ; inputs = _ /*plus the function args*/ ] # non-time axis
Splice (_, axis=1, tag='') = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
if axis < 1 then Fail('Splice does not yet implement splicing the time axis.')
else if axis == 1 then [tag1=tag; out = RowStack (_, tag=tag1)].out
else [ # workaround: swap 'axis' to first position, RowStack, swap back
ArrayTransposeDimensions (_, axis1, axis2) = [ # transpose each element of a BS array
inputsT[i:0..Length(_)-1] = TransposeDimensions (_[i], axis1, axis2)
].inputsT
out = [tag1=tag; out=TransposeDimensions (RowStack (ArrayTransposeDimensions (_, 1, axis)), 1, axis, tag=tag)].out
].out
// Swap two axes of a tensor
TransposeDimensions(_, axis1, axis2, tag='') = new ComputationNode [ operation = 'TransposeDimensions' ; inputs = _ /*plus the function args*/ ]
@ -134,7 +145,7 @@ CNTK2 = [
// No changes here - we said the default input would be the label sequence here, against which the
// empirical sequence is compared to. Keeping this for now.
CrossEntropyWithSoftmax(_, outProbVectorSequence, tag='') = new ComputationNode [ operation = 'CrossEntropyWithSoftmax' ; inputs = (_ : outProbVectorSequence) /*plus the function args*/ ]
ErrorPrediction(_, outVectorSequence, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = (_ : outVectorSequence) /*plus the function args*/ ]
ErrorPrediction(_, outVectorSequence, topN=1, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = if topN == 1 then (_ : outVectorSequence) else (_ : outVectorSequence : Constant (topN)) /*plus the function args*/ ]
// 13. Comparison nodes
Less(_, y, tag='') = new ComputationNode [ operation = 'Less' ; inputs = (_ : y) /*plus the function args*/ ]
@ -149,10 +160,10 @@ CNTK2 = [
Identity(_, tag='') = new ComputationNode [ operation = 'Pass' ; inputs = _ /*plus the function args*/ ]
]
LearnableParameter (outputDim, inputDim, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ dims = (outputDim : inputDim) ] /*plus the function args*/ ]
LearnableParameter (outputDim, inputDim, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ dims = (outputDim : inputDim) ] /*plus the function args*/ ]
Parameter = LearnableParameter // deprecated
# TODO: make Parameter take tensor dims?
ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
ConstantFromString(literal, tag='') = ParameterTensor((0)/*dim, will be inferred*/, init = 'fromLiteral', initFromLiteral = literal, learningRateMultiplier = 0.0)
DynamicAxis(tag='') = new ComputationNode [ operation = 'DynamicAxis' ; /*plus the function args*/ ]
Input(dims, dynamicAxis='', tag='feature') = new ComputationNode [ operation = 'InputValue' ; shape = new TensorShape [ /*dims*/ ] ; isImage = false /*plus the function args*/ ]
@ -170,14 +181,14 @@ Shift(input, fromOffset, boundaryValue, boundaryMode=-1/*context*/, dim=-1, tag=
RowSlice(beginIndex, numRows, input, tag='') = Slice(beginIndex, beginIndex + numRows, input, axis = 1)
RowRepeat(input, numRepeats, tag='') = new ComputationNode [ operation = 'RowRepeat' ; inputs = input /*plus the function args*/ ]
RowStack(inputs, tag='') = new ComputationNode [ operation = 'RowStack' /*plus the function args*/ ]
Splice (inputs, axis=1) = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
Splice (inputs, axis=1, tag='') = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
if axis < 1 then Fail('Splice does not yet implement splicing the time axis.')
else if axis == 1 then RowStack (inputs)
else if axis == 1 then [tag1=tag; out = RowStack (inputs, tag=tag1)].out
else [ # workaround: swap 'axis' to first position, RowStack, swap back
ArrayTransposeDimensions (inputs, axis1, axis2) = [ # transpose each element of a BS array
inputsT[i:0..Length(inputs)-1] = TransposeDimensions (inputs[i], axis1, axis2)
].inputsT
out = TransposeDimensions (RowStack (ArrayTransposeDimensions (inputs, 1, axis)), 1, axis)
out = [tag1=tag; out=TransposeDimensions (RowStack (ArrayTransposeDimensions (inputs, 1, axis)), 1, axis, tag=tag)].out
].out
Reshape(input, numRows, imageWidth = 0, imageHeight = 0, imageChannels = 0, tag='') = new ComputationNode [ operation = 'LegacyReshape' ; inputs = input /*plus the function args*/ ]
NewReshape(input, dims, beginAxis=0, endAxis=0, tag='') = new ComputationNode [ operation = 'Reshape' ; inputs = input ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
@ -232,7 +243,7 @@ DiagTimes(diagonalMatrixAsColumnVector, matrix, tag='') = new ComputationNode [
Dropout(activationVectorSequence, tag='') = new ComputationNode [ operation = 'Dropout' ; inputs = activationVectorSequence /*plus the function args*/ ]
ElementTimes(aMatrix, anotherMatrix, tag='') = new ComputationNode [ operation = 'ElementTimes' ; inputs = (aMatrix : anotherMatrix) /*plus the function args*/ ]
ElementDivide(aMatrix, anotherMatrix, tag='') = ElementTimes(aMatrix, Reciprocal(anotherMatrix), tag=tag)
ErrorPrediction(labelVectorSequence, outVectorSequence, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = (labelVectorSequence : outVectorSequence) /*plus the function args*/ ]
ErrorPrediction = CNTK2.ErrorPrediction
Exp(x, tag='') = new ComputationNode [ operation = 'Exp' ; inputs = x /*plus the function args*/ ]
Floor(x, tag='') = new ComputationNode [ operation = 'Floor' ; inputs = x /*plus the function args*/ ]
GatherPacked(indexSequence, sourceData, tag='') = new ComputationNode [ operation = 'GatherPacked' ; inputs = (indexSequence : sourceData) /*plus the function args*/ ]

View file

@ -265,7 +265,7 @@ void DoCommands(const ConfigParameters& config, const shared_ptr<MPIWrapper>& mp
{
TestCn<ElemType>(config); // for "devtest" action pass the root config instead
}
else if (thisAction == "dumpnode")
else if (thisAction == "dumpNode" /*deprecated:*/|| thisAction == "dumpnode")
{
DumpNodeInfo<ElemType>(commandParams);
}
@ -473,6 +473,7 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapp
// parallel training
shared_ptr<Microsoft::MSR::CNTK::MPIWrapper> mpi;
auto ensureMPIWrapperCleanup = MakeScopeExit(&MPIWrapper::DeleteInstance);
bool paralleltrain = config(L"parallelTrain", false);
if (paralleltrain)
mpi = MPIWrapper::GetInstance(true /*create*/);
@ -544,7 +545,6 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapp
LOGPRINTF(stderr, "__COMPLETED__\n");
fflush(stderr);
MPIWrapper::DeleteInstance();
return EXIT_SUCCESS;
}
@ -571,6 +571,7 @@ int wmainOldCNTKConfig(int argc, wchar_t* argv[])
// paralleltrain training
shared_ptr<Microsoft::MSR::CNTK::MPIWrapper> mpi;
auto ensureMPIWrapperCleanup = MakeScopeExit(&MPIWrapper::DeleteInstance);
bool paralleltrain = config(L"parallelTrain", "false");
if (paralleltrain)
mpi = MPIWrapper::GetInstance(true /*create*/);
@ -662,7 +663,6 @@ int wmainOldCNTKConfig(int argc, wchar_t* argv[])
LOGPRINTF(stderr, "__COMPLETED__\n");
fflush(stderr);
MPIWrapper::DeleteInstance();
return EXIT_SUCCESS;
}

View file

@ -146,7 +146,6 @@
<ClInclude Include="..\Common\Include\Basics.h" />
<ClInclude Include="..\Common\Include\BestGpu.h" />
<ClInclude Include="..\Common\Include\DataReader.h" />
<ClInclude Include="..\Common\Include\CompositeDataReader.h" />
<ClInclude Include="..\Common\Include\ExceptionWithCallStack.h" />
<ClInclude Include="..\Common\Include\StringUtil.h" />
<ClInclude Include="..\Common\Include\TensorShape.h" />
@ -165,6 +164,7 @@
<ClInclude Include="..\Math\Matrix.h" />
<ClInclude Include="..\ComputationNetworkLib\PreComputeNodes.h" />
<ClInclude Include="..\ComputationNetworkLib\MatrixPool.h" />
<ClInclude Include="..\Readers\CompositeDataReader\CompositeDataReader.h" />
<ClInclude Include="..\Readers\ReaderLib\BlockRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\Bundler.h" />
<ClInclude Include="..\Readers\ReaderLib\ChunkRandomizer.h" />
@ -173,12 +173,10 @@
<ClInclude Include="..\Readers\ReaderLib\NoRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\Packer.h" />
<ClInclude Include="..\Readers\ReaderLib\Reader.h" />
<ClInclude Include="..\Readers\ReaderLib\SampleModePacker.h" />
<ClInclude Include="..\Readers\ReaderLib\SequencePacker.h" />
<ClInclude Include="..\Readers\ReaderLib\SequenceRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\StringToIdMap.h" />
<ClInclude Include="..\Readers\ReaderLib\Transformer.h" />
<ClInclude Include="..\Readers\ReaderLib\TransformerBase.h" />
<ClInclude Include="..\SGDLib\DataReaderHelpers.h" />
<ClInclude Include="..\SGDLib\SGD.h" />
<ClInclude Include="..\SGDLib\SimpleEvaluator.h" />

View file

@ -193,9 +193,6 @@
<ClInclude Include="..\Readers\ReaderLib\Reader.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\SampleModePacker.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\SequencePacker.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
@ -208,9 +205,6 @@
<ClInclude Include="..\Readers\ReaderLib\Transformer.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\TransformerBase.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Common\Include\CompositeDataReader.h">
<Filter>Common\Include</Filter>
</ClInclude>

View file

@ -653,9 +653,43 @@ public:
}
};
#endif
template <typename EF>
struct ScopeExit {
explicit ScopeExit(EF &&f) :
m_exitFunction(std::move(f)), m_exitOnDestruction(true)
{}
~ScopeExit()
{
if (m_exitOnDestruction)
m_exitFunction();
}
ScopeExit(ScopeExit&& other)
: m_exitFunction(std::move(other.m_exitFunction)), m_exitOnDestruction(other.m_exitOnDestruction)
{
other.m_exitOnDestruction = false;
}
private:
// Disallow copy construction, assignment
ScopeExit(const ScopeExit&) = delete;
ScopeExit& operator=(const ScopeExit&) = delete;
// Disallow move assignment
ScopeExit& operator=(ScopeExit&&) = delete;
EF m_exitFunction;
bool m_exitOnDestruction;
};
template <typename EF>
ScopeExit<typename std::remove_reference<EF>::type> MakeScopeExit(EF&& exitFunction)
{
return ScopeExit<typename std::remove_reference<EF>::type>(std::forward<EF>(exitFunction));
}
}
}
}}}
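
The ScopeExit/MakeScopeExit helper added above is what CNTK.cpp now relies on for MPI cleanup (auto ensureMPIWrapperCleanup = MakeScopeExit(&MPIWrapper::DeleteInstance)). Below is a self-contained sketch of the same pattern; the file name and the lambda body are illustrative only, not part of the change:

```cpp
#include <cstdio>
#include <type_traits>
#include <utility>

// Compact copy of the helper from the hunk above, so this sketch compiles on its own.
template <typename EF>
struct ScopeExit
{
    explicit ScopeExit(EF&& f) : m_exitFunction(std::move(f)), m_exitOnDestruction(true) {}
    ~ScopeExit() { if (m_exitOnDestruction) m_exitFunction(); }
    ScopeExit(ScopeExit&& other)
        : m_exitFunction(std::move(other.m_exitFunction)), m_exitOnDestruction(other.m_exitOnDestruction)
    { other.m_exitOnDestruction = false; }
private:
    ScopeExit(const ScopeExit&) = delete;
    ScopeExit& operator=(const ScopeExit&) = delete;
    ScopeExit& operator=(ScopeExit&&) = delete;
    EF m_exitFunction;
    bool m_exitOnDestruction;
};

template <typename EF>
ScopeExit<typename std::remove_reference<EF>::type> MakeScopeExit(EF&& exitFunction)
{
    return ScopeExit<typename std::remove_reference<EF>::type>(std::forward<EF>(exitFunction));
}

int main()
{
    FILE* f = std::fopen("example.tmp", "w");
    // The cleanup runs when 'closeFile' leaves scope, including when an exception propagates out.
    auto closeFile = MakeScopeExit([&] { if (f) std::fclose(f); });
    if (f) std::fputs("hello\n", f);
    return 0;
}
```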
#ifdef _WIN32
// ----------------------------------------------------------------------------

View file

@ -109,9 +109,6 @@ extern "C" EVAL_API void GetEvalF(IEvaluateModel<float>** peval);
extern "C" EVAL_API void GetEvalD(IEvaluateModel<double>** peval);
// Data Reader class
// interface for clients of the Data Reader
// mirrors the IEvaluateModel interface, except the Init method is private (use the constructor)
template <typename ElemType>
class Eval : public IEvaluateModel<ElemType>, protected Plugin
{
@ -120,7 +117,8 @@ private:
void GetEvalClass(const std::string& config);
// Destroy - cleanup and remove this class
// Destroy - cleanup and remove this class. Workaround to ensure that memory allocation / deallocation
// occur within the DLL boundary.
// NOTE: this destroys the object, and it can't be used past this point
virtual void Destroy();
@ -168,13 +166,18 @@ public:
// Extended interface
// ------------------------------------------------------------------------
// Partial instantiation of vector to reduce to one argument.
template <typename ElemType>
using Vector = std::vector<ElemType, std::allocator<ElemType>>;
//
// A buffer to keep data for all samples in a (variable length) sequence
// from a single input or output.
// This is used for both dense and sparse data.
//
template<typename ElemType>
struct VariableBuffer
template<typename ElemType, template<typename> class Container = Vector>
struct ValueBuffer
{
//
// All elements of a sequence, concatenated.
@ -183,7 +186,7 @@ struct VariableBuffer
// [2,2] and 12 elements in the buffer, the number of samples is 3.
// For sparse inputs, the number of samples is indicated by the m_colIndices field.
//
std::vector<ElemType> m_buffer;
Container<ElemType> m_buffer;
// In case of sparse data, the following is also used. Otherwise, the
// contents are ignored.
@ -199,16 +202,45 @@ struct VariableBuffer
// For every element in buffer, an entry in this array gives its position.
// For every vector the entries must be ascending.
//
std::vector<int> m_indices;
Container<int> m_indices;
//
// Contains numberOfsamples + 1 indices into the buffer. The first entry
// is always 0. The last entry points after the last element.
// See http://docs.nvidia.com/cuda/cusparse/#compressed-sparse-column-format-csc
//
std::vector<int> m_colIndices;
Container<int> m_colIndices;
};
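
A hand-worked illustration of the sparse (CSC) layout the comments above describe. The input dimension, the values, and the "Eval.h" include path are assumptions made only for this sketch:

```cpp
#include "Eval.h"

using namespace Microsoft::MSR::CNTK;

void FillSparseSequenceExample()
{
    // Hypothetical input of dimension 4 holding a sequence of 2 samples:
    //   sample 0: only row 1 is non-zero (value 0.5)
    //   sample 1: rows 0 and 3 are non-zero (values 2.0 and 1.0)
    ValueBuffer<float> seq;
    seq.m_buffer     = { 0.5f, 2.0f, 1.0f }; // all non-zero elements, concatenated sample by sample
    seq.m_indices    = { 1, 0, 3 };          // row of each element, ascending within each sample
    seq.m_colIndices = { 0, 1, 3 };          // numberOfSamples + 1 offsets; first is 0, last points past the end
}
```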
//
// Helper class that can be used in exchange of a std::vector if the memory is managed externally.
//
template <typename ElemType>
struct VectorRef
{
ElemType* m_vector;
size_t m_capacity; // ElemTypes allocated
size_t m_size; // ElemTypes used.
VectorRef() : m_vector(nullptr), m_capacity(0), m_size(0) {}
void InitFrom(std::vector<ElemType>& src) { m_vector = src.data(); m_capacity = src.capacity(); m_size = src.size(); }
size_t size() const { return m_size; }
size_t capacity() const { return m_capacity; }
ElemType* data() { return m_vector; }
const ElemType* data() const { return m_vector; }
ElemType* begin() { return m_vector; }
ElemType* end() { return m_vector + m_size; }
void resize(size_t size) { m_size = size; }
ElemType& operator[](size_t idx) { return m_vector[idx]; }
const ElemType& operator[](size_t idx) const { return m_vector[idx]; }
};
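
A minimal sketch of how VectorRef is meant to be used when the memory is owned elsewhere; here an ordinary std::vector merely stands in for an externally managed (e.g. .NET-pinned) buffer, and the include path is an assumption:

```cpp
#include <vector>
#include "Eval.h" // assumed location of the VectorRef definition above

using namespace Microsoft::MSR::CNTK;

void WrapExternalBuffer()
{
    std::vector<float> external(128, 0.0f); // stand-in for memory managed by the caller
    VectorRef<float> ref;
    ref.InitFrom(external); // ref now aliases external.data(); no ownership is taken
    ref.resize(64);         // adjusts only the logical size, never the allocation
}
```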
template <typename ElemType>
using Values = std::vector<ValueBuffer<ElemType, Vector>>;
template <typename ElemType>
using ValueRefs = std::vector<ValueBuffer<ElemType, VectorRef>>;
//
// Meta data
//
@ -237,19 +269,32 @@ struct VariableLayout
// Dimension of the tensor, flattened to 1 dimension, for one entry on the dynamic axis.
// E.g. for a tensor [2,3,*] this would be 6.
int m_numElements;
// Name of the axis, potentially shared between inputs. For any two inputs sharing the same
// dynamic axis, the sequence cardinality must be the same.
std::wstring m_dynamicAxisName;
};
template <typename ElemType>
using Variables = std::vector<VariableBuffer<ElemType>>;
class VariableSchema : public std::vector<VariableLayout>
{
public:
template<typename ElemType>
Values<ElemType> CreateBuffers(const std::vector<size_t>& maxLengths)
{
if (maxLengths.size() != size())
throw std::exception("Expected max lengths for all variables.");
using VariableSchema = std::vector<VariableLayout>;
Values<ElemType> buffers(size());
for (size_t i = 0; i < size(); ++i)
{
buffers[i].m_buffer.reserve(operator[](i).m_numElements * maxLengths[i]);
}
return buffers;
}
};
//
// Extended interface, allowing for sparse input.
// Implementation constraints:
// - Every output is a single tensor (not a batch),
// - Outputs must be dense.
// - Output buffer must be preallocated.
//
template <typename ElemType>
class IEvaluateModelExtended : public IEvaluateModelBase<ElemType>
@ -265,7 +310,7 @@ public:
// Allocate internal state for calling ForwardPass(). The call restricts the network (inputs and outputs)
// to the functions represented by the output name.
//
virtual void StartForwardEvaluation(std::vector<std::wstring> outputs) = 0;
virtual void StartForwardEvaluation(const std::vector<std::wstring>& outputs) = 0;
//
// GetVariableLayout - retrieve information about tensor shapes and memory layout of inputs necessary for a
@ -275,17 +320,22 @@ public:
virtual VariableSchema GetInputSchema() const = 0;
//
// Evaluate - Evaluate (perform a forward pass for) a single unit using the model with the given inputs and
// ForwardPass - Evaluate (perform a forward pass for) a single unit using the model with the given inputs and
// outputs.
// The layout and shape of the data in inputs vector must match the schema returned by GetInputLayouts.
// Output must be preallocated and sized to avoid memory allocation / deallocation across DLL
// boundaries.
// This method is not reentrant, as the forward pass keeps internal state.
// outputId - output to compute values for. See GetOutputLayouts()
// inputs - vector of input buffers, one for every input as given by GetInputLayouts()
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing
// will happen during evaluation.
// Called after StartForwardEvaluation()
// outputs - vector of output buffers. Must be sized to fit output schema.
//
virtual void ForwardPass(const Variables<ElemType>& inputs, Variables<ElemType>& output) = 0;
virtual void ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& output) = 0;
//
// Same as above, but takes references to static arrays instead of std::vector
// (e.g. when vectors are manages by .net)
//
virtual void ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& output) = 0;
};
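
Putting the declarations above together, a rough calling sequence for the extended interface might look like the sketch below. The output node name, the output dimension, and the way the model object is obtained are assumptions, not part of this header:

```cpp
#include <string>
#include <vector>
#include "Eval.h"

using namespace Microsoft::MSR::CNTK;

// 'model' is assumed to have been created through the usual GetEval/plugin mechanism.
void EvaluateOnce(IEvaluateModelExtended<float>* model, const std::wstring& outputName, size_t outputDim)
{
    VariableSchema inputSchema = model->GetInputSchema();

    // One buffer per input; reserve room for sequences of at most one sample each.
    Values<float> inputs = inputSchema.CreateBuffers<float>(std::vector<size_t>(inputSchema.size(), 1));
    // ... fill inputs[i].m_buffer (plus m_indices / m_colIndices for sparse inputs) ...

    model->StartForwardEvaluation({ outputName }); // restrict the network to this output

    Values<float> outputs(1);
    outputs[0].m_buffer.resize(outputDim); // outputs must be preallocated and dense
    model->ForwardPass(inputs, outputs);   // outputs[0].m_buffer now holds the requested tensor
}
```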
template <typename ElemType>

View file

@ -157,7 +157,11 @@ public:
fprintf(stderr, "~MPIWrapper\n");
fflush(stderr);
// TODO: Check for error code and throw if !std::uncaught_exception()
MPI_Finalize();
// Do not finalize in event of an exception since calling MPI_Finalize without
// all pending communications being finished results in a hang
if (!std::uncaught_exception())
MPI_Finalize();
}
private:

View file

@ -694,13 +694,13 @@ class auto_file_ptr
FILE* f;
FILE* operator=(auto_file_ptr&); // can't ref-count: no assignment
auto_file_ptr(auto_file_ptr&);
void close() throw()
void close()
{
if (f && f != stdin && f != stdout && f != stderr)
{
int rc = ::fclose(f);
if ((rc != 0) && !std::uncaught_exception())
RuntimeError("auto_file_ptr: failed to close file");
RuntimeError("auto_file_ptr: failed to close file: %s", strerror(errno));
f = NULL;
}

View file

@ -925,24 +925,27 @@ void ComputationNodeBase::EnumerateArcs(std::unordered_set<ComputationNodeBasePt
// ========================================
// BUGBUG: this only currently works for one ElemType, not both
template <class ElemType>
void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDConfig, size_t AlignedSize)
void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDConfig, size_t alignedSize)
{
vector<pair<vector<wstring>, float>> nodeGroups;
wregex NameFilter;
wregex nameFilter;
for (const auto& e : SVDConfig)
{
wstring regexStr = e.first;
float keepRatio = e.second;
vector<wstring> NamesInGroup;
if (regexStr.empty())
continue;
NameFilter.assign(regexStr);
float keepRatio = e.second;
vector<wstring> namesInGroup;
nameFilter.assign(regexStr);
for (auto n = m_nameToNodeMap.begin(); n != m_nameToNodeMap.end(); n++)
{
if (!regexStr.empty() && !regex_match(n->first, NameFilter))
if (!regex_match(n->first, nameFilter))
{
// if regexStr is not empty and the the node node does not match with the regexStr
// if regexStr is not empty and the the node does not match with the regexStr
continue;
}
@ -954,20 +957,20 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
continue;
// still here ?
NamesInGroup.push_back(n->first);
namesInGroup.push_back(n->first);
}
nodeGroups.push_back(make_pair(NamesInGroup, keepRatio));
nodeGroups.push_back(make_pair(namesInGroup, keepRatio));
}
size_t groupID = 0;
for (auto& group : nodeGroups)
{
float keepratio = group.second;
float keepRatio = group.second;
fprintf(stderr,
"--------------------------------------------------------------------------------------------\n");
fprintf(stderr,
"ParameterSVD: start to process group %d with KeepRatio=%.2f\n",
(int) groupID++, keepratio);
(int) groupID++, keepRatio);
fprintf(stderr,
"--------------------------------------------------------------------------------------------\n");
@ -1002,17 +1005,17 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
// S \in R^{min(m,n),1}
// S is in descending order
ElemType totalenergy = 0.0f;
ElemType totalEnergy = 0.0f;
for (size_t i = 0; i < S.GetNumRows(); i++)
totalenergy += S(i, 0);
ElemType keepenergy = totalenergy * keepratio;
ElemType runenergy = 0.0f;
totalEnergy += S(i, 0);
ElemType keepEnergy = totalEnergy * keepRatio;
ElemType runEnergy = 0.0f;
size_t r = 0;
for (size_t indx = 0; indx < S.GetNumRows(); indx++)
{
runenergy += S(indx, 0);
if (runenergy > keepenergy)
runEnergy += S(indx, 0);
if (runEnergy > keepEnergy)
{
r = indx + 1;
break;
@ -1021,10 +1024,10 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
r = r > S.GetNumRows() ? S.GetNumRows() : r;
if (r % AlignedSize != 0)
if (r % alignedSize != 0)
{
r -= r % AlignedSize;
r = r + AlignedSize > S.GetNumRows() ? S.GetNumRows() : r + AlignedSize;
r -= r % alignedSize;
r = r + alignedSize > S.GetNumRows() ? S.GetNumRows() : r + alignedSize;
}
// r = (r + 7) & (~7); // to keep the number of rows/cols of the resultant matrix a multiple of 8
// which can be helpful at runtime
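For illustration (made-up numbers): if S = (4, 3, 2, 1) then totalEnergy = 10; with keepRatio = 0.8 the threshold keepEnergy = 8, and the running sum first exceeds it at the third singular value (4 + 3 + 2 = 9), so r = 3. With alignedSize = 4, the remainder 3 % 4 is dropped and r is rounded up to the next multiple of 4, capped at the number of singular values, so r becomes 4 and every value is kept in this tiny example.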
@ -1033,7 +1036,7 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
fprintf(stderr,
"Performing SVD for a %5d-by-%-5d matrix (node name: %-20ls) --- computation time %5.2f secs ; keep %4.1f%% energy ===> keep %5d svd values (reduce to %4.1f%% parameters) \n",
(int) m, (int) n, name.c_str(), elapsedtime.count(),
keepratio * 100, (int) r,
keepRatio * 100, (int) r,
((m + n) * r + 0.0f) / m / n * 100);
// redU in R^ {mXr}
@ -1047,28 +1050,49 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
Matrix<ElemType> redS(r, (size_t)1, A.GetDeviceId());
for (size_t i = 0; i < r; i++)
{
ElemType sqrtsigma = (ElemType) sqrt((double) S(i, 0));
redS(i, 0) = sqrtsigma;
ElemType sqrtSigma = (ElemType) sqrt((double) S(i, 0));
redS(i, 0) = sqrtSigma;
}
redU.RowElementMultiplyWith(redS.Transpose());
redVT.ColumnElementMultiplyWith(redS);
// Step 2. create two new Parameter nodes and one Times node
wstring leftChildName = name + L"-U"; // BUGBUG: With BrainScript, node names must be proper identifiers/variable expressions. We can't have '-' in node names.
wstring rightChildName = name + L"-V";
wstring leftChildName = name + L"_U";
wstring rightChildName = name + L"_V";
shared_ptr<ComputationNode<ElemType>> pLeft = AddNodeToNetWithElemType(New<LearnableParameter<ElemType>>(m_deviceId, leftChildName, m, r));
shared_ptr<ComputationNode<ElemType>> pRight = AddNodeToNetWithElemType(New<LearnableParameter<ElemType>>(m_deviceId, rightChildName, r, n));
// TODO: We should be able to move instead of copy but it currently isn't strightforward
// TODO: We should be able to move instead of copy but it currently isn't straightforward
// due to redU and redVT being slices
pLeft->ValueAsMatrix() = redU.DeepClone();
pRight->ValueAsMatrix() = redVT.DeepClone();
shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"-SVD"), { pLeft, pRight });
// Step 3. Change the network hierarchy to include the SVD nodes
auto parentNodes = GetParentNodes(name);
// Step 3. remove old node
ReplaceLeafNode(name, pTimes);
for (auto& pParentNode : parentNodes)
{
// Change the hierarchy of the network if the node is immediately used in a product
auto pParentTimesNode = dynamic_pointer_cast<TimesNode<ElemType>>(pParentNode);
if (pParentTimesNode)
{
// Change the hierarchy to ensure multiplication order
// U*(V*X)
shared_ptr<ComputationNode<ElemType>> pTimes = New<TimesNode<ElemType>>(m_deviceId, name + L"_SVD");
pTimes->AttachInputs({ pLeft, pParentNode });
InsertNode(pParentNode->GetName(), pTimes, pParentNode->GetTags());
ReplaceLeafNode(name, pRight);
}
else
{
// Default multiplication order
shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"_SVD"), { pLeft, pRight });
ReplaceLeafNode(name, pTimes);
}
}
}
}
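The reordering matters for cost: with W approximated by U*V, where W is m-by-n, U is m-by-r, V is r-by-n and the minibatch X has T columns, evaluating (U*V)*X first rebuilds an m-by-n product and costs on the order of m*n*r + m*n*T multiply-adds, while U*(V*X) costs only r*n*T + m*r*T. For illustrative sizes m = n = 1024, r = 64, T = 128 that is roughly 201M versus 17M multiply-adds, which is why the hierarchy is rewired whenever the factored parameter feeds directly into a Times node.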

Просмотреть файл

@ -50,16 +50,16 @@ public:
ComputationNetwork() :
m_randomSeedOffset(0),
m_isCompiled(false),
m_areMatricesAllocated(false),
m_isCompiled(false),
m_areMatricesAllocated(false),
m_pMBLayoutOfNetwork(make_shared<MBLayout>(1, 0, L"*")),
m_environment(make_shared<ComputationEnvironment>())
{
//m_pMBLayoutOfNetwork->SetAxisName(L"T");
}
ComputationNetwork(DEVICEID_TYPE deviceId)
: ComputationNetwork()
ComputationNetwork(DEVICEID_TYPE deviceId) :
ComputationNetwork()
{
SetDeviceId(deviceId);
}
@ -82,6 +82,7 @@ public:
protected:
void ConstructFromRoots(DEVICEID_TYPE deviceId, std::deque<ComputationNodeBasePtr>&& roots, const map<ComputationNodeBasePtr, ComputationNodeBasePtr>& replacements);
void ProcessSpecialNodes(const ScriptableObjects::IConfigRecord& config, std::deque<ComputationNodeBasePtr>& roots);
public:
// -----------------------------------------------------------------------
@ -175,6 +176,7 @@ private:
size_t ValidateNodes(list<ComputationNodeBasePtr> nodes, bool isFirstPass, bool isFinalValidationPass);
bool ValidateNode(ComputationNodeBasePtr node, bool isFinalValidationPass) const;
void MarkValueNonSharableNodes();
void ChangeNodeInputs(ComputationNodeBasePtr fromNode, ComputationNodeBasePtr toNode);
private:
void DetermineSetOfAllRoots();
@ -360,7 +362,8 @@ public:
void RenameNode(const std::wstring& nodeNameOrig, const std::wstring& nodeNameNew);
void RenameNode(ComputationNodeBasePtr node, const std::wstring& newNodeName);
void DeleteNode(const std::wstring& nodeName);
void ChangeNode(wstring nodeName, ComputationNodeBasePtr newNode);
void ReplaceNode(wstring nodeName, ComputationNodeBasePtr newNode);
void InsertNode(wstring nodeName, ComputationNodeBasePtr newNode, const std::set<std::wstring>& newNodeTags);
void ReplaceLeafNode(wstring oldNodeName, ComputationNodeBasePtr newNode);
void ReplaceFinalCriterionNode(wstring oldNodeName, ComputationNodeBasePtr newNode);
void AddFeatureNode(ComputationNodeBasePtr featureNode);
@ -501,7 +504,7 @@ public:
// Collect all input nodes that outputNodes depend on.
std::vector<ComputationNodeBasePtr> InputNodesForOutputs(const std::vector<std::wstring>& outputNodeNames)
{
// use map to remove duplicated items
// use set to remove duplicated items
auto outputNodes = OutputNodesByName(outputNodeNames);
std::set<ComputationNodeBasePtr> inputNodesMap;
@ -609,6 +612,28 @@ public:
return parents;
}
// Return set of immediate output (parent) nodes for given input (child) node
// TODO: there should be a map from output nodes to inputs, so that this operation doesn't take quadratic time
std::vector<ComputationNodeBasePtr> GetParentNodes(const std::wstring& inputNodeName)
{
std::set<ComputationNodeBasePtr> outputNodes;
for (const auto& iter : m_nameToNodeMap)
{
const auto& node = iter.second;
// Iterate over inputs of this node
for (const auto& inputNode : node->GetInputs())
{
if (inputNode->GetName() == inputNodeName)
{
outputNodes.insert(node);
}
}
}
return std::vector<ComputationNodeBasePtr>(outputNodes.begin(), outputNodes.end());
}
std::list<ComputationNodeBasePtr> GetNodesWithType(const wstring typeName, const ComputationNodeBasePtr& rootNode = nullptr)
{
std::list<ComputationNodeBasePtr> nodesWithType;

Просмотреть файл

@ -41,13 +41,13 @@ static shared_ptr<ComputationNode<ElemType>> CreateStandardNode(const std::wstri
if (nodeType == OperationNameOf(AbsNode)) return New<AbsNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ClassBasedCrossEntropyWithSoftmaxNode))return New<ClassBasedCrossEntropyWithSoftmaxNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ClipNode)) return New<ClipNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparisonEqualNode)) return New<ComparisonEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparisonGreaterEqualNode)) return New<ComparisonGreaterEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparisonGreaterNode)) return New<ComparisonGreaterNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparisonLessEqualNode)) return New<ComparisonLessEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparsionLessNode)) return New<ComparsionLessNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ComparisonNotEqualNode)) return New<ComparisonNotEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosDistanceNode)) return New<CosDistanceNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(EqualNode)) return New<EqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(GreaterEqualNode)) return New<GreaterEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(GreaterNode)) return New<GreaterNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(LessEqualNode)) return New<LessEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(LessNode)) return New<LessNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(NotEqualNode)) return New<NotEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosDistanceNode)) return New<CosDistanceNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosDistanceWithNegativeSamplesNode)) return New<CosDistanceWithNegativeSamplesNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosineNode)) return New<CosineNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CrossEntropyNode)) return New<CrossEntropyNode<ElemType>>(forward<_Types>(_Args)...);
@ -646,37 +646,37 @@ shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Plus(
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Less(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparsionLessNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<LessNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Equal(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparisonEqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<EqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Greater(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparisonGreaterNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<GreaterNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::GreaterEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparsionLessNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<LessNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::NotEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparisonEqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<EqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::LessEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ComparisonGreaterNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
return net.AddNodeToNetAndAttachInputs(New<GreaterNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>

Просмотреть файл

@ -167,25 +167,19 @@ void ComputationNetwork::DeleteNode(const std::wstring& nodeName)
// replace a named node by newNode of the same type under the same name, including moving over all network links
// This is used in the KL-reg based adaptation to reduce feature copy
// need to update all the mappings as well as the children.
void ComputationNetwork::ChangeNode(wstring nodeName, ComputationNodeBasePtr newNode)
void ComputationNetwork::ReplaceNode(wstring nodeName, ComputationNodeBasePtr newNode)
{
ComputationNodeBasePtr oldNode = GetNodeFromName(nodeName);
if (newNode->NodeName() != nodeName) // TODO: This was not tested for earlier; I hope no code depends on this.
InvalidArgument("ChangeNode: newNode must have the same name as the old node.");
if (oldNode->OperationName() != newNode->OperationName())
InvalidArgument("ChangeNode: newNode must have the same type as the old node.");
InvalidArgument("ReplaceNode: newNode must have the same type as the old node.");
InvalidateCompiledNetwork();
// change all nodes to have old node as input to point to the new node instead
for (auto nodeIter = m_nameToNodeMap.begin(); nodeIter != m_nameToNodeMap.end(); nodeIter++)
{
ComputationNodeBasePtr node = nodeIter->second;
for (int i = 0; i < node->GetNumInputs(); i++)
if (node->GetInputs()[i] == oldNode)
node->SetInput(i, newNode);
}
// change all nodes that have old node as input to point to the new node instead
ChangeNodeInputs(oldNode, newNode);
// change all inputs of this new node to share the old one's inputs
for (int i = 0; i < oldNode->GetNumInputs(); i++)
@ -208,10 +202,45 @@ void ComputationNetwork::ChangeNode(wstring nodeName, ComputationNodeBasePtr new
}
}
// Inserts newNode between the node named inputNodeName and that node's current consumers.
// Prior to this call, the caller must already have attached the inputNodeName node as an input of newNode.
void ComputationNetwork::InsertNode(wstring inputNodeName, ComputationNodeBasePtr newNode, const std::set<std::wstring>& newNodeTags)
{
newNode->Validate(false);
ComputationNodeBasePtr inputNode = GetNodeFromName(inputNodeName);
InvalidateCompiledNetwork();
// change all nodes that have old node as input to point to the new node instead
ChangeNodeInputs(inputNode, newNode);
// insert the node in the network
AddNodeToNet(newNode);
// also update node groups
for (auto nodeTag : newNodeTags)
{
AddToNodeGroup(nodeTag, newNode);
}
}
// change all nodes that have fromNode as input to have toNode as input instead
void ComputationNetwork::ChangeNodeInputs(ComputationNodeBasePtr fromNode, ComputationNodeBasePtr toNode)
{
for (auto nodeIter = m_nameToNodeMap.begin(); nodeIter != m_nameToNodeMap.end(); nodeIter++)
{
ComputationNodeBasePtr node = nodeIter->second;
for (int i = 0; i < node->GetNumInputs(); i++)
if (node->GetInputs()[i] == fromNode)
node->SetInput(i, toNode);
}
}
// replace the old node with the current node, assuming the old node is a leaf node
// need to update those nodes who use oldNode as their child
// TODO: Can this be called with a node that's already part of the network? This is currently allowed, but should it be?
// BUGBUG: Seems ChangeNode() also updates node groups. Why doesn't this function?
// BUGBUG: Seems ReplaceNode() also updates node groups. Why doesn't this function?
// BUGBUG: What if newNode is the one referenced by oldNodeName?
// BUGBUG: Or what if an unrelated node of the same name exists?
void ComputationNetwork::ReplaceLeafNode(wstring oldNodeName, ComputationNodeBasePtr newNode)

Просмотреть файл

@ -46,6 +46,10 @@ ComputationNetwork::ComputationNetwork(const IConfigRecordPtr configp) :
DEVICEID_TYPE deviceId = (DEVICEID_TYPE)(int) config[L"deviceId"];
deque<ComputationNodeBasePtr> workList;
// process 'special nodes'
ProcessSpecialNodes(config, workList);
// flatten the set of all nodes
// we collect all root ComputationNodes from the config record, and then expand into all their children by work-list processing
// TODO: This currently only supports nodes of the same ElemType. We could allow conversion operators.
@ -62,6 +66,30 @@ ComputationNetwork::ComputationNetwork(const IConfigRecordPtr configp) :
ConstructFromRoots(deviceId, move(workList), map<ComputationNodeBasePtr, ComputationNodeBasePtr>()/*no mapping*/);
}
// process the special-nodes parameters
void ComputationNetwork::ProcessSpecialNodes(const ScriptableObjects::IConfigRecord& config, std::deque<ComputationNodeBasePtr>& workList)
{
set<wstring> nodeGroupNames{ L"feature", L"label", L"criterion", L"evaluation", L"output" };
for (let& id : config.GetMemberIds())
{
let pos = id.find(L"Nodes");
if (pos == wstring::npos || pos != id.size() - 5) // special node name = node-group name + L"Nodes"
continue;
let nodeGroup = id.substr(0, id.size() - 5);
if (nodeGroupNames.find(nodeGroup) == nodeGroupNames.end())
continue;
let nodeSet = config[id];
let nodes = ScriptableObjects::ConfigArray::FlattenedVectorFrom<ComputationNodeBasePtr>(nodeSet);
for (let& node : nodes)
{
node->SetTag(nodeGroup);
workList.push_back(node);
}
}
}
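For example, a config member named featureNodes (the group name 'feature' plus the required 'Nodes' suffix) causes every node it lists to be tagged 'feature' and pushed onto the work list; outputNodes likewise populates the 'output' group. Members whose names do not end in 'Nodes', or whose prefix is not one of the five group names above, are ignored.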
// construct a network from a list of roots (passed in 'workList')
// This will add to m_nameToNodeMap[] all roots and all nodes reachable from those roots.
// If 'replacements' is given, all root pointers as well as all input pointers of reachable nodes will be mapped. This is needed for model editing.
@ -327,7 +355,10 @@ public:
// determine all roots
deque<ComputationNodeBasePtr> roots;
// start with the original network
// process 'special nodes'
// BUGBUG: This does not allow unsetting tags. If special nodes are listed, they should completely override the existing tags for the same node.
ProcessSpecialNodes(config, workList);
// then the original network
for (let& node : allNodes)
if (parents.find(node)->second.empty()) // no parents: it's a root
roots.push_back(node);

Просмотреть файл

@ -578,20 +578,20 @@ private:
// Index corresponds to different comparison operations.
const static int index = 1 + compType + 3 * polarity;
// The operations are indexed in the same order they appear in enum ElementWiseOperator: "Less", "Equal", "Greater", "GreaterEqual", "NotEqual", "LessEqual".
// This ordering is checked below:
static_assert(1 == ElementWiseOperator::opEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opEQ has wrong value relative to ElementWiseOperator::opLess");
static_assert(2 == ElementWiseOperator::opGreater - ElementWiseOperator::opLess, "ElementWiseOperator::opGT has wrong value relative to ElementWiseOperator::opLess");
static_assert(3 == ElementWiseOperator::opGreaterEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opGE has wrong value relative to ElementWiseOperator::opLess");
static_assert(4 == ElementWiseOperator::opNotEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opNE has wrong value relative to ElementWiseOperator::opLess");
static_assert(5 == ElementWiseOperator::opLessEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opLE has wrong value relative to ElementWiseOperator::opLess");
// The operations are indexed in the same order they appear in enum ElementWiseOperator: "Less", "Equal", "Greater", "GreaterEqual", "NotEqual", "LessEqual".
// This ordering is checked below:
static_assert(1 == ElementWiseOperator::opEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(2 == ElementWiseOperator::opGreater - ElementWiseOperator::opLess, "ElementWiseOperator::opGreater has wrong value relative to ElementWiseOperator::opLess");
static_assert(3 == ElementWiseOperator::opGreaterEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opGreaterEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(4 == ElementWiseOperator::opNotEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opNotEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(5 == ElementWiseOperator::opLessEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opLessEqual has wrong value relative to ElementWiseOperator::opLess");
public:
typedef BinaryElementWiseNode<ElemType> Base; UsingBinaryElementwiseNodeBaseMembers;
static const std::wstring TypeName()
{
const wchar_t* names[] = { L"Less", L"Equal", L"Greater", L"GreaterEqual", L"NotEqual", L"LessEqual" };
const wchar_t* names[] = { L"Less", L"Equal", L"Greater", L"GreaterEqual", L"NotEqual", L"LessEqual" };
return names[index];
}
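As a quick standalone sanity check of the index arithmetic (illustrative, not part of the source; the constants mirror the DefineComparisonNode instantiations at the end of this file):
    #include <cstdio>
    int main()
    {
        const char* names[]  = { "Less", "Equal", "Greater", "GreaterEqual", "NotEqual", "LessEqual" };
        const int compType[] = { -1, 0, 1, -1, 0, 1 };
        const int polarity[] = {  0, 0, 0,  1, 1, 1 };
        for (int i = 0; i < 6; ++i)
        {
            int index = 1 + compType[i] + 3 * polarity[i];   // same formula as above
            std::printf("compType=%2d polarity=%d -> index %d (%s)\n",
                        compType[i], polarity[i], index, names[index]);
        }
        return 0;
    }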
@ -634,17 +634,17 @@ public: \
DeclareConstructorFromConfigWithNumInputs(ClassName); \
ClassName(DEVICEID_TYPE deviceId, const wstring& name) \
: Base(deviceId, name) \
{ \
} \
{ \
} \
}; \
\
template class ClassName<float>; \
template class ClassName<double>;
DefineComparisonNode(ComparsionLessNode, -1, 0)
DefineComparisonNode(ComparisonEqualNode, 0, 0)
DefineComparisonNode(ComparisonGreaterNode, 1, 0)
DefineComparisonNode(ComparisonGreaterEqualNode, -1, 1)
DefineComparisonNode(ComparisonNotEqualNode, 0, 1)
DefineComparisonNode(ComparisonLessEqualNode, 1, 1)
DefineComparisonNode(LessNode, -1, 0)
DefineComparisonNode(EqualNode, 0, 0)
DefineComparisonNode(GreaterNode, 1, 0)
DefineComparisonNode(GreaterEqualNode, -1, 1)
DefineComparisonNode(NotEqualNode, 0, 1)
DefineComparisonNode(LessEqualNode, 1, 1)
}}}

Просмотреть файл

@ -51,7 +51,7 @@ void CNTKEvalBase<ElemType>::CreateNetwork(const std::string& networkDescription
config.Parse(networkDescription);
std::vector<wstring> outputNodeNames;
m_net = GetModelFromConfig<ConfigParameters, ElemType>(config, outputNodeNames);
m_net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"outputNodeNames", outputNodeNames);
if (m_net == nullptr)
{
@ -244,30 +244,38 @@ VariableLayout CNTKEvalExtended<ElemType>::ToVariableLayout(const ComputationNod
matrix->GetMatrixType() == MatrixType::SPARSE ? VariableLayout::Sparse :
VariableLayout::Undetermined :
VariableLayout::Undetermined,
/* dimension */ n->GetSampleLayout().GetNumElements(),
/* dynamic axis */ wstring(n->GetMBLayout() ? n->GetMBLayout()->GetAxisName() : L"*")
/* dimension */ n->GetSampleLayout().GetNumElements()
};
}
template<typename ElemType>
void CNTKEvalExtended<ElemType>::StartForwardEvaluation(std::vector<wstring> outputNodeNames)
void CNTKEvalExtended<ElemType>::StartForwardEvaluation(const std::vector<wstring>& outputNodeNames)
{
m_scopedNetworkOperationMode = make_shared<ScopedNetworkOperationMode>(m_net, NetworkOperationMode::inferring);
// allocate memory for forward computation
m_outputNodes = m_net->OutputNodesByName(outputNodeNames);
m_inputNodes = m_net->InputNodesForOutputs(outputNodeNames);
// allocate memory for forward computation
m_net->AllocateAllMatrices({}, m_outputNodes, nullptr);
m_net->StartEvaluateMinibatchLoop(m_outputNodes);
m_inputMatrices = DataReaderHelpers::RetrieveInputMatrices(m_inputNodes);
}
for (const auto& node : m_outputNodes)
{
shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
if (outputMatrix->GetMatrixType() != MatrixType::DENSE)
RuntimeError("Sparse outputs are not supported by this API.");
}
m_started = true;
}
template<typename ElemType>
VariableSchema CNTKEvalExtended<ElemType>::GetOutputSchema() const
{
VariableSchema schema;
for (const auto& n : m_net->OutputNodes())
auto& nodes = m_started ? m_outputNodes : m_net->OutputNodes();
for (const auto& n : nodes)
{
schema.push_back(ToVariableLayout(n));
}
@ -293,17 +301,24 @@ VariableSchema CNTKEvalExtended<ElemType>::GetInputSchema() const
}
template<typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const Variables<ElemType>& inputs, Variables<ElemType>& output)
template<template<typename> class ValueContainer>
void CNTKEvalExtended<ElemType>::ForwardPassT(const std::vector<ValueBuffer<ElemType, ValueContainer> >& inputs, std::vector<ValueBuffer<ElemType, ValueContainer> >& outputs)
{
if (inputs.size() != (size_t)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()))
{
RuntimeError("Expected %d inputs, but got %d", (int)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()), (int)inputs.size());
}
if (!m_started)
RuntimeError("ForwardPass() called before StartForwardEvaluation()");
int i = 0;
if (inputs.size() != (size_t)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()))
RuntimeError("Expected %d inputs, but got %d.", (int)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()), (int)inputs.size());
if (outputs.size() != m_outputNodes.size())
RuntimeError("Expected %d outputs, but got %d.", (int)m_outputNodes.size(), (int)outputs.size());
size_t i = 0;
for (auto& input : m_inputMatrices)
{
VariableBuffer<ElemType> buffer = inputs[i];
// const cast: The matrix class takes this over without copying and could theoretically change the contents,
// though it doesn't in this case.
auto& buffer = const_cast<ValueBuffer<ElemType, ValueContainer>&>(inputs[i]);
shared_ptr<Matrix<ElemType>> matrix = dynamic_pointer_cast<Matrix<ElemType>>(input.second.matrix);
auto type = matrix->GetMatrixType();
int numRows = input.second.sampleLayout.GetNumElements();
@ -311,58 +326,50 @@ void CNTKEvalExtended<ElemType>::ForwardPass(const Variables<ElemType>& inputs,
if (type == MatrixType::DENSE)
{
if (buffer.m_buffer.size() % numRows != 0)
{
RuntimeError("Input %ls: Expected input data to be a multiple of %ld, but it is %ld", m_inputNodes[i]->GetName().c_str(), numRows, buffer.m_buffer.size());
}
RuntimeError("Input %ls: Expected input data to be a multiple of %ld, but it is %ld",
m_inputNodes[i]->GetName().c_str(), numRows, buffer.m_buffer.size());
if (buffer.m_buffer.size() == 0)
{
RuntimeError("Input %ls: Expected at least one element.", m_inputNodes[i]->GetName().c_str());
}
}
else if (type == MatrixType::SPARSE)
{
if (buffer.m_colIndices.size() < 2)
{
RuntimeError("Input %ls: Expected at least one element.", m_inputNodes[i]->GetName().c_str());
}
if (buffer.m_colIndices[0] != 0)
{
RuntimeError("Input %ls: First element of column indices must be 0", m_inputNodes[i]->GetName().c_str());
}
if (buffer.m_colIndices[buffer.m_colIndices.size()-1] != buffer.m_indices.size())
{
RuntimeError("Input %ls: Last element of column indices must be equal to the size of indices (%ld), but was %d", m_inputNodes[i]->GetName().c_str(), buffer.m_indices.size(), buffer.m_colIndices[buffer.m_colIndices.size() - 1]);
}
if (buffer.m_colIndices[buffer.m_colIndices.size() - 1] != buffer.m_indices.size())
RuntimeError("Input %ls: Last element of column indices must be equal to the size of indices (%ld), but was %d",
m_inputNodes[i]->GetName().c_str(), buffer.m_indices.size(),
buffer.m_colIndices[buffer.m_colIndices.size() - 1]);
}
int numCols = type == MatrixType::DENSE ? buffer.m_buffer.size() / numRows : buffer.m_colIndices.size() - 1;
assert(numCols >= 1);
input.second.pMBLayout->Init(1, numCols);
input.second.pMBLayout->AddSequence(0, 0, 0, numCols);
if (type == MatrixType::DENSE)
{
matrix->SetValue(numRows, numCols, matrix->GetDeviceId(), buffer.m_buffer.data(), matrixFlagNormal);
}
else if (type == MatrixType::SPARSE)
{
// In the sparse case the m_data layout is identical to CUDA's CSC layout
// (see http://docs.nvidia.com/cuda/cusparse/#compressed-sparse-column-format-csc).
matrix->SetMatrixFromCSCFormat(buffer.m_colIndices.data(), buffer.m_indices.data(), buffer.m_buffer.data(), buffer.m_buffer.size(), numRows, numCols);
matrix->SetMatrixFromCSCFormat(buffer.m_colIndices.data(), buffer.m_indices.data(), buffer.m_buffer.data(),
buffer.m_buffer.size(), numRows, numCols);
}
++i;
}
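To make the sparse-input contract concrete, an illustrative (made-up) buffer for one input stream with sample dimension numRows = 3 and two samples: column 0 holds 0.5 at row 1, column 1 holds 2.0 at row 0 and 3.0 at row 2. The three arrays then look like this (standard CSC layout, matching what SetMatrixFromCSCFormat expects):
    // Illustrative values only; the element types of the index containers in the real ValueBuffer may differ.
    std::vector<float> buffer     = { 0.5f, 2.0f, 3.0f }; // m_buffer: non-zero values, column-major
    std::vector<int>   indices    = { 1, 0, 2 };          // m_indices: row index of each value
    std::vector<int>   colIndices = { 0, 1, 3 };          // m_colIndices: column starts, size = numCols + 1
    // Checks above: colIndices[0] == 0, colIndices.back() == indices.size() == 3, numCols = 3 - 1 = 2.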
ComputationNetwork::BumpEvalTimeStamp(m_inputNodes);
for (int i = 0; i < m_outputNodes.size(); ++i)
for (size_t i = 0; i < m_outputNodes.size(); ++i)
{
auto node = m_outputNodes[i];
m_net->ForwardProp(node);
shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
auto pMBLayout = node->GetMBLayout();
if (!pMBLayout)
if (!pMBLayout)
{
pMBLayout = make_shared<MBLayout>();
pMBLayout->InitAsFrameMode(1); // treat this as if we have one single sample
@ -370,18 +377,36 @@ void CNTKEvalExtended<ElemType>::ForwardPass(const Variables<ElemType>& inputs,
const auto& seq = pMBLayout->GetAllSequences();
if (seq.size() != 1)
{
RuntimeError("Only 1 sequence supported by this API"); // TODO
}
std::vector<ElemType>& vec = output[i].m_buffer;
vec.resize(outputMatrix->GetNumElements());
ElemType* data = const_cast<ElemType*>(vec.data());
RuntimeError("Only 1 output sequence supported by this API");
ValueContainer<ElemType>& vec = outputs[i].m_buffer;
size_t numElements = outputMatrix->GetNumElements();
if (vec.capacity() < numElements)
{
// Bad luck - we can't reallocate memory of an external object at this point.
RuntimeError("Not enough space in output buffer for output '%ls'.", node->GetName().c_str());
}
vec.resize(numElements);
ElemType* data = const_cast<ElemType*>(vec.data());
outputMatrix->CopyToArray(data, numElements);
}
}
template<typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& outputs)
{
ForwardPassT(inputs, outputs);
}
template<typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& outputs)
{
ForwardPassT(inputs, outputs);
}
template <typename ElemType>
void CNTKEvalExtended<ElemType>::Destroy()
{

Просмотреть файл

@ -89,13 +89,18 @@ public:
template <typename ElemType>
class CNTKEvalExtended : public CNTKEvalBase<ElemType>, public IEvaluateModelExtended<ElemType>
{
public:
CNTKEvalExtended() : CNTKEvalBase<ElemType>(), m_started(false) {}
virtual VariableSchema GetOutputSchema() const override;
virtual void StartForwardEvaluation(std::vector<wstring> outputs) override;
virtual void StartForwardEvaluation(const std::vector<wstring>& outputs) override;
virtual VariableSchema GetInputSchema() const override;
virtual void ForwardPass(const Variables<ElemType>& inputs, Variables<ElemType>& output) override;
virtual void ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& output) override;
virtual void ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& output) override;
virtual void Destroy() override;
@ -114,5 +119,10 @@ private:
std::shared_ptr<ScopedNetworkOperationMode> m_scopedNetworkOperationMode;
std::vector<ComputationNodeBasePtr> m_inputNodes;
StreamMinibatchInputs m_inputMatrices;
bool m_started;
template<template<typename> class ValueContainer>
void ForwardPassT(const std::vector < ValueBuffer<ElemType, ValueContainer> >& inputs,
std::vector < ValueBuffer<ElemType, ValueContainer> >& outputs);
};
} } }

Просмотреть файл

@ -59,8 +59,14 @@ int _tmain(int argc, _TCHAR* argv[])
const std::string modelWorkingDirectory = path + "\\..\\..\\Examples\\Image\\MNIST\\Data\\";
const std::string modelFilePath = modelWorkingDirectory + "..\\Output\\Models\\01_OneHidden";
// Load model
model->CreateNetwork("modelPath=\"" + modelFilePath + "\"");
// Load model with desired outputs
std::string networkConfiguration;
// Uncomment the following line to re-define the outputs (include h1.z AND the output ol.z)
// When specifying outputNodeNames in the configuration, it will REPLACE the list of output nodes
// with the ones specified.
//networkConfiguration += "outputNodeNames=\"h1.z:ol.z\"\n";
networkConfiguration += "modelPath=\"" + modelFilePath + "\"";
model->CreateNetwork(networkConfiguration);
// get the model's layers dimensions
std::map<std::wstring, size_t> inDims;
@ -90,6 +96,7 @@ int _tmain(int argc, _TCHAR* argv[])
model->Evaluate(inputLayer, outputLayer);
// Output the results
fprintf(stderr, "Layer '%ls' output:\n", outputLayerName.c_str());
for each (auto& value in outputs)
{
fprintf(stderr, "%f\n", value);

Просмотреть файл

@ -9,6 +9,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
{
@ -16,16 +17,19 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
/// Program for demonstrating how to run model evaluations using the CLIWrapper
/// </summary>
/// <description>
/// This program is a managed client using the CLIWrapper to run the model evaluator in CTNK.
/// There are four cases shown in this program related to model loading/network creation and evaluation.
/// The first two use the trained model from one of the examples provided in the CNTK source code.
/// In order to run this program the model must already exist in the example. To create the model,
/// first run the example in <CNTK>/Examples/Image/MNIST. Once the model file 01_OneHidden is created,
/// you can run this client.
/// The last two cases show how to evaluate a network without first training the model. This is accomplished
/// by building the network and evaluating a single forward pass.
/// This program also shows how to obtaining the output results from the evaluation, either as the default output layer,
/// or by specifying one or more layers as outputs.
/// This program is a managed client using the CLIWrapper to run the model evaluator in CNTK.
/// There are four cases shown in this program related to model loading, network creation and evaluation.
///
/// EvaluateModelSingleLayer and EvaluateModelMultipleLayers
/// --------------------------------------------------------
/// These two cases require the 01_OneHidden model which is part of the <CNTK>/Examples/Image/MNIST example.
/// Refer to <see cref="https://github.com/Microsoft/CNTK/blob/master/Examples/Image/MNIST/README.md"/> for how to train
/// the model used in these examples.
///
/// EvaluateNetworkSingleLayer and EvaluateNetworkSingleLayerNoInput
/// ----------------------------------------------------------------
/// These two cases do not require a trained model, only the network description. They show how to extract values from a
/// single forward pass; the NoInput variant needs no input to the model at all.
/// </description>
class Program
{
@ -44,7 +48,7 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
Console.WriteLine("\n====== EvaluateModelMultipleLayers ========");
EvaluateModelMultipleLayers();
Console.WriteLine("\n====== EvaluateNetworkSingleLayer ========");
EvaluateNetworkSingleLayer();
@ -58,6 +62,9 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
/// <summary>
/// Evaluates a trained model and obtains a single layer output
/// </summary>
/// <remarks>
/// This example requires the 01_OneHidden trained model
/// </remarks>
private static void EvaluateModelSingleLayer()
{
try
@ -73,12 +80,12 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
{
// Load model
string modelFilePath = Path.Combine(Environment.CurrentDirectory, @"..\Output\Models\01_OneHidden");
model.CreateNetwork(string.Format("modelPath=\"{0}\"", modelFilePath), deviceId:-1);
model.CreateNetwork(string.Format("modelPath=\"{0}\"", modelFilePath), deviceId: -1);
// Generate random input values in the appropriate structure and size
var inDims = model.GetNodeDimensions(NodeGroup.nodeInput);
var inputs = GetDictionary(inDims.First().Key, inDims.First().Value, 255);
// We request the output layer name(s) and dimensions; we'll use the first one.
var outDims = model.GetNodeDimensions(NodeGroup.nodeOutput);
outputLayerName = outDims.First().Key;
@ -99,8 +106,11 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
}
/// <summary>
/// Evaluates a trained model and obtains multiple layers output
/// Evaluates a trained model and obtains multiple layers output (including hidden layer)
/// </summary>
/// <remarks>
/// This example requires the 01_OneHidden trained model
/// </remarks>
private static void EvaluateModelMultipleLayers()
{
try
@ -113,20 +123,28 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
using (var model = new IEvaluateModelManagedF())
{
// Desired output layers
string hiddenLayerName = "h1.z";
string outputLayerName = "ol.z";
// Load model
string modelFilePath = Path.Combine(Environment.CurrentDirectory, @"..\Output\Models\01_OneHidden");
model.CreateNetwork(string.Format("modelPath=\"{0}\"", modelFilePath), deviceId:-1);
List<string> desiredOutputLayers = new List<string>() { hiddenLayerName, outputLayerName };
model.CreateNetwork(string.Format("modelPath=\"{0}\"", modelFilePath), deviceId: -1, outputNodeNames: desiredOutputLayers);
// Generate random input values in the appropriate structure and size
var inDims = model.GetNodeDimensions(NodeGroup.nodeInput);
var inputs = GetDictionary(inDims.First().Key, inDims.First().Value, 255);
// We request the output layer names(s) and dimension, we'll use the first one.
// We request the output layer name(s) and dimensions; we'll get both the hidden layer and the output layer
var outDims = model.GetNodeDimensions(NodeGroup.nodeOutput);
string outputLayerName = outDims.First().Key;
// We can preallocate the output structure and pass it in (multiple output layers)
outputs = GetDictionary(outputLayerName, outDims[outputLayerName], 1);
outputs = new Dictionary<string, List<float>>()
{
{ hiddenLayerName, GetFloatArray(outDims[hiddenLayerName], 1) },
{ outputLayerName, GetFloatArray(outDims[outputLayerName], 1) }
};
model.Evaluate(inputs, outputs);
}
@ -143,7 +161,7 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
}
/// <summary>
/// Evaluates a network (without a model) and obtains a single layer output
/// Evaluates a network (without a model, but requiring input) and obtains a single layer output
/// </summary>
private static void EvaluateNetworkSingleLayer()
{
@ -163,12 +181,12 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
// This network (AddOperatorConstant.cntk) is a simple network consisting of a single binary operator (Plus)
// operating over a single input and a constant
string networkDescription = File.ReadAllText(Path.Combine(workingDirectory, @"AddOperatorConstant.cntk"));
model.CreateNetwork(networkDescription, deviceId:-1);
model.CreateNetwork(networkDescription, deviceId: -1);
// Generate random input value in the appropriate structure and size
// Prepare input value in the appropriate structure and size
var inputs = new Dictionary<string, List<float>>() { { "features", new List<float>() { 1.0f } } };
// We can call the evaluate method and get back the results (single layer)...
// We can call the evaluate method and get back the results (single layer output)...
var outDims = model.GetNodeDimensions(NodeGroup.nodeOutput);
outputLayerName = outDims.First().Key;
outputs = model.Evaluate(inputs, outputLayerName);
@ -206,7 +224,7 @@ namespace Microsoft.MSR.CNTK.Extensibility.Managed.CSEvalClient
// This network (AddOperatorConstantNoInput.cntk) is a simple network consisting of a single binary operator (Plus)
// operating over two constants, therefore no input is necessary.
string networkDescription = File.ReadAllText(Path.Combine(workingDirectory, @"AddOperatorConstantNoInput.cntk"));
model.CreateNetwork(networkDescription, deviceId:-1);
model.CreateNetwork(networkDescription, deviceId: -1);
// We can call the evaluate method and get back the results (single layer)...
outputs = model.Evaluate("ol", 1);

Просмотреть файл

@ -23,6 +23,7 @@ using namespace std;
using namespace System;
using namespace System::Collections::Generic;
using namespace System::Collections;
using namespace System::Runtime::Serialization;
using namespace Microsoft::MSR::CNTK;
namespace Microsoft { namespace MSR { namespace CNTK { namespace Extensibility { namespace Managed {
@ -96,7 +97,7 @@ public:
}
}
/// <summary>Creates a network based from the network description in the configuration</summary>
/// <summary>Creates a network based on the network description in the configuration</summary>
/// <param name="networkDescription">The configuration file containing the network description</param>
void CreateNetwork(String^ networkDescription)
{
@ -118,7 +119,22 @@ public:
}
}
/// <summary>Creates a network based from the network description in the configuration</summary>
/// <summary>Creates a network based on the network description in the configuration</summary>
/// <param name="networkDescription">The configuration file containing the network description</param>
/// <param name="outputNodeNames">The output list of nodes (replaces the model's list of output nodes)</param>
void CreateNetwork(String^ networkDescription, List<String^>^ outputNodeNames)
{
if (m_eval == nullptr)
{
throw gcnew ObjectDisposedException("Object has been disposed.");
}
String^ outputNodeNamesProperty = outputNodeNames != nullptr ? String::Concat("outputNodeNames=", String::Join(":", outputNodeNames)) : "";
String^ newNetworkConfig = String::Format("{0}\n{1}", outputNodeNamesProperty, networkDescription);
this->CreateNetwork(newNetworkConfig);
}
/// <summary>Creates a network based on the network description in the configuration</summary>
/// <param name="networkDescription">The configuration file containing the network description</param>
/// <param name="deviceId">The device ID to specify for the network</param>
void CreateNetwork(String^ networkDescription, int deviceId)
@ -128,7 +144,23 @@ public:
throw gcnew ObjectDisposedException("Object has been disposed.");
}
this->CreateNetwork(String::Format("deviceId={0}\n{1}", deviceId, networkDescription));
this->CreateNetwork(networkDescription, deviceId, nullptr);
}
/// <summary>Creates a network based on the network description in the configuration</summary>
/// <param name="networkDescription">The configuration file containing the network description</param>
/// <param name="deviceId">The device ID to specify for the network</param>
/// <param name="outputNodeNames">The output list of nodes (replaces the model's list of output nodes)</param>
void CreateNetwork(String^ networkDescription, int deviceId, List<String^>^ outputNodeNames)
{
if (m_eval == nullptr)
{
throw gcnew ObjectDisposedException("Object has been disposed.");
}
String^ outputNodeNamesProperty = outputNodeNames != nullptr ? String::Concat("outputNodeNames=", String::Join(":", outputNodeNames)) : "";
String^ newNetworkConfig = String::Format("deviceId={0}\n{1}\n{2}", deviceId, outputNodeNamesProperty, networkDescription);
this->CreateNetwork(newNetworkConfig);
}
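For example (illustrative values), calling CreateNetwork(description, -1, a list containing "h1.z" and "ol.z") produces the configuration string "deviceId=-1\noutputNodeNames=h1.z:ol.z\n" followed by the original description, and then delegates to the single-argument overload; passing nullptr for the list leaves outputNodeNames unset, so the model's own output nodes are used.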
/// <summary>Evaluates the model using a single forward feed pass and retrieves the output layer data</summary>
@ -524,7 +556,8 @@ public:
}
};
public ref class CNTKException : Exception
[Serializable]
public ref class CNTKException : Exception, ISerializable
{
public:
CNTKException() : Exception()
@ -537,8 +570,23 @@ public:
{}
const String^ NativeCallStack;
[System::Security::Permissions::SecurityPermissionAttribute
(System::Security::Permissions::SecurityAction::LinkDemand,
Flags = System::Security::Permissions::SecurityPermissionFlag::SerializationFormatter)]
virtual void GetObjectData(SerializationInfo^ info, StreamingContext context) override
{
Exception::GetObjectData(info, context);
}
protected:
CNTKException(SerializationInfo^ info, StreamingContext context) : Exception(info, context)
{}
};
[Serializable]
public ref class CNTKRuntimeException : CNTKException
{
public:
@ -547,8 +595,14 @@ public:
CNTKRuntimeException(String^ message, String^ callstack) : CNTKException(message, callstack)
{}
protected:
CNTKRuntimeException(SerializationInfo^ info, StreamingContext context) : CNTKException(info, context)
{}
};
[Serializable]
public ref class CNTKLogicErrorException : CNTKException
{
public:
@ -557,8 +611,14 @@ public:
CNTKLogicErrorException(String^ message, String^ callstack) : CNTKException(message, callstack)
{}
protected:
CNTKLogicErrorException(SerializationInfo^ info, StreamingContext context) : CNTKException(info, context)
{}
};
[Serializable]
public ref class CNTKInvalidArgumentException : CNTKException
{
public:
@ -567,8 +627,14 @@ public:
CNTKInvalidArgumentException(String^ message, String^ callstack) : CNTKException(message, callstack)
{}
protected:
CNTKInvalidArgumentException(SerializationInfo^ info, StreamingContext context) : CNTKException(info, context)
{}
};
[Serializable]
public ref class CNTKBadAllocException : CNTKException
{
public:
@ -577,6 +643,11 @@ public:
CNTKBadAllocException(String^ message) : CNTKException(message)
{}
protected:
CNTKBadAllocException(SerializationInfo^ info, StreamingContext context) : CNTKException(info, context)
{}
};
// This method tricks the compiler into emitting the methods of the classes
@ -594,6 +665,8 @@ void emit()
f.Evaluate("");
f.CreateNetwork("");
f.CreateNetwork("", 0);
f.CreateNetwork("", nullptr);
f.CreateNetwork("", 0, nullptr);
f.GetNodeDimensions(NodeGroup::nodeSpecified);
IEvaluateModelManagedD d;
@ -603,6 +676,8 @@ void emit()
d.Evaluate("");
d.CreateNetwork("");
d.CreateNetwork("", 0);
d.CreateNetwork("", nullptr);
d.CreateNetwork("", 0,nullptr);
d.GetNodeDimensions(NodeGroup::nodeSpecified);
// Deprecated code, hush warnings locally only

Просмотреть файл

@ -1460,13 +1460,15 @@ ElemType GPUSparseMatrix<ElemType>::Adagrad(GPUMatrix<ElemType>& c, const bool n
// sparse X dense = dense
template <class ElemType>
void GPUSparseMatrix<ElemType>::MultiplyAndWeightedAdd(ElemType alpha, const GPUSparseMatrix<ElemType>& a, const bool transposeA,
const GPUMatrix<ElemType>& b, const bool transposeD, ElemType beta, GPUMatrix<ElemType>& c)
const GPUMatrix<ElemType>& b, const bool transposeB, ElemType beta, GPUMatrix<ElemType>& c)
{
if (a.GetFormat() != matrixFormatSparseCSR)
if (transposeB)
NOT_IMPLEMENTED;
if (transposeD)
// Note: This function is written for 'a' being in CSR format. If 'a' is CSC, we reinterpret it as CSR by transposing it.
if (a.GetFormat() != matrixFormatSparseCSR && a.GetFormat() != matrixFormatSparseCSC)
NOT_IMPLEMENTED;
const bool reinterpretAsCSR = a.GetFormat() == matrixFormatSparseCSC;
if (a.GetComputeDeviceId() != b.GetComputeDeviceId() || (b.GetComputeDeviceId() != c.GetComputeDeviceId()))
RuntimeError("MultiplyAndWeightedAdd: All matrices must be on the same GPU");
@ -1478,24 +1480,28 @@ void GPUSparseMatrix<ElemType>::MultiplyAndWeightedAdd(ElemType alpha, const GPU
CUSPARSE_CALL(cusparseCreateMatDescr(&descr));
cusparseSetMatType(descr, CUSPARSE_MATRIX_TYPE_GENERAL);
cusparseSetMatIndexBase(descr, CUSPARSE_INDEX_BASE_ZERO);
cusparseOperation_t oper = transposeA ? CUSPARSE_OPERATION_TRANSPOSE : CUSPARSE_OPERATION_NON_TRANSPOSE;
int m = (int) a.GetNumRows();
int n = (int) b.GetNumCols();
cusparseOperation_t oper = (transposeA != reinterpretAsCSR) ? CUSPARSE_OPERATION_TRANSPOSE : CUSPARSE_OPERATION_NON_TRANSPOSE;
int n = (int)b.GetNumCols();
int m = (int)(reinterpretAsCSR ? a.GetNumCols() : a.GetNumRows());
int k = (int)(reinterpretAsCSR ? a.GetNumRows() : a.GetNumCols());
assert(n == (int) c.GetNumCols());
int k = (int) a.GetNumCols();
const auto& aRowLocation = reinterpretAsCSR ? a.ColLocation() : a.RowLocation();
const auto& aColLocation = reinterpretAsCSR ? a.RowLocation() : a.ColLocation();
SyncGuard syncGuard;
if (sizeof(ElemType) == sizeof(float))
{
CUSPARSE_CALL(cusparseScsrmm(cusparseHandle, oper, m, n, k, (int) a.GetNumElemAllocated(), reinterpret_cast<float*>(&alpha), descr, reinterpret_cast<const float*>(a.Buffer()),
a.RowLocation(), a.ColLocation(), reinterpret_cast<float*>(b.Data()),
aRowLocation, aColLocation, reinterpret_cast<float*>(b.Data()),
(int) b.GetNumRows(), reinterpret_cast<float*>(&beta), reinterpret_cast<float*>(c.Data()), (int) c.GetNumRows()));
}
else
{
CUSPARSE_CALL(cusparseDcsrmm(cusparseHandle, oper, m, n, k, (int) a.GetNumElemAllocated(), reinterpret_cast<double*>(&alpha), descr, reinterpret_cast<const double*>(a.Buffer()),
a.RowLocation(), a.ColLocation(), reinterpret_cast<double*>(b.Data()),
aRowLocation, aColLocation, reinterpret_cast<double*>(b.Data()),
(int) b.GetNumRows(), reinterpret_cast<double*>(&beta), reinterpret_cast<double*>(c.Data()), (int) c.GetNumRows()));
}
CUSPARSE_CALL(cusparseDestroy(cusparseHandle));
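The reinterpretation works because the three arrays of an m-by-k matrix stored in CSC are exactly the CSR representation of its k-by-m transpose. For illustration (made-up values): A = [[1, 0, 2], [0, 3, 0]] stored as CSC has values (1, 3, 2), row indices (0, 1, 0) and column pointers (0, 1, 2, 3); read as CSR, those same arrays describe the 3-by-2 matrix A^T. Hence the code above swaps m and k and flips the cusparse operation flag (transposeA != reinterpretAsCSR) instead of physically converting the storage.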

Просмотреть файл

@ -11,10 +11,10 @@
namespace Microsoft { namespace MSR { namespace CNTK {
// Stream (input) metadata. This text-reader specific descriptor adds two
// Stream (input) metadata. This text-reader specific descriptor adds two
// additional fields: stream alias (name prefix in each sample) and expected
// sample dimension.
struct StreamDescriptor : StreamDescription
struct StreamDescriptor : StreamDescription
{
std::string m_alias; // sample name prefix used in the input data
size_t m_sampleDimension; // expected number of elements in a sample
@ -22,7 +22,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
};
// Sequence metadata. This text-reader specific descriptor adds two additional
// fields: file offset and size in bytes. Both are required to efficiently
// fields: file offset and size in bytes. Both are required to efficiently
// locate and retrieve a sequence from file, given a sequence descriptor.
struct SequenceDescriptor : SequenceDescription
{
@ -36,19 +36,19 @@ namespace Microsoft { namespace MSR { namespace CNTK {
size_t m_byteSize; // size in bytes
};
// Chunk metadata, similar to the sequence descriptor above,
// Chunk metadata, similar to the sequence descriptor above,
// but used to facilitate indexing and retrieval of blobs of input data of
// some user-specified size.
struct ChunkDescriptor : ChunkDescription
{
{
ChunkDescriptor() : ChunkDescription({}), m_byteSize(0) {}
// TODO: if we don't want to keep the whole index
// TODO: if we don't want to keep the whole index
// (metadata for all sequences in memory), we should not
// leave this empty when building a chunk index, and only
// fill it out when the chunk needs to be loaded
// fill it out when the chunk needs to be loaded
// (the indexer will have to do a second pass for this chunk).
std::vector<SequenceDescriptor> m_sequences;
size_t m_byteSize; // size in bytes
};
@ -80,7 +80,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
// Creating a new chunk if the size is exceeded.
m_chunks.push_back({});
chunk = &m_chunks.back();
chunk->m_id = m_chunks.size() - 1;
chunk->m_id = (ChunkIdType) (m_chunks.size() - 1);
if (CHUNKID_MAX < m_chunks.size())
{
RuntimeError("Maximum number of chunks exceeded");
}
}
chunk->m_byteSize += sd.m_byteSize;
@ -89,7 +93,8 @@ namespace Microsoft { namespace MSR { namespace CNTK {
sd.m_chunkId = chunk->m_id;
sd.m_id = chunk->m_sequences.size();
auto location = std::make_pair(chunk->m_id, sd.m_id);
m_keyToSequenceInChunk.insert(std::make_pair(sd.m_key.m_sequence, location));
auto sequenceId = sd.m_key.m_sequence;
m_keyToSequenceInChunk.insert(std::make_pair(sequenceId, location));
chunk->m_sequences.push_back(sd);
}

Просмотреть файл

@ -66,7 +66,6 @@ void Indexer::BuildFromLines(CorpusDescriptorPtr corpus)
{
SequenceDescriptor sd = {};
sd.m_numberOfSamples = 1;
sd.m_isValid = true;
sd.m_fileOffsetBytes = offset;
offset = GetFileOffset() + 1;
sd.m_byteSize = offset - sd.m_fileOffsetBytes;
@ -86,7 +85,6 @@ void Indexer::BuildFromLines(CorpusDescriptorPtr corpus)
// add a sequence to the index, parser will have to deal with it.
SequenceDescriptor sd = {};
sd.m_numberOfSamples = 1;
sd.m_isValid = true;
sd.m_fileOffsetBytes = offset;
sd.m_byteSize = m_fileOffsetEnd - sd.m_fileOffsetBytes;
AddSequenceIfIncluded(corpus, lines, sd);
@ -135,7 +133,6 @@ void Indexer::Build(CorpusDescriptorPtr corpus)
SequenceDescriptor sd = {};
sd.m_fileOffsetBytes = offset;
sd.m_isValid = true;
size_t currentKey = id;
while (!m_done)
@ -152,7 +149,6 @@ void Indexer::Build(CorpusDescriptorPtr corpus)
sd = {};
sd.m_fileOffsetBytes = offset;
sd.m_isValid = true;
currentKey = id;
}
}
@ -165,7 +161,7 @@ void Indexer::Build(CorpusDescriptorPtr corpus)
void Indexer::AddSequenceIfIncluded(CorpusDescriptorPtr corpus, size_t sequenceKey, SequenceDescriptor& sd)
{
auto& stringRegistry = corpus->GetStringRegistry();
auto key = msra::strfun::utf16(std::to_string(sequenceKey));
auto key = std::to_string(sequenceKey);
if (corpus->IsIncluded(key))
{
sd.m_key.m_sequence = stringRegistry[key];

Просмотреть файл

@ -117,10 +117,12 @@ TextConfigHelper::TextConfigHelper(const ConfigParameters& config)
m_filepath = msra::strfun::utf16(config(L"file"));
// EvalActions inserts randomize = "none" into the reader config in DoWriteOutput.
wstring randomizeString = config(L"randomize", wstring());
if (!_wcsicmp(randomizeString.c_str(), L"none"))
if (!_wcsicmp(randomizeString.c_str(), L"none")) // TODO: don't support case-insensitive option strings in the new reader
{
// "none" is only accepted to be backwards-compatible (DoWriteOutput() in EvalActions.cpp
// inserts this magic constant into the reader config to prevent it from shuffling the input).
// In user-defined configurations, 'randomize' should be a boolean.
m_randomizationWindow = randomizeNone;
}
else

Просмотреть файл

@ -41,7 +41,7 @@ public:
std::map<size_t, SequenceBuffer> m_sequenceMap;
// chunk id (copied from the descriptor)
size_t m_id;
ChunkIdType m_id;
// a non-owned pointer to the parser that created this chunk
TextParser* m_parser;
@ -206,7 +206,7 @@ ChunkDescriptions TextParser<ElemType>::GetChunkDescriptions()
}
template <class ElemType>
void TextParser<ElemType>::GetSequencesForChunk(size_t chunkId, std::vector<SequenceDescription>& result)
void TextParser<ElemType>::GetSequencesForChunk(ChunkIdType chunkId, std::vector<SequenceDescription>& result)
{
const auto& index = m_indexer->GetIndex();
const auto& chunk = index.m_chunks[chunkId];
@ -219,7 +219,6 @@ void TextParser<ElemType>::GetSequencesForChunk(size_t chunkId, std::vector<Sequ
s.m_id,
s.m_numberOfSamples,
s.m_chunkId,
s.m_isValid,
s.m_key
});
}
@ -272,7 +271,7 @@ void TextParser<ElemType>::TextDataChunk::GetSequence(size_t sequenceId, std::ve
}
template <class ElemType>
ChunkPtr TextParser<ElemType>::GetChunk(size_t chunkId)
ChunkPtr TextParser<ElemType>::GetChunk(ChunkIdType chunkId)
{
const auto& chunkDescriptor = m_indexer->GetIndex().m_chunks[chunkId];
auto textChunk = make_shared<TextDataChunk>(chunkDescriptor, this);
@ -399,7 +398,7 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
{
fprintf(stderr,
"WARNING: Could not read a row (# %" PRIu64 ")"
" while loading sequence (id = %ls) %ls.\n",
" while loading sequence (id = %s) %ls.\n",
i + 1,
GetSequenceKey(sequenceDsc).c_str(),
GetFileInfo().c_str());
@ -412,7 +411,7 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
{
fprintf(stderr,
"WARNING: Exhausted all input"
" expected for the current sequence (id = %ls) %ls,"
" expected for the current sequence (id = %s) %ls,"
" but only read %" PRIu64 " out of %" PRIu64 " expected rows.\n",
GetSequenceKey(sequenceDsc).c_str(),
GetFileInfo().c_str(), numRowsRead, expectedRowCount);
@ -424,13 +423,13 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
// Double check if there are empty input streams.
// TODO this handling needs to be graceful, but currently CNTK complains when we return empty sequences.
bool hasEmptyInputs = false, hasDuplicateInputs = false;
size_t maxInputLength = 0;
uint32_t maxInputLength = 0;
for (size_t i = 0; i < sequence.size(); ++i)
{
if (sequence[i]->m_numberOfSamples == 0)
{
fprintf(stderr,
"ERROR: Input ('%ls') is empty in sequence (id = %ls) %ls.\n",
"ERROR: Input ('%ls') is empty in sequence (id = %s) %ls.\n",
m_streams[i]->m_name.c_str(), GetSequenceKey(sequenceDsc).c_str(), GetFileInfo().c_str());
hasEmptyInputs = true;
}
@ -442,7 +441,7 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
{
fprintf(stderr,
"WARNING: Input ('%ls') contains more samples than expected"
" (%" PRIu64 " vs. %" PRIu64 ") for sequence (id = %ls) %ls.\n",
" (%u vs. %" PRIu64 ") for sequence (id = %s) %ls.\n",
m_streams[i]->m_name.c_str(), sequence[i]->m_numberOfSamples, expectedRowCount,
GetSequenceKey(sequenceDsc).c_str(), GetFileInfo().c_str());
}
@ -465,8 +464,8 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
if (ShouldWarn())
{
fprintf(stderr,
"WARNING: Maximum per-input number of samples for sequence (id = %ls) %ls"
" is less than expected (%" PRIu64 " vs. %" PRIu64 ").\n",
"WARNING: Maximum per-input number of samples for sequence (id = %s) %ls"
" is less than expected (%u vs. %" PRIu64 ").\n",
GetSequenceKey(sequenceDsc).c_str(),
GetFileInfo().c_str(), maxInputLength, expectedRowCount);
}
@ -476,7 +475,7 @@ typename TextParser<ElemType>::SequenceBuffer TextParser<ElemType>::LoadSequence
if (m_traceLevel >= Info)
{
fprintf(stderr,
"INFO: Finished loading sequence (id = %ls) %ls,"
"INFO: Finished loading sequence (id = %s) %ls,"
" successfully read %" PRIu64 " out of expected %" PRIu64 " rows.\n",
GetSequenceKey(sequenceDsc).c_str(), GetFileInfo().c_str(), numRowsRead, expectedRowCount);
}
@ -674,8 +673,9 @@ bool TextParser<ElemType>::TryGetInputId(size_t& id, size_t& bytesToRead)
if (ShouldWarn())
{
fprintf(stderr,
"WARNING: Invalid input name ('%s') %ls.\n",
name.c_str(), GetFileInfo().c_str());
"WARNING: Invalid input ('%s') %ls. "
"Input name '%s' was not specified in the reader config section.\n",
name.c_str(), GetFileInfo().c_str(), name.c_str());
}
}
else if (ShouldWarn())
@ -1192,24 +1192,22 @@ std::wstring TextParser<ElemType>::GetFileInfo()
return info.str();
}
static SequenceDescription s_InvalidSequence{0, 0, 0, false, {0, 0}};
template <class ElemType>
void TextParser<ElemType>::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& result)
bool TextParser<ElemType>::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& result)
{
const auto& keys = m_indexer->GetIndex().m_keyToSequenceInChunk;
auto sequenceLocation = keys.find(key.m_sequence);
if (sequenceLocation == keys.end())
{
result = s_InvalidSequence;
return;
return false;
}
result = m_indexer->GetIndex().m_chunks[sequenceLocation->second.first].m_sequences[sequenceLocation->second.second];
return true;
}
template <class ElemType>
const wstring& TextParser<ElemType>::GetSequenceKey(const SequenceDescriptor& s) const
const string& TextParser<ElemType>::GetSequenceKey(const SequenceDescriptor& s) const
{
return m_corpus->GetStringRegistry()[s.m_key.m_sequence];
}
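
The GetSequenceDescriptionByKey change above replaces the old invalid-sequence sentinel with a boolean return value: a caller now learns about a missing key from the return value instead of receiving a copy of s_InvalidSequence. A minimal Python sketch of the same lookup pattern, assuming the index can be modelled as a plain dict from sequence key to a (chunk index, sequence-within-chunk index) pair; all names below are illustrative, not the reader's actual API:

def get_sequence_description_by_key(key, key_to_location, chunks):
    # key_to_location stands in for the indexer's m_keyToSequenceInChunk map,
    # chunks for index.m_chunks; both are toy structures, not CNTK types.
    location = key_to_location.get(key)
    if location is None:
        # Old behaviour: copy a static s_InvalidSequence into the result.
        # New behaviour: report the miss through the return value.
        return False, None
    chunk_index, sequence_index = location
    return True, chunks[chunk_index][sequence_index]

# Toy usage:
chunks = [[{"id": 0, "numberOfSamples": 7}], [{"id": 1, "numberOfSamples": 3}]]
key_to_location = {"utt1": (0, 0), "utt2": (1, 0)}
print(get_sequence_description_by_key("utt2", key_to_location, chunks))     # (True, {...})
print(get_sequence_description_by_key("missing", key_to_location, chunks))  # (False, None)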

View file

@ -16,7 +16,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
template <class ElemType>
class CNTKTextFormatReaderTestRunner;
// TODO: more details when tracing warnings
// TODO: more details when tracing warnings
// (e.g., buffer content around the char that triggered the warning)
template <class ElemType>
class TextParser : public DataDeserializerBase {
@ -28,27 +28,27 @@ public:
~TextParser();
// Retrieves a chunk of data.
ChunkPtr GetChunk(size_t chunkId) override;
ChunkPtr GetChunk(ChunkIdType chunkId) override;
// Get information about chunks.
ChunkDescriptions GetChunkDescriptions() override;
// Get information about particular chunk.
void GetSequencesForChunk(size_t chunkId, std::vector<SequenceDescription>& result) override;
void GetSequencesForChunk(ChunkIdType chunkId, std::vector<SequenceDescription>& result) override;
void GetSequenceDescriptionByKey(const KeyType&, SequenceDescription&) override;
bool GetSequenceDescriptionByKey(const KeyType&, SequenceDescription&) override;
private:
// Builds an index of the input data.
void Initialize();
// A buffer to keep data for all samples in a (variable length) sequence
// A buffer to keep data for all samples in a (variable length) sequence
// from a single input stream.
struct InputStreamBuffer
{
virtual ~InputStreamBuffer() { };
size_t m_numberOfSamples = 0;
uint32_t m_numberOfSamples = 0;
std::vector<ElemType> m_buffer;
};
@ -61,8 +61,8 @@ private:
}
};
// In case of sparse input, we also need a vector of
// indices (one index for each input value) and a vector
// In case of sparse input, we also need a vector of
// indices (one index for each input value) and a vector
// of NNZ counts (one for each sample).
struct SparseInputStreamBuffer : InputStreamBuffer
{
@ -76,10 +76,10 @@ private:
// A chunk of input data in the text format.
class TextDataChunk;
typedef std::shared_ptr<TextDataChunk> TextChunkPtr;
enum TraceLevel
enum TraceLevel
{
Error = 0,
Warning = 1,
@ -115,13 +115,13 @@ private:
bool m_hadWarnings;
unsigned int m_numAllowedErrors;
bool m_skipSequenceIds;
unsigned int m_numRetries; // specifies the number of times an unsuccessful
unsigned int m_numRetries; // specifies the number of times an unsuccessful
// file operation should be repeated (default value is 5).
// Corpus descriptor.
CorpusDescriptorPtr m_corpus;
// throws runtime exception when number of parsing errors is
// throws runtime exception when number of parsing errors is
// greater than the specified threshold
void IncrementNumberOfErrorsOrDie();
@ -188,7 +188,7 @@ private:
friend class CNTKTextFormatReaderTestRunner<ElemType>;
const std::wstring& GetSequenceKey(const SequenceDescriptor& s) const;
const std::string& GetSequenceKey(const SequenceDescriptor& s) const;
DISABLE_COPY_AND_MOVE(TextParser);
};

View file

@ -1,145 +1,146 @@
import argparse
parser = argparse.ArgumentParser(
description="UCI to CNTKText format converter",
epilog=("Quick example - converting MNIST data (see Examples/Image/MNIST):"
"\n\n\t"
"--input_file Examples/Image/MNIST/Data/Train-28x28.txt "
"--features_start 1 "
"--features_dim 784 "
"--labels_start 0 "
"--labels_dim 1 "
"--num_labels 10 "
"--output_file Examples/Image/MNIST/Data/Train-28x28_cntk_text.txt"
"\n\n"
"For more information please visit "
"https://github.com/Microsoft/CNTK/wiki/CNTKTextFormat-Reader"),
formatter_class=argparse.RawTextHelpFormatter)
def convert(file_in, file_out, features_start, features_dim,
labels_start, labels_dim, num_labels, label_type='Category', mapping_file=None):
label_map = {}
if label_type == "Category":
if mapping_file is not None:
with open(mapping_file, 'r') as f:
for line in f.read().splitlines():
label_map[line] = len(label_map)
requiredNamed = parser.add_argument_group('required arguments')
num_labels = max(num_labels, len(label_map))
else:
label_map = {str(x) : x for x in range(num_labels)}
requiredNamed.add_argument("-in", "--input_file",
help="input file path", required=True)
requiredNamed.add_argument("-fs", "--features_start", type=int,
help="start offset of feature columns", required=True)
requiredNamed.add_argument("-fd", "--features_dim", type=int,
help=("dimension of the feature vector "
"(number of feature columns in the input file)"),
required=True)
input_file = open(file_in, 'r')
output_file = open(file_out, 'w')
parser.add_argument("-lt", "--label_type", default="Category",
help=("Label type (indicates how the label columns should "
" be interpreted)"),
choices=["Category", "Regression", "None"])
parser.add_argument("-ls", "--labels_start", type=int,
help=("dimension of the label vector "
"(number of label columns in the input file)"))
parser.add_argument("-nl", "--num_labels", type=int,
help="number of possible label values "
"(required for categorical labels)")
parser.add_argument("-ld", "--labels_dim", type=int, default=1,
help=("dimension of the input label vector "
"(number of label columns in the input file, "
"default is 1)"))
parser.add_argument("--mapping_file",
help=("the path to a file used to map from the label value "
"to a numerical label identifier (if omitted, the "
"label value is interpreted as a numerical "
"identifier)"))
parser.add_argument("-out", "--output_file", help="output file path")
for line in input_file.readlines():
values = line.split()
args = parser.parse_args()
if label_type != 'None':
max_length = max(labels_start + labels_dim, features_start + features_dim)
if len(values) < (labels_dim + features_dim):
raise RuntimeError(("Too few input columns ({} out of expected {}) ")
.format(len(values), (labels_dim + features_dim)))
elif len(values) < max_length:
raise RuntimeError(
("Too few input columns ({} out of expected {}) ")
.format(len(values), max_length))
# a number of sanity checks
if args.label_type != "None" and args.labels_start is None:
parser.error("-ls/--label_start is required when label type is not 'None'")
labels = values[labels_start:labels_start+labels_dim]
if args.label_type == "Category":
if args.num_labels is None:
parser.error("-nl/--num_labels is required when label type is 'Category'")
if args.labels_dim > 1:
parser.error("-ld/--labels_dim cannot be greater than 1 "
"when label type is 'Category'")
if label_type == 'Category':
one_hot = ['0'] * num_labels
# there's only one label
label = labels[0]
if label not in label_map:
raise RuntimeError(("Illegal label value: '{}'").format(label))
one_hot[label_map[label]] = '1'
labels = one_hot
if args.label_type == "Regression":
if args.num_labels > args.labels_dim:
parser.error("-nl/--num_labels is optional and "
" cannot exceed -ld/--labels_dim "
" when label type is 'Regression'")
output_file.write("|labels " + " ".join(labels))
output_file.write("\t")
if args.label_type != 'None':
if (((args.labels_start <= args.features_start) and
(args.labels_start + args.labels_dim > args.features_start)) or
((args.labels_start > args.features_start) and
(args.features_start + args.features_dim > args.labels_start))):
parser.error("Label and feature column ranges must not overlap.")
elif len(values) < features_start+features_dim:
raise RuntimeError(
("Too few input columns ({} out of expected {}) ")
.format(len(values), features_start+features_dim))
file_in = args.input_file
file_out = args.output_file
output_file.write(
"|features " + " ".join(values[features_start:features_start+features_dim]))
output_file.write("\n")
num_labels = args.num_labels
label_map = {}
if args.label_type == "Category":
if args.mapping_file is not None:
with open(args.mapping_file, 'r') as f:
for line in f.read().splitlines():
label_map[line] = len(label_map)
input_file.close()
output_file.close()
num_labels = max(num_labels, len(label_map))
else:
label_map = {str(x) : x for x in range(num_labels)}
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="UCI to CNTKText format converter",
epilog=("Quick example - converting MNIST data (see Examples/Image/MNIST):"
"\n\n\t"
"--input_file Examples/Image/MNIST/Data/Train-28x28.txt "
"--features_start 1 "
"--features_dim 784 "
"--labels_start 0 "
"--labels_dim 1 "
"--num_labels 10 "
"--output_file Examples/Image/MNIST/Data/Train-28x28_cntk_text.txt"
"\n\n"
"For more information please visit "
"https://github.com/Microsoft/CNTK/wiki/CNTKTextFormat-Reader"),
formatter_class=argparse.RawTextHelpFormatter)
if not file_out:
dot = file_in.rfind(".")
if dot == -1:
dot = len(file_in)
file_out = file_in[:dot] + "_cntk_text" + file_in[dot:]
requiredNamed = parser.add_argument_group('required arguments')
print (" Converting from UCI format\n\t '{}'\n"
" to CNTK text format\n\t '{}'".format(file_in, file_out))
requiredNamed.add_argument("-in", "--input_file",
help="input file path", required=True)
requiredNamed.add_argument("-fs", "--features_start", type=int,
help="start offset of feature columns", required=True)
requiredNamed.add_argument("-fd", "--features_dim", type=int,
help=("dimension of the feature vector "
"(number of feature columns in the input file)"),
required=True)
input_file = open(file_in, 'r')
output_file = open(file_out, 'w')
parser.add_argument("-lt", "--label_type", default="Category",
help=("Label type (indicates how the label columns should "
" be interpreted)"),
choices=["Category", "Regression", "None"])
parser.add_argument("-ls", "--labels_start", type=int,
help=("dimension of the label vector "
"(number of label columns in the input file)"))
parser.add_argument("-nl", "--num_labels", type=int,
help="number of possible label values "
"(required for categorical labels)")
parser.add_argument("-ld", "--labels_dim", type=int, default=1,
help=("dimension of the input label vector "
"(number of label columns in the input file, "
"default is 1)"))
parser.add_argument("--mapping_file",
help=("the path to a file used to map from the label value "
"to a numerical label identifier (if omitted, the "
"label value is interpreted as a numerical "
"identifier)"))
parser.add_argument("-out", "--output_file", help="output file path")
for line in input_file.readlines():
values = line.split()
args = parser.parse_args()
if args.label_type != 'None':
max_length = max(args.labels_start + args.labels_dim,
args.features_start + args.features_dim)
if len(values) < (args.labels_dim + args.features_dim):
raise RuntimeError(
("Too few input columns ({} out of expected {}) ")
.format(len(values), (args.labels_dim + args.features_dim)))
elif len(values) < max_length:
raise RuntimeError(
("Too few input columns ({} out of expected {}) ")
.format(len(values), max_length))
# a number of sanity checks
if args.label_type != "None" and args.labels_start is None:
parser.error("-ls/--label_start is required when label type is not 'None'")
if args.label_type == "Category":
if args.num_labels is None:
parser.error("-nl/--num_labels is required when label type is 'Category'")
if args.labels_dim > 1:
parser.error("-ld/--labels_dim cannot be greater than 1 "
"when label type is 'Category'")
labels = values[args.labels_start:args.labels_start+args.labels_dim]
if args.label_type == "Regression":
if args.num_labels > args.labels_dim:
parser.error("-nl/--num_labels is optional and "
" cannot exceed -ld/--labels_dim "
" when label type is 'Regression'")
if args.label_type == 'Category':
one_hot = ['0'] * num_labels
# there's only one label
label = labels[0]
if label not in label_map:
raise RuntimeError(("Illegal label value: '{}'").format(label))
one_hot[label_map[label]] = '1'
labels = one_hot
if args.label_type != 'None':
if (((args.labels_start <= args.features_start) and
(args.labels_start + args.labels_dim > args.features_start)) or
((args.labels_start > args.features_start) and
(args.features_start + args.features_dim > args.labels_start))):
parser.error("Label and feature column ranges must not overlap.")
output_file.write("|labels " + " ".join(labels))
output_file.write("\t")
file_in = args.input_file
file_out = args.output_file
elif len(values) < args.features_start+args.features_dim:
raise RuntimeError(
("Too few input columns ({} out of expected {}) ")
.format(len(values), args.features_start+args.features_dim))
if not file_out:
dot = file_in.rfind(".")
if dot == -1:
dot = len(file_in)
file_out = file_in[:dot] + "_cntk_text" + file_in[dot:]
output_file.write(
"|features " +
" ".join(values[args.features_start:args.features_start+args.features_dim]))
output_file.write("\n")
print (" Converting from UCI format\n\t '{}'\n"
" to CNTK text format\n\t '{}'".format(file_in, file_out))
input_file.close()
output_file.close()
convert(file_in, file_out, args.features_start, args.features_dim,
args.labels_start, args.labels_dim, args.num_labels, args.label_type, args.mapping_file)
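
For context, the converter above writes one sample per line in CNTKTextFormat: a |labels block (one-hot for categorical labels), a tab, then a |features block. A small Python sketch of that inner loop, assuming the default label mapping (the label value is already the numerical class id) and the column conventions from the script's epilog (label in column 0, features starting at column 1); the helper name is illustrative:

def uci_row_to_cntk_text(values, labels_start=0, labels_dim=1,
                         features_start=1, features_dim=3, num_labels=10):
    # One-hot encode the single categorical label, as convert() does.
    one_hot = ['0'] * num_labels
    one_hot[int(values[labels_start])] = '1'
    features = values[features_start:features_start + features_dim]
    return "|labels " + " ".join(one_hot) + "\t|features " + " ".join(features) + "\n"

# Toy row: a 3-"pixel" sample labelled 2 out of 10 classes. Prints one line:
# the one-hot labels, a tab, then "|features 0 128 255".
print(uci_row_to_cntk_text("2 0 128 255".split()), end="")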

View file

@ -38,12 +38,12 @@ struct Minibatch;
// The whole CompositeDataReader is meant as a stopgap to allow deserializers/transformers composition until SGD talkes
// directly to the new Reader API. The example of the cntk configuration that this reader supports can be found at
// Tests/EndToEndTests/Speech/ExperimentalHtkmlfReader/LSTM/FullUtterance/cntk.cntk
// Tests/EndToEndTests/Speech/HtkDeserializers/LSTM/FullUtterance/cntk.cntk
// CompositeDataReader is a factory for the new readers. Its main responsibility is to read the configuration and create the
// corresponding set of deserializers, the corpus descriptor, transformers, randomizer and packer, providing the following functionality:
// - all input sequences are defined by the corpus descriptor
// - deserializers provide sequences according to the corpus descriptor
// - sequences can be transformed by the transformers applied on top of deserializer (TODO: not yet in place)
// - sequences can be transformed by the transformers applied on top of deserializer
// - deserializers are bound together using the bundler - it bundles sequences with the same sequence id retrieved from different deserializers
// - packer is used to pack randomized sequences into the minibatch
// The composite reader is currently also responsible for asynchronous prefetching of the minibatch data.
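
To illustrate the bundling step mentioned above: the bundler pairs sequences that share a sequence id across deserializers (for example, HTK features from one and MLF labels from another) and drops sequences that any deserializer cannot resolve. A rough Python sketch of that idea, treating each deserializer as a plain dict from sequence id to data; the function and variable names are illustrative, not the reader's actual API:

def bundle(primary, *secondary):
    # Keep only sequence ids that the primary deserializer provides and that
    # every secondary deserializer can also resolve.
    bundled = {}
    for seq_id, data in primary.items():
        if all(seq_id in other for other in secondary):
            bundled[seq_id] = [data] + [other[seq_id] for other in secondary]
    return bundled

features = {"utt1": [0.1, 0.2], "utt2": [0.3]}   # e.g. from an HTKFeatureDeserializer
labels   = {"utt1": [5], "utt3": [7]}            # e.g. from an HTKMLFDeserializer
print(bundle(features, labels))                  # only "utt1" has both streams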

View file

@ -3,7 +3,6 @@
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// stdafx.cpp : source file that includes just the standard includes
// ExperimentalHTKMLFReader.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
//

View file

@ -1,71 +0,0 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
#pragma once
#include "DataDeserializer.h"
#include "../HTKMLFReader/htkfeatio.h"
namespace Microsoft { namespace MSR { namespace CNTK {
// This class represents a descriptor for a single utterance.
// It is only used internally by the HTK deserializer.
class UtteranceDescription
{
// Archive filename and frame range in that file.
msra::asr::htkfeatreader::parsedpath m_path;
// Index of the utterance inside the chunk.
size_t m_indexInsideChunk;
// Position of the first sample of the utterance inside the chunk.
size_t m_startFrameIndexInsideChunk;
// Chunk id.
size_t m_chunkId;
// Utterance id.
size_t m_id;
public:
UtteranceDescription(msra::asr::htkfeatreader::parsedpath&& path)
: m_path(std::move(path)), m_indexInsideChunk(0), m_startFrameIndexInsideChunk(0), m_chunkId(SIZE_MAX)
{
}
const msra::asr::htkfeatreader::parsedpath& GetPath() const
{
return m_path;
}
size_t GetNumberOfFrames() const
{
return m_path.numframes();
}
wstring GetKey() const
{
std::wstring filename(m_path);
return filename.substr(0, filename.find_last_of(L"."));
}
void AssignToChunk(size_t chunkId, size_t indexInsideChunk, size_t frameInsideChunk)
{
m_chunkId = chunkId;
m_indexInsideChunk = indexInsideChunk;
m_startFrameIndexInsideChunk = frameInsideChunk;
}
size_t GetId() const { return m_id; }
void SetId(size_t id) { m_id = id; }
size_t GetChunkId() const { return m_chunkId; }
size_t GetIndexInsideChunk() const { return m_indexInsideChunk;}
size_t GetStartFrameIndexInsideChunk() const { return m_startFrameIndexInsideChunk; }
void SetStartFrameInsideChunk(size_t startFrameIndexInsideChunk)
{
m_startFrameIndexInsideChunk = startFrameIndexInsideChunk;
}
};
}}}

View file

@ -228,12 +228,18 @@ vector<wstring> ConfigHelper::GetSequencePaths()
// TODO: possibly change to class File, we should be able to read data from pipelines.E.g.
// scriptPath = "gzip -c -d FILE.txt |", or do a popen with C++ streams, so that we can have a generic open function that returns an ifstream.
ifstream scp(msra::strfun::utf8(scriptPath).c_str());
if (!scp)
RuntimeError("Failed to open input file: %ls", scriptPath.c_str());
string line;
while (getline(scp, line))
{
filelist.push_back(msra::strfun::utf16(line));
}
if (scp.bad())
RuntimeError("An error occurred while reading input file: %ls", scriptPath.c_str());
fprintf(stderr, " %d entries\n", static_cast<int>(filelist.size()));
// post processing file list :

View file

@ -16,6 +16,7 @@
#include "HeapMemoryProvider.h"
#include "HTKDataDeserializer.h"
#include "MLFDataDeserializer.h"
#include "StringUtil.h"
namespace Microsoft { namespace MSR { namespace CNTK {
@ -39,11 +40,11 @@ extern "C" DATAREADER_API void GetReaderD(IDataReader** preader)
// TODO: Not safe from the ABI perspective. Will be uglified to make the interface ABI.
extern "C" DATAREADER_API bool CreateDeserializer(IDataDeserializer** deserializer, const std::wstring& type, const ConfigParameters& deserializerConfig, CorpusDescriptorPtr corpus, bool primary)
{
if (type == L"HTKDataDeserializer")
if (type == L"HTKFeatureDeserializer")
{
*deserializer = new HTKDataDeserializer(corpus, deserializerConfig, primary);
}
else if (type == L"MLFDataDeserializer")
else if (type == L"HTKMLFDeserializer")
{
*deserializer = new MLFDataDeserializer(corpus, deserializerConfig, primary);
}

View file

@ -33,14 +33,14 @@ class HTKChunkDescription
// Total number of frames in this chunk
size_t m_totalFrames = 0;
// Chunk ID (only used for diagnostics)
size_t m_chunkId;
// Chunk id.
ChunkIdType m_chunkId;
public:
HTKChunkDescription() : m_chunkId(SIZE_MAX) { };
HTKChunkDescription() : m_chunkId(CHUNKID_MAX) { };
HTKChunkDescription(size_t chunkId) : m_chunkId(chunkId) { };
HTKChunkDescription(ChunkIdType chunkId) : m_chunkId(chunkId) { };
// Gets number of utterances in the chunk.
size_t GetNumberOfUtterances() const
@ -48,6 +48,11 @@ public:
return m_utterances.size();
}
ChunkIdType GetChunkId() const
{
return m_chunkId;
}
// Adds an utterance to the chunk.
void Add(UtteranceDescription&& utterance)
{
@ -73,16 +78,22 @@ public:
return &m_utterances[index];
}
// Get start frame index inside chunk.
size_t GetStartFrameIndexInsideChunk(size_t index) const
{
return m_firstFrames[index];
}
// Get utterance by the absolute frame index in chunk.
// Uses the upper bound to do the binary search among sequences of the chunk.
size_t GetUtteranceForChunkFrameIndex(size_t frameIndex) const
{
auto result = std::upper_bound(
m_utterances.begin(),
m_utterances.end(),
frameIndex,
[](size_t fi, const UtteranceDescription& a) { return fi < a.GetStartFrameIndexInsideChunk(); });
return result - 1 - m_utterances.begin();
m_firstFrames.begin(),
m_firstFrames.end(),
frameIndex,
[](size_t fi, const size_t& a) { return fi < a; });
return result - 1 - m_firstFrames.begin();
}
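
The GetUtteranceForChunkFrameIndex change above now binary-searches the per-utterance first-frame offsets (m_firstFrames) instead of the utterance descriptors themselves. An equivalent lookup in Python, shown as a sketch with toy data (bisect_right plays the role of std::upper_bound):

import bisect

def utterance_for_chunk_frame_index(first_frames, frame_index):
    # first_frames[i] is the chunk-relative index of utterance i's first frame.
    # bisect_right returns the first offset greater than frame_index, so the
    # containing utterance is one position to the left, as with upper_bound.
    return bisect.bisect_right(first_frames, frame_index) - 1

# Three utterances of 5, 3 and 4 frames start at offsets 0, 5 and 8.
first_frames = [0, 5, 8]
assert utterance_for_chunk_frame_index(first_frames, 0) == 0
assert utterance_for_chunk_frame_index(first_frames, 6) == 1
assert utterance_for_chunk_frame_index(first_frames, 11) == 2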
// Returns all frames of a given utterance.
@ -130,7 +141,7 @@ public:
if (verbosity)
{
fprintf(stderr, "HTKChunkDescription::RequireData: read physical chunk %" PRIu64 " (%" PRIu64 " utterances, %" PRIu64 " frames, %" PRIu64 " bytes)\n",
fprintf(stderr, "HTKChunkDescription::RequireData: read physical chunk %u (%" PRIu64 " utterances, %" PRIu64 " frames, %" PRIu64 " bytes)\n",
m_chunkId,
m_utterances.size(),
m_totalFrames,
@ -160,7 +171,7 @@ public:
if (verbosity)
{
fprintf(stderr, "HTKChunkDescription::ReleaseData: release physical chunk %" PRIu64 " (%" PRIu64 " utterances, %" PRIu64 " frames, %" PRIu64 " bytes)\n",
fprintf(stderr, "HTKChunkDescription::ReleaseData: release physical chunk %u (%" PRIu64 " utterances, %" PRIu64 " frames, %" PRIu64 " bytes)\n",
m_chunkId,
m_utterances.size(),
m_totalFrames,

View file

@ -4,6 +4,8 @@
//
#include "stdafx.h"
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include "HTKDataDeserializer.h"
#include "ConfigHelper.h"
#include "Basics.h"
@ -109,79 +111,83 @@ void HTKDataDeserializer::InitializeChunkDescriptions(ConfigHelper& config)
vector<UtteranceDescription> utterances;
utterances.reserve(paths.size());
auto& stringRegistry = m_corpus->GetStringRegistry();
size_t allUtterances = 0, allFrames = 0;
for (const auto& u : paths)
{
UtteranceDescription description(move(msra::asr::htkfeatreader::parsedpath(u)));
size_t numberOfFrames = description.GetNumberOfFrames();
// TODO: we need at least 2 frames for boundary markers to work
// TODO: this should be removed when MLF deserializer is rewritten.
if (numberOfFrames < 2)
{
fprintf(stderr, "HTKDataDeserializer::HTKDataDeserializer: skipping utterance with %d frames because it has less than 2 frames: %ls\n",
(int)numberOfFrames, description.GetKey().c_str());
continue;
}
// For logging, also account for utterances and frames that we skip
allUtterances++;
allFrames += numberOfFrames;
wstring key = description.GetKey();
string key = description.GetKey();
if (!m_corpus->IsIncluded(key))
{
continue;
}
// No need to store key, releasing it.
description.ClearLogicalPath();
size_t id = stringRegistry[key];
description.SetId(id);
utterances.push_back(description);
m_totalNumberOfFrames += numberOfFrames;
}
const size_t MaxUtterancesPerChunk = 65535;
// distribute them over chunks
// TODO: We should be able to configure IO chunks based on size.
// distribute utterances over chunks
// We simply count off frames until we reach the chunk size.
// Note that we first randomize the chunks, i.e. when used, chunks are non-consecutive and thus cause the disk head to seek for each chunk.
// We have 100 frames in a second.
const size_t FramesPerSec = 100;
// A chunk consitutes 15 minutes
// A chunk consists of 15 minutes
const size_t ChunkFrames = 15 * 60 * FramesPerSec; // number of frames to target for each chunk
// Loading an initial 24-hour range will involve 96 disk seeks, acceptable.
// When paging chunk by chunk, chunk size ~14 MB.
m_chunks.resize(0);
m_chunks.reserve(m_totalNumberOfFrames / ChunkFrames);
int chunkId = -1;
size_t startFrameInsideChunk = 0;
ChunkIdType chunkId = 0;
foreach_index(i, utterances)
{
// if exceeding current entry--create a new one
// I.e. our chunks are a little larger than wanted (on av. half the av. utterance length).
if (m_chunks.empty() || m_chunks.back().GetTotalFrames() > ChunkFrames || m_chunks.back().GetNumberOfUtterances() >= MaxUtterancesPerChunk)
if (m_chunks.empty() || m_chunks.back().GetTotalFrames() > ChunkFrames)
{
m_chunks.push_back(HTKChunkDescription(++chunkId));
startFrameInsideChunk = 0;
m_chunks.push_back(HTKChunkDescription(chunkId++));
}
// append utterance to last chunk
HTKChunkDescription& currentChunk = m_chunks.back();
utterances[i].AssignToChunk(chunkId, currentChunk.GetNumberOfUtterances(), startFrameInsideChunk);
if (!m_primary)
{
// Have to store key <-> utterance mapping for non primary deserializers.
m_keyToChunkLocation[utterances[i].GetId()] = make_pair(utterances[i].GetChunkId(), utterances[i].GetIndexInsideChunk());
m_keyToChunkLocation[utterances[i].GetId()] = make_pair(currentChunk.GetChunkId(), currentChunk.GetNumberOfUtterances());
}
startFrameInsideChunk += utterances[i].GetNumberOfFrames();
currentChunk.Add(move(utterances[i]));
}
fprintf(stderr,
"HTKDataDeserializer::HTKDataDeserializer: %d utterances grouped into %d chunks, av. chunk size: %.1f utterances, %.1f frames\n",
(int)utterances.size(),
(int)m_chunks.size(),
"HTKDataDeserializer::HTKDataDeserializer: "
"selected %" PRIu64 " utterances grouped into %" PRIu64 " chunks, "
"average chunk size: %.1f utterances, %.1f frames "
"(for I/O: %.1f utterances, %.1f frames)\n",
utterances.size(),
m_chunks.size(),
utterances.size() / (double)m_chunks.size(),
m_totalNumberOfFrames / (double)m_chunks.size());
m_totalNumberOfFrames / (double)m_chunks.size(),
allUtterances / (double)m_chunks.size(),
allFrames / (double)m_chunks.size());
if (utterances.empty())
{
RuntimeError("HTKDataDeserializer: No utterances to process.");
}
}
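
To make the chunking policy above concrete: utterances are appended to the current chunk until it holds more than roughly 15 minutes of frames (at 100 frames per second), then a new chunk is started, so chunks end up slightly larger than the target on average. A small Python sketch of that greedy assignment, using illustrative names:

FRAMES_PER_SEC = 100
CHUNK_FRAMES = 15 * 60 * FRAMES_PER_SEC   # target of ~15 minutes per chunk

def assign_to_chunks(utterance_frame_counts):
    # Greedily pack utterances into chunks; a new chunk is opened only once the
    # current one already exceeds the frame target, mirroring the loop above.
    chunks, frames_in_current = [], 0
    for index, frames in enumerate(utterance_frame_counts):
        if not chunks or frames_in_current > CHUNK_FRAMES:
            chunks.append([])
            frames_in_current = 0
        chunks[-1].append(index)
        frames_in_current += frames
    return chunks

# Toy example: 100 utterances of 10 seconds (1000 frames) each; the first chunk
# takes 91 utterances (just past the 15-minute mark), the rest go to a second.
print([len(c) for c in assign_to_chunks([10 * FRAMES_PER_SEC] * 100)])   # [91, 9]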
// Describes exposed stream - a single stream of htk features.
@ -215,7 +221,7 @@ ChunkDescriptions HTKDataDeserializer::GetChunkDescriptions()
ChunkDescriptions chunks;
chunks.reserve(m_chunks.size());
for (size_t i = 0; i < m_chunks.size(); ++i)
for (ChunkIdType i = 0; i < m_chunks.size(); ++i)
{
auto cd = make_shared<ChunkDescription>();
cd->m_id = i;
@ -230,7 +236,7 @@ ChunkDescriptions HTKDataDeserializer::GetChunkDescriptions()
// Gets sequences for a particular chunk.
// This information is used by the randomizer to fill in current windows of sequences.
void HTKDataDeserializer::GetSequencesForChunk(size_t chunkId, vector<SequenceDescription>& result)
void HTKDataDeserializer::GetSequencesForChunk(ChunkIdType chunkId, vector<SequenceDescription>& result)
{
const HTKChunkDescription& chunk = m_chunks[chunkId];
result.reserve(m_frameMode ? chunk.GetTotalFrames() : chunk.GetNumberOfUtterances());
@ -251,7 +257,6 @@ void HTKDataDeserializer::GetSequencesForChunk(size_t chunkId, vector<SequenceDe
f.m_key.m_sequence = sequence;
f.m_key.m_sample = k;
f.m_id = offsetInChunk++;
f.m_isValid = true;
f.m_numberOfSamples = 1;
result.push_back(f);
}
@ -264,8 +269,12 @@ void HTKDataDeserializer::GetSequencesForChunk(size_t chunkId, vector<SequenceDe
f.m_key.m_sequence = sequence;
f.m_key.m_sample = 0;
f.m_id = offsetInChunk++;
f.m_isValid = true;
f.m_numberOfSamples = utterance->GetNumberOfFrames();
if (SEQUENCELEN_MAX < utterance->GetNumberOfFrames())
{
RuntimeError("Maximum number of samples per sequence exceeded");
}
f.m_numberOfSamples = (uint32_t) utterance->GetNumberOfFrames();
result.push_back(f);
}
}
@ -302,7 +311,7 @@ private:
class HTKDataDeserializer::HTKChunk : public Chunk
{
public:
HTKChunk(HTKDataDeserializer* parent, size_t chunkId) : m_parent(parent), m_chunkId(chunkId)
HTKChunk(HTKDataDeserializer* parent, ChunkIdType chunkId) : m_parent(parent), m_chunkId(chunkId)
{
auto& chunkDescription = m_parent->m_chunks[chunkId];
@ -330,11 +339,11 @@ public:
private:
DISABLE_COPY_AND_MOVE(HTKChunk);
HTKDataDeserializer* m_parent;
size_t m_chunkId;
ChunkIdType m_chunkId;
};
// Gets a data chunk with the specified chunk id.
ChunkPtr HTKDataDeserializer::GetChunk(size_t chunkId)
ChunkPtr HTKDataDeserializer::GetChunk(ChunkIdType chunkId)
{
return make_shared<HTKChunk>(this, chunkId);
};
@ -388,7 +397,11 @@ struct HTKFloatSequenceData : DenseSequenceData
{
HTKFloatSequenceData(FeatureMatrix&& data) : m_buffer(data)
{
m_numberOfSamples = data.GetNumberOfColumns();
m_numberOfSamples = (uint32_t)data.GetNumberOfColumns();
if (m_numberOfSamples != data.GetNumberOfColumns())
{
RuntimeError("Maximum number of samples per sequence exceeded.");
}
m_data = m_buffer.GetData();
}
@ -401,7 +414,11 @@ struct HTKDoubleSequenceData : DenseSequenceData
{
HTKDoubleSequenceData(FeatureMatrix& data) : m_buffer(data.GetData(), data.GetData() + data.GetTotalSize())
{
m_numberOfSamples = data.GetNumberOfColumns();
m_numberOfSamples = (uint32_t)data.GetNumberOfColumns();
if (m_numberOfSamples != data.GetNumberOfColumns())
{
RuntimeError("Maximum number of samples per sequence exceeded.");
}
m_data = m_buffer.data();
}
@ -411,7 +428,7 @@ private:
// Get a sequence by its chunk id and sequence id.
// Sequence ids are guaranteed to be unique inside a chunk.
void HTKDataDeserializer::GetSequenceById(size_t chunkId, size_t id, vector<SequenceDataPtr>& r)
void HTKDataDeserializer::GetSequenceById(ChunkIdType chunkId, size_t id, vector<SequenceDataPtr>& r)
{
const auto& chunkDescription = m_chunks[chunkId];
size_t utteranceIndex = m_frameMode ? chunkDescription.GetUtteranceForChunkFrameIndex(id) : id;
@ -425,7 +442,7 @@ void HTKDataDeserializer::GetSequenceById(size_t chunkId, size_t id, vector<Sequ
if (m_frameMode)
{
// For frame mode augment a single frame.
size_t frameIndex = id - utterance->GetStartFrameIndexInsideChunk();
size_t frameIndex = id - chunkDescription.GetStartFrameIndexInsideChunk(utteranceIndex);
msra::dbn::augmentneighbors(utteranceFramesWrapper, vector<char>(), frameIndex, m_augmentationWindow.first, m_augmentationWindow.second, features, 0);
}
else
@ -455,27 +472,24 @@ void HTKDataDeserializer::GetSequenceById(size_t chunkId, size_t id, vector<Sequ
r.push_back(result);
}
static SequenceDescription s_InvalidSequence{0, 0, 0, false};
// Gets sequence description by its key.
void HTKDataDeserializer::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& d)
bool HTKDataDeserializer::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& d)
{
assert(!m_primary);
auto iter = m_keyToChunkLocation.find(key.m_sequence);
if (iter == m_keyToChunkLocation.end())
{
// Unknown sequence. Return invalid.
d = s_InvalidSequence;
}
else
{
const auto& chunk = m_chunks[iter->second.first];
const auto& sequence = chunk.GetUtterance(iter->second.second);
d.m_chunkId = sequence->GetChunkId();
d.m_id = m_frameMode ? sequence->GetStartFrameIndexInsideChunk() + key.m_sample : sequence->GetIndexInsideChunk();
d.m_isValid = true;
d.m_numberOfSamples = m_frameMode ? 1 : sequence->GetNumberOfFrames();
return false;
}
auto chunkId = iter->second.first;
auto utteranceIndexInsideChunk = iter->second.second;
const auto& chunk = m_chunks[chunkId];
const auto& sequence = chunk.GetUtterance(utteranceIndexInsideChunk);
d.m_chunkId = (ChunkIdType)chunkId;
d.m_id = m_frameMode ? chunk.GetStartFrameIndexInsideChunk(utteranceIndexInsideChunk) + key.m_sample : utteranceIndexInsideChunk;
d.m_numberOfSamples = m_frameMode ? 1 : (uint32_t)sequence->GetNumberOfFrames();
return true;
}
}}}

View file

@ -29,13 +29,13 @@ public:
virtual ChunkDescriptions GetChunkDescriptions() override;
// Get information about particular chunk.
virtual void GetSequencesForChunk(size_t chunkId, std::vector<SequenceDescription>& result) override;
virtual void GetSequencesForChunk(ChunkIdType chunkId, std::vector<SequenceDescription>& result) override;
// Retrieves data for a chunk.
virtual ChunkPtr GetChunk(size_t chunkId) override;
virtual ChunkPtr GetChunk(ChunkIdType chunkId) override;
// Gets sequence description by its key.
virtual void GetSequenceDescriptionByKey(const KeyType&, SequenceDescription&) override;
virtual bool GetSequenceDescriptionByKey(const KeyType&, SequenceDescription&) override;
private:
class HTKChunk;
@ -48,7 +48,7 @@ private:
void InitializeAugmentationWindow(ConfigHelper& config);
// Gets sequence by its chunk id and id inside the chunk.
void GetSequenceById(size_t chunkId, size_t id, std::vector<SequenceDataPtr>&);
void GetSequenceById(ChunkIdType chunkId, size_t id, std::vector<SequenceDataPtr>&);
// Dimension of features.
size_t m_dimension;

View file

@ -21,7 +21,7 @@
<PropertyGroup Label="Globals">
<ProjectGuid>{7B7A51ED-AA8E-4660-A805-D50235A02120}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>ExperimentalHTKMLFReader</RootNamespace>
<RootNamespace>HTKDeserializers</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<Import Project="$(SolutionDir)\CNTK.Cpp.props" />

View file

@ -184,7 +184,7 @@ void HTKMLFReader::StartEpoch(const EpochConfiguration& config)
// Old config, the truncation length is specified as the minibatch size.
// In this case the truncation size is mbSize
// and the real minibatch size is truncation size * nbruttsineachrecurrentiter
fprintf(stderr, "Legacy configuration is used for truncated BPTT mode, please adapt the config to explicitly specify truncationLength.");
fprintf(stderr, "Legacy configuration is used for truncated BPTT mode, please adapt the config to explicitly specify truncationLength.\n");
truncationLength = minibatchSize;
size_t numParallelSequences = m_numParallelSequencesForAllEpochs[config.m_epochIndex];
minibatchSize = numParallelSequences * truncationLength;

View file

@ -121,7 +121,6 @@ void MLFDataDeserializer::InitializeChunkDescriptions(CorpusDescriptorPtr corpus
m_elementType = config.GetElementType();
MLFUtterance description;
description.m_isValid = true;
size_t numClasses = 0;
size_t totalFrames = 0;
@ -134,15 +133,14 @@ void MLFDataDeserializer::InitializeChunkDescriptions(CorpusDescriptorPtr corpus
// Currently the string registry contains only utterances described in scp.
// So here we skip all others.
size_t id = 0;
if (!stringRegistry.TryGet(l.first, id))
if (!stringRegistry.TryGet(msra::strfun::utf8(l.first), id))
continue;
description.m_key.m_sequence = id;
const auto& utterance = l.second;
description.m_sequenceStart = m_classIds.size();
description.m_isValid = true;
size_t numberOfFrames = 0;
uint32_t numberOfFrames = 0;
foreach_index(i, utterance)
{
@ -163,6 +161,11 @@ void MLFDataDeserializer::InitializeChunkDescriptions(CorpusDescriptorPtr corpus
RuntimeError("CLASSIDTYPE has too few bits");
}
if (SEQUENCELEN_MAX < timespan.firstframe + timespan.numframes)
{
RuntimeError("Maximum number of samples per sequence exceeded.");
}
numClasses = max(numClasses, (size_t)(1u + timespan.classid));
for (size_t t = timespan.firstframe; t < timespan.firstframe + timespan.numframes; t++)
@ -245,13 +248,13 @@ ChunkDescriptions MLFDataDeserializer::GetChunkDescriptions()
}
// Gets sequences for a particular chunk.
void MLFDataDeserializer::GetSequencesForChunk(size_t, vector<SequenceDescription>& result)
void MLFDataDeserializer::GetSequencesForChunk(ChunkIdType, vector<SequenceDescription>& result)
{
UNUSED(result);
LogicError("Mlf deserializer does not support primary mode - it cannot control chunking.");
}
ChunkPtr MLFDataDeserializer::GetChunk(size_t chunkId)
ChunkPtr MLFDataDeserializer::GetChunk(ChunkIdType chunkId)
{
UNUSED(chunkId);
assert(chunkId == 0);
@ -277,7 +280,7 @@ struct MLFSequenceData : SparseSequenceData
}
m_nnzCounts.resize(numberOfSamples, static_cast<IndexType>(1));
m_numberOfSamples = numberOfSamples;
m_numberOfSamples = (uint32_t) numberOfSamples;
m_totalNnzCount = static_cast<IndexType>(numberOfSamples);
m_indices = m_indicesPtr.get();
m_data = m_values.data();
@ -318,22 +321,18 @@ void MLFDataDeserializer::GetSequenceById(size_t sequenceId, vector<SequenceData
}
}
static SequenceDescription s_InvalidSequence { 0, 0, 0, false, { 0, 0 } };
void MLFDataDeserializer::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& result)
bool MLFDataDeserializer::GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& result)
{
auto sequenceId = key.m_sequence < m_keyToSequence.size() ? m_keyToSequence[key.m_sequence] : SIZE_MAX;
if (sequenceId == SIZE_MAX)
{
result = s_InvalidSequence;
return;
return false;
}
result.m_chunkId = 0;
result.m_key = key;
result.m_isValid = true;
if (m_frameMode)
{
@ -345,8 +344,9 @@ void MLFDataDeserializer::GetSequenceDescriptionByKey(const KeyType& key, Sequen
{
assert(result.m_key.m_sample == 0);
result.m_id = sequenceId;
result.m_numberOfSamples = m_utteranceIndex[sequenceId + 1] - m_utteranceIndex[sequenceId];
result.m_numberOfSamples = (uint32_t) (m_utteranceIndex[sequenceId + 1] - m_utteranceIndex[sequenceId]);
}
return true;
}
}}}

View file

@ -24,18 +24,18 @@ public:
MLFDataDeserializer(CorpusDescriptorPtr corpus, const ConfigParameters& config, const std::wstring& streamName);
// Retrieves sequence description by its key. Used for deserializers that are not in "primary"/"driving" mode.
void GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& s) override;
bool GetSequenceDescriptionByKey(const KeyType& key, SequenceDescription& s) override;
// Gets description of all chunks.
virtual ChunkDescriptions GetChunkDescriptions() override;
// Get sequence descriptions of a particular chunk.
virtual void GetSequencesForChunk(size_t chunkId, std::vector<SequenceDescription>& s) override;
virtual void GetSequencesForChunk(ChunkIdType chunkId, std::vector<SequenceDescription>& s) override;
// Retrieves a chunk with data.
// TODO: Currently it is a single chunk => all labels are loaded into memory.
// TODO: After we switch the timeline to work in chunks, we will also introduce chunking of labels.
virtual ChunkPtr GetChunk(size_t) override;
virtual ChunkPtr GetChunk(ChunkIdType) override;
private:
class MLFChunk;

View file

@ -0,0 +1,53 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
#pragma once
#include "DataDeserializer.h"
#include "../HTKMLFReader/htkfeatio.h"
namespace Microsoft { namespace MSR { namespace CNTK {
// This class represents a descriptor for a single utterance.
// It is only used internally by the HTK deserializer.
class UtteranceDescription
{
// Archive filename and frame range in that file.
msra::asr::htkfeatreader::parsedpath m_path;
// Utterance id.
size_t m_id;
public:
UtteranceDescription(msra::asr::htkfeatreader::parsedpath&& path)
: m_path(std::move(path))
{
}
const msra::asr::htkfeatreader::parsedpath& GetPath() const
{
return m_path;
}
void ClearLogicalPath()
{
m_path.ClearLogicalPath();
}
size_t GetNumberOfFrames() const
{
return m_path.numframes();
}
string GetKey() const
{
return m_path.GetLogicalPath();
}
size_t GetId() const { return m_id; }
void SetId(size_t id) { m_id = id; }
};
}}}

View file

@ -3,7 +3,7 @@
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// stdafx.cpp : source file that includes just the standard includes
// ExperimentalHTKMLFReader.pch will be the pre-compiled header
// HTKMLFDeserializer.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
//

Some files were not shown because too many files have changed in this diff.