Merge branch 'master' into qiwye/asgd-dev

Conflicts:
	Source/CNTK/CNTK.cpp
	Source/SGDLib/SGD.cpp
	Source/SGDLib/SGD.h
This commit is contained in:
Qiwei Ye 2016-06-15 20:22:12 +08:00
Родитель 88883aebfa fee99ca12b
Коммит d17ff5d9eb
541 изменённых файлов: 840224 добавлений и 170162 удалений

4
.gitattributes поставляемый
Просмотреть файл

@ -40,6 +40,10 @@ run-test-common text eol=lf
run-timit-test-common text eol=lf
make_binary_drop_linux text eol=lf
# Used from Unix / Cygwin 'md5sum -c', needs to have LF line endings:
Tests/EndToEndTests/Examples/Speech/TIMIT/WriteBottleneck/expected_output_md5sum.*.txt eol=lf
Tests/EndToEndTests/Examples/Speech/TIMIT/WriteScaledLogLike/expected_output_md5sum.*.txt eol=lf
Makefile text
*.sln text
*.vcxproj text

8
.gitignore поставляемый
Просмотреть файл

@ -152,7 +152,9 @@ ModelManifest.xml
# Python
*.pyc
__pychache__/
__pycache__/
contrib/Python/doc/_build/*
contrib/Python/_cntk_default/*
# =========================
# Windows detritus
@ -188,3 +190,7 @@ Source/CNTK/buildinfo.h$$
# Unit test output
Tests/UnitTests/ReaderTests/Control/**/*_Output.txt
Tests/UnitTests/NetworkTests/Output/
Dependencies/CNTKCustomMKL/Publish
Dependencies/CNTKCustomMKL/CNTKCustomMKL-Linux-*.tgz
Dependencies/CNTKCustomMKL/CNTKCustomMKL-Windows-*.zip

Просмотреть файл

@ -29,6 +29,47 @@
</PropertyGroup>
<Choose>
<When Condition="Exists('$(ACML_PATH)')">
<PropertyGroup>
<MathLibraryName>ACML</MathLibraryName>
<MathIncludePath>$(ACML_PATH)\include</MathIncludePath>
<MathLibraryPath>$(ACML_PATH)\lib</MathLibraryPath>
<MathLinkLibrary>libacml_mp_dll.lib</MathLinkLibrary>
<MathDelayLoad>libacml_mp_dll.dll</MathDelayLoad>
<MathPostBuildCopyPattern>$(ACML_PATH)\lib\*.dll</MathPostBuildCopyPattern>
<UnitTestDlls>$(OutDir)libacml_mp_dll.dll;$(OutDir)libifcoremd.dll;$(OutDir)libifportmd.dll;$(OutDir)libiomp*.dll;$(OutDir)libmmd.dll;$(OutDir)svml_dispmd.dll;</UnitTestDlls>
<MathDefine>USE_ACML</MathDefine>
</PropertyGroup>
</When>
<!-- See https://github.com/Microsoft/CNTK/wiki/Setup-CNTK-on-Windows#optional-mkl on how to configure to build CNTK with MKL -->
<When Condition="'$(CNTK_MKL)' == '1'">
<PropertyGroup>
<CNTKCustomMKLVersion>1</CNTKCustomMKLVersion>
<CNTKCustomMKLPath>$(CNTK_MKL_PATH)\$(CNTKCustomMKLVersion)</CNTKCustomMKLPath>
<MathIncludePath>$(CNTKCustomMKLPath)\include</MathIncludePath>
<MathDefine>USE_MKL</MathDefine>
</PropertyGroup>
<PropertyGroup Condition="'$(CNTK_MKL_SEQUENTIAL)' != '1'">
<MathLibraryName>CNTK custom MKL Parallel (Version: $(CNTKCustomMKLVersion))</MathLibraryName>
<MathLibraryPath>$(CNTKCustomMKLPath)\x64\parallel</MathLibraryPath>
<MathLinkLibrary>mkl_cntk_p.lib</MathLinkLibrary>
<MathDelayLoad>mkl_cntk_p.dll</MathDelayLoad>
<MathPostBuildCopyPattern>$(MathLibraryPath)\*.dll</MathPostBuildCopyPattern>
<UnitTestDlls>$(OutDir)mkl_cntk_p.dll;$(OutDir)libiomp5md.dll;</UnitTestDlls>
</PropertyGroup>
<PropertyGroup Condition="'$(CNTK_MKL_SEQUENTIAL)' == '1'">
<MathLibraryName>CNTK custom MKL Sequential (Version: $(CNTKCustomMKLVersion))</MathLibraryName>
<MathLibraryPath>$(CNTKCustomMKLPath)\x64\sequential</MathLibraryPath>
<MathLinkLibrary>mkl_cntk_s.lib</MathLinkLibrary>
<MathDelayLoad>mkl_cntk_s.dll</MathDelayLoad>
<MathPostBuildCopyPattern>$(MathLibraryPath)\*.dll</MathPostBuildCopyPattern>
<UnitTestDlls>$(OutDir)mkl_cntk_s.dll;</UnitTestDlls>
</PropertyGroup>
</When>
</Choose>
<PropertyGroup Condition="'$(CudaVersion)' == '7.5'">
<CudaPath>$(CUDA_PATH_V7_5)</CudaPath>
<CudaRuntimeDll>cudart64_75.dll</CudaRuntimeDll>

252
CNTK.sln
Просмотреть файл

@ -121,6 +121,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{5F733BBA-F
Tests\EndToEndTests\Speech\Data\glob_0000.mlf = Tests\EndToEndTests\Speech\Data\glob_0000.mlf
Tests\EndToEndTests\Speech\Data\glob_0000.scp = Tests\EndToEndTests\Speech\Data\glob_0000.scp
Tests\EndToEndTests\Speech\Data\README.txt = Tests\EndToEndTests\Speech\Data\README.txt
Tests\EndToEndTests\Speech\Data\SimpleDataTest_cntk_text.txt = Tests\EndToEndTests\Speech\Data\SimpleDataTest_cntk_text.txt
Tests\EndToEndTests\Speech\Data\SimpleDataTrain_cntk_text.txt = Tests\EndToEndTests\Speech\Data\SimpleDataTrain_cntk_text.txt
Tests\EndToEndTests\Speech\Data\state.list = Tests\EndToEndTests\Speech\Data\state.list
EndProjectSection
EndProject
@ -142,8 +144,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelTraining", "Paralle
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{6D1353D6-F196-466F-B886-F16D48759B20}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain.txt
Tests\EndToEndTests\ParallelTraining\Data\SimpleMapping.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleMapping.txt
Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain_cntk_text.txt = Tests\EndToEndTests\ParallelTraining\Data\SimpleDataTrain_cntk_text.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NoQuantization", "NoQuantization", "{B6725C9F-A6D2-4269-9B74-7888A90F7884}"
@ -297,22 +298,16 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Image", "Image", "{8071EF60
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{76F9323D-34A1-43A5-A594-C4798931FF21}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Image\Data\labelsmap.txt = Tests\EndToEndTests\Image\Data\labelsmap.txt
Tests\EndToEndTests\Image\Data\Test.txt = Tests\EndToEndTests\Image\Data\Test.txt
Tests\EndToEndTests\Image\Data\Train.txt = Tests\EndToEndTests\Image\Data\Train.txt
Tests\EndToEndTests\Image\Data\Test_cntk_text.txt = Tests\EndToEndTests\Image\Data\Test_cntk_text.txt
Tests\EndToEndTests\Image\Data\Train_cntk_text.txt = Tests\EndToEndTests\Image\Data\Train_cntk_text.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{2A884EB5-037C-481E-8170-BCDC8B3EDD93}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.linux.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.linux.txt
Tests\EndToEndTests\Image\QuickE2E\baseline.windows.txt = Tests\EndToEndTests\Image\QuickE2E\baseline.windows.txt
Tests\EndToEndTests\Image\QuickE2E\cntk.cntk = Tests\EndToEndTests\Image\QuickE2E\cntk.cntk
Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl = Tests\EndToEndTests\Image\QuickE2E\Convolution.ndl
Tests\EndToEndTests\Image\QuickE2E\Macros.ndl = Tests\EndToEndTests\Image\QuickE2E\Macros.ndl
Tests\EndToEndTests\Image\QuickE2E\README.txt = Tests\EndToEndTests\Image\QuickE2E\README.txt
Tests\EndToEndTests\Image\QuickE2E\run-test = Tests\EndToEndTests\Image\QuickE2E\run-test
Tests\EndToEndTests\Image\QuickE2E\testcases.yml = Tests\EndToEndTests\Image\QuickE2E\testcases.yml
EndProjectSection
@ -330,11 +325,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SVD", "SVD", "{669B6203-967
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{81AE014F-DD63-47C7-B6E2-DB1D2833DCD1}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\Simple\baseline.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.gpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\Simple\cntk.cntk = Tests\EndToEndTests\Speech\Simple\cntk.cntk
Tests\EndToEndTests\Speech\Simple\README.txt = Tests\EndToEndTests\Speech\Simple\README.txt
Tests\EndToEndTests\Speech\Simple\run-test = Tests\EndToEndTests\Speech\Simple\run-test
Tests\EndToEndTests\Speech\Simple\testcases.yml = Tests\EndToEndTests\Speech\Simple\testcases.yml
EndProjectSection
@ -354,6 +350,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ReaderTests", "Tests\UnitTe
ProjectSection(ProjectDependencies) = postProject
{9BD0A711-0BBD-45B6-B81C-053F03C26CFB} = {9BD0A711-0BBD-45B6-B81C-053F03C26CFB}
{33D2FD22-DEF2-4507-A58A-368F641AEBE5} = {33D2FD22-DEF2-4507-A58A-368F641AEBE5}
{7B7A563D-AA8E-4660-A805-D50235A02120} = {7B7A563D-AA8E-4660-A805-D50235A02120}
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
{91973E60-A7BE-4C86-8FDB-59C88A0B3715} = {91973E60-A7BE-4C86-8FDB-59C88A0B3715}
@ -366,6 +363,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalDll", "Source\EvalDll\E
{928ABD1B-4D3B-4017-AEF1-0FA1B4467513} = {928ABD1B-4D3B-4017-AEF1-0FA1B4467513}
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{EB2BE26F-6BD4-4274-971F-86D080779DD1} = {EB2BE26F-6BD4-4274-971F-86D080779DD1}
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
{EAD17188-072C-4726-B840-A769C36DAD1B} = {EAD17188-072C-4726-B840-A769C36DAD1B}
EndProjectSection
EndProject
@ -425,12 +423,6 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "UCIFastReader", "Source\Rea
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalTest", "Tests\UnitTests\EvalTest\EvalTest.vcxproj", "{731312A8-6DA3-4841-AFCD-57520BA1BF8E}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MathPerformanceTests", "Tests\UnitTests\MathPerformanceTests\MathPerformanceTests.vcxproj", "{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
@ -710,13 +702,14 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{439BE0E0-F
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MNIST", "MNIST", "{63C6816D-66BF-487E-B541-094142C8272B}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\run-test-common = Tests\EndToEndTests\Examples\Image\MNIST\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_Convolution", "02_Convolution", "{6F1D0CE1-0F18-4B4C-9581-1F2146C8D300}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\run-test = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\run-test
Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\02_Convolution\testcases.yml
EndProjectSection
@ -725,6 +718,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_OneHidden", "01_OneHidde
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\run-test = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\run-test
Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\01_OneHidden\testcases.yml
EndProjectSection
@ -733,6 +727,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "03_ConvBatchNorm", "03_Conv
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\README.txt = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\README.txt
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\run-test = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\run-test
Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml = Tests\EndToEndTests\Examples\Image\MNIST\03_ConvBatchNorm\testcases.yml
EndProjectSection
@ -743,28 +738,22 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple2d", "Simple2d", "{D4
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "MultiGpu", "MultiGpu", "{C86A6572-DE7A-4EBB-ADD0-A6C4906D46A3}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\baseline.windows.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\README.txt = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\README.txt
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\run-test = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\run-test
Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\testcases.yml = Tests\EndToEndTests\Examples\Other\Simple2d\MultiGpu\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{E330CA6B-5954-4EBA-9C64-6058494E338A}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.debug.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.release.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\README.txt = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\README.txt
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\run-test = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\run-test
Tests\EndToEndTests\Examples\Other\Simple2d\Simple\testcases.yml = Tests\EndToEndTests\Examples\Other\Simple2d\Simple\testcases.yml
EndProjectSection
@ -817,7 +806,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CNTKTextFormatReader", "Sou
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ExperimentalHTKMLFReader", "Source\Readers\ExperimentalHTKMLFReader\ExperimentalHTKMLFReader.vcxproj", "{7B7A51ED-AA8E-4660-A805-D50235A02120}"
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "HTKDeserializers", "Source\Readers\HTKDeserializers\HTKDeserializers.vcxproj", "{7B7A51ED-AA8E-4660-A805-D50235A02120}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
@ -871,6 +860,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "NetworkTests", "Tests\UnitT
{928ABD1B-4D3B-4017-AEF1-0FA1B4467513} = {928ABD1B-4D3B-4017-AEF1-0FA1B4467513}
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
{91973E60-A7BE-4C86-8FDB-59C88A0B3715} = {91973E60-A7BE-4C86-8FDB-59C88A0B3715}
{EB2BE26F-6BD4-4274-971F-86D080779DD1} = {EB2BE26F-6BD4-4274-971F-86D080779DD1}
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
{EAD17188-072C-4726-B840-A769C36DAD1B} = {EAD17188-072C-4726-B840-A769C36DAD1B}
@ -891,11 +881,15 @@ EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{8629430A-821E-43BA-AEC5-8B2CF31A2A7A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CIFAR-10", "CIFAR-10", "{0141526B-F257-4574-8CBE-99634726FFCE}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\run-test-common = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\run-test-common
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "01_Convolution", "01_Convolution", "{58286327-6742-44C4-A34E-D2583419E55E}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\01_Convolution\testcases.yml
EndProjectSection
@ -904,6 +898,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "02_BatchNormConv", "02_Batc
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.linux.gpu.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\02_BatchNormConv\testcases.yml
EndProjectSection
@ -976,49 +971,6 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CompositeDataReader", "Sour
{F0A9637C-20DA-42F0-83D4-23B4704DE602} = {F0A9637C-20DA-42F0-83D4-23B4704DE602}
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CNTKTextFormatReader", "CNTKTextFormatReader", "{B1110F99-A307-4745-B464-7FD75951645A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelTraining", "ParallelTraining", "{369656B8-DDFD-412E-901B-DFEBCC31ABE0}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\SimpleMultiGPU.cntk = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\SimpleMultiGPU.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Speech", "Speech", "{CCEFD1F9-E843-43E0-B127-EF73EF90582D}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "NoQuantization", "NoQuantization", "{751BEA27-2187-4BE5-82E7-A3668CFCE7A9}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DoublePrecision", "DoublePrecision", "{4084DC9D-0A53-4029-9C86-92AF243C2E09}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\run-test = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\run-test
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\DoublePrecision\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SinglePrecision", "SinglePrecision", "{ACB91DED-FB23-4FF0-A1A1-EBE56B783EFC}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\run-test = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\run-test
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\NoQuantization\SinglePrecision\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Simple", "Simple", "{BCA65A0C-D93B-4F90-81B1-73048DE04DF1}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.linux.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.cpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.cpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.gpu.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\baseline.windows.gpu.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\Speech_Simple.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\Speech_Simple.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Speech\Simple\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SparseDSSM", "SparseDSSM", "{1FB54750-B668-4AC3-966F-ED504020AC06}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Text\SparseDSSM\baseline.cpu.txt = Tests\EndToEndTests\Text\SparseDSSM\baseline.cpu.txt
@ -1031,23 +983,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SparseDSSM", "SparseDSSM",
Tests\EndToEndTests\Text\SparseDSSM\testcases.yml = Tests\EndToEndTests\Text\SparseDSSM\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Image", "Image", "{59980D6E-1732-4809-B17C-6EF4B4F5CF8B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "QuickE2E", "QuickE2E", "{6FB9C411-C19A-403A-94C2-F7DF393F7612}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.linux.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.linux.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.windows.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\baseline.windows.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\Image_QuickE2E.cntk = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\Image_QuickE2E.cntk
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\README.txt = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\README.txt
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\run-test = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\run-test
Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\testcases.yml = Tests\EndToEndTests\CNTKTextFormatReader\Image\QuickE2E\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{C30742B5-4DBE-4D80-B429-901856E4043D}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\Data\SimpleDataTrain.txt = Tests\EndToEndTests\CNTKTextFormatReader\ParallelTraining\Data\SimpleDataTrain.txt
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "WriteCommand", "WriteCommand", "{3E9BD61F-1F0A-4966-BE17-803AEFD1DFA4}"
ProjectSection(SolutionItems) = preProject
tests\endtoendtests\Speech\DNN\WriteCommand\baseline.cpu.txt = tests\endtoendtests\Speech\DNN\WriteCommand\baseline.cpu.txt
@ -1116,17 +1051,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ParallelBM", "ParallelBM",
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SequenceToSequence", "SequenceToSequence", "{A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Miscellaneous", "Miscellaneous", "{85A05261-41D0-41DF-80B5-ADB6ABB54632}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "G2P", "G2P", "{4AD12278-9705-4BBA-B2C3-D6D5856AADC3}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\Miscellaneous\G2P\G2P.cntk = Examples\SequenceToSequence\Miscellaneous\G2P\G2P.cntk
Examples\SequenceToSequence\Miscellaneous\G2P\README.txt = Examples\SequenceToSequence\Miscellaneous\G2P\README.txt
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CPPEvalClient", "Source\Extensibility\CPPEvalClient\CPPEvalClient.vcxproj", "{578D52A0-3928-4405-A016-F016E8B49031}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "ExperimentalHtkmlfReader", "ExperimentalHtkmlfReader", "{977ECCB7-598D-4548-B95B-BACA9CC7D98B}"
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "HtkDeserializers", "HtkDeserializers", "{977ECCB7-598D-4548-B95B-BACA9CC7D98B}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DNN", "DNN", "{1DBB2575-F5C8-43F4-B982-D05D6ADC2F9B}"
EndProject
@ -1138,17 +1065,69 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SVD", "SVD", "{BA6A65C5-92A
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "FullUtterance", "FullUtterance", "{3BDF52CD-7F3C-42BC-AB78-CF5BBC5F4AB4}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.cpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.cpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.gpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.gpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\cntk.cntk = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\cntk.cntk
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\run-test = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\run-test
Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\testcases.yml = Tests\EndToEndTests\Speech\ExperimentalHtkmlfReader\LSTM\FullUtterance\testcases.yml
Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.cpu.txt = Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.cpu.txt
Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.gpu.txt = Tests\EndToEndTests\Speech\HtkDeserializers\LSTM\FullUtterance\baseline.gpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.cpu.txt = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.cpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.gpu.txt = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\baseline.windows.gpu.txt
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\cntk.cntk = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\cntk.cntk
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\run-test = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\run-test
Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\testcases.yml = Tests\EndToEndTests\Speech\HTKDeserializers\LSTM\FullUtterance\testcases.yml
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Truncated", "Truncated", "{1141DC61-E014-4DEC-9157-F6B1FC055C7A}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CMUDict", "CMUDict", "{EC7298E3-AAA9-4672-941F-0B342C494CB3}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\README.md = Examples\SequenceToSequence\CMUDict\README.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{ECED747C-86D7-4009-B2A9-0525FE5DF4EB}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\Config\G2P.cntk = Examples\SequenceToSequence\CMUDict\Config\G2P.cntk
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Data", "Data", "{25E144C1-0B7C-4CD4-811A-2E9F4943120D}"
ProjectSection(SolutionItems) = preProject
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.test.txt
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21 = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-1-21.txt
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21 = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21
Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21.txt = Examples\SequenceToSequence\CMUDict\Data\cmudict-0.7b.train-dev-20-21.txt
Examples\SequenceToSequence\CMUDict\Data\README.txt = Examples\SequenceToSequence\CMUDict\Data\README.txt
Examples\SequenceToSequence\CMUDict\Data\ThirdPartyNotice.md = Examples\SequenceToSequence\CMUDict\Data\ThirdPartyNotice.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "05_ConvLocal", "05_ConvLocal", "{3F77BF79-E0D3-4D60-8685-5A449F164081}"
ProjectSection(SolutionItems) = preProject
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.linux.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\baseline.windows.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\README.txt
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\run-test
Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml = Tests\EndToEndTests\Examples\Image\Miscellaneous\CIFAR-10\05_ConvLocal\testcases.yml
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "EvalTests", "Tests\UnitTests\EvalTests\EvalTests.vcxproj", "{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
{482999D1-B7E2-466E-9F8D-2119F93EAFD9} = {482999D1-B7E2-466E-9F8D-2119F93EAFD9}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "CommandEval", "Tests\UnitTests\CommandEval\CommandEval.vcxproj", "{731312A8-6DA3-4841-AFCD-57520BA1BF8E}"
ProjectSection(ProjectDependencies) = postProject
{60BDB847-D0C4-4FD3-A947-0C15C08BCDB5} = {60BDB847-D0C4-4FD3-A947-0C15C08BCDB5}
{86883653-8A61-4038-81A0-2379FAE4200A} = {86883653-8A61-4038-81A0-2379FAE4200A}
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scripts", "Scripts", "{68263A2F-1D5F-4C46-B5AF-2304B80FC3D4}"
ProjectSection(SolutionItems) = preProject
Scripts\pytest.ini = Scripts\pytest.ini
Scripts\txt2ctf.py = Scripts\txt2ctf.py
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug_CpuOnly|x64 = Debug_CpuOnly|x64
@ -1299,14 +1278,6 @@ Global
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release|x64.ActiveCfg = Release|x64
{E6646FFE-3588-4276-8A15-8D65C22711C1}.Release|x64.Build.0 = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.ActiveCfg = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.Build.0 = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.ActiveCfg = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.ActiveCfg = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.Build.0 = Release|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976}.Debug|x64.ActiveCfg = Debug|x64
@ -1395,6 +1366,22 @@ Global
{578D52A0-3928-4405-A016-F016E8B49031}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{578D52A0-3928-4405-A016-F016E8B49031}.Release|x64.ActiveCfg = Release|x64
{578D52A0-3928-4405-A016-F016E8B49031}.Release|x64.Build.0 = Release|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Debug|x64.ActiveCfg = Debug|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Debug|x64.Build.0 = Debug|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Release_CpuOnly|x64.ActiveCfg = Release_CpuOnly|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Release|x64.ActiveCfg = Release|x64
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7}.Release|x64.Build.0 = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.ActiveCfg = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug_CpuOnly|x64.Build.0 = Debug_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.ActiveCfg = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Debug|x64.Build.0 = Debug|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.ActiveCfg = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release_CpuOnly|x64.Build.0 = Release_CpuOnly|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.ActiveCfg = Release|x64
{731312A8-6DA3-4841-AFCD-57520BA1BF8E}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@ -1451,7 +1438,6 @@ Global
{62836DC1-DF77-4B98-BF2D-45C943B7DDC6} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{CE429AA2-3778-4619-8FD1-49BA3B81197B} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{E6646FFE-3588-4276-8A15-8D65C22711C1} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{731312A8-6DA3-4841-AFCD-57520BA1BF8E} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{668BEED5-AC07-4F35-B3AE-EE65A7F9C976} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{6E565B48-1923-49CE-9787-9BBB9D96F4C5} = {D45DF403-6781-444E-B654-A96868C5BE68}
{3BF59CCE-D245-420A-9F17-73CE61E284C2} = {6E565B48-1923-49CE-9787-9BBB9D96F4C5}
@ -1528,17 +1514,7 @@ Global
{181664AC-4C95-4798-A923-09B879215B33} = {8656B71D-E24C-4AC2-8BE4-C07B415A3E15}
{86883653-8A61-4038-81A0-2379FAE4200A} = {DD043083-71A4-409A-AA91-F9C548DCF7EC}
{7B7A563D-AA8E-4660-A805-D50235A02120} = {33EBFE78-A1A8-4961-8938-92A271941F94}
{B1110F99-A307-4745-B464-7FD75951645A} = {6E565B48-1923-49CE-9787-9BBB9D96F4C5}
{369656B8-DDFD-412E-901B-DFEBCC31ABE0} = {B1110F99-A307-4745-B464-7FD75951645A}
{CCEFD1F9-E843-43E0-B127-EF73EF90582D} = {B1110F99-A307-4745-B464-7FD75951645A}
{751BEA27-2187-4BE5-82E7-A3668CFCE7A9} = {369656B8-DDFD-412E-901B-DFEBCC31ABE0}
{4084DC9D-0A53-4029-9C86-92AF243C2E09} = {751BEA27-2187-4BE5-82E7-A3668CFCE7A9}
{ACB91DED-FB23-4FF0-A1A1-EBE56B783EFC} = {751BEA27-2187-4BE5-82E7-A3668CFCE7A9}
{BCA65A0C-D93B-4F90-81B1-73048DE04DF1} = {CCEFD1F9-E843-43E0-B127-EF73EF90582D}
{1FB54750-B668-4AC3-966F-ED504020AC06} = {8656B71D-E24C-4AC2-8BE4-C07B415A3E15}
{59980D6E-1732-4809-B17C-6EF4B4F5CF8B} = {B1110F99-A307-4745-B464-7FD75951645A}
{6FB9C411-C19A-403A-94C2-F7DF393F7612} = {59980D6E-1732-4809-B17C-6EF4B4F5CF8B}
{C30742B5-4DBE-4D80-B429-901856E4043D} = {369656B8-DDFD-412E-901B-DFEBCC31ABE0}
{3E9BD61F-1F0A-4966-BE17-803AEFD1DFA4} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{5560DDD4-1E6E-4F41-B9BD-F52A19DF0B31} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{9834E864-A8CD-4D28-A3C9-F79FE0F421AE} = {6994C86D-A672-4254-824A-51F4DFEB807F}
@ -1546,8 +1522,6 @@ Global
{4D6F731C-4A6D-4E21-AC3C-9E1F26E5547E} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{36C42845-0D48-4A46-9C67-2B593A80A09C} = {6994C86D-A672-4254-824A-51F4DFEB807F}
{A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26} = {47755F2E-D674-4175-9E38-8EA053455072}
{85A05261-41D0-41DF-80B5-ADB6ABB54632} = {A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}
{4AD12278-9705-4BBA-B2C3-D6D5856AADC3} = {85A05261-41D0-41DF-80B5-ADB6ABB54632}
{578D52A0-3928-4405-A016-F016E8B49031} = {60F87E25-BC87-4782-8E20-1621AAEBB113}
{977ECCB7-598D-4548-B95B-BACA9CC7D98B} = {C47CDAA5-6D6C-429E-BC89-7CA0F868FDC8}
{1DBB2575-F5C8-43F4-B982-D05D6ADC2F9B} = {977ECCB7-598D-4548-B95B-BACA9CC7D98B}
@ -1556,5 +1530,11 @@ Global
{BA6A65C5-92A2-4040-ADC3-0727A45694F6} = {977ECCB7-598D-4548-B95B-BACA9CC7D98B}
{3BDF52CD-7F3C-42BC-AB78-CF5BBC5F4AB4} = {772A0DB3-4710-4281-8AA9-A9F1F7C543D3}
{1141DC61-E014-4DEC-9157-F6B1FC055C7A} = {772A0DB3-4710-4281-8AA9-A9F1F7C543D3}
{EC7298E3-AAA9-4672-941F-0B342C494CB3} = {A1521DC4-C8EC-47BD-9E63-7BE30ED2EC26}
{ECED747C-86D7-4009-B2A9-0525FE5DF4EB} = {EC7298E3-AAA9-4672-941F-0B342C494CB3}
{25E144C1-0B7C-4CD4-811A-2E9F4943120D} = {EC7298E3-AAA9-4672-941F-0B342C494CB3}
{3F77BF79-E0D3-4D60-8685-5A449F164081} = {0141526B-F257-4574-8CBE-99634726FFCE}
{82125DA1-1CD7-45B5-9281-E6AE7C287CB7} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
{731312A8-6DA3-4841-AFCD-57520BA1BF8E} = {6F19321A-65E7-4829-B00C-3886CD6C6EDE}
EndGlobalSection
EndGlobal

8
Dependencies/CNTKCustomMKL/README-for-redistributable.txt поставляемый Normal file
Просмотреть файл

@ -0,0 +1,8 @@
This archive contains header files as well as redistributable components of
the Intel (r) Math Kernel Library (Intel (r) MKL).
Included is also a custom library created out of Intel (r) Math Kernel Library
(Intel (r) MKL) required for building the Microsoft Computational Network
Toolkit (CNTK). More details on CNTK can be found here: http://www.cntk.ai
Please see LICENSE.md for full license information.

23
Dependencies/CNTKCustomMKL/README.md поставляемый Normal file
Просмотреть файл

@ -0,0 +1,23 @@
# CNTK custom MKL
This directory contains the necessary files to create a custom Intel® Math Kernel Library (MKL)
for usage by CNTK ("CNTK custom MKL" for short).
By default, a CNTK binary with Intel® MKL support includes a prebuilt CNTK
custom MKL.
If you want to build CNTK with Intel® MKL support yourself, you can install a
prebuilt CNTK custom MKL, available for download from the [CNTK web site](https://www.cntk.ai/mkl).
See [CNTK's setup instructions](https://github.com/Microsoft/CNTK/wiki/Setup-CNTK-on-your-machine)
for more details.
If you want to add new Intel® MKL functions to be used by CNTK you will have to
build your own CNTK custom MKL.
This requires you to install the [Intel MKL SDK](https://software.intel.com/en-us/intel-mkl/) for your platform.
Then, in this directory,
* extend the file `headers.txt` to expose new headers,
* extend the file `functions.txt` to expose new functions, and
* use `build-linux.sh` or `build-windows.cmd` to build for your platform.
For further documentation please see the Developer Guide for the Intel® MKL, in particular
[Building Custom Shared Objects (Linux)](https://software.intel.com/en-us/node/528533) and
[Building Custom Dynamic-link Libraries (Windows)](https://software.intel.com/en-us/node/528362).

46
Dependencies/CNTKCustomMKL/build-linux.sh поставляемый Executable file
Просмотреть файл

@ -0,0 +1,46 @@
#!/bin/bash
#
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
#
# Builds the CNTK custom MKL shared objects (parallel and sequential threading
# variants) with the Intel MKL builder tool, stages the exposed headers and
# license files under Publish/<version>/, and packs everything into
# CNTKCustomMKL-Linux-<version>.tgz.
#
# Stop on error, trace commands
set -e -x
# Enter directory the script is located in
cd "$( dirname "${BASH_SOURCE[0]}" )"
# Location of the Intel MKL SDK. May be overridden from the environment;
# defaults to the known MKL 2016.2 install path (resolves earlier TODO).
: "${MKLROOT:=/opt/intel/compilers_and_libraries_2016.2.181/linux/mkl}"
MKLBUILDERROOT=$MKLROOT/tools/builder
# Fail early with a clear message if the SDK is not where we expect it.
if [ ! -d "$MKLROOT" ]; then
  echo "Error: MKL directory does not exist: $MKLROOT" >&2
  exit 1
fi
# The published package is versioned by the contents of version.txt.
CNTKCUSTOMMKLVERSION=$(cat version.txt)
# Start from a clean publish tree.
rm -rf Publish
mkdir Publish{,/$CNTKCUSTOMMKLVERSION{,/x64}}
# Build one custom shared object per threading model:
# libmkl_cntk_p (parallel) and libmkl_cntk_s (sequential).
for THREADING in parallel sequential
do
  LIBBASENAME=libmkl_cntk_$(echo $THREADING | cut -c 1)
  make -f $MKLBUILDERROOT/makefile libintel64 \
    export=functions.txt \
    threading=$THREADING \
    name=$LIBBASENAME \
    MKLROOT=$MKLROOT
  mkdir Publish/$CNTKCUSTOMMKLVERSION/x64/$THREADING
  mv $LIBBASENAME.so Publish/$CNTKCUSTOMMKLVERSION/x64/$THREADING
done
# The OpenMP runtime is only needed by the parallel variant.
cp -p $MKLROOT/../compiler/lib/intel64_lin/libiomp5.so Publish/$CNTKCUSTOMMKLVERSION/x64/parallel
# Stage the exposed headers (listed in headers.txt), the redistributable
# README, and the license.
rsync -av --files-from headers.txt $MKLROOT/include Publish/$CNTKCUSTOMMKLVERSION/include
cp -p README-for-redistributable.txt Publish/$CNTKCUSTOMMKLVERSION/README.txt
cp -p ../../LICENSE.md Publish/$CNTKCUSTOMMKLVERSION
# Pack the publish tree into a versioned tarball next to this script.
cd Publish
tar -czf ../CNTKCustomMKL-Linux-$CNTKCUSTOMMKLVERSION.tgz $CNTKCUSTOMMKLVERSION
cd ..

156
Dependencies/CNTKCustomMKL/build-windows.cmd поставляемый Normal file
Просмотреть файл

@ -0,0 +1,156 @@
@echo off
REM
REM Copyright (c) Microsoft. All rights reserved.
REM
REM Licensed under the MIT license. See LICENSE.md file in the project root
REM for full license information.
REM ==============================================================================
REM
REM Builds the CNTK custom MKL dynamic link libraries (parallel and sequential
REM threading variants) with the Intel MKL builder tool and stages headers,
REM redistributables, and license files under Publish\<version>\.
REM
echo.
echo This batch file will build a custom MKL dynamic link library for usage by CNTK.
echo.
echo Requirements:
echo - Intel MKL SDK installed on the machine
echo - MKLROOT environment variable is set to the MKL directory inside the Intel MKL SDK
echo - Visual Studio 2013 installed and included in the path
echo.
REM Delayed expansion is required for the !VAR! references assigned inside the
REM for-loop below.
setlocal enableextensions enabledelayedexpansion
REM Run from the directory this script lives in.
pushd "%~dp0"
if errorlevel 1 (
echo Could not change directory to script location.
exit /b 1
)
REM --- Validate prerequisites: MKLROOT, the MKL builder tool, and VS tools in PATH.
if not defined MKLROOT (
echo Error: Environment variable MKLROOT is undefined.
exit /b 1
)
if not exist "%MKLROOT%" (
echo Error: Directory doesn't exist: "%MKLROOT%".
exit /b 1
)
set MKLBUILDERROOT=%MKLROOT%\tools\builder
if not exist "%MKLBUILDERROOT%" (
echo Error: Directory doesn't exist: "%MKLBUILDERROOT%".
exit /b 1
)
where /q nmake.exe
if errorlevel 1 (
echo Error: NMAKE.EXE not in path.
exit /b 1
)
where /q link.exe
if errorlevel 1 (
echo Error: LINK.EXE not in path.
exit /b 1
)
REM The published package is versioned by the contents of version.txt.
set /p CNTKCUSTOMMKLVERSION=<version.txt
if not defined CNTKCUSTOMMKLVERSION (
echo Cannot determine CNTK custom MKL version.
exit /b 1
)
REM Start from a clean slate: remove previous build and publish outputs.
if exist lib rmdir /s /q lib
if errorlevel 1 exit /b 1
if exist Publish rmdir /s /q Publish
if errorlevel 1 exit /b 1
mkdir Publish\%CNTKCUSTOMMKLVERSION%\x64
echo.
echo Copying "%MKLBUILDERROOT%\lib".
xcopy /s /e /y /i "%MKLBUILDERROOT%\lib" lib
if errorlevel 1 (
exit /b 1
)
echo.
echo Compiling and copying libraries.
REM Build one custom MKL DLL per threading model; output base names are
REM mkl_cntk_p (parallel) and mkl_cntk_s (sequential).
for %%t in (
parallel
sequential
) do (
set TFIRSTCHAR=%%t
set TFIRSTCHAR=!TFIRSTCHAR:~0,1!
set LIBBASENAME=mkl_cntk_!TFIRSTCHAR!
echo.
echo Calling NMAKE libintel64 export=functions.txt threading=%%t name=!LIBBASENAME! MKLROOT="%MKLROOT%".
NMAKE /f "%MKLBUILDERROOT%\makefile" ^
libintel64 ^
export=functions.txt ^
threading=%%t ^
name=!LIBBASENAME! ^
MKLROOT="%MKLROOT%"
if errorlevel 1 (
echo Error: NMAKE.exe for threading=%%t failed.
exit /b 1
)
mkdir Publish\%CNTKCUSTOMMKLVERSION%\x64\%%t
if errorlevel 1 exit /b 1
move !LIBBASENAME!.dll Publish\%CNTKCUSTOMMKLVERSION%\x64\%%t
if errorlevel 1 exit /b 1
move !LIBBASENAME!.lib Publish\%CNTKCUSTOMMKLVERSION%\x64\%%t
if errorlevel 1 exit /b 1
del !LIBBASENAME!*
if errorlevel 1 exit /b 1
@REM TODO manifest?
)
echo.
echo Copying libiomp5md.dll.
REM The OpenMP runtime is only required by the parallel variant.
copy "%MKLROOT%\..\redist\intel64_win\compiler\libiomp5md.dll" Publish\%CNTKCUSTOMMKLVERSION%\x64\parallel
if errorlevel 1 (
exit /b 1
)
echo.
echo Removing LIB directory.
rmdir /s /q lib
if errorlevel 1 exit /b 1
echo.
echo Copying include files to Publish\%CNTKCUSTOMMKLVERSION%\include.
mkdir Publish\%CNTKCUSTOMMKLVERSION%\include
REM headers.txt lists the MKL headers exposed to CNTK builds.
for /f %%h in (headers.txt) do (
copy "%MKLROOT%\include\%%h" Publish\%CNTKCUSTOMMKLVERSION%\include
if errorlevel 1 (
echo Failed to copy "%MKLROOT%\include\%%h".
exit /b 1
)
)
copy README-for-redistributable.txt Publish\%CNTKCUSTOMMKLVERSION%\README.txt
if errorlevel 1 (
echo Failed to copy README.
exit /b 1
)
copy ..\..\LICENSE.md Publish\%CNTKCUSTOMMKLVERSION%
if errorlevel 1 (
echo Failed to copy LICENSE.md.
exit /b 1
)
popd

17
Dependencies/CNTKCustomMKL/functions.txt поставляемый Normal file
Просмотреть файл

@ -0,0 +1,17 @@
cblas_dgemm
cblas_dasum
cblas_daxpy
cblas_dcopy
cblas_ddot
cblas_dnrm2
cblas_dscal
cblas_sasum
cblas_saxpy
cblas_scopy
cblas_sgemm
cblas_sscal
cblas_sdot
cblas_snrm2
dgesvd
sgesvd
MKL_Set_Num_Threads

31
Dependencies/CNTKCustomMKL/headers.txt поставляемый Normal file
Просмотреть файл

@ -0,0 +1,31 @@
mkl_blas.h
mkl_cblas.h
mkl_df_defines.h
mkl_df_functions.h
mkl_df_types.h
mkl_df.h
mkl_dfti.h
mkl_direct_call.h
mkl_dss.h
mkl_lapack.h
mkl_lapacke.h
mkl_pardiso.h
mkl_poisson.h
mkl_rci.h
mkl_service.h
mkl_solvers_ee.h
mkl_sparse_handle.h
mkl_spblas.h
mkl_trans.h
mkl_trig_transforms.h
mkl_types.h
mkl_version.h
mkl_vml_defines.h
mkl_vml_functions.h
mkl_vml_types.h
mkl_vml.h
mkl_vsl_defines.h
mkl_vsl_functions.h
mkl_vsl_types.h
mkl_vsl.h
mkl.h

1
Dependencies/CNTKCustomMKL/version.txt поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
1

Просмотреть файл

@ -50,17 +50,28 @@ def loadLabels(src, cimg):
os.remove(gzfname)
return res.reshape((cimg, 1))
def load(dataSrc, labelsSrc, cimg):
data = loadData(dataSrc, cimg)
labels = loadLabels(labelsSrc, cimg)
return np.hstack((data, labels))
def savetxt(filename, ndarray):
with open(filename, 'w') as f:
labels = map(' '.join, np.eye(10, dtype=np.uint).astype(str))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
trnData = loadData('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz', 60000)
trnLbl = loadLabels('http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
trn = np.hstack((trnLbl, trnData))
train = load('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
print 'Writing train text file...'
np.savetxt(r'./../Data/Train-28x28.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Train-28x28_cntk_text.txt', train)
print 'Done.'
testData = loadData('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz', 10000)
testLbl = loadLabels('http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
test = np.hstack((testLbl, testData))
test = load('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
print 'Writing test text file...'
np.savetxt(r'./../Data/Test-28x28.txt', test, fmt = '%u', delimiter='\t')
print 'Done.'
savetxt(r'./../Data/Test-28x28_cntk_text.txt', test)
print 'Done.'

Просмотреть файл

@ -1,3 +1,4 @@
import sys
import urllib.request
import gzip
import os
@ -49,16 +50,28 @@ def loadLabels(src, cimg):
return res.reshape((cimg, 1))
def load(dataSrc, labelsSrc, cimg):
data = loadData(dataSrc, cimg)
labels = loadLabels(labelsSrc, cimg)
return np.hstack((data, labels))
def savetxt(filename, ndarray):
with open(filename, 'w', encoding="ascii") as f:
labels = list(map(' '.join, np.eye(10, dtype=np.uint).astype(str)))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
trnData = loadData('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz', 60000)
trnLbl = loadLabels('http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
trn = np.hstack((trnLbl, trnData))
train = load('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
print ('Writing train text file...')
np.savetxt(r'./../Data/Train-28x28.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Train-28x28_cntk_text.txt', train)
print ('Done.')
testData = loadData('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz', 10000)
testLbl = loadLabels('http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
test = np.hstack((testLbl, testData))
test = load('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz',
'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
print ('Writing test text file...')
np.savetxt(r'./../Data/Test-28x28.txt', test, fmt = '%u', delimiter='\t')
savetxt(r'./../Data/Test-28x28_cntk_text.txt', test)
print ('Done.')

Просмотреть файл

@ -2,31 +2,26 @@
# for example: cntk configFile=myConfigFile RootDir=../..
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder>
RootDir = ".."
rootDir = ".."
ConfigDir = "$RootDir$/Config"
DataDir = "$RootDir$/Data"
OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
configDir = "$rootDir$/Config"
dataDir = "$rootDir$/Data"
outputDir = "$rootDir$/Output"
modelDir = "$outputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# override the above as follows when running on CPU:
# deviceId = -1
command = train:test
precision = "float"
modelPath = "$ModelDir$/01_OneHidden"
ndlMacros = "$ConfigDir$/Macros.ndl"
modelPath = "$modelDir$/01_OneHidden"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/01_OneHidden_out"
traceLevel=1
numMBsToShowResult=500
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# uncomment the following line to write logs to a file
# stderr = "$outputDir$/01_OneHidden_out"
traceLevel = 1
numMBsToShowResult = 500
#######################################
# TRAINING CONFIG #
@ -35,37 +30,74 @@ initOnCPUOnly=true
train = [
action = "train"
# BrainScript version as described in Tutorial II.
# This is currently disabled. To run this, please remove the "_disabled" from "BrainScriptNetworkBuilder_disabled"
# and comment out the NDLNetworkBuilder below.
BrainScriptNetworkBuilder_disabled = [
# macros to include
include "Shared.bs"
featDim = 28 * 28 # number of pixels
labelDim = 10 # number of distinct labels
features = Input (featDim)
featScaled = Constant (1.0 / 256.0) .* features
labels = Input (labelDim)
hiddenDim = 200
# DNNSigmoidLayer and DNNLayer are defined in Shared.bs
h1 = DNNSigmoidLayer (featDim, hiddenDim, featScaled, 1)
z = DNNLayer (hiddenDim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax (labels, z)
errs = ErrorPrediction (labels, z)
# set top5Errs as an evaluation node to compute the top-5 error rate
# This is not marked tag="evaluation" since expensive during training.
# We explicitly select it as an output node in the "test" command.
top5Errs = ErrorPrediction (labels, z, topN=5)
# declare special nodes
featureNodes = (features)
labelNodes = (labels)
criterionNodes = (ce)
evaluationNodes = (errs)
outputNodes = (z)
]
# deprecated NDL version
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly = true
ndlMacros = "$configDir$/Macros.ndl"
networkDescription = "$ConfigDir$/01_OneHidden.ndl"
]
SGD = [
epochSize = 60000
minibatchSize = 32
learningRatesPerMB = 0.1
momentumPerMB = 0
learningRatesPerSample = 0.003125
momentumAsTimeConstant = 0
maxEpochs = 30
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See REAMDE.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
# See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]
#######################################
@ -74,22 +106,22 @@ train = [
test = [
action = "test"
minibatchSize = 16
minibatchSize = 1024 # reduce this if you run out of memory
evalNodeNames = ce:errs:top5Errs
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]

Просмотреть файл

@ -20,17 +20,15 @@ DNN = [
# DNNSigmoidLayer and DNNLayer are defined in Macros.ndl
h1 = DNNSigmoidLayer(featDim, hiddenDim, featScaled, 1)
ol = DNNLayer(hiddenDim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
top5Errs = ErrorPrediction(labels, ol, Const(5), tag="eval") # only used in testing
# Special Nodes
# errTop1 can be used to compute, for example, top-5 error by changing Const(1) to Const(5).
errTop1 = ErrorPrediction(labels, ol, Const(1), tag="eval")
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

Просмотреть файл

@ -1,35 +1,27 @@
# Parameters can be overwritten on the command line
# for example: cntk configFile=myConfigFile RootDir=../..
# for example: cntk configFile=myConfigFile rootDir=../..
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder>
RootDir = ".."
rootDir = ".."
ConfigDir = "$RootDir$/Config"
DataDir = "$RootDir$/Data"
OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
configDir = "$rootDir$/Config"
dataDir = "$rootDir$/Data"
outputDir = "$rootDir$/Output"
modelDir = "$outputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# Override the above as follows when running on CPU:
# deviceId = -1
# Note: Compared to GPU, this runs very slow.
command = train:test
precision = "float"
modelPath = "$ModelDir$/02_Convolution"
ndlMacros = "$ConfigDir$/Macros.ndl"
modelPath = "$modelDir$/02_Convolution"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/02_Convolution_out"
traceLevel=1
numMBsToShowResult=500
prefetch=true
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
# uncomment the following line to write logs to a file
# stderr = "$outputDir$/02_Convolution_out"
traceLevel = 1
numMBsToShowResult = 500
#######################################
# TRAINING CONFIG #
@ -39,34 +31,35 @@ train = [
action = "train"
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly = true
ndlMacros = "$configDir$/Macros.ndl"
networkDescription = "$ConfigDir$/02_Convolution.ndl"
]
SGD = [
epochSize = 60000
minibatchSize = 32
#learningRatesPerSample = 0.003125 # TODO
#momentumAsTimeConstant = 0
learningRatesPerMB = 0.1*5:0.3
momentumPerMB = 0*10:0.7
maxEpochs = 15
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See REAMDE.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -77,26 +70,20 @@ train = [
test = [
action = test
minibatchSize = 16
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/02_Convolution.ndl"
]
minibatchSize = 1024
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

Просмотреть файл

@ -58,13 +58,13 @@ DNN=[
ol = DNNLayer(h1Dim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
# Special Nodes
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

Просмотреть файл

@ -10,7 +10,6 @@ OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# Note: Batch normalization training on CPU is not yet implemented.
# When it is, override the above as follows when running on CPU:
# deviceId = -1
@ -19,7 +18,6 @@ command = train:test
precision = "float"
modelPath = "$ModelDir$/03_ConvBatchNorm"
ndlMacros = "$ConfigDir$/Macros.ndl"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/03_ConvBatchNorm_out"
@ -28,7 +26,6 @@ traceLevel=1
numMBsToShowResult=500
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
#######################################
# TRAINING CONFIG #
@ -38,6 +35,9 @@ train = [
action = "train"
NDLNetworkBuilder = [
imageLayout = "cudnn"
initOnCPUOnly=true
ndlMacros = "$ConfigDir$/Macros.ndl"
networkDescription = "$ConfigDir$/03_ConvBatchNorm.ndl"
]
@ -47,29 +47,24 @@ train = [
learningRatesPerMB = 0.5:0.1
momentumPerMB = 0.9
maxEpochs = 2
#batchNormalizationTimeConstant=0 # Set through NDL
batchNormalizationBlendTimeConstant=0:1#INF
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See REAMDE.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
readerType = "CNTKTextFormatReader"
# See ../REAMDE.md for details on getting the data (Train-28x28_cntk_text.txt).
file = "$DataDir$/Train-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
]
#######################################
@ -78,28 +73,22 @@ train = [
test = [
action = "test"
minibatchSize = 32
minibatchSize = 1024
modelPath=$ModelDir$/03_ConvBatchNorm
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/03_ConvBatchNorm.ndl"
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test-28x28_cntk_text.txt"
input = [
features = [
dim = 784
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

Просмотреть файл

@ -64,13 +64,13 @@ DNN = [
ol = DNNLayer(h1Dim, labelDim, h1, 1)
ce = CrossEntropyWithSoftmax(labels, ol)
err = ErrorPrediction(labels, ol)
errs = ErrorPrediction(labels, ol)
# Special Nodes
FeatureNodes = (features)
LabelNodes = (labels)
CriterionNodes = (ce)
EvalNodes = (err)
EvalNodes = (errs)
OutputNodes = (ol)
]

Просмотреть файл

@ -0,0 +1,101 @@
# Parameters can be overwritten on the command line
# for example: cntk configFile=myConfigFile RootDir=../..
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder>
RootDir = ".."
ConfigDir = "$RootDir$/Config"
DataDir = "$RootDir$/Data"
OutputDir = "$RootDir$/Output"
ModelDir = "$OutputDir$/Models"
deviceId = 0
imageLayout = "cudnn"
# Override the above as follows when running on CPU:
# deviceId = -1
command = train:test
precision = "float"
modelPath = "$ModelDir$/04_DeConv"
ndlMacros = "$ConfigDir$/Macros.ndl"
# uncomment the following line to write logs to a file
# stderr = "$OutputDir$/04_DeConv_out"
traceLevel=1
numMBsToShowResult=500
prefetch=true
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
#######################################
# TRAINING CONFIG #
#######################################
train = [
action = "train"
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/04_DeConv.ndl"
]
SGD = [
epochSize = 60000
minibatchSize = 32
learningRatesPerMB = 0.001
momentumPerMB = 0.9
maxEpochs = 10
]
# Note: this reader crashes if randomization is turned on.
reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See REAMDE.md for details.
file = "$DataDir$/Train-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]
#######################################
# TEST CONFIG #
#######################################
test = [
action = test
minibatchSize = 16
NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/04_DeConv.ndl"
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test-28x28.txt"
features = [
dim = 784
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
]
]
]

Просмотреть файл

@ -0,0 +1,60 @@
# macros to include
load = ndlMnistMacros
# the actual NDL that defines the network
run = DNN
ndlMnistMacros = [
imageW = 28
imageH = 28
imageC = 1
labelDim = 10
features = ImageInput(imageW, imageH, imageC, imageLayout=$imageLayout$)
featScale = Constant(0.00390625)
featScaled = Scale(featScale, features)
labels = InputValue(labelDim)
]
DNN=[
# conv1
kW1 = 5
kH1 = 5
cMap1 = 16
hStride1 = 2
vStride1 = 2
wScale1 = 0.1
bValue1 = 0
# weight[cMap1, kW1 * kH1 * inputChannels]
# Conv2DReLULayer is defined in Macros.ndl
conv1 = Conv2DReLULayer(featScaled, cMap1, 25, kW1, kH1, hStride1, vStride1, wScale1, bValue1)
# pool1
pool1W = 2
pool1H = 2
pool1hStride = 2
pool1vStride = 2
# MaxPooling is a standard NDL node.
pool1 = MaxPooling(conv1, pool1W, pool1H, pool1hStride, pool1vStride, imageLayout=$imageLayout$)
#unpool1
unpool1 = MaxUnpool(pool1, conv1, pool1W, pool1H, pool1hStride, pool1vStride)
# deconv1
lpad1 = 2
upad1 = 1
# weight[cMap1, kW1 * kH1 * inputChannels]
# DeconvReLULayer is defined in Macros.ndl
deconv1 = DeconvReLULayer(unpool1, kW1, kH1, imageC, 25, cMap1, hStride1, vStride1, lpad1, upad1, wScale1, bValue1)
mse = SquareError(featScaled, deconv1)
#err = ErrorPrediction(labels, ol)
# Special Nodes
FeatureNodes = (features)
#LabelNodes = (labels)
CriterionNodes = (mse)
#EvalNodes = (err)
#OutputNodes = (deconv1)
]

Просмотреть файл

@ -48,6 +48,10 @@ ConvND(w, inp, kW, kH, inMap, outMap, hStride, vStride) = [
c = Convolution(w, inp, {kW, kH, inMap}, mapCount=outMap, stride={hStride, vStride, inMap}, sharing={true, true, true}, autoPadding={true, true, false}, lowerPad=0, upperPad=0, imageLayout=$imageLayout$)
]
DeConv(w, inp, kW, kH, inMap, outMap, hStride, vStride, lpad, upad) = [
c = Convolution(w, inp, {kW, kH, inMap}, mapCount=outMap, stride={hStride, vStride, inMap}, sharing={true, true, true}, autoPadding=false, lowerPad={lpad, lpad, 0}, upperPad={upad, upad, 0}, transpose=1, imageLayout=$imageLayout$)
]
Conv2DReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue) = [
w = ConvW(outMap, inWCount, wScale)
b = ConvB(outMap, bValue)
@ -84,6 +88,17 @@ ConvBNReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue,
y = RectifiedLinear(c)
]
DeconvReLULayer(inp, kW, kH, inMap, inWCount, outMap, hStride, vStride, lpad, upad, wScale, bValue) = [
# No bias here.
w = ConvW(outMap, inWCount, wScale)
act = RectifiedLinear(inp)
out = DeConv(w, act, kW, kH, inMap, outMap, hStride, vStride, lpad, upad)
]
MaxNDPooling(inp, kW, kH, hStride, vStride) = [
p = Pooling(inp, "max", {kW, kH, 1}, stride={hStride, vStride, 1}, autoPadding={true, true, false}, lowerPad=0, upperPad=0, imageLayout=$imageLayout$)
]
p = Pooling(inp, "max", {kW, kH, 1}, stride={hStride, vStride, 1}, autoPadding={true, true, false}, lowerPad=0, upperPad=0, imageLayout=$imageLayout$)
]
MaxUnpool(inp, poolInp, kW, kH, hStride, vStride) = [
up = MaxUnpooling(inp, poolInp, {kW, kH, 1}, stride={hStride, vStride, 1}, autoPadding={false, false, false}, lowerPad=0, upperPad=0, imageLayout=$imageLayout$)
]

Просмотреть файл

@ -0,0 +1,81 @@
# Shared.bs -- macros shared by all MNIST examples
# linear layer (no non-linearity)
DNNLayer (inDim, outDim, x, parmScale) = [
W = Parameter (outDim, inDim, init="uniform", initValueScale=parmScale, initOnCPUOnly=true)
b = Parameter (outDim, 1, init="fixedValue", value=0)
z = W * x + b
].z
# sigmoid layer
DNNSigmoidLayer (inDim, outDim, x, parmScale) = Sigmoid (DNNLayer (inDim, outDim, x, parmScale))
# image sigmoid layer --differs from DNNSigmoidLayer in how dimensions are specified
DNNImageSigmoidLayer (inW, inH, inC, outDim, x, parmScale) = [
W = ImageParameter (outDim, inW, inH, inC, init="uniform", initValueScale=parmScale, initOnCPUOnly=true /* , imageLayout=$imageLayout$*/)
b = Parameter (outDim, 1, init="fixedValue", value=0)
t = Times(W, x)
z = Plus(t, b)
y = Sigmoid(z) # TODO: fix this for 02_
].y
# ReLU layer with batch normalization
# TODO: rename to DNN-
DnnBNReLULayer (inDim, outDim, x, wScale, bValue, scValue, bnTimeConst) = [
W = Parameter (outDim, inDim, init = "gaussian", initValueScale = wScale, initOnCPUOnly=true)
b = Parameter (outDim, 1, init = "fixedValue", value = bValue)
sc = Parameter (outDim, 1, init = "fixedValue", value = scValue)
m = Parameter (outDim, 1, init = "fixedValue", value = 0, learningRateMultiplier = 0)
isd = Parameter (outDim, 1, init = "fixedValue", value = 0, learningRateMultiplier = 0)
t = Times(W, x) # TODO: W * x
bn = BatchNormalization(t, sc, b, m, isd, eval = false, spatial = false, normalizationTimeConstant = bnTimeConst)
y = RectifiedLinear(bn)
].y
# macros to create parameters for convolution --TODO: rename to newConvX()
ConvW (outMap, inWCount, wScale) = Parameter (outMap, inWCount, init="uniform", initValueScale=wScale, initOnCPUOnly=true)
ConvB (outMap, bValue) = ImageParameter (1, 1, outMap, init="fixedValue", value=bValue /* , imageLayout=$imageLayout$*/)
# TODO: find out whether Conv2D is identical to -ND by now, then unify
Conv2D (w, inp, kW, kH, outMap, hStride, vStride) =
Convolution (w, inp, kW, kH, outMap, hStride, vStride, zeroPadding=true /* , imageLayout=$imageLayout$*/)
ConvND (w, inp, kW, kH, inMap, outMap, hStride, vStride) =
Convolution (w, inp, (kW:kH:inMap), mapCount=outMap, stride=(hStride:vStride:inMap), sharing=(true:true:true), autoPadding=(true:true:false), lowerPad=0, upperPad=0 /* , imageLayout=$imageLayout$*/)
Conv2DReLULayer (inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue) = [
w = ConvW (outMap, inWCount, wScale)
b = ConvB (outMap, bValue)
c = Conv2D (w, inp, kW, kH, outMap, hStride, vStride)
out = RectifiedLinear (c + b);
].out
ConvNDReLULayer (inp, kW, kH, inMap, inWCount, outMap, hStride, vStride, wScale, bValue) = [
w = ConvW (outMap, inWCount, wScale)
b = ConvB (outMap, bValue)
c = ConvND (w, inp, kW, kH, inMap, outMap, hStride, vStride)
out = RectifiedLinear (c + b);
].out
ConvBNLayerW (W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, bnTimeConst) = [ # TODO: delete if not needed
b = Parameter(outMap, 1, init="fixedValue", value=bValue)
sc = Parameter(outMap, 1, init="fixedValue", value=scValue)
m = Parameter(outMap, 1, init="fixedValue", value=0, learningRateMultiplier=0)
isd = Parameter(outMap, 1, init="fixedValue", value=0, learningRateMultiplier=0)
c = Convolution(W, inp, kW, kH, outMap, hStride, vStride, zeroPadding=true /* , imageLayout=$imageLayout$*/)
y = BatchNormalization(c, sc, b, m, isd, eval=false, spatial=true, normalizationTimeConstant=bnTimeConst /* , imageLayout=$imageLayout$*/)
].y
ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst) = [
W = LearnableParameter(outMap, inWCount, init=Gaussian, initValueScale=wScale, initOnCPUOnly=true)
c = ConvBNLayerW(W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, bnTimeConst)
].c
ConvBNReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst) = [
c = ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, bnTimeConst)
y = RectifiedLinear(c)
].y
MaxNDPooling(inp, kW, kH, hStride, vStride) =
Pooling(inp, "max", (kW:kH:1), stride=(hStride:vStride:1), autoPadding=(true:true:false), lowerPad=0, upperPad=0 /* , imageLayout=$imageLayout$*/)

Просмотреть файл

@ -19,7 +19,7 @@ downloaded and converted by running the following command from the 'AdditionalFi
`python mnist_convert.py`
The script will download all required files and convert them to CNTK-supported format.
The resulting files (Train-28x28.txt and Test-28x28.txt) will be stored in the 'Data' folder.
The resulting files (Train-28x28_cntk_text.txt and Test-28x28_cntk_text.txt) will be stored in the 'Data' folder.
In case you don't have Python installed, there are 2 options:
1. Download and install latest version of Python 2.7 from: https://www.python.org/downloads/

Просмотреть файл

@ -16,8 +16,6 @@ imageLayout = "cudnn"
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
prefetch = "true"
command = Train:Test
modelPath = "$ModelDir$/01_Convolution"
@ -44,20 +42,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
# Setting minibatchMode="full" is a temporary fix to prevent an exception until the reader is migrated to the new reader.
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See REAMDE.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -68,18 +64,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

Просмотреть файл

@ -16,8 +16,6 @@ imageLayout = "cudnn"
# If set to true, always initialize the network on CPU, making initialization consistent across CPU and GPU targets (for testing).
initOnCPUOnly=true
prefetch = "true"
command = Train:Test
stderr = "$OutputDir$/02_BatchNormConv"
@ -43,19 +41,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See REAMDE.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -67,18 +64,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = $DataDir$/labelsmap.txt
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

Просмотреть файл

@ -13,8 +13,6 @@ imageLayout = "cudnn"
# override the above as follows when running on CPU:
# deviceId = -1
prefetch = "true"
command = Train:Test
modelPath = "$ModelDir$/05_ConvLocal"
@ -40,19 +38,18 @@ Train = [
]
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Train.txt"
randomize = "auto"
minibatchMode="full"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
# See REAMDE.md for details on getting the data (Train_cntk_text.txt).
file = "$DataDir$/Train_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]
@ -63,18 +60,17 @@ Test = [
minibatchSize = 16
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/Test.txt"
randomize = "none"
features = [
dim = 3072
start = 1
]
labels = [
dim = 1
start = 0
labelDim = 10
labelMappingFile = "$DataDir$/labelsmap.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/Test_cntk_text.txt"
input = [
features = [
dim = 3072
format = "dense"
]
labels = [
dim = 10
format = "dense"
]
]
]
]

Просмотреть файл

@ -35,7 +35,8 @@ def readBatch(src, outFmt):
print ('Format not supported: ' + outFmt)
usage()
sys.exit(1)
return np.hstack((np.reshape(d['labels'], (len(d['labels']), 1)), feat))
res = np.hstack((feat, np.reshape(d['labels'], (len(d['labels']), 1))))
return res.astype(np.int)
def loadData(src, outFmt):
print ('Downloading ' + src)
@ -47,7 +48,7 @@ def loadData(src, outFmt):
tar.extractall()
print ('Done.')
print ('Preparing train set...')
trn = np.empty((0, NumFeat + 1))
trn = np.empty((0, NumFeat + 1), dtype=np.int)
for i in range(5):
batchName = './cifar-10-batches-py/data_batch_{0}'.format(i + 1)
trn = np.vstack((trn, readBatch(batchName, outFmt)))
@ -83,12 +84,22 @@ def parseCmdOpt(argv):
sys.exit(1)
return fmt
def savetxt(filename, ndarray):
with open(filename, 'w') as f:
labels = map(' '.join, np.eye(10, dtype=np.uint).astype(str))
for row in ndarray:
row_str = row.astype(str)
label_str = labels[row[-1]]
feature_str = ' '.join(row_str[:-1])
f.write('|labels {} |features {}\n'.format(label_str, feature_str))
if __name__ == "__main__":
fmt = parseCmdOpt(sys.argv[1:])
trn, tst = loadData('http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz', fmt)
print ('Writing train text file...')
np.savetxt(r'./Train.txt', trn, fmt = '%u', delimiter='\t')
savetxt(r'./Train_cntk_text.txt', trn)
print ('Done.')
print ('Writing test text file...')
np.savetxt(r'./Test.txt', tst, fmt = '%u', delimiter='\t')
savetxt(r'./Test_cntk_text.txt', tst)
print ('Done.')

Просмотреть файл

@ -17,6 +17,7 @@ The following table contains results as well as links to pre-trained models that
| ResNet-18 | 29.57 | 10.41 | [Download](https://www.cntk.ai/resnet/ResNet_18.model)
| ResNet-34 | 27.31 | 8.97 | [Download](https://www.cntk.ai/resnet/ResNet_34.model)
| ResNet-50 | 24.74 | 7.56 | [Download](https://www.cntk.ai/resnet/ResNet_50.model)
| ResNet-152 | 22.57 | 6.44 | [Download](https://www.cntk.ai/resnet/ResNet_152.model)
## Notes
This work is an implementation of ResNets in CNTK. If you are interested in the original implementation of ResNet, follow [this link](https://github.com/KaimingHe/deep-residual-networks).

Просмотреть файл

@ -50,7 +50,7 @@ Multigpu_Demo_Train=[
SGD = [
# epochSize = 0 means epochSize is the size of the training set
epochSize = 0
minibatchSize = 25
minibatchSize = 25
learningRatesPerMB = 0.5:0.2*20:0.1
momentumPerMB = 0.9
dropoutRate = 0.0
@ -70,22 +70,17 @@ Multigpu_Demo_Train=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTrain.txt"
miniBatchMode = "partial"
randomize = "auto"
verbosity = 1
features = [
dim = 2 # two-dimensional input data
start = 0 # Start with first element on line
]
labels = [
start = 2 # Skip two elements
dim = 1 # One label dimension
labelDim = 2 # Two labels possible
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTrain_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -102,20 +97,17 @@ Multigpu_Demo_Test=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -131,20 +123,17 @@ Multigpu_Demo_Output=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]

Просмотреть файл

@ -41,9 +41,9 @@ Simple_Demo_Train = [
]
SGD = [
# epochSize = 0 means epochSize is the size of the training set
# epochSize = 0 means epochSize is the size of the training set
epochSize = 0
minibatchSize = 25
minibatchSize = 25
learningRatesPerMB = 0.5:0.2*20:0.1
momentumPerMB = 0.9
dropoutRate = 0.0
@ -52,22 +52,17 @@ Simple_Demo_Train = [
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTrain.txt"
miniBatchMode = "partial"
randomize = "auto"
verbosity = 1
features = [
dim = 2 # two-dimensional input data
start = 0 # Start with first element on line
]
labels = [
start = 2 # Skip two elements
dim = 1 # One label dimension
labelDim = 2 # Two labels possible
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTrain_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -84,20 +79,17 @@ Simple_Demo_Test = [
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]
]
@ -113,20 +105,17 @@ Simple_Demo_Output=[
# Parameter values for the reader
reader = [
readerType = "UCIFastReader"
file = "$DataDir$/SimpleDataTest.txt"
randomize = "none"
features = [
dim = 2
start = 0
]
labels = [
start = 2
dim = 1
labelDim = 2
labelMappingFile = "$DataDir$/SimpleMapping.txt"
readerType = "CNTKTextFormatReader"
file = "$DataDir$/SimpleDataTest_cntk_text.txt"
input = [
features = [
dim = 2 # two-dimensional input data
format = "dense"
]
labels = [
dim = 2 # two-dimensional labels
format = "dense"
]
]
]

Просмотреть файл

@ -1,603 +0,0 @@
-1 -1 1
-1 -0.99 1
-1 -0.98 1
-1 -0.97 1
-1 -0.96 1
-1 -0.95 1
-1 -0.94 1
-1 -0.93 1
-1 -0.92 1
-1 -0.91 1
-1 -0.9 1
-1 -0.89 1
-1 -0.88 1
-1 -0.87 1
-1 -0.86 1
-1 -0.85 1
-1 -0.84 1
-1 -0.83 1
-1 -0.82 1
-1 -0.81 1
-1 -0.8 1
-1 -0.79 1
-1 -0.78 1
-1 -0.77 1
-1 -0.76 1
-1 -0.75 1
-1 -0.74 1
-1 -0.73 1
-1 -0.72 1
-1 -0.71 1
-1 -0.7 1
-1 -0.69 1
-1 -0.68 1
-1 -0.67 1
-1 -0.66 1
-1 -0.65 1
-1 -0.64 1
-1 -0.63 1
-1 -0.62 1
-1 -0.61 1
-1 -0.6 1
-1 -0.59 1
-1 -0.58 1
-1 -0.57 1
-1 -0.56 1
-1 -0.55 1
-1 -0.54 1
-1 -0.53 1
-1 -0.52 1
-1 -0.51 1
-1 -0.5 1
-1 -0.49 1
-1 -0.48 1
-1 -0.47 1
-1 -0.46 1
-1 -0.45 1
-1 -0.44 1
-1 -0.43 1
-1 -0.42 1
-1 -0.41 1
-1 -0.4 1
-1 -0.39 1
-1 -0.38 1
-1 -0.37 1
-1 -0.36 1
-1 -0.35 1
-1 -0.34 1
-1 -0.33 1
-1 -0.32 1
-1 -0.31 1
-1 -0.3 1
-1 -0.29 1
-1 -0.28 1
-1 -0.27 1
-1 -0.26 1
-1 -0.25 1
-1 -0.24 1
-1 -0.23 1
-1 -0.22 1
-1 -0.21 1
-1 -0.2 1
-1 -0.19 1
-1 -0.18 1
-1 -0.17 1
-1 -0.16 1
-1 -0.15 1
-1 -0.14 1
-1 -0.13 1
-1 -0.12 1
-1 -0.11 1
-1 -0.1 1
-1 -0.09 0
-1 -0.08 0
-1 -0.07 0
-1 -0.06 0
-1 -0.05 0
-1 -0.04 0
-1 -0.03 0
-1 -0.02 0
-1 -0.01 0
-1 0 0
-1 0.01 0
-1 0.02 0
-1 0.03 0
-1 0.04 0
-1 0.05 0
-1 0.06 0
-1 0.07 0
-1 0.08 0
-1 0.09 0
-1 0.1 0
-1 0.11 0
-1 0.12 0
-1 0.13 0
-1 0.14 0
-1 0.15 0
-1 0.16 0
-1 0.17 0
-1 0.18 0
-1 0.19 0
-1 0.2 0
-1 0.21 0
-1 0.22 0
-1 0.23 0
-1 0.24 0
-1 0.25 0
-1 0.26 0
-1 0.27 0
-1 0.28 0
-1 0.29 0
-1 0.3 0
-1 0.31 0
-1 0.32 0
-1 0.33 0
-1 0.34 0
-1 0.35 0
-1 0.36 0
-1 0.37 0
-1 0.38 0
-1 0.39 0
-1 0.4 0
-1 0.41 0
-1 0.42 0
-1 0.43 0
-1 0.44 0
-1 0.45 0
-1 0.46 0
-1 0.47 0
-1 0.48 0
-1 0.49 0
-1 0.5 0
-1 0.51 0
-1 0.52 0
-1 0.53 0
-1 0.54 0
-1 0.55 0
-1 0.56 0
-1 0.57 0
-1 0.58 0
-1 0.59 0
-1 0.6 0
-1 0.61 0
-1 0.62 0
-1 0.63 0
-1 0.64 0
-1 0.65 0
-1 0.66 0
-1 0.67 0
-1 0.68 0
-1 0.69 0
-1 0.7 0
-1 0.71 0
-1 0.72 0
-1 0.73 0
-1 0.74 0
-1 0.75 0
-1 0.76 0
-1 0.77 0
-1 0.78 0
-1 0.79 0
-1 0.8 0
-1 0.81 0
-1 0.82 0
-1 0.83 0
-1 0.84 0
-1 0.85 0
-1 0.86 0
-1 0.87 0
-1 0.88 0
-1 0.89 0
-1 0.9 0
-1 0.91 0
-1 0.92 0
-1 0.93 0
-1 0.94 0
-1 0.95 0
-1 0.96 0
-1 0.97 0
-1 0.98 0
-1 0.99 0
-1 0 0
0 -1 1
0 -0.99 1
0 -0.98 1
0 -0.97 1
0 -0.96 1
0 -0.95 1
0 -0.94 1
0 -0.93 1
0 -0.92 1
0 -0.91 1
0 -0.9 1
0 -0.89 1
0 -0.88 1
0 -0.87 1
0 -0.86 1
0 -0.85 1
0 -0.84 1
0 -0.83 1
0 -0.82 1
0 -0.81 1
0 -0.8 1
0 -0.79 1
0 -0.78 1
0 -0.77 1
0 -0.76 1
0 -0.75 1
0 -0.74 1
0 -0.73 1
0 -0.72 1
0 -0.71 1
0 -0.7 1
0 -0.69 1
0 -0.68 1
0 -0.67 1
0 -0.66 1
0 -0.65 1
0 -0.64 1
0 -0.63 1
0 -0.62 1
0 -0.61 1
0 -0.6 1
0 -0.59 1
0 -0.58 1
0 -0.57 1
0 -0.56 1
0 -0.55 1
0 -0.54 1
0 -0.53 1
0 -0.52 1
0 -0.51 1
0 -0.5 1
0 -0.49 1
0 -0.48 1
0 -0.47 1
0 -0.46 1
0 -0.45 1
0 -0.44 1
0 -0.43 1
0 -0.42 1
0 -0.41 1
0 -0.4 1
0 -0.39 1
0 -0.38 1
0 -0.37 1
0 -0.36 1
0 -0.35 1
0 -0.34 1
0 -0.33 1
0 -0.32 1
0 -0.31 1
0 -0.3 1
0 -0.29 1
0 -0.28 1
0 -0.27 1
0 -0.26 1
0 -0.25 1
0 -0.24 1
0 -0.23 1
0 -0.22 1
0 -0.21 1
0 -0.2 1
0 -0.19 1
0 -0.18 1
0 -0.17 1
0 -0.16 1
0 -0.15 1
0 -0.14 1
0 -0.13 1
0 -0.12 1
0 -0.11 1
0 -0.1 1
0 -0.09 0
0 -0.08 0
0 -0.07 0
0 -0.06 0
0 -0.05 0
0 -0.04 0
0 -0.03 0
0 -0.02 0
0 -0.01 0
0 0 0
0 0.01 0
0 0.02 0
0 0.03 0
0 0.04 0
0 0.05 0
0 0.06 0
0 0.07 0
0 0.08 0
0 0.09 0
0 0.1 0
0 0.11 0
0 0.12 0
0 0.13 0
0 0.14 0
0 0.15 0
0 0.16 0
0 0.17 0
0 0.18 0
0 0.19 0
0 0.2 0
0 0.21 0
0 0.22 0
0 0.23 0
0 0.24 0
0 0.25 0
0 0.26 0
0 0.27 0
0 0.28 0
0 0.29 0
0 0.3 0
0 0.31 0
0 0.32 0
0 0.33 0
0 0.34 0
0 0.35 0
0 0.36 0
0 0.37 0
0 0.38 0
0 0.39 0
0 0.4 0
0 0.41 0
0 0.42 0
0 0.43 0
0 0.44 0
0 0.45 0
0 0.46 0
0 0.47 0
0 0.48 0
0 0.49 0
0 0.5 0
0 0.51 0
0 0.52 0
0 0.53 0
0 0.54 0
0 0.55 0
0 0.56 0
0 0.57 0
0 0.58 0
0 0.59 0
0 0.6 0
0 0.61 0
0 0.62 0
0 0.63 0
0 0.64 0
0 0.65 0
0 0.66 0
0 0.67 0
0 0.68 0
0 0.69 0
0 0.7 0
0 0.71 0
0 0.72 0
0 0.73 0
0 0.74 0
0 0.75 0
0 0.76 0
0 0.77 0
0 0.78 0
0 0.79 0
0 0.8 0
0 0.81 0
0 0.82 0
0 0.83 0
0 0.84 0
0 0.85 0
0 0.86 0
0 0.87 0
0 0.88 0
0 0.89 0
0 0.9 0
0 0.91 0
0 0.92 0
0 0.93 0
0 0.94 0
0 0.95 0
0 0.96 0
0 0.97 0
0 0.98 0
0 0.99 0
0 1 0
1 -1 1
1 -0.99 1
1 -0.98 1
1 -0.97 1
1 -0.96 1
1 -0.95 1
1 -0.94 1
1 -0.93 1
1 -0.92 1
1 -0.91 1
1 -0.9 1
1 -0.89 1
1 -0.88 1
1 -0.87 1
1 -0.86 1
1 -0.85 1
1 -0.84 1
1 -0.83 1
1 -0.82 1
1 -0.81 1
1 -0.8 1
1 -0.79 1
1 -0.78 1
1 -0.77 1
1 -0.76 1
1 -0.75 1
1 -0.74 1
1 -0.73 1
1 -0.72 1
1 -0.71 1
1 -0.7 1
1 -0.69 1
1 -0.68 1
1 -0.67 1
1 -0.66 1
1 -0.65 1
1 -0.64 1
1 -0.63 1
1 -0.62 1
1 -0.61 1
1 -0.6 1
1 -0.59 1
1 -0.58 1
1 -0.57 1
1 -0.56 1
1 -0.55 1
1 -0.54 1
1 -0.53 1
1 -0.52 1
1 -0.51 1
1 -0.5 1
1 -0.49 1
1 -0.48 1
1 -0.47 1
1 -0.46 1
1 -0.45 1
1 -0.44 1
1 -0.43 1
1 -0.42 1
1 -0.41 1
1 -0.4 1
1 -0.39 1
1 -0.38 1
1 -0.37 1
1 -0.36 1
1 -0.35 1
1 -0.34 1
1 -0.33 1
1 -0.32 1
1 -0.31 1
1 -0.3 1
1 -0.29 1
1 -0.28 1
1 -0.27 1
1 -0.26 1
1 -0.25 1
1 -0.24 1
1 -0.23 1
1 -0.22 1
1 -0.21 1
1 -0.2 1
1 -0.19 1
1 -0.18 1
1 -0.17 1
1 -0.16 1
1 -0.15 1
1 -0.14 1
1 -0.13 1
1 -0.12 1
1 -0.11 1
1 -0.1 1
1 -0.09 1
1 -0.08 1
1 -0.07 1
1 -0.06 1
1 -0.05 1
1 -0.04 1
1 -0.03 1
1 -0.02 1
1 -0.01 1
1 0 0
1 0.01 0
1 0.02 0
1 0.03 0
1 0.04 0
1 0.05 0
1 0.06 0
1 0.07 0
1 0.08 0
1 0.09 0
1 0.1 0
1 0.11 0
1 0.12 0
1 0.13 0
1 0.14 0
1 0.15 0
1 0.16 0
1 0.17 0
1 0.18 0
1 0.19 0
1 0.2 0
1 0.21 0
1 0.22 0
1 0.23 0
1 0.24 0
1 0.25 0
1 0.26 0
1 0.27 0
1 0.28 0
1 0.29 0
1 0.3 0
1 0.31 0
1 0.32 0
1 0.33 0
1 0.34 0
1 0.35 0
1 0.36 0
1 0.37 0
1 0.38 0
1 0.39 0
1 0.4 0
1 0.41 0
1 0.42 0
1 0.43 0
1 0.44 0
1 0.45 0
1 0.46 0
1 0.47 0
1 0.48 0
1 0.49 0
1 0.5 0
1 0.51 0
1 0.52 0
1 0.53 0
1 0.54 0
1 0.55 0
1 0.56 0
1 0.57 0
1 0.58 0
1 0.59 0
1 0.6 0
1 0.61 0
1 0.62 0
1 0.63 0
1 0.64 0
1 0.65 0
1 0.66 0
1 0.67 0
1 0.68 0
1 0.69 0
1 0.7 0
1 0.71 0
1 0.72 0
1 0.73 0
1 0.74 0
1 0.75 0
1 0.76 0
1 0.77 0
1 0.78 0
1 0.79 0
1 0.8 0
1 0.81 0
1 0.82 0
1 0.83 0
1 0.84 0
1 0.85 0
1 0.86 0
1 0.87 0
1 0.88 0
1 0.89 0
1 0.9 0
1 0.91 0
1 0.92 0
1 0.93 0
1 0.94 0
1 0.95 0
1 0.96 0
1 0.97 0
1 0.98 0
1 0.99 0
1 1 0

Просмотреть файл

@ -0,0 +1,603 @@
|labels 0 1 |features -1 -1
|labels 0 1 |features -1 -0.99
|labels 0 1 |features -1 -0.98
|labels 0 1 |features -1 -0.97
|labels 0 1 |features -1 -0.96
|labels 0 1 |features -1 -0.95
|labels 0 1 |features -1 -0.94
|labels 0 1 |features -1 -0.93
|labels 0 1 |features -1 -0.92
|labels 0 1 |features -1 -0.91
|labels 0 1 |features -1 -0.9
|labels 0 1 |features -1 -0.89
|labels 0 1 |features -1 -0.88
|labels 0 1 |features -1 -0.87
|labels 0 1 |features -1 -0.86
|labels 0 1 |features -1 -0.85
|labels 0 1 |features -1 -0.84
|labels 0 1 |features -1 -0.83
|labels 0 1 |features -1 -0.82
|labels 0 1 |features -1 -0.81
|labels 0 1 |features -1 -0.8
|labels 0 1 |features -1 -0.79
|labels 0 1 |features -1 -0.78
|labels 0 1 |features -1 -0.77
|labels 0 1 |features -1 -0.76
|labels 0 1 |features -1 -0.75
|labels 0 1 |features -1 -0.74
|labels 0 1 |features -1 -0.73
|labels 0 1 |features -1 -0.72
|labels 0 1 |features -1 -0.71
|labels 0 1 |features -1 -0.7
|labels 0 1 |features -1 -0.69
|labels 0 1 |features -1 -0.68
|labels 0 1 |features -1 -0.67
|labels 0 1 |features -1 -0.66
|labels 0 1 |features -1 -0.65
|labels 0 1 |features -1 -0.64
|labels 0 1 |features -1 -0.63
|labels 0 1 |features -1 -0.62
|labels 0 1 |features -1 -0.61
|labels 0 1 |features -1 -0.6
|labels 0 1 |features -1 -0.59
|labels 0 1 |features -1 -0.58
|labels 0 1 |features -1 -0.57
|labels 0 1 |features -1 -0.56
|labels 0 1 |features -1 -0.55
|labels 0 1 |features -1 -0.54
|labels 0 1 |features -1 -0.53
|labels 0 1 |features -1 -0.52
|labels 0 1 |features -1 -0.51
|labels 0 1 |features -1 -0.5
|labels 0 1 |features -1 -0.49
|labels 0 1 |features -1 -0.48
|labels 0 1 |features -1 -0.47
|labels 0 1 |features -1 -0.46
|labels 0 1 |features -1 -0.45
|labels 0 1 |features -1 -0.44
|labels 0 1 |features -1 -0.43
|labels 0 1 |features -1 -0.42
|labels 0 1 |features -1 -0.41
|labels 0 1 |features -1 -0.4
|labels 0 1 |features -1 -0.39
|labels 0 1 |features -1 -0.38
|labels 0 1 |features -1 -0.37
|labels 0 1 |features -1 -0.36
|labels 0 1 |features -1 -0.35
|labels 0 1 |features -1 -0.34
|labels 0 1 |features -1 -0.33
|labels 0 1 |features -1 -0.32
|labels 0 1 |features -1 -0.31
|labels 0 1 |features -1 -0.3
|labels 0 1 |features -1 -0.29
|labels 0 1 |features -1 -0.28
|labels 0 1 |features -1 -0.27
|labels 0 1 |features -1 -0.26
|labels 0 1 |features -1 -0.25
|labels 0 1 |features -1 -0.24
|labels 0 1 |features -1 -0.23
|labels 0 1 |features -1 -0.22
|labels 0 1 |features -1 -0.21
|labels 0 1 |features -1 -0.2
|labels 0 1 |features -1 -0.19
|labels 0 1 |features -1 -0.18
|labels 0 1 |features -1 -0.17
|labels 0 1 |features -1 -0.16
|labels 0 1 |features -1 -0.15
|labels 0 1 |features -1 -0.14
|labels 0 1 |features -1 -0.13
|labels 0 1 |features -1 -0.12
|labels 0 1 |features -1 -0.11
|labels 0 1 |features -1 -0.1
|labels 1 0 |features -1 -0.09
|labels 1 0 |features -1 -0.08
|labels 1 0 |features -1 -0.07
|labels 1 0 |features -1 -0.06
|labels 1 0 |features -1 -0.05
|labels 1 0 |features -1 -0.04
|labels 1 0 |features -1 -0.03
|labels 1 0 |features -1 -0.02
|labels 1 0 |features -1 -0.01
|labels 1 0 |features -1 0
|labels 1 0 |features -1 0.01
|labels 1 0 |features -1 0.02
|labels 1 0 |features -1 0.03
|labels 1 0 |features -1 0.04
|labels 1 0 |features -1 0.05
|labels 1 0 |features -1 0.06
|labels 1 0 |features -1 0.07
|labels 1 0 |features -1 0.08
|labels 1 0 |features -1 0.09
|labels 1 0 |features -1 0.1
|labels 1 0 |features -1 0.11
|labels 1 0 |features -1 0.12
|labels 1 0 |features -1 0.13
|labels 1 0 |features -1 0.14
|labels 1 0 |features -1 0.15
|labels 1 0 |features -1 0.16
|labels 1 0 |features -1 0.17
|labels 1 0 |features -1 0.18
|labels 1 0 |features -1 0.19
|labels 1 0 |features -1 0.2
|labels 1 0 |features -1 0.21
|labels 1 0 |features -1 0.22
|labels 1 0 |features -1 0.23
|labels 1 0 |features -1 0.24
|labels 1 0 |features -1 0.25
|labels 1 0 |features -1 0.26
|labels 1 0 |features -1 0.27
|labels 1 0 |features -1 0.28
|labels 1 0 |features -1 0.29
|labels 1 0 |features -1 0.3
|labels 1 0 |features -1 0.31
|labels 1 0 |features -1 0.32
|labels 1 0 |features -1 0.33
|labels 1 0 |features -1 0.34
|labels 1 0 |features -1 0.35
|labels 1 0 |features -1 0.36
|labels 1 0 |features -1 0.37
|labels 1 0 |features -1 0.38
|labels 1 0 |features -1 0.39
|labels 1 0 |features -1 0.4
|labels 1 0 |features -1 0.41
|labels 1 0 |features -1 0.42
|labels 1 0 |features -1 0.43
|labels 1 0 |features -1 0.44
|labels 1 0 |features -1 0.45
|labels 1 0 |features -1 0.46
|labels 1 0 |features -1 0.47
|labels 1 0 |features -1 0.48
|labels 1 0 |features -1 0.49
|labels 1 0 |features -1 0.5
|labels 1 0 |features -1 0.51
|labels 1 0 |features -1 0.52
|labels 1 0 |features -1 0.53
|labels 1 0 |features -1 0.54
|labels 1 0 |features -1 0.55
|labels 1 0 |features -1 0.56
|labels 1 0 |features -1 0.57
|labels 1 0 |features -1 0.58
|labels 1 0 |features -1 0.59
|labels 1 0 |features -1 0.6
|labels 1 0 |features -1 0.61
|labels 1 0 |features -1 0.62
|labels 1 0 |features -1 0.63
|labels 1 0 |features -1 0.64
|labels 1 0 |features -1 0.65
|labels 1 0 |features -1 0.66
|labels 1 0 |features -1 0.67
|labels 1 0 |features -1 0.68
|labels 1 0 |features -1 0.69
|labels 1 0 |features -1 0.7
|labels 1 0 |features -1 0.71
|labels 1 0 |features -1 0.72
|labels 1 0 |features -1 0.73
|labels 1 0 |features -1 0.74
|labels 1 0 |features -1 0.75
|labels 1 0 |features -1 0.76
|labels 1 0 |features -1 0.77
|labels 1 0 |features -1 0.78
|labels 1 0 |features -1 0.79
|labels 1 0 |features -1 0.8
|labels 1 0 |features -1 0.81
|labels 1 0 |features -1 0.82
|labels 1 0 |features -1 0.83
|labels 1 0 |features -1 0.84
|labels 1 0 |features -1 0.85
|labels 1 0 |features -1 0.86
|labels 1 0 |features -1 0.87
|labels 1 0 |features -1 0.88
|labels 1 0 |features -1 0.89
|labels 1 0 |features -1 0.9
|labels 1 0 |features -1 0.91
|labels 1 0 |features -1 0.92
|labels 1 0 |features -1 0.93
|labels 1 0 |features -1 0.94
|labels 1 0 |features -1 0.95
|labels 1 0 |features -1 0.96
|labels 1 0 |features -1 0.97
|labels 1 0 |features -1 0.98
|labels 1 0 |features -1 0.99
|labels 1 0 |features -1 0
|labels 0 1 |features 0 -1
|labels 0 1 |features 0 -0.99
|labels 0 1 |features 0 -0.98
|labels 0 1 |features 0 -0.97
|labels 0 1 |features 0 -0.96
|labels 0 1 |features 0 -0.95
|labels 0 1 |features 0 -0.94
|labels 0 1 |features 0 -0.93
|labels 0 1 |features 0 -0.92
|labels 0 1 |features 0 -0.91
|labels 0 1 |features 0 -0.9
|labels 0 1 |features 0 -0.89
|labels 0 1 |features 0 -0.88
|labels 0 1 |features 0 -0.87
|labels 0 1 |features 0 -0.86
|labels 0 1 |features 0 -0.85
|labels 0 1 |features 0 -0.84
|labels 0 1 |features 0 -0.83
|labels 0 1 |features 0 -0.82
|labels 0 1 |features 0 -0.81
|labels 0 1 |features 0 -0.8
|labels 0 1 |features 0 -0.79
|labels 0 1 |features 0 -0.78
|labels 0 1 |features 0 -0.77
|labels 0 1 |features 0 -0.76
|labels 0 1 |features 0 -0.75
|labels 0 1 |features 0 -0.74
|labels 0 1 |features 0 -0.73
|labels 0 1 |features 0 -0.72
|labels 0 1 |features 0 -0.71
|labels 0 1 |features 0 -0.7
|labels 0 1 |features 0 -0.69
|labels 0 1 |features 0 -0.68
|labels 0 1 |features 0 -0.67
|labels 0 1 |features 0 -0.66
|labels 0 1 |features 0 -0.65
|labels 0 1 |features 0 -0.64
|labels 0 1 |features 0 -0.63
|labels 0 1 |features 0 -0.62
|labels 0 1 |features 0 -0.61
|labels 0 1 |features 0 -0.6
|labels 0 1 |features 0 -0.59
|labels 0 1 |features 0 -0.58
|labels 0 1 |features 0 -0.57
|labels 0 1 |features 0 -0.56
|labels 0 1 |features 0 -0.55
|labels 0 1 |features 0 -0.54
|labels 0 1 |features 0 -0.53
|labels 0 1 |features 0 -0.52
|labels 0 1 |features 0 -0.51
|labels 0 1 |features 0 -0.5
|labels 0 1 |features 0 -0.49
|labels 0 1 |features 0 -0.48
|labels 0 1 |features 0 -0.47
|labels 0 1 |features 0 -0.46
|labels 0 1 |features 0 -0.45
|labels 0 1 |features 0 -0.44
|labels 0 1 |features 0 -0.43
|labels 0 1 |features 0 -0.42
|labels 0 1 |features 0 -0.41
|labels 0 1 |features 0 -0.4
|labels 0 1 |features 0 -0.39
|labels 0 1 |features 0 -0.38
|labels 0 1 |features 0 -0.37
|labels 0 1 |features 0 -0.36
|labels 0 1 |features 0 -0.35
|labels 0 1 |features 0 -0.34
|labels 0 1 |features 0 -0.33
|labels 0 1 |features 0 -0.32
|labels 0 1 |features 0 -0.31
|labels 0 1 |features 0 -0.3
|labels 0 1 |features 0 -0.29
|labels 0 1 |features 0 -0.28
|labels 0 1 |features 0 -0.27
|labels 0 1 |features 0 -0.26
|labels 0 1 |features 0 -0.25
|labels 0 1 |features 0 -0.24
|labels 0 1 |features 0 -0.23
|labels 0 1 |features 0 -0.22
|labels 0 1 |features 0 -0.21
|labels 0 1 |features 0 -0.2
|labels 0 1 |features 0 -0.19
|labels 0 1 |features 0 -0.18
|labels 0 1 |features 0 -0.17
|labels 0 1 |features 0 -0.16
|labels 0 1 |features 0 -0.15
|labels 0 1 |features 0 -0.14
|labels 0 1 |features 0 -0.13
|labels 0 1 |features 0 -0.12
|labels 0 1 |features 0 -0.11
|labels 0 1 |features 0 -0.1
|labels 1 0 |features 0 -0.09
|labels 1 0 |features 0 -0.08
|labels 1 0 |features 0 -0.07
|labels 1 0 |features 0 -0.06
|labels 1 0 |features 0 -0.05
|labels 1 0 |features 0 -0.04
|labels 1 0 |features 0 -0.03
|labels 1 0 |features 0 -0.02
|labels 1 0 |features 0 -0.01
|labels 1 0 |features 0 0
|labels 1 0 |features 0 0.01
|labels 1 0 |features 0 0.02
|labels 1 0 |features 0 0.03
|labels 1 0 |features 0 0.04
|labels 1 0 |features 0 0.05
|labels 1 0 |features 0 0.06
|labels 1 0 |features 0 0.07
|labels 1 0 |features 0 0.08
|labels 1 0 |features 0 0.09
|labels 1 0 |features 0 0.1
|labels 1 0 |features 0 0.11
|labels 1 0 |features 0 0.12
|labels 1 0 |features 0 0.13
|labels 1 0 |features 0 0.14
|labels 1 0 |features 0 0.15
|labels 1 0 |features 0 0.16
|labels 1 0 |features 0 0.17
|labels 1 0 |features 0 0.18
|labels 1 0 |features 0 0.19
|labels 1 0 |features 0 0.2
|labels 1 0 |features 0 0.21
|labels 1 0 |features 0 0.22
|labels 1 0 |features 0 0.23
|labels 1 0 |features 0 0.24
|labels 1 0 |features 0 0.25
|labels 1 0 |features 0 0.26
|labels 1 0 |features 0 0.27
|labels 1 0 |features 0 0.28
|labels 1 0 |features 0 0.29
|labels 1 0 |features 0 0.3
|labels 1 0 |features 0 0.31
|labels 1 0 |features 0 0.32
|labels 1 0 |features 0 0.33
|labels 1 0 |features 0 0.34
|labels 1 0 |features 0 0.35
|labels 1 0 |features 0 0.36
|labels 1 0 |features 0 0.37
|labels 1 0 |features 0 0.38
|labels 1 0 |features 0 0.39
|labels 1 0 |features 0 0.4
|labels 1 0 |features 0 0.41
|labels 1 0 |features 0 0.42
|labels 1 0 |features 0 0.43
|labels 1 0 |features 0 0.44
|labels 1 0 |features 0 0.45
|labels 1 0 |features 0 0.46
|labels 1 0 |features 0 0.47
|labels 1 0 |features 0 0.48
|labels 1 0 |features 0 0.49
|labels 1 0 |features 0 0.5
|labels 1 0 |features 0 0.51
|labels 1 0 |features 0 0.52
|labels 1 0 |features 0 0.53
|labels 1 0 |features 0 0.54
|labels 1 0 |features 0 0.55
|labels 1 0 |features 0 0.56
|labels 1 0 |features 0 0.57
|labels 1 0 |features 0 0.58
|labels 1 0 |features 0 0.59
|labels 1 0 |features 0 0.6
|labels 1 0 |features 0 0.61
|labels 1 0 |features 0 0.62
|labels 1 0 |features 0 0.63
|labels 1 0 |features 0 0.64
|labels 1 0 |features 0 0.65
|labels 1 0 |features 0 0.66
|labels 1 0 |features 0 0.67
|labels 1 0 |features 0 0.68
|labels 1 0 |features 0 0.69
|labels 1 0 |features 0 0.7
|labels 1 0 |features 0 0.71
|labels 1 0 |features 0 0.72
|labels 1 0 |features 0 0.73
|labels 1 0 |features 0 0.74
|labels 1 0 |features 0 0.75
|labels 1 0 |features 0 0.76
|labels 1 0 |features 0 0.77
|labels 1 0 |features 0 0.78
|labels 1 0 |features 0 0.79
|labels 1 0 |features 0 0.8
|labels 1 0 |features 0 0.81
|labels 1 0 |features 0 0.82
|labels 1 0 |features 0 0.83
|labels 1 0 |features 0 0.84
|labels 1 0 |features 0 0.85
|labels 1 0 |features 0 0.86
|labels 1 0 |features 0 0.87
|labels 1 0 |features 0 0.88
|labels 1 0 |features 0 0.89
|labels 1 0 |features 0 0.9
|labels 1 0 |features 0 0.91
|labels 1 0 |features 0 0.92
|labels 1 0 |features 0 0.93
|labels 1 0 |features 0 0.94
|labels 1 0 |features 0 0.95
|labels 1 0 |features 0 0.96
|labels 1 0 |features 0 0.97
|labels 1 0 |features 0 0.98
|labels 1 0 |features 0 0.99
|labels 1 0 |features 0 1
|labels 0 1 |features 1 -1
|labels 0 1 |features 1 -0.99
|labels 0 1 |features 1 -0.98
|labels 0 1 |features 1 -0.97
|labels 0 1 |features 1 -0.96
|labels 0 1 |features 1 -0.95
|labels 0 1 |features 1 -0.94
|labels 0 1 |features 1 -0.93
|labels 0 1 |features 1 -0.92
|labels 0 1 |features 1 -0.91
|labels 0 1 |features 1 -0.9
|labels 0 1 |features 1 -0.89
|labels 0 1 |features 1 -0.88
|labels 0 1 |features 1 -0.87
|labels 0 1 |features 1 -0.86
|labels 0 1 |features 1 -0.85
|labels 0 1 |features 1 -0.84
|labels 0 1 |features 1 -0.83
|labels 0 1 |features 1 -0.82
|labels 0 1 |features 1 -0.81
|labels 0 1 |features 1 -0.8
|labels 0 1 |features 1 -0.79
|labels 0 1 |features 1 -0.78
|labels 0 1 |features 1 -0.77
|labels 0 1 |features 1 -0.76
|labels 0 1 |features 1 -0.75
|labels 0 1 |features 1 -0.74
|labels 0 1 |features 1 -0.73
|labels 0 1 |features 1 -0.72
|labels 0 1 |features 1 -0.71
|labels 0 1 |features 1 -0.7
|labels 0 1 |features 1 -0.69
|labels 0 1 |features 1 -0.68
|labels 0 1 |features 1 -0.67
|labels 0 1 |features 1 -0.66
|labels 0 1 |features 1 -0.65
|labels 0 1 |features 1 -0.64
|labels 0 1 |features 1 -0.63
|labels 0 1 |features 1 -0.62
|labels 0 1 |features 1 -0.61
|labels 0 1 |features 1 -0.6
|labels 0 1 |features 1 -0.59
|labels 0 1 |features 1 -0.58
|labels 0 1 |features 1 -0.57
|labels 0 1 |features 1 -0.56
|labels 0 1 |features 1 -0.55
|labels 0 1 |features 1 -0.54
|labels 0 1 |features 1 -0.53
|labels 0 1 |features 1 -0.52
|labels 0 1 |features 1 -0.51
|labels 0 1 |features 1 -0.5
|labels 0 1 |features 1 -0.49
|labels 0 1 |features 1 -0.48
|labels 0 1 |features 1 -0.47
|labels 0 1 |features 1 -0.46
|labels 0 1 |features 1 -0.45
|labels 0 1 |features 1 -0.44
|labels 0 1 |features 1 -0.43
|labels 0 1 |features 1 -0.42
|labels 0 1 |features 1 -0.41
|labels 0 1 |features 1 -0.4
|labels 0 1 |features 1 -0.39
|labels 0 1 |features 1 -0.38
|labels 0 1 |features 1 -0.37
|labels 0 1 |features 1 -0.36
|labels 0 1 |features 1 -0.35
|labels 0 1 |features 1 -0.34
|labels 0 1 |features 1 -0.33
|labels 0 1 |features 1 -0.32
|labels 0 1 |features 1 -0.31
|labels 0 1 |features 1 -0.3
|labels 0 1 |features 1 -0.29
|labels 0 1 |features 1 -0.28
|labels 0 1 |features 1 -0.27
|labels 0 1 |features 1 -0.26
|labels 0 1 |features 1 -0.25
|labels 0 1 |features 1 -0.24
|labels 0 1 |features 1 -0.23
|labels 0 1 |features 1 -0.22
|labels 0 1 |features 1 -0.21
|labels 0 1 |features 1 -0.2
|labels 0 1 |features 1 -0.19
|labels 0 1 |features 1 -0.18
|labels 0 1 |features 1 -0.17
|labels 0 1 |features 1 -0.16
|labels 0 1 |features 1 -0.15
|labels 0 1 |features 1 -0.14
|labels 0 1 |features 1 -0.13
|labels 0 1 |features 1 -0.12
|labels 0 1 |features 1 -0.11
|labels 0 1 |features 1 -0.1
|labels 0 1 |features 1 -0.09
|labels 0 1 |features 1 -0.08
|labels 0 1 |features 1 -0.07
|labels 0 1 |features 1 -0.06
|labels 0 1 |features 1 -0.05
|labels 0 1 |features 1 -0.04
|labels 0 1 |features 1 -0.03
|labels 0 1 |features 1 -0.02
|labels 0 1 |features 1 -0.01
|labels 1 0 |features 1 0
|labels 1 0 |features 1 0.01
|labels 1 0 |features 1 0.02
|labels 1 0 |features 1 0.03
|labels 1 0 |features 1 0.04
|labels 1 0 |features 1 0.05
|labels 1 0 |features 1 0.06
|labels 1 0 |features 1 0.07
|labels 1 0 |features 1 0.08
|labels 1 0 |features 1 0.09
|labels 1 0 |features 1 0.1
|labels 1 0 |features 1 0.11
|labels 1 0 |features 1 0.12
|labels 1 0 |features 1 0.13
|labels 1 0 |features 1 0.14
|labels 1 0 |features 1 0.15
|labels 1 0 |features 1 0.16
|labels 1 0 |features 1 0.17
|labels 1 0 |features 1 0.18
|labels 1 0 |features 1 0.19
|labels 1 0 |features 1 0.2
|labels 1 0 |features 1 0.21
|labels 1 0 |features 1 0.22
|labels 1 0 |features 1 0.23
|labels 1 0 |features 1 0.24
|labels 1 0 |features 1 0.25
|labels 1 0 |features 1 0.26
|labels 1 0 |features 1 0.27
|labels 1 0 |features 1 0.28
|labels 1 0 |features 1 0.29
|labels 1 0 |features 1 0.3
|labels 1 0 |features 1 0.31
|labels 1 0 |features 1 0.32
|labels 1 0 |features 1 0.33
|labels 1 0 |features 1 0.34
|labels 1 0 |features 1 0.35
|labels 1 0 |features 1 0.36
|labels 1 0 |features 1 0.37
|labels 1 0 |features 1 0.38
|labels 1 0 |features 1 0.39
|labels 1 0 |features 1 0.4
|labels 1 0 |features 1 0.41
|labels 1 0 |features 1 0.42
|labels 1 0 |features 1 0.43
|labels 1 0 |features 1 0.44
|labels 1 0 |features 1 0.45
|labels 1 0 |features 1 0.46
|labels 1 0 |features 1 0.47
|labels 1 0 |features 1 0.48
|labels 1 0 |features 1 0.49
|labels 1 0 |features 1 0.5
|labels 1 0 |features 1 0.51
|labels 1 0 |features 1 0.52
|labels 1 0 |features 1 0.53
|labels 1 0 |features 1 0.54
|labels 1 0 |features 1 0.55
|labels 1 0 |features 1 0.56
|labels 1 0 |features 1 0.57
|labels 1 0 |features 1 0.58
|labels 1 0 |features 1 0.59
|labels 1 0 |features 1 0.6
|labels 1 0 |features 1 0.61
|labels 1 0 |features 1 0.62
|labels 1 0 |features 1 0.63
|labels 1 0 |features 1 0.64
|labels 1 0 |features 1 0.65
|labels 1 0 |features 1 0.66
|labels 1 0 |features 1 0.67
|labels 1 0 |features 1 0.68
|labels 1 0 |features 1 0.69
|labels 1 0 |features 1 0.7
|labels 1 0 |features 1 0.71
|labels 1 0 |features 1 0.72
|labels 1 0 |features 1 0.73
|labels 1 0 |features 1 0.74
|labels 1 0 |features 1 0.75
|labels 1 0 |features 1 0.76
|labels 1 0 |features 1 0.77
|labels 1 0 |features 1 0.78
|labels 1 0 |features 1 0.79
|labels 1 0 |features 1 0.8
|labels 1 0 |features 1 0.81
|labels 1 0 |features 1 0.82
|labels 1 0 |features 1 0.83
|labels 1 0 |features 1 0.84
|labels 1 0 |features 1 0.85
|labels 1 0 |features 1 0.86
|labels 1 0 |features 1 0.87
|labels 1 0 |features 1 0.88
|labels 1 0 |features 1 0.89
|labels 1 0 |features 1 0.9
|labels 1 0 |features 1 0.91
|labels 1 0 |features 1 0.92
|labels 1 0 |features 1 0.93
|labels 1 0 |features 1 0.94
|labels 1 0 |features 1 0.95
|labels 1 0 |features 1 0.96
|labels 1 0 |features 1 0.97
|labels 1 0 |features 1 0.98
|labels 1 0 |features 1 0.99
|labels 1 0 |features 1 1

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,6 +1,9 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# TODO: The new CMUDict data do not match Kaisheng Yao's paper setup (e.g. IH/AH).
# Track down & reconcile before committing this to master.
############################################################################
# G2P.cntk #
# #
@ -10,81 +13,85 @@
# directory defaults (if not overridden)
RunRootDir = "../.." # default if not overridden
RunRootDir = "../.." # default if not overridden
DataDir = "$RunRootDir$/Data"
CacheDir = "$DataDir$/cache" # (not used currently)
ExpRootDir = "$RunRootDir$"
OutDir = "$RunRootDir$/Out"
# command to execute
command = train
#command = write
#command = dump
makeMode = false
makeMode = false # set this to true to enable restarting fr0m checkpoint
traceLevel = 1
# experiment id
deviceId = 0 # set the GPU device here, or "auto" to auto-select; or override from the command line.
ExpId = g2p-1-$deviceId$ # choose a meaningful id here. This is used for unique directory and filenames.
#ExpId = g2p-1-0 # change to different id when decoding a different model
# directories
ExpDir = "$ExpRootDir$/$ExpId$"
ModelDir = "$ExpDir$/Models"
deviceId = 0 # set the GPU device here, or "auto" to auto-select; or override from the command line.
ExpId = g2p-01-$deviceId$ # choose a meaningful id here. This is used for unique directory and filenames.
#ExpId = g2p-01-0 # change to different id when decoding a different model
stderr = $ExpDir$/G2P
# model
precision = "float"
traceLevel = 1
modelPath = "$ModelDir$/G2P.dnn"
modelPath = "$OutDir$/$ExpId$/G2P.dnn"
stderr = "$OutDir$/$ExpId$/G2P"
# decoding config --used by the "write" command ("write" decodes and writes the result)
beamDepth = 3 # 0=predict; 1=greedy; >1=beam
decodeModel = 9
decodeModelPath = "$modelPath$.$decodeModel$" # note: epoch to decode is appended to the model path
decodeOutputPath = "$decodeModelPath$.$beamDepth$" # results are written next to the model, with beamDepth appended
# dump config --used by the "dump" command, for inspecting the model parameters
dumpModelPath = "$modelPath$.2" # put the epoch id here
# top-level model configuration
hiddenDim = 512
precision = "float"
maxLayer = 2
isBidirectional = false
# comment/uncomment this or the next block to switch between readers
# Note: Currently this configuration cannot reach the same result with CNTKTextFormatReader.
# This is being investigated. For now, please use the LMSequenceReader.
# --- begin uncomment for LMSequenceReader ---
readerType = "LMSequenceReader"
useCNTKTextFormatReader = false
inputVocabSize = 69
labelVocabSize = 69
mbSizes = 144:144:288*5:576
shareEmbeddings = true
fileExt = "joint"
fileExt = "txt"
# --- end uncomment ---
# --- begin uncomment for CNTKTextFormatReader ---
# Note: Currently this configuration cannot reach the same result with CNTKTextFormatReader.
# This is being investigated. For now, please use the LMSequenceReader.
#readerType = "CNTKTextFormatReader"
#useCNTKTextFormatReader = true
#inputVocabSize = 29 # 26 letters plus start, end, apostrophe
#labelVocabSize = 41 # 39 phonemes (~AX missing), plus start and end symbol (in index 0)
#mbSizes = 72:72:144:288 # new reader is based on max(stream lengths) instead of sum(stream lengths)
#shareEmbeddings = false
#fileExt = "ctf"
#fileExt = "bsf.ctf"
# --- end uncomment ---
# corpus
maxLength = 20 # 0 disables attention
isAutoEncoder=false
startSymbol = "<s>"
trainFile = "g014b2b.train-dev-20-21.bsf.$fileExt$"
validFile = "g014b2b.train-dev-1-21.bsf.$fileExt$"
testFile = "g014b2b.test.bsf.$fileExt$"
vocabFile = "g014b2b.wl"
startSymbol = "<s>" # (need to override the default which is </s>)
trainFile = "cmudict-0.7b.train-dev-20-21.$fileExt$"
validFile = "cmudict-0.7b.train-dev-1-21.$fileExt$"
testFile = "cmudict-0.7b.test.$fileExt$"
mappingFile = "cmudict-0.7b.mapping"
# some reader variables that occur multiple times
cntkReaderInputDef = [ rawInput = [ alias = "s" ; dim = $inputVocabSize$ ; format = "sparse" ] ; rawLabels = [ alias = "t" ; dim = $labelVocabSize$ ; format = "sparse" ] ]
lmSequenceReaderInputDef = [ dim = 0 ]
lmSequenceReaderInputLabelsDef = [ dim = 1 ; labelType = "category" ; labelDim = "$inputVocabSize$" ; labelMappingFile = "$DataDir$/$vocabFile$" ; beginSequence = "</s>" ; endSequence = "</s>" ]
cntkReaderInputDef = [ rawInput = [ alias = "s" ; dim = $inputVocabSize$ ; format = "sparse" ] ; rawLabels = [ alias = "t" ; dim = $labelVocabSize$ ; format = "sparse" ] ]
lmSequenceReaderInputDef = [ dim = 0 ]
lmSequenceReaderInputLabelsDef = [ dim = 1 ; labelType = "category" ; labelDim = "$inputVocabSize$" ; labelMappingFile = "$DataDir$/$mappingFile$" ; beginSequence = "</s>" ; endSequence = "</s>" ]
#######################################
# network definition #
@ -98,7 +105,7 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
inputVocabDim = $inputVocabSize$
labelVocabDim = $labelVocabSize$
isAutoencoder = $isAutoEncoder$ # input is only one sequence, meant to reproduce itself
isAutoencoder = false # input is only one sequence, meant to reproduce itself (not used for this task)
attentionSpan = $maxLength$ # attention window, must be large enough for largest input sequence. 0 to disable. Exactly 20 is needed for the g2p CMUDict task
useBidirectionalEncoder = $isBidirectional$ # bi-directional LSTM for encoder
@ -161,9 +168,10 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
isFirstLabel = BS.Loop.IsFirst (labelSequence)
#############################################################
# embeddings --as long as we cannot read multiple sequences, we got one embedding
# embeddings
#############################################################
# Note: when reading input and labels from a single text file, we share the token mapping and embedding.
# Note: Embeddings are linear. Should we use BatchNormalization?
# note: this is assumed to be applied transposed, hence the swapped dimensions. Actually--why? Still needed?
@ -183,24 +191,20 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# encoder (processes inputEmbedded)
#############################################################
# TODO: do not reverse our inputs; instead, if needed, use a backwards-running loop here
# Note: We reverse our input by running the recurrence from right to left.
encoderFunction = if useBidirectionalEncoder then BS.RNNs.RecurrentBirectionalLSTMPStack else BS.RNNs.RecurrentLSTMPStack
encoder = encoderFunction (encoderDims, cellDims=encoderDims, S(inputEmbedded), inputDim=inputEmbeddingDim,
previousHook=BS.RNNs.PreviousHC,
previousHook=if useBidirectionalEncoder then BS.RNNs.PreviousHC else BS.RNNs.NextHC,
enableSelfStabilization=useStabilizer)
encoderOutput = encoder[Length (encoderDims)-1]
# There are three ways of passing encoder state:
# 1. as initial state for decoder (Google style)
# 2. as side information for every decoder step (NYU style)
# 3. attention
# get the final encoder state for use as the initial state
# get the final encoder state for use as the initial state (not used with attention model)
# Since we run right-to-left, the final state is the first, not the last.
# For beam decoding, we will also inject a second dimension.
thoughtVector = [
h = ReshapeDimension (BS.Sequences.Last (encoderOutput.h), 1, (dim:1))
c = ReshapeDimension (BS.Sequences.Last (encoderOutput.c), 1, (dim:1))
h = ReshapeDimension (BS.Sequences.First (encoderOutput.h), 1, (dim:1))
c = ReshapeDimension (BS.Sequences.First (encoderOutput.c), 1, (dim:1))
dim = encoderOutput.dim
]
@ -253,6 +257,11 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# decoder
#############################################################
# There are three ways of passing encoder state:
# 1. as initial state for decoder (Google style)
# 2. as side information for every decoder step (NYU style)
# 3. attention
decoderInput = Pass (BS.Boolean.If (isFirstLabel, labelSentenceStartEmbeddedScattered, BS.Loop.Previous (decoderHistoryHook)))
decoderInputDim = labelEmbeddingDim
@ -304,12 +313,6 @@ BrainScriptNetworkBuilder = (new ComputationNetwork [
# training criteria
#############################################################
#ce = Pass (ReduceLogSum (z) - ReduceSum (labelSequence .* z ), tag='criterion')
#errs = Pass (BS.Constants.One - ReduceSum (labelSequence .* Hardmax (z)), tag='evaluation')
#ce2 = Negate (ReduceSum (labelSequence .* LogSoftmax (z)), tag='evaluation')
#ce1 = CrossEntropyWithSoftmax (labelSequence, z, tag='evaluation') // this is the training objective
#errs = ErrorPrediction (labelSequence, z, tag='evaluation') // this also gets tracked
ce = Pass (ReduceLogSum (z) - TransposeTimes (labelSequence, z), tag='criterion')
errs = Pass (BS.Constants.One - TransposeTimes (labelSequence, Hardmax (z)), tag='evaluation')
@ -340,22 +343,17 @@ train = [
# BrainScriptNetworkBuilder is defined in outer scope
SGD = [
minibatchSize = 144:144:288:576
learningRatesPerSample = 0.007*2:0.0035
minibatchSize = $mbSizes$
learningRatesPerSample = 0.007*2:0.0035 # works well for LMSequenceReader config
momentumAsTimeConstant = 1100
gradientClippingWithTruncation = true # (as opposed to clipping the Frobenius norm of the matrix)
clippingThresholdPerSample = 2.3 # visibly impacts objectives, but not final result, so keep it for safety
maxEpochs = 50
numMBsToShowResult = 100
firstMBsToShowResult = 10
gradUpdateType = "none" # FSAdaGrad?
gradUpdateType = "none" # TODO: Try FSAdaGrad?
loadBestModel = false # true # broken for some models (rereading overwrites something that got set by validation)
# tracing (enable these for debugging)
#traceNodeNamesReal = labelsEmbedded:decoderInput:"decoder[0].lstmState._privateInnards.ht":z.Plus_left.Times_right.result:z:ce
#traceNodeNamesReal = labelsEmbedded:decoderInput:z:ce
#traceNodeNamesCategory = inputSequence.out:labelSequence
dropoutRate = 0.0
# settings for Auto Adjust Learning Rate
@ -461,7 +459,7 @@ write = [
format = [
type = "category"
transpose = false
labelMappingFile = "$DataDir$/$vocabFile$"
labelMappingFile = "$DataDir$/$mappingFile$"
]
minibatchSize = 8192 # choose this to be big enough for the longest sentence

Просмотреть файл

@ -0,0 +1,5 @@
...document source of corpus and post-processing
http://www.speech.cs.cmu.edu/cgi-bin/cmudict
File: http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b

Просмотреть файл

@ -0,0 +1,19 @@
The contents of this folder is based on or incorporates material from the projects listed below. Microsoft is not the original author of the Third Party Code. The original copyright notice and the license under which Microsoft received such Third Party Code, are set forth below. Such licenses and notices are provided for informational purposes only. Microsoft, not the third party, licenses the Third Party Code to you under the terms set forth in the EULA for the Microsoft Product. Microsoft reserves all rights not expressly granted under this agreement, whether by implication, estoppel or otherwise.
Provided for Informational Purposes Only
Carnegie Mellon University Pronouncing Dictionary
Copyright (C) 1993-2015 Carnegie Mellon University. All rights reserved.
BSD License
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ""AS IS"" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,69 @@
'
</s>
<s/>
<s>
A
B
C
D
E
F
G
H
I
J
K
L
M
N
O
P
Q
R
S
T
U
V
W
X
Y
Z
~AA
~AE
~AH
~AO
~AW
~AY
~B
~CH
~D
~DH
~EH
~ER
~EY
~F
~G
~HH
~IH
~IY
~JH
~K
~L
~M
~N
~NG
~OW
~OY
~P
~R
~S
~SH
~T
~TH
~UH
~UW
~V
~W
~Y
~Z
~ZH

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,23 +0,0 @@
This example demonstrates the use of CNTK for letter-to-sound conversion using a
sequence-to-sequence model with attention.
The code supports a number of alternative configurations. As configured currently, it implements
* a 3-hidden layer unidirectional LSTM encoder network, all hidden dimensions are 512
* a 3-hidden layer unidirectional LSTM decoder network, all hidden dimensions are 512
* encoder state is passed to the decoder by means of attention, with projection dimension 128 and maximum input length of 20 tokens
* embedding disabled (the vocabulary is very small)
* beam decoder with beam width 3
This example uses the CMUDict as a corpus. The data or a conversion script will be included soon.
To Use:
=======
Modify the following in G2P.cntk:
* pathnames
* deviceId to specify CPU (-1) or GPU (>=0 or "auto")
Run:
* command line: cntk configFile=Examples/SequenceToSequence/Miscellaneous/G2P/G2P.cntk RunRootDir=g2p
* VS Debugger: configFile=$(SolutionDir)Examples/SequenceToSequence/Miscellaneous/G2P/G2P.cntk RunRootDir=$(SolutionDir)g2p

Просмотреть файл

@ -0,0 +1,20 @@
This example demonstrates the use of CNTK for grapheme-to-phoneme (letter-to-sound)
conversion using a sequence-to-sequence model with attention, using the CMUDict dictionary.
The code supports a number of alternative configurations. As configured currently, it implements
* a 3-hidden layer unidirectional LSTM encoder network, all hidden dimensions are 512
* a 3-hidden layer unidirectional LSTM decoder network, all hidden dimensions are 512
* encoder state is passed to the decoder by means of attention, with projection dimension 128 and maximum input length of 20 tokens
* embedding is disabled (because the 'vocabulary' of the task, letters and phonemes, is very small)
* beam decoder with beam width 3
## To Use
Modify the following in G2P.cntk as needed:
* pathnames
* deviceId to specify CPU (-1) or GPU (>=0 or "auto")
Run:
* command line: ``` cntk configFile=Examples/SequenceToSequence/CMUDict/Config/G2P.cntk RunRootDir=g2p```
* VS Debugger: ```configFile=$(SolutionDir)Examples/SequenceToSequence/CMUDict/Config/G2P.cntk RunRootDir=$(SolutionDir)Examples/SequenceToSequence/CMUDict```

Просмотреть файл

@ -54,18 +54,17 @@ TIMIT_TrainMultiInput=[
readerType=HTKMLFReader
readMethod=blockRandomize
# frameMode=true
readMethod=rollingWindow
miniBatchMode=Partial
randomize=Auto
verbosity=0
features1=[
dim=792
scpFile=$ScpDir$/TIMIT.train.scp.fbank.fullpath
scpFile=$ScpDir$/TIMIT.train.scp.fbank.fullpath.rnn
type=Real
]
features2=[
dim=39
scpFile=$ScpDir$/TIMIT.train.scp.mfcc.fullpath
scpFile=$ScpDir$/TIMIT.train.scp.mfcc.fullpath.rnn
type=Real
]
labels=[
@ -75,4 +74,4 @@ TIMIT_TrainMultiInput=[
labelType=Category
]
]
]
]

Просмотреть файл

@ -0,0 +1,192 @@
test-dr1-felc0-si1386.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si1386.fbank_zda[0,549]
test-dr1-felc0-si2016.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si2016.fbank_zda[0,337]
test-dr1-felc0-si756.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si756.fbank_zda[0,416]
test-dr1-felc0-sx126.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx126.fbank_zda[0,288]
test-dr1-felc0-sx216.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx216.fbank_zda[0,217]
test-dr1-felc0-sx306.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx306.fbank_zda[0,247]
test-dr1-felc0-sx36.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx36.fbank_zda[0,349]
test-dr1-felc0-sx396.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx396.fbank_zda[0,379]
test-dr1-mdab0-si1039.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1039.fbank_zda[0,391]
test-dr1-mdab0-si1669.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1669.fbank_zda[0,203]
test-dr1-mdab0-si2299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si2299.fbank_zda[0,257]
test-dr1-mdab0-sx139.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx139.fbank_zda[0,233]
test-dr1-mdab0-sx229.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx229.fbank_zda[0,128]
test-dr1-mdab0-sx319.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx319.fbank_zda[0,241]
test-dr1-mdab0-sx409.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx409.fbank_zda[0,285]
test-dr1-mdab0-sx49.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx49.fbank_zda[0,217]
test-dr1-mwbt0-si1553.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si1553.fbank_zda[0,473]
test-dr1-mwbt0-si2183.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si2183.fbank_zda[0,186]
test-dr1-mwbt0-si923.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si923.fbank_zda[0,293]
test-dr1-mwbt0-sx113.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx113.fbank_zda[0,330]
test-dr1-mwbt0-sx203.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx203.fbank_zda[0,310]
test-dr1-mwbt0-sx23.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx23.fbank_zda[0,314]
test-dr1-mwbt0-sx293.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx293.fbank_zda[0,359]
test-dr1-mwbt0-sx383.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx383.fbank_zda[0,387]
test-dr2-fpas0-si1272.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si1272.fbank_zda[0,183]
test-dr2-fpas0-si2204.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si2204.fbank_zda[0,383]
test-dr2-fpas0-si944.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-si944.fbank_zda[0,233]
test-dr2-fpas0-sx134.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx134.fbank_zda[0,291]
test-dr2-fpas0-sx224.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx224.fbank_zda[0,158]
test-dr2-fpas0-sx314.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx314.fbank_zda[0,291]
test-dr2-fpas0-sx404.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx404.fbank_zda[0,275]
test-dr2-fpas0-sx44.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/fpas0/test-dr2-fpas0-sx44.fbank_zda[0,222]
test-dr2-mtas1-si1473.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si1473.fbank_zda[0,144]
test-dr2-mtas1-si2098.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si2098.fbank_zda[0,315]
test-dr2-mtas1-si838.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-si838.fbank_zda[0,262]
test-dr2-mtas1-sx118.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx118.fbank_zda[0,279]
test-dr2-mtas1-sx208.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx208.fbank_zda[0,348]
test-dr2-mtas1-sx28.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx28.fbank_zda[0,245]
test-dr2-mtas1-sx298.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx298.fbank_zda[0,315]
test-dr2-mtas1-sx388.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mtas1/test-dr2-mtas1-sx388.fbank_zda[0,401]
test-dr2-mwew0-si1361.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si1361.fbank_zda[0,252]
test-dr2-mwew0-si1991.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si1991.fbank_zda[0,400]
test-dr2-mwew0-si731.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-si731.fbank_zda[0,197]
test-dr2-mwew0-sx101.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx101.fbank_zda[0,391]
test-dr2-mwew0-sx11.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx11.fbank_zda[0,165]
test-dr2-mwew0-sx191.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx191.fbank_zda[0,250]
test-dr2-mwew0-sx281.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx281.fbank_zda[0,331]
test-dr2-mwew0-sx371.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr2/mwew0/test-dr2-mwew0-sx371.fbank_zda[0,287]
test-dr3-fpkt0-si1538.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si1538.fbank_zda[0,310]
test-dr3-fpkt0-si2168.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si2168.fbank_zda[0,195]
test-dr3-fpkt0-si908.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-si908.fbank_zda[0,224]
test-dr3-fpkt0-sx188.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx188.fbank_zda[0,219]
test-dr3-fpkt0-sx278.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx278.fbank_zda[0,318]
test-dr3-fpkt0-sx368.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx368.fbank_zda[0,306]
test-dr3-fpkt0-sx8.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx8.fbank_zda[0,279]
test-dr3-fpkt0-sx98.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/fpkt0/test-dr3-fpkt0-sx98.fbank_zda[0,182]
test-dr3-mjmp0-si1535.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si1535.fbank_zda[0,232]
test-dr3-mjmp0-si1791.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si1791.fbank_zda[0,465]
test-dr3-mjmp0-si905.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-si905.fbank_zda[0,266]
test-dr3-mjmp0-sx185.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx185.fbank_zda[0,209]
test-dr3-mjmp0-sx275.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx275.fbank_zda[0,274]
test-dr3-mjmp0-sx365.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx365.fbank_zda[0,386]
test-dr3-mjmp0-sx5.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx5.fbank_zda[0,157]
test-dr3-mjmp0-sx95.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mjmp0/test-dr3-mjmp0-sx95.fbank_zda[0,306]
test-dr3-mlnt0-si1574.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si1574.fbank_zda[0,475]
test-dr3-mlnt0-si1902.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si1902.fbank_zda[0,184]
test-dr3-mlnt0-si642.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-si642.fbank_zda[0,406]
test-dr3-mlnt0-sx102.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx102.fbank_zda[0,364]
test-dr3-mlnt0-sx12.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx12.fbank_zda[0,229]
test-dr3-mlnt0-sx192.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx192.fbank_zda[0,230]
test-dr3-mlnt0-sx282.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx282.fbank_zda[0,316]
test-dr3-mlnt0-sx372.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr3/mlnt0/test-dr3-mlnt0-sx372.fbank_zda[0,308]
test-dr4-fjlm0-si1043.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si1043.fbank_zda[0,339]
test-dr4-fjlm0-si1673.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si1673.fbank_zda[0,313]
test-dr4-fjlm0-si2303.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-si2303.fbank_zda[0,403]
test-dr4-fjlm0-sx143.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx143.fbank_zda[0,337]
test-dr4-fjlm0-sx233.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx233.fbank_zda[0,249]
test-dr4-fjlm0-sx323.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx323.fbank_zda[0,281]
test-dr4-fjlm0-sx413.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx413.fbank_zda[0,337]
test-dr4-fjlm0-sx53.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/fjlm0/test-dr4-fjlm0-sx53.fbank_zda[0,307]
test-dr4-mlll0-si1363.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si1363.fbank_zda[0,496]
test-dr4-mlll0-si1993.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si1993.fbank_zda[0,245]
test-dr4-mlll0-si733.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-si733.fbank_zda[0,432]
test-dr4-mlll0-sx103.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx103.fbank_zda[0,231]
test-dr4-mlll0-sx13.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx13.fbank_zda[0,270]
test-dr4-mlll0-sx193.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx193.fbank_zda[0,346]
test-dr4-mlll0-sx283.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx283.fbank_zda[0,379]
test-dr4-mlll0-sx373.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mlll0/test-dr4-mlll0-sx373.fbank_zda[0,277]
test-dr4-mtls0-si1370.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si1370.fbank_zda[0,407]
test-dr4-mtls0-si2000.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si2000.fbank_zda[0,284]
test-dr4-mtls0-si740.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-si740.fbank_zda[0,215]
test-dr4-mtls0-sx110.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx110.fbank_zda[0,337]
test-dr4-mtls0-sx20.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx20.fbank_zda[0,298]
test-dr4-mtls0-sx200.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx200.fbank_zda[0,251]
test-dr4-mtls0-sx290.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx290.fbank_zda[0,316]
test-dr4-mtls0-sx380.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr4/mtls0/test-dr4-mtls0-sx380.fbank_zda[0,218]
test-dr5-fnlp0-si1308.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si1308.fbank_zda[0,575]
test-dr5-fnlp0-si1938.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si1938.fbank_zda[0,321]
test-dr5-fnlp0-si678.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-si678.fbank_zda[0,202]
test-dr5-fnlp0-sx138.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx138.fbank_zda[0,359]
test-dr5-fnlp0-sx228.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx228.fbank_zda[0,226]
test-dr5-fnlp0-sx318.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx318.fbank_zda[0,370]
test-dr5-fnlp0-sx408.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx408.fbank_zda[0,307]
test-dr5-fnlp0-sx48.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/fnlp0/test-dr5-fnlp0-sx48.fbank_zda[0,347]
test-dr5-mbpm0-si1577.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si1577.fbank_zda[0,194]
test-dr5-mbpm0-si1584.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si1584.fbank_zda[0,377]
test-dr5-mbpm0-si947.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-si947.fbank_zda[0,306]
test-dr5-mbpm0-sx137.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx137.fbank_zda[0,369]
test-dr5-mbpm0-sx227.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx227.fbank_zda[0,174]
test-dr5-mbpm0-sx317.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx317.fbank_zda[0,230]
test-dr5-mbpm0-sx407.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx407.fbank_zda[0,252]
test-dr5-mbpm0-sx47.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mbpm0/test-dr5-mbpm0-sx47.fbank_zda[0,213]
test-dr5-mklt0-si1213.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si1213.fbank_zda[0,245]
test-dr5-mklt0-si1843.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si1843.fbank_zda[0,321]
test-dr5-mklt0-si583.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-si583.fbank_zda[0,225]
test-dr5-mklt0-sx133.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx133.fbank_zda[0,261]
test-dr5-mklt0-sx223.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx223.fbank_zda[0,217]
test-dr5-mklt0-sx313.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx313.fbank_zda[0,319]
test-dr5-mklt0-sx403.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx403.fbank_zda[0,272]
test-dr5-mklt0-sx43.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr5/mklt0/test-dr5-mklt0-sx43.fbank_zda[0,199]
test-dr6-fmgd0-si1564.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si1564.fbank_zda[0,441]
test-dr6-fmgd0-si2194.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si2194.fbank_zda[0,469]
test-dr6-fmgd0-si934.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-si934.fbank_zda[0,345]
test-dr6-fmgd0-sx124.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx124.fbank_zda[0,372]
test-dr6-fmgd0-sx214.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx214.fbank_zda[0,300]
test-dr6-fmgd0-sx304.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx304.fbank_zda[0,277]
test-dr6-fmgd0-sx34.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx34.fbank_zda[0,233]
test-dr6-fmgd0-sx394.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/fmgd0/test-dr6-fmgd0-sx394.fbank_zda[0,233]
test-dr6-mcmj0-si1094.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si1094.fbank_zda[0,373]
test-dr6-mcmj0-si464.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si464.fbank_zda[0,386]
test-dr6-mcmj0-si602.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-si602.fbank_zda[0,386]
test-dr6-mcmj0-sx104.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx104.fbank_zda[0,195]
test-dr6-mcmj0-sx14.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx14.fbank_zda[0,291]
test-dr6-mcmj0-sx194.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx194.fbank_zda[0,192]
test-dr6-mcmj0-sx284.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx284.fbank_zda[0,362]
test-dr6-mcmj0-sx374.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mcmj0/test-dr6-mcmj0-sx374.fbank_zda[0,222]
test-dr6-mjdh0-si1354.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si1354.fbank_zda[0,478]
test-dr6-mjdh0-si1984.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si1984.fbank_zda[0,130]
test-dr6-mjdh0-si724.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-si724.fbank_zda[0,254]
test-dr6-mjdh0-sx184.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx184.fbank_zda[0,211]
test-dr6-mjdh0-sx274.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx274.fbank_zda[0,311]
test-dr6-mjdh0-sx364.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx364.fbank_zda[0,439]
test-dr6-mjdh0-sx4.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx4.fbank_zda[0,229]
test-dr6-mjdh0-sx94.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr6/mjdh0/test-dr6-mjdh0-sx94.fbank_zda[0,270]
test-dr7-fdhc0-si1559.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si1559.fbank_zda[0,337]
test-dr7-fdhc0-si2189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si2189.fbank_zda[0,222]
test-dr7-fdhc0-si929.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-si929.fbank_zda[0,283]
test-dr7-fdhc0-sx119.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx119.fbank_zda[0,300]
test-dr7-fdhc0-sx209.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx209.fbank_zda[0,254]
test-dr7-fdhc0-sx29.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx29.fbank_zda[0,251]
test-dr7-fdhc0-sx299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx299.fbank_zda[0,318]
test-dr7-fdhc0-sx389.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/fdhc0/test-dr7-fdhc0-sx389.fbank_zda[0,243]
test-dr7-mgrt0-si1450.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si1450.fbank_zda[0,618]
test-dr7-mgrt0-si2080.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si2080.fbank_zda[0,228]
test-dr7-mgrt0-si820.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-si820.fbank_zda[0,609]
test-dr7-mgrt0-sx10.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx10.fbank_zda[0,298]
test-dr7-mgrt0-sx100.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx100.fbank_zda[0,490]
test-dr7-mgrt0-sx190.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx190.fbank_zda[0,286]
test-dr7-mgrt0-sx280.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx280.fbank_zda[0,195]
test-dr7-mgrt0-sx370.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mgrt0/test-dr7-mgrt0-sx370.fbank_zda[0,333]
test-dr7-mnjm0-si1580.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si1580.fbank_zda[0,297]
test-dr7-mnjm0-si2210.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si2210.fbank_zda[0,201]
test-dr7-mnjm0-si950.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-si950.fbank_zda[0,544]
test-dr7-mnjm0-sx140.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx140.fbank_zda[0,287]
test-dr7-mnjm0-sx230.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx230.fbank_zda[0,352]
test-dr7-mnjm0-sx320.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx320.fbank_zda[0,378]
test-dr7-mnjm0-sx410.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx410.fbank_zda[0,272]
test-dr7-mnjm0-sx50.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr7/mnjm0/test-dr7-mnjm0-sx50.fbank_zda[0,337]
test-dr8-fmld0-si2185.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si2185.fbank_zda[0,355]
test-dr8-fmld0-si822.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si822.fbank_zda[0,362]
test-dr8-fmld0-si925.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-si925.fbank_zda[0,209]
test-dr8-fmld0-sx115.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx115.fbank_zda[0,236]
test-dr8-fmld0-sx205.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx205.fbank_zda[0,304]
test-dr8-fmld0-sx25.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx25.fbank_zda[0,188]
test-dr8-fmld0-sx295.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx295.fbank_zda[0,307]
test-dr8-fmld0-sx385.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/fmld0/test-dr8-fmld0-sx385.fbank_zda[0,188]
test-dr8-mjln0-si1449.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si1449.fbank_zda[0,430]
test-dr8-mjln0-si2079.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si2079.fbank_zda[0,209]
test-dr8-mjln0-si819.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-si819.fbank_zda[0,451]
test-dr8-mjln0-sx189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx189.fbank_zda[0,294]
test-dr8-mjln0-sx279.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx279.fbank_zda[0,356]
test-dr8-mjln0-sx369.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx369.fbank_zda[0,286]
test-dr8-mjln0-sx9.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx9.fbank_zda[0,246]
test-dr8-mjln0-sx99.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mjln0/test-dr8-mjln0-sx99.fbank_zda[0,313]
test-dr8-mpam0-si1189.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1189.fbank_zda[0,252]
test-dr8-mpam0-si1819.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1819.fbank_zda[0,283]
test-dr8-mpam0-si1961.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-si1961.fbank_zda[0,439]
test-dr8-mpam0-sx109.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx109.fbank_zda[0,291]
test-dr8-mpam0-sx19.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx19.fbank_zda[0,234]
test-dr8-mpam0-sx199.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx199.fbank_zda[0,349]
test-dr8-mpam0-sx289.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx289.fbank_zda[0,268]
test-dr8-mpam0-sx379.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr8/mpam0/test-dr8-mpam0-sx379.fbank_zda[0,249]

Просмотреть файл

@ -0,0 +1,20 @@
test-dr1-felc0-si1386.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si1386.fbank_zda[0,549]
test-dr1-felc0-si2016.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si2016.fbank_zda[0,337]
test-dr1-felc0-si756.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-si756.fbank_zda[0,416]
test-dr1-felc0-sx126.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx126.fbank_zda[0,288]
test-dr1-felc0-sx216.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx216.fbank_zda[0,217]
test-dr1-felc0-sx306.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx306.fbank_zda[0,247]
test-dr1-felc0-sx36.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx36.fbank_zda[0,349]
test-dr1-felc0-sx396.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/felc0/test-dr1-felc0-sx396.fbank_zda[0,379]
test-dr1-mdab0-si1039.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1039.fbank_zda[0,391]
test-dr1-mdab0-si1669.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si1669.fbank_zda[0,203]
test-dr1-mdab0-si2299.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-si2299.fbank_zda[0,257]
test-dr1-mdab0-sx139.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx139.fbank_zda[0,233]
test-dr1-mdab0-sx229.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx229.fbank_zda[0,128]
test-dr1-mdab0-sx319.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx319.fbank_zda[0,241]
test-dr1-mdab0-sx409.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx409.fbank_zda[0,285]
test-dr1-mdab0-sx49.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mdab0/test-dr1-mdab0-sx49.fbank_zda[0,217]
test-dr1-mwbt0-si1553.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si1553.fbank_zda[0,473]
test-dr1-mwbt0-si2183.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si2183.fbank_zda[0,186]
test-dr1-mwbt0-si923.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-si923.fbank_zda[0,293]
test-dr1-mwbt0-sx113.fbank_zda=d:/temp/cntk/TIMIT/feat/test/dr1/mwbt0/test-dr1-mwbt0-sx113.fbank_zda[0,330]

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,100 @@
train-dr1-fcjf0-si1027.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si1027.mfcc_zda[0,306]
train-dr1-fcjf0-si1657.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si1657.mfcc_zda[0,281]
train-dr1-fcjf0-si648.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-si648.mfcc_zda[0,359]
train-dr1-fcjf0-sx127.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx127.mfcc_zda[0,151]
train-dr1-fcjf0-sx217.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx217.mfcc_zda[0,170]
train-dr1-fcjf0-sx307.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx307.mfcc_zda[0,142]
train-dr1-fcjf0-sx37.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx37.mfcc_zda[0,224]
train-dr1-fcjf0-sx397.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fcjf0/train-dr1-fcjf0-sx397.mfcc_zda[0,242]
train-dr1-fdaw0-si1271.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si1271.mfcc_zda[0,483]
train-dr1-fdaw0-si1406.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si1406.mfcc_zda[0,251]
train-dr1-fdaw0-si2036.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-si2036.mfcc_zda[0,476]
train-dr1-fdaw0-sx146.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx146.mfcc_zda[0,260]
train-dr1-fdaw0-sx236.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx236.mfcc_zda[0,248]
train-dr1-fdaw0-sx326.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx326.mfcc_zda[0,284]
train-dr1-fdaw0-sx416.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx416.mfcc_zda[0,258]
train-dr1-fdaw0-sx56.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdaw0/train-dr1-fdaw0-sx56.mfcc_zda[0,300]
train-dr1-fdml0-si1149.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si1149.mfcc_zda[0,359]
train-dr1-fdml0-si1779.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si1779.mfcc_zda[0,250]
train-dr1-fdml0-si2075.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-si2075.mfcc_zda[0,157]
train-dr1-fdml0-sx159.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx159.mfcc_zda[0,284]
train-dr1-fdml0-sx249.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx249.mfcc_zda[0,197]
train-dr1-fdml0-sx339.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx339.mfcc_zda[0,253]
train-dr1-fdml0-sx429.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx429.mfcc_zda[0,273]
train-dr1-fdml0-sx69.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fdml0/train-dr1-fdml0-sx69.mfcc_zda[0,254]
train-dr1-fecd0-si1418.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si1418.mfcc_zda[0,554]
train-dr1-fecd0-si2048.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si2048.mfcc_zda[0,257]
train-dr1-fecd0-si788.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-si788.mfcc_zda[0,513]
train-dr1-fecd0-sx158.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx158.mfcc_zda[0,254]
train-dr1-fecd0-sx248.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx248.mfcc_zda[0,302]
train-dr1-fecd0-sx338.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx338.mfcc_zda[0,329]
train-dr1-fecd0-sx428.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx428.mfcc_zda[0,364]
train-dr1-fecd0-sx68.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fecd0/train-dr1-fecd0-sx68.mfcc_zda[0,292]
train-dr1-fetb0-si1148.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si1148.mfcc_zda[0,254]
train-dr1-fetb0-si1778.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si1778.mfcc_zda[0,350]
train-dr1-fetb0-si518.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-si518.mfcc_zda[0,345]
train-dr1-fetb0-sx158.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx158.mfcc_zda[0,221]
train-dr1-fetb0-sx248.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx248.mfcc_zda[0,261]
train-dr1-fetb0-sx338.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx338.mfcc_zda[0,316]
train-dr1-fetb0-sx428.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx428.mfcc_zda[0,340]
train-dr1-fetb0-sx68.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fetb0/train-dr1-fetb0-sx68.mfcc_zda[0,265]
train-dr1-fjsp0-si1434.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si1434.mfcc_zda[0,435]
train-dr1-fjsp0-si1763.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si1763.mfcc_zda[0,121]
train-dr1-fjsp0-si804.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-si804.mfcc_zda[0,247]
train-dr1-fjsp0-sx174.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx174.mfcc_zda[0,379]
train-dr1-fjsp0-sx264.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx264.mfcc_zda[0,286]
train-dr1-fjsp0-sx354.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx354.mfcc_zda[0,271]
train-dr1-fjsp0-sx444.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx444.mfcc_zda[0,286]
train-dr1-fjsp0-sx84.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fjsp0/train-dr1-fjsp0-sx84.mfcc_zda[0,343]
train-dr1-fkfb0-si1608.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si1608.mfcc_zda[0,474]
train-dr1-fkfb0-si2238.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si2238.mfcc_zda[0,217]
train-dr1-fkfb0-si978.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-si978.mfcc_zda[0,353]
train-dr1-fkfb0-sx168.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx168.mfcc_zda[0,232]
train-dr1-fkfb0-sx258.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx258.mfcc_zda[0,340]
train-dr1-fkfb0-sx348.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx348.mfcc_zda[0,368]
train-dr1-fkfb0-sx438.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx438.mfcc_zda[0,635]
train-dr1-fkfb0-sx78.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fkfb0/train-dr1-fkfb0-sx78.mfcc_zda[0,244]
train-dr1-fmem0-si1377.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si1377.mfcc_zda[0,384]
train-dr1-fmem0-si2007.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si2007.mfcc_zda[0,352]
train-dr1-fmem0-si747.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-si747.mfcc_zda[0,300]
train-dr1-fmem0-sx117.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx117.mfcc_zda[0,320]
train-dr1-fmem0-sx207.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx207.mfcc_zda[0,204]
train-dr1-fmem0-sx297.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx297.mfcc_zda[0,266]
train-dr1-fmem0-sx333.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx333.mfcc_zda[0,511]
train-dr1-fmem0-sx387.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fmem0/train-dr1-fmem0-sx387.mfcc_zda[0,377]
train-dr1-fsah0-si1244.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si1244.mfcc_zda[0,351]
train-dr1-fsah0-si1874.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si1874.mfcc_zda[0,256]
train-dr1-fsah0-si614.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-si614.mfcc_zda[0,465]
train-dr1-fsah0-sx164.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx164.mfcc_zda[0,290]
train-dr1-fsah0-sx327.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx327.mfcc_zda[0,497]
train-dr1-fsah0-sx344.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx344.mfcc_zda[0,202]
train-dr1-fsah0-sx434.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx434.mfcc_zda[0,261]
train-dr1-fsah0-sx74.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsah0/train-dr1-fsah0-sx74.mfcc_zda[0,263]
train-dr1-fsjk1-si1025.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si1025.mfcc_zda[0,309]
train-dr1-fsjk1-si2285.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si2285.mfcc_zda[0,196]
train-dr1-fsjk1-si696.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-si696.mfcc_zda[0,350]
train-dr1-fsjk1-sx125.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx125.mfcc_zda[0,224]
train-dr1-fsjk1-sx215.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx215.mfcc_zda[0,297]
train-dr1-fsjk1-sx305.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx305.mfcc_zda[0,245]
train-dr1-fsjk1-sx35.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx35.mfcc_zda[0,231]
train-dr1-fsjk1-sx395.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsjk1/train-dr1-fsjk1-sx395.mfcc_zda[0,267]
train-dr1-fsma0-si1621.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si1621.mfcc_zda[0,155]
train-dr1-fsma0-si2251.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si2251.mfcc_zda[0,320]
train-dr1-fsma0-si991.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-si991.mfcc_zda[0,446]
train-dr1-fsma0-sx181.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx181.mfcc_zda[0,354]
train-dr1-fsma0-sx271.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx271.mfcc_zda[0,277]
train-dr1-fsma0-sx361.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx361.mfcc_zda[0,261]
train-dr1-fsma0-sx451.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx451.mfcc_zda[0,478]
train-dr1-fsma0-sx91.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fsma0/train-dr1-fsma0-sx91.mfcc_zda[0,270]
train-dr1-ftbr0-si1402.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si1402.mfcc_zda[0,447]
train-dr1-ftbr0-si2181.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si2181.mfcc_zda[0,243]
train-dr1-ftbr0-si921.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-si921.mfcc_zda[0,414]
train-dr1-ftbr0-sx111.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx111.mfcc_zda[0,247]
train-dr1-ftbr0-sx201.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx201.mfcc_zda[0,296]
train-dr1-ftbr0-sx21.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx21.mfcc_zda[0,215]
train-dr1-ftbr0-sx291.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx291.mfcc_zda[0,243]
train-dr1-ftbr0-sx381.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/ftbr0/train-dr1-ftbr0-sx381.mfcc_zda[0,245]
train-dr1-fvfb0-si1032.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si1032.mfcc_zda[0,279]
train-dr1-fvfb0-si1510.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si1510.mfcc_zda[0,401]
train-dr1-fvfb0-si2292.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-si2292.mfcc_zda[0,172]
train-dr1-fvfb0-sx132.mfcc_zda=d:/temp/cntk/TIMIT/feat/train/dr1/fvfb0/train-dr1-fvfb0-sx132.mfcc_zda[0,246]

Просмотреть файл

@ -0,0 +1,118 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# 3 class classification with softmax - CNTK config script using BrainScriptNetworkBuilder
# which commands to run, in order: train the model, write outputs, dump parameters, evaluate
# NOTE(review): "dumpNodeInfo" here vs. "DumpNodeInfo" section below — relies on
# case-insensitive config-key lookup; confirm against the CNTK config parser.
command=Train:Output:dumpNodeInfo:Test
# required...
modelPath = "Models/MC.dnn" # where to write the model to
deviceId = -1 # -1 means CPU; use 0 for your first GPU, 1 for the second etc.
dimension = 2 # input data dimensions
labelDimension = 3 # number of output classes (one-hot labels)
# training config
Train = [
action="train"
# network description: a single linear layer followed by softmax (via the criterion node)
BrainScriptNetworkBuilder=[
# sample and label dimensions (substituted from the top-level variables above)
SDim = $dimension$
LDim = $labelDimension$
features = Input (SDim)
labels = Input (LDim)
# parameters to learn: bias vector b (LDim x 1) and weight matrix w (LDim x SDim)
b = Parameter (LDim, 1)
w = Parameter (LDim, SDim)
# operations: affine transform of the input features
z = w * features + b
ce = CrossEntropyWithSoftmax (labels, z) # training criterion (softmax applied inside)
errs = ErrorPrediction (labels, z) # classification error for reporting
# root nodes wiring the graph into the trainer
featureNodes = (features)
labelNodes = (labels)
criterionNodes = (ce)
evaluationNodes = (errs)
outputNodes = (z) # raw (pre-softmax) scores are what the Output command writes
]
# configuration parameters of the SGD procedure
SGD = [
epochSize = 0 # =0 means size of the training set
minibatchSize = 25
learningRatesPerSample = 0.04 # gradient contribution from each sample
maxEpochs = 50
]
# configuration of data reading
reader = [
readerType = "CNTKTextFormatReader"
file = "Train-3Classes_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = $labelDimension$ # there are 3 different labels
format = "dense"
]
]
]
]
# test: evaluate criterion/error on held-out data using the trained model
Test = [
action = "test"
reader = [
readerType="CNTKTextFormatReader"
file="Test-3Classes_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = $labelDimension$ # there are 3 different labels
format = "dense"
]
]
]
]
# output the results: write the network's output-node values for the test set
Output = [
action="write"
reader=[
readerType="CNTKTextFormatReader"
file="Test-3Classes_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = $labelDimension$ # there are 3 different labels
format = "dense"
]
]
]
outputPath = "MC.txt" # dump the output to this text file
]
# dump parameter values (learned w and b) in readable form
DumpNodeInfo = [
action = "dumpNode"
printValues = true
]

Просмотреть файл

@ -1,2 +1,3 @@
0
1
2

Просмотреть файл

@ -0,0 +1,500 @@
|features 5.170871 -1.020567 |labels 0 0 1
|features 4.327523 -0.815642 |labels 0 0 1
|features 2.678905 0.434324 |labels 0 0 1
|features 7.064864 3.087723 |labels 0 1 0
|features 5.674250 -0.659369 |labels 0 0 1
|features 5.720961 0.614086 |labels 0 0 1
|features 0.817731 0.599577 |labels 1 0 0
|features 4.355816 3.459184 |labels 0 1 0
|features 3.818218 -0.994736 |labels 0 0 1
|features 0.312065 1.951552 |labels 1 0 0
|features 0.741122 -0.148256 |labels 1 0 0
|features 3.429964 -2.544096 |labels 0 0 1
|features 4.642347 1.367120 |labels 0 0 1
|features 4.643550 -0.253437 |labels 0 0 1
|features 2.792871 0.960124 |labels 0 1 0
|features 4.514423 2.196754 |labels 0 0 1
|features -0.666193 -0.032975 |labels 1 0 0
|features 4.810538 3.241911 |labels 0 1 0
|features 3.933512 2.325106 |labels 0 1 0
|features 3.298764 2.599413 |labels 0 1 0
|features 2.145218 0.159719 |labels 1 0 0
|features 5.267208 -0.921221 |labels 0 0 1
|features 1.806416 0.467180 |labels 1 0 0
|features 3.974628 -0.953991 |labels 0 0 1
|features 1.900733 0.460248 |labels 1 0 0
|features -0.507540 -1.202700 |labels 1 0 0
|features 5.731155 2.382330 |labels 0 1 0
|features 4.287938 -0.358895 |labels 0 0 1
|features 4.464944 1.045813 |labels 0 0 1
|features 6.135542 0.311033 |labels 0 0 1
|features 2.230624 3.899346 |labels 0 1 0
|features 1.115160 1.488620 |labels 1 0 0
|features 4.078272 0.648599 |labels 0 0 1
|features 0.946812 -0.586611 |labels 1 0 0
|features 5.043798 1.309346 |labels 0 0 1
|features 4.294848 2.816375 |labels 0 1 0
|features 1.889331 0.041037 |labels 1 0 0
|features 3.259375 2.802361 |labels 0 1 0
|features 1.428610 1.515919 |labels 1 0 0
|features 0.422380 1.107096 |labels 1 0 0
|features 4.158696 3.520900 |labels 0 1 0
|features 2.037615 0.476160 |labels 0 0 1
|features -0.080874 0.684507 |labels 1 0 0
|features 5.304552 3.594974 |labels 0 1 0
|features 4.243805 2.872649 |labels 0 1 0
|features 4.148123 2.440895 |labels 0 1 0
|features 4.693400 3.616597 |labels 0 1 0
|features 6.130004 1.775687 |labels 0 0 1
|features 4.617409 1.618245 |labels 0 0 1
|features 4.337935 3.433942 |labels 0 1 0
|features 1.253423 1.568702 |labels 1 0 0
|features 3.780373 2.457099 |labels 0 0 1
|features 5.223143 1.755579 |labels 0 0 1
|features 2.473481 1.418406 |labels 1 0 0
|features 1.703627 4.378155 |labels 0 1 0
|features 5.434560 4.717314 |labels 0 1 0
|features 5.363743 1.554633 |labels 0 0 1
|features 4.708409 0.266605 |labels 0 0 1
|features 3.527139 3.721405 |labels 0 1 0
|features 4.212410 0.964045 |labels 0 0 1
|features 4.047959 -0.190071 |labels 0 0 1
|features -0.534243 1.178627 |labels 1 0 0
|features 1.476536 -0.159029 |labels 1 0 0
|features 3.346440 2.657206 |labels 0 1 0
|features 3.510718 2.318246 |labels 0 1 0
|features 3.615193 4.605629 |labels 0 1 0
|features 4.832209 0.864395 |labels 0 0 1
|features 4.866461 4.196559 |labels 0 1 0
|features 5.322119 1.825393 |labels 0 0 1
|features 2.902831 -0.095737 |labels 0 0 1
|features 1.840045 2.876644 |labels 0 1 0
|features 0.517413 -0.129987 |labels 1 0 0
|features 2.407253 0.499569 |labels 1 0 0
|features 4.312469 1.582177 |labels 0 0 1
|features 4.995064 0.170178 |labels 0 0 1
|features 4.797494 4.573547 |labels 0 1 0
|features 4.130865 4.410381 |labels 0 1 0
|features 1.105535 1.654471 |labels 1 0 0
|features 5.692636 0.527186 |labels 0 0 1
|features 1.088462 -1.684232 |labels 1 0 0
|features 1.484067 -0.412501 |labels 1 0 0
|features 2.889689 3.690520 |labels 0 1 0
|features 0.511409 -0.004521 |labels 1 0 0
|features 0.947963 -0.651233 |labels 1 0 0
|features 4.363427 2.649857 |labels 0 1 0
|features 3.305984 -0.131774 |labels 0 0 1
|features 5.515684 2.063754 |labels 0 0 1
|features 3.897807 0.829441 |labels 0 0 1
|features 1.382318 0.452016 |labels 1 0 0
|features 2.830457 3.628353 |labels 0 1 0
|features 4.320996 -1.406747 |labels 0 0 1
|features 2.761719 1.222520 |labels 0 0 1
|features 5.451307 0.426850 |labels 0 0 1
|features -0.555044 0.155634 |labels 1 0 0
|features 2.176072 3.696026 |labels 1 0 0
|features 4.240790 3.729701 |labels 0 1 0
|features 3.022974 2.228043 |labels 0 0 1
|features 4.499028 3.498968 |labels 0 1 0
|features 3.882526 -0.643661 |labels 0 0 1
|features 0.416113 -0.179831 |labels 1 0 0
|features 3.748151 1.565968 |labels 0 0 1
|features 3.965157 -0.773285 |labels 0 0 1
|features 5.589582 5.855553 |labels 0 1 0
|features 4.295428 5.171385 |labels 0 1 0
|features 1.138826 0.131605 |labels 1 0 0
|features 4.559609 3.137678 |labels 0 1 0
|features 4.375586 -2.039131 |labels 0 0 1
|features 5.972429 2.140441 |labels 0 1 0
|features 3.785464 0.481651 |labels 0 0 1
|features 4.872444 2.856546 |labels 0 1 0
|features 4.735155 1.706737 |labels 0 0 1
|features 2.564409 1.229335 |labels 0 0 1
|features 4.638684 4.290509 |labels 0 1 0
|features 6.025844 1.859031 |labels 0 1 0
|features 2.991525 3.173522 |labels 0 1 0
|features 1.066838 -2.194139 |labels 1 0 0
|features 3.335534 -0.648720 |labels 0 0 1
|features 5.484550 0.563980 |labels 0 0 1
|features 1.304820 0.038009 |labels 1 0 0
|features 1.890283 -1.570453 |labels 1 0 0
|features 0.794851 1.305584 |labels 1 0 0
|features 3.429155 2.523924 |labels 0 1 0
|features 4.153625 -0.803432 |labels 0 0 1
|features 3.586901 2.625966 |labels 0 1 0
|features 4.372702 0.575025 |labels 0 0 1
|features 1.135865 1.035374 |labels 1 0 0
|features 4.796154 0.862651 |labels 0 0 1
|features 0.369248 2.115445 |labels 1 0 0
|features 3.516012 0.480955 |labels 0 0 1
|features 2.595974 -1.690054 |labels 1 0 0
|features 1.126398 1.824413 |labels 1 0 0
|features 4.077564 0.924271 |labels 0 0 1
|features 4.612824 4.989916 |labels 0 1 0
|features 1.546290 2.119071 |labels 1 0 0
|features 5.618829 2.105525 |labels 0 1 0
|features 3.055336 3.065747 |labels 0 1 0
|features 4.647198 1.542813 |labels 0 0 1
|features 5.091151 2.147099 |labels 0 1 0
|features 3.625071 0.311037 |labels 0 0 1
|features 2.307799 -1.495730 |labels 0 0 1
|features -0.753784 1.024504 |labels 1 0 0
|features 2.467414 0.894651 |labels 0 0 1
|features 2.356993 -1.348528 |labels 1 0 0
|features 2.001782 -0.836787 |labels 0 0 1
|features 2.925553 -0.828339 |labels 0 0 1
|features 4.416055 3.245529 |labels 0 1 0
|features 1.069252 0.725166 |labels 1 0 0
|features 1.294562 -0.197274 |labels 1 0 0
|features 4.477378 2.347177 |labels 0 1 0
|features 3.499737 4.875598 |labels 0 1 0
|features 1.839399 1.836997 |labels 0 1 0
|features 1.092580 2.312231 |labels 1 0 0
|features -0.751483 0.917030 |labels 1 0 0
|features 3.416649 0.959780 |labels 0 0 1
|features 6.272890 1.393839 |labels 0 0 1
|features 0.928108 -0.633631 |labels 1 0 0
|features -0.621571 -0.399249 |labels 1 0 0
|features 1.579312 0.683836 |labels 1 0 0
|features 4.344459 1.574135 |labels 0 1 0
|features 1.819022 0.194714 |labels 1 0 0
|features 3.237937 2.741034 |labels 0 1 0
|features 3.382908 1.775352 |labels 0 1 0
|features 2.903564 0.885940 |labels 0 0 1
|features 4.353755 3.957848 |labels 0 1 0
|features -0.748388 1.073738 |labels 1 0 0
|features 4.757949 3.555670 |labels 0 1 0
|features 4.343514 -0.300948 |labels 0 0 1
|features 1.439724 4.757091 |labels 0 1 0
|features 5.578366 4.132736 |labels 0 1 0
|features 0.227763 -2.450668 |labels 1 0 0
|features 6.338476 3.210632 |labels 0 1 0
|features 3.121639 3.286460 |labels 0 1 0
|features 5.352882 -0.806885 |labels 0 0 1
|features 3.903871 -0.319867 |labels 0 0 1
|features 0.787749 2.035582 |labels 1 0 0
|features 4.744425 0.515576 |labels 0 1 0
|features 4.864004 5.600483 |labels 0 1 0
|features 0.185025 0.221781 |labels 1 0 0
|features 0.111863 -0.153125 |labels 1 0 0
|features 2.884757 4.306858 |labels 0 1 0
|features 3.481979 2.288502 |labels 0 1 0
|features 2.205238 0.502377 |labels 1 0 0
|features 4.693934 4.432471 |labels 0 1 0
|features 3.791779 -0.057670 |labels 0 0 1
|features 1.143819 0.817160 |labels 1 0 0
|features 5.737469 3.878223 |labels 0 1 0
|features 4.741342 3.173934 |labels 0 1 0
|features 2.422221 -0.013868 |labels 0 0 1
|features 0.918625 -0.166115 |labels 1 0 0
|features 3.122271 1.424641 |labels 0 0 1
|features 3.462430 3.956410 |labels 0 1 0
|features 3.654001 2.247638 |labels 0 0 1
|features 3.540817 3.459332 |labels 0 1 0
|features 3.806689 1.696715 |labels 0 0 1
|features 6.119885 -0.400401 |labels 0 0 1
|features 3.911604 3.011729 |labels 0 1 0
|features 2.972734 3.194861 |labels 0 1 0
|features 3.006524 1.140043 |labels 0 0 1
|features 3.007322 2.892493 |labels 0 1 0
|features 4.802557 2.566478 |labels 0 1 0
|features 2.428262 0.396116 |labels 0 0 1
|features 4.030434 3.230760 |labels 0 1 0
|features 5.191659 4.383636 |labels 0 1 0
|features -1.045050 -0.785392 |labels 1 0 0
|features 4.768335 3.893780 |labels 0 1 0
|features 3.939898 2.448906 |labels 0 1 0
|features 3.972909 0.930766 |labels 0 0 1
|features -0.122396 0.163240 |labels 1 0 0
|features 1.012661 0.920772 |labels 1 0 0
|features 4.306722 -0.182788 |labels 0 0 1
|features 3.693470 3.431040 |labels 0 1 0
|features 3.330047 -0.328417 |labels 0 0 1
|features 5.276350 2.810753 |labels 0 1 0
|features -0.866210 -0.176071 |labels 1 0 0
|features 1.161681 0.068793 |labels 1 0 0
|features 4.017895 2.746144 |labels 0 1 0
|features 4.091524 1.494913 |labels 0 0 1
|features 1.086877 -0.661555 |labels 1 0 0
|features 1.096610 1.663877 |labels 1 0 0
|features 3.550485 -1.326387 |labels 0 0 1
|features 3.756918 -0.548134 |labels 0 0 1
|features -0.432257 1.933901 |labels 1 0 0
|features 3.073529 0.512542 |labels 0 0 1
|features 4.690409 0.643014 |labels 0 0 1
|features 1.433133 0.655506 |labels 1 0 0
|features -0.738960 0.091768 |labels 1 0 0
|features 5.770527 1.989314 |labels 0 1 0
|features 3.508179 3.551834 |labels 0 1 0
|features -0.553227 0.665805 |labels 1 0 0
|features 0.063303 1.515440 |labels 1 0 0
|features 4.006607 1.017800 |labels 0 0 1
|features 5.403834 2.985693 |labels 0 1 0
|features 1.250549 0.872075 |labels 1 0 0
|features 1.601300 0.877506 |labels 1 0 0
|features 1.555845 -0.750437 |labels 1 0 0
|features 5.502633 2.444373 |labels 0 1 0
|features 0.502594 2.871184 |labels 1 0 0
|features 3.941683 -0.265121 |labels 0 0 1
|features 1.609009 2.337289 |labels 0 0 1
|features 2.049371 -0.109907 |labels 1 0 0
|features 3.893968 1.147726 |labels 0 0 1
|features 4.181638 3.931917 |labels 0 1 0
|features 5.187949 -0.282560 |labels 0 0 1
|features 0.428814 -0.817809 |labels 1 0 0
|features 4.280497 0.693244 |labels 0 0 1
|features -0.115077 1.271914 |labels 1 0 0
|features 2.340634 3.088115 |labels 0 1 0
|features 5.672981 4.301189 |labels 0 1 0
|features 0.751783 0.215502 |labels 1 0 0
|features 4.884824 2.147961 |labels 0 1 0
|features 4.301601 0.356782 |labels 0 0 1
|features 3.703915 5.951633 |labels 0 1 0
|features 0.308308 0.637447 |labels 1 0 0
|features 3.588032 0.386808 |labels 0 0 1
|features 2.580274 0.968874 |labels 1 0 0
|features 1.931611 -0.374972 |labels 1 0 0
|features 2.027248 0.680231 |labels 1 0 0
|features 3.805778 3.970522 |labels 0 1 0
|features 5.393209 2.870595 |labels 0 1 0
|features 4.341932 1.219185 |labels 0 0 1
|features 3.948779 0.138642 |labels 0 0 1
|features 2.974210 4.078202 |labels 0 1 0
|features 3.466237 1.479102 |labels 0 1 0
|features 3.429529 0.164491 |labels 0 0 1
|features 0.895563 1.207858 |labels 1 0 0
|features 6.222346 2.587637 |labels 0 1 0
|features 3.781328 4.481887 |labels 0 1 0
|features 5.005423 4.339553 |labels 0 1 0
|features 3.342823 3.155918 |labels 0 1 0
|features 0.638335 0.422880 |labels 1 0 0
|features 6.084583 0.524755 |labels 0 0 1
|features 0.544146 -0.675320 |labels 1 0 0
|features 0.937815 -0.158842 |labels 1 0 0
|features 4.878075 3.646459 |labels 0 1 0
|features 4.394876 3.656452 |labels 0 1 0
|features 0.175527 1.051449 |labels 1 0 0
|features 2.068004 3.731130 |labels 0 1 0
|features 3.658874 0.274977 |labels 1 0 0
|features 4.654398 3.224570 |labels 0 1 0
|features 4.217887 -0.269042 |labels 0 0 1
|features 3.868590 0.902664 |labels 0 0 1
|features 4.463083 1.357407 |labels 0 0 1
|features 0.138550 0.810577 |labels 1 0 0
|features 4.721700 0.908698 |labels 0 0 1
|features 3.347677 1.272460 |labels 0 0 1
|features 0.108387 -0.575248 |labels 1 0 0
|features 5.097649 0.290083 |labels 0 0 1
|features 4.497906 0.541197 |labels 0 0 1
|features 3.507372 3.421968 |labels 0 1 0
|features 6.624134 3.603327 |labels 0 1 0
|features -0.587276 0.471565 |labels 1 0 0
|features 1.801156 -1.528190 |labels 1 0 0
|features 3.197821 -1.297906 |labels 0 0 1
|features 2.135339 1.138818 |labels 1 0 0
|features 6.062915 5.061124 |labels 0 1 0
|features 0.362993 -0.847628 |labels 1 0 0
|features 3.520253 2.923445 |labels 0 1 0
|features 3.839261 5.262581 |labels 0 1 0
|features 2.849768 1.676886 |labels 0 1 0
|features 1.543549 1.215295 |labels 1 0 0
|features 0.444973 0.453481 |labels 1 0 0
|features 4.937584 3.562562 |labels 0 1 0
|features 1.857639 0.191374 |labels 1 0 0
|features 4.335236 5.163506 |labels 0 1 0
|features 4.691276 1.898922 |labels 0 1 0
|features 5.904368 0.700075 |labels 0 0 1
|features 5.615622 1.168694 |labels 0 0 1
|features 3.518285 -1.463071 |labels 0 0 1
|features 5.260380 2.033894 |labels 0 0 1
|features 4.402485 -1.466749 |labels 0 0 1
|features 1.661792 2.199234 |labels 1 0 0
|features 3.428152 4.100739 |labels 0 1 0
|features 4.155874 1.180401 |labels 0 0 1
|features 3.560558 1.628193 |labels 0 0 1
|features 1.752789 0.167950 |labels 1 0 0
|features 2.989879 4.547287 |labels 0 1 0
|features 1.472915 1.807051 |labels 1 0 0
|features 3.956969 1.057726 |labels 0 0 1
|features -0.910892 -0.324420 |labels 1 0 0
|features 3.896150 -0.261758 |labels 0 0 1
|features 5.160224 3.879306 |labels 0 1 0
|features 0.508886 0.388303 |labels 1 0 0
|features 4.550465 2.808816 |labels 0 1 0
|features 4.219800 2.157565 |labels 0 0 1
|features 2.973732 -0.025899 |labels 0 0 1
|features 4.850215 2.057425 |labels 0 1 0
|features 4.281529 0.884518 |labels 0 0 1
|features 3.185110 3.578732 |labels 0 1 0
|features 3.550744 3.634269 |labels 0 1 0
|features 3.868571 -0.685954 |labels 0 0 1
|features 4.436760 2.882380 |labels 0 1 0
|features 1.576030 0.721975 |labels 1 0 0
|features 4.668262 0.805866 |labels 0 0 1
|features 3.715000 0.315345 |labels 0 0 1
|features 2.507644 3.547079 |labels 0 1 0
|features 0.894466 1.539280 |labels 1 0 0
|features 0.909938 1.464673 |labels 1 0 0
|features 4.112257 0.148428 |labels 0 0 1
|features 3.106928 1.113714 |labels 0 0 1
|features 1.288402 -0.701025 |labels 1 0 0
|features 3.877478 5.171910 |labels 0 1 0
|features 0.358146 1.337491 |labels 1 0 0
|features 3.112478 4.083398 |labels 0 1 0
|features -0.073140 -0.908321 |labels 1 0 0
|features 2.937791 3.262817 |labels 0 1 0
|features 2.835341 -0.358869 |labels 1 0 0
|features 0.454474 0.626554 |labels 1 0 0
|features 1.123346 0.271387 |labels 1 0 0
|features 3.839966 3.804344 |labels 0 1 0
|features 1.667118 1.689903 |labels 1 0 0
|features 3.290955 2.687145 |labels 0 1 0
|features 3.128941 3.792980 |labels 0 1 0
|features 5.029546 1.430181 |labels 0 0 1
|features 3.298832 3.179733 |labels 0 1 0
|features 3.688343 1.939487 |labels 0 0 1
|features -1.383809 -0.301854 |labels 1 0 0
|features 0.841377 -0.464041 |labels 1 0 0
|features 1.821239 0.271780 |labels 0 0 1
|features 3.014247 2.084152 |labels 0 1 0
|features 5.089541 1.644286 |labels 0 0 1
|features 2.348496 2.055794 |labels 0 0 1
|features 4.780683 0.638380 |labels 0 0 1
|features 1.087268 1.900109 |labels 1 0 0
|features 5.347040 4.701276 |labels 0 1 0
|features 4.871581 -0.863385 |labels 0 0 1
|features 1.192136 -1.018245 |labels 1 0 0
|features 2.876963 1.949194 |labels 0 0 1
|features 1.120886 -0.929929 |labels 1 0 0
|features 0.163176 -0.664774 |labels 1 0 0
|features 3.170573 2.324343 |labels 0 1 0
|features -0.939862 1.074156 |labels 1 0 0
|features 4.808481 4.376259 |labels 0 1 0
|features 2.881816 0.101940 |labels 0 0 1
|features 4.280627 5.079110 |labels 0 1 0
|features 1.342037 1.384894 |labels 1 0 0
|features 4.423114 2.026311 |labels 0 1 0
|features 1.956134 0.905019 |labels 0 0 1
|features 3.601798 1.118188 |labels 0 0 1
|features 2.368481 0.062807 |labels 0 0 1
|features 2.527163 2.606035 |labels 1 0 0
|features 4.159202 2.225367 |labels 0 0 1
|features 4.243090 -1.678570 |labels 0 0 1
|features 3.374625 0.916946 |labels 0 0 1
|features 4.836464 4.250067 |labels 0 1 0
|features 3.865716 0.733695 |labels 0 0 1
|features 4.538941 2.740055 |labels 0 1 0
|features -0.725037 1.923167 |labels 1 0 0
|features 4.348298 3.779133 |labels 0 1 0
|features 3.111166 3.210131 |labels 0 1 0
|features 1.983827 3.583649 |labels 0 1 0
|features 4.348485 0.524232 |labels 0 0 1
|features 0.113734 0.842453 |labels 1 0 0
|features 1.088557 0.351414 |labels 1 0 0
|features 4.140245 0.179361 |labels 0 0 1
|features 3.700337 4.401253 |labels 0 1 0
|features 3.689474 0.529192 |labels 0 0 1
|features 4.249344 2.108636 |labels 0 1 0
|features 2.345218 -1.911326 |labels 1 0 0
|features 5.250122 2.604543 |labels 0 1 0
|features 0.902275 1.307964 |labels 1 0 0
|features 3.449315 2.666064 |labels 0 1 0
|features -0.343801 0.123662 |labels 1 0 0
|features 4.443098 4.993161 |labels 0 1 0
|features 3.987001 1.344293 |labels 0 0 1
|features 3.783282 -0.936724 |labels 0 0 1
|features 1.958721 3.348016 |labels 0 1 0
|features 5.515478 3.471848 |labels 0 1 0
|features 1.584154 1.162307 |labels 1 0 0
|features 0.486644 -0.685806 |labels 1 0 0
|features 4.049450 3.728309 |labels 0 1 0
|features 0.577870 0.066874 |labels 1 0 0
|features 0.934053 0.998711 |labels 1 0 0
|features 0.933781 -0.644142 |labels 1 0 0
|features 1.740673 -2.701543 |labels 1 0 0
|features 5.983059 3.880100 |labels 0 1 0
|features -0.542726 1.520657 |labels 1 0 0
|features 1.820242 -0.127512 |labels 1 0 0
|features 3.869995 1.366239 |labels 0 0 1
|features 3.649993 3.875205 |labels 0 1 0
|features 4.326591 3.908440 |labels 0 1 0
|features -0.671845 2.292655 |labels 1 0 0
|features 4.614205 -0.065502 |labels 0 0 1
|features 5.337743 2.782974 |labels 0 1 0
|features 1.006830 0.502485 |labels 1 0 0
|features 0.646433 0.907665 |labels 1 0 0
|features 3.497074 1.102314 |labels 0 0 1
|features 2.714741 2.815370 |labels 0 1 0
|features 0.840907 -0.788417 |labels 1 0 0
|features 3.681667 3.963016 |labels 0 1 0
|features -0.413474 -1.050748 |labels 1 0 0
|features 5.095382 3.043376 |labels 0 1 0
|features 1.276484 1.763724 |labels 1 0 0
|features 1.093233 1.896627 |labels 1 0 0
|features 0.885230 0.195284 |labels 1 0 0
|features -1.116703 -0.083673 |labels 1 0 0
|features 2.768153 3.253628 |labels 0 1 0
|features 4.494142 0.705453 |labels 0 0 1
|features 1.824851 1.606847 |labels 1 0 0
|features 2.780254 4.475852 |labels 0 1 0
|features 2.187291 -1.004686 |labels 1 0 0
|features 4.102475 0.139971 |labels 0 0 1
|features 2.704021 0.546845 |labels 0 0 1
|features 2.153619 0.703613 |labels 0 0 1
|features 3.926857 -1.438502 |labels 0 0 1
|features 3.926749 0.703817 |labels 0 0 1
|features 2.200351 1.232015 |labels 1 0 0
|features 3.353178 2.328385 |labels 0 0 1
|features -0.503365 1.129519 |labels 1 0 0
|features -0.906905 0.719234 |labels 1 0 0
|features -0.306073 -0.079512 |labels 1 0 0
|features 3.548283 5.826111 |labels 0 1 0
|features 0.536668 0.896596 |labels 1 0 0
|features 3.568629 0.675687 |labels 0 0 1
|features 4.020288 2.491829 |labels 0 0 1
|features 4.877190 1.382423 |labels 0 0 1
|features 2.060446 0.728661 |labels 1 0 0
|features 4.380656 -0.377533 |labels 0 0 1
|features 3.095210 -0.249912 |labels 0 0 1
|features 5.433247 6.082839 |labels 0 1 0
|features 1.226953 1.803442 |labels 1 0 0
|features 1.726498 0.294119 |labels 1 0 0
|features 5.227722 -0.995057 |labels 0 0 1
|features 0.867838 0.103294 |labels 1 0 0
|features 0.191855 0.333970 |labels 1 0 0
|features 4.725049 5.049785 |labels 0 1 0
|features 1.648490 -0.236169 |labels 1 0 0
|features 4.723265 3.068310 |labels 0 0 1
|features 0.383477 0.799365 |labels 1 0 0
|features 2.648195 0.388766 |labels 0 0 1
|features 0.144110 0.829298 |labels 1 0 0
|features 0.580925 0.184520 |labels 1 0 0
|features 5.158404 -0.258398 |labels 0 0 1
|features 2.630052 3.650630 |labels 0 1 0
|features 4.654484 4.140236 |labels 0 1 0
|features 4.471095 2.304949 |labels 0 1 0
|features 1.210868 1.351212 |labels 1 0 0
|features 1.244951 -0.352608 |labels 1 0 0
|features 4.167235 2.325315 |labels 0 1 0
|features 0.749074 -0.227563 |labels 1 0 0
|features 4.469702 2.354917 |labels 0 1 0
|features 2.262076 2.361214 |labels 1 0 0
|features 0.364683 -1.094402 |labels 1 0 0
|features 4.004906 2.918029 |labels 0 1 0
|features 5.916473 2.902482 |labels 0 1 0
|features 3.883862 4.207980 |labels 0 1 0
|features 4.330138 0.662156 |labels 0 0 1
|features 2.197955 -1.037347 |labels 1 0 0
|features 2.564784 4.314187 |labels 0 1 0
|features 3.393377 4.268344 |labels 0 1 0
|features 5.234548 1.507413 |labels 0 0 1
|features 4.162258 3.859534 |labels 0 1 0
|features 3.073434 3.885747 |labels 0 1 0
|features 3.651390 -1.359382 |labels 0 0 1
|features 3.401908 0.200416 |labels 0 0 1
|features 3.765561 4.517499 |labels 0 1 0
|features 4.644557 1.919168 |labels 0 1 0
|features 3.382163 2.761240 |labels 0 0 1
|features 0.177373 0.845746 |labels 1 0 0
|features 1.367353 1.108394 |labels 1 0 0
|features 0.323228 0.872102 |labels 1 0 0

Просмотреть файл

@ -0,0 +1,500 @@
|features 3.854499 4.163941 |labels 1.000000
|features 1.058121 1.204858 |labels 0.000000
|features 1.870621 1.284107 |labels 0.000000
|features 1.134650 1.651822 |labels 0.000000
|features 5.420541 4.557660 |labels 1.000000
|features 6.042731 3.375708 |labels 1.000000
|features 5.667109 2.811728 |labels 1.000000
|features 0.232070 1.814821 |labels 0.000000
|features -0.647150 -1.612478 |labels 0.000000
|features 2.626172 5.321667 |labels 1.000000
|features 1.359751 2.056849 |labels 0.000000
|features 3.534476 6.011925 |labels 1.000000
|features 4.871508 2.245406 |labels 1.000000
|features 4.977201 6.092787 |labels 1.000000
|features 1.597508 2.110568 |labels 0.000000
|features 2.099170 0.073616 |labels 0.000000
|features 0.638281 -0.171881 |labels 0.000000
|features 4.606747 4.092115 |labels 1.000000
|features 5.168790 4.673153 |labels 1.000000
|features 5.084637 4.435160 |labels 1.000000
|features 3.379607 2.765107 |labels 1.000000
|features 3.992242 2.799751 |labels 1.000000
|features 1.807728 0.205914 |labels 0.000000
|features 1.946180 0.303569 |labels 0.000000
|features 0.218267 1.301271 |labels 0.000000
|features 4.932840 2.117177 |labels 1.000000
|features 3.739489 2.458558 |labels 1.000000
|features 1.597743 -2.192362 |labels 0.000000
|features 3.582005 3.350572 |labels 1.000000
|features 3.930642 5.733507 |labels 1.000000
|features 5.747863 3.739415 |labels 1.000000
|features -0.631374 2.314482 |labels 0.000000
|features 0.866484 0.363432 |labels 0.000000
|features 0.293501 0.347385 |labels 0.000000
|features 4.544393 4.699040 |labels 1.000000
|features -0.242005 0.926520 |labels 0.000000
|features 3.637198 5.238140 |labels 1.000000
|features -0.269463 1.525586 |labels 0.000000
|features 0.682529 -0.703649 |labels 0.000000
|features 3.562643 -0.126556 |labels 0.000000
|features 2.671530 3.729066 |labels 1.000000
|features 4.034716 3.458366 |labels 1.000000
|features 5.401503 3.117191 |labels 1.000000
|features 1.157177 1.183186 |labels 0.000000
|features 0.778963 1.394348 |labels 0.000000
|features 4.599715 2.297663 |labels 1.000000
|features 4.532568 4.568362 |labels 1.000000
|features 1.785478 -0.213185 |labels 0.000000
|features 4.617391 4.230360 |labels 1.000000
|features 5.672957 3.668370 |labels 1.000000
|features 4.267738 5.390780 |labels 1.000000
|features 0.707751 2.955391 |labels 0.000000
|features 0.791275 1.654795 |labels 0.000000
|features 1.760541 0.976920 |labels 0.000000
|features 4.543920 2.222765 |labels 1.000000
|features 4.515881 6.199021 |labels 1.000000
|features 3.645005 3.611395 |labels 1.000000
|features 0.965049 1.737265 |labels 0.000000
|features -1.779455 1.595554 |labels 0.000000
|features -0.484797 -0.559924 |labels 0.000000
|features 2.944180 4.429239 |labels 1.000000
|features 3.326649 4.412622 |labels 1.000000
|features 4.275101 2.143945 |labels 1.000000
|features 1.173035 0.641844 |labels 0.000000
|features 4.003884 3.176954 |labels 1.000000
|features 1.960240 -0.244709 |labels 0.000000
|features 0.320283 2.115552 |labels 0.000000
|features 2.303185 3.047043 |labels 1.000000
|features 0.993086 0.074009 |labels 0.000000
|features 5.599144 3.857344 |labels 1.000000
|features 5.325894 3.931000 |labels 1.000000
|features 2.840053 4.781688 |labels 1.000000
|features 4.142453 3.405830 |labels 1.000000
|features 1.084043 1.589581 |labels 0.000000
|features 2.795705 2.319276 |labels 1.000000
|features 1.980552 0.717780 |labels 0.000000
|features 1.875956 -0.571905 |labels 0.000000
|features 2.013802 1.694811 |labels 0.000000
|features 4.690795 2.183334 |labels 1.000000
|features 4.321816 1.876459 |labels 1.000000
|features 4.088717 4.394346 |labels 1.000000
|features 4.991936 4.299770 |labels 1.000000
|features 2.592315 4.783210 |labels 1.000000
|features 0.703270 2.541733 |labels 0.000000
|features 0.467768 -0.007592 |labels 0.000000
|features 1.694096 -0.570847 |labels 0.000000
|features 2.255603 0.663395 |labels 0.000000
|features 1.300394 1.518341 |labels 0.000000
|features 4.354786 4.501928 |labels 1.000000
|features 1.474162 0.603113 |labels 0.000000
|features 1.340782 0.637653 |labels 0.000000
|features -0.351240 0.501893 |labels 0.000000
|features 4.918587 5.366305 |labels 1.000000
|features 2.242199 -0.916682 |labels 0.000000
|features -0.161858 0.448384 |labels 0.000000
|features 1.659615 1.524191 |labels 0.000000
|features 3.072670 1.703225 |labels 0.000000
|features 0.003256 -0.306702 |labels 0.000000
|features -1.792094 1.193539 |labels 0.000000
|features 7.200298 3.962190 |labels 1.000000
|features 4.220305 4.190289 |labels 1.000000
|features 4.096599 3.264797 |labels 1.000000
|features -0.674145 0.751491 |labels 0.000000
|features 3.215213 4.549768 |labels 1.000000
|features 1.522988 3.311437 |labels 0.000000
|features 4.393445 1.822070 |labels 1.000000
|features 1.991048 1.429309 |labels 0.000000
|features 4.741012 3.169984 |labels 1.000000
|features 2.563678 1.798587 |labels 0.000000
|features 3.310656 3.600789 |labels 1.000000
|features 0.559119 -0.193984 |labels 0.000000
|features 3.182626 3.279566 |labels 1.000000
|features 0.145061 1.428861 |labels 0.000000
|features 5.748625 2.766672 |labels 1.000000
|features 1.612338 -0.441931 |labels 0.000000
|features 0.521950 0.355267 |labels 0.000000
|features 4.284910 3.874950 |labels 1.000000
|features 4.911425 3.054658 |labels 1.000000
|features 2.946163 0.502614 |labels 0.000000
|features 4.381390 2.600999 |labels 1.000000
|features 0.585791 -0.528432 |labels 0.000000
|features 1.329802 -0.076910 |labels 0.000000
|features 0.860040 1.153562 |labels 0.000000
|features 0.930515 -0.257435 |labels 0.000000
|features 2.775174 0.751338 |labels 0.000000
|features 2.429059 0.615483 |labels 0.000000
|features 2.546002 1.132210 |labels 0.000000
|features 5.059000 3.423829 |labels 1.000000
|features 1.303533 0.013015 |labels 0.000000
|features 2.160149 -0.400779 |labels 0.000000
|features 5.038046 3.027673 |labels 1.000000
|features 4.583471 5.379319 |labels 1.000000
|features 5.608845 2.082021 |labels 1.000000
|features 3.406426 3.326734 |labels 1.000000
|features 4.267102 3.866177 |labels 1.000000
|features 1.799669 0.489094 |labels 0.000000
|features 1.807634 2.029468 |labels 0.000000
|features 1.536463 1.053052 |labels 0.000000
|features 5.653295 3.369125 |labels 1.000000
|features 2.493326 0.794542 |labels 0.000000
|features 1.528977 0.961929 |labels 0.000000
|features 1.973016 0.696162 |labels 0.000000
|features 2.283974 0.198255 |labels 0.000000
|features 5.227293 4.395268 |labels 1.000000
|features 5.302484 4.021613 |labels 1.000000
|features 6.223076 4.537934 |labels 1.000000
|features 1.460204 -1.055539 |labels 0.000000
|features 2.985097 4.228990 |labels 1.000000
|features 1.685054 0.499576 |labels 0.000000
|features 0.521659 0.510605 |labels 0.000000
|features 1.891089 1.284388 |labels 0.000000
|features 4.620926 3.662371 |labels 1.000000
|features 1.613905 -0.770152 |labels 0.000000
|features 6.007418 4.755721 |labels 1.000000
|features 0.798078 -0.304557 |labels 0.000000
|features 5.242706 2.099872 |labels 1.000000
|features 1.518268 -0.858963 |labels 0.000000
|features 3.733642 4.244483 |labels 1.000000
|features 0.970367 -1.534686 |labels 0.000000
|features 1.334952 2.250191 |labels 0.000000
|features 2.252214 3.343515 |labels 1.000000
|features 3.982213 4.457969 |labels 1.000000
|features 5.086620 3.180442 |labels 1.000000
|features 0.005277 0.197319 |labels 0.000000
|features 2.999128 2.909942 |labels 1.000000
|features 2.412666 2.046286 |labels 0.000000
|features 2.044537 3.416533 |labels 1.000000
|features 2.650439 3.372171 |labels 1.000000
|features 2.480446 1.327368 |labels 0.000000
|features 4.824915 5.603495 |labels 1.000000
|features 0.759204 0.531043 |labels 0.000000
|features 1.965476 1.372763 |labels 0.000000
|features 1.000248 1.208139 |labels 0.000000
|features 1.979980 -0.446807 |labels 0.000000
|features 0.528053 1.178535 |labels 0.000000
|features 5.442396 3.969797 |labels 1.000000
|features -0.145691 1.375993 |labels 0.000000
|features 1.336725 -0.006089 |labels 0.000000
|features 5.291797 3.250537 |labels 1.000000
|features 4.286453 1.117735 |labels 1.000000
|features -0.928654 -0.925485 |labels 0.000000
|features 3.332391 2.603963 |labels 1.000000
|features 3.215562 4.756808 |labels 1.000000
|features 1.610967 0.830856 |labels 0.000000
|features 2.174433 3.501271 |labels 1.000000
|features 4.848584 4.251824 |labels 1.000000
|features 0.810184 1.152021 |labels 0.000000
|features 4.873924 4.517936 |labels 1.000000
|features 1.915303 1.649095 |labels 0.000000
|features 1.623343 -0.081105 |labels 0.000000
|features 1.944076 0.482732 |labels 0.000000
|features 2.442956 1.254540 |labels 0.000000
|features -1.002581 1.265333 |labels 0.000000
|features 0.959354 0.678516 |labels 0.000000
|features -0.478621 2.502554 |labels 0.000000
|features 3.357642 2.993470 |labels 1.000000
|features 5.741979 2.958477 |labels 1.000000
|features 4.474261 3.260622 |labels 1.000000
|features 3.587932 4.572091 |labels 1.000000
|features 1.274866 0.695311 |labels 0.000000
|features 4.557162 4.754880 |labels 1.000000
|features 0.557867 0.280893 |labels 0.000000
|features 1.832047 -2.162059 |labels 0.000000
|features 3.904049 5.257427 |labels 1.000000
|features 3.225019 3.845294 |labels 1.000000
|features 4.451218 4.125344 |labels 1.000000
|features 3.138143 2.869685 |labels 1.000000
|features 4.451703 3.430654 |labels 1.000000
|features 0.124060 1.422203 |labels 0.000000
|features 4.692774 5.156611 |labels 1.000000
|features 0.735314 0.375099 |labels 0.000000
|features 0.727577 1.158726 |labels 0.000000
|features 0.643469 0.283426 |labels 0.000000
|features 5.126834 1.929468 |labels 1.000000
|features -0.172361 2.982370 |labels 0.000000
|features 3.957745 1.561874 |labels 1.000000
|features 5.563733 3.417080 |labels 1.000000
|features 5.181533 1.465063 |labels 1.000000
|features 5.843654 5.040710 |labels 1.000000
|features 0.761570 0.171094 |labels 0.000000
|features 3.163795 3.940869 |labels 1.000000
|features 2.435362 1.047614 |labels 0.000000
|features 2.524330 3.602348 |labels 1.000000
|features 4.200838 3.267377 |labels 1.000000
|features 4.249560 2.926280 |labels 1.000000
|features 0.060257 0.295729 |labels 0.000000
|features 1.528257 1.651867 |labels 0.000000
|features 2.030978 1.566011 |labels 0.000000
|features 4.065243 4.375190 |labels 1.000000
|features 1.406204 0.238570 |labels 0.000000
|features 1.229776 1.186559 |labels 0.000000
|features 2.295681 1.883864 |labels 0.000000
|features 3.966570 4.293142 |labels 1.000000
|features 1.713323 0.534886 |labels 0.000000
|features 0.772032 -0.096214 |labels 0.000000
|features 3.392854 5.195064 |labels 1.000000
|features 5.063653 2.749764 |labels 1.000000
|features 1.410392 1.694554 |labels 0.000000
|features 0.540269 0.376759 |labels 0.000000
|features 4.103946 3.870140 |labels 1.000000
|features 5.132739 3.079176 |labels 1.000000
|features 2.524063 0.486934 |labels 0.000000
|features 0.046403 1.452778 |labels 0.000000
|features 1.705593 0.243750 |labels 0.000000
|features 1.621902 0.203138 |labels 0.000000
|features -0.420733 0.589060 |labels 0.000000
|features 2.887145 2.621849 |labels 1.000000
|features 5.545509 4.473069 |labels 1.000000
|features 0.326439 -0.162102 |labels 0.000000
|features 0.906097 -0.018566 |labels 0.000000
|features 3.398280 5.125843 |labels 1.000000
|features 0.833088 -0.808535 |labels 0.000000
|features 4.535285 4.133511 |labels 1.000000
|features 1.781705 4.123651 |labels 1.000000
|features 4.345894 3.355084 |labels 1.000000
|features 4.770073 3.007432 |labels 1.000000
|features 2.537267 3.813503 |labels 1.000000
|features 0.994347 2.567949 |labels 0.000000
|features 0.337262 -0.224479 |labels 0.000000
|features 4.936596 3.107819 |labels 1.000000
|features 2.177957 -0.544641 |labels 0.000000
|features 3.434811 2.806362 |labels 1.000000
|features 3.172973 4.378089 |labels 1.000000
|features 4.015349 3.000845 |labels 1.000000
|features 3.640748 3.917499 |labels 1.000000
|features 5.432434 4.092587 |labels 1.000000
|features 4.701984 4.063092 |labels 1.000000
|features 3.978015 3.584431 |labels 1.000000
|features 5.029923 2.346036 |labels 1.000000
|features 4.939017 3.209084 |labels 1.000000
|features 3.999592 2.747525 |labels 1.000000
|features 5.233483 4.877698 |labels 1.000000
|features 2.260049 1.023384 |labels 0.000000
|features -1.149943 1.257165 |labels 0.000000
|features -0.026270 0.468090 |labels 0.000000
|features 5.155107 4.620842 |labels 1.000000
|features 4.179414 4.807546 |labels 1.000000
|features 2.560286 0.526253 |labels 0.000000
|features 5.843334 1.439470 |labels 1.000000
|features 4.417442 4.483117 |labels 1.000000
|features 4.354138 4.496168 |labels 1.000000
|features 0.873730 2.230023 |labels 0.000000
|features 4.531298 4.944164 |labels 1.000000
|features 2.010164 -0.358403 |labels 0.000000
|features 1.165044 1.376602 |labels 0.000000
|features 1.451538 -0.197779 |labels 0.000000
|features -1.751961 0.210820 |labels 0.000000
|features 2.431281 3.878465 |labels 1.000000
|features 3.311168 3.697618 |labels 1.000000
|features 2.324742 -0.330745 |labels 0.000000
|features 1.447031 1.028776 |labels 0.000000
|features 0.711003 2.631227 |labels 0.000000
|features 4.872934 3.406132 |labels 1.000000
|features 2.419345 0.297983 |labels 0.000000
|features 0.437814 2.851194 |labels 0.000000
|features 3.105758 4.098041 |labels 1.000000
|features 5.310168 3.519401 |labels 1.000000
|features 1.218607 -1.505891 |labels 0.000000
|features 6.053827 2.848790 |labels 1.000000
|features 3.475758 3.352349 |labels 1.000000
|features 0.911730 -0.213069 |labels 0.000000
|features 1.255973 0.089677 |labels 0.000000
|features 4.152711 3.871858 |labels 1.000000
|features 3.003909 3.288998 |labels 1.000000
|features 0.291281 1.124965 |labels 0.000000
|features 2.155017 0.550642 |labels 0.000000
|features 3.494102 0.710991 |labels 0.000000
|features 4.376613 2.330150 |labels 1.000000
|features 4.707851 6.179972 |labels 1.000000
|features 0.614240 -0.243535 |labels 0.000000
|features 1.130049 0.870765 |labels 0.000000
|features 3.994615 2.855247 |labels 1.000000
|features 1.556420 0.106179 |labels 0.000000
|features 3.182309 5.121422 |labels 1.000000
|features 2.315933 0.418897 |labels 0.000000
|features 1.797904 0.633645 |labels 0.000000
|features 4.012446 3.887718 |labels 1.000000
|features 2.106849 3.776831 |labels 1.000000
|features 4.477828 3.989422 |labels 1.000000
|features 2.871290 4.610706 |labels 1.000000
|features 5.317459 5.621137 |labels 1.000000
|features 2.265963 -0.095395 |labels 0.000000
|features 2.963642 2.804267 |labels 1.000000
|features 5.859384 3.673343 |labels 1.000000
|features 6.365340 3.541960 |labels 1.000000
|features 1.450987 0.721751 |labels 0.000000
|features 4.641593 2.436289 |labels 1.000000
|features -0.126649 0.101750 |labels 0.000000
|features 1.835293 1.594895 |labels 0.000000
|features 2.121195 0.152643 |labels 0.000000
|features 1.881799 1.169974 |labels 0.000000
|features 2.421852 -0.089441 |labels 0.000000
|features 0.110206 -1.491046 |labels 0.000000
|features 6.200556 4.284843 |labels 1.000000
|features 3.545593 5.217408 |labels 1.000000
|features 3.365187 2.790974 |labels 1.000000
|features 6.493131 5.311132 |labels 1.000000
|features 0.800791 0.229630 |labels 0.000000
|features 4.975666 4.214251 |labels 1.000000
|features 1.562586 0.181976 |labels 0.000000
|features 0.899273 0.003180 |labels 0.000000
|features 6.064242 3.482802 |labels 1.000000
|features 1.777259 2.498596 |labels 0.000000
|features 5.479965 5.168898 |labels 1.000000
|features 4.671380 3.356556 |labels 1.000000
|features 1.730588 0.417775 |labels 0.000000
|features 2.463118 -0.305587 |labels 0.000000
|features 3.967679 0.361350 |labels 0.000000
|features 0.164925 -0.167591 |labels 0.000000
|features 4.777002 3.088492 |labels 1.000000
|features 2.049808 3.096552 |labels 0.000000
|features 1.416130 -1.043606 |labels 0.000000
|features 0.318913 -1.539956 |labels 0.000000
|features 6.004351 2.521442 |labels 1.000000
|features 2.969229 3.311301 |labels 1.000000
|features 0.879291 0.094171 |labels 0.000000
|features 5.290177 5.198102 |labels 1.000000
|features -0.305314 0.826116 |labels 0.000000
|features 2.091880 -1.176581 |labels 0.000000
|features 2.816867 2.875016 |labels 1.000000
|features 0.486424 -1.055319 |labels 0.000000
|features 3.012812 4.530291 |labels 1.000000
|features 1.137009 1.323397 |labels 0.000000
|features 0.088114 -0.353501 |labels 0.000000
|features 1.174005 0.188025 |labels 0.000000
|features 1.928114 1.398347 |labels 0.000000
|features 0.128505 1.430034 |labels 0.000000
|features 2.021187 0.577234 |labels 0.000000
|features 1.361335 0.394605 |labels 0.000000
|features 5.125811 4.221355 |labels 1.000000
|features 0.260733 1.758422 |labels 0.000000
|features 2.106970 0.305971 |labels 0.000000
|features 3.675850 5.051226 |labels 1.000000
|features 2.105405 0.240527 |labels 0.000000
|features 3.072167 3.130910 |labels 1.000000
|features 0.987479 0.036861 |labels 0.000000
|features -0.271382 0.094250 |labels 0.000000
|features 4.703495 2.620398 |labels 1.000000
|features 3.005831 2.220124 |labels 1.000000
|features 5.072896 1.477152 |labels 1.000000
|features 4.443991 3.679157 |labels 1.000000
|features 0.845034 0.419956 |labels 0.000000
|features 4.698964 3.109439 |labels 1.000000
|features 1.766144 0.595496 |labels 0.000000
|features 2.046076 0.433007 |labels 0.000000
|features 0.874663 1.010155 |labels 0.000000
|features 4.939031 5.340021 |labels 1.000000
|features 3.881158 3.072467 |labels 1.000000
|features 2.928763 4.160337 |labels 1.000000
|features 5.582289 4.805588 |labels 1.000000
|features 3.180992 3.459563 |labels 1.000000
|features -0.486820 -0.074926 |labels 0.000000
|features 4.091057 2.402846 |labels 1.000000
|features 4.915464 4.543850 |labels 1.000000
|features 1.492434 0.588755 |labels 0.000000
|features 2.594011 0.332043 |labels 0.000000
|features 0.317571 -0.525159 |labels 0.000000
|features 3.936029 4.312181 |labels 1.000000
|features 1.918811 -0.659594 |labels 0.000000
|features 2.657582 0.028525 |labels 0.000000
|features 4.637282 3.562483 |labels 1.000000
|features -0.097472 1.250080 |labels 0.000000
|features 1.340281 -1.399129 |labels 0.000000
|features 4.330372 3.140502 |labels 1.000000
|features 4.358103 3.760854 |labels 1.000000
|features 3.897352 4.806873 |labels 1.000000
|features 4.962704 4.692459 |labels 1.000000
|features 1.667918 -0.134096 |labels 0.000000
|features 4.929650 1.727842 |labels 1.000000
|features 2.434315 3.000448 |labels 1.000000
|features 1.179167 1.894836 |labels 0.000000
|features 0.190498 0.655592 |labels 0.000000
|features 3.408802 4.843020 |labels 1.000000
|features 4.497565 3.844998 |labels 1.000000
|features -0.501596 1.561013 |labels 0.000000
|features 4.158981 4.875362 |labels 1.000000
|features 4.017462 4.655003 |labels 1.000000
|features 3.319263 3.462037 |labels 1.000000
|features 2.635572 1.022114 |labels 0.000000
|features 2.638164 5.051437 |labels 1.000000
|features 4.875001 3.592322 |labels 1.000000
|features -0.276607 0.800369 |labels 0.000000
|features 4.351591 3.321136 |labels 1.000000
|features 3.699848 3.317014 |labels 1.000000
|features 4.947319 4.252134 |labels 1.000000
|features 4.146336 2.162761 |labels 1.000000
|features 5.231704 5.477804 |labels 1.000000
|features 3.302101 3.994218 |labels 1.000000
|features -0.249349 2.069960 |labels 0.000000
|features 4.705134 3.921461 |labels 1.000000
|features 4.652980 4.287917 |labels 1.000000
|features 3.937259 -0.334385 |labels 0.000000
|features 3.257619 2.758094 |labels 1.000000
|features 0.994191 3.135344 |labels 0.000000
|features 4.649768 2.123305 |labels 1.000000
|features 1.634135 0.241517 |labels 0.000000
|features 1.682542 2.057739 |labels 1.000000
|features 5.163117 4.467304 |labels 1.000000
|features 4.638594 4.141250 |labels 1.000000
|features 1.392605 0.635603 |labels 0.000000
|features 4.319784 2.965064 |labels 1.000000
|features 1.872466 1.566002 |labels 0.000000
|features 4.230714 5.179026 |labels 1.000000
|features 2.635294 3.470599 |labels 1.000000
|features 0.988464 0.943613 |labels 0.000000
|features 0.897546 0.129141 |labels 0.000000
|features 3.370731 2.019838 |labels 0.000000
|features 1.424812 0.081647 |labels 0.000000
|features 5.961444 3.372419 |labels 1.000000
|features 2.839070 0.926229 |labels 0.000000
|features 0.279132 1.607793 |labels 0.000000
|features 5.351031 3.693640 |labels 1.000000
|features 2.637437 1.951445 |labels 0.000000
|features -0.179258 0.349339 |labels 0.000000
|features 3.246295 1.013459 |labels 0.000000
|features 5.839643 4.556761 |labels 1.000000
|features 1.435225 0.937185 |labels 0.000000
|features 0.500440 0.348246 |labels 0.000000
|features 4.948782 4.994416 |labels 1.000000
|features 0.810541 0.456830 |labels 0.000000
|features 5.098827 4.142789 |labels 1.000000
|features 2.365307 0.729496 |labels 0.000000
|features -0.117730 0.891913 |labels 0.000000
|features 0.485735 0.513485 |labels 0.000000
|features 0.680270 1.486851 |labels 0.000000
|features 1.143053 0.227480 |labels 0.000000
|features 6.615446 4.561501 |labels 1.000000
|features 1.016051 1.862106 |labels 0.000000
|features 0.668177 -0.212610 |labels 0.000000
|features 2.906047 2.415627 |labels 1.000000
|features 5.576097 5.068683 |labels 1.000000
|features 1.315063 -0.040980 |labels 0.000000
|features 5.375285 3.306877 |labels 1.000000
|features 4.549934 3.805014 |labels 1.000000
|features 1.189238 0.661279 |labels 0.000000
|features 4.156567 3.280736 |labels 1.000000
|features 2.061355 1.090958 |labels 0.000000
|features 4.499387 3.640263 |labels 1.000000
|features 3.503883 1.015591 |labels 0.000000
|features 0.390200 -1.037188 |labels 0.000000
|features 2.922873 4.696711 |labels 1.000000
|features 1.803928 3.846808 |labels 1.000000
|features 0.907921 -2.139287 |labels 0.000000
|features 1.640739 0.592793 |labels 0.000000
|features 5.108193 3.194757 |labels 1.000000
|features 4.297873 4.034234 |labels 1.000000
|features 4.832678 4.073469 |labels 1.000000
|features 4.391764 3.557895 |labels 1.000000
|features 2.006343 0.836557 |labels 0.000000
|features 0.351400 1.534742 |labels 0.000000
|features 4.933823 2.937944 |labels 1.000000
|features 3.926482 2.073712 |labels 1.000000
|features 5.382385 4.818642 |labels 1.000000
|features 4.739010 3.213326 |labels 1.000000
|features 0.026227 0.177150 |labels 0.000000
|features 5.001353 3.300961 |labels 1.000000
|features 5.022782 2.921902 |labels 1.000000
|features 4.225051 4.534986 |labels 1.000000
|features 3.745148 -0.169000 |labels 0.000000
|features 5.891838 2.817417 |labels 1.000000

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft. All rights reserved.
Licensed under the MIT license. See LICENSE file in the project root for full license information.
"""
import numpy as np
from sklearn.utils import shuffle
# number of dimensions
Dim = 2
# number of samples
N_train = 1000
N_test = 500
def generate(N, mean, cov, diff):
    """Draw Gaussian clusters: a base class centered at `mean` plus one shifted
    class per entry of `diff`, then shuffle samples and labels together.

    N:    nominal sample count; N // 2 samples are drawn PER class, so the
          total returned is (len(diff) + 1) * (N // 2), not N -- callers rely
          on slicing the first N rows (TODO confirm this is intended).
    mean: center of class 0; its length must equal the feature dimension.
    cov:  covariance matrix shared by all classes.
    diff: per-class offsets added to `mean`; those classes get labels 1..len(diff).

    Returns (X, Y): feature matrix and integer label vector, jointly shuffled.
    """
    samples_per_class = N // 2
    # class 0: centered at `mean`
    X0 = np.random.multivariate_normal(mean, cov, samples_per_class)
    Y0 = np.zeros(samples_per_class)
    # classes 1..len(diff): same covariance, mean shifted by d
    for ci, d in enumerate(diff):
        X1 = np.random.multivariate_normal(mean + d, cov, samples_per_class)
        Y1 = (ci + 1) * np.ones(samples_per_class)
        X0 = np.concatenate((X0, X1))
        Y0 = np.concatenate((Y0, Y1))
    # shuffle features and labels with the same permutation
    X, Y = shuffle(X0, Y0)
    return X, Y
def create_data_files(num_classes, diff, train_filename, test_filename, regression):
    """Generate one training and one test file in CNTK Text Format.

    num_classes:    despite its name, callers pass the feature dimension here
                    (main() passes Dim); the parameter is kept for interface
                    compatibility but is no longer used to size mean/cov.
    diff:           class-mean offsets forwarded to generate(); the actual
                    number of classes is len(diff) + 1.
    train_filename: output path for the N_train-row training file.
    test_filename:  output path for the N_test-row test file.
    regression:     if True, write the raw numeric label; otherwise a one-hot
                    label vector.
    """
    print("Outputting %s and %s" % (train_filename, test_filename))
    # mean/cov live in feature space, so they must be sized by the feature
    # dimension Dim; sizing them by `num_classes` only worked because callers
    # pass Dim for that parameter.
    mean = np.random.randn(Dim)
    cov = np.eye(Dim)
    for filename, N in [(train_filename, N_train), (test_filename, N_test)]:
        X, Y = generate(N, mean, cov, diff)
        # output in CNTK Text Format: "|features f0 f1 ... |labels ..."
        # NOTE: only the first N shuffled samples are written; generate() may
        # return more rows than N (see its docstring).
        with open(filename, "w") as dataset:
            num_labels = int(1 + np.amax(Y))
            for i in range(N):
                dataset.write("|features ")
                for d in range(Dim):
                    dataset.write("%f " % X[i, d])
                if regression:
                    dataset.write("|labels %f\n" % Y[i])
                else:
                    # one-hot encoding of the integer class label
                    labels = ['0'] * num_labels
                    labels[int(Y[i])] = '1'
                    dataset.write("|labels %s\n" % " ".join(labels))
def main():
    """Generate all train/test data files with a fixed RNG seed so that
    regenerating produces byte-identical output."""
    np.random.seed(10)
    # (diff, train file, test file, regression?) for each dataset pair
    dataset_specs = [
        ([3.0], "Train_cntk_text.txt", "Test_cntk_text.txt", True),
        ([[3.0], [3.0, 0.0]], "Train-3Classes_cntk_text.txt", "Test-3Classes_cntk_text.txt", False),
    ]
    for diff, train_file, test_file, regression in dataset_specs:
        create_data_files(Dim, diff, train_file, test_file, regression)

if __name__ == '__main__':
    main()

Просмотреть файл

@ -0,0 +1,116 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# logistic regression cntk script -- using network description language BrainScript
# which commands to run (executed left to right)
command=Train:Output:DumpNodeInfo:Test
# required...
modelPath = "Models/LR_reg.dnn" # where to write the model to
deviceId = -1 # -1 means CPU; use 0 for your first GPU, 1 for the second etc.
dimension = 2 # input data dimensions
# training config
Train = [ # command=Train --> CNTK will look for a parameter named Train
action = "train" # execute CNTK's 'train' routine
# network description
BrainScriptNetworkBuilder = [
# sample and label dimensions
SDim = $dimension$
LDim = 1
features = Input (SDim)
labels = Input (LDim)
# parameters to learn
b = Parameter (LDim, 1) # bias
w = Parameter (LDim, SDim) # weights
# operations
p = Sigmoid (w * features + b) # predicted class-1 probability: logistic of the affine projection
lr = Logistic (labels, p) # logistic loss -- the training criterion
err = SquareError (labels, p) # squared error -- used only as an evaluation metric
# root nodes
featureNodes = (features)
labelNodes = (labels)
criterionNodes = (lr)
evaluationNodes = (err)
outputNodes = (p)
]
# configuration parameters of the SGD procedure
SGD = [
epochSize = 0 # =0 means size of the training set
minibatchSize = 25
learningRatesPerSample = 0.04 # gradient contribution from each sample
maxEpochs = 50
]
# configuration of data reading
reader = [
readerType = "CNTKTextFormatReader"
file = "Train_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = 1
format = "dense"
]
]
]
]
# test
Test = [
action = "test"
reader = [
readerType = "CNTKTextFormatReader"
file = "Test_cntk_text.txt"
input = [
features = [
dim = $dimension$
format = "dense"
]
labels = [
dim = 1
format = "dense"
]
]
]
]
# output the results (writes the network's outputNodes for each test sample)
Output = [
action = "write"
reader = [
readerType = "CNTKTextFormatReader"
file = "Test_cntk_text.txt"
input = [
features = [
dim = $dimension$ # $name$ substitutes the value of the variable 'dimension'
format = "dense"
]
labels = [
dim = 1 # label has 1 dimension
format = "dense"
]
]
]
outputPath = "LR.txt" # dump the output to this text file
]
# dump parameter values (writes the learned node values, e.g. w and b, as text)
DumpNodeInfo = [
action = "dumpNode"
printValues = true
]

Просмотреть файл

@ -12,7 +12,11 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
This project is based on or incorporates material from the projects listed below (Third Party IP). The original copyright notice and the license under which Microsoft received such Third Party IP, are set forth below. Such licenses and notices are provided for informational purposes only. Where permitted, Microsoft licenses the Third Party IP to you under the licensing terms for the Microsoft product. Microsoft reserves all other rights not expressly granted under this agreement, whether by implication, estoppel or otherwise.
### a. BOOST C++ LIBRARIES
### a. INTEL (R) MATH KERNEL LIBRARY (INTEL (R) MKL)
CNTK distribution contains Redistributable components of Intel (r) Math Kernel Library (Intel (r) MKL)
### b. BOOST C++ LIBRARIES
Copyright Beman Dawes, David Abrahams, 1998-2005.
Copyright Rene Rivera 2004-2007.
@ -27,7 +31,7 @@ The copyright notices in the Software and this entire statement, including the a
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
### b. ATIS DATASETS
### c. ATIS DATASETS
CNTK distribution contains a subset of ATIS Datasets:
@ -39,23 +43,23 @@ Dahl, Deborah, et al. ATIS3 Test Data LDC95S26. Web Download. Philadelphia: Ling
Dahl, Deborah, et al. ATIS3 Training Data LDC94S19. Web Download. Philadelphia: Linguistic Data Consortium, 1994.
### c. TIMIT ACOUSTIC-PHONETIC CONTINUOUS SPEECH CORPUS
### d. TIMIT ACOUSTIC-PHONETIC CONTINUOUS SPEECH CORPUS
CNTK distribution contains a subset of TIMIT Acoustic-Phonetic Continuous Speech Corpus:
Garofolo, John, et al. TIMIT Acoustic-Phonetic Continuous Speech Corpus LDC93S1. Web Download. Philadelphia: Linguistic Data Consortium, 1993.
### d. THE PENN TREEBANK PROJECT
### e. THE PENN TREEBANK PROJECT
CNTK distribution contains a subset of the data of The Penn Treebank Project:
Marcus, Mitchell, Beatrice Santorini, and Mary Ann Marcinkiewicz. Treebank-2 LDC95T7. Web Download. Philadelphia: Linguistic Data Consortium, 1995.
### e. THE CMU AUDIO DATABASES
### f. THE CMU AUDIO DATABASES
CNTK distribution contains a subset of the CMU Audio Databases
Copyright (c) 1991-2005 Carnegie Mellon University. All rights reserved.
### f. THE MNIST DATABASE OF HANDWRITTEN DIGITS
### g. THE MNIST DATABASE OF HANDWRITTEN DIGITS
CNTK distribution contains a subset of the MNIST Database of Handwritten Digits
CNTK distribution contains a subset of the MNIST Database of Handwritten Digits

Просмотреть файл

@ -11,7 +11,11 @@
# defaults to release
# ACML_PATH= path to ACML library installation
# only needed if MATHLIB=acml
# MKL_PATH= path to MKL library installation
# MKL_PATH= path to CNTK custom MKL installation
# only needed if MATHLIB=mkl
# CNTK_CUSTOM_MKL_VERSION=2
# version for the CNTK custom MKL installation
# MKL_THREADING=parallel|sequential
# only needed if MATHLIB=mkl
# GDK_PATH= path to cuda gdk installation, so $(GDK_PATH)/include/nvidia/gdk/nvml.h exists
# defaults to /usr
@ -131,9 +135,15 @@ ifeq ("$(MATHLIB)","acml")
endif
ifeq ("$(MATHLIB)","mkl")
INCLUDEPATH += $(MKL_PATH)/mkl/include
LIBPATH += $(MKL_PATH)/compiler/lib/intel64 $(MKL_PATH)/mkl/lib/intel64 $(MKL_PATH)/compiler/lib/mic $(MKL_PATH)/mkl/lib/mic
LIBS += -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lm -liomp5 -lpthread
INCLUDEPATH += $(MKL_PATH)/$(CNTK_CUSTOM_MKL_VERSION)/include
LIBS += -lm
ifeq ("$(MKL_THREADING)","sequential")
LIBPATH += $(MKL_PATH)/$(CNTK_CUSTOM_MKL_VERSION)/x64/sequential
LIBS += -lmkl_cntk_s
else
LIBPATH += $(MKL_PATH)/$(CNTK_CUSTOM_MKL_VERSION)/x64/parallel
LIBS += -lmkl_cntk_p -liomp5 -lpthread
endif
COMMON_FLAGS += -DUSE_MKL
endif
@ -247,6 +257,7 @@ READER_SRC =\
$(SOURCEDIR)/Readers/ReaderLib/TruncatedBpttPacker.cpp \
$(SOURCEDIR)/Readers/ReaderLib/PackerBase.cpp \
$(SOURCEDIR)/Readers/ReaderLib/FramePacker.cpp \
$(SOURCEDIR)/Readers/ReaderLib/ChunkCache.cpp \
COMMON_SRC =\
$(SOURCEDIR)/Common/Config.cpp \
@ -367,25 +378,25 @@ $(LIBDIR)/CompositeDataReader.so: $(COMPOSITEDATAREADER_OBJ) | $(CNTKMATH_LIB)
$(CXX) $(LDFLAGS) -shared $(patsubst %,-L%, $(LIBDIR) $(LIBPATH)) $(patsubst %,$(RPATH)%, $(ORIGINDIR) $(LIBPATH)) -o $@ $^ -l$(CNTKMATH)
########################################
# ExperimentalHTKMLFReader plugin
# HTKDeserializers plugin
########################################
EXPERIMENTALHTKMLFREADER_SRC =\
HTKDESERIALIZERS_SRC =\
$(SOURCEDIR)/Readers/HTKMLFReader/DataWriterLocal.cpp \
$(SOURCEDIR)/Readers/HTKMLFReader/HTKMLFWriter.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/ConfigHelper.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/Exports.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/HTKDataDeserializer.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/HTKMLFReader.cpp \
$(SOURCEDIR)/Readers/ExperimentalHTKMLFReader/MLFDataDeserializer.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/ConfigHelper.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/Exports.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/HTKDataDeserializer.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/HTKMLFReader.cpp \
$(SOURCEDIR)/Readers/HTKDeserializers/MLFDataDeserializer.cpp \
EXPERIMENTALHTKMLFREADER_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(EXPERIMENTALHTKMLFREADER_SRC))
HTKDESERIALIZERS_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(HTKDESERIALIZERS_SRC))
EXPERIMENTALHTKMLFREADER:=$(LIBDIR)/ExperimentalHTKMLFReader.so
ALL+=$(EXPERIMENTALHTKMLFREADER)
SRC+=$(EXPERIMENTALHTKMLFREADER_SRC)
HTKDESERIALIZERS:=$(LIBDIR)/HTKDeserializers.so
ALL+=$(HTKDESERIALIZERS)
SRC+=$(HTKDESERIALIZERS_SRC)
$(LIBDIR)/ExperimentalHTKMLFReader.so: $(EXPERIMENTALHTKMLFREADER_OBJ) | $(CNTKMATH_LIB)
$(LIBDIR)/HTKDeserializers.so: $(HTKDESERIALIZERS_OBJ) | $(CNTKMATH_LIB)
@echo $(SEPARATOR)
$(CXX) $(LDFLAGS) -shared $(patsubst %,-L%, $(LIBDIR) $(LIBPATH)) $(patsubst %,$(RPATH)%, $(ORIGINDIR) $(LIBPATH)) -o $@ $^ -l$(CNTKMATH)
@ -688,15 +699,15 @@ DEP := $(patsubst %.o, %.d, $(OBJ))
# will result in the rebuild.
-include ${DEP}
MAKEFILES := Makefile $(BUILD_TOP)/Config.make
$(OBJDIR)/%.o : %.cu $(MAKEFILES)
BUILD_CONFIGURATION := Makefile $(BUILD_TOP)/Config.make
$(OBJDIR)/%.o : %.cu $(BUILD_CONFIGURATION)
@echo $(SEPARATOR)
@echo creating $@ for $(ARCH) with build type $(BUILDTYPE)
@mkdir -p $(dir $@)
$(NVCC) -c $< -o $@ $(COMMON_FLAGS) $(CUFLAGS) $(INCLUDEPATH:%=-I%) -Xcompiler "-fPIC -Werror"
$(OBJDIR)/%.o : %.cpp $(MAKEFILES)
$(OBJDIR)/%.o : %.cpp $(BUILD_CONFIGURATION)
@echo $(SEPARATOR)
@echo creating $@ for $(ARCH) with build type $(BUILDTYPE)
@mkdir -p $(dir $@)

Просмотреть файл

@ -1,16 +1,16 @@
# CNTK
## Latest news
*2016-05-05.* CNTK now supports *BlockMomentum* Stochastic Gradient Descent (SGD) algorithm.
See the details in the [Multiple GPUs and machines Wiki section](https://github.com/Microsoft/CNTK/wiki/Multiple-GPUs-and-machines)
*2016-06-10.* See CNTK v.1.5 binary release announcement in the official [Microsoft Research Blog](https://blogs.msdn.microsoft.com/msr_er/2016/06/10/microsoft-improves-programming-flexibility-of-its-ai-toolkit/)
*2016-05-03.* New transformations are implemented for **Image Reader**.
See the description in the [Image Reader Wiki section](https://github.com/Microsoft/CNTK/wiki/Image-reader)
*2016-06-08.* V 1.5 Binary release
CNTK v.1.5 binaries are on the [CNTK Releases page](https://github.com/Microsoft/CNTK/releases)
*2016-04-25.* V 1.1 Binary release
CNTK v.1.1 binaries are on the [CNTK Releases page](https://github.com/Microsoft/CNTK/releases/tag/v1.1)
*2016-06-01.* An updated version of the network-description language has been made available under the new [BrainScript Network Builder](https://github.com/Microsoft/CNTK/wiki/BrainScript-Network-Builder), which features full expression parsing, recursive functions, and more.
*2016-04-12.* CNTK is available as [Azure Virtual Machines](https://github.com/Microsoft/CNTK/wiki/CNTK-on-Azure) and [Docker Containers](https://github.com/Microsoft/CNTK/wiki/CNTK-Docker-Containers)
*2016-05-19.* A 1-hour talk describing CNTK, how to use it, and how it works, has been posted at [Presentations](https://github.com/Microsoft/CNTK/wiki/Presentations).
*2016-05-16.* An example illustrating [Using CNTK with ResNet](https://github.com/Microsoft/CNTK/tree/master/Examples/Image/Miscellaneous/ImageNet/ResNet) is added to the codebase. The example contains some pre-trained models that can be used in various applications.
See [all news](https://github.com/Microsoft/CNTK/wiki/News).

2
Scripts/pytest.ini Normal file
Просмотреть файл

@ -0,0 +1,2 @@
[pytest]
python_files = *.py

116
Scripts/txt2ctf.py Normal file
Просмотреть файл

@ -0,0 +1,116 @@
#!/usr/bin/env python
# This script takes a list of dictionary files and a plain text file and converts this text input file to CNTK text format.
#
# The input text file must contain N streams per line (N TAB-separated "columns") and should be accompanied by N dictionary files.
# The input text file must be in the following form:
# text1 TAB text2 TAB ... TAB textN
# .....
# where each line represents one sequence across all N input streams.
# Each text consists of one or more space-separated word tokens (samples).
#
# Dictionary files are text files that are required to be specified for all streams,
# so the #dictionaries = #columns in the input file.
# A dictionary contains a single token per line. The zero-based line number becomes the numeric index
# of the token in the output CNTK text format file.
# Example usage (i.e. for PennTreebank files):
# sed -e 's/^<\/s> //' -e 's/ <\/s>$//' < en.txt > en.txt1
# sed -e 's/^<\/s> //' -e 's/ <\/s>$//' < fr.txt > fr.txt1
# paste en.txt1 fr.txt1 | txt2ctf.py --map en.dict fr.dict > en-fr.ctf
#
import sys
import argparse
def convert(dictionaryStreams, inputs, output, annotated):
    # Build one token -> numeric-index mapping per stream; the index of a token
    # is its zero-based line number in the corresponding dictionary file.
    dictionaries = [{line.rstrip('\r\n').strip(): index for index, line in enumerate(stream)}
                    for stream in dictionaryStreams]

    # Every line of every input file is one sequence spanning all N streams.
    # NOTE(review): sequenceId restarts at 0 for each input file — confirm that
    # downstream consumers accept repeated sequence ids across files.
    for inputStream in inputs:
        sequenceId = 0
        for lineNumber, rawLine in enumerate(inputStream):
            stripped = rawLine.rstrip('\r\n')
            columns = stripped.split("\t")
            if len(columns) != len(dictionaries):
                raise Exception("Number of dictionaries {0} does not correspond to the number of streams in line {1}:'{2}'"
                    .format(len(dictionaries), lineNumber, stripped))
            _convertSequence(dictionaries, columns, sequenceId, output, annotated)
            sequenceId += 1
def _convertSequence(dictionaries, streams, sequenceId, output, annotated):
tokensPerStream = [[t for t in s.strip(' ').split(' ') if t != ""] for s in streams]
maxLen = max(len(tokens) for tokens in tokensPerStream)
# writing to the output file
for sampleIndex in range(maxLen):
output.write(str(sequenceId))
for streamIndex in range(len(tokensPerStream)):
if len(tokensPerStream[streamIndex]) <= sampleIndex:
output.write("\t")
continue
token = tokensPerStream[streamIndex][sampleIndex]
if token not in dictionaries[streamIndex]:
raise Exception("Token '{0}' cannot be found in the dictionary for stream {1}".format(token, streamIndex))
value = dictionaries[streamIndex][token]
output.write("\t|S" + str(streamIndex) + " "+ str(value) + ":1")
if annotated:
output.write(" |# " + token)
output.write("\n")
if __name__ == "__main__":
    # Command-line entry point: read N dictionaries and N-column input text,
    # write CNTK text format to --output (or stdout).
    parser = argparse.ArgumentParser(description="Transforms text file given dictionaries into CNTK text format.")
    parser.add_argument('--map', help='List of dictionaries, given in the same order as streams in the input files',
        nargs="+", required=True)
    parser.add_argument('--annotated', help='Whether to annotate indices with tokens. Default is false',
        choices=["True", "False"], default="False", required=False)
    parser.add_argument('--output', help='Name of the output file, stdout if not given', default="", required=False)
    parser.add_argument('--input', help='Name of the inputs files, stdin if not given', default="", nargs="*", required=False)
    args = parser.parse_args()

    # creating inputs
    inputs = [sys.stdin]
    if len(args.input) != 0:
        inputs = [open(i) for i in args.input]

    # creating output
    output = sys.stdout
    if args.output != "":
        output = open(args.output, "w")

    dictionaries = [open(d) for d in args.map]
    try:
        convert(dictionaries, inputs, output, args.annotated == "True")
    finally:
        # Close every file we opened ourselves (previously these handles were
        # leaked and the output relied on interpreter exit to be flushed).
        # Never close stdin/stdout, which we do not own.
        for stream in dictionaries:
            stream.close()
        if inputs[0] is not sys.stdin:
            for stream in inputs:
                stream.close()
        if output is not sys.stdout:
            output.close()
#####################################################################################################
# Tests
#####################################################################################################
import StringIO
import pytest
def test_simpleSanityCheck():
    # Two streams, two sequences: verify the exact CTF output, including the
    # empty cell where stream 0 is shorter than stream 1.
    wordDict = StringIO.StringIO("hello\nmy\nworld\nof\nnothing\n")
    otherDict = StringIO.StringIO("let\nme\nbe\nclear\nabout\nit\n")
    source = StringIO.StringIO("hello my\tclear about\nworld of\tit let clear\n")
    result = StringIO.StringIO()

    convert([wordDict, otherDict], [source], result, False)

    expectedLines = [
        "0\t|S0 0:1\t|S1 3:1",
        "0\t|S0 1:1\t|S1 4:1",
        "1\t|S0 2:1\t|S1 5:1",
        "1\t|S0 3:1\t|S1 0:1",
        "1\t\t|S1 3:1",
    ]
    assert "".join(line + "\n" for line in expectedLines) == result.getvalue()
def test_nonExistingWord():
    # A token absent from its stream's dictionary must raise with a message
    # naming the token and the stream index.
    dictionary1 = StringIO.StringIO("hello\nmy\nworld\nof\nnothing\n")
    input = StringIO.StringIO("hello my\nworld of nonexistent\n")
    output = StringIO.StringIO()
    with pytest.raises(Exception) as info:
        convert([dictionary1], [input], output, False)
    # str(info.value) instead of info.value.message: BaseException.message is
    # Python-2-only (removed in Python 3); str() is the portable pytest idiom.
    assert str(info.value) == "Token 'nonexistent' cannot be found in the dictionary for stream 0"

Просмотреть файл

@ -20,7 +20,6 @@
#define let const auto
#endif
using namespace std;
using namespace Microsoft::MSR;
using namespace Microsoft::MSR::CNTK; // TODO: we should not have this in a header
@ -32,7 +31,7 @@ template <class ConfigRecordType, typename ElemType>
function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory(const ConfigRecordType& config);
template <class ConfigRecordType, typename ElemType>
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, vector<wstring>& outputNodeNamesVector);
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, const std::wstring& outputNodeNameConfig, std::vector<std::wstring>& outputNodeNamesVector);
// training (TrainActions.cpp)
template <class ConfigRecordType, typename ElemType>

Просмотреть файл

@ -46,7 +46,7 @@ using namespace Microsoft::MSR::CNTK;
template <typename ElemType>
static void DoEvalBase(const ConfigParameters& config, IDataReader& reader)
{
DEVICEID_TYPE deviceId = DeviceFromConfig(config);
//DEVICEID_TYPE deviceId = DeviceFromConfig(config);
ConfigArray minibatchSize = config(L"minibatchSize", "40960");
size_t epochSize = config(L"epochSize", "0");
if (epochSize == 0)
@ -58,26 +58,23 @@ static void DoEvalBase(const ConfigParameters& config, IDataReader& reader)
int traceLevel = config(L"traceLevel", "0");
size_t numMBsToShowResult = config(L"numMBsToShowResult", "100");
size_t firstMBsToShowResult = config(L"firstMBsToShowResult", "0");
size_t maxSamplesInRAM = config(L"maxSamplesInRAM", (size_t)SIZE_MAX);
size_t numSubminiBatches = config(L"numSubminibatches", (size_t)1);
bool enableDistributedMBReading = config(L"distributedMBReading", false);
ConfigArray evalNodeNames = config(L"evalNodeNames", "");
vector<wstring> evalNodeNamesVector;
for (int i = 0; i < evalNodeNames.size(); ++i)
{
evalNodeNamesVector.push_back(evalNodeNames[i]);
}
auto net = ComputationNetwork::CreateFromFile<ElemType>(deviceId, modelPath);
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"evalNodeNames", evalNodeNamesVector);
// set tracing flags
net->EnableNodeTracing(config(L"traceNodeNamesReal", ConfigParameters::Array(stringargvector())),
config(L"traceNodeNamesCategory", ConfigParameters::Array(stringargvector())),
config(L"traceNodeNamesSparse", ConfigParameters::Array(stringargvector())));
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult,
firstMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
eval.Evaluate(&reader, evalNodeNamesVector, mbSize[0], epochSize);
}
@ -124,6 +121,7 @@ void DoCrossValidate(const ConfigParameters& config)
int traceLevel = config(L"traceLevel", "0");
size_t numMBsToShowResult = config(L"numMBsToShowResult", "100");
size_t firstMBsToShowResult = config(L"firstMBsToShowResult", "0");
size_t maxSamplesInRAM = config(L"maxSamplesInRAM", (size_t)SIZE_MAX);
size_t numSubminiBatches = config(L"numSubminibatches", (size_t)1);
@ -160,8 +158,10 @@ void DoCrossValidate(const ConfigParameters& config)
cvModels.push_back(cvModelPath);
auto net = ComputationNetwork::CreateFromFile<ElemType>(deviceId, cvModelPath);
// BUGBUG: ^^ Should use GetModelFromConfig()
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
SimpleEvaluator<ElemType> eval(net, MPIWrapper::GetInstance(), enableDistributedMBReading, numMBsToShowResult,
firstMBsToShowResult, traceLevel, maxSamplesInRAM, numSubminiBatches);
fprintf(stderr, "Model %ls --> \n", cvModelPath.c_str());
auto evalErrors = eval.Evaluate(&cvDataReader, evalNodeNamesVector, mbSize[0], epochSize);
@ -213,8 +213,6 @@ template <typename ElemType>
void DoWriteOutput(const ConfigParameters& config)
{
ConfigParameters readerConfig(config(L"reader"));
// Why?
//readerConfig.Insert("traceLevel", config(L"traceLevel", "0"));
readerConfig.Insert("randomize", "None"); // we don't want randomization when output results
DataReader testDataReader(readerConfig);
@ -230,7 +228,7 @@ void DoWriteOutput(const ConfigParameters& config)
vector<wstring> outputNodeNamesVector;
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, outputNodeNamesVector);
let net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"outputNodeNames", outputNodeNamesVector);
// set tracing flags
net->EnableNodeTracing(config(L"traceNodeNamesReal", ConfigParameters::Array(stringargvector())),

Просмотреть файл

@ -292,24 +292,33 @@ void NDLNodeEvaluatorImpl<ElemType>::Evaluate(NDLNode<ElemType>* node, const wst
nodePtr = builder.FutureValue(NULL, defaultHiddenActivity, rows, timeStep, name);
}
}
else if (cnNodeType == OperationNameOf(ConvolutionNode) || cnNodeType == OperationNameOf(PoolingNode))
else if (cnNodeType == OperationNameOf(ConvolutionNode) ||
cnNodeType == OperationNameOf(PoolingNode) ||
cnNodeType == OperationNameOf(MaxUnpoolingNode))
{
if (parameter.size() != 3 && parameter.size() != 7)
if (parameter.size() != 2 && parameter.size() != 3 && parameter.size() != 7)
{
if (cnNodeType == OperationNameOf(ConvolutionNode))
{
RuntimeError("%ls: unexpected parameter count. %ls supports 2 modes: \n"
"1. 2D convolution which takes 7 fixed parameters [weightNodeName, inputValueNodeName, kernelWidth, kernelHeight, outputChannels,horizontalSubsample, verticalSubsample] \n"
"1. 2D convolution which takes 7 fixed parameters [weightNodeName, inputValueNodeName, kernelWidth, kernelHeight, outputChannels, horizontalSubsample, verticalSubsample] \n"
"and two optional parameters [zeroPadding = [false|yourvalue], maxTempMemSizeInSamples = [0|yourvalue], imageLayout = \"HWC\"|\"cudnn\"]. \n"
"2. ND convolution which takes 3 fixed parameters [weightNodeName, inputValueNodeName, kernelShape] and \n"
"9 optional parameters [mapCount = [1|yourvalue], stride = [1|yourvalue], sharing = [true|yourvalue], autoPadding = [true|yourvalue], lowerPad = [0|yourvalue], upperPad = [0|yourvalue], maxTempMemSizeInSamples = [0|yourvalue], imageLayout = \"cudnn\"|\"HWC\"]. \n"
"10 optional parameters [mapCount = [1|yourvalue], stride = [1|yourvalue], sharing = [true|yourvalue], autoPadding = [true|yourvalue], lowerPad = [0|yourvalue], upperPad = [0|yourvalue], bool transpose = [false|yourvalue], maxTempMemSizeInSamples = [0|yourvalue], imageLayout = \"cudnn\"|\"HWC\"]. \n"
"For ND convolution, parameters kernelShape, mapCount, stride, sharing, autoPadding, lowerPad, upperPad can be arrays, e.g. kernelShape={5, 5, 3}",
cnNodeType.c_str(), cnNodeType.c_str());
}
else
else if (cnNodeType == OperationNameOf(PoolingNode))
{
RuntimeError("%ls: unexpected parameter count. %ls 3 fixed parameters [inputValueNodeName, poolKind, kernelShape] and \n"
"5 optional parameters stride = [1|yourvalue], autoPadding = [true|yourvalue], lowerPad = [0|yourvalue], upperPad = [0|yourvalue], imageLayout = \"cudnn\"|\"HWC\"]. \n"
"5 optional parameters stride = [1|yourvalue], autoPadding = [true|yourvalue], lowerPad = [0|yourvalue], upperPad = [0|yourvalue], imageLayout = \"cudnn\"]. \n"
"Parameters kernelShape, stride, autoPadding, lowerPad, upperPad can be arrays, e.g. kernelShape={5, 5, 3}",
cnNodeType.c_str(), cnNodeType.c_str());
}
else if (cnNodeType == OperationNameOf(MaxUnpoolingNode))
{
RuntimeError("%ls: unexpected parameter count. %ls 3 fixed parameters [inputValueNodeName, mask, kernelShape] and \n"
"5 optional parameters stride = [1|yourvalue], autoPadding = [true|yourvalue], lowerPad = [0|yourvalue], upperPad = [0|yourvalue], imageLayout = \"cudnn\"]. \n"
"Parameters kernelShape, stride, autoPadding, lowerPad, upperPad can be arrays, e.g. kernelShape={5, 5, 3}",
cnNodeType.c_str(), cnNodeType.c_str());
}
@ -317,11 +326,13 @@ void NDLNodeEvaluatorImpl<ElemType>::Evaluate(NDLNode<ElemType>* node, const wst
// setup the parameter position of children so we can hook them up later
nodeParamStart = 0;
nodeParamCount = cnNodeType == OperationNameOf(ConvolutionNode) ? 2 : 1;
nodeParamCount = (cnNodeType == OperationNameOf(ConvolutionNode) || cnNodeType == OperationNameOf(MaxUnpoolingNode))
? 2
: 1;
if (pass == ndlPassInitial)
{
if (parameter.size() == 3)
if (parameter.size() == 2 || parameter.size() == 3)
{
auto reqParams = node->GetParameters(false);
auto optParams = node->GetParameters(true);
@ -378,21 +389,19 @@ void NDLNodeEvaluatorImpl<ElemType>::Evaluate(NDLNode<ElemType>* node, const wst
ImageLayoutKind imageLayout = ImageLayoutKindFrom(node->GetOptionalParameter("imageLayout", "CHW"));
size_t maxTempMemSizeInSamples = node->GetOptionalParameter("maxTempMemSizeInSamples", "0");
auto pool = PoolKind::None;
if (cnNodeType == OperationNameOf(PoolingNode))
if (cnNodeType == OperationNameOf(MaxUnpoolingNode))
nodePtr = builder.MaxUnpooling(NULL, NULL, kernelShape, stride, autoPad, lowerPad, upperPad, imageLayout, name);
else if (cnNodeType == OperationNameOf(PoolingNode))
{
auto parm = node->GetParentScript()->ParseVariable(reqParams[1]->GetValue(), false);
pool = PoolKindFrom(wstring(parm->GetValue()));
}
if (pool == PoolKind::None)
{
nodePtr = builder.Convolution(NULL, NULL, kernelShape, mapCount, stride, sharing,
autoPad, lowerPad, upperPad, imageLayout, maxTempMemSizeInSamples, name);
auto pool = PoolKindFrom(wstring(parm->GetValue()));
nodePtr = builder.Pooling(NULL, pool, kernelShape, stride, autoPad, lowerPad, upperPad, imageLayout, name);
}
else
{
nodePtr = builder.Pooling(NULL, pool, kernelShape, stride, autoPad, lowerPad, upperPad, imageLayout, name);
bool transpose = node->GetOptionalParameter("transpose", "false");
nodePtr = builder.Convolution(NULL, NULL, kernelShape, mapCount, stride, sharing,
autoPad, lowerPad, upperPad, transpose, imageLayout, maxTempMemSizeInSamples, name);
}
}

Просмотреть файл

@ -158,6 +158,12 @@ bool CheckFunction(std::string& p_nodeType, bool* allowUndeterminedVariable)
else if (EqualInsensitive(nodeType, OperationNameOf(CRFNode), L"CRF")) ret = true;
#endif
else if (EqualInsensitive(nodeType, OperationNameOf(ClassBasedCrossEntropyWithSoftmaxNode), L"CBCEWithSM")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(EqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(GreaterEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(GreaterNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(LessEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(LessNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(NotEqualNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ClipNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(ConvolutionNode), L"Convolve")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(PoolingNode))) ret = true;
@ -191,6 +197,7 @@ bool CheckFunction(std::string& p_nodeType, bool* allowUndeterminedVariable)
else if (EqualInsensitive(nodeType, OperationNameOf(MatrixL1RegNode), L"L1Reg")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(MatrixL2RegNode), L"L2Reg")) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(MaxPoolingNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(MaxUnpoolingNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(MeanNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(MinusNode))) ret = true;
else if (EqualInsensitive(nodeType, OperationNameOf(NegateNode))) ret = true;

Просмотреть файл

@ -142,11 +142,11 @@ static void PatchOutputNodes(const ComputationNetworkPtr& net, const ConfigArray
}
template <class ConfigRecordType, typename ElemType>
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, vector<wstring>& outputNodeNamesVector)
ComputationNetworkPtr GetModelFromConfig(const ConfigRecordType& config, const wstring& outputNodeNamesConfig, vector<wstring>& outputNodeNamesVector)
{
DEVICEID_TYPE deviceId = DeviceFromConfig(config);
ConfigArray outputNodeNames = config(L"outputNodeNames", ConfigArray(""));
ConfigArray outputNodeNames = config(outputNodeNamesConfig.c_str(), ConfigArray(""));
ComputationNetworkPtr net;
@ -185,5 +185,5 @@ template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<Script
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ScriptableObjects::IConfigRecord, double>(const ScriptableObjects::IConfigRecord& config);
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ConfigParameters, float>(const ConfigParameters& config);
template function<ComputationNetworkPtr(DEVICEID_TYPE)> GetNetworkFactory<ConfigParameters, double>(const ConfigParameters& config);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, float>(const ConfigParameters& config, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, double>(const ConfigParameters& config, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, float> (const ConfigParameters& config, const wstring&, vector<wstring>& outputNodeNamesVector);
template ComputationNetworkPtr GetModelFromConfig<ConfigParameters, double>(const ConfigParameters& config, const wstring&, vector<wstring>& outputNodeNamesVector);

Просмотреть файл

@ -59,7 +59,10 @@ template <class C>
shared_ptr<C> CreateObject(const ConfigParameters& config, const wchar_t* id)
{
ConfigParameters readerConfig(config(id));
readerConfig.Insert("traceLevel", config(L"traceLevel", "0")); // TODO: fix this by adding it to all config blocks. Easy to fix in BS as 'config with [ traceLevel = 0 ]'.
if (!readerConfig.ExistsCurrent("traceLevel")) // do not overwrite "traceLevel" if it's already present
{
readerConfig.Insert("traceLevel", config(L"traceLevel", "0")); // TODO: fix this by adding it to all config blocks. Easy to fix in BS as 'config with [ traceLevel = 0 ]'.
}
return make_shared<C>(readerConfig); // old CNTK config specifies a dictionary which then must be explicitly instantiated
}

Просмотреть файл

@ -874,27 +874,48 @@ public:
{
let &config = *configp;
double &us = *this; // we write to this
let arg = config[L"arg"];
let whatArg = config[L"what"];
wstring what = whatArg;
if (what == L"Floor")
us = floor((double) arg);
else if (what == L"Length")
if (what == L"Floor" || what == L"Length") // one-arg functions
{
if (arg.Is<String>())
us = (double) ((wstring &) arg).size();
else // otherwise expect an array
let arg = config[L"arg"];
if (what == L"Floor")
{
let & arr = arg.AsRef<ConfigArray>();
let range = arr.GetIndexRange();
us = (double) (range.second + 1 - range.first);
us = floor((double)arg);
}
else if (what == L"Length")
{
if (arg.Is<String>())
us = (double)((wstring &)arg).size();
else // otherwise expect an array
{
let & arr = arg.AsRef<ConfigArray>();
let range = arr.GetIndexRange();
us = (double)(range.second + 1 - range.first);
}
}
}
else if (what == L"Mod" || what == L"IntDiv") //two-arg int functions
{
let argsArg = config[L"args"];
let& args = argsArg.AsRef<ConfigArray>();
auto range = args.GetIndexRange();
if (range.second != range.first + 1)
argsArg.Fail(L"Mod/IntDiv expects two arguments");
let arg1 = (int)args.At(range.first);
let arg2 = (int)args.At(range.second);
if (what == L"Mod")
us = (int)(arg1 % arg2);
else if (what == L"IntDiv")
us = (int)(arg1 / arg2);
}
else
whatArg.Fail(L"Unknown 'what' value to NumericFunction: " + what);
}
};
// CompareFunctions
// - IsSameObject()
class CompareFunction : public BoxOf<Bool>

Просмотреть файл

@ -381,6 +381,7 @@ private:
}
// find a file either at given location or traverse include paths
// TODO: also allow ... syntax, where ... refers to the directory of the enclosing file
static wstring FindSourceFile(const wstring& path, const vector<wstring>& includePaths)
{
if (File::Exists(path))

Просмотреть файл

@ -21,6 +21,20 @@ Min(a,b) = if a < b then a else b
Max(a,b) = if a > b then a else b
Fac(n) = if n > 1 then Fac(n-1) * n else 1
IsSameObject(a,b) = new CompareFunction [ what = 'IsSameObject' ; args = (a : b) ]
Mod(x, y) = new NumericFunction [ what = 'Mod' ; args = (x:y) ]
IntDiv(x, y) = new NumericFunction [ what = 'IntDiv' ; args = (x:y) ]
##############################################################################
# comparison functions
##############################################################################
Less = CNTK2.Less
Equal = CNTK2.Equal
Greater = CNTK2.Greater
GreaterEqual = CNTK2.GreaterEqual
NotEqual = CNTK2.NotEqual
LessEqual = CNTK2.LessEqual
##############################################################################
# ComputationNodes
@ -42,8 +56,9 @@ CNTK2 = [
// 1. Inputs
// Changes: dims -> shape
DynamicAxis(tag='') = new ComputationNode [ operation = 'DynamicAxis' ; /*plus the function args*/ ]
# TODO: Is it a good idea to default to "feature"?
Input(shape, dynamicAxis='', tag='feature') = new ComputationNode [ operation = 'InputValue' ; shape = new TensorShape [ /*shape*/ ] ; isImage = false /*plus the function args*/ ]
// 2. Variables and constants
// Changes: ParameterTensor -> _Parameter; "dims" -> "shape"
// Python API:
@ -53,11 +68,36 @@ CNTK2 = [
// TODO: The API for Parameter is different in current 2.0 design, getting a constant as input for the initial values.
// This needs to be fixed to follow the way the Constant() is exposed in Python
// Making this an internal node with "_" until we agree on the final interface:
_Parameter(shape, value = 0, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*shape */ ] /*plus the function args*/ ]
_Parameter(shape, value = 0, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*shape */ ] /*plus the function args*/ ]
// 3. Shape operations
// Changes: NewReshape -> Reshape, input -> _, dims -> shape
Reshape(_, shape, beginAxis=0, endAxis=0, tag='') = new ComputationNode [ operation = 'Reshape' ; inputs = _ ; shape = new TensorShape [ /*shape*/ ] /*plus the function args*/ ]
Slice(_, beginIndex, endIndex, axis=1, tag='') =
if axis < 0 then [ # time axis: specify -1
beginFlags = if beginIndex > 0 then BS.Boolean.Not (BS.Loop.IsFirstN (beginIndex, _)) else BS.Loop.IsLastN (-beginIndex, _)
endFlags = if endIndex > 0 then BS.Loop.IsFirstN (endIndex, _) else BS.Boolean.Not (BS.Loop.IsLastN (-endIndex, _))
flags = if beginIndex == 0 then endFlags
else if endIndex == 0 then beginFlags
else BS.Boolean.And (beginFlags, endFlags)
out = if beginIndex == 0 && endIndex == 0
then _
else BS.Sequences.Gather (flags, _)
].out
else new ComputationNode [ operation = 'Slice' ; inputs = _ /*plus the function args*/ ] # non-time axis
Splice (_, axis=1, tag='') = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
if axis < 1 then Fail('Splice does not yet implement splicing the time axis.')
else if axis == 1 then [tag1=tag; out = RowStack (_, tag=tag1)].out
else [ # workaround: swap 'axis' to first position, RowStack, swap back
ArrayTransposeDimensions (_, axis1, axis2) = [ # transpose each element of a BS array
inputsT[i:0..Length(_)-1] = TransposeDimensions (_[i], axis1, axis2)
].inputsT
out = [tag1=tag; out=TransposeDimensions (RowStack (ArrayTransposeDimensions (_, 1, axis)), 1, axis, tag=tag)].out
].out
// Swap two axes of a tensor
TransposeDimensions(_, axis1, axis2, tag='') = new ComputationNode [ operation = 'TransposeDimensions' ; inputs = _ /*plus the function args*/ ]
// 4. Tensor operations
// Changes: Matrix -> Tensor. A -> x, B -> y. Data must come on y ("default parameter") hence not using _
@ -70,10 +110,14 @@ CNTK2 = [
Clip(_, minValue, maxValue, tag='') = new ComputationNode [ operation = 'Clip' ; inputs = (minValue : maxValue : _) /* plus the function args*/ ]
ElementDivide(_, y, tag='') = ElementTimes(_, Reciprocal(y), tag=tag)
ElementTimes(_, y, tag='') = new ComputationNode [ operation = 'ElementTimes' ; inputs = (_ : y) /*plus the function args*/ ]
Exp(_, tag='') = new ComputationNode [ operation = 'Exp' ; inputs = _ /*plus the function args*/ ]
Floor(_, tag='') = new ComputationNode [ operation = 'Floor' ; inputs = _ /*plus the function args*/ ]
Log(_, tag='') = new ComputationNode [ operation = 'Log' ; inputs = _ /*plus the function args*/ ]
Minus(_, y, tag='') = new ComputationNode [ operation = 'Minus' ; inputs = (_ : y) /*plus the function args*/ ]
Plus(_, y, tag='') = new ComputationNode [ operation = 'Plus' ; inputs = (_ : y) /*plus the function args*/ ]
Round(_, tag='') = Floor(Plus(_, ConstantTensor(0.5, (1))), tag=tag)
Sqrt(_, tag='') = new ComputationNode [ operation = 'Sqrt' ; inputs = _ /*plus the function args*/ ]
Square(_, tag='') = ElementTimes(_, _, tag=tag)
Tanh(_, tag='') = new ComputationNode [ operation = 'Tanh' ; inputs = _ /*plus the function args*/ ]
// 6. Reductions
@ -95,21 +139,31 @@ CNTK2 = [
Relu(_, tag='') = new ComputationNode [ operation = 'RectifiedLinear' ; inputs = _ /*plus the function args*/ ]
Sigmoid(_, tag='') = new ComputationNode [ operation = 'Sigmoid' ; inputs = _ /*plus the function args*/ ]
Softmax(_, tag='') = new ComputationNode [ operation = 'Softmax' ; inputs = _ /*plus the function args*/ ]
Dropout(_, tag='') = new ComputationNode [ operation = 'Dropout' ; inputs = _ /*plus the function args*/ ]
// 11. Criterion nodes
// No changes here - we said the default input would be the label sequence here, against which the
// empirical sequence is compared to. Keeping this for now.
CrossEntropyWithSoftmax(_, outProbVectorSequence, tag='') = new ComputationNode [ operation = 'CrossEntropyWithSoftmax' ; inputs = (_ : outProbVectorSequence) /*plus the function args*/ ]
ErrorPrediction(_, outVectorSequence, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = (_ : outVectorSequence) /*plus the function args*/ ]
ErrorPrediction(_, outVectorSequence, topN=1, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = if topN == 1 then (_ : outVectorSequence) else (_ : outVectorSequence : Constant (topN)) /*plus the function args*/ ]
// 13. Comparison nodes
Less(_, y, tag='') = new ComputationNode [ operation = 'Less' ; inputs = (_ : y) /*plus the function args*/ ]
Equal(_, y, tag='') = new ComputationNode [ operation = 'Equal' ; inputs = (_ : y) /*plus the function args*/ ]
Greater(_, y, tag='') = new ComputationNode [ operation = 'Greater' ; inputs = (_ : y) /*plus the function args*/ ]
GreaterEqual(_, y, tag='') = new ComputationNode [ operation = 'GreaterEqual' ; inputs = (_ : y) /*plus the function args*/ ]
NotEqual(_, y, tag='') = new ComputationNode [ operation = 'NotEqual' ; inputs = (_ : y) /*plus the function args*/ ]
LessEqual(_, y, tag='') = new ComputationNode [ operation = 'LessEqual' ; inputs = (_ : y) /*plus the function args*/ ]
// 13. Others
// 12. Others
// None so far.
Identity(_, tag='') = new ComputationNode [ operation = 'Pass' ; inputs = _ /*plus the function args*/ ]
]
LearnableParameter (outputDim, inputDim, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ dims = (outputDim : inputDim) ] /*plus the function args*/ ]
LearnableParameter (outputDim, inputDim, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ dims = (outputDim : inputDim) ] /*plus the function args*/ ]
Parameter = LearnableParameter // deprecated
# TODO: make Parameter take tensor dims?
ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile|fromLiteral*/, initValueScale = 1, value = 0, initFromFilePath = '', initFromLiteral = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
ConstantFromString(literal, tag='') = ParameterTensor((0)/*dim, will be inferred*/, init = 'fromLiteral', initFromLiteral = literal, learningRateMultiplier = 0.0)
DynamicAxis(tag='') = new ComputationNode [ operation = 'DynamicAxis' ; /*plus the function args*/ ]
Input(dims, dynamicAxis='', tag='feature') = new ComputationNode [ operation = 'InputValue' ; shape = new TensorShape [ /*dims*/ ] ; isImage = false /*plus the function args*/ ]
@ -127,14 +181,14 @@ Shift(input, fromOffset, boundaryValue, boundaryMode=-1/*context*/, dim=-1, tag=
RowSlice(beginIndex, numRows, input, tag='') = Slice(beginIndex, beginIndex + numRows, input, axis = 1)
RowRepeat(input, numRepeats, tag='') = new ComputationNode [ operation = 'RowRepeat' ; inputs = input /*plus the function args*/ ]
RowStack(inputs, tag='') = new ComputationNode [ operation = 'RowStack' /*plus the function args*/ ]
Splice (inputs, axis=1) = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
Splice (inputs, axis=1, tag='') = # TODO: This is a workaround. RowStack itself shall interpret 'axis' and be renamed to Splice().
if axis < 1 then Fail('Splice does not yet implement splicing the time axis.')
else if axis == 1 then RowStack (inputs)
else if axis == 1 then [tag1=tag; out = RowStack (inputs, tag=tag1)].out
else [ # workaround: swap 'axis' to first position, RowStack, swap back
ArrayTransposeDimensions (inputs, axis1, axis2) = [ # transpose each element of a BS array
inputsT[i:0..Length(inputs)-1] = TransposeDimensions (inputs[i], axis1, axis2)
].inputsT
out = TransposeDimensions (RowStack (ArrayTransposeDimensions (inputs, 1, axis)), 1, axis)
out = [tag1=tag; out=TransposeDimensions (RowStack (ArrayTransposeDimensions (inputs, 1, axis)), 1, axis, tag=tag)].out
].out
Reshape(input, numRows, imageWidth = 0, imageHeight = 0, imageChannels = 0, tag='') = new ComputationNode [ operation = 'LegacyReshape' ; inputs = input /*plus the function args*/ ]
NewReshape(input, dims, beginAxis=0, endAxis=0, tag='') = new ComputationNode [ operation = 'Reshape' ; inputs = input ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
@ -162,8 +216,11 @@ WeightedLogistic(label, probability, instanceWeight, tag='') = new ComputationNo
ReconcileDynamicAxis(dataInput, layoutInput, tag='') = new ComputationNode [ operation = 'ReconcileDynamicAxis' ; inputs = (dataInput : layoutInput) /*plus the function args*/ ]
ReconcileMBLayout = ReconcileDynamicAxis # back compat
CastAs (type, data) = ReconcileDynamicAxis (data, type) # read as CastAs<type>(data) where the cast may consist of rearranging the data w.r.t. MBLayout or broadcasting across sequence items
Convolution(weightNode, inputValueNode, kernelDims, mapDims = 1, stride = 1, sharing = true, autoPadding = true, lowerPad = 0, upperPad = 0, imageLayout='CHW', maxTempMemSizeInSamples = 0, tag='') = new ComputationNode [ operation = 'Convolution' ; inputs = (weightNode : inputValueNode); kernelShape = new TensorShape [ dims = kernelDims ] ; mapCount = new TensorShape [ dims = mapDims ] ; strideShape = new TensorShape [ dims = stride ] ; dimSharing = new BoolVector [ items = sharing ] ; dimPadding = new BoolVector [ items = autoPadding ] ; dimPadLower = new TensorShape [ dims = lowerPad ] ; dimPadUpper = new TensorShape [ dims = upperPad ] /*plus the function args*/ ]
Convolution(weightNode, inputValueNode, kernelDims, mapDims = 1, stride = 1, sharing = true, autoPadding = true, lowerPad = 0, upperPad = 0, transpose=false, imageLayout='CHW', maxTempMemSizeInSamples = 0, tag='') = new ComputationNode [ operation = 'Convolution' ; inputs = (weightNode : inputValueNode); kernelShape = new TensorShape [ dims = kernelDims ] ; mapCount = new TensorShape [ dims = mapDims ] ; strideShape = new TensorShape [ dims = stride ] ; dimSharing = new BoolVector [ items = sharing ] ; dimPadding = new BoolVector [ items = autoPadding ] ; dimPadLower = new TensorShape [ dims = lowerPad ] ; dimPadUpper = new TensorShape [ dims = upperPad ] /*plus the function args*/ ]
# ND pooling/unpooling
Pooling(input, poolKind/*'max'|'average'*/, kernelDims, stride=1, autoPadding = true, lowerPad = 0, upperPad = 0, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'Pooling' ; inputs = (input); pool = poolKind ; kernelShape = new TensorShape [ dims = kernelDims ] ; strideShape = new TensorShape [ dims = stride ] ; dimPadding = new BoolVector [ items = autoPadding ] ; dimPadLower = new TensorShape [ dims = lowerPad ] ; dimPadUpper = new TensorShape [ dims = upperPad ] /*plus the function args*/ ]
MaxUnpooling(unpoolInput, poolInput, kernelDims, stride=1, autoPadding = true, lowerPad = 0, upperPad = 0, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'MaxUnpooling' ; inputs = (unpoolInput : poolInput); kernelShape = new TensorShape [ dims = kernelDims ] ; strideShape = new TensorShape [ dims = stride ] ; dimPadding = new BoolVector [ items = autoPadding ] ; dimPadLower = new TensorShape [ dims = lowerPad ] ; dimPadUpper = new TensorShape [ dims = upperPad ] /*plus the function args*/ ]
# 2D pooling
MaxPooling(input, windowWidth, windowHeight, horizontalSubsample, verticalSubsample, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'MaxPooling' ; inputs = input /*plus the function args*/ ]
AveragePooling(input, windowWidth, windowHeight, horizontalSubsample, verticalSubsample, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'AveragePooling' ; inputs = input /*plus the function args*/ ]
ColumnwiseCrossProduct = KhatriRaoProduct // deprecated
@ -189,7 +246,7 @@ DiagTimes(diagonalMatrixAsColumnVector, matrix, tag='') = new ComputationNode [
Dropout(activationVectorSequence, tag='') = new ComputationNode [ operation = 'Dropout' ; inputs = activationVectorSequence /*plus the function args*/ ]
ElementTimes(aMatrix, anotherMatrix, tag='') = new ComputationNode [ operation = 'ElementTimes' ; inputs = (aMatrix : anotherMatrix) /*plus the function args*/ ]
ElementDivide(aMatrix, anotherMatrix, tag='') = ElementTimes(aMatrix, Reciprocal(anotherMatrix), tag=tag)
ErrorPrediction(labelVectorSequence, outVectorSequence, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = (labelVectorSequence : outVectorSequence) /*plus the function args*/ ]
ErrorPrediction = CNTK2.ErrorPrediction
Exp(x, tag='') = new ComputationNode [ operation = 'Exp' ; inputs = x /*plus the function args*/ ]
Floor(x, tag='') = new ComputationNode [ operation = 'Floor' ; inputs = x /*plus the function args*/ ]
GatherPacked(indexSequence, sourceData, tag='') = new ComputationNode [ operation = 'GatherPacked' ; inputs = (indexSequence : sourceData) /*plus the function args*/ ]
@ -246,7 +303,7 @@ SumElements(matrix, tag='') = new ComputationNode [ operation = 'SumElements' ;
# ^^ TODO: Rename to ReduceSumMB?
Tanh(z, tag='') = new ComputationNode [ operation = 'Tanh' ; inputs = z /*plus the function args*/ ]
TimeReverse(vectorSequence, tag='') = new ComputationNode [ operation = 'TimeReverse' ; inputs = vectorSequence /*plus the function args*/ ]
Trace (node, say='', logFrequency=traceFrequency, logFirst=10, logGradientToo=false, onlyUpToRow=100000000, onlyUpToT=100000000, format=[], tag='') = new ComputationNode [ operation = 'Trace' ; inputs = node ]
Trace (node, say='', logFrequency=100, logFirst=10, logGradientToo=false, onlyUpToRow=100000000, onlyUpToT=100000000, format=[], tag='') = new ComputationNode [ operation = 'Trace' ; inputs = node ]
TransposeTimes(leftMatrix, rightMatrix, tag='') = new ComputationNode [ operation = 'TransposeTimes' ; inputs = (leftMatrix : rightMatrix) /*plus the function args*/ ]
Where(cond, tag='') = new ComputationNode [ operation = 'Where' ; inputs = cond /*plus the function args*/ ]

Просмотреть файл

@ -237,66 +237,66 @@ void DoCommands(const ConfigParameters& config, const shared_ptr<MPIWrapper>& mp
if (thisAction == "train" || thisAction == "trainRNN")
{
LOGPRINTF(stderr, "CNTKCommandTrainBegin: %s\n", command[i].c_str());
DoTrain<ConfigParameters, ElemType>(commandParams);
DoTrain<ConfigParameters, ElemType>(commandParams);
LOGPRINTF(stderr, "CNTKCommandTrainEnd: %s\n", command[i].c_str());
fullEpochsOffset += GetMaxEpochs(commandParams);
}
fullEpochsOffset += GetMaxEpochs(commandParams);
}
else if (thisAction == "adapt")
{
DoAdapt<ElemType>(commandParams);
}
{
DoAdapt<ElemType>(commandParams);
}
else if (thisAction == "test" || thisAction == "eval")
{
DoEval<ElemType>(commandParams);
}
{
DoEval<ElemType>(commandParams);
}
else if (thisAction == "edit")
{
DoEdit<ElemType>(commandParams);
}
{
DoEdit<ElemType>(commandParams);
}
else if (thisAction == "cv")
{
DoCrossValidate<ElemType>(commandParams);
}
{
DoCrossValidate<ElemType>(commandParams);
}
else if (thisAction == "write")
{
DoWriteOutput<ElemType>(commandParams);
}
{
DoWriteOutput<ElemType>(commandParams);
}
else if (thisAction == "devtest")
{
TestCn<ElemType>(config); // for "devtest" action pass the root config instead
}
else if (thisAction == "dumpnode")
{
DumpNodeInfo<ElemType>(commandParams);
}
{
TestCn<ElemType>(config); // for "devtest" action pass the root config instead
}
else if (thisAction == "dumpNode" /*deprecated:*/|| thisAction == "dumpnode")
{
DumpNodeInfo<ElemType>(commandParams);
}
else if (thisAction == "convertdbn")
{
DoConvertFromDbn<ElemType>(commandParams);
}
{
DoConvertFromDbn<ElemType>(commandParams);
}
else if (thisAction == "exportdbn")
{
{
DoExportToDbn<ElemType>(commandParams);
}
else if (thisAction == "createLabelMap")
{
DoCreateLabelMap<ElemType>(commandParams);
}
DoCreateLabelMap<ElemType>(commandParams);
}
else if (thisAction == "writeWordAndClass")
{
DoWriteWordAndClassInfo<ElemType>(commandParams);
}
{
DoWriteWordAndClassInfo<ElemType>(commandParams);
}
else if (thisAction == "plot")
{
DoTopologyPlot<ElemType>(commandParams);
}
{
DoTopologyPlot<ElemType>(commandParams);
}
else if (thisAction == "SVD")
{
DoParameterSVD<ElemType>(commandParams);
}
else
{
{
DoParameterSVD<ElemType>(commandParams);
}
else
{
RuntimeError("unknown action: %s in command set: %s", thisAction.c_str(), command[i].c_str());
}
}
}
fprintf(stderr, "\n");
@ -476,6 +476,7 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapp
// parallel training
shared_ptr<Microsoft::MSR::CNTK::MPIWrapper> mpi;
auto ensureMPIWrapperCleanup = MakeScopeExit(&MPIWrapper::DeleteInstance);
bool paralleltrain = config(L"parallelTrain", false);
if (paralleltrain)
mpi = MPIWrapper::GetInstance(true /*create*/);
@ -547,7 +548,6 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapp
LOGPRINTF(stderr, "__COMPLETED__\n");
fflush(stderr);
MPIWrapper::DeleteInstance();
return EXIT_SUCCESS;
}
@ -574,6 +574,7 @@ int wmainOldCNTKConfig(int argc, wchar_t* argv[])
// paralleltrain training
shared_ptr<Microsoft::MSR::CNTK::MPIWrapper> mpi;
auto ensureMPIWrapperCleanup = MakeScopeExit(&MPIWrapper::DeleteInstance);
bool paralleltrain = config(L"parallelTrain", "false");
if (paralleltrain)
mpi = MPIWrapper::GetInstance(true /*create*/);
@ -665,7 +666,6 @@ int wmainOldCNTKConfig(int argc, wchar_t* argv[])
LOGPRINTF(stderr, "__COMPLETED__\n");
fflush(stderr);
MPIWrapper::DeleteInstance();
return EXIT_SUCCESS;
}
@ -686,7 +686,7 @@ int wmain1(int argc, wchar_t* argv[]) // called from wmain which is a wrapper th
std::set_new_handler(AllocationFailureHandler);
try
{
{
PrintBuiltInfo(); // print build info directly in case that user provides zero argument (convenient for checking build type)
if (argc <= 1)

Просмотреть файл

@ -146,7 +146,6 @@
<ClInclude Include="..\Common\Include\Basics.h" />
<ClInclude Include="..\Common\Include\BestGpu.h" />
<ClInclude Include="..\Common\Include\DataReader.h" />
<ClInclude Include="..\Common\Include\CompositeDataReader.h" />
<ClInclude Include="..\Common\Include\ExceptionWithCallStack.h" />
<ClInclude Include="..\Common\Include\StringUtil.h" />
<ClInclude Include="..\Common\Include\TensorShape.h" />
@ -165,6 +164,7 @@
<ClInclude Include="..\Math\Matrix.h" />
<ClInclude Include="..\ComputationNetworkLib\PreComputeNodes.h" />
<ClInclude Include="..\ComputationNetworkLib\MatrixPool.h" />
<ClInclude Include="..\Readers\CompositeDataReader\CompositeDataReader.h" />
<ClInclude Include="..\Readers\ReaderLib\BlockRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\Bundler.h" />
<ClInclude Include="..\Readers\ReaderLib\ChunkRandomizer.h" />
@ -173,12 +173,10 @@
<ClInclude Include="..\Readers\ReaderLib\NoRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\Packer.h" />
<ClInclude Include="..\Readers\ReaderLib\Reader.h" />
<ClInclude Include="..\Readers\ReaderLib\SampleModePacker.h" />
<ClInclude Include="..\Readers\ReaderLib\SequencePacker.h" />
<ClInclude Include="..\Readers\ReaderLib\SequenceRandomizer.h" />
<ClInclude Include="..\Readers\ReaderLib\StringToIdMap.h" />
<ClInclude Include="..\Readers\ReaderLib\Transformer.h" />
<ClInclude Include="..\Readers\ReaderLib\TransformerBase.h" />
<ClInclude Include="..\SGDLib\DataReaderHelpers.h" />
<ClInclude Include="..\SGDLib\SGD.h" />
<ClInclude Include="..\SGDLib\SimpleEvaluator.h" />

Просмотреть файл

@ -193,9 +193,6 @@
<ClInclude Include="..\Readers\ReaderLib\Reader.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\SampleModePacker.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\SequencePacker.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
@ -208,9 +205,6 @@
<ClInclude Include="..\Readers\ReaderLib\Transformer.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Readers\ReaderLib\TransformerBase.h">
<Filter>from ReaderLib</Filter>
</ClInclude>
<ClInclude Include="..\Common\Include\CompositeDataReader.h">
<Filter>Common\Include</Filter>
</ClInclude>

Просмотреть файл

@ -1,6 +1,12 @@
@echo off
setlocal enableDelayedexpansion
::: Copyright (c) Microsoft. All rights reserved.
:::
::: Licensed under the MIT license. See LICENSE.md file in the project root
::: for full license information.
::: ==============================================================================
:::
::: This is called as a pre-build step for the CNTK executable.
::: It receives the build's configuration, $(Configuration), as first paramter.
::: It creates buildinfo.h, which makes version information available to the executable itself.
@ -28,9 +34,14 @@ if not errorlevel 1 (
)
)
:: For now, math lib is basically hardwired
if exist ACML_PATH (
echo #define _MATHLIB_ "acml">> buildinfo.h$$
if "%CNTK_MKL%" == "1" (
if "%CNTK_MKL_SEQUENTIAL%" == "1" (
echo #define _MATHLIB_ "mkl-sequential">> buildinfo.h$$
) else (
echo #define _MATHLIB_ "mkl">> buildinfo.h$$
)
) else (
echo #define _MATHLIB_ "acml">> buildinfo.h$$
)
echo #define _BUILDER_ "%USERNAME%" >> buildinfo.h$$

Просмотреть файл

@ -98,8 +98,8 @@ DataReader::DataReader(const ConfigRecordType& config)
for (const auto& ioName : ioNames) // inputNames should map to node names
{
const ConfigRecordType& thisIO = config(ioName);
// get the name for the reader we want to use, default to UCIFastReader
GetReaderProc getReaderProc = (GetReaderProc) Plugin::Load(thisIO(L"readerType", L"UCIFastReader"), GetReaderName(precision));
// get the name for the reader we want to use, default to CNTKTextFormatReader
GetReaderProc getReaderProc = (GetReaderProc) Plugin::Load(thisIO(L"readerType", L"CNTKTextFormatReader"), GetReaderName(precision));
m_ioNames.push_back(ioName);
assert(getReaderProc != nullptr);
getReaderProc(&m_dataReaders[ioName]); // instantiates the reader with the default constructor (no config processed at this point)
@ -115,12 +115,12 @@ DataReader::DataReader(const ConfigRecordType& config)
assert(getReaderProc != nullptr);
getReaderProc(&m_dataReaders[ioName]);
}
else // legacy
else
{
wstring ioName = L"ioName";
// backward support to use only one type of data reader
// get the name for the reader we want to use, default to UCIFastReader
GetReaderProc getReaderProc = (GetReaderProc)Plugin::Load(config(L"readerType", L"UCIFastReader"), GetReaderName(precision));
// get the name for the reader we want to use, default to CNTKTextFormatReader
GetReaderProc getReaderProc = (GetReaderProc)Plugin::Load(config(L"readerType", L"CNTKTextFormatReader"), GetReaderName(precision));
m_ioNames.push_back(ioName);
assert(getReaderProc != nullptr);
getReaderProc(&m_dataReaders[ioName]);

Просмотреть файл

@ -653,9 +653,43 @@ public:
}
};
#endif
template <typename EF>
struct ScopeExit {
explicit ScopeExit(EF &&f) :
m_exitFunction(std::move(f)), m_exitOnDestruction(true)
{}
~ScopeExit()
{
if (m_exitOnDestruction)
m_exitFunction();
}
ScopeExit(ScopeExit&& other)
: m_exitFunction(std::move(other.m_exitFunction)), m_exitOnDestruction(other.m_exitOnDestruction)
{
other.m_exitOnDestruction = false;
}
private:
// Disallow copy construction, assignment
ScopeExit(const ScopeExit&) = delete;
ScopeExit& operator=(const ScopeExit&) = delete;
// Disallow move assignment
ScopeExit& operator=(ScopeExit&&) = delete;
EF m_exitFunction;
bool m_exitOnDestruction;
};
template <typename EF>
ScopeExit<typename std::remove_reference<EF>::type> MakeScopeExit(EF&& exitFunction)
{
return ScopeExit<typename std::remove_reference<EF>::type>(std::forward<EF>(exitFunction));
}
}
}
}}}
#ifdef _WIN32
// ----------------------------------------------------------------------------

Просмотреть файл

@ -275,25 +275,22 @@ class DataReader : public IDataReader, protected Plugin, public ScriptableObject
// Init - Reader Initialize for multiple data sets
// config - [in] configuration parameters for the datareader
// Sample format below for UCIReader:
// Sample format below for CNTKTextFormatReader:
// # Parameter values for the reader
// reader=[
// # reader to use
// readerType="UCIFastReader"
// miniBatchMode="partial"
// randomize=None
// features=[
// dim=784
// start=1
// file="c:\speech\mnist\mnist_test.txt"
// ]
// labels=[
// dim=1
// start=0
// file="c:\speech\mnist\mnist_test.txt"
// labelMappingFile="c:\speech\mnist\labels.txt"
// labelDim=10
// labelType="category"
// readerType="CNTKTextFormatReader"
// randomize=false
// file="c:\speech\mnist\mnist_test_cntk_text.txt"
// input=[
// features=[
// dim=784
// format="dense"
// ]
// labels=[
// dim=10
// format="dense"
// ]
// ]
//]
template <class ConfigRecordType>

Просмотреть файл

@ -28,6 +28,32 @@
namespace Microsoft { namespace MSR { namespace CNTK {
template <typename ElemType>
class IEvaluateModelBase
{
public:
//
// Load a model based on configuration. The syntax is the same as when calling the cntk executable.
// e.g. "modelFile=model.dat deviceId=0".
// numCPUThreads can be used to set the thread count of BLAS.
//
virtual void Init(const std::string& config) = 0;
//
// Create a network based on an (NDL) network description.
//
virtual void CreateNetwork(const std::string& networkDescription) = 0;
//
// Free resources
//
virtual void Destroy() = 0;
};
// ------------------------------------------------------------------------
// Basic (legacy) interface
// ------------------------------------------------------------------------
enum NodeGroup
{
nodeInput, // an input node
@ -39,33 +65,51 @@ enum NodeGroup
// NOTICE: This interface is a public interface for evaluating models in CNTK.
// Changes to this interface may affect other projects, such as Argon and LatGen,
// and therefore need to be communicated with such groups.
template <class ElemType>
class IEvaluateModel // Evaluate Model Interface
template <typename ElemType>
class IEvaluateModel : public IEvaluateModelBase<ElemType> // Evaluate Model Interface
{
public:
virtual void Init(const std::string& config) = 0;
virtual void Destroy() = 0;
virtual void CreateNetwork(const std::string& networkDescription) = 0;
//
// Retrieves the (flattened) dimensions
//
virtual void GetNodeDimensions(std::map<std::wstring, size_t>& dimensions, NodeGroup nodeGroup) = 0;
//
// Allocate resources for a particular output.
//
virtual void StartEvaluateMinibatchLoop(const std::wstring& outputNodeName) = 0;
//
// Evaluate a model in frame mode. This does not support dynamic axes or sparse input data.
// Given a feature vector of dimension d, the inputs may contain n * d elements. The output will then be computed
// for n samples.
// inputs - map from node name to array of input tensors, flattened to vector
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will
// happen during evaluation
//
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs) = 0;
//
// Evaluate - Evaluate using the network without input and provide the outputs
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will
// happen during evaluation
//
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs) = 0;
virtual void ResetState() = 0;
};
// GetEval - get a evaluator type from the DLL
// since we have 2 evaluator types based on template parameters, exposes 2 exports
// could be done directly with the templated name, but that requires mangled C++ names
template <class ElemType>
template <typename ElemType>
void EVAL_API GetEval(IEvaluateModel<ElemType>** peval);
extern "C" EVAL_API void GetEvalF(IEvaluateModel<float>** peval);
extern "C" EVAL_API void GetEvalD(IEvaluateModel<double>** peval);
// Data Reader class
// interface for clients of the Data Reader
// mirrors the IEvaluateModel interface, except the Init method is private (use the constructor)
template <class ElemType>
template <typename ElemType>
class Eval : public IEvaluateModel<ElemType>, protected Plugin
{
private:
@ -73,7 +117,8 @@ private:
void GetEvalClass(const std::string& config);
// Destroy - cleanup and remove this class
// Destroy - cleanup and remove this class. Workaround to ensure that memory allocation / deallocation
// occur within the DLL boundary.
// NOTE: this destroys the object, and it can't be used past this point
virtual void Destroy();
@ -84,6 +129,7 @@ public:
// modelPath=c:\models\model.dnn (model path, if not specified, must call LoadModel() method before Evaluate()
// minibatchSize=1024 (minibatch size used during evaluation if < passed data size)
Eval(const std::string& config);
virtual ~Eval();
// CreateNetwork - create a network based on the network description
@ -101,14 +147,200 @@ public:
// Evaluate - Evaluate using the model with the given inputs and outputs
// inputs - map from node name to input vector
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will
// happen during evaluation
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs);
// Evaluate - Evaluate using the network without input, and provide the outputs
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will
// happen during evaluation
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs);
virtual void Init(const std::string& config);
virtual void ResetState();
};
// ------------------------------------------------------------------------
// Extended interface
// ------------------------------------------------------------------------
// Partial instantiation of vector to reduce to one argument.
template <typename ElemType>
using Vector = std::vector<ElemType, std::allocator<ElemType>>;
//
// A buffer to keep data for all samples in a (variable length) sequence
// from a single input or output.
// This is used for both dense and sparse data.
//
template<typename ElemType, template<typename> class Container = Vector>
struct ValueBuffer
{
//
// All elements of a sequence, concatenated.
// For dense inputs, the number of samples is given by the the length of
// this vector / product of tensor dimensions. E.g. for a tensor of dimension
// [2,2] and 12 elements in the buffer, the number of samples is 3.
// For sparse inputs, the number of samples is indicated by the m_colIndices field.
//
Container<ElemType> m_buffer;
// In case of sparse data, the following is also used. Otherwise, the
// contents are ignored.
// E.g. a sequence of three sparse vectors with 2 / 4 / 2 non-zero values
// could be represented as the following:
// colIdx: 0 2 6 8
// v v v v
// indices 1 3 2 3 5 6 2 7
// buffer 0 1 2 3 4 5 6 7
//
// For every element in buffer, an entry in this array gives its position.
// For every vector the entries must be ascending.
//
Container<int> m_indices;
//
// Contains numberOfsamples + 1 indices into the buffer. The first entry
// is always 0. The last entry points after the last element.
// See http://docs.nvidia.com/cuda/cusparse/#compressed-sparse-column-format-csc
//
Container<int> m_colIndices;
};
//
// Helper class that can be used in exchange of a std::vector if the memory is managed externally.
//
template <typename ElemType>
struct VectorRef
{
ElemType* m_vector;
size_t m_capacity; // ElemTypes allocated
size_t m_size; // ElemTypes used.
VectorRef() : m_vector(nullptr), m_capacity(0), m_size(0) {}
void InitFrom(std::vector<ElemType>& src) { m_vector = src.data(); m_capacity = src.capacity(); m_size = src.size(); }
size_t size() const { return m_size; }
size_t capacity() const { return m_capacity; }
ElemType* data() { return m_vector; }
const ElemType* data() const { return m_vector; }
ElemType* begin() { return m_vector; }
ElemType* end() { return m_vector + m_size; }
void resize(size_t size) { m_size = size; }
ElemType& operator[](size_t idx) { return m_vector[idx]; }
const ElemType& operator[](size_t idx) const { return m_vector[idx]; }
};
template <typename ElemType>
using Values = std::vector<ValueBuffer<ElemType, Vector>>;
template <typename ElemType>
using ValueRefs = std::vector<ValueBuffer<ElemType, VectorRef>>;
//
// Meta data
//
struct VariableLayout
{
enum DataType
{
Float32,
Float64
};
enum StorageType
{
Undetermined,
Dense,
Sparse,
};
// Name of the input
std::wstring m_name;
DataType m_dataType;
StorageType m_storageType;
// Dimension of the tensor, flattened to 1 dimension, for one entry on the dynamic axis.
// E.g. for a tensor [2,3,*] this would be 6.
int m_numElements;
};
class VariableSchema : public std::vector<VariableLayout>
{
public:
template<typename ElemType>
Values<ElemType> CreateBuffers(const std::vector<size_t>& maxLengths)
{
if (maxLengths.size() != size())
throw std::exception("Expected max lengths for all variables.");
Values<ElemType> buffers(size());
for (size_t i = 0; i < size(); ++i)
{
buffers[i].m_buffer.reserve(operator[](i).m_numElements * maxLengths[i]);
}
return buffers;
}
};
//
// Extended interface, allowing for sparse input.
// Implementation constraints:
// - Every output is a single tensor (not a batch),
// - Outputs must be dense.
// - Output buffer must be preallocated.
//
template <typename ElemType>
class IEvaluateModelExtended : public IEvaluateModelBase<ElemType>
{
public:
//
// GetOutputSchema - retrieve information about tensor shapes and memory layout of the outputs for this
// model.
//
virtual VariableSchema GetOutputSchema() const = 0;
//
// Allocate internal state for calling ForwardPass(). The call restricts the network (inputs and outputs)
// to the functions represented by the output name.
//
virtual void StartForwardEvaluation(const std::vector<std::wstring>& outputs) = 0;
//
// GetVariableLayout - retrieve information about tensor shapes and memory layout of inputs necessary for a
// particular output. By default this returns all available inputs. After StartForwardEvaluation(), this
// returns all the inputs necessary to compute the outputs.
//
virtual VariableSchema GetInputSchema() const = 0;
//
// ForwardPass - Evaluate (perform a forward pass for) a single unit using the model with the given inputs and
// outputs.
// The layout and shape of the data in inputs vector must match the schema returned by GetInputLayouts.
// Output must be preallocated and sized to avoid memory allocation / deallocation across DLL
// boundaries.
// This method is not reentrant, as the forward pass keeps internal state.
// inputs - vector of input buffers, one for every input as given by GetInputLayouts()
// outputs - vector of output buffers. Must be sized to fit output schema.
//
virtual void ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& output) = 0;
//
// Same as above, but takes references to static arrays instead of std::vector
// (e.g. when vectors are manages by .net)
//
virtual void ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& output) = 0;
};
template <typename ElemType>
void EVAL_API GetEvalExtended(IEvaluateModelExtended<ElemType>** peval);
extern "C" EVAL_API void GetEvalExtendedF(IEvaluateModelExtended<float>** peval);
extern "C" EVAL_API void GetEvalExtendedD(IEvaluateModelExtended<double>** peval);
} } }

Просмотреть файл

@ -159,7 +159,11 @@ public:
fprintf(stderr, "~MPIWrapper\n");
fflush(stderr);
// TODO: Check for error code and throw if !std::uncaught_exception()
MPI_Finalize();
// Do not finalize in event of an exception since calling MPI_Finalize without
// all pending communications being finished results in a hang
if (!std::uncaught_exception())
MPI_Finalize();
}
private:

Просмотреть файл

@ -694,13 +694,13 @@ class auto_file_ptr
FILE* f;
FILE* operator=(auto_file_ptr&); // can't ref-count: no assignment
auto_file_ptr(auto_file_ptr&);
void close() throw()
void close()
{
if (f && f != stdin && f != stdout && f != stderr)
{
int rc = ::fclose(f);
if ((rc != 0) && !std::uncaught_exception())
RuntimeError("auto_file_ptr: failed to close file");
RuntimeError("auto_file_ptr: failed to close file: %s", strerror(errno));
f = NULL;
}

Просмотреть файл

@ -498,7 +498,7 @@ template <class ElemType>
fprintf(stderr, "Setting dropout rate to %.8g.\n", dropoutRate);
// TODO: Change this to use an interface that is independent of <ElemType>.
if (dropoutNodes.size() == 0 && dropoutRate > 0)
fprintf(stderr, "WARNING: there is no dropout node.\n");
fprintf(stderr, "WARNING: Attempting to set dropout rate, but there is no dropout node in the network.\n");
}
// Each dropout node gets a distinct seed. The actual seed for each dropout node is computed as follows:
@ -925,24 +925,27 @@ void ComputationNodeBase::EnumerateArcs(std::unordered_set<ComputationNodeBasePt
// ========================================
// BUGBUG: this only currently works for one ElemType, not both
template <class ElemType>
void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDConfig, size_t AlignedSize)
void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDConfig, size_t alignedSize)
{
vector<pair<vector<wstring>, float>> nodeGroups;
wregex NameFilter;
wregex nameFilter;
for (const auto& e : SVDConfig)
{
wstring regexStr = e.first;
float keepRatio = e.second;
vector<wstring> NamesInGroup;
if (regexStr.empty())
continue;
NameFilter.assign(regexStr);
float keepRatio = e.second;
vector<wstring> namesInGroup;
nameFilter.assign(regexStr);
for (auto n = m_nameToNodeMap.begin(); n != m_nameToNodeMap.end(); n++)
{
if (!regexStr.empty() && !regex_match(n->first, NameFilter))
if (!regex_match(n->first, nameFilter))
{
// if regexStr is not empty and the the node node does not match with the regexStr
// if regexStr is not empty and the the node does not match with the regexStr
continue;
}
@ -954,20 +957,20 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
continue;
// still here ?
NamesInGroup.push_back(n->first);
namesInGroup.push_back(n->first);
}
nodeGroups.push_back(make_pair(NamesInGroup, keepRatio));
nodeGroups.push_back(make_pair(namesInGroup, keepRatio));
}
size_t groupID = 0;
for (auto& group : nodeGroups)
{
float keepratio = group.second;
float keepRatio = group.second;
fprintf(stderr,
"--------------------------------------------------------------------------------------------\n");
fprintf(stderr,
"ParameterSVD: start to process group %d with KeepRatio=%.2f\n",
(int) groupID++, keepratio);
(int) groupID++, keepRatio);
fprintf(stderr,
"--------------------------------------------------------------------------------------------\n");
@ -1002,17 +1005,17 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
// S \in R^{min(m,n),1}
// S is in descending order
ElemType totalenergy = 0.0f;
ElemType totalEnergy = 0.0f;
for (size_t i = 0; i < S.GetNumRows(); i++)
totalenergy += S(i, 0);
ElemType keepenergy = totalenergy * keepratio;
ElemType runenergy = 0.0f;
totalEnergy += S(i, 0);
ElemType keepEnergy = totalEnergy * keepRatio;
ElemType runEnergy = 0.0f;
size_t r = 0;
for (size_t indx = 0; indx < S.GetNumRows(); indx++)
{
runenergy += S(indx, 0);
if (runenergy > keepenergy)
runEnergy += S(indx, 0);
if (runEnergy > keepEnergy)
{
r = indx + 1;
break;
@ -1021,10 +1024,10 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
r = r > S.GetNumRows() ? S.GetNumRows() : r;
if (r % AlignedSize != 0)
if (r % alignedSize != 0)
{
r -= r % AlignedSize;
r = r + AlignedSize > S.GetNumRows() ? S.GetNumRows() : r + AlignedSize;
r -= r % alignedSize;
r = r + alignedSize > S.GetNumRows() ? S.GetNumRows() : r + alignedSize;
}
// r = (r + 7) & (~7); // to keep the number of rows/cols of resultant matrix a multipier of 8
// which can be helpful at runtime
@ -1033,7 +1036,7 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
fprintf(stderr,
"Performing SVD for a %5d-by-%-5d matrix (node name: %-20ls) --- computation time %5.2f secs ; keep %4.1f%% energy ===> keep %5d svd values (reduce to %4.1f%% parameters) \n",
(int) m, (int) n, name.c_str(), elapsedtime.count(),
keepratio * 100, (int) r,
keepRatio * 100, (int) r,
((m + n) * r + 0.0f) / m / n * 100);
// redU in R^ {mXr}
@ -1047,28 +1050,49 @@ void ComputationNetwork::PerformSVDecomposition(const map<wstring, float>& SVDCo
Matrix<ElemType> redS(r, (size_t)1, A.GetDeviceId());
for (size_t i = 0; i < r; i++)
{
ElemType sqrtsigma = (ElemType) sqrt((double) S(i, 0));
redS(i, 0) = sqrtsigma;
ElemType sqrtSigma = (ElemType) sqrt((double) S(i, 0));
redS(i, 0) = sqrtSigma;
}
redU.RowElementMultiplyWith(redS.Transpose());
redVT.ColumnElementMultiplyWith(redS);
// Step 2. create two new Parameter nodes and one Times node
wstring leftChildName = name + L"-U"; // BUGBUG: With BrainScript, node names must be proper identifiers/variable expressions. We can't have '-' in node names.
wstring rightChildName = name + L"-V";
wstring leftChildName = name + L"_U";
wstring rightChildName = name + L"_V";
shared_ptr<ComputationNode<ElemType>> pLeft = AddNodeToNetWithElemType(New<LearnableParameter<ElemType>>(m_deviceId, leftChildName, m, r));
shared_ptr<ComputationNode<ElemType>> pRight = AddNodeToNetWithElemType(New<LearnableParameter<ElemType>>(m_deviceId, rightChildName, r, n));
// TODO: We should be able to move instead of copy but it currently isn't strightforward
// TODO: We should be able to move instead of copy but it currently isn't straightforward
// due to redU and redVT being slices
pLeft->ValueAsMatrix() = redU.DeepClone();
pRight->ValueAsMatrix() = redVT.DeepClone();
shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"-SVD"), { pLeft, pRight });
// Step 3. Change the network hierachy to include the SVD nodes
auto parentNodes = GetParentNodes(name);
// Step 3. remove old node
ReplaceLeafNode(name, pTimes);
for (auto& pParentNode : parentNodes)
{
// Change the hierarchy of the network if the node is immediately used in a product
auto pParentTimesNode = dynamic_pointer_cast<TimesNode<ElemType>>(pParentNode);
if (pParentTimesNode)
{
// Change the hierarchy to ensure multiplication order
// U*(V*X)
shared_ptr<ComputationNode<ElemType>> pTimes = New<TimesNode<ElemType>>(m_deviceId, name + L"_SVD");
pTimes->AttachInputs({ pLeft, pParentNode });
InsertNode(pParentNode->GetName(), pTimes, pParentNode->GetTags());
ReplaceLeafNode(name, pRight);
}
else
{
// Default multiplication order
shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"_SVD"), { pLeft, pRight });
ReplaceLeafNode(name, pTimes);
}
}
}
}

Просмотреть файл

@ -50,16 +50,16 @@ public:
ComputationNetwork() :
m_randomSeedOffset(0),
m_isCompiled(false),
m_areMatricesAllocated(false),
m_isCompiled(false),
m_areMatricesAllocated(false),
m_pMBLayoutOfNetwork(make_shared<MBLayout>(1, 0, L"*")),
m_environment(make_shared<ComputationEnvironment>())
{
//m_pMBLayoutOfNetwork->SetAxisName(L"T");
}
ComputationNetwork(DEVICEID_TYPE deviceId)
: ComputationNetwork()
ComputationNetwork(DEVICEID_TYPE deviceId) :
ComputationNetwork()
{
SetDeviceId(deviceId);
}
@ -82,6 +82,7 @@ public:
protected:
void ConstructFromRoots(DEVICEID_TYPE deviceId, std::deque<ComputationNodeBasePtr>&& roots, const map<ComputationNodeBasePtr, ComputationNodeBasePtr>& replacements);
void ProcessSpecialNodes(const ScriptableObjects::IConfigRecord& config, std::deque<ComputationNodeBasePtr>& roots);
public:
// -----------------------------------------------------------------------
@ -175,6 +176,7 @@ private:
size_t ValidateNodes(list<ComputationNodeBasePtr> nodes, bool isFirstPass, bool isFinalValidationPass);
bool ValidateNode(ComputationNodeBasePtr node, bool isFinalValidationPass) const;
void MarkValueNonSharableNodes();
void ChangeNodeInputs(ComputationNodeBasePtr fromNode, ComputationNodeBasePtr toNode);
private:
void DetermineSetOfAllRoots();
@ -360,7 +362,8 @@ public:
void RenameNode(const std::wstring& nodeNameOrig, const std::wstring& nodeNameNew);
void RenameNode(ComputationNodeBasePtr node, const std::wstring& newNodeName);
void DeleteNode(const std::wstring& nodeName);
void ChangeNode(wstring nodeName, ComputationNodeBasePtr newNode);
void ReplaceNode(wstring nodeName, ComputationNodeBasePtr newNode);
void InsertNode(wstring nodeName, ComputationNodeBasePtr newNode, const std::set<std::wstring>& newNodeTags);
void ReplaceLeafNode(wstring oldNodeName, ComputationNodeBasePtr newNode);
void ReplaceFinalCriterionNode(wstring oldNodeName, ComputationNodeBasePtr newNode);
void AddFeatureNode(ComputationNodeBasePtr featureNode);
@ -478,6 +481,47 @@ public:
return std::vector<ComputationNodeBasePtr>{node};
}
std::vector<ComputationNodeBasePtr> OutputNodesByName(const std::vector<std::wstring>& outputNodeNames)
{
std::vector<ComputationNodeBasePtr> outputNodes;
if (outputNodeNames.size() == 0)
{
if (OutputNodes().size() == 0)
RuntimeError("There is no default output node specified in the network.");
outputNodes = OutputNodes();
}
else
{
for (int i = 0; i < outputNodeNames.size(); i++)
outputNodes.push_back(GetNodeFromName(outputNodeNames[i]));
}
return outputNodes;
}
// Collect all input nodes that outputNodes depend on.
std::vector<ComputationNodeBasePtr> InputNodesForOutputs(const std::vector<std::wstring>& outputNodeNames)
{
// use set to remove duplicated items
auto outputNodes = OutputNodesByName(outputNodeNames);
std::set<ComputationNodeBasePtr> inputNodesMap;
for (auto& onode : outputNodes)
{
for (auto& inode : InputNodes(onode))
inputNodesMap.insert(inode);
}
std::vector<ComputationNodeBasePtr> inputNodes;
for (auto& inode : inputNodesMap)
inputNodes.push_back(inode);
return inputNodes;
}
// these are specified as such by the user
const std::vector<ComputationNodeBasePtr>& FeatureNodes() const { return m_featureNodes ; }
const std::vector<ComputationNodeBasePtr>& LabelNodes() const { return m_labelNodes ; }
@ -568,6 +612,28 @@ public:
return parents;
}
// Return set of immediate output (parent) nodes for given input (child) node
// TODO: there should be a map from output nodes to inputs, so that this operation doesn't take square time
std::vector<ComputationNodeBasePtr> GetParentNodes(const std::wstring& inputNodeName)
{
std::set<ComputationNodeBasePtr> outputNodes;
for (const auto& iter : m_nameToNodeMap)
{
const auto& node = iter.second;
//Iterate over inputs of this node
for (const auto& inputNode : node->GetInputs())
{
if (inputNode->GetName() == inputNodeName)
{
outputNodes.insert(node);
}
}
}
return std::vector<ComputationNodeBasePtr>(outputNodes.begin(), outputNodes.end());
}
std::list<ComputationNodeBasePtr> GetNodesWithType(const wstring typeName, const ComputationNodeBasePtr& rootNode = nullptr)
{
std::list<ComputationNodeBasePtr> nodesWithType;

Просмотреть файл

@ -41,6 +41,12 @@ static shared_ptr<ComputationNode<ElemType>> CreateStandardNode(const std::wstri
if (nodeType == OperationNameOf(AbsNode)) return New<AbsNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ClassBasedCrossEntropyWithSoftmaxNode))return New<ClassBasedCrossEntropyWithSoftmaxNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(ClipNode)) return New<ClipNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(EqualNode)) return New<EqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(GreaterEqualNode)) return New<GreaterEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(GreaterNode)) return New<GreaterNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(LessEqualNode)) return New<LessEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(LessNode)) return New<LessNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(NotEqualNode)) return New<NotEqualNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosDistanceNode)) return New<CosDistanceNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosDistanceWithNegativeSamplesNode)) return New<CosDistanceWithNegativeSamplesNode<ElemType>>(forward<_Types>(_Args)...);
else if (nodeType == OperationNameOf(CosineNode)) return New<CosineNode<ElemType>>(forward<_Types>(_Args)...);
@ -124,6 +130,7 @@ static shared_ptr<ComputationNode<ElemType>> CreateStandardNode(const std::wstri
#if 1
else if (nodeType == OperationNameOf(LegacyReshapeNode)) return New<LegacyReshapeNode<ElemType>>(forward<_Types>(_Args)...);
#endif
else if (nodeType == OperationNameOf(MaxUnpoolingNode)) return New<MaxUnpoolingNode<ElemType>>(forward<_Types>(_Args)...);
else InvalidArgument("Attempted to instantiate undefined operation %ls.", nodeType.c_str());
}
@ -243,12 +250,12 @@ template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::CreateConvolutionNode(const std::wstring& nodeName, const TensorShape& kernelShape, const TensorShape& mapCount,
const TensorShape& strideShape, const std::vector<bool>& sharing,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
{
return net.AddNodeToNetWithElemType(New<ConvolutionNode<ElemType>>(net.GetDeviceId(), nodeName,
kernelShape, mapCount, strideShape,
sharing, autoPadding, lowerPad, upperPad,
imageLayout, maxTempMemSizeInSamples));
transpose, imageLayout, maxTempMemSizeInSamples));
}
template <class ElemType>
@ -308,13 +315,13 @@ shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Convo
const TensorShape& kernelShape, const TensorShape& mapCount,
const TensorShape& strideShape, const std::vector<bool>& sharing,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples,
bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples,
const std::wstring nodeName)
{
return net.AddNodeToNetAndAttachInputs(New<ConvolutionNode<ElemType>>(net.GetDeviceId(), nodeName,
kernelShape, mapCount, strideShape,
sharing, autoPadding, lowerPad, upperPad,
imageLayout, maxTempMemSizeInSamples),
transpose, imageLayout, maxTempMemSizeInSamples),
{ weight, inputValues });
}
@ -330,6 +337,19 @@ shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Pooli
{ inputValues });
}
template <class ElemType>
// Build a MaxUnpoolingNode and wire it into the network.
// unpoolInputValues - values to be "un-pooled" back to the original resolution
// poolInputValues   - the original input of the matching max-pooling node
// (shape/stride/padding arguments mirror those of the pooling operation)
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::MaxUnpooling(const ComputationNodePtr unpoolInputValues,
                                                                                        const ComputationNodePtr poolInputValues,
                                                                                        const TensorShape& kernelShape, const TensorShape& strideShape,
                                                                                        const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
                                                                                        ImageLayoutKind imageLayout,
                                                                                        const std::wstring nodeName)
{
    auto unpoolingNode = New<MaxUnpoolingNode<ElemType>>(net.GetDeviceId(), nodeName,
                                                         kernelShape, strideShape, autoPadding, lowerPad, upperPad, imageLayout);
    return net.AddNodeToNetAndAttachInputs(unpoolingNode, { unpoolInputValues, poolInputValues });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::MaxPooling(const ComputationNodePtr inputValues,
const size_t windowWidth, const size_t windowHeight, const size_t horizontalSubsample, const size_t verticalSubsample, ImageLayoutKind imageLayoutKind,
@ -637,6 +657,42 @@ shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Plus(
return net.AddNodeToNetAndAttachInputs(New<PlusNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
// Element-wise a < b comparison node, added to the network with inputs {a, b}.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Less(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    auto lessNode = New<LessNode<ElemType>>(net.GetDeviceId(), nodeName);
    return net.AddNodeToNetAndAttachInputs(lessNode, { a, b });
}
template <class ElemType>
// Element-wise a == b comparison node, added to the network with inputs {a, b}.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Equal(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    auto equalNode = New<EqualNode<ElemType>>(net.GetDeviceId(), nodeName);
    return net.AddNodeToNetAndAttachInputs(equalNode, { a, b });
}
template <class ElemType>
// Element-wise a > b comparison node, added to the network with inputs {a, b}.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Greater(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    auto greaterNode = New<GreaterNode<ElemType>>(net.GetDeviceId(), nodeName);
    return net.AddNodeToNetAndAttachInputs(greaterNode, { a, b });
}
template <class ElemType>
// Element-wise a >= b comparison node, added to the network with inputs {a, b}.
// BUGFIX: this previously constructed a LessNode (copy-paste from Less()),
// so the resulting node computed a < b instead of a >= b. GreaterEqualNode
// is registered as its own operation in CreateStandardNode.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::GreaterEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    return net.AddNodeToNetAndAttachInputs(New<GreaterEqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
// Element-wise a != b comparison node, added to the network with inputs {a, b}.
// BUGFIX: this previously constructed an EqualNode (copy-paste from Equal()),
// so the resulting node computed a == b instead of a != b. NotEqualNode
// is registered as its own operation in CreateStandardNode.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::NotEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    return net.AddNodeToNetAndAttachInputs(New<NotEqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
// Element-wise a <= b comparison node, added to the network with inputs {a, b}.
// BUGFIX: this previously constructed a GreaterNode (copy-paste from Greater()),
// so the resulting node computed a > b instead of a <= b. LessEqualNode
// is registered as its own operation in CreateStandardNode.
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::LessEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{
    return net.AddNodeToNetAndAttachInputs(New<LessEqualNode<ElemType>>(net.GetDeviceId(), nodeName), { a, b });
}
template <class ElemType>
shared_ptr<ComputationNode<ElemType>> ComputationNetworkBuilder<ElemType>::Minus(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName)
{

Просмотреть файл

@ -54,7 +54,7 @@ public:
ComputationNodePtr CreateSparseInputNode(const std::wstring& inputName, const TensorShape& sampleLayout, const wstring& dynamicAxisName = L"");
ComputationNodePtr CreateConvolutionNode(const std::wstring& nodeName, const TensorShape& kernelShape, const TensorShape& mapCount, const TensorShape& strideShape,
const std::vector<bool>& sharing, const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples);
bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples);
ComputationNodePtr CreateConvolutionNode(const std::wstring& nodeName, const size_t kernelWidth, const size_t kernelHeight, const size_t outputChannels,
const size_t horizontalSubsample, const size_t verticalSubsample,
ImageLayoutKind imageLayoutKind, const bool zeroPadding = false, const size_t maxTempMemSizeInSamples = 0);
@ -81,13 +81,19 @@ public:
const ComputationNodePtr inputValues,
const TensorShape& kernelShape, const TensorShape& mapCount, const TensorShape& strideShape,
const std::vector<bool>& sharing, const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples,
bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples,
const std::wstring nodeName = L"");
ComputationNodePtr Pooling(const ComputationNodePtr inputValues,
PoolKind poolKind, const TensorShape& kernelShape, const TensorShape& strideShape,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout,
const std::wstring nodeName = L"");
ComputationNodePtr MaxUnpooling(const ComputationNodePtr unpoolInputValues,
const ComputationNodePtr poolInputValues,
const TensorShape& kernelShape, const TensorShape& strideShape,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout,
const std::wstring nodeName = L"");
ComputationNodePtr MaxPooling(const ComputationNodePtr inputValues,
const size_t windowWidth, const size_t windowHeight, const size_t horizontalSubsample, const size_t verticalSubsample, ImageLayoutKind imageLayoutKind,
const std::wstring nodeName = L"");
@ -98,6 +104,12 @@ public:
ComputationNodePtr CRF(const ComputationNodePtr label, const ComputationNodePtr postDepScore, const ComputationNodePtr transition_score, const std::wstring nodeName = L"");
#endif
ComputationNodePtr Abs(const ComputationNodePtr a, const std::wstring nodeName = L"");
ComputationNodePtr Less(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr Equal(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr Greater(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr GreaterEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr NotEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr LessEqual(const ComputationNodePtr a, const ComputationNodePtr b, const std::wstring nodeName = L"");
ComputationNodePtr ClassCrossEntropyWithSoftmax(const ComputationNodePtr label, const ComputationNodePtr prediction, const ComputationNodePtr input_weight, const ComputationNodePtr cls_log_post_prob, const std::wstring nodeName = L"");
ComputationNodePtr Clip(const ComputationNodePtr a, const ComputationNodePtr b, const ComputationNodePtr c, const std::wstring nodeName = L"");
ComputationNodePtr Cos(const ComputationNodePtr a, const std::wstring nodeName = L"");

Просмотреть файл

@ -167,25 +167,19 @@ void ComputationNetwork::DeleteNode(const std::wstring& nodeName)
// replace a named node by newNode of the same type under the same name, including moving over all network links
// This is used in the KL-reg based adaptation to reduce feature copy
// need to update all the mappings as well childrens.
void ComputationNetwork::ChangeNode(wstring nodeName, ComputationNodeBasePtr newNode)
void ComputationNetwork::ReplaceNode(wstring nodeName, ComputationNodeBasePtr newNode)
{
ComputationNodeBasePtr oldNode = GetNodeFromName(nodeName);
if (newNode->NodeName() != nodeName) // TODO: This was not tested for earlier; I hope no code depends on this.
InvalidArgument("ChangeNode: newNode must have the same name as the old node.");
if (oldNode->OperationName() != newNode->OperationName())
InvalidArgument("ChangeNode: newNode must have the same type as the old node.");
InvalidArgument("ReplaceNode: newNode must have the same type as the old node.");
InvalidateCompiledNetwork();
// change all nodes to have old node as input to point to the new node instead
for (auto nodeIter = m_nameToNodeMap.begin(); nodeIter != m_nameToNodeMap.end(); nodeIter++)
{
ComputationNodeBasePtr node = nodeIter->second;
for (int i = 0; i < node->GetNumInputs(); i++)
if (node->GetInputs()[i] == oldNode)
node->SetInput(i, newNode);
}
// change all nodes that have old node as input to point to the new node instead
ChangeNodeInputs(oldNode, newNode);
// change all inputs of this new node to share the old one's inputs
for (int i = 0; i < oldNode->GetNumInputs(); i++)
@ -208,10 +202,45 @@ void ComputationNetwork::ChangeNode(wstring nodeName, ComputationNodeBasePtr new
}
}
// Inserts a newNode such that the inputNodeName serves as the input to the newNode
// Prior to this call, inputNodeName should be set as the input to newNode.
// newNodeTags - node-group tags (feature/label/criterion/...) to apply to newNode.
void ComputationNetwork::InsertNode(wstring inputNodeName, ComputationNodeBasePtr newNode, const std::set<std::wstring>& newNodeTags)
{
    // Sanity-check the new node before mutating the graph (non-final validation pass).
    newNode->Validate(false);

    ComputationNodeBasePtr inputNode = GetNodeFromName(inputNodeName);

    // Any edit of the graph invalidates the compiled execution plan.
    InvalidateCompiledNetwork();

    // Re-point every consumer of inputNode at newNode, then register newNode itself.
    ChangeNodeInputs(inputNode, newNode);
    AddNodeToNet(newNode);

    // Propagate the requested tags into the corresponding node groups.
    for (const auto& tag : newNodeTags)
        AddToNodeGroup(tag, newNode);
}
// change all nodes that have fromNode as input to have toNode as input instead
void ComputationNetwork::ChangeNodeInputs(ComputationNodeBasePtr fromNode, ComputationNodeBasePtr toNode)
{
    // Walk every node in the network and rewire each input slot that
    // currently points at fromNode so that it points at toNode.
    for (const auto& nameAndNode : m_nameToNodeMap)
    {
        const ComputationNodeBasePtr& node = nameAndNode.second;
        for (int i = 0; i < node->GetNumInputs(); i++)
        {
            if (node->GetInputs()[i] == fromNode)
                node->SetInput(i, toNode);
        }
    }
}
// replace the old node with the current node, assuming the old node is a leaf node
// need to update those nodes who use oldNode as their child
// TODO: Can this be called with a node that's already part of the network? This is currently allowed, but should it?
// BUGBUG: Seems ChangeNode() also updates node groups. Why doesn't this function?
// BUGBUG: Seems ReplaceNode() also updates node groups. Why doesn't this function?
// BUGBUG: What if newNode is the one referenced by oldNodeName?
// BUGBUG: Or what if an unrelated node of the same name exists?
void ComputationNetwork::ReplaceLeafNode(wstring oldNodeName, ComputationNodeBasePtr newNode)

Просмотреть файл

@ -46,6 +46,10 @@ ComputationNetwork::ComputationNetwork(const IConfigRecordPtr configp) :
DEVICEID_TYPE deviceId = (DEVICEID_TYPE)(int) config[L"deviceId"];
deque<ComputationNodeBasePtr> workList;
// process 'special nodes'
ProcessSpecialNodes(config, workList);
// flatten the set of all nodes
// we collect all root ComputationNodes from the config record, and then expand into all their children by work-list processing
// TODO: This currently only supports nodes of the same ElemType. We could allow conversion operators.
@ -62,6 +66,30 @@ ComputationNetwork::ComputationNetwork(const IConfigRecordPtr configp) :
ConstructFromRoots(deviceId, move(workList), map<ComputationNodeBasePtr, ComputationNodeBasePtr>()/*no mapping*/);
}
// process the special-nodes parameters
// Scans the config record for members named "<group>Nodes" (e.g. "featureNodes"),
// tags each listed node with its group name, and appends it to 'workList' so the
// network-construction pass treats it as a root.
void ComputationNetwork::ProcessSpecialNodes(const ScriptableObjects::IConfigRecord& config, std::deque<ComputationNodeBasePtr>& workList)
{
    // The recognized node-group names; anything else ending in "Nodes" is ignored.
    set<wstring> nodeGroupNames{ L"feature", L"label", L"criterion", L"evaluation", L"output" };
    for (let& id : config.GetMemberIds())
    {
        // Accept only ids whose FIRST occurrence of "Nodes" is the 5-char suffix.
        // NOTE(review): an id like L"NodesNodes" is rejected here because find()
        // returns the earlier occurrence -- presumably intentional; confirm.
        let pos = id.find(L"Nodes");
        if (pos == wstring::npos || pos != id.size() - 5) // special node name = node-group name + L"Nodes"
            continue;
        let nodeGroup = id.substr(0, id.size() - 5);
        if (nodeGroupNames.find(nodeGroup) == nodeGroupNames.end())
            continue;
        // Flatten the (possibly nested) config array into a node list.
        let nodeSet = config[id];
        let nodes = ScriptableObjects::ConfigArray::FlattenedVectorFrom<ComputationNodeBasePtr>(nodeSet);
        for (let& node : nodes)
        {
            // Tag each node with its group and make it a construction root.
            node->SetTag(nodeGroup);
            workList.push_back(node);
        }
    }
}
// construct a network from a list of roots (passed in 'workList')
// This will add to m_nameToNodeMap[] all roots and all nodes reachable from those roots.
// If 'replacements' is given, all root pointers as well as all input pointers of reachable nodes will be mapped. This is needed for model editing.
@ -327,7 +355,10 @@ public:
// determine all roots
deque<ComputationNodeBasePtr> roots;
// start with the original network
// process 'special nodes'
// BUGBUG: This does not allow to unset tags. If special nodes are listed, they should completely override existing tags for the same node.
ProcessSpecialNodes(config, workList);
// then the original network
for (let& node : allNodes)
if (parents.find(node)->second.empty()) // no parents: it's a root
roots.push_back(node);

Просмотреть файл

@ -37,7 +37,8 @@
#define CNTK_MODEL_VERSION_6 6 // Batch norm blending
#define CNTK_MODEL_VERSION_7 7 // ElemType tag in model file
#define CNTK_MODEL_VERSION_8 8 // DynamicAxis for inputs
#define CURRENT_CNTK_MODEL_VERSION CNTK_MODEL_VERSION_8
#define CNTK_MODEL_VERSION_9 9 // Transpose flag in ConvolutionNode to support deconvolution.
#define CURRENT_CNTK_MODEL_VERSION CNTK_MODEL_VERSION_9
extern bool g_shareNodeValueMatrices;

Просмотреть файл

@ -52,15 +52,15 @@ class ConvolutionNodeBase : public ComputationNode<ElemType>
public:
ConvolutionNodeBase(DEVICEID_TYPE deviceId, const wstring& name)
: Base(deviceId, name), m_poolKind(PoolKind::None), m_maxTempMemSizeInSamples(0)
: Base(deviceId, name), m_poolKind(PoolKind::None), m_transpose(false), m_maxTempMemSizeInSamples(0)
{
}
ConvolutionNodeBase(DEVICEID_TYPE deviceId, const wstring& name, const TensorShape& kernelShape, const TensorShape& mapCount, const TensorShape& strideShape,
const std::vector<bool>& sharing, const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
PoolKind poolKind, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
: Base(deviceId, name), m_kernelShape(kernelShape), m_mapCount(mapCount), m_stride(strideShape), m_sharing(sharing),
m_autoPad(autoPadding), m_lowerPad(lowerPad), m_upperPad(upperPad), m_poolKind(poolKind),
m_imageLayout(imageLayout), m_maxTempMemSizeInSamples(maxTempMemSizeInSamples)
const std::vector<bool>& sharing, const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
PoolKind poolKind, bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
: Base(deviceId, name), m_kernelShape(kernelShape), m_mapCount(mapCount), m_stride(strideShape), m_sharing(sharing),
m_autoPad(autoPadding), m_lowerPad(lowerPad), m_upperPad(upperPad), m_poolKind(poolKind), m_transpose(transpose),
m_imageLayout(imageLayout), m_maxTempMemSizeInSamples(maxTempMemSizeInSamples)
{
}
@ -79,6 +79,7 @@ public:
fstream << (int32_t)m_poolKind;
fstream << (int32_t)m_imageLayout;
fstream << m_maxTempMemSizeInSamples;
fstream << m_transpose;
}
void Load(File& fstream, size_t modelVersion) override
@ -102,7 +103,11 @@ public:
fstream >> layout;
m_imageLayout = (ImageLayoutKind)layout;
fstream >> m_maxTempMemSizeInSamples;
}
}
if (modelVersion >= CNTK_MODEL_VERSION_9)
{
fstream >> m_transpose;
}
}
void CopyTo(ComputationNodeBasePtr nodeP, const std::wstring& newName, const CopyNodeFlags flags) const override
@ -119,64 +124,12 @@ public:
node->m_lowerPad = m_lowerPad;
node->m_upperPad = m_upperPad;
node->m_poolKind = m_poolKind;
node->m_transpose = m_transpose;
node->m_imageLayout = m_imageLayout;
node->m_maxTempMemSizeInSamples = m_maxTempMemSizeInSamples;
}
}
void BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
auto sliceOutputGrad = GradientFor(fr);
if (m_poolKind == PoolKind::None)
{
if (inputIndex == 0) // derivative with respect to the weight matrix
{
auto& grad = Input(0)->GradientAsMatrix();
auto sliceInput1Value = Input(1)->ValueFor(fr);
m_convEng->BackwardKernel(sliceOutputGrad, sliceInput1Value, grad, fr.IsAllFrames(), *m_tempMatrix);
}
else if (inputIndex == 1) // derivative with respect to the input feature
{
auto& input0 = Input(0)->ValueAsMatrix();
auto sliceInput1Grad = Input(1)->GradientFor(fr);
m_convEng->BackwardData(sliceOutputGrad, input0, sliceInput1Grad, *m_tempMatrix);
}
}
else
{
Matrix<ElemType> sliceInput0Grad = Input(0)->GradientFor(fr);
Matrix<ElemType> sliceInput0Value = Input(0)->ValueFor(fr);
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
m_convEng->BackwardPooling(sliceOutputValue, sliceOutputGrad, sliceInput0Value, sliceInput0Grad);
}
}
bool OutputUsedInComputingInputNodesGradients() const override
{
// The ConvolutionNode requires output values only for max pooling.
return m_poolKind == PoolKind::Max;
}
void ForwardProp(const FrameRange& fr) override
{
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
if (m_poolKind == PoolKind::None)
{
const Matrix<ElemType>& input0 = Input(0)->ValueAsMatrix();
Matrix<ElemType> sliceInput1Value = Input(1)->ValueFor(fr);
m_convEng->Forward(sliceInput1Value, input0, sliceOutputValue, *m_tempMatrix);
}
else
{
const Matrix<ElemType>& input0 = Input(0)->ValueFor(fr);
m_convEng->ForwardPooling(input0, sliceOutputValue);
}
}
void DumpNodeInfo(const bool printValues, const bool printMetadata, File& fstream) const override
{
Base::DumpNodeInfo(printValues, printMetadata, fstream);
@ -195,6 +148,7 @@ protected:
TensorShape m_lowerPad;
TensorShape m_upperPad;
PoolKind m_poolKind;
bool m_transpose;
ImageLayoutKind m_imageLayout;
size_t m_maxTempMemSizeInSamples;
@ -214,6 +168,7 @@ protected: \
using Base::m_lowerPad; \
using Base::m_upperPad; \
using Base::m_poolKind; \
using Base::m_transpose; \
using Base::m_imageLayout; \
using Base::m_maxTempMemSizeInSamples; \
using Base::m_tempMatrix; \
@ -241,8 +196,8 @@ public:
}
ConvolutionNode(DEVICEID_TYPE deviceId, const wstring& name, const TensorShape& kernelShape, const TensorShape& mapCount, const TensorShape& strideShape,
const std::vector<bool>& sharing, const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
: Base(deviceId, name, kernelShape, mapCount, strideShape, sharing, autoPadding, lowerPad, upperPad, PoolKind::None, imageLayout, maxTempMemSizeInSamples),
bool transpose, ImageLayoutKind imageLayout, size_t maxTempMemSizeInSamples)
: Base(deviceId, name, kernelShape, mapCount, strideShape, sharing, autoPadding, lowerPad, upperPad, PoolKind::None, transpose, imageLayout, maxTempMemSizeInSamples),
m_convolution2D(false)
{
}
@ -250,16 +205,16 @@ public:
const size_t horizontalSubsample, const size_t verticalSubsample, ImageLayoutKind imageLayout,
bool zeroPadding, size_t maxTempMemSizeInSamples)
: ConvolutionNode(deviceId, name, TensorShape(kernelWidth, kernelHeight, 1), TensorShape(1, 1, outputChannels),
TensorShape(horizontalSubsample, verticalSubsample, 1), vector<bool>{true},
TensorShape(horizontalSubsample, verticalSubsample, 1), vector<bool>{true},
vector<bool>{zeroPadding}, TensorShape(0), TensorShape(0),
imageLayout, maxTempMemSizeInSamples)
false, imageLayout, maxTempMemSizeInSamples)
{
m_convolution2D = true;
}
ConvolutionNode(const ScriptableObjects::IConfigRecordPtr configp)
: ConvolutionNode(configp->Get(L"deviceId"), L"<placeholder>", configp->Get(L"kernelShape"), configp->Get(L"mapCount"), configp->Get(L"strideShape"),
configp->Get(L"dimSharing"), configp->Get(L"dimPadding"), configp->Get(L"dimPadLower"), configp->Get(L"dimPadUpper"),
ImageLayoutKindFrom(configp->Get(L"imageLayout")), configp->Get(L"maxTempMemSizeInSamples"))
configp->Get(L"transpose"), ImageLayoutKindFrom(configp->Get(L"imageLayout")), configp->Get(L"maxTempMemSizeInSamples"))
{
AttachInputsFromConfig(configp, GetExpectedNumInputs());
}
@ -317,6 +272,48 @@ public:
}
}
void ForwardProp(const FrameRange& fr) override
{
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
const Matrix<ElemType>& input0 = Input(0)->ValueAsMatrix();
Matrix<ElemType> sliceInput1Value = Input(1)->ValueFor(fr);
if (!m_transpose)
m_convEng->Forward(sliceInput1Value, input0, sliceOutputValue, *m_tempMatrix);
else
{
// BackwardData adds results to the output so need to zero them out first.
// REVIEW alexeyk: should be rolled into BackwardData itself.
sliceOutputValue.SetValue(0);
m_convEng->BackwardData(sliceInput1Value, input0, sliceOutputValue, *m_tempMatrix);
}
}
void BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
auto sliceOutputGrad = GradientFor(fr);
if (inputIndex == 0) // derivative with respect to the weight matrix
{
auto& grad = Input(0)->GradientAsMatrix();
auto sliceInput1Value = Input(1)->ValueFor(fr);
if (!m_transpose)
m_convEng->BackwardKernel(sliceOutputGrad, sliceInput1Value, grad, fr.IsAllFrames(), *m_tempMatrix);
else
m_convEng->BackwardKernel(sliceInput1Value, sliceOutputGrad, grad, fr.IsAllFrames(), *m_tempMatrix);
}
else if (inputIndex == 1) // derivative with respect to the input feature
{
auto& input0 = Input(0)->ValueAsMatrix();
auto sliceInput1Grad = Input(1)->GradientFor(fr);
if (!m_transpose)
m_convEng->BackwardData(sliceOutputGrad, input0, sliceInput1Grad, *m_tempMatrix);
else
{
// REVIEW alexeyk: Forward overwrites values in sliceInput1Grad. Should handle correctly instead.
m_convEng->Forward(sliceOutputGrad, input0, sliceInput1Grad, *m_tempMatrix);
}
}
}
void Validate(bool isFinalValidationPass) override
{
Base::Validate(isFinalValidationPass);
@ -324,6 +321,8 @@ public:
size_t inputIdx = GetExpectedNumInputs() - 1;
TensorShape inputShape;
TensorShape outputShape;
// If 2D convolution syntax is used then some of the tensor dimensions need to be inferred.
if (m_convolution2D)
{
// Need to update some tensors with correct input dims.
@ -346,38 +345,42 @@ public:
if (isFinalValidationPass && (Input(0)->GetAsMatrixNumCols() != weightCols || Input(0)->GetAsMatrixNumRows() != mapCount))
{
LogicError("Convolution weight matrix %ls should have dimension [%d, %d] which is [outputChannels, kernelWidth * kernelHeight * inputChannels]",
LogicError("Convolution weight matrix %ls should have dimension [%d, %d] which is [outputChannels, kernelWidth * kernelHeight * inputChannels]",
Input(0)->NodeName().c_str(), (int)mapCount, (int)weightCols);
}
auto outDims = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
// ConvolveGeometry always uses CHW.
SetDims(ImageDimensions(outDims, ImageLayoutKind::CHW).AsTensorShape(m_imageLayout), HasMBLayout());
outputShape = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
}
else
{
if (m_imageLayout != ImageLayoutKind::CHW)
{
InvalidArgument(
"%ls %ls supports only cuDNN (CHW) data layout. "
"Please specify imageLayout=\"cudnn\" in %ls node in your script "
"and make sure input data layout is CHW", NodeName().c_str(), OperationName().c_str(), NodeName().c_str());
}
inputShape = GetInputSampleLayout(inputIdx);
auto outDims = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
SetDims(outDims, HasMBLayout());
if (!m_transpose)
{
outputShape = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
}
else
{
// In case of transpose (deconvolution), node input (inputShape) is really the output of the convolution
// and node output (outDims) is convolution input. ConvolveGeometry does not care about deconvolutions (it does not have to).
outputShape = ConvolveGeometry::ComputeInputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
}
}
// ConvolveGeometry always uses CHW.
SetDims(ImageDimensions(outputShape, ImageLayoutKind::CHW).AsTensorShape(m_imageLayout), HasMBLayout());
if (isFinalValidationPass)
{
if (m_convEng == nullptr)
{
auto geometry = std::make_shared<ConvolveGeometry>(inputShape, m_kernelShape, m_mapCount, m_stride,
auto geometry = std::make_shared<ConvolveGeometry>(!m_transpose ? inputShape : outputShape,
m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
m_convEng = ConvolutionEngine<ElemType>::Create(geometry, m_deviceId, m_imageLayout,
m_maxTempMemSizeInSamples, m_poolKind);
m_maxTempMemSizeInSamples, m_poolKind,
ConvolutionEngineKind::All, NodeName());
}
if (Input(0)->GetAsMatrixNumCols() != m_kernelShape.GetNumElements() ||
@ -404,14 +407,18 @@ public:
void SetmMaxTempMemSizeInSamples(const size_t maxTempMemSizeInSamples)
{
m_maxTempMemSizeInSamples = maxTempMemSizeInSamples;
if (m_convEng != nullptr)
m_convEng->SetmMaxTempMemSizeInSamples(maxTempMemSizeInSamples);
}
protected:
// Flag that indicates whether the node is created using 2D-syntax.
bool m_convolution2D;
};
// -----------------------------------------------------------------------
// PoolingNode (inputFeature)
// Performs max or average ND pooling.
// -----------------------------------------------------------------------
template <class ElemType>
@ -430,9 +437,9 @@ public:
{
}
PoolingNode(DEVICEID_TYPE deviceId, const wstring& name, PoolKind pool, const TensorShape& kernelShape, const TensorShape& strideShape,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout)
: Base(deviceId, name, kernelShape, TensorShape(1), strideShape, vector<bool>{true}, autoPadding, lowerPad, upperPad, pool, imageLayout, 0)
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout)
: Base(deviceId, name, kernelShape, TensorShape(1), strideShape, vector<bool>{true}, autoPadding, lowerPad, upperPad, pool, false, imageLayout, 0)
{
}
PoolingNode(const ScriptableObjects::IConfigRecordPtr configp)
@ -445,6 +452,129 @@ public:
}
public:
void ForwardProp(const FrameRange& fr) override
{
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
const Matrix<ElemType>& input0 = Input(0)->ValueFor(fr);
m_convEng->ForwardPooling(input0, sliceOutputValue);
}
void BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
auto sliceOutputGrad = GradientFor(fr);
Matrix<ElemType> sliceInput0Grad = Input(0)->GradientFor(fr);
Matrix<ElemType> sliceInput0Value = Input(0)->ValueFor(fr);
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
m_convEng->BackwardPooling(sliceOutputValue, sliceOutputGrad, sliceInput0Value, sliceInput0Grad);
}
bool OutputUsedInComputingInputNodesGradients() const override
{
// The PoolingNode requires output values only for max pooling.
return m_poolKind == PoolKind::Max;
}
void Validate(bool isFinalValidationPass) override
{
auto inputShape = GetInputSampleLayout(0);
ValidatePooling(inputShape, isFinalValidationPass);
if (isFinalValidationPass)
{
if (m_convEng == nullptr)
{
auto geometry = std::make_shared<ConvolveGeometry>(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
m_convEng = ConvolutionEngine<ElemType>::Create(geometry, m_deviceId, m_imageLayout,
m_maxTempMemSizeInSamples, m_poolKind,
ConvolutionEngineKind::All, NodeName());
}
}
}
protected:
void ValidatePooling(const TensorShape& inputShape, bool isFinalValidationPass)
{
Base::Validate(isFinalValidationPass);
InferMBLayoutFromInputsForStandardCase(isFinalValidationPass);
if (m_imageLayout != ImageLayoutKind::CHW)
{
InvalidArgument(
"%ls %ls supports only cuDNN (CHW) data layout. "
"Please specify imageLayout=\"cudnn\" in %ls node in your script "
"and make sure input data layout is CHW", NodeName().c_str(), OperationName().c_str(), NodeName().c_str());
}
auto outDims = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
SetDims(outDims, HasMBLayout());
}
};
// -----------------------------------------------------------------------
// MaxUnpoolingNode (unpoolInputValues, poolInputValues)
// Performs "max unpooling" operation. Max unpooling mirrors the operation
// performed by max pooling node and depends on the values provided to
// the max pooling node (so unlike deconvolution operation, it is not
// completely independent). Unpooling takes 2 inputs: features to be unpooled,
// which tensor has the same shape as corresponding max pooling node output
// and inputs for the original pooling node. Unpooling node
// produces an output which has the same dimensions as input to the
// corresponding max pooling node (i.e. poolInputValues).
// TODO: need to add support for other pooling types, for example,
// average unpooling. Note that in this case, generic unpooling operation
// will take different number of inputs depending on pooling type.
// -----------------------------------------------------------------------
template <class ElemType>
class MaxUnpoolingNode : public ConvolutionNodeBase<ElemType>, public NumInputs<2>
{
typedef ConvolutionNodeBase<ElemType> Base;
UsingConvolutionNodeBaseMembers;
static const std::wstring TypeName() { return L"MaxUnpooling"; }
public:
MaxUnpoolingNode(DEVICEID_TYPE deviceId, const wstring& name)
: Base(deviceId, name)
{
}
MaxUnpoolingNode(DEVICEID_TYPE deviceId, const wstring& name, const TensorShape& kernelShape, const TensorShape& strideShape,
const std::vector<bool>& autoPadding, const TensorShape& lowerPad, const TensorShape& upperPad,
ImageLayoutKind imageLayout)
: Base(deviceId, name, kernelShape, TensorShape(1), strideShape, vector<bool>{true}, autoPadding, lowerPad, upperPad, PoolKind::Max, true, imageLayout, 0)
{
}
MaxUnpoolingNode(const ScriptableObjects::IConfigRecordPtr configp)
: MaxUnpoolingNode(configp->Get(L"deviceId"), L"<placeholder>", configp->Get(L"kernelShape"),
configp->Get(L"strideShape"), configp->Get(L"dimPadding"), configp->Get(L"dimPadLower"), configp->Get(L"dimPadUpper"),
ImageLayoutKindFrom(configp->Get(L"imageLayout")))
{
AttachInputsFromConfig(configp, GetExpectedNumInputs());
}
public:
void ForwardProp(const FrameRange& fr) override
{
const Matrix<ElemType>& unpoolInput = Input(0)->ValueFor(fr);
const Matrix<ElemType>& poolInput = Input(1)->ValueFor(fr);
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
m_convEng->MaxUnpooling(unpoolInput, poolInput, sliceOutputValue);
}
void BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
if (inputIndex != 0)
return;
auto sliceOutputGrad = GradientFor(fr);
Matrix<ElemType> sliceInput0Grad = Input(0)->GradientFor(fr);
// BUGBUG: ForwardPooling overwrites values in sliceInput1Grad. Should handle correctly instead.
m_convEng->ForwardPooling(sliceOutputGrad, sliceInput0Grad);
}
bool OutputUsedInComputingInputNodesGradients() const override { return false; }
void Validate(bool isFinalValidationPass) override
{
Base::Validate(isFinalValidationPass);
@ -459,18 +589,22 @@ public:
}
auto inputShape = GetInputSampleLayout(0);
auto outDims = ConvolveGeometry::ComputeOutputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
SetDims(outDims, HasMBLayout());
// Same as in case of deconvolution, node input (inputShape) is really the output of the max pooling
// and node output (outDims) is pooling input.
auto outputShape = ConvolveGeometry::ComputeInputShape(inputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
SetDims(outputShape, HasMBLayout());
if (isFinalValidationPass)
{
if (m_convEng == nullptr)
{
auto geometry = std::make_shared<ConvolveGeometry>(inputShape, m_kernelShape, m_mapCount, m_stride,
auto geometry = std::make_shared<ConvolveGeometry>(outputShape, m_kernelShape, m_mapCount, m_stride,
m_sharing, m_autoPad, m_lowerPad, m_upperPad);
// Create reference engine as it's the only engine that implements unpooling.
m_convEng = ConvolutionEngine<ElemType>::Create(geometry, m_deviceId, m_imageLayout,
m_maxTempMemSizeInSamples, m_poolKind);
m_maxTempMemSizeInSamples, m_poolKind,
ConvolutionEngineKind::Reference,
NodeName());
}
}
}
@ -489,20 +623,20 @@ class PoolingNodeBase : public ComputationNode<ElemType>, public NumInputs<1>
public:
PoolingNodeBase(DEVICEID_TYPE deviceId, const wstring& name)
: Base(deviceId, name),
m_windowWidth(SIZE_MAX),
m_windowHeight(SIZE_MAX),
m_horizontalSubsample(SIZE_MAX),
m_verticalSubsample(SIZE_MAX),
m_imageLayoutKind(ImageLayoutKind::HWC)
m_windowWidth(SIZE_MAX),
m_windowHeight(SIZE_MAX),
m_horizontalSubsample(SIZE_MAX),
m_verticalSubsample(SIZE_MAX),
m_imageLayoutKind(ImageLayoutKind::HWC)
{
}
PoolingNodeBase(DEVICEID_TYPE deviceId, const wstring& name, const size_t windowWidth, const size_t windowHeight, const size_t horizontalSubsample, const size_t verticalSubsample, ImageLayoutKind imageLayoutKind)
: Base(deviceId, name),
m_windowWidth(windowWidth),
m_windowHeight(windowHeight),
m_horizontalSubsample(horizontalSubsample),
m_verticalSubsample(verticalSubsample),
m_imageLayoutKind(imageLayoutKind)
m_windowWidth(windowWidth),
m_windowHeight(windowHeight),
m_horizontalSubsample(horizontalSubsample),
m_verticalSubsample(verticalSubsample),
m_imageLayoutKind(imageLayoutKind)
{
}
PoolingNodeBase(const ScriptableObjects::IConfigRecordPtr configp)
@ -515,8 +649,8 @@ public:
void Save(File& fstream) const override
{
Base::Save(fstream);
uint32_t imageLayoutKind = (uint32_t) m_imageLayoutKind;
uint32_t windowWidth = (uint32_t) m_windowWidth;
uint32_t imageLayoutKind = (uint32_t)m_imageLayoutKind;
uint32_t windowWidth = (uint32_t)m_windowWidth;
fstream << windowWidth << imageLayoutKind << m_windowHeight << m_horizontalSubsample << m_verticalSubsample;
}
@ -549,6 +683,14 @@ public:
}
}
void ForwardProp(const FrameRange& fr) override
{
Matrix<ElemType> sliceInput0Value = Input(0)->ValueFor(fr);
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
m_convEng->ForwardPooling(sliceInput0Value, sliceOutputValue);
}
void BackpropTo(const size_t /*inputIndex*/, const FrameRange& fr) override
{
Matrix<ElemType> sliceInput0Grad = Input(0)->GradientFor(fr);
@ -560,14 +702,6 @@ public:
m_convEng->BackwardPooling(sliceOutputValue, sliceOutputGrad, sliceInput0Value, sliceInput0Grad);
}
void ForwardProp(const FrameRange& fr) override
{
Matrix<ElemType> sliceInput0Value = Input(0)->ValueFor(fr);
Matrix<ElemType> sliceOutputValue = ValueFor(fr);
m_convEng->ForwardPooling(sliceInput0Value, sliceOutputValue);
}
void Validate(bool isFinalValidationPass) override
{
Base::Validate(isFinalValidationPass);
@ -684,7 +818,11 @@ public:
{
Base::Validate(isFinalValidationPass);
if (isFinalValidationPass && m_convEng == nullptr)
m_convEng = ConvolutionEngine<ElemType>::Create(m_geometry, m_deviceId, m_imageLayoutKind, 0, PoolKind::Max);
{
m_convEng = ConvolutionEngine<ElemType>::Create(m_geometry, m_deviceId, m_imageLayoutKind,
0, PoolKind::Max,
ConvolutionEngineKind::All, NodeName());
}
}
};
@ -720,7 +858,11 @@ public:
{
Base::Validate(isFinalValidationPass);
if (isFinalValidationPass && m_convEng == nullptr)
m_convEng = ConvolutionEngine<ElemType>::Create(m_geometry, m_deviceId, m_imageLayoutKind, 0, PoolKind::Average);
{
m_convEng = ConvolutionEngine<ElemType>::Create(m_geometry, m_deviceId, m_imageLayoutKind,
0, PoolKind::Average,
ConvolutionEngineKind::All, NodeName());
}
}
};

Просмотреть файл

@ -562,4 +562,89 @@ public:
template class ClipNode<float>;
template class ClipNode<double>;
// -----------------------------------------------------------------------
// CompareNode(a,b)
// -----------------------------------------------------------------------
// Template parameters compType (-1, 0, 1) and polarity (0, 1) are used selecting one of the six basic comparison operations.
// Note: parametrizing the 6 comparison operations with the the two parameters 'compType' an 'polarity' is motivated by:
//
// comp(a, b, compType, polarity) <==> sign(a-b) == compType, if polarity == 0
// sign(a-b) != compType, if polarity == 1
template <class ElemType, int compType, int polarity>
class ComparisonNode : public BinaryElementWiseNode<ElemType>
{
private:
// Index corresponds to different comparison operations.
const static int index = 1 + compType + 3 * polarity;
// The operations are indexed in the same order they appear in enum ElementWiseOperator: "Less", "Equal", "Greater", "GreaterEqual", "NotEqual", "LessEqual".
// This ordering is checked below:
static_assert(1 == ElementWiseOperator::opEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(2 == ElementWiseOperator::opGreater - ElementWiseOperator::opLess, "ElementWiseOperator::opGreater has wrong value relative to ElementWiseOperator::opLess");
static_assert(3 == ElementWiseOperator::opGreaterEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opGreaterEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(4 == ElementWiseOperator::opNotEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opNotEqual has wrong value relative to ElementWiseOperator::opLess");
static_assert(5 == ElementWiseOperator::opLessEqual - ElementWiseOperator::opLess, "ElementWiseOperator::opLessEqual has wrong value relative to ElementWiseOperator::opLess");
public:
typedef BinaryElementWiseNode<ElemType> Base; UsingBinaryElementwiseNodeBaseMembers;
static const std::wstring TypeName()
{
const wchar_t* names[] = { L"Less", L"Equal", L"Greater", L"GreaterEqual", L"NotEqual", L"LessEqual" };
return names[index];
}
DeclareConstructorFromConfigWithNumInputs(ComparisonNode);
ComparisonNode(DEVICEID_TYPE deviceId, const wstring& name)
: Base(deviceId, name)
{
}
virtual bool InputUsedInComputingInputNodesGradients(size_t childIndex) const override { return childIndex == 0; }
virtual bool OutputUsedInComputingInputNodesGradients() const override { return false; }
virtual void /*ComputationNode::*/ ForwardProp(const FrameRange& fr) override
{
size_t rank = DetermineElementwiseTensorRank();
auto result = ValueTensorFor(rank, fr);
auto input0 = Input(0)->ValueTensorFor(rank, fr.AllowBroadcast());
auto input1 = Input(1)->ValueTensorFor(rank, fr.AllowBroadcast());
result.DoBinaryOpOf(0, input0, input1, 1.0f, static_cast<ElementWiseOperator> (ElementWiseOperator::opLess + index), ElementWiseOperator::opSum);
}
virtual void /*ComputationNode::*/ BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
// Function is piecewise constant --> gradient = 0
}
};
// Define macro that defines and instantiates different comparison nodes.
// Unfortuanately the C++ 11 type alias syntax doesn't work for mpic++ so we use this more ugly way.
#define DefineComparisonNode(ClassName, compType, polarity) \
template <class ElemType> \
class ClassName : public ComparisonNode<ElemType, compType, polarity> \
{ \
typedef ComparisonNode<ElemType, compType, polarity> Base; \
UsingComputationNodeMembersBoilerplate; \
\
public: \
static const std::wstring TypeName() { return Base::TypeName(); } \
DeclareConstructorFromConfigWithNumInputs(ClassName); \
ClassName(DEVICEID_TYPE deviceId, const wstring& name) \
: Base(deviceId, name) \
{ \
} \
}; \
\
template class ClassName<float>; \
template class ClassName<double>;
DefineComparisonNode(LessNode, -1, 0)
DefineComparisonNode(EqualNode, 0, 0)
DefineComparisonNode(GreaterNode, 1, 0)
DefineComparisonNode(GreaterEqualNode, -1, 1)
DefineComparisonNode(NotEqualNode, 0, 1)
DefineComparisonNode(LessEqualNode, 1, 1)
}}}

Просмотреть файл

@ -18,6 +18,11 @@
#endif
#include "BestGpu.h"
#include "MPIWrapper.h"
#include "DataDeserializer.h"
#include "SequencePacker.h"
#include "NoRandomizer.h"
#include "HeapMemoryProvider.h"
#include "InputAndParamNodes.h"
// TODO: Temporary mechanism to enable memory sharing for
// node output value matrices. This will go away when the
@ -26,7 +31,50 @@ bool g_shareNodeValueMatrices = false;
namespace Microsoft { namespace MSR { namespace CNTK {
template <class ElemType>
template <typename ElemType>
void CNTKEvalBase<ElemType>::Init(const std::string& config)
{
m_config.Parse(config);
size_t nThreads = m_config("numCPUThreads", "1");
CPUMatrix<ElemType>::SetNumThreads(nThreads);
g_shareNodeValueMatrices = m_config(L"shareNodeValueMatrices", false);
}
// CreateNetwork - create a network based on the network description
// networkDescription - network description
template <typename ElemType>
void CNTKEvalBase<ElemType>::CreateNetwork(const std::string& networkDescription)
{
ConfigParameters config;
config.Parse(networkDescription);
std::vector<wstring> outputNodeNames;
m_net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"outputNodeNames", outputNodeNames);
if (m_net == nullptr)
{
LogicError("Unable to construct network from description");
}
}
// Destroy - cleanup and remove this class
// NOTE: this destroys the object, and it can't be used past this point
template <typename ElemType>
void CNTKEvalBase<ElemType>::Destroy()
{
// cleanup everything
m_net.reset();
}
// ----------------------------------------------------------------------------
// Basic interface
// ----------------------------------------------------------------------------
template <typename ElemType>
void EVAL_API GetEval(IEvaluateModel<ElemType>** peval)
{
*peval = new CNTKEval<ElemType>();
@ -41,51 +89,11 @@ extern "C" EVAL_API void GetEvalD(IEvaluateModel<double>** peval)
GetEval(peval);
}
template <class ElemType>
void CNTKEval<ElemType>::Init(const std::string& config)
{
m_start = 0;
m_config.Parse(config);
size_t nThreads = m_config("numCPUThreads", "1");
CPUMatrix<ElemType>::SetNumThreads(nThreads);
g_shareNodeValueMatrices = m_config(L"shareNodeValueMatrices", false);
}
// Destroy - cleanup and remove this class
// NOTE: this destroys the object, and it can't be used past this point
template <class ElemType>
void CNTKEval<ElemType>::Destroy()
{
// cleanup everything
m_net.reset();
delete m_reader;
delete m_writer;
delete this;
}
// CreateNetwork - create a network based on the network description
// networkDescription - network description
template <class ElemType>
void CNTKEval<ElemType>::CreateNetwork(const std::string& networkDescription)
{
ConfigParameters config;
config.Parse(networkDescription);
std::vector<wstring> outputNodeNames;
m_net = GetModelFromConfig<ConfigParameters, ElemType>(config, outputNodeNames);
if (m_net == nullptr)
{
LogicError("Unable to construct network from description");
}
}
// GetNodeDimensions - Get the node dimensions of the specified nodes
// dimensions - map from name of node to dimension of the node, will be appended to for Input/Output scenarios
// nodeGroup - type of node we are requesting (input/output/specified)
// NOTE: when nodeGroup==specified the dimensions map is expected to be populated with the string names of the nodes requested, dimensions will be modified return the current value.
template <class ElemType>
template <typename ElemType>
void CNTKEval<ElemType>::GetNodeDimensions(std::map<std::wstring, size_t>& dimensions, NodeGroup nodeGroup)
{
if (m_net == NULL)
@ -137,7 +145,7 @@ void CNTKEval<ElemType>::GetNodeDimensions(std::map<std::wstring, size_t>& dimen
// StartEvaluateMinibatchLoop - Prepare network for Evaluate() calls.
// ouputNodeName - name of node that will be evaluated
template <class ElemType>
template <typename ElemType>
void CNTKEval<ElemType>::StartEvaluateMinibatchLoop(const std::wstring& outputNodeName)
{
m_net->StartEvaluateMinibatchLoop(m_net->GetNodeFromName(outputNodeName));
@ -146,7 +154,7 @@ void CNTKEval<ElemType>::StartEvaluateMinibatchLoop(const std::wstring& outputNo
// Evaluate - Evalute using the model with the given inputs and outputs
// inputs - map from node name to input vector
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
template <class ElemType>
template <typename ElemType>
void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs)
{
size_t minibatchSize = m_config(L"minibatchSize", (size_t) 10240);
@ -183,7 +191,7 @@ void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>
// Evaluate - Evalute using the model with the given inputs and outputs
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
template <class ElemType>
template <typename ElemType>
void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs)
{
// get the evaluation names from the output string
@ -206,14 +214,221 @@ void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>
eval.WriteOutput(*m_writer, outNodeNames);
}
// ResetState - Reset the cell state when we get start of an utterance
template <class ElemType>
void CNTKEval<ElemType>::ResetState()
template <typename ElemType>
void CNTKEval<ElemType>::Destroy()
{
m_start = 1 - m_start;
CNTKEvalBase<ElemType>::Destroy();
delete m_reader;
delete m_writer;
delete this;
}
// instantiate all the combinations we expect to be used
template class CNTKEval<double>;
template class CNTKEval<float>;
// ----------------------------------------------------------------------------
// Extended interface
// ----------------------------------------------------------------------------
template<typename ElemType>
VariableLayout CNTKEvalExtended<ElemType>::ToVariableLayout(const ComputationNodeBasePtr n)
{
auto matrix = dynamic_pointer_cast<Matrix<ElemType>>(n->ValuePtr());
return VariableLayout
{
/* name */ n->GetName(),
/* type */ sizeof(ElemType) == sizeof(float) ? VariableLayout::Float32 : VariableLayout::Float64,
/* storage */ matrix ? matrix->GetMatrixType() == MatrixType::DENSE ? VariableLayout::Dense :
matrix->GetMatrixType() == MatrixType::SPARSE ? VariableLayout::Sparse :
VariableLayout::Undetermined :
VariableLayout::Undetermined,
/* dimension */ n->GetSampleLayout().GetNumElements()
};
}
template<typename ElemType>
void CNTKEvalExtended<ElemType>::StartForwardEvaluation(const std::vector<wstring>& outputNodeNames)
{
m_scopedNetworkOperationMode = make_shared<ScopedNetworkOperationMode>(m_net, NetworkOperationMode::inferring);
m_outputNodes = m_net->OutputNodesByName(outputNodeNames);
m_inputNodes = m_net->InputNodesForOutputs(outputNodeNames);
// allocate memory for forward computation
m_net->AllocateAllMatrices({}, m_outputNodes, nullptr);
m_net->StartEvaluateMinibatchLoop(m_outputNodes);
m_inputMatrices = DataReaderHelpers::RetrieveInputMatrices(m_inputNodes);
for (const auto& node : m_outputNodes)
{
shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
if (outputMatrix->GetMatrixType() != MatrixType::DENSE)
RuntimeError("Sparse outputs are not supported by this API.");
}
m_started = true;
}
template<typename ElemType>
VariableSchema CNTKEvalExtended<ElemType>::GetOutputSchema() const
{
VariableSchema schema;
auto& nodes = m_started ? m_outputNodes : m_net->OutputNodes();
for (const auto& n : nodes)
{
schema.push_back(ToVariableLayout(n));
}
return schema;
}
template<typename ElemType>
VariableSchema CNTKEvalExtended<ElemType>::GetInputSchema() const
{
VariableSchema inputLayouts;
auto nodes = m_inputNodes;
if (nodes.size() == 0)
{
// Default to all nodes
nodes = m_net->InputNodesForOutputs({});
}
for (const auto& n : nodes)
{
inputLayouts.push_back(ToVariableLayout(n));
}
return inputLayouts;
}
template<typename ElemType>
template<template<typename> class ValueContainer>
void CNTKEvalExtended<ElemType>::ForwardPassT(const std::vector<ValueBuffer<ElemType, ValueContainer> >& inputs, std::vector<ValueBuffer<ElemType, ValueContainer> >& outputs)
{
    // Shared implementation behind both ForwardPass overloads (owning Values and
    // non-owning ValueRefs). Binds one value buffer per network input, runs the
    // forward pass for each requested output node, and copies the results into
    // the caller-provided output buffers.
    //
    // Preconditions: StartForwardEvaluation() must have been called, and the
    // caller must supply exactly one buffer per input and per output node.
    if (!m_started)
        RuntimeError("ForwardPass() called before StartForwardEvaluation()");

    if (inputs.size() != (size_t)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()))
        RuntimeError("Expected %d inputs, but got %d.", (int)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()), (int)inputs.size());

    if (outputs.size() != m_outputNodes.size())
        RuntimeError("Expected %d outputs, but got %d.", (int)m_outputNodes.size(), (int)outputs.size());

    size_t i = 0;
    for (auto& input : m_inputMatrices)
    {
        // const cast: The matrix class takes this over without copying and could theoretically change the contents,
        // though it doesn't in this case.
        auto& buffer = const_cast<ValueBuffer<ElemType, ValueContainer>&>(inputs[i]);
        shared_ptr<Matrix<ElemType>> matrix = dynamic_pointer_cast<Matrix<ElemType>>(input.second.matrix);
        auto type = matrix->GetMatrixType();
        int numRows = (int)input.second.sampleLayout.GetNumElements();

        if (type == MatrixType::DENSE)
        {
            // Check for emptiness first: it gives the clearer diagnostic, and it
            // guards the modulo below (a zero-size buffer trivially satisfies
            // size % numRows == 0).
            if (buffer.m_buffer.size() == 0)
                RuntimeError("Input %ls: Expected at least one element.", m_inputNodes[i]->GetName().c_str());
            // NOTE: %d with explicit (int) casts — size_t does not match %ld on
            // LLP64 platforms (Windows), which made the original format strings UB.
            if (buffer.m_buffer.size() % numRows != 0)
                RuntimeError("Input %ls: Expected input data to be a multiple of %d, but it is %d",
                             m_inputNodes[i]->GetName().c_str(), numRows, (int)buffer.m_buffer.size());
        }
        else if (type == MatrixType::SPARSE)
        {
            // CSC column-index array must have numCols + 1 entries, so at least 2.
            if (buffer.m_colIndices.size() < 2)
                RuntimeError("Input %ls: Expected at least one element.", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_colIndices[0] != 0)
                RuntimeError("Input %ls: First element of column indices must be 0", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_colIndices[buffer.m_colIndices.size() - 1] != buffer.m_indices.size())
                RuntimeError("Input %ls: Last element of column indices must be equal to the size of indices (%d), but was %d",
                             m_inputNodes[i]->GetName().c_str(), (int)buffer.m_indices.size(),
                             (int)buffer.m_colIndices[buffer.m_colIndices.size() - 1]);
        }

        // Dense: columns = elements / rows. Sparse CSC: columns = len(colIndices) - 1.
        int numCols = type == MatrixType::DENSE ? (int)(buffer.m_buffer.size() / numRows) : (int)(buffer.m_colIndices.size() - 1);
        assert(numCols >= 1);
        // Present the whole buffer as a single sequence of numCols samples.
        input.second.pMBLayout->Init(1, numCols);
        input.second.pMBLayout->AddSequence(0, 0, 0, numCols);

        if (type == MatrixType::DENSE)
            matrix->SetValue(numRows, numCols, matrix->GetDeviceId(), buffer.m_buffer.data(), matrixFlagNormal);
        else if (type == MatrixType::SPARSE)
        {
            // In the sparse case the m_data layout is identical to CUDA's CSC layout
            // (see http://docs.nvidia.com/cuda/cusparse/#compressed-sparse-column-format-csc).
            matrix->SetMatrixFromCSCFormat(buffer.m_colIndices.data(), buffer.m_indices.data(), buffer.m_buffer.data(),
                                           buffer.m_buffer.size(), numRows, numCols);
        }

        ++i;
    }

    // Invalidate cached values so the network recomputes from the new inputs.
    ComputationNetwork::BumpEvalTimeStamp(m_inputNodes);

    for (size_t j = 0; j < m_outputNodes.size(); ++j)
    {
        auto node = m_outputNodes[j];
        m_net->ForwardProp(node);
        shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
        auto pMBLayout = node->GetMBLayout();
        if (!pMBLayout)
        {
            pMBLayout = make_shared<MBLayout>();
            pMBLayout->InitAsFrameMode(1); // treat this as if we have one single sample
        }

        const auto& seq = pMBLayout->GetAllSequences();
        if (seq.size() != 1)
            RuntimeError("Only 1 output sequence supported by this API");

        ValueContainer<ElemType>& vec = outputs[j].m_buffer;

        size_t numElements = outputMatrix->GetNumElements();

        if (vec.capacity() < numElements)
        {
            // Bad luck - we can't reallocate memory of an external object at this point.
            RuntimeError("Not enough space in output buffer for output '%ls'.", node->GetName().c_str());
        }

        vec.resize(numElements);
        ElemType* data = const_cast<ElemType*>(vec.data());
        outputMatrix->CopyToArray(data, numElements);
    }
}
// ForwardPass (owning-container overload): evaluates the network for the given
// input buffers and fills the output buffers. Delegates to the shared
// ForwardPassT implementation.
template<typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& outputs)
{
    ForwardPassT(inputs, outputs);
}
// ForwardPass (reference-container overload): same contract as the Values<>
// overload; delegates to the shared ForwardPassT implementation.
template<typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& outputs)
{
    ForwardPassT(inputs, outputs);
}
// Destroy: releases base-class resources, then deletes this object.
// Instances are heap-allocated by GetEvalExtended(), so self-deletion here is
// the intended release path; callers must not touch the pointer afterwards.
template <typename ElemType>
void CNTKEvalExtended<ElemType>::Destroy()
{
    CNTKEvalBase<ElemType>::Destroy();
    delete this;
}
// Factory: allocates a new extended evaluator and returns it via *peval.
// The returned object is released by calling its Destroy() method.
template <typename ElemType>
void EVAL_API GetEvalExtended(IEvaluateModelExtended<ElemType>** peval)
{
    *peval = new CNTKEvalExtended<ElemType>();
}
// C-linkage factory for the float evaluator (for clients that cannot
// instantiate the function template across the DLL boundary).
extern "C" EVAL_API void GetEvalExtendedF(IEvaluateModelExtended<float>** peval)
{
    GetEvalExtended(peval);
}
// C-linkage factory for the double evaluator; mirrors GetEvalExtendedF.
extern "C" EVAL_API void GetEvalExtendedD(IEvaluateModelExtended<double>** peval)
{
    GetEvalExtended(peval);
}
// Explicit instantiations so the float/double evaluators are compiled into
// (and exported from) this translation unit.
template class CNTKEvalExtended<double>;
template class CNTKEvalExtended<float>;
} } }

Просмотреть файл

@ -22,48 +22,107 @@
namespace Microsoft { namespace MSR { namespace CNTK {
template <class ElemType>
class CNTKEval : public IEvaluateModel<ElemType>
template <typename ElemType>
class CNTKEvalBase : public IEvaluateModelBase<ElemType>
{
protected:
typedef shared_ptr<ComputationNode<ElemType>> ComputationNodePtr;
EvalReader<ElemType>* m_reader;
EvalWriter<ElemType>* m_writer;
ConfigParameters m_config;
ComputationNetworkPtr m_net;
std::map<std::wstring, size_t> m_dimensions;
size_t m_start;
public:
// constructor
CNTKEval()
: m_reader(nullptr), m_net(nullptr)
{
}
CNTKEvalBase() : m_net(nullptr) { }
public:
// CreateNetwork - create a network based on the network description
// networkDescription - network description
virtual void CreateNetwork(const std::string& networkDescription);
// GetNodeDimensions - Get the node dimensions of the specified nodes
// dimensions - map from name of node to dimension of the node
// nodeGroup - type of node we are requesting (input/output/specified)
virtual void GetNodeDimensions(std::map<std::wstring, size_t>& dimensions, NodeGroup nodeGroup);
// StartEvaluateMinibatchLoop - Prepare network for Evaluate() calls.
// ouputNodeName - name of node that will be evaluated
virtual void StartEvaluateMinibatchLoop(const std::wstring& outputNodeName);
// Evaluate - Evalute using the model with the given inputs and outputs
// inputs - map from node name to input vector
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs);
// Evaluate - Evalute using the model with the given inputs and outputs
// outputs - map from node name to output vector, outputs vectors need to be preallocated by caller, sizing will happen during evaluation
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs);
virtual void Init(const std::string& config);
virtual void Destroy();
virtual void ResetState();
};
// ------------------------------------------------------------------------
// Basic interface
// ------------------------------------------------------------------------
template <typename ElemType>
class CNTKEval : public CNTKEvalBase<ElemType>, public IEvaluateModel<ElemType>
{
EvalReader<ElemType>* m_reader;
EvalWriter<ElemType>* m_writer;
std::map<std::wstring, size_t> m_dimensions;
size_t m_start;
public:
CNTKEval() : CNTKEvalBase<ElemType>(), m_reader(nullptr), m_writer(nullptr) {}
virtual void GetNodeDimensions(std::map<std::wstring, size_t>& dimensions, NodeGroup nodeGroup);
virtual void StartEvaluateMinibatchLoop(const std::wstring& outputNodeName);
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs);
virtual void Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs);
virtual void Destroy() override;
virtual void CreateNetwork(const std::string& networkDescription) override
{
CNTKEvalBase<ElemType>::CreateNetwork(networkDescription);
}
virtual void Init(const std::string& config) override
{
CNTKEvalBase<ElemType>::Init(config);
m_start = 0;
}
virtual void ResetState() override
{
m_start = 1 - m_start;
}
};
// ------------------------------------------------------------------------
// Extended interface
// ------------------------------------------------------------------------
template <typename ElemType>
class CNTKEvalExtended : public CNTKEvalBase<ElemType>, public IEvaluateModelExtended<ElemType>
{
public:
CNTKEvalExtended() : CNTKEvalBase<ElemType>(), m_started(false) {}
virtual VariableSchema GetOutputSchema() const override;
virtual void StartForwardEvaluation(const std::vector<wstring>& outputs) override;
virtual VariableSchema GetInputSchema() const override;
virtual void ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& output) override;
virtual void ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& output) override;
virtual void Destroy() override;
virtual void CreateNetwork(const std::string& networkDescription) override
{
CNTKEvalBase<ElemType>::CreateNetwork(networkDescription);
}
virtual void Init(const std::string& config) override
{
CNTKEvalBase<ElemType>::Init(config);
}
private:
static VariableLayout ToVariableLayout(const ComputationNodeBasePtr n);
std::vector<ComputationNodeBasePtr> m_outputNodes;
std::shared_ptr<ScopedNetworkOperationMode> m_scopedNetworkOperationMode;
std::vector<ComputationNodeBasePtr> m_inputNodes;
StreamMinibatchInputs m_inputMatrices;
bool m_started;
template<template<typename> class ValueContainer>
void ForwardPassT(const std::vector < ValueBuffer<ElemType, ValueContainer> >& inputs,
std::vector < ValueBuffer<ElemType, ValueContainer> >& outputs);
};
} } }

Просмотреть файл

@ -55,8 +55,8 @@
<TargetName>EvalDll</TargetName>
</PropertyGroup>
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>$(SolutionDir)Source\SGDLib;$(SolutionDir)Source\ComputationNetworkLib;$(SolutionDir)Source\SequenceTrainingLib;$(SolutionDir)Source\Math;$(SolutionDir)Source\Common\Include;$(SolutionDir)Source\CNTK\BrainScript;$(SolutionDir)Source\ActionsLib;$(MSMPI_INC);$(NvmlInclude)</AdditionalIncludeDirectories>
<ClCompile>
<AdditionalIncludeDirectories>$(SolutionDir)Source\Readers\ReaderLib;$(SolutionDir)Source\SGDLib;$(SolutionDir)Source\ComputationNetworkLib;$(SolutionDir)Source\SequenceTrainingLib;$(SolutionDir)Source\Math;$(SolutionDir)Source\Common\Include;$(SolutionDir)Source\CNTK\BrainScript;$(SolutionDir)Source\ActionsLib;$(MSMPI_INC);$(NvmlInclude)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
<AdditionalLibraryDirectories>$(SolutionDir)Source\ComputationNetworkLib;$(SolutionDir)Source\Math;$(MSMPI_LIB64);$(SolutionDir)$(Platform)\$(Configuration);$(NvmlLibPath)</AdditionalLibraryDirectories>
@ -99,7 +99,7 @@
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
<AdditionalDependencies>ComputationNetworkLib.lib; Math.lib; Common.lib; ActionsLib.lib; kernel32.lib; user32.lib; shell32.lib; SequenceTrainingLib.lib; %(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>ComputationNetworkLib.lib; Math.lib; Common.lib; ActionsLib.lib; kernel32.lib; user32.lib; shell32.lib; SequenceTrainingLib.lib;ReaderLib.lib; %(AdditionalDependencies)</AdditionalDependencies>
<Profile>true</Profile>
<DelayLoadDLLs>Math.dll; nvml.dll; $(CudaRuntimeDll)</DelayLoadDLLs>
</Link>
@ -153,4 +153,4 @@
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>
</Project>

Просмотреть файл

@ -2,39 +2,18 @@
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="CNTKEval.cpp" />
<ClCompile Include="..\Common\fileutil.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="..\Common\File.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="..\Common\TimerUtility.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="dllmain.cpp">
<Filter>Misc</Filter>
</ClCompile>
<ClCompile Include="stdafx.cpp">
<Filter>Misc</Filter>
</ClCompile>
<ClCompile Include="..\Common\Config.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="..\Common\Eval.cpp">
<Filter>For External Use</Filter>
</ClCompile>
<ClCompile Include="..\Common\ExceptionWithCallStack.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="..\CNTK\BrainScript\BrainScriptEvaluator.cpp">
<Filter>BrainScript</Filter>
</ClCompile>
<ClCompile Include="..\CNTK\BrainScript\BrainScriptParser.cpp">
<Filter>BrainScript</Filter>
</ClCompile>
<ClCompile Include="..\Common\DataReader.cpp">
<Filter>Common</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="EvalReader.h" />

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше