Yongqiang Wang 2015-11-24 16:57:40 -08:00
Parents 33cf6a013f cf884dc7f7
Commit d8b03dfdf6
6 changed files with 13 additions and 20 deletions

View file

@@ -1102,7 +1102,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
 pLeft->FunctionValues() = redU;
 pRight->FunctionValues() = redVT;
-shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"-SVD", true /*createOutputMatrix*/), pLeft, pRight);
+shared_ptr<ComputationNode<ElemType>> pTimes = AddNodeToNetAndAttachInputs(New<TimesNode<ElemType>>(m_deviceId, name + L"-SVD"), pLeft, pRight);
 //========================================
 // Step 3. remove old node

View file

@@ -392,18 +392,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
 static const std::wstring TypeName() { return L"Times"; }
 public:
-// TODO: The createOutputMatrix parameter here is temporarily added to allow creating the function values
-// matrix for the times node added during SVD decomposition. Since ValidateSubNetwork is called after addition
-// of the times node, the validation crashes if the function values matrix has not yet been allocated
-// This can be removed after the Validation has been fixed to not access the function values matrix at all
 DeclareConstructorFromConfigWithNumInputs(TimesNode);
-TimesNode(DEVICEID_TYPE deviceId, const wstring & name, bool createOutputMatrix = false) :
+TimesNode(DEVICEID_TYPE deviceId, const wstring & name) :
 Base(deviceId, name)
 {
-if (createOutputMatrix)
-{
-CreateMatrixIfNull(m_functionValues);
-}
 }
 virtual void /*ComputationNode::*/ComputeInputPartial(const size_t inputIndex, const FrameRange & frameRange) override
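The TODO removed in this hunk documents why the extra flag existed: validation used to read the node's function-values matrix right after the node was inserted during SVD decomposition, so the constructor had to pre-create it. Purely as an illustration of that eager-versus-deferred allocation pattern, here is a toy, self-contained sketch with hypothetical names; it is not CNTK code:

#include <iostream>
#include <memory>
#include <stdexcept>
#include <vector>

struct ToyNode
{
    // With preallocateOutput == true the constructor creates the output
    // buffer immediately, mimicking the old createOutputMatrix flag.
    explicit ToyNode(bool preallocateOutput = false)
    {
        if (preallocateOutput)
            m_output = std::make_shared<std::vector<float>>();
    }

    // A validation pass that dereferences the output unconditionally
    // fails for nodes built without pre-allocation.
    void Validate() const
    {
        if (!m_output)
            throw std::runtime_error("output matrix not allocated");
    }

    std::shared_ptr<std::vector<float>> m_output;
};

int main()
{
    ToyNode eager(true);   // old behaviour: safe to validate immediately
    eager.Validate();

    ToyNode lazy;          // new behaviour: allocation is deferred
    try { lazy.Validate(); }
    catch (const std::exception& e) { std::cout << e.what() << "\n"; }
    return 0;
}

Once validation no longer touches the output before it is allocated, the eager path (and with it the extra constructor parameter) becomes unnecessary, which is what this commit removes.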

View file

@@ -893,8 +893,8 @@ already there from last epoch
 Starting minibatch loop.
 randomordering: 21 retries for 100 elements (21.0%) to ensure window condition
 randomordering: recached sequence for seed 11: 6, 31, ...
-Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37213734; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.65604s; TotalTimePerSample = 6.56038ms; SamplesPerSecond = 152
-Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37213734; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.656382
+Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37077690; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.65604s; TotalTimePerSample = 6.56038ms; SamplesPerSecond = 152
+Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37077689; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.656382
 CNTKCommandTrainEnd: Train
@@ -2269,8 +2269,8 @@ reading from record 0 to 100 to be positioned properly for epoch
 Starting minibatch loop.
 randomordering: 21 retries for 100 elements (21.0%) to ensure window condition
 randomordering: recached sequence for seed 11: 6, 31, ...
-Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37792297; EvalErr[0]PerSample = 0.00000000; TotalTime = 1.34518s; TotalTimePerSample = 13.45185ms; SamplesPerSecond = 74
-Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37792295; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=1.371377
+Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37650299; EvalErr[0]PerSample = 0.00000000; TotalTime = 1.34518s; TotalTimePerSample = 13.45185ms; SamplesPerSecond = 74
+Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37650299; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=1.371377
 CNTKCommandTrainEnd: Train

View file

@@ -864,8 +864,8 @@ already there from last epoch
 Starting minibatch loop.
 randomordering: 21 retries for 100 elements (21.0%) to ensure window condition
 randomordering: recached sequence for seed 11: 6, 31, ...
-Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37213734; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.08724s; TotalTimePerSample = 0.87241ms; SamplesPerSecond = 1146
-Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37213734; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.087336
+Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37077690; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.08724s; TotalTimePerSample = 0.87241ms; SamplesPerSecond = 1146
+Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37077689; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.087336
 CNTKCommandTrainEnd: Train
@@ -2182,8 +2182,8 @@ reading from record 0 to 100 to be positioned properly for epoch
 Starting minibatch loop.
 randomordering: 21 retries for 100 elements (21.0%) to ensure window condition
 randomordering: recached sequence for seed 11: 6, 31, ...
-Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37792297; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.89367s; TotalTimePerSample = 8.93670ms; SamplesPerSecond = 111
-Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37792295; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.908817
+Epoch[12 of 12]-Minibatch[ 1- 10 of 10]: SamplesSeen = 100; TrainLossPerSample = 0.37650299; EvalErr[0]PerSample = 0.00000000; TotalTime = 0.89367s; TotalTimePerSample = 8.93670ms; SamplesPerSecond = 111
+Finished Epoch[12 of 12]: [Training Set] TrainLossPerSample = 0.37650299; EvalErrPerSample = 0; Ave LearnRatePerSample = 0.004999999888; EpochTime=0.908817
 CNTKCommandTrainEnd: Train

View file

@@ -5,6 +5,7 @@ deviceId=$DeviceId$
 ndlMacros=$ConfigDir$/Macros.ndl
 parallelTrain=false
+NumCPUThreads=8
 Train=[
 action=train

View file

@@ -1,9 +1,9 @@
 dataDir: ../Data
 tags:
   # running on every BVT job in 'I' (Image) leg:
-  - bvt-i os=='windows' or device=='gpu'
+  - bvt-i device=='gpu'
   # running every Nightly job in 'I' leg
-  - nightly-i os=='windows' or device=='gpu'
+  - nightly-i device=='gpu'
 testCases:
   CNTK Run must be completed: