Changing %ws to %ls in *printf() within the MachineLearning/cn/* source
This commit is contained in:
Parent
4d46e5f8dd
Commit
d044c66fe8
|
@ -56,7 +56,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
char str[4096];
|
||||
sprintf(str, "[%lu,%lu] ", FunctionValues().GetNumRows(), FunctionValues().GetNumCols());
|
||||
fstream << string(str);
|
||||
sprintf(str, "HasComputed=%ws", HasComputed()? L"true" : L"false");
|
||||
sprintf(str, "HasComputed=%ls", HasComputed()? L"true" : L"false");
|
||||
fstream << string(str);
|
||||
|
||||
PrintNodeValuesToFile(printValues, fstream);
|
||||
|
@ -1057,7 +1057,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (m_children[0]->FunctionValues().GetNumCols() != weightCols || m_children[0]->FunctionValues().GetNumRows() != m_outputChannels)
|
||||
{
|
||||
msra::strfun::strprintf msg("convolutionWeight matrix %ws should have dimension [%d, %d] which is [outputChannels, kernelWidth * kernelHeight * inputChannels]",
|
||||
msra::strfun::strprintf msg("convolutionWeight matrix %ls should have dimension [%d, %d] which is [outputChannels, kernelWidth * kernelHeight * inputChannels]",
|
||||
m_children[0]->NodeName().c_str(), m_outputChannels, weightCols);
|
||||
throw std::logic_error(msg.c_str());
|
||||
}
|
||||
|
@ -1070,7 +1070,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (m_children[1]->FunctionValues().GetNumRows() != inputDim)
|
||||
{
|
||||
msra::strfun::strprintf msg("each column of input to the convolution node %ws is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
msra::strfun::strprintf msg("each column of input to the convolution node %ls is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
NodeName().c_str(), inputDim);
|
||||
throw std::logic_error(msg.c_str());
|
||||
}
|
||||
|
@ -1132,7 +1132,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
fstream << string(str);
|
||||
sprintf(str, "Output[Width:%lu, Height:%lu, Channels:%lu] \n", m_outputWidth, m_outputHeight, m_outputChannels);
|
||||
fstream << string(str);
|
||||
sprintf(str, "ZeroPadding=%ws maxTempMemSizeInSamples=%lu\n", m_zeroPadding? L"true" : L"false", m_maxTempMemSizeInSamples);
|
||||
sprintf(str, "ZeroPadding=%ls maxTempMemSizeInSamples=%lu\n", m_zeroPadding? L"true" : L"false", m_maxTempMemSizeInSamples);
|
||||
fstream << string(str);
|
||||
}
|
||||
|
||||
|
@ -1471,7 +1471,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (m_children[0]->FunctionValues().GetNumRows() != m_inputSizePerSample)
|
||||
{
|
||||
msra::strfun::strprintf msg("each column of input to the MaxPooling node %ws is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
msra::strfun::strprintf msg("each column of input to the MaxPooling node %ls is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
NodeName().c_str(), m_inputSizePerSample);
|
||||
throw std::logic_error(msg.c_str());
|
||||
}
|
||||
|
@ -1732,7 +1732,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
if (m_children[0]->FunctionValues().GetNumRows() != m_inputSizePerSample)
|
||||
{
|
||||
msra::strfun::strprintf msg("each column of input to the AveragePooling node %ws is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
msra::strfun::strprintf msg("each column of input to the AveragePooling node %ls is a sample and should have dimension %d, which is inputWidth * inputHeight * inputChannels",
|
||||
NodeName().c_str(), m_inputSizePerSample);
|
||||
throw std::logic_error(msg.c_str());
|
||||
}
|
||||
|
@ -2240,4 +2240,4 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
template class GMMLogLikelihoodNode<float>;
|
||||
template class GMMLogLikelihoodNode<double>;
|
||||
}}}
|
||||
}}}
|
||||
|
|
|
@ -88,7 +88,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
newNode = new LookupTableNode<ElemType>(fstream, m_deviceId, nodeName);
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ws, with name %ws\n", nodeType.c_str(), nodeName.c_str());
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ls, with name %ls\n", nodeType.c_str(), nodeName.c_str());
|
||||
throw std::invalid_argument("Invalid node type.");
|
||||
}
|
||||
|
||||
|
@ -162,7 +162,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
newNode = new LookupTableNode<ElemType>(m_deviceId, nodeName);
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ws, with name %ws\n", nodeType.c_str(), nodeName.c_str());
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ls, with name %ls\n", nodeType.c_str(), nodeName.c_str());
|
||||
throw std::invalid_argument("Invalid node type.");
|
||||
}
|
||||
|
||||
|
@ -171,4 +171,4 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
|
||||
|
||||
}}}
|
||||
}}}
|
||||
|
|
|
@ -112,7 +112,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
else //node name is not found, dump all nodes
|
||||
{
|
||||
fprintf (stderr, "Warning: node name %ws does not exist in the network. dumping all nodes.\n", nodeName.c_str());
|
||||
fprintf (stderr, "Warning: node name %ls does not exist in the network. dumping all nodes.\n", nodeName.c_str());
|
||||
DumpAllNodesToFile(printValues, outputFile);
|
||||
}
|
||||
}
|
||||
|
@ -780,7 +780,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
newNode = new GMMLogLikelihoodNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ws, with name %ws\n", nodeType.c_str(), nodeName.c_str());
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ls, with name %ls\n", nodeType.c_str(), nodeName.c_str());
|
||||
throw std::invalid_argument("Invalid node type.");
|
||||
}
|
||||
|
||||
|
@ -934,7 +934,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
newNode = new GMMLogLikelihoodNode<ElemType>(m_deviceId, nodeName);
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ws, with name %ws\n", nodeType.c_str(), nodeName.c_str());
|
||||
fprintf(stderr, "Error creating new ComputationNode of type %ls, with name %ls\n", nodeType.c_str(), nodeName.c_str());
|
||||
throw std::invalid_argument("Invalid node type.");
|
||||
}
|
||||
|
||||
|
@ -1428,7 +1428,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
fprintf (stderr, "Evaluate Node: %s\n",(msra::strfun::utf8 ((*nodeIter)->NodeName())).c_str());
|
||||
#endif
|
||||
#if DUMPOUTPUT
|
||||
fprintf(stderr,"Forward_%ws\n",(*nodeIter)->NodeName().c_str());
|
||||
fprintf(stderr,"Forward_%ls\n",(*nodeIter)->NodeName().c_str());
|
||||
#endif
|
||||
(*nodeIter)->EvaluateThisNode(); // we manage time stamp here so that derived classes don't need to worry about it
|
||||
(*nodeIter)->UpdateEvalTimeStamp();
|
||||
|
@ -1776,7 +1776,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
void ValidateNetwork(const ComputationNodePtr rootNode)
|
||||
{
|
||||
fprintf(stderr, "\n\nValidating node %ws \n", rootNode->NodeName().c_str());
|
||||
fprintf(stderr, "\n\nValidating node %ls \n", rootNode->NodeName().c_str());
|
||||
|
||||
std::list<ComputationNodePtr>& nodes = GetEvalOrder(rootNode);
|
||||
|
||||
|
@ -1908,7 +1908,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
fprintf(stderr, " nodes in the recurrent loops : \n");
|
||||
for (auto itr = (*iter).m_recurrentNodes.begin(); itr != (*iter).m_recurrentNodes.end(); itr++)
|
||||
{
|
||||
fprintf (stderr, "%ws\t", (*itr)->NodeName().c_str() );
|
||||
fprintf (stderr, "%ls\t", (*itr)->NodeName().c_str() );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1939,7 +1939,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
fprintf(stderr, " nodes in the recurrent loops : \n");
|
||||
for (auto itr = (*iter).m_recurrentNodes.begin(); itr != (*iter).m_recurrentNodes.end(); itr++)
|
||||
{
|
||||
fprintf (stderr, "%ws\t", (*itr)->NodeName().c_str() );
|
||||
fprintf (stderr, "%ls\t", (*itr)->NodeName().c_str() );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2047,7 +2047,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
size_t max_visitedOrderInLoop = 0;
|
||||
for (auto itr = (*iter).m_recurrentNodes.begin(); itr != (*iter).m_recurrentNodes.end(); itr++)
|
||||
{
|
||||
fprintf (stderr, "%ws\t", (*itr)->NodeName().c_str() );
|
||||
fprintf (stderr, "%ls\t", (*itr)->NodeName().c_str() );
|
||||
if (max_visitedOrderInLoop < (*itr)->GetVisitedOrder())
|
||||
{
|
||||
max_visitedOrderInLoop = (*itr)->GetVisitedOrder();
|
||||
|
@ -2135,7 +2135,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
fprintf(stderr, "Reordered nodes\n");
|
||||
for (auto itr = nodes.begin(); itr != nodes.end(); itr++)
|
||||
{
|
||||
fprintf (stderr, "%ws\n", (*itr)->NodeName().c_str() );
|
||||
fprintf (stderr, "%ls\n", (*itr)->NodeName().c_str() );
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
|
|
@ -290,7 +290,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
//for debugging purpose
|
||||
virtual void PrintSelf(bool printMatrices = false) const
|
||||
{
|
||||
fprintf(stderr, "\n%ws[%lu, %lu] = %ws", NodeName().c_str(), FunctionValues().GetNumRows(), FunctionValues().GetNumCols(), OperationName().c_str());
|
||||
fprintf(stderr, "\n%ls[%lu, %lu] = %ls", NodeName().c_str(), FunctionValues().GetNumRows(), FunctionValues().GetNumCols(), OperationName().c_str());
|
||||
|
||||
if (!IsLeaf())
|
||||
{
|
||||
|
@ -299,7 +299,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
if (i > 0)
|
||||
fprintf(stderr, ", ");
|
||||
fprintf(stderr, "%ws[%lu, %lu]", Inputs(i)?Inputs(i)->NodeName().c_str():L"NULL", Inputs(i)->FunctionValues().GetNumRows(), Inputs(i)->FunctionValues().GetNumCols());
|
||||
fprintf(stderr, "%ls[%lu, %lu]", Inputs(i)?Inputs(i)->NodeName().c_str():L"NULL", Inputs(i)->FunctionValues().GetNumRows(), Inputs(i)->FunctionValues().GetNumCols());
|
||||
}
|
||||
fprintf(stderr, ")");
|
||||
}
|
||||
|
@ -417,7 +417,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
(msra::strfun::utf8 (child->NodeName())).c_str());
|
||||
#endif
|
||||
#if DUMPOUTPUT
|
||||
fprintf(stderr,"Backprop%d_%ws\n",i,NodeName().c_str());
|
||||
fprintf(stderr,"Backprop%d_%ls\n",i,NodeName().c_str());
|
||||
#endif
|
||||
ComputeInputPartial(i); //this computes partial wrt to the child and sums the gradient value in the child
|
||||
}
|
||||
|
@ -600,7 +600,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
virtual void PrintSelfBeforeValidation(bool allowNulls=false) const
|
||||
{
|
||||
fprintf(stderr, "\nValidating --> %ws = %ws", NodeName().c_str(), OperationName().c_str());
|
||||
fprintf(stderr, "\nValidating --> %ls = %ls", NodeName().c_str(), OperationName().c_str());
|
||||
|
||||
if (!IsLeaf())
|
||||
{
|
||||
|
@ -623,10 +623,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
|
||||
if (IsChildAnImage(i)) //image
|
||||
fprintf(stderr, "%ws[%lu {W=%lu, H=%lu, C=%lu}, %lu]", child->NodeName().c_str(), child->FunctionValues().GetNumRows(),
|
||||
fprintf(stderr, "%ls[%lu {W=%lu, H=%lu, C=%lu}, %lu]", child->NodeName().c_str(), child->FunctionValues().GetNumRows(),
|
||||
child->m_outputWidth, child->m_outputHeight, child->m_outputChannels, child->FunctionValues().GetNumCols());
|
||||
else
|
||||
fprintf(stderr, "%ws[%lu, %lu]", child->NodeName().c_str(), child->FunctionValues().GetNumRows(), child->FunctionValues().GetNumCols());
|
||||
fprintf(stderr, "%ls[%lu, %lu]", child->NodeName().c_str(), child->FunctionValues().GetNumRows(), child->FunctionValues().GetNumCols());
|
||||
|
||||
}
|
||||
fprintf(stderr, ")");
|
||||
|
|
|
@ -842,7 +842,7 @@ void PTaskGraphBuilder<ElemType>::CreateTaskDescriptorsForComputationNodes()
|
|||
ComputationNodePtr node = *nodeIter;
|
||||
std::wstring opName = node->OperationName();
|
||||
|
||||
if (m_verbosity >= 1) fprintf(stderr, " %ws(%ws): ",
|
||||
if (m_verbosity >= 1) fprintf(stderr, " %ls(%ls): ",
|
||||
opName.c_str(), node->NodeName().c_str());
|
||||
|
||||
// Learnable parameter node types.
|
||||
|
@ -905,7 +905,7 @@ void PTaskGraphBuilder<ElemType>::CreateTaskDescriptorsForComputationNodes()
|
|||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "PTaskGraphBuilder does not (yet) support ComputationNode type %ws.\n",
|
||||
fprintf(stderr, "PTaskGraphBuilder does not (yet) support ComputationNode type %ls.\n",
|
||||
opName.c_str());
|
||||
throw exception("Unsupported computation node type");
|
||||
}
|
||||
|
|
|
@ -309,12 +309,12 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (startEpoch >= 0)
|
||||
{
|
||||
wstring modelFileName = GetModelNameForEpoch(int(startEpoch)-1);
|
||||
fprintf(stderr, "Starting from checkpoint. Load Network From File %ws.\n", modelFileName.c_str());
|
||||
fprintf(stderr, "Starting from checkpoint. Load Network From File %ls.\n", modelFileName.c_str());
|
||||
net.LoadFromFile(modelFileName);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Load Network From the original model file %ws.\n", origModelFileName.c_str());
|
||||
fprintf(stderr, "Load Network From the original model file %ls.\n", origModelFileName.c_str());
|
||||
net.LoadFromFile(origModelFileName);
|
||||
}
|
||||
|
||||
|
@ -324,7 +324,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_needRegularization = m_adaptationRegType != AdaptationRegType::None && m_adaptationRegWeight > 0;
|
||||
if (m_needRegularization)
|
||||
{
|
||||
fprintf(stderr, "Load reference Network From the original model file %ws.\n", origModelFileName.c_str());
|
||||
fprintf(stderr, "Load reference Network From the original model file %ls.\n", origModelFileName.c_str());
|
||||
refNet.LoadFromFile(origModelFileName);
|
||||
}
|
||||
|
||||
|
@ -355,7 +355,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
wstring modelFileName = GetModelNameForEpoch(int(startEpoch)-1);
|
||||
if (startEpoch >= 0)
|
||||
fprintf(stderr, "Starting from checkpoint. Load Network From File %ws.\n", modelFileName.c_str());
|
||||
fprintf(stderr, "Starting from checkpoint. Load Network From File %ls.\n", modelFileName.c_str());
|
||||
ComputationNetwork<ElemType>& net =
|
||||
startEpoch<0? netBuilder->BuildNetworkFromDescription() : netBuilder->LoadNetworkFromFile(modelFileName);
|
||||
startEpoch = max(startEpoch, 0);
|
||||
|
@ -569,9 +569,9 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
for (size_t j = 0; j < epochEvalErrors.size(); j++)
|
||||
fprintf(stderr, "[%lu]=%.8g ", j, epochEvalErrors[j]);
|
||||
fprintf(stderr, "Ave Learn Rate Per Sample = %.10g Epoch Time=%.8g\n", learnRatePerSample, epochTime);
|
||||
fprintf(stderr, "Finished Epoch[%lu]: Criterion Node [%ws] Per Sample = %.8g\n", i + 1, criterionNodes[0]->NodeName().c_str(), epochCriterion);
|
||||
fprintf(stderr, "Finished Epoch[%lu]: Criterion Node [%ls] Per Sample = %.8g\n", i + 1, criterionNodes[0]->NodeName().c_str(), epochCriterion);
|
||||
for (size_t j = 0; j < epochEvalErrors.size(); j++)
|
||||
fprintf(stderr, "Finished Epoch[%lu]: Evaluation Node [%ws] Per Sample = %.8g\n", i + 1, evalNodeNames[j].c_str(), epochEvalErrors[j]);
|
||||
fprintf(stderr, "Finished Epoch[%lu]: Evaluation Node [%ls] Per Sample = %.8g\n", i + 1, evalNodeNames[j].c_str(), epochEvalErrors[j]);
|
||||
}
|
||||
|
||||
if (validationSetDataReader != trainSetDataReader && validationSetDataReader != nullptr)
|
||||
|
@ -697,7 +697,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
for (auto nodeIter = nodes.begin(); nodeIter != nodes.end(); nodeIter++)
|
||||
{
|
||||
PreComputedNode<ElemType>* node = static_cast<PreComputedNode<ElemType>*> (*nodeIter);
|
||||
fprintf(stderr, "\tNodeName: %ws\n", (node->NodeName()).c_str());
|
||||
fprintf(stderr, "\tNodeName: %ls\n", (node->NodeName()).c_str());
|
||||
}
|
||||
|
||||
//compute
|
||||
|
@ -1143,7 +1143,7 @@ protected:
|
|||
void UpdateWeights(const ComputationNodePtr node, Matrix<ElemType>& smoothedGradient, const ElemType learnRatePerSample, const size_t actualMBSize, const size_t expectedMBSize) const
|
||||
{
|
||||
#if DUMPOUTPUT
|
||||
fprintf(stderr, "Update_%ws\n",node->NodeName().c_str());
|
||||
fprintf(stderr, "Update_%ls\n",node->NodeName().c_str());
|
||||
#endif
|
||||
UpdateWeightsS(this, node->FunctionValues(), node->GradientValues(), smoothedGradient, learnRatePerSample, actualMBSize, expectedMBSize);
|
||||
node->UpdateEvalTimeStamp();
|
||||
|
@ -1330,7 +1330,7 @@ protected:
|
|||
irow = max(0, irow);
|
||||
icol = max(0, icol);
|
||||
|
||||
fprintf(stderr, "\n###### d%ws######\n", node->NodeName().c_str());
|
||||
fprintf(stderr, "\n###### d%ls######\n", node->NodeName().c_str());
|
||||
// node->FunctionValues().Print();
|
||||
ElemType eOrg = node->FunctionValues()(irow,icol);
|
||||
|
||||
|
@ -1363,7 +1363,7 @@ protected:
|
|||
bool wrong = (std::isnan(diff) || diff > threshold);
|
||||
if (wrong)
|
||||
{
|
||||
fprintf (stderr, "\nd%ws Numeric gradient = %e, Error BP gradient = %e\n", node->NodeName().c_str(), eGradNum, eGradErr);
|
||||
fprintf (stderr, "\nd%ls Numeric gradient = %e, Error BP gradient = %e\n", node->NodeName().c_str(), eGradNum, eGradErr);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -1453,4 +1453,4 @@ protected:
|
|||
template class SGD<float>;
|
||||
template class SGD<double>;
|
||||
|
||||
}}}
|
||||
}}}
|
||||
|
|
|
@ -334,7 +334,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
for (size_t i=0; i<evalResults.size(); i++)
|
||||
{
|
||||
fprintf(stderr, "%ws/Sample = %.8g ", evalNodes[i]->NodeName().c_str(), (evalResults[i]-evalResultsLastMBs[i])/numSamplesLastMBs);
|
||||
fprintf(stderr, "%ls/Sample = %.8g ", evalNodes[i]->NodeName().c_str(), (evalResults[i]-evalResultsLastMBs[i])/numSamplesLastMBs);
|
||||
}
|
||||
|
||||
fprintf(stderr, "\n");
|
||||
|
|
|
@ -183,11 +183,11 @@ void DoCrossValidate(const ConfigParameters& config)
|
|||
bool finalModelEvaluated = false;
|
||||
for (size_t i=cvInterval[0]; i<=cvInterval[2]; i+=cvInterval[1])
|
||||
{
|
||||
wstring cvModelPath = msra::strfun::wstrprintf (L"%ws.%lld", modelPath.c_str(), i);
|
||||
wstring cvModelPath = msra::strfun::wstrprintf (L"%ls.%lld", modelPath.c_str(), i);
|
||||
|
||||
if (!fexists (cvModelPath))
|
||||
{
|
||||
fprintf(stderr, "model %ws does not exist.\n", cvModelPath.c_str());
|
||||
fprintf(stderr, "model %ls does not exist.\n", cvModelPath.c_str());
|
||||
if (finalModelEvaluated || !fexists (modelPath))
|
||||
continue; // file missing
|
||||
else
|
||||
|
@ -204,7 +204,7 @@ void DoCrossValidate(const ConfigParameters& config)
|
|||
|
||||
SimpleEvaluator<ElemType> eval(net, numMBsToShowResult, traceLevel);
|
||||
|
||||
fprintf(stderr, "model %ws --> \n",cvModelPath.c_str());
|
||||
fprintf(stderr, "model %ls --> \n",cvModelPath.c_str());
|
||||
std::vector<ElemType> evalErrors;
|
||||
evalErrors = eval.Evaluate(cvDataReader, evalNodeNamesVector, mbSize[0], epochSize);
|
||||
cvErrorResults.push_back(evalErrors);
|
||||
|
@ -242,7 +242,7 @@ void DoCrossValidate(const ConfigParameters& config)
|
|||
fprintf(stderr,"------------\n");
|
||||
for (int i=0; i < minErrors.size(); ++i)
|
||||
{
|
||||
fprintf(stderr,"Based on Err[%d]: Best model = %ws with min err %.8g\n", i, cvModels[minErrIds[i]].c_str(), minErrors[i]);
|
||||
fprintf(stderr,"Based on Err[%d]: Best model = %ls with min err %.8g\n", i, cvModels[minErrIds[i]].c_str(), minErrors[i]);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in new issue