//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// CNTKEval.cpp : Defines the exported functions for the CNTK DLL.
//

#define __STDC_FORMAT_MACROS
#include <inttypes.h>

#include <stdio.h>
#include <math.h>

#define EVAL_EXPORTS // creating the exports here
#include "Eval.h"
#include "Actions.h"
#include "CNTKEval.h"
#include "CPUMatrix.h" // for SetNumThreads()
#include "SimpleOutputWriter.h"
#include "NDLNetworkBuilder.h"
#ifdef LEAKDETECT
#include <vld.h> // leak detection
#endif

#include "BestGpu.h"
#include "MPIWrapper.h"

#include "DataDeserializer.h"
#include "SequencePacker.h"
#include "NoRandomizer.h"
#include "HeapMemoryProvider.h"
#include "InputAndParamNodes.h"
#include "latticearchive.h"

#include <limits>

// TODO: Temporary mechanism to enable memory sharing for
// node output value matrices. This will go away when the
// sharing is ready to be enabled by default
bool g_shareNodeValueMatrices = false;

namespace Microsoft { namespace MSR { namespace CNTK {

template <typename ElemType>
void CNTKEvalBase<ElemType>::Init(const std::string& config)
{
    m_config.Parse(config);
    size_t nThreads = m_config("numCPUThreads", "1");
    CPUMatrix<ElemType>::SetNumThreads(nThreads);

    g_shareNodeValueMatrices = m_config(L"shareNodeValueMatrices", false);
}
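
// Illustration only (hypothetical values): the Init() config string uses the usual
// CNTK "key=value" syntax, e.g.
//     numCPUThreads=4
//     shareNodeValueMatrices=true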

// CreateNetwork - create a network based on the network description
// networkDescription - network description
template <typename ElemType>
void CNTKEvalBase<ElemType>::CreateNetwork(const std::string& networkDescription)
{
    ConfigParameters config;
    config.Parse(networkDescription);

    std::vector<wstring> outputNodeNames;
    this->m_net = GetModelFromConfig<ConfigParameters, ElemType>(config, L"outputNodeNames", outputNodeNames);

    if (this->m_net == nullptr)
    {
        LogicError("Unable to construct network from description");
    }
}
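
// Illustrative network description for CreateNetwork() (the model path is hypothetical):
//     modelPath="mymodel.dnn"
// An outputNodeNames entry may also be given; it is read above to select the output nodes.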

// Destroy - cleanup and remove this class
// NOTE: this destroys the object, and it can't be used past this point
template <typename ElemType>
void CNTKEvalBase<ElemType>::Destroy()
{
    // cleanup everything
    this->m_net.reset();
}

// ----------------------------------------------------------------------------
// Basic interface
// ----------------------------------------------------------------------------

template <typename ElemType>
void EVAL_API GetEval(IEvaluateModel<ElemType>** peval)
{
    *peval = new CNTKEval<ElemType>();
}

extern "C" EVAL_API void GetEvalF(IEvaluateModel<float>** peval)
{
    GetEval(peval);
}

extern "C" EVAL_API void GetEvalD(IEvaluateModel<double>** peval)
{
    GetEval(peval);
}
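
// Minimal caller-side sketch of the basic interface (illustrative only; error handling
// and the model path are omitted/hypothetical):
//
//     IEvaluateModel<float>* model = nullptr;
//     GetEvalF(&model);
//     model->Init("numCPUThreads=1");
//     model->CreateNetwork("modelPath=\"mymodel.dnn\"");
//     std::map<std::wstring, size_t> inDims, outDims;
//     model->GetNodeDimensions(inDims, nodeInput);
//     model->GetNodeDimensions(outDims, nodeOutput);
//     // ... fill input vectors, call Evaluate(), then ...
//     model->Destroy();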

// GetNodeDimensions - Get the node dimensions of the specified nodes
// dimensions - map from name of node to dimension of the node, will be appended to for Input/Output scenarios
// nodeGroup - type of node we are requesting (input/output/specified)
// NOTE: when nodeGroup==specified, the dimensions map is expected to be populated with the string names of the nodes requested; dimensions will be modified to return the current values.
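// For example (node names are hypothetical), a caller using nodeSpecified would first do:
//     dimensions[L"features"] = 0; // value is ignored and overwritten
//     dimensions[L"out.z"] = 0;
// and this call then fills in the sample-matrix row count of each named node.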
template <typename ElemType>
void CNTKEval<ElemType>::GetNodeDimensions(std::map<std::wstring, size_t>& dimensions, NodeGroup nodeGroup)
{
    // On Linux with gcc 4.8.4, "this->" must be added when referencing m_net, a protected member of the templated base class,
    // so that the compiler resolves the name correctly.
    if (this->m_net == NULL)
    {
        for (auto iter = dimensions.begin(); iter != dimensions.end(); iter++)
            iter->second = 0;
        return;
    }

    const auto& outputNodes = this->m_net->OutputNodes();
    switch (nodeGroup)
    {
    case nodeInput:
    {
        if (outputNodes.size() == 0)
        {
            LogicError("No Output nodes found: Cannot determine Input node dimensions due to lack of Output nodes.\n(are 'outputNodeNames' and/or 'OutputNodes' properly defined in the configuration file?)");
        }

        auto& nodes = this->m_net->InputNodes(outputNodes[0]);
        for (auto& node : nodes)
        {
            std::wstring name = node->NodeName();
            size_t size = node->GetSampleMatrixNumRows();
            dimensions[name] = size;
        }
        break;
    }
    case nodeOutput:
    {
        const auto& nodes = outputNodes;
        for (auto& node : nodes)
        {
            std::wstring name = node->NodeName();
            size_t size = node->GetSampleMatrixNumRows();
            dimensions[name] = size;
        }
        break;
    }
    case nodeSpecified:
        for (auto iter = dimensions.begin(); iter != dimensions.end(); iter++)
        {
            auto node = this->m_net->GetNodeFromName(iter->first);
            iter->second = node->GetSampleMatrixNumRows();
        }
        break;
    }
}

// StartEvaluateMinibatchLoop - Prepare network for Evaluate() calls.
// outputNodeName - name of node that will be evaluated
template <typename ElemType>
void CNTKEval<ElemType>::StartEvaluateMinibatchLoop(const std::wstring& outputNodeName)
{
    this->m_net->StartEvaluateMinibatchLoop(this->m_net->GetNodeFromName(outputNodeName));
}

// Evaluate - Evaluate using the model with the given inputs and outputs
// inputs - map from node name to input vector
// outputs - map from node name to output vector; output vectors need to be preallocated by the caller, sizing will happen during evaluation
template <typename ElemType>
void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>& inputs, std::map<std::wstring, std::vector<ElemType>*>& outputs)
{
    size_t minibatchSize = this->m_config(L"minibatchSize", (size_t) 10240);

    // get the evaluation names from the output string
    vector<wstring> outNodeNames;

    ConfigParameters config;
    // config["deviceId"] = to_string(this->m_net->GetDeviceId());

    // create the reader if necessary
    if (m_reader == nullptr)
    {
        m_reader = new EvalReader<ElemType>(config);
    }

    // now set the data in the reader
    GetNodeDimensions(m_dimensions, nodeInput);
    m_reader->SetData(&inputs, &m_dimensions);
    m_reader->SetBoundary(m_start);

    // create the writer if necessary
    if (m_writer == nullptr)
    {
        m_writer = new EvalWriter<ElemType>(config);
    }

    // now set the data in the writer
    GetNodeDimensions(m_dimensions, nodeOutput);
    m_writer->SetData(&outputs, &m_dimensions);

    // call the evaluator
    SimpleOutputWriter<ElemType> eval(this->m_net);
    eval.WriteOutput(*m_reader, minibatchSize, *m_writer, outNodeNames);
}
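
// Caller-side sketch for Evaluate() (illustrative only; node names and dimensions are hypothetical):
//
//     std::vector<float> features(inputDim);   // inputDim from GetNodeDimensions(..., nodeInput)
//     std::vector<float> result(outputDim);    // outputDim from GetNodeDimensions(..., nodeOutput)
//     std::map<std::wstring, std::vector<float>*> inputs  = { { L"features", &features } };
//     std::map<std::wstring, std::vector<float>*> outputs = { { L"out.z", &result } };
//     model->Evaluate(inputs, outputs);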

// Evaluate - Evaluate using the model with the given outputs
// outputs - map from node name to output vector; output vectors need to be preallocated by the caller, sizing will happen during evaluation
template <typename ElemType>
void CNTKEval<ElemType>::Evaluate(std::map<std::wstring, std::vector<ElemType>*>& outputs)
{
    // get the evaluation names from the output string
    vector<wstring> outNodeNames;

    ConfigParameters config;

    // create the writer if necessary
    if (m_writer == nullptr)
    {
        m_writer = new EvalWriter<ElemType>(config);
    }

    // now set the data in the writer
    GetNodeDimensions(m_dimensions, nodeOutput);
    m_writer->SetData(&outputs, &m_dimensions);

    // call the evaluator
    SimpleOutputWriter<ElemType> eval(this->m_net);
    eval.WriteOutput(*m_writer, outNodeNames);
}

template <typename ElemType>
void CNTKEval<ElemType>::Destroy()
{
    CNTKEvalBase<ElemType>::Destroy();
    delete m_reader;
    delete m_writer;
    delete this;
}

// instantiate all the combinations we expect to be used
template class CNTKEval<double>;
template class CNTKEval<float>;

// ----------------------------------------------------------------------------
// Extended interface
// ----------------------------------------------------------------------------

template <typename ElemType>
VariableLayout CNTKEvalExtended<ElemType>::ToVariableLayout(const ComputationNodeBasePtr n)
{
    auto matrix = dynamic_pointer_cast<Matrix<ElemType>>(n->ValuePtr());
    return VariableLayout
    {
        /* name */      n->GetName(),
        /* type */      sizeof(ElemType) == sizeof(float) ? VariableLayout::Float32 : VariableLayout::Float64,
        /* storage */   matrix ? matrix->GetMatrixType() == MatrixType::DENSE ? VariableLayout::Dense :
                                 matrix->GetMatrixType() == MatrixType::SPARSE ? VariableLayout::Sparse :
                                 VariableLayout::Undetermined :
                        VariableLayout::Undetermined,
        /* dimension */ n->GetSampleLayout().GetNumElements()
    };
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::StartForwardEvaluation(const std::vector<wstring>& outputNodeNames)
{
    m_scopedNetworkOperationMode = make_shared<ScopedNetworkOperationMode>(this->m_net, NetworkOperationMode::inferring);
    m_outputNodes = this->m_net->OutputNodesByName(outputNodeNames);
    m_inputNodes = this->m_net->InputNodesForOutputs(outputNodeNames);

    // allocate memory for forward computation
    this->m_net->AllocateAllMatrices({}, m_outputNodes, nullptr);
    this->m_net->StartEvaluateMinibatchLoop(m_outputNodes);
    m_inputMatrices = DataReaderHelpers::RetrieveInputMatrices(m_inputNodes);

    for (const auto& node : m_outputNodes)
    {
        shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
        if (outputMatrix->GetMatrixType() != MatrixType::DENSE)
            RuntimeError("Sparse outputs are not supported by this API.");
    }

    m_started = true;
}

template <typename ElemType>
VariableSchema CNTKEvalExtended<ElemType>::GetOutputSchema() const
{
    VariableSchema schema;
    auto& nodes = m_started ? m_outputNodes : this->m_net->OutputNodes();
    for (const auto& n : nodes)
    {
        schema.push_back(ToVariableLayout(n));
    }
    return schema;
}

template <typename ElemType>
VariableSchema CNTKEvalExtended<ElemType>::GetInputSchema() const
{
    VariableSchema inputLayouts;
    auto nodes = m_inputNodes;
    if (nodes.size() == 0)
    {
        // Default to all nodes
        nodes = this->m_net->InputNodesForOutputs({});
    }

    for (const auto& n : nodes)
    {
        inputLayouts.push_back(ToVariableLayout(n));
    }
    return inputLayouts;
}

template <typename ElemType>
template <template <typename> class ValueContainer>
void CNTKEvalExtended<ElemType>::ForwardPassT(const std::vector<ValueBuffer<ElemType, ValueContainer>>& inputs, std::vector<ValueBuffer<ElemType, ValueContainer>>& outputs, bool resetRNN)
{
    if (!m_started)
        RuntimeError("ForwardPass() called before StartForwardEvaluation()");

    if (inputs.size() != (size_t)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()))
        RuntimeError("Expected %d inputs, but got %d.", (int)std::distance(m_inputMatrices.begin(), m_inputMatrices.end()), (int)inputs.size());

    if (outputs.size() != m_outputNodes.size())
        RuntimeError("Expected %d outputs, but got %d.", (int)m_outputNodes.size(), (int)outputs.size());

    size_t i = 0;
    for (auto& inputNode : m_inputNodes)
    {
        // const cast: The matrix class takes this over without copying and could theoretically change the contents,
        // though it doesn't in this case.
        auto& buffer = const_cast<ValueBuffer<ElemType, ValueContainer>&>(inputs[i]);
        auto matrix = dynamic_pointer_cast<Matrix<ElemType>>(inputNode->ValuePtr());
        auto type = matrix->GetMatrixType();
        size_t numRows = inputNode->GetSampleLayout().GetNumElements();

        if (buffer.m_buffer.data() == nullptr)
            RuntimeError("Input %ls: Buffer is not allocated.", m_inputNodes[i]->GetName().c_str());

        if (type == MatrixType::DENSE)
        {
            if (buffer.m_buffer.size() % numRows != 0)
                RuntimeError("Input %ls: Expected input data to be a multiple of %" PRIu64 ", but it is %" PRIu64 ".",
                             m_inputNodes[i]->GetName().c_str(), numRows, buffer.m_buffer.size());
            if (buffer.m_buffer.size() == 0)
                RuntimeError("Input %ls: Expected at least one element.", m_inputNodes[i]->GetName().c_str());
        }
        else if (type == MatrixType::SPARSE)
        {
            if (buffer.m_colIndices.data() == nullptr)
                RuntimeError("Input %ls: Due to sparse input format, expected colIndices array, but was nullptr.", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_indices.data() == nullptr)
                RuntimeError("Input %ls: Due to sparse input format, expected Indices array, but was nullptr.", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_colIndices.size() < 2)
                RuntimeError("Input %ls: Expected at least one element (2 entries in colIndices array).", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_colIndices[0] != 0)
                RuntimeError("Input %ls: First element of column indices must be 0", m_inputNodes[i]->GetName().c_str());
            if (buffer.m_colIndices[buffer.m_colIndices.size() - 1] != buffer.m_indices.size())
                RuntimeError("Input %ls: Last element of column indices must be equal to the size of indices (%ld), but was %d",
                             m_inputNodes[i]->GetName().c_str(), buffer.m_indices.size(),
                             buffer.m_colIndices[buffer.m_colIndices.size() - 1]);
        }

        int numCols = type == MatrixType::DENSE ? buffer.m_buffer.size() / numRows : buffer.m_colIndices.size() - 1;
        assert(numCols >= 1);
        inputNode->GetMBLayout()->Init(1, numCols);

        // INT_MIN is used to specify the lower bound of look-back step of recurrent nodes
        inputNode->GetMBLayout()->AddSequence(0, 0, resetRNN ? 0 : INT_MIN, numCols);

        if (type == MatrixType::DENSE)
            matrix->SetValue(numRows, numCols, matrix->GetDeviceId(), buffer.m_buffer.data(), matrixFlagNormal);
        else if (type == MatrixType::SPARSE)
        {
            // In the sparse case the m_data layout is identical to CUDA's CSC layout
            // (see http://docs.nvidia.com/cuda/cusparse/#compressed-sparse-column-format-csc).
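            // Illustrative example (hypothetical values): a 3x2 matrix with nonzeros
            // (row 0, col 0) = 1.0 and (row 2, col 1) = 5.0 would be passed as
            //     m_buffer     = { 1.0, 5.0 }  // nonzero values
            //     m_indices    = { 0, 2 }      // row index of each value
            //     m_colIndices = { 0, 1, 2 }   // start offset of each column, plus the total count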
            matrix->SetMatrixFromCSCFormat(buffer.m_colIndices.data(), buffer.m_indices.data(), buffer.m_buffer.data(),
                                           buffer.m_buffer.size(), numRows, numCols);
        }

        ++i;
    }

    ComputationNetwork::BumpEvalTimeStamp(m_inputNodes);

    for (size_t i = 0; i < m_outputNodes.size(); ++i)
    {
        auto node = m_outputNodes[i];
        this->m_net->ForwardProp(node);

        shared_ptr<Matrix<ElemType>> outputMatrix = dynamic_pointer_cast<Matrix<ElemType>>(node->ValuePtr());
        auto pMBLayout = node->GetMBLayout();

        if (!pMBLayout)
        {
            pMBLayout = make_shared<MBLayout>();
            pMBLayout->InitAsFrameMode(1); // treat this as if we have one single sample
        }

        const auto& seq = pMBLayout->GetAllSequences();
        if (seq.size() != 1)
            RuntimeError("Only 1 output sequence supported by this API");

        ValueContainer<ElemType>& vec = outputs[i].m_buffer;

        size_t numElements = outputMatrix->GetNumElements();

        if (vec.capacity() < numElements)
        {
            // Bad luck - we can't reallocate memory of an external object at this point.
            RuntimeError("Not enough space in output buffer for output '%ls'.", node->GetName().c_str());
        }

        vec.resize(numElements);
        ElemType* data = const_cast<ElemType*>(vec.data());
        outputMatrix->CopyToArray(data, numElements);
    }
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& outputs)
{
    ForwardPassT(inputs, outputs, false);
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const Values<ElemType>& inputs, Values<ElemType>& outputs, bool resetRNN)
{
    ForwardPassT(inputs, outputs, resetRNN);
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& outputs)
{
    ForwardPassT(inputs, outputs, false);
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::ForwardPass(const ValueRefs<ElemType>& inputs, ValueRefs<ElemType>& outputs, bool resetRNN)
{
    ForwardPassT(inputs, outputs, resetRNN);
}

template <typename ElemType>
void CNTKEvalExtended<ElemType>::Destroy()
{
    // Since m_scopedNetworkOperationMode holds a reference to m_net, it has to be released first.
    m_scopedNetworkOperationMode.reset();
    CNTKEvalBase<ElemType>::Destroy();
    delete this;
}

template <typename ElemType>
void EVAL_API GetEvalExtended(IEvaluateModelExtended<ElemType>** peval)
{
    *peval = new CNTKEvalExtended<ElemType>();
}

extern "C" EVAL_API void GetEvalExtendedF(IEvaluateModelExtended<float>** peval)
{
    GetEvalExtended(peval);
}

extern "C" EVAL_API void GetEvalExtendedD(IEvaluateModelExtended<double>** peval)
{
    GetEvalExtended(peval);
}
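
// Minimal caller-side sketch of the extended interface (illustrative only; the node name
// and model path are hypothetical, error handling and buffer sizing are elided):
//
//     IEvaluateModelExtended<float>* model = nullptr;
//     GetEvalExtendedF(&model);
//     model->CreateNetwork("modelPath=\"mymodel.dnn\"");
//     model->StartForwardEvaluation({ L"out.z" });
//     Values<float> inputs(model->GetInputSchema().size());
//     Values<float> outputs(model->GetOutputSchema().size());
//     // ... fill inputs[k].m_buffer (dense) or the CSC arrays (sparse), reserve output buffers ...
//     model->ForwardPass(inputs, outputs);
//     model->Destroy();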

template class CNTKEvalExtended<double>;
template class CNTKEvalExtended<float>;
} } }