//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// This is the main header of the CNTK library API containing the entire public API definition.
//
# pragma once
2016-06-30 10:22:37 +03:00
# ifdef SWIG
# define final
# define explicit
# define static_assert(condition, message)
# endif
2016-06-29 12:45:15 +03:00
2016-05-31 16:40:40 +03:00
# include "CNTKLibraryInternals.h"
# include <memory>
# include <vector>
# include <array>
# include <stdarg.h>
# include <assert.h>
2016-08-18 15:33:38 +03:00
# include <map>
2016-05-31 16:40:40 +03:00
# include <unordered_map>
# include <unordered_set>
2016-06-05 04:26:52 +03:00
# include <string>
2016-07-08 07:18:20 +03:00
# include <sstream>
2016-08-18 15:33:38 +03:00
# include <iosfwd>
2016-07-09 04:14:05 +03:00
# include <algorithm>
2016-05-31 16:40:40 +03:00
namespace CNTK
{
///
/// Enumeration type denoting data type of symbolic data entities or actual data.
///
enum class DataType
{
2016-06-05 04:26:52 +03:00
Unknown ,
2016-05-31 16:40:40 +03:00
Float ,
Double ,
/* TODO:
Bit ,
Char ,
UChar ,
Short ,
UShort ,
Int ,
UInt ,
Long ,
ULong ,
Float8 ,
Float16 ,
Complex ,
String ,
*/
} ;
///
/// Get the 'DataType' corresponding to the ElementType template type argument.
///
template < typename ElementType >
2016-06-11 22:21:15 +03:00
inline DataType AsDataType ( )
2016-05-31 16:40:40 +03:00
{
if ( std : : is_same < ElementType , float > ( ) )
return DataType : : Float ;
else if ( std : : is_same < ElementType , double > ( ) )
return DataType : : Double ;
else
NOT_IMPLEMENTED ;
}
2016-09-02 16:12:07 +03:00
inline const char * DataTypeName ( DataType dataType )
{
if ( dataType = = DataType : : Float )
return " Float " ;
else if ( dataType = = DataType : : Double )
return " Double " ;
else
LogicError ( " Unknown DataType " ) ;
}
2016-05-31 16:40:40 +03:00
///
/// Enumeration type denoting the format of storage underlying an instance of a NDArrayView.
///
enum class StorageFormat
{
Dense ,
2016-06-09 08:39:07 +03:00
SparseCSC ,
SparseBlockCol ,
2016-05-31 16:40:40 +03:00
} ;
2016-06-09 08:39:07 +03:00
inline bool IsSparseStorageFormat ( StorageFormat storageFormat )
{
return ( storageFormat ! = StorageFormat : : Dense ) ;
}
2016-05-31 16:40:40 +03:00
///
/// Enumeration type denoting the type of a compute device.
///
2016-06-15 21:58:32 +03:00
enum class DeviceKind
2016-05-31 16:40:40 +03:00
{
CPU ,
GPU ,
// TODO: FPGA
} ;
///
/// Denotes a compute device instance.
///
2016-07-09 03:56:44 +03:00
class DeviceDescriptor final
2016-05-31 16:40:40 +03:00
{
2016-08-26 22:14:23 +03:00
static std : : atomic < bool > s_defaultDeviceFrozen ;
static std : : shared_ptr < DeviceDescriptor > s_defaultDevice ;
2016-05-31 16:40:40 +03:00
public :
///
/// Returns the Id of 'this' device.
///
2016-06-15 21:58:32 +03:00
int Id ( ) const { return m_deviceId ; }
2016-05-31 16:40:40 +03:00
///
2016-06-15 21:58:32 +03:00
/// Returns the DeviceKind of 'this' device.
2016-05-31 16:40:40 +03:00
///
2016-06-15 21:58:32 +03:00
DeviceKind Type ( ) const { return m_deviceType ; }
2016-05-31 16:40:40 +03:00
///
/// Static method to get the descriptor of the CPU device on the local system.
///
2016-06-15 21:58:32 +03:00
static DeviceDescriptor CPUDevice ( ) { return { 0 , DeviceKind : : CPU } ; }
2016-05-31 16:40:40 +03:00
///
/// Static method to get the descriptor of the GPU device on the local system with the specified CUDA device ID.
///
2016-06-15 21:58:32 +03:00
static DeviceDescriptor GPUDevice ( unsigned int deviceId ) { return { deviceId , DeviceKind : : GPU } ; }
2016-05-31 16:40:40 +03:00
///
/// Static method to get the descriptor of the default device for the current process.
/// This device is used for all CNTK operations where a device needs to be specified and one is not explicitly specified.
///
2016-07-09 03:56:44 +03:00
CNTK_API static DeviceDescriptor DefaultDevice ( ) ;
2016-05-31 16:40:40 +03:00
2016-08-26 22:14:23 +03:00
///
/// Static method to get the descriptor of the default device for the current process.
/// This device is used for all CNTK operations where a device needs to be specified and one is not explicitly specified.
/// Additionally after this method gets executed for the first time, it freezes the default device of the process disallowing
/// changing the default device by further calls to SetDefaultDevice.
///
CNTK_API static DeviceDescriptor UseDefaultDevice ( ) ;
///
/// The default device can only be changed if it has not yet been implicitly used by any previous operation in the CNTK library.
///
CNTK_API static void SetDefaultDevice ( const DeviceDescriptor & newDefaultDevice ) ;
2016-05-31 16:40:40 +03:00
private :
2016-06-15 21:58:32 +03:00
DeviceDescriptor ( unsigned int deviceId , DeviceKind deviceType )
2016-05-31 16:40:40 +03:00
: m_deviceId ( deviceId ) , m_deviceType ( deviceType )
2016-06-15 21:58:32 +03:00
{ }
2016-05-31 16:40:40 +03:00
private :
unsigned int m_deviceId ;
2016-06-15 21:58:32 +03:00
DeviceKind m_deviceType ;
2016-05-31 16:40:40 +03:00
} ;
inline bool operator = = ( const DeviceDescriptor & left , const DeviceDescriptor & right )
{
return ( ( left . Type ( ) = = right . Type ( ) ) & & ( left . Id ( ) = = right . Id ( ) ) ) ;
}
inline bool operator ! = ( const DeviceDescriptor & left , const DeviceDescriptor & right )
{
return ! ( left = = right ) ;
}
///
/// Denotes a multi-dimensional rectangular shape.
///
2016-07-09 03:56:44 +03:00
class NDShape final
2016-05-31 16:40:40 +03:00
{
friend bool operator = = ( const NDShape & first , const NDShape & second ) ;
public :
///
/// A placeholder value to use for an axis whose dimension is unknown and is to be inferred by the system.
///
static const size_t InferredDimension = ( size_t ) - 1 ;
public :
2016-07-23 02:46:09 +03:00
///
/// Construct a NDShape with 0 axes, which denotes a scalar.
///
NDShape ( ) { }
2016-05-31 16:40:40 +03:00
///
/// Contruct a NDShape instance with the specified number of axes and dimensionality in each axis.
///
explicit NDShape ( size_t numAxes , size_t dimension = InferredDimension )
: m_shapeDims ( numAxes , dimension )
{ }
///
/// Contruct a NDShape instance with specified dimensions.
///
NDShape ( const std : : vector < size_t > & dimensions )
2016-07-06 11:15:13 +03:00
: m_shapeDims ( dimensions )
2016-06-15 21:58:32 +03:00
{ }
2016-05-31 16:40:40 +03:00
///
/// Contruct a NDShape instance with specified dimensions.
///
NDShape ( const std : : initializer_list < size_t > & dimensions )
2016-07-06 11:15:13 +03:00
: m_shapeDims ( dimensions )
2016-05-31 16:40:40 +03:00
{ }
2016-07-09 03:56:44 +03:00
///
/// Returns the dimensions of 'this' shape as a std::vector<size_t>
///
const std : : vector < size_t > & Dimensions ( ) const { return m_shapeDims ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the number of axes of 'this' shape.
///
2016-09-02 16:12:07 +03:00
size_t Rank ( ) const { return m_shapeDims . size ( ) ; }
2016-05-31 16:40:40 +03:00
///
/// Returns a reference to dimension size for the specified axis.
///
2016-06-15 21:58:32 +03:00
size_t & operator [ ] ( size_t axisId ) { return m_shapeDims [ axisId ] ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the dimension size for the specified axis.
///
2016-06-15 21:58:32 +03:00
size_t operator [ ] ( size_t axisId ) const { return m_shapeDims [ axisId ] ; }
2016-05-31 16:40:40 +03:00
///
2016-07-09 03:56:44 +03:00
/// Creates and returns a new NDShape instance with the same dimensions as 'this' shape's specified axis range [beginAxisId, endAxisId).
2016-05-31 16:40:40 +03:00
///
2016-06-15 21:58:32 +03:00
NDShape SubShape ( size_t beginAxisId = 0 , size_t endAxisId = SIZE_MAX ) const
2016-05-31 16:40:40 +03:00
{
2016-09-02 16:12:07 +03:00
endAxisId = ( endAxisId = = SIZE_MAX ) ? Rank ( ) : endAxisId ;
if ( ( endAxisId < beginAxisId ) | | ( endAxisId > Rank ( ) ) )
InvalidArgument ( " NDShape::SubShape : The specified endAxisId (%d) cannot exceed the number of axes (%d) of 'this' NDShape and must be >= than the specified beginAxisId (%d) " , ( int ) endAxisId , ( int ) Rank ( ) , ( int ) beginAxisId ) ;
2016-05-31 16:40:40 +03:00
2016-07-09 03:56:44 +03:00
std : : vector < size_t > subShapeDims ( m_shapeDims . begin ( ) + beginAxisId , m_shapeDims . begin ( ) + endAxisId ) ;
return subShapeDims ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns a boolean value indicating if the dimension size for any of the axes of 'this' shape is unknown/inferred (aka == NDShape::InferredDimension).
///
bool HasInferredDimension ( ) const
{
2016-07-09 04:14:05 +03:00
return ( std : : find ( m_shapeDims . begin ( ) , m_shapeDims . end ( ) , ( size_t ) InferredDimension ) ! = m_shapeDims . end ( ) ) ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns the total size of the rectangular shape that 'this' shape denotes.
///
size_t TotalSize ( ) const
{
if ( HasInferredDimension ( ) )
RuntimeError ( " NDShape::TotalSize : TotalSize cannot be determined for a NDShape with one or more dimensions being InferredDimension " ) ;
size_t totalSize = 1 ;
2016-07-09 03:56:44 +03:00
for ( auto dim : m_shapeDims )
totalSize * = dim ;
2016-05-31 16:40:40 +03:00
return totalSize ;
}
///
2016-08-18 15:33:38 +03:00
/// Creates and returns a new shape constructed by appending the dimensions of the specified 'shape' to 'this' shape's dimensions.
2016-05-31 16:40:40 +03:00
///
NDShape AppendShape ( const NDShape & shape ) const
{
2016-09-02 16:12:07 +03:00
std : : vector < size_t > newShapeDims ( Rank ( ) + shape . Rank ( ) ) ;
2016-07-09 03:56:44 +03:00
std : : copy ( m_shapeDims . begin ( ) , m_shapeDims . end ( ) , newShapeDims . begin ( ) ) ;
std : : copy ( shape . m_shapeDims . begin ( ) , shape . m_shapeDims . end ( ) , newShapeDims . begin ( ) + m_shapeDims . size ( ) ) ;
2016-05-31 16:40:40 +03:00
2016-07-09 03:56:44 +03:00
return newShapeDims ;
2016-05-31 16:40:40 +03:00
}
2016-07-08 07:18:20 +03:00
///
/// Create a string representation of 'this' NDShape for display/printing purposes
///
std : : wstring AsString ( ) const
{
std : : wstringstream wStrStream ( L " { " ) ;
2016-09-02 16:12:07 +03:00
for ( size_t i = 0 ; i < Rank ( ) ; i + + )
2016-07-08 07:18:20 +03:00
{
if ( i ! = 0 )
wStrStream < < L " , " ;
2016-07-09 03:56:44 +03:00
wStrStream < < m_shapeDims [ i ] ;
2016-07-08 07:18:20 +03:00
}
wStrStream < < L " } " ;
return wStrStream . str ( ) ;
}
2016-05-31 16:40:40 +03:00
private :
2016-07-09 03:56:44 +03:00
std : : vector < size_t > m_shapeDims ;
2016-05-31 16:40:40 +03:00
} ;
inline bool operator = = ( const NDShape & first , const NDShape & second )
{
return first . m_shapeDims = = second . m_shapeDims ;
}
inline bool operator ! = ( const NDShape & first , const NDShape & second )
{
return ! ( first = = second ) ;
}
2016-06-09 08:39:07 +03:00
typedef int SparseIndexType ;
2016-05-31 16:40:40 +03:00
///
/// Denotes a multi-dimensional writable or read-only array of elemental values.
/// This type denotes a view and there may be multiple simultaneous views of the data underlying a NDArrayView instance.
/// The underlying data is stored in sparse or dense format, and is located on a specific device.
/// The actual underlying storage is either external or internal in which case its lifetime is managed through reference counting.
///
2016-07-09 03:56:44 +03:00
class NDArrayView final : public std : : enable_shared_from_this < NDArrayView >
2016-05-31 16:40:40 +03:00
{
friend class CompositeFunction ;
2016-07-13 10:37:06 +03:00
friend class LearnerBase ;
2016-05-31 16:40:40 +03:00
2016-07-08 07:18:20 +03:00
template < typename T , typename . . . CtorArgTypes >
2016-07-09 03:56:44 +03:00
friend inline std : : shared_ptr < T > MakeSharedObject ( CtorArgTypes & & . . . ctorArgs ) ;
2016-05-31 16:40:40 +03:00
public :
///
/// Construct a NDArrayView with the specified 'dataBuffer' as the backing storage.
/// The 'dataBuffer' must have been allocated on the specified 'device', must be at least
/// as large as the total size of the specified 'viewShape' and must outlive the created NDArrayView object.
///
2016-07-09 03:56:44 +03:00
CNTK_API NDArrayView ( CNTK : : DataType dataType , const NDShape & viewShape , void * dataBuffer , size_t bufferSizeInBytes , const DeviceDescriptor & device , bool readOnly = false ) ;
2016-05-31 16:40:40 +03:00
2016-06-15 21:58:32 +03:00
/// Construct a read-only NDArrayView with the specified 'dataBuffer' as the backing storage.
/// The 'dataBuffer' must have been allocated on the specified 'device', must be at least
/// as large as the total size of the specified 'viewShape' and must outlive the created NDArrayView object.
///
NDArrayView ( CNTK : : DataType dataType , const NDShape & viewShape , const void * dataBuffer , size_t bufferSizeInBytes , const DeviceDescriptor & device )
: NDArrayView ( dataType , viewShape , const_cast < void * > ( dataBuffer ) , bufferSizeInBytes , device , /*readOnly =*/ true )
{ }
2016-06-09 08:39:07 +03:00
///
/// Construct a NDArrayView with newly allocated sparse storage in SparseCSC format on the specified 'device' and initialize its contents
// with the specified Sparse CSC format data.
///
template < typename ElementType >
2016-07-09 03:56:44 +03:00
CNTK_API NDArrayView ( const NDShape & viewShape , const SparseIndexType * colStarts , const SparseIndexType * rowIndices , const ElementType * nonZeroValues , size_t numNonZeroValues , const DeviceDescriptor & device , bool readOnly = false ) ;
2016-06-09 08:39:07 +03:00
///
/// Construct a NDArrayView over newly allocated storage in the specified format on the specified 'device'.
///
2016-07-09 03:56:44 +03:00
CNTK_API NDArrayView ( CNTK : : DataType dataType , CNTK : : StorageFormat storageType , const NDShape & viewShape , const DeviceDescriptor & device ) ;
2016-06-09 08:39:07 +03:00
2016-05-31 16:40:40 +03:00
///
/// Construct a NDArrayView over newly allocated dense storage on the specified 'device'.
///
2016-06-09 08:39:07 +03:00
NDArrayView ( CNTK : : DataType dataType , const NDShape & viewShape , const DeviceDescriptor & device )
: NDArrayView ( dataType , StorageFormat : : Dense , viewShape , device )
2016-05-31 16:40:40 +03:00
{ }
///
/// Construct a NDArrayView with the specified 'dataBuffer' as the backing storage.
/// The 'dataBuffer' must have been allocated on the specified 'device', must be at least
/// as large as the total size of the specified 'viewShape' and must outlive the created NDArrayView object.
///
template < typename ElementType >
NDArrayView ( const NDShape & viewShape , ElementType * dataBuffer , size_t numBufferElements , const DeviceDescriptor & device , bool readOnly = false )
2016-06-11 22:21:15 +03:00
: NDArrayView ( AsDataType < ElementType > ( ) , viewShape , dataBuffer , numBufferElements * sizeof ( ElementType ) , device , readOnly )
2016-05-31 16:40:40 +03:00
{ }
2016-06-15 21:58:32 +03:00
///
/// Construct a read-only NDArrayView with the specified 'dataBuffer' as the backing storage.
/// The 'dataBuffer' must have been allocated on the specified 'device', must be at least
/// as large as the total size of the specified 'viewShape' and must outlive the created NDArrayView object.
///
template < typename ElementType >
NDArrayView ( const NDShape & viewShape , const ElementType * dataBuffer , size_t numBufferElements , const DeviceDescriptor & device )
: NDArrayView ( AsDataType < ElementType > ( ) , viewShape , dataBuffer , numBufferElements * sizeof ( ElementType ) , device )
{ }
2016-05-31 16:40:40 +03:00
///
/// Construct a NDArrayView with the buffer underlying the specified std::vector or std::aray being the underlying storage.
2016-06-15 21:58:32 +03:00
/// The container must be at least as large as the total size of the specified 'viewShape' and should outlive the created NDArrayView object.
2016-05-31 16:40:40 +03:00
///
template < typename ContainerType , typename std : : enable_if < std : : is_same < ContainerType , std : : vector < typename ContainerType : : value_type > > : : value | |
std : : is_same < ContainerType , std : : array < typename ContainerType : : value_type , sizeof ( ContainerType ) / sizeof ( typename ContainerType : : value_type ) > > : : value > : : type * = nullptr >
NDArrayView ( const NDShape & viewShape , ContainerType & sourceContainer , bool readOnly = false )
: NDArrayView ( viewShape , sourceContainer . data ( ) , sourceContainer . size ( ) , DeviceDescriptor : : CPUDevice ( ) , readOnly )
{ }
2016-06-15 21:58:32 +03:00
///
/// Construct a read-only NDArrayView with the buffer underlying the specified std::vector or std::aray being the underlying storage.
/// The container must be the same size as the total size of the specified 'viewShape' and should outlive the created NDArrayView object.
///
template < typename ContainerType , typename std : : enable_if < std : : is_same < ContainerType , std : : vector < typename ContainerType : : value_type > > : : value | |
std : : is_same < ContainerType , std : : array < typename ContainerType : : value_type , sizeof ( ContainerType ) / sizeof ( typename ContainerType : : value_type ) > > : : value > : : type * = nullptr >
NDArrayView ( const NDShape & viewShape , const ContainerType & sourceContainer )
: NDArrayView ( viewShape , sourceContainer . data ( ) , sourceContainer . size ( ) , DeviceDescriptor : : CPUDevice ( ) )
{
if ( sourceContainer . size ( ) ! = viewShape . TotalSize ( ) )
InvalidArgument ( " The size of the STL container does not match the size of the specified viewShape " ) ;
}
2016-05-31 16:40:40 +03:00
///
/// Construct a NDArrayView over newly allocated dense storage on the specified device and
/// assign the specified value to each element of the view.
///
template < typename ElementType >
2016-08-26 22:14:23 +03:00
explicit NDArrayView ( const ElementType & value , const NDShape & viewShape = { 1 } , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) , bool readOnly = false )
2016-06-11 22:21:15 +03:00
: NDArrayView ( AsDataType < ElementType > ( ) , viewShape , device )
2016-05-31 16:40:40 +03:00
{
SetValue ( value ) ;
m_isReadOnly = readOnly ;
}
2016-09-02 16:12:07 +03:00
///
/// Construct a NDArrayView over newly allocated dense storage on the specified device and assign the specified value to each element of the view.
/// The specified value is cast to the specified DataType.
///
explicit NDArrayView ( double value , DataType dataType = DataType : : Float , const NDShape & viewShape = { 1 } , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) , bool readOnly = false )
: NDArrayView ( dataType , viewShape , device )
{
switch ( m_dataType )
{
case DataType : : Float :
SetValue ( ( float ) value ) ;
break ;
case DataType : : Double :
SetValue ( value ) ;
break ;
default :
LogicError ( " Unsupported DataType %s " , DataTypeName ( m_dataType ) ) ;
break ;
}
m_isReadOnly = readOnly ;
}
2016-05-31 16:40:40 +03:00
///
2016-07-09 03:56:44 +03:00
/// Destruct 'this' NDArrayView object
///
CNTK_API ~ NDArrayView ( ) ;
2016-05-31 16:40:40 +03:00
///
/// Returns a writable pointer to the data buffer underlying 'this' view
/// Throws an exception if 'this' view is read-only
///
template < typename ElementType >
2016-07-09 03:56:44 +03:00
CNTK_API ElementType * WritableDataBuffer ( ) ;
2016-05-31 16:40:40 +03:00
///
/// Returns a read-only pointer to the data buffer underlying 'this' view
///
template < typename ElementType >
2016-07-09 03:56:44 +03:00
CNTK_API const ElementType * DataBuffer ( ) const ;
2016-05-31 16:40:40 +03:00
///
/// Returns the descriptor of the device that 'this' view resides on
///
2016-06-15 21:58:32 +03:00
DeviceDescriptor Device ( ) const { return m_device ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the data type of 'this' view's contents.
///
2016-06-15 21:58:32 +03:00
DataType GetDataType ( ) const { return m_dataType ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the storage format of 'this' view.
///
2016-06-15 21:58:32 +03:00
StorageFormat GetStorageFormat ( ) const { return m_storageFormat ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the shape 'this' view.
///
2016-07-09 03:56:44 +03:00
const NDShape & Shape ( ) const { return m_viewShape ; }
2016-05-31 16:40:40 +03:00
2016-06-09 08:39:07 +03:00
///
/// Returns a boolean indicating if 'this' view contains data in sparse storage format.
///
bool IsSparse ( ) const
{
2016-06-11 22:21:15 +03:00
return ( GetStorageFormat ( ) ! = StorageFormat : : Dense ) ;
2016-06-09 08:39:07 +03:00
}
2016-05-31 16:40:40 +03:00
///
/// Returns a boolean indicating if 'this' view is read-only.
///
2016-06-15 21:58:32 +03:00
bool IsReadOnly ( ) const { return m_isReadOnly ; }
2016-05-31 16:40:40 +03:00
2016-07-23 02:46:09 +03:00
// TODO: The set methods should be offered in template from
///
/// Fill 'this' NDArrayView with the specified value. The underlying DataType of 'this' view should be DataType::Float.
///
CNTK_API void SetValue ( float value ) ;
///
/// Fill 'this' NDArrayView with the specified value. The underlying DataType of 'this' view should be DataType::Double.
///
CNTK_API void SetValue ( double value ) ;
2016-09-04 03:31:02 +03:00
///
/// Creates a new NDArrayView with newly allocated storage on the specified device and copies 'this' view's contents into the newly allocated view.
///
CNTK_API NDArrayViewPtr DeepClone ( const DeviceDescriptor & device , bool readOnly = false ) const ;
2016-05-31 16:40:40 +03:00
///
/// Creates a new NDArrayView with newly allocated storage on the same device as 'this' view and copies 'this' view's contents into the newly allocated view.
///
2016-09-04 03:31:02 +03:00
inline NDArrayViewPtr DeepClone ( bool readOnly = false ) const
{
return DeepClone ( this - > Device ( ) , readOnly ) ;
}
2016-05-31 16:40:40 +03:00
///
2016-07-06 11:15:13 +03:00
/// Creates a new NDArrayView which is an alias of 'this' view; i.e. a new view of the same shape as 'this' over the same underlying data.
2016-05-31 16:40:40 +03:00
///
2016-07-09 03:56:44 +03:00
CNTK_API NDArrayViewPtr Alias ( bool readOnly = false ) const ;
2016-05-31 16:40:40 +03:00
///
/// Copies the contents of the 'source' NDArrayView to 'this' view.
/// The shapes of the 'source' view and 'this' view must be identical.
///
2016-07-09 03:56:44 +03:00
CNTK_API void CopyFrom ( const NDArrayView & source ) ;
2016-05-31 16:40:40 +03:00
///
/// Static method to construct a new NDArrayView object whose contents are drawn from a normal distribution with the specified mean and standard deviation..
///
template < typename ElementType >
2016-08-26 22:14:23 +03:00
CNTK_API static NDArrayViewPtr RandomNormal ( const NDShape & shape , double mean , double stdDev , unsigned long seed = 1 , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-05-31 16:40:40 +03:00
///
/// Static method to construct a new NDArrayView object whose contents are drawn from a uniform distribution in the specified value range.
///
template < typename ElementType >
2016-08-26 22:14:23 +03:00
CNTK_API static NDArrayViewPtr RandomUniform ( const NDShape & shape , double rangeStart , double rangeEnd , unsigned long seed = 1 , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-05-31 16:40:40 +03:00
private :
2016-06-15 21:58:32 +03:00
// Disallow copy and move construction and assignment
NDArrayView ( const NDArrayView & ) = delete ; NDArrayView & operator = ( const NDArrayView & ) = delete ; NDArrayView & operator = ( NDArrayView & & ) = delete ; NDArrayView ( NDArrayView & & other ) = delete ;
2016-05-31 16:40:40 +03:00
2016-06-05 04:26:52 +03:00
private :
static const size_t AutoSelectRowColSplitPoint = SIZE_MAX ;
2016-05-31 16:40:40 +03:00
private :
2016-07-09 03:56:44 +03:00
CNTK_API NDArrayView ( CNTK : : DataType dataType , const DeviceDescriptor & device , CNTK : : StorageFormat storageType , const NDShape & viewShape , bool readOnly , void * tensorView ) ;
2016-05-31 16:40:40 +03:00
template < typename ElementType >
2016-06-09 08:39:07 +03:00
static std : : shared_ptr < Microsoft : : MSR : : CNTK : : Matrix < ElementType > > GetMatrixImpl ( const Microsoft : : MSR : : CNTK : : TensorView < ElementType > * tensorView , size_t rowColSplitPoint ) ;
2016-05-31 16:40:40 +03:00
template < typename ElementType >
2016-06-05 04:26:52 +03:00
std : : shared_ptr < const Microsoft : : MSR : : CNTK : : Matrix < ElementType > > GetMatrix ( size_t rowColSplitPoint = AutoSelectRowColSplitPoint ) const ;
2016-05-31 16:40:40 +03:00
template < typename ElementType >
2016-06-05 04:26:52 +03:00
std : : shared_ptr < Microsoft : : MSR : : CNTK : : Matrix < ElementType > > GetWritableMatrix ( size_t rowColSplitPoint = AutoSelectRowColSplitPoint ) ;
2016-05-31 16:40:40 +03:00
template < typename ElementType >
const Microsoft : : MSR : : CNTK : : TensorView < ElementType > * GetTensorView ( ) const ;
template < typename ElementType >
Microsoft : : MSR : : CNTK : : TensorView < ElementType > * GetWritableTensorView ( ) ;
private :
CNTK : : DataType m_dataType ;
DeviceDescriptor m_device ;
CNTK : : StorageFormat m_storageFormat ;
NDShape m_viewShape ;
bool m_isReadOnly ;
2016-07-09 03:56:44 +03:00
std : : shared_ptr < void > m_tensorView ; // Microsoft::MSR::CNTK::TensorView<ElemType>*
2016-05-31 16:40:40 +03:00
} ;
2016-06-05 04:26:52 +03:00
///
/// Denotes a multi-dimensional mask used for specifying specific sections of a NDArrayView object as masked/invalid.
/// This type denotes a view and there may be multiple simultaneous views of the data underlying a NDMask instance.
///
2016-07-09 03:56:44 +03:00
class NDMask final : public std : : enable_shared_from_this < NDMask >
2016-06-05 04:26:52 +03:00
{
friend class CompositeFunction ;
2016-07-08 07:18:20 +03:00
template < typename T , typename . . . CtorArgTypes >
2016-07-09 03:56:44 +03:00
friend inline std : : shared_ptr < T > MakeSharedObject ( CtorArgTypes & & . . . ctorArgs ) ;
2016-06-05 04:26:52 +03:00
public :
///
/// Construct a new Mask object of specified shape
///
2016-08-26 22:14:23 +03:00
CNTK_API explicit NDMask ( const NDShape & shape , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-06-05 04:26:52 +03:00
///
2016-07-09 03:56:44 +03:00
/// Destruct 'this' NDMask object
///
CNTK_API ~ NDMask ( ) ;
2016-06-05 04:26:52 +03:00
///
/// Mask out the specified sub-section of 'this' mask
///
2016-07-09 03:56:44 +03:00
CNTK_API void MaskSection ( const std : : vector < size_t > & sectionOffset , const NDShape & sectionShape ) ;
2016-06-05 04:26:52 +03:00
///
/// Clear the mask; i.e. unmask all currently masked values
///
2016-07-09 03:56:44 +03:00
CNTK_API void Clear ( ) ;
2016-06-05 04:26:52 +03:00
2016-07-23 02:46:09 +03:00
///
/// Returns the number of masked/invalid values
///
CNTK_API size_t MaskedCount ( ) const ;
2016-06-05 04:26:52 +03:00
///
/// Returns the descriptor of the device that 'this' mask resides on
///
2016-06-15 21:58:32 +03:00
DeviceDescriptor Device ( ) const { return m_device ; }
2016-06-05 04:26:52 +03:00
///
/// Returns the shape 'this' mask.
///
2016-07-09 03:56:44 +03:00
const NDShape & Shape ( ) const { return m_maskShape ; }
2016-06-05 04:26:52 +03:00
2016-07-26 08:43:11 +03:00
///
/// Returns a read-only pointer to the data buffer underlying 'this' Mask object
///
CNTK_API const char * DataBuffer ( ) const ;
2016-06-05 04:26:52 +03:00
///
/// Creates a new NDMask with newly allocated storage on the same device as 'this' mask and copies 'this' mask's contents into the newly allocated mask.
///
2016-07-09 03:56:44 +03:00
CNTK_API NDMaskPtr DeepClone ( ) const ;
2016-06-05 04:26:52 +03:00
///
/// Creates a new NDMask which is an alias of 'this' mask.
///
2016-07-09 03:56:44 +03:00
CNTK_API NDMaskPtr Alias ( ) const ;
2016-06-05 04:26:52 +03:00
///
/// Copies the contents of the 'source' NDMask to 'this' mask.
/// The shapes of the 'source' mask and 'this' mask must be identical.
///
2016-07-09 03:56:44 +03:00
CNTK_API void CopyFrom ( const NDMask & source ) ;
2016-06-05 04:26:52 +03:00
private :
NDMask ( const NDShape & shape , Microsoft : : MSR : : CNTK : : Matrix < char > * matrix ) ;
Microsoft : : MSR : : CNTK : : Matrix < char > * GetMatrix ( ) const ;
2016-06-15 21:58:32 +03:00
// Disallow copy and move construction and assignment
NDMask ( const NDMask & ) = delete ; NDMask & operator = ( const NDMask & ) = delete ; NDMask & operator = ( NDMask & & ) = delete ; NDMask ( NDMask & & other ) = delete ;
2016-06-05 04:26:52 +03:00
private :
DeviceDescriptor m_device ;
NDShape m_maskShape ;
2016-07-09 03:56:44 +03:00
std : : shared_ptr < Microsoft : : MSR : : CNTK : : Matrix < char > > m_matrixView ;
2016-06-05 04:26:52 +03:00
} ;
2016-05-31 16:40:40 +03:00
///
2016-06-05 04:26:52 +03:00
/// Denotes a multi-dimensional array with an optional mask and is the actual data fed into or produced from a computation.
2016-07-08 07:18:20 +03:00
/// The mask is typically lower dimensionality than the data, meaning data is masked in coarse individual sample units where
2016-09-02 16:12:07 +03:00
/// sample shape is data.Shape().SubShape(0, data.Shape().Rank() - mask.Shape().Rank)
/// Also, note that the size of the data's trailing mask.Shape().Rank() dimensions must match the mask shape dimensions.
2016-05-31 16:40:40 +03:00
///
2016-07-09 03:56:44 +03:00
class Value : public std : : enable_shared_from_this < Value >
2016-05-31 16:40:40 +03:00
{
public :
///
/// A multi-dimensional value with no mask.
///
2016-07-09 03:56:44 +03:00
CNTK_API Value ( const NDArrayViewPtr & data ) ;
2016-05-31 16:40:40 +03:00
2016-06-05 04:26:52 +03:00
///
/// A multi-dimensional value with an associated mask.
///
2016-07-09 03:56:44 +03:00
CNTK_API Value ( const NDArrayViewPtr & data , const NDMaskPtr & mask ) ;
2016-06-05 04:26:52 +03:00
///
/// Create a new Value object containing a collection of variable length sequences.
/// The created Value object contains a copy of the specified 'sequences' data.
///
template < typename ElementType >
2016-07-09 03:56:44 +03:00
CNTK_API static ValuePtr Create ( const NDShape & sampleShape , const std : : vector < std : : vector < ElementType > > & sequences , const DeviceDescriptor & device , bool readOnly = false ) ;
2016-06-05 04:26:52 +03:00
2016-06-09 08:39:07 +03:00
///
/// Create a new Value object containing a collection of variable length sequences of one hot vectors
/// The created Value object contains a copy of the specified 'sequences' data.
///
template < typename ElementType >
2016-07-09 03:56:44 +03:00
CNTK_API static ValuePtr Create ( size_t vocabularySize , const std : : vector < std : : vector < size_t > > & oneHotSequences , const DeviceDescriptor & device , bool readOnly = false ) ;
2016-06-09 08:39:07 +03:00
2016-05-31 16:40:40 +03:00
///
/// Destruct 'this' Value object.
///
CNTK_API virtual ~Value();

///
/// Returns the NDArrayView object corresponding to the data contents of 'this' Value object.
///
CNTK_API virtual NDArrayViewPtr Data() const;

///
/// Returns the NDMask object corresponding to the mask associated with 'this' Value object.
///
CNTK_API virtual NDMaskPtr Mask() const;

///
/// Creates a new Value with newly allocated storage on the same device as 'this' Value and copies 'this' Value's contents into the newly allocated Value.
///
CNTK_API virtual ValuePtr DeepClone(bool readOnly = false) const;

///
/// Creates a new Value which is an alias of 'this' Value.
///
CNTK_API virtual ValuePtr Alias(bool readOnly = false) const;

///
/// Copies the contents of the 'source' Value to 'this' Value.
/// The shapes of the 'source' Value's data and mask must be identical to 'this' Value's data and mask.
///
CNTK_API virtual void CopyFrom(const Value& source);
2016-06-05 04:26:52 +03:00
2016-05-31 16:40:40 +03:00
private:
    // Disallow copy and move construction and assignment
    Value(const Value&) = delete; Value& operator=(const Value&) = delete; Value(Value&&) = delete; Value& operator=(Value&&) = delete;

private:
    // The dense data backing 'this' Value (returned by Data()).
    NDArrayViewPtr m_data;
    // The mask associated with the data (returned by Mask()).
    // NOTE(review): presumably null when no masking is needed — confirm against the implementation.
    NDMaskPtr m_mask;
2016-05-31 16:40:40 +03:00
} ;
2016-06-05 04:26:52 +03:00
///
/// Denotes an Axis of a Variable and is used for specifying the axes parameters of certain Functions such as reductions.
2016-08-21 13:49:03 +03:00
/// Besides the static axes corresponding to each of the axes of the Variable's shape, Variables of kind 'Input' and any
/// 'Output' Variables dependent on an 'Input' Variable also have 2 additional dynamic axes whose dimensions are known only
/// when the Variable is bound to actual data during compute (viz. sequence axis and batch axis denoting the axis along which
/// multiple sequences are batched)
2016-06-05 04:26:52 +03:00
///
class Axis final
{
2016-08-26 22:14:23 +03:00
CNTK_API static const std : : wstring StaticAxisNamePrefix ;
static const size_t SentinelStaticAxisIndexValueForDynamicAxes = SIZE_MAX ;
2016-08-31 08:50:25 +03:00
// TODO: Make this thread-safe
CNTK_API static std : : unordered_set < std : : wstring > s_allKnownDynamicAxisNames ;
2016-09-02 16:12:07 +03:00
public :
CNTK_API static const std : : vector < Axis > DefaultInputVariableDynamicAxes ;
2016-06-05 04:26:52 +03:00
public :
///
/// Construct an Axis object denoting a static axis with the specified index.
///
2016-08-21 13:49:03 +03:00
explicit Axis ( size_t staticAxisIdx )
2016-08-26 22:14:23 +03:00
: m_staticAxisIdx ( staticAxisIdx ) , m_isOrderedDynamicAxis ( false )
2016-06-05 04:26:52 +03:00
{
2016-08-26 22:14:23 +03:00
m_name = StaticAxisNamePrefix + std : : to_wstring ( staticAxisIdx ) ;
2016-06-05 04:26:52 +03:00
}
///
/// Construct a dynamic axis with the specified name.
///
2016-08-26 22:14:23 +03:00
explicit Axis ( const std : : wstring & name , bool isOrderedDynamicAxis = true )
: m_staticAxisIdx ( SentinelStaticAxisIndexValueForDynamicAxes ) , m_name ( name ) , m_isOrderedDynamicAxis ( isOrderedDynamicAxis )
2016-08-31 08:50:25 +03:00
{
RegisterAxisName ( name ) ;
}
2016-06-05 04:26:52 +03:00
///
/// Returns a boolean indicating if 'this' Axis corresponds to a static axis
///
2016-08-26 22:14:23 +03:00
bool IsStaticAxis ( ) const { return m_staticAxisIdx ! = SentinelStaticAxisIndexValueForDynamicAxes ; }
2016-06-05 04:26:52 +03:00
///
2016-08-26 22:14:23 +03:00
/// Returns a boolean indicating if 'this' Axis is ordered; i.e. if there is an ordering between the dimensions along this axis.
2016-06-05 04:26:52 +03:00
///
2016-08-26 22:14:23 +03:00
bool IsOrdered ( ) const { return IsStaticAxis ( ) | | m_isOrderedDynamicAxis ; }
///
/// Returns the axis index if 'this' Axis is a static axis. Throws an exception otherwise if checked == true.
///
size_t StaticAxisIndex ( bool checked = true ) const
2016-06-05 04:26:52 +03:00
{
2016-08-26 22:14:23 +03:00
if ( checked & & ! IsStaticAxis ( ) )
2016-06-05 04:26:52 +03:00
InvalidArgument ( " Cannot query the static axis index for a non-static axis " ) ;
return m_staticAxisIdx ;
}
///
/// Static Axis object representing the default dynamic axis.
///
2016-07-09 03:56:44 +03:00
CNTK_API static const Axis & DefaultDynamicAxis ( ) ;
2016-06-05 04:26:52 +03:00
///
/// Static Axis object representing the batch axis.
///
2016-08-21 13:49:03 +03:00
CNTK_API static const Axis & DefaultBatchAxis ( ) ;
2016-06-05 04:26:52 +03:00
2016-08-31 08:50:25 +03:00
///
/// Returns a new unique Dynamic axis
///
CNTK_API static Axis NewUniqueDynamicAxis ( const std : : wstring & axisNamePrefix , bool isOrderedDynamicAxis = true ) ;
2016-06-05 04:26:52 +03:00
///
/// Name of 'this' axis
///
2016-07-09 03:56:44 +03:00
const std : : wstring & Name ( ) const { return m_name ; }
2016-06-05 04:26:52 +03:00
///
/// Default constructor; results in an invalid axis object.
///
Axis ( )
2016-08-26 22:14:23 +03:00
: m_staticAxisIdx ( SentinelStaticAxisIndexValueForDynamicAxes )
2016-06-15 21:58:32 +03:00
{ }
2016-06-05 04:26:52 +03:00
2016-08-31 08:50:25 +03:00
private :
CNTK_API void RegisterAxisName ( const std : : wstring & axisName ) ;
2016-06-05 04:26:52 +03:00
private :
size_t m_staticAxisIdx ;
2016-07-09 03:56:44 +03:00
std : : wstring m_name ;
2016-08-26 22:14:23 +03:00
bool m_isOrderedDynamicAxis ;
2016-06-05 04:26:52 +03:00
} ;
inline bool operator = = ( const Axis & first , const Axis & second )
{
if ( first . IsStaticAxis ( ) ! = second . IsStaticAxis ( ) )
return false ;
if ( first . IsStaticAxis ( ) )
return first . StaticAxisIndex ( ) = = second . StaticAxisIndex ( ) ;
else
return first . Name ( ) = = second . Name ( ) ;
}
inline bool operator ! = ( const Axis & first , const Axis & second )
{
return ! ( first = = second ) ;
}
2016-08-21 13:49:03 +03:00
}
2016-06-05 04:26:52 +03:00
2016-08-21 13:49:03 +03:00
namespace std {
    ///
    /// Hash an Axis by its name. Static axis names embed the axis index
    /// (StaticAxisNamePrefix + index), so this is consistent with operator==.
    ///
    template <> struct hash<CNTK::Axis>
    {
        size_t operator()(const CNTK::Axis& x) const
        {
            std::hash<std::wstring> nameHasher;
            return nameHasher(x.Name());
        }
    };
}
namespace CNTK
{
2016-05-31 16:40:40 +03:00
///
/// Enumeration type denoting the kind of a symbolic Variable object
///
enum class VariableKind
{
    Input = 0,       // user-supplied input data
    Output = 1,      // output of a Function
    Parameter = 2,   // learnable parameter
    Constant = 3,    // constant (non-learnable) value
    Placeholder = 4, // temporary stand-in, to be replaced before evaluation
};
2016-09-04 03:31:02 +03:00
///
/// Returns the wide-string name of the specified VariableKind value;
/// calls LogicError for a value outside the enumeration.
///
inline const wchar_t* VariableKindName(VariableKind variableKind)
{
    if (variableKind == VariableKind::Input)
        return L"Input";
    else if (variableKind == VariableKind::Output)
        return L"Output";
    else if (variableKind == VariableKind::Parameter)
        return L"Parameter";
    else if (variableKind == VariableKind::Constant)
        return L"Constant";
    else if (variableKind == VariableKind::Placeholder)
        return L"Placeholder";
    else
        LogicError("Unknown VariableKind");
}
namespace Internal
{
inline std : : wstring GenerateUid ( VariableKind varKind )
{
return std : : wstring ( VariableKindName ( varKind ) ) + std : : to_wstring ( Internal : : NewUniqueId ( ) ) ;
}
}
2016-09-02 16:12:07 +03:00
// Forward declarations of the free-standing Variable factory functions;
// needed so the Variable class below can befriend them (see its #ifndef SWIG friend section).
inline Variable PlaceholderVariable(const NDShape& shape, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes);
inline Variable InputVariable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, bool needsGradient, const std::wstring& name, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes);
inline Variable OutputVariable(const NDShape& shape, CNTK::DataType dataType, Function* ownerFunction, const std::vector<Axis>& dynamicAxes, const std::wstring& name = L"");
2016-05-31 16:40:40 +03:00
///
/// Denotes a symbolic entity corresponding to the inputs and outputs of a Function.
/// A Variable is symbolic and does not represent the actual values.
/// Also, Variable type is a value type and copies of a Variable object are aliases of the
/// source Variable object itself and have the same identity.
///
class Variable
{
    friend bool operator==(const Variable& first, const Variable& second);
    friend class Function;
    friend class CompositeFunction;

    template <typename T>
    friend struct std::hash;

    // Internal factory used when converting a legacy computation network node into a V2 Variable.
    template <typename ElementType>
    friend Variable GetVariable(const Microsoft::MSR::CNTK::ComputationNodeBasePtr& node,
                                std::unordered_map<Microsoft::MSR::CNTK::ComputationNodeBasePtr, Variable>& nodeToVariableMap,
                                std::unordered_map<Variable, Variable>& placeholderReplacements,
                                std::unordered_set<FunctionPtr>& allPrimitiveFunctions);

#ifndef SWIG
private:
    // The free-standing factory functions need access to the private constructors below.
    friend inline Variable PlaceholderVariable(const NDShape& shape, const std::vector<Axis>& dynamicAxes /*= Axis::DefaultInputVariableDynamicAxes*/);
    friend inline Variable InputVariable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, bool needsGradient, const std::wstring& name, const std::vector<Axis>& dynamicAxes /*= Axis::DefaultInputVariableDynamicAxes*/);
    friend inline Variable OutputVariable(const NDShape& shape, CNTK::DataType dataType, Function* ownerFunction, const std::vector<Axis>& dynamicAxes, const std::wstring& name /*= L""*/);
#endif

public:
    ///
    /// Create an 'Output' variable aliasing the output of the specified Function
    /// Throws an exception if called for a Function instance with multiple outputs
    ///
    CNTK_API Variable(const FunctionPtr& function);

    ///
    /// Implicit conversion to a FunctionPtr; creates a pass through primitive function
    ///
    CNTK_API operator FunctionPtr() const;

    ///
    /// Default constructor for creating an invalid/null Variable instance.
    /// Required for use in a std::vector container.
    ///
    Variable() {}

    ///
    /// Returns the shape of 'this' variable
    ///
    const NDShape& Shape() const { return m_dataFields->m_shape; }

    ///
    /// Returns the dynamic axes of 'this' variable
    ///
    const std::vector<Axis>& DynamicAxes() const { return m_dataFields->m_dynamicAxes; }

    ///
    /// Returns the VariableKind of 'this' variable
    ///
    VariableKind Kind() const { return m_dataFields->m_varKind; }

    ///
    /// Returns a boolean value indicating if 'this' variable denotes sparse data
    ///
    bool IsSparse() const { return m_dataFields->m_isSparse; }

    ///
    /// Returns a boolean value indicating if 'this' variable is an Input
    ///
    bool IsInput() const { return Kind() == VariableKind::Input; }

    ///
    /// Returns a boolean value indicating if 'this' variable is an Output
    ///
    bool IsOutput() const { return Kind() == VariableKind::Output; }

    ///
    /// Returns a boolean value indicating if 'this' variable is a Parameter
    ///
    bool IsParameter() const { return Kind() == VariableKind::Parameter; }

    ///
    /// Returns a boolean value indicating if 'this' variable is a Constant
    ///
    bool IsConstant() const { return Kind() == VariableKind::Constant; }

    ///
    /// Returns a boolean value indicating if 'this' variable is a Placeholder
    ///
    bool IsPlaceholder() const { return Kind() == VariableKind::Placeholder; }

    ///
    /// Returns the name of 'this' variable
    ///
    const std::wstring& Name() const { return m_dataFields->m_name; }

    ///
    /// Returns the internally generated unique name of the variable
    ///
    const std::wstring& Uid() const { return m_dataFields->m_uid; }

    ///
    /// Returns the Function object which 'this' variable is an output of.
    /// Returns null when called for a Variable that is not of 'Output' VariableKind.
    ///
    CNTK_API FunctionPtr Owner() const;

    ///
    /// Returns the DataType of the data that 'this' Variable symbolically represents
    ///
    DataType GetDataType() const { return m_dataFields->m_dataType; }

    ///
    /// Returns a boolean value indicating if gradient computation is enabled for this variable.
    ///
    bool NeedsGradient() const { return m_dataFields->m_needsGradient; }

protected:
#ifdef SWIG
public: // SWIG-generated bindings need public access to these constructors.
#endif
    // Constructor for kinds that carry a value (Parameter/Constant); no owner Function, not sparse.
    Variable(const NDShape& shape, VariableKind varType, CNTK::DataType dataType, const NDArrayViewPtr& value, bool needsGradient, const std::vector<Axis>& dynamicAxes, const std::wstring& name, const std::wstring& uid)
        : Variable(shape, varType, dataType, nullptr, value, needsGradient, dynamicAxes, /*isSparse =*/ false, name, uid)
    { }

protected:
    // Returns the NDArrayView backing this Variable; asserts that a value is present
    // (only Parameter/Constant kinds are constructed with one).
    NDArrayViewPtr Value() const
    {
        assert(m_dataFields->m_value != nullptr);
        return m_dataFields->m_value;
    }

private:
#ifdef SWIG
public: // SWIG-generated bindings need public access to these constructors.
#endif
    // Constructor used by the InputVariable factories; kind is always 'Input'.
    Variable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, bool needsGradient, const std::wstring& name, const std::vector<Axis>& dynamicAxes, const std::wstring& uid)
        : Variable(shape, VariableKind::Input, dataType, nullptr, nullptr, needsGradient, dynamicAxes, isSparse, name, uid)
    { }

    // Most-general constructor; all other constructors delegate here.
    Variable(const NDShape& shape, VariableKind varType, CNTK::DataType dataType, Function* ownerFunction, const NDArrayViewPtr& value, bool needsGradient, const std::vector<Axis>& dynamicAxes, bool isSparse, const std::wstring& name, const std::wstring& uid)
        : m_dataFields(MakeSharedObject<VariableFields>(shape, varType, dataType, ownerFunction, value, needsGradient, dynamicAxes, isSparse, name, uid))
    { }

private:
    // Shared state of a Variable; copies of a Variable alias the same VariableFields instance,
    // which is what gives Variable its value-type-with-identity semantics.
    struct VariableFields final : public std::enable_shared_from_this<VariableFields>
    {
        friend class CompositeFunction;

        NDShape m_shape;
        VariableKind m_varKind;
        CNTK::DataType m_dataType;
        Function* m_ownerFunction; // Variable does not keep the Function alive
        NDArrayViewPtr m_value;
        bool m_needsGradient;
        std::wstring m_name;
        std::vector<Axis> m_dynamicAxes;
        bool m_isSparse;
        std::wstring m_uid;

        VariableFields(const NDShape& shape, VariableKind varType, CNTK::DataType type, Function* ownerFunction, const NDArrayViewPtr& value, bool needsGradient, const std::vector<Axis>& dynamicAxes, bool isSparse, const std::wstring& name, const std::wstring& uid)
            : m_shape(shape), m_varKind(varType), m_dataType(type), m_ownerFunction(ownerFunction), m_value(value), m_needsGradient(needsGradient), m_dynamicAxes(dynamicAxes), m_isSparse(isSparse), m_name(name), m_uid(uid)
        {
            // A supplied value (Parameter/Constant) must agree with the declared DataType.
            if (value && (type != value->GetDataType()))
                InvalidArgument("The DataType of the Parameter/Constant Variable does not match the DataType of the associated Value");

            // Validate that each of the dynamic axes are unique
            std::unordered_set<Axis> uniqueDynamicAxis;
            for (auto& currentDynamicAxis : dynamicAxes)
            {
                auto retVal = uniqueDynamicAxis.insert(currentDynamicAxis);
                if (!retVal.second)
                    InvalidArgument("Dynamic axis named %S is specified more than once for Variable object", currentDynamicAxis.Name().c_str());
            }
        }

    private:
        // Disallow copy and move construction and assignment
        VariableFields(const VariableFields&) = delete; VariableFields& operator=(const VariableFields& other) = delete; VariableFields(VariableFields&&) = delete; VariableFields& operator=(VariableFields&&) = delete;
    };
    typedef std::shared_ptr<VariableFields> VariableFieldsPtr;

    VariableFieldsPtr m_dataFields;
};
inline bool operator = = ( const Variable & first , const Variable & second )
{
return first . m_dataFields = = second . m_dataFields ;
}
2016-07-18 02:44:44 +03:00
inline bool operator ! = ( const Variable & first , const Variable & second )
{
return ! ( first = = second ) ;
}
2016-09-02 16:12:07 +03:00
///
/// Create a Placeholder variable to be used as a temporary/placeholder input to a Function.
/// All placeholder inputs of a Function must be replaced with non-placeholder Variables before Forward evaluation of the Function.
///
inline Variable PlaceholderVariable(const NDShape& shape, const std::vector<Axis>& dynamicAxes /*= Axis::DefaultInputVariableDynamicAxes*/)
{
    const auto kind = VariableKind::Placeholder;
    // Placeholders carry no data type, value, or gradient requirement until replaced.
    return Variable(shape, kind, DataType::Unknown, nullptr, false, dynamicAxes, L"", Internal::GenerateUid(kind));
}
///
/// Create an 'Input' Variable denoting sparse data and specify if gradients are to be computed for this input
///
inline Variable InputVariable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, bool needsGradient, const std::wstring& name /*= L""*/, const std::vector<Axis>& dynamicAxes /*= Axis::DefaultInputVariableDynamicAxes*/)
{
    auto uid = Internal::GenerateUid(VariableKind::Input);
    return Variable(shape, isSparse, dataType, needsGradient, name, dynamicAxes, uid);
}
///
/// Create an 'Input' Variable and specify if gradients are to be computed for this input
///
inline Variable InputVariable(const NDShape& shape, CNTK::DataType dataType, bool needsGradient, const std::wstring& name = L"", const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes)
{
    // Dense (non-sparse) input; delegate to the general overload.
    const bool isSparse = false;
    return InputVariable(shape, isSparse, dataType, needsGradient, name, dynamicAxes);
}
///
/// Create an 'Input' Variable.
///
2016-09-05 08:54:53 +03:00
inline Variable InputVariable ( const NDShape & shape , DataType dataType , const std : : wstring & name , const std : : vector < Axis > & dynamicAxes = Axis : : DefaultInputVariableDynamicAxes )
2016-09-02 16:12:07 +03:00
{
return InputVariable ( shape , dataType , /*needsGradient =*/ false , name , dynamicAxes ) ;
}
///
/// Create an 'Input' Variable.
///
inline Variable InputVariable(const NDShape& shape, DataType dataType, const wchar_t* name, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes)
{
    // Promote the raw wide-character string to std::wstring and delegate.
    std::wstring nameStr(name);
    return InputVariable(shape, dataType, nameStr, dynamicAxes);
}
///
/// Create an 'Input' Variable.
///
inline Variable InputVariable(const NDShape& shape, DataType dataType, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes)
{
    // Unnamed input; delegate with an empty name.
    const std::wstring emptyName = L"";
    return InputVariable(shape, dataType, emptyName, dynamicAxes);
}
///
/// Create an 'Input' Variable denoting sparse data.
///
2016-09-05 08:54:53 +03:00
inline Variable InputVariable ( const NDShape & shape , bool isSparse , CNTK : : DataType dataType , const std : : wstring & name , const std : : vector < Axis > & dynamicAxes = Axis : : DefaultInputVariableDynamicAxes )
2016-09-02 16:12:07 +03:00
{
return InputVariable ( shape , isSparse , dataType , /*needsGradient =*/ false , name , dynamicAxes ) ;
}
///
/// Create an 'Input' Variable denoting sparse data.
///
inline Variable InputVariable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, const wchar_t* name, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes)
{
    // Promote the raw wide-character string to std::wstring and delegate.
    std::wstring nameStr(name);
    return InputVariable(shape, isSparse, dataType, nameStr, dynamicAxes);
}
///
/// Create an 'Input' Variable denoting sparse data.
///
inline Variable InputVariable(const NDShape& shape, bool isSparse, CNTK::DataType dataType, const std::vector<Axis>& dynamicAxes = Axis::DefaultInputVariableDynamicAxes)
{
    // Unnamed sparse input; delegate with an empty name.
    const std::wstring emptyName = L"";
    return InputVariable(shape, isSparse, dataType, emptyName, dynamicAxes);
}
///
/// Create an 'Output' variable
///
inline Variable OutputVariable(const NDShape& shape, CNTK::DataType dataType, Function* ownerFunction, const std::vector<Axis>& dynamicAxes, const std::wstring& name /*= L""*/)
{
    // Outputs are dense and never require gradients themselves.
    const bool needsGradient = false;
    const bool isSparse = false;
    return Variable(shape, VariableKind::Output, dataType, ownerFunction, nullptr, needsGradient, dynamicAxes, isSparse, name, Internal::GenerateUid(VariableKind::Output));
}
2016-05-31 16:40:40 +03:00
///
/// Denotes Parameter inputs of a Function.
///
class Parameter final : public Variable
{
    template <typename T>
    friend struct std::hash;

    // Internal factory used when converting a legacy computation network node into a V2 Variable.
    template <typename ElementType>
    friend Variable GetVariable(const Microsoft::MSR::CNTK::ComputationNodeBasePtr& node,
                                std::unordered_map<Microsoft::MSR::CNTK::ComputationNodeBasePtr, Variable>& nodeToVariableMap,
                                std::unordered_map<Variable, Variable>& placeholderReplacements,
                                std::unordered_set<FunctionPtr>& allPrimitiveFunctions);

public:
    ///
    /// Construct a parameter whose initial contents are a copy of the specified 'value'
    ///
    explicit Parameter(const NDArrayViewPtr& value, const std::wstring& name = L"")
        : Parameter(value, name, Internal::GenerateUid(VariableKind::Parameter))
    { }

    // TODO: Constructor to move a specified NDArrayView value

    ///
    /// Construct a parameter of specified shape whose contents are initialized with the specified 'initValue'
    ///
    template <typename ElemType>
    Parameter(const NDShape& shape, ElemType initValue, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
        : Variable(shape, VariableKind::Parameter, AsDataType<ElemType>(), MakeSharedObject<NDArrayView>(initValue, shape, device), true, {}, name, Internal::GenerateUid(VariableKind::Parameter))
    { }

    ///
    /// Construct a parameter of specified shape and DataType whose contents are initialized with the specified 'initValue'
    /// (Fixed comment: previously mislabeled as constructing a constant.)
    ///
    Parameter(const NDShape& shape, DataType dataType, double initValue, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
        : Variable(shape, VariableKind::Parameter, dataType, MakeSharedObject<NDArrayView>(initValue, dataType, shape, device), true, {}, name, Internal::GenerateUid(VariableKind::Parameter))
    { }

    ///
    /// Create a Parameter initialized with random values drawn from a Uniform distribution in the range [-0.05, 0.05]
    ///
    static Parameter Uniform(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        // 1.0 / 20 == 0.05, the half-width of the documented range.
        return UniformInitParameter(shape, type, 1.0 / 20, seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values from a Uniform distribution in the range [-sqrt(6 / fanIn), sqrt(6 / fanIn)]
    ///
    static Parameter HeUniform(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        // fanIn is the product of all dimensions after the leading 'fanOutRank' ones.
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return UniformInitParameter(shape, type, std::sqrt(6.0 / fanInShape.TotalSize()), seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values from a Uniform distribution in the range [-sqrt(6 / (fanIn + fanOut)), sqrt(6 / (fanIn + fanOut))]
    ///
    static Parameter GlorotUniform(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        NDShape fanOutShape = shape.SubShape(0, fanOutRank);
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return UniformInitParameter(shape, type, std::sqrt(6.0 / (fanInShape.TotalSize() + fanOutShape.TotalSize())), seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values from a Uniform distribution in the range [-sqrt(3 / fanIn), sqrt(3 / fanIn)]
    ///
    static Parameter Xavier(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return UniformInitParameter(shape, type, std::sqrt(3.0 / fanInShape.TotalSize()), seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values drawn from a Gaussian distribution with [mean = 0, stdDev = sqrt(0.04 / fanIn)]
    ///
    static Parameter Gaussian(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return NormalInitParameter(shape, type, std::sqrt(0.04 / fanInShape.TotalSize()), seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values from a Gaussian distribution with [mean = 0, stdDev = sqrt(2 / fanIn)]
    ///
    static Parameter HeNormal(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return NormalInitParameter(shape, type, std::sqrt(2.0 / fanInShape.TotalSize()), seed, device, name);
    }

    ///
    /// Create a Parameter initialized with random values from a Gaussian distribution with [mean = 0, stdDev = sqrt(2 / (fanIn + fanOut))]
    ///
    static Parameter GlorotNormal(const NDShape& shape, DataType type = DataType::Float, unsigned long seed = 1, size_t fanOutRank = 1, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice(), const std::wstring& name = L"")
    {
        NDShape fanOutShape = shape.SubShape(0, fanOutRank);
        NDShape fanInShape = shape.SubShape(fanOutRank);
        return NormalInitParameter(shape, type, std::sqrt(2.0 / (fanInShape.TotalSize() + fanOutShape.TotalSize())), seed, device, name);
    }

    ///
    /// DownCast a Variable to a Parameter. Only allowed if the VariableKind is Parameter and throws an exception otherwise.
    ///
    explicit Parameter(const Variable& variable)
        : Variable(variable)
    {
        if (!IsParameter())
            InvalidArgument("A non-parameter Variable being converted to a Parameter");
    }

    ///
    /// Get the value of 'this' parameter
    ///
    NDArrayViewPtr Value() const
    {
        return Variable::Value();
    }

private:
    // Delegated-to constructor; deep-clones 'value' so the Parameter owns an independent copy.
    explicit Parameter(const NDArrayViewPtr& value, const std::wstring& name, const std::wstring& uid)
        : Variable(value->Shape(), VariableKind::Parameter, value->GetDataType(), value->DeepClone(), true, {}, name, uid)
    { }

private:
    // Helper methods for Parameter construction
    CNTK_API static Parameter UniformInitParameter(const NDShape& shape, DataType type, double range, unsigned long seed, const DeviceDescriptor& device, const std::wstring& name);
    CNTK_API static Parameter NormalInitParameter(const NDShape& shape, DataType type, double stdDev, unsigned long seed, const DeviceDescriptor& device, const std::wstring& name);
};

// Implementation note: The Variable type is a value type and not polymorphic in nature.
// However we have a couple of derivatives of the type to extend the base interface and thus we ensure that the derived types do not have additional fields.
// This check is weak in that the derives types may sneak in some additional fields if the base type had some padding at the end, without changing the object size
// but it should be good enough for catching any accidental additon of fields.
static_assert(sizeof(Parameter) == sizeof(Variable), "The Parameter type should not have any data fields beyond what it's base type 'Variable' has.");
///
/// Denotes Constant inputs of a Function.
///
class Constant final : public Variable
{
template < typename T >
friend struct std : : hash ;
2016-09-04 03:31:02 +03:00
template < typename ElementType >
friend Variable GetVariable ( const Microsoft : : MSR : : CNTK : : ComputationNodeBasePtr & node ,
std : : unordered_map < Microsoft : : MSR : : CNTK : : ComputationNodeBasePtr , Variable > & nodeToVariableMap ,
std : : unordered_map < Variable , Variable > & placeholderReplacements ,
std : : unordered_set < FunctionPtr > & allPrimitiveFunctions ) ;
2016-05-31 16:40:40 +03:00
public :
///
/// Contruct a Constant whose initial contents are a copy of the specified value
///
Constant ( const NDArrayViewPtr & value , const std : : wstring & name = L " " )
2016-09-04 03:31:02 +03:00
: Constant ( value , name , Internal : : GenerateUid ( VariableKind : : Constant ) )
2016-06-15 21:58:32 +03:00
{ }
2016-05-31 16:40:40 +03:00
// TODO: Constructor to move a specified NDArrayView value
///
/// Construct a constant of specified shape whose contents are initialized with the specified 'initValue'
///
template < typename ElemType >
2016-08-26 22:14:23 +03:00
Constant ( const NDShape & shape , ElemType initValue , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) , const std : : wstring & name = L " " )
2016-09-04 03:31:02 +03:00
: Variable ( shape , VariableKind : : Constant , AsDataType < ElemType > ( ) , MakeSharedObject < NDArrayView > ( initValue , shape , device ) , false , { } , name , Internal : : GenerateUid ( VariableKind : : Constant ) )
2016-06-15 21:58:32 +03:00
{ }
2016-05-31 16:40:40 +03:00
2016-09-02 16:12:07 +03:00
///
/// Construct a constant of specified shape whose contents are initialized with the specified 'initValue'
///
Constant ( const NDShape & shape , DataType dataType , double initValue , const DeviceDescriptor & device = DeviceDescriptor : : UseDefaultDevice ( ) , const std : : wstring & name = L " " )
2016-09-04 03:31:02 +03:00
: Variable ( shape , VariableKind : : Constant , dataType , MakeSharedObject < NDArrayView > ( initValue , dataType , shape , device ) , false , { } , name , Internal : : GenerateUid ( VariableKind : : Constant ) )
2016-09-02 16:12:07 +03:00
{ }
///
/// Create a scalar constant. The specified value is cast to the specified DataType
///
static inline CNTK::Constant Scalar(CNTK::DataType dataType, double value, const CNTK::DeviceDescriptor& device = CNTK::DeviceDescriptor::CPUDevice())
{
    return Constant({}, dataType, value, device);
}
///
/// Create a scalar constant. The specified value is cast to the specified DataType
///
template <typename ElementType>
static inline CNTK::Constant Scalar(ElementType value, const CNTK::DeviceDescriptor& device = CNTK::DeviceDescriptor::CPUDevice())
{
    return Constant({}, value, device);
}
///
/// DownCast a Variable to a Constant. Only allowed if the VariableKind is Constant and throws an exception otherwise.
///
explicit Constant(const Variable& variable)
    : Variable(variable)
{
    // Reject the conversion at runtime when the wrapped variable is not actually a Constant.
    if (!IsConstant())
        InvalidArgument("A non-constant Variable being converted to a Constant");
}
///
/// Get the value of 'this' Constant
///
NDArrayViewPtr Value() const
{
    return Variable::Value();
}
private:
    // Internal constructor: binds the given uid and initializes the base Variable from a deep clone
    // of 'value' (DeepClone(true) — presumably a read-only clone; confirm against NDArrayView::DeepClone).
    Constant(const NDArrayViewPtr& value, const std::wstring& name, const std::wstring& uid)
        : Variable(value->Shape(), VariableKind::Constant, value->GetDataType(), value->DeepClone(true), false, {}, name, uid)
    {}
} ;
// Implementation note: The Variable type is a value type and not polymorphic in nature.
// However we have a couple of derivatives of the type to extend the base interface and thus we ensure that the derived types do not have additional fields.
// This check is weak in that the derived types may sneak in some additional fields if the base type had some padding at the end, without changing the object size,
// but it should be good enough for catching any accidental addition of fields.
static_assert ( sizeof ( Constant ) = = sizeof ( Variable ) , " The Constant type should not have any data fields beyond what it's base type 'Variable' has. " ) ;
}
namespace std {
    ///
    /// NDShape hashes via the hash of its wide-string representation.
    ///
    template <> struct hash<CNTK::NDShape>
    {
        size_t operator()(const CNTK::NDShape& x) const
        {
            return std::hash<std::wstring>()(x.AsString());
        }
    };

    ///
    /// A Variable hashes by the identity (address) of its shared data-fields object,
    /// so two Variables referring to the same underlying fields hash identically.
    ///
    template <> struct hash<CNTK::Variable>
    {
        size_t operator()(const CNTK::Variable& x) const
        {
            return std::hash<const void*>()(x.m_dataFields.get());
        }
    };

    /// Parameter reuses the Variable hash (it adds no state of its own).
    template <> struct hash<CNTK::Parameter>
    {
        size_t operator()(const CNTK::Parameter& x) const
        {
            return std::hash<CNTK::Variable>()(x);
        }
    };

    /// Constant reuses the Variable hash (it adds no state of its own).
    template <> struct hash<CNTK::Constant>
    {
        size_t operator()(const CNTK::Constant& x) const
        {
            return std::hash<CNTK::Variable>()(x);
        }
    };
}
namespace CNTK
{
///
/// Encapsulates the internal computation state of a Function computed as part of the 'Forward' call on a Function
2016-07-06 11:15:13 +03:00
/// that must be passed to a subsequent 'Backward' call on the same Function to backpropagate gradient values
2016-05-31 16:40:40 +03:00
/// for the same computation backwards through the Function
///
2016-07-09 03:56:44 +03:00
class BackPropState : public std : : enable_shared_from_this < BackPropState >
2016-05-31 16:40:40 +03:00
{
public :
///
/// Returns the Function that 'this' BackPropState belongs to
///
FunctionPtr Function ( ) const { return m_function ; }
2016-06-15 21:58:32 +03:00
virtual ~ BackPropState ( ) { }
2016-05-31 16:40:40 +03:00
protected :
BackPropState ( const FunctionPtr & function ) : m_function ( function ) { }
protected :
FunctionPtr m_function ;
} ;
2016-07-09 03:56:44 +03:00
typedef std : : shared_ptr < BackPropState > BackPropStatePtr ;
2016-06-05 04:26:52 +03:00
2016-05-31 16:40:40 +03:00
///
2016-06-15 21:58:32 +03:00
/// Represents a function (optionally differentiable w.r.t. its inputs)
2016-07-06 11:15:13 +03:00
/// A Function denotes a symbolic computation with zero or more input arguments and one or more outputs.
2016-05-31 16:40:40 +03:00
/// A Function may be primitive or composite (comprised of other function instances whose inputs and outputs are wired together).
2016-06-15 21:58:32 +03:00
/// A Function effectively is a computation graph composed of other primitive Functions (denoting computation) as nodes and Variable objects
/// (denoting data) as the edges and leaves of the graph.
2016-05-31 16:40:40 +03:00
///
2016-07-09 03:56:44 +03:00
class Function : public std : : enable_shared_from_this < Function >
2016-05-31 16:40:40 +03:00
{
2016-06-05 04:26:52 +03:00
friend class CompositeFunction ;
2016-05-31 16:40:40 +03:00
public :
///
/// Computes and stores the values of speficied variables in the 'outputs' map, using provided 'inputs' values corresponding
/// to each leaf variable of the function of VariableKind 'Input'.
/// The variables specified in the 'outputs' map denote the subset of 'this' Function's output variables that the caller wants to obtain values of.
/// Callers may specify the storage to be used for storing the 'outputs' Values or pass null in which case the implementation allocates the actual storage
/// for the 'outputs' for which the ValuePtr mapping was left null by the caller.
/// The optional 'outputsToRetainBackwardStateFor' parameter specifies the subset of the Function's output variables for which gradients will be specified
/// in a subsequent Backward call for backpropagation.
/// The method returns a BackPropState object containing all intermediate variable values needed during backpropagation of gradients from the
2016-07-06 11:15:13 +03:00
/// 'outputsToRetainBackwardStateFor' outputs of the function to any of the inputs of the Function, in a subsequent Backward call.
2016-05-31 16:40:40 +03:00
/// Note that the returned BackPropState instance also stores a reference to the supplied 'inputs' Values and generated 'outputs' Values
/// and the user is responsible for ensuring that the contents of the inputs and outputs are unchanged until after any uses of the BackPropState instance
/// for backpropagating gradients through this function.
///
2016-07-23 02:46:09 +03:00
CNTK_API virtual BackPropStatePtr Forward ( const std : : unordered_map < Variable , ValuePtr > & arguments ,
2016-07-09 03:56:44 +03:00
std : : unordered_map < Variable , ValuePtr > & outputs ,
2016-08-26 22:14:23 +03:00
const DeviceDescriptor & computeDevice = DeviceDescriptor : : UseDefaultDevice ( ) ,
2016-07-09 03:56:44 +03:00
const std : : unordered_set < Variable > & outputsToRetainBackwardStateFor = { } ) = 0 ;
2016-05-31 16:40:40 +03:00
///
/// Backpropagates supplied 'rootGradientValues' for one or more of the output variables of the Function, to produce gradient Values
/// corresponding to the specified set of input variables in 'backPropagatedGradientValuesForInputs'.
/// Callers may specify the actual storage to be used for storing the 'backPropagatedGradientValuesForInputs' Values or leave them to be null
/// in which case the implementation allocates the actual storage for storing the gradients.
/// In case an existing storage is specified, the gradients are aggregated with existing values in the specified storage.
/// The 'state' parameter is an instance of an BackPropState instance obtained from a previous call to the Forward method on 'this; Function for the
/// computation that this gradient backpropagation corresponds to.
///
2016-07-09 03:56:44 +03:00
CNTK_API virtual void Backward ( const BackPropStatePtr & state ,
2016-07-23 02:46:09 +03:00
const std : : unordered_map < Variable , ValuePtr > & rootGradientValues ,
2016-07-09 03:56:44 +03:00
std : : unordered_map < Variable , ValuePtr > & backPropagatedGradientValuesForInputs ) = 0 ;
2016-05-31 16:40:40 +03:00
public :
// Optional overrides
///
/// Destruct this Function.
///
2016-07-09 03:56:44 +03:00
virtual ~ Function ( ) { }
2016-05-31 16:40:40 +03:00
public :
///
/// Returns the name of 'this' variable.
///
2016-07-09 03:56:44 +03:00
const std : : wstring & Name ( ) const { return m_name ; }
2016-05-31 16:40:40 +03:00
///
/// Returns the primitive Function at the root of the graph of Functions underlying this Function.
/// If 'this' Function itself is a primitive function then (this->RootFunction() == this).
///
FunctionPtr RootFunction ( ) const
{
2016-07-09 03:56:44 +03:00
return ( m_rootFunction = = nullptr ) ? const_cast < Function * > ( this ) - > shared_from_this ( ) : m_rootFunction ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns all Input variables of 'this' Function.
///
std : : vector < Variable > Inputs ( ) const
{
2016-07-09 03:56:44 +03:00
return * ( InputsImpl ( ) . get ( ) ) ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns the Output variable of 'this' Function. Throws an exception of 'this' Function has more that one output.
///
Variable Output ( ) const
{
2016-07-09 03:56:44 +03:00
if ( m_outputs . size ( ) > 1 )
2016-07-06 11:15:13 +03:00
RuntimeError ( " A Function instance with more than one output cannot be implicitly converted to a Variable " ) ;
2016-05-31 16:40:40 +03:00
return m_outputs [ 0 ] ;
}
///
/// Returns a vector consisting of all Output variables of 'this' Function.
///
2016-07-09 03:56:44 +03:00
const std : : vector < Variable > & Outputs ( ) const { return m_outputs ; }
2016-05-31 16:40:40 +03:00
///
2016-07-06 11:15:13 +03:00
/// Returns a set comprising of all input variables of 'this' Function's variables that are not of kind 'Parameter' or 'Constant'.
2016-05-31 16:40:40 +03:00
///
std : : unordered_set < Variable > Arguments ( ) const
{
2016-06-05 04:26:52 +03:00
return FilteredInputs < Variable > ( [ ] ( const Variable & var ) {
2016-06-15 21:58:32 +03:00
return ( var . IsInput ( ) | | var . IsOutput ( ) ) ;
2016-06-05 04:26:52 +03:00
} ) ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns the set of all Parameter variables of 'this' Function.
///
std : : unordered_set < Parameter > Parameters ( ) const
{
2016-06-05 04:26:52 +03:00
return FilteredInputs < Parameter > ( [ ] ( const Variable & var ) {
2016-06-15 21:58:32 +03:00
return var . IsParameter ( ) ;
2016-06-05 04:26:52 +03:00
} ) ;
2016-05-31 16:40:40 +03:00
}
///
/// Returns the set of all Constant variables of 'this' Function.
///
std : : unordered_set < Constant > Constants ( ) const
{
2016-06-05 04:26:52 +03:00
return FilteredInputs < Constant > ( [ ] ( const Variable & var ) {
2016-06-15 21:58:32 +03:00
return var . IsConstant ( ) ;
2016-06-05 04:26:52 +03:00
} ) ;
}
///
/// Returns the set of all Constant variables of 'this' Function.
///
2016-09-02 16:12:07 +03:00
std : : unordered_set < Variable > Placeholders ( ) const
2016-06-05 04:26:52 +03:00
{
2016-09-02 16:12:07 +03:00
return FilteredInputs < Variable > ( [ ] ( const Variable & var ) {
2016-06-15 21:58:32 +03:00
return var . IsPlaceholder ( ) ;
2016-06-05 04:26:52 +03:00
} ) ;
}
2016-09-02 16:12:07 +03:00
CNTK_API FunctionPtr ReplacePlaceholders ( const std : : unordered_map < Variable , Variable > & placeholderReplacements ) ;
2016-05-31 16:40:40 +03:00
private :
2016-06-05 04:26:52 +03:00
template < typename VariableType , typename FilterFunction >
std : : unordered_set < VariableType > FilteredInputs ( FilterFunction & & filterFunc ) const
{
std : : unordered_set < VariableType > filteredInputs ;
auto inputs = Inputs ( ) ;
2016-06-15 21:58:32 +03:00
for ( auto inputVar : inputs )
2016-06-05 04:26:52 +03:00
{
2016-06-15 21:58:32 +03:00
if ( filterFunc ( inputVar ) )
filteredInputs . insert ( VariableType ( inputVar ) ) ;
2016-06-05 04:26:52 +03:00
}
return filteredInputs ;
}
2016-07-09 03:56:44 +03:00
CNTK_API std : : shared_ptr < std : : vector < Variable > > InputsImpl ( ) const ;
2016-09-02 16:12:07 +03:00
virtual void ReplacePlaceholders ( const std : : unordered_map < Variable , Variable > & placeholderReplacements ,
2016-07-09 03:56:44 +03:00
std : : unordered_set < const Function * > & visitedFunctions ,
2016-09-02 16:12:07 +03:00
std : : unordered_set < Variable > & replacedPlaceholders ) ;
2016-06-05 04:26:52 +03:00
2016-05-31 16:40:40 +03:00
// Disallow copy and move construction and assignment
2016-06-15 21:58:32 +03:00
Function ( const Function & ) = delete ; Function ( Function & & ) = delete ; Function & operator = ( const Function & ) = delete ; Function & operator = ( Function & & ) = delete ;
2016-05-31 16:40:40 +03:00
protected :
///
/// Protected constructor for derived 'Function' types to specify the actual input and output variables for the Function instance.
///
Function ( const std : : vector < Variable > & inputs , const std : : vector < Variable > & outputs , const FunctionPtr & rootFunction = nullptr , const std : : wstring & name = L " " )
2016-07-09 03:56:44 +03:00
: m_rootFunction ( rootFunction ) , m_name ( name )
2016-05-31 16:40:40 +03:00
{
2016-06-15 21:58:32 +03:00
for ( auto inputVar : inputs )
2016-05-31 16:40:40 +03:00
{
2016-07-09 03:56:44 +03:00
m_inputs . push_back ( inputVar ) ;
2016-05-31 16:40:40 +03:00
2016-06-15 21:58:32 +03:00
if ( ! inputVar . IsInput ( ) & &
! inputVar . IsOutput ( ) & &
! inputVar . IsParameter ( ) & &
! inputVar . IsConstant ( ) & &
! inputVar . IsPlaceholder ( ) )
2016-05-31 16:40:40 +03:00
{
InvalidArgument ( " Function input has invalid VariableKind! " ) ;
}
}
2016-07-06 11:15:13 +03:00
std : : unordered_set < Variable > uniqueOutputs ;
2016-06-15 21:58:32 +03:00
for ( auto outputVar : outputs )
2016-05-31 16:40:40 +03:00
{
2016-07-06 11:15:13 +03:00
if ( uniqueOutputs . find ( outputVar ) ! = uniqueOutputs . end ( ) )
2016-05-31 16:40:40 +03:00
RuntimeError ( " Same variable appears multiple times in the outputs vector passed to Function constructor " ) ;
2016-08-31 08:50:25 +03:00
m_outputs . push_back ( outputVar ) ;
uniqueOutputs . insert ( outputVar ) ;
2016-05-31 16:40:40 +03:00
}
}
private :
2016-07-09 03:56:44 +03:00
std : : vector < Variable > m_inputs ;
std : : vector < Variable > m_outputs ;
2016-05-31 16:40:40 +03:00
2016-07-06 11:15:13 +03:00
FunctionPtr m_rootFunction ; // nullptr for primitive function instances
2016-07-09 03:56:44 +03:00
std : : wstring m_name ;
2016-05-31 16:40:40 +03:00
} ;
///
2016-07-22 23:46:19 +03:00
/// Create an instance of the CNTK built-in elementwise negate operation with the specified input operand.
2016-05-31 16:40:40 +03:00
///
2016-07-22 23:46:19 +03:00
CNTK_API FunctionPtr Negate ( const Variable & operand , const std : : wstring & name = L " " ) ;
2016-08-31 08:50:25 +03:00
///
/// Unary negation operator corresponding to the Negate operation
///
inline FunctionPtr operator - ( const Variable & operand )
{
return Negate ( operand ) ;
}
2016-07-22 23:46:19 +03:00
///
/// Create an instance of the CNTK built-in elementwise sigmoid operation with the specified input operand.
///
CNTK_API FunctionPtr Sigmoid ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise tanh operation with the specified input operand.
///
CNTK_API FunctionPtr Tanh ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise linear rectifier operation with the specified input operand.
///
CNTK_API FunctionPtr ReLU ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise exp operation with the specified input operand.
///
CNTK_API FunctionPtr Exp ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise log operation with the specified input operand.
///
CNTK_API FunctionPtr Log ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise square operation with the specified input operand.
///
CNTK_API FunctionPtr Square ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise square-root operation with the specified input operand.
///
CNTK_API FunctionPtr Sqrt ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise round operation with the specified input operand.
///
CNTK_API FunctionPtr Round ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise floor operation with the specified input operand.
///
CNTK_API FunctionPtr Floor ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise ceil operation with the specified input operand.
///
CNTK_API FunctionPtr Ceil ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise abs operation with the specified input operand.
///
CNTK_API FunctionPtr Abs ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in elementwise reciprocal operation with the specified input operand.
///
CNTK_API FunctionPtr Reciprocal ( const Variable & operand , const std : : wstring & name = L " " ) ;
///
/// Create an instance of the CNTK built-in softmax operation on specified tensor input operand
///
CNTK_API FunctionPtr Softmax ( const Variable & operand , const std : : wstring & name = L " " ) ;
2016-05-31 16:40:40 +03:00
///
/// Create an instance of the CNTK built-in hardmax operation on specified tensor input operand
///
CNTK_API FunctionPtr Hardmax(const Variable& operand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in transpose dimensions operation on specified tensor input operand
///
CNTK_API FunctionPtr TransposeAxes(const Variable& operand, const Axis& axis1, const Axis& axis2, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in transpose operation on the specified 1D or 2D input operand
///
CNTK_API FunctionPtr Transpose(const Variable& operand, const std::wstring& name = L"");

///
/// Create an instance of the slice operation on specified tensor input operand
///
CNTK_API FunctionPtr Slice(const Variable& operand, const Axis& axis, int beginIndex, int endIndex, const std::wstring& name = L"");

///
/// Create an instance of the dropout operation on specified tensor input operand
///
// TODO: The initial random seed should be specifiable
CNTK_API FunctionPtr Dropout(const Variable& operand, double dropoutRate, const std::wstring& name = L"");

///
/// Create an instance of the reshape operation on specified tensor input operand
///
CNTK_API FunctionPtr Reshape(const Variable& operand, const NDShape& newShape, const std::wstring& name = L"");
///
/// Create an instance of the CNTK built-in elementwise tensor addition operation with the specified input operands.
///
CNTK_API FunctionPtr Plus ( const Variable & leftOperand , const Variable & rightOperand , const std : : wstring & name = L " " ) ;
2016-08-31 08:50:25 +03:00
///
/// Binary addition operator corresponding to the Plus operation
///
inline FunctionPtr operator + ( const Variable & leftOperand , const Variable & rightOperand )
{
return Plus ( leftOperand , rightOperand ) ;
}
2016-05-31 16:40:40 +03:00
///
2016-07-22 23:46:19 +03:00
/// Create an instance of the CNTK built-in elementwise tensor subtraction operation with the specified input operands.
2016-05-31 16:40:40 +03:00
///
2016-07-22 23:46:19 +03:00
CNTK_API FunctionPtr Minus ( const Variable & leftOperand , const Variable & rightOperand , const std : : wstring & name = L " " ) ;
2016-08-31 08:50:25 +03:00
///
/// Binary minus operator corresponding to the Minus operation
///
inline FunctionPtr operator - ( const Variable & leftOperand , const Variable & rightOperand )
{
return Minus ( leftOperand , rightOperand ) ;
}
///
/// Create an instance of the CNTK built-in elementwise multiplication operation on specified tensor input operands.
///
CNTK_API FunctionPtr ElementTimes(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise division operation on specified tensor input operands.
///
CNTK_API FunctionPtr ElementDivide(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise equality comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr Equal(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise not-equal comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr NotEqual(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise less than comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr Less(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise less than or equal to comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr LessEqual(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise greater than comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr Greater(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise greater than or equal to comparison operation on specified tensor input operands.
///
CNTK_API FunctionPtr GreaterEqual(const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in matrix multiplication operation with the specified input operands.
/// TODO: Specify the constraints on the shapes of the operands.
///
CNTK_API FunctionPtr Times(const Variable& leftOperand, const Variable& rightOperand, size_t outputRank = 1, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in matrix multiplication operation with the transpose of the left input operand
/// and the specified right operand. Only accepts left operands of ranks 1 or 2.
/// TODO: Specify the constraints on the shapes of the operands.
///
CNTK_API FunctionPtr TransposeTimes(const Variable& leftOperand, const Variable& rightOperand, size_t outputRank = 1, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in operation to compute squared-error for specified input operands.
///
CNTK_API FunctionPtr SquaredError(const Variable& prediction, const Variable& targets, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in operation to compute cross-entropy with softmax for specified input operands.
///
CNTK_API FunctionPtr CrossEntropyWithSoftmax(const Variable& prediction, const Variable& labels, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in operation for computing the classification prediction error for specified operands.
///
CNTK_API FunctionPtr ClassificationError(const Variable& prediction, const Variable& labels, const std::wstring& name = L"");
2016-06-05 04:26:52 +03:00
///
/// Create an instance of the CNTK built-in operation for getting the past value along the lone dynamic axis of the specified operand.
/// Throws an exception of the operand has more than one dynamic axis.
///
2016-09-02 16:12:07 +03:00
CNTK_API FunctionPtr PastValue ( const Variable & operand , const Variable & initialState , size_t offset = 1 , const std : : wstring & name = L " " ) ;
2016-06-05 04:26:52 +03:00
2016-09-02 16:12:07 +03:00
///
/// Create an instance of the CNTK built-in operation for getting the past value along the lone dynamic axis of the specified operand.
/// This overload uses an initial state value of 0.
/// Throws an exception of the operand has more than one dynamic axis.
///
inline FunctionPtr PastValue ( const Variable & operand , size_t offset = 1 , const std : : wstring & name = L " " )
{
2016-09-04 03:31:02 +03:00
static const auto defaultInitialState = Constant : : Scalar ( 0.0f ) ;
return PastValue ( operand , defaultInitialState , offset , name ) ;
2016-09-02 16:12:07 +03:00
}
2016-06-05 04:26:52 +03:00
///
/// Create an instance of the CNTK built-in operation for getting the future value along the lone dynamic axis of the specified operand.
/// Throws an exception of the operand has more than one dynamic axis.
///
2016-09-02 16:12:07 +03:00
CNTK_API FunctionPtr FutureValue ( const Variable & operand , const Variable & initialState , size_t offset = 1 , const std : : wstring & name = L " " ) ;
2016-06-05 04:26:52 +03:00
2016-09-02 16:12:07 +03:00
///
/// Create an instance of the CNTK built-in operation for getting the future value along the lone dynamic axis of the specified operand.
/// This overload uses an initial state value of 0.
/// Throws an exception of the operand has more than one dynamic axis.
///
inline FunctionPtr FutureValue ( const Variable & operand , size_t offset = 1 , const std : : wstring & name = L " " )
{
2016-09-04 03:31:02 +03:00
static const auto defaultInitialState = Constant : : Scalar ( 0.0f ) ;
return FutureValue ( operand , defaultInitialState , offset , name ) ;
2016-09-02 16:12:07 +03:00
}
2016-06-05 04:26:52 +03:00
///
/// Create an instance of the CNTK built-in sum reduction operation on specified tensor input operand along all the axes
///
CNTK_API FunctionPtr ReduceSum(const Variable& operand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in sum reduction operation on specified tensor input operand along the specified axis
///
CNTK_API FunctionPtr ReduceSum(const Variable& operand, const Axis& axis, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in LogSum reduction operation on specified tensor input operand along the specified axis
///
CNTK_API FunctionPtr ReduceLogSum(const Variable& operand, const Axis& axis, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in Mean reduction operation on specified tensor input operand along the specified axis
///
CNTK_API FunctionPtr ReduceMean(const Variable& operand, const Axis& axis, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in Max reduction operation on specified tensor input operand along the specified axis
///
CNTK_API FunctionPtr ReduceMax(const Variable& operand, const Axis& axis, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in Min reduction operation on specified tensor input operand along the specified axis
///
CNTK_API FunctionPtr ReduceMin(const Variable& operand, const Axis& axis, const std::wstring& name = L"");

///
/// Per dimension mean-variance normalization of the specified input operand.
///
CNTK_API FunctionPtr PerDimMeanVarianceNormalize(const Variable& operand, const NDArrayViewPtr& mean, const NDArrayViewPtr& invStdDev, const std::wstring& name = L"");

///
/// TODO:
///
CNTK_API FunctionPtr Convolution(const Variable& convolutionMap,
                                 const Variable& operand,
                                 const NDShape& strides = {1},
                                 const std::vector<bool>& sharing = {true},
                                 const std::vector<bool>& autoPadding = {true},
                                 const NDShape& lowerPad = {0},
                                 const NDShape& upperPad = {0},
                                 bool transpose = false,
                                 size_t maxTempMemSizeInSamples = 0,
                                 const std::wstring& name = L"");

///
/// TODO:
///
enum class PoolingType
{
    Max,
    Average,
};

///
/// TODO:
///
CNTK_API FunctionPtr Pooling(const Variable& operand,
                             PoolingType poolingType,
                             const NDShape& poolingWindowShape,
                             const NDShape& strides = {1},
                             const std::vector<bool>& autoPadding = {false},
                             const NDShape& lowerPad = {0},
                             const NDShape& upperPad = {0},
                             const std::wstring& name = L"");

///
/// TODO:
///
// TODO: Do we need a separate "spatial" parameter or can it be inferred from the tensor dimensions
CNTK_API FunctionPtr BatchNormalization(const Variable& operand,
                                        const Variable& scale,
                                        const Variable& bias,
                                        const Variable& runningMean,
                                        const Variable& runningInvStd,
                                        bool spatial,
                                        double normalizationTimeConstant = 0,
                                        double blendTimeConstant = 0,
                                        double epsilon = 0.00001,
                                        bool useCuDNNEngine = false,
                                        const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise clip operation on the tensor operand
///
CNTK_API FunctionPtr Clip(const Variable& operand, const Variable& min, const Variable& max, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in elementwise choice operation using a condition tensor for specified tensor operands.
///
CNTK_API FunctionPtr ElementSelect(const Variable& condition, const Variable& leftOperand, const Variable& rightOperand, const std::wstring& name = L"");

///
/// Create an instance of the CNTK built-in splice operation to splice together all the specified tensor operands into a single output tensor
///
CNTK_API FunctionPtr Splice(const std::vector<Variable>& operands, size_t axis, const std::wstring& name = L"");

///
/// Create a new Function instance which just combines the outputs of the specified list of 'operands' Functions such that the 'Outputs' of the
/// new 'Function' are union of the 'Outputs' of each of the specified 'operands' Functions.
/// E.g. When creating a classification model, typically the CrossEntropy loss Function and the ClassificationError Function comprise the two roots
/// of the computation graph which can be "Combine"d to create a single Function with 2 outputs; viz. CrossEntropy loss and ClassificationError output.
///
CNTK_API FunctionPtr Combine(const std::vector<FunctionPtr>& operands, const std::wstring& name = L"");
2016-08-31 08:50:25 +03:00
///
/// Built-in operations over sequence operands (operating along the sequence axis).
///
namespace Sequence
{
    CNTK_API FunctionPtr IsFirst(const Variable& operand, const std::wstring& name = L"");
    CNTK_API FunctionPtr IsLast(const Variable& operand, const std::wstring& name = L"");

    CNTK_API FunctionPtr First(const Variable& operand, const std::wstring& name = L"");
    CNTK_API FunctionPtr Last(const Variable& operand, const std::wstring& name = L"");

    CNTK_API FunctionPtr Where(const Variable& condition, const std::wstring& name = L"");
    CNTK_API FunctionPtr Gather(const Variable& operand, const Variable& condition, const std::wstring& name = L"");
    CNTK_API FunctionPtr Scatter(const Variable& operand, const Variable& condition, const std::wstring& name = L"");

    CNTK_API FunctionPtr BroadcastAs(const Variable& operand, const Variable& broadcastAs, const std::wstring& name = L"");
}
2016-07-18 02:44:44 +03:00
///
/// Load a legacy CNTK v1 format model
///
2016-09-02 16:12:07 +03:00
CNTK_API FunctionPtr LoadLegacyModel ( DataType dataType , const std : : wstring & modelFile , const DeviceDescriptor & computeDevice = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-07-13 10:37:06 +03:00
2016-07-18 02:44:44 +03:00
///
/// Save a Composite Function instance to a file in CNTK legacy model format
///
CNTK_API void SaveAsLegacyModel ( const FunctionPtr & rootFunction , const std : : wstring & modelFile ) ;
2016-07-13 10:37:06 +03:00
///
/// A serializable value represents one of:
/// a) Boolean
/// b) Signed long integer
/// c) Single and double precision floating point values
/// d) NDShape
2016-08-26 22:14:23 +03:00
/// e) Axis
/// f) vector<DictionaryValue>
/// g) Dictionary
/// h) NDArrayView
2016-07-13 10:37:06 +03:00
///
2016-07-23 02:46:09 +03:00
/// TODO: We need to have native support for DictionaryValue<vector> and DictionaryValue<NDArrayView>.
class DictionaryValue final
2016-07-13 10:37:06 +03:00
{
public :
enum class Type : unsigned int
{
None ,
Bool ,
SizeT ,
Float ,
Double ,
2016-07-23 02:46:09 +03:00
String ,
2016-07-13 10:37:06 +03:00
NDShape ,
2016-08-26 22:14:23 +03:00
Axis ,
2016-07-23 02:46:09 +03:00
Vector ,
Dictionary ,
2016-08-18 15:33:38 +03:00
NDArrayView ,
2016-07-13 10:37:06 +03:00
} ;
static const char * TypeName ( Type type )
{
switch ( type )
{
case Type : : None :
return " None " ;
case Type : : Bool :
return " Bool " ;
case Type : : SizeT :
return " SizeT " ;
case Type : : Float :
return " Float " ;
case Type : : Double :
return " Double " ;
2016-07-23 02:46:09 +03:00
case Type : : String :
return " String " ;
2016-07-13 10:37:06 +03:00
case Type : : NDShape :
return " NDShape " ;
2016-08-26 22:14:23 +03:00
case Type : : Axis :
return " Axis " ;
2016-07-13 10:37:06 +03:00
case Type : : Vector :
return " Vector " ;
2016-07-23 02:46:09 +03:00
case Type : : Dictionary :
return " Dictionary " ;
2016-08-18 15:33:38 +03:00
case Type : : NDArrayView :
return " NDArrayView " ;
2016-07-13 10:37:06 +03:00
default :
LogicError ( " Unknown DictionaryValue::Type " ) ;
}
}
public :
DictionaryValue ( ) : m_valueType ( Type : : None )
{
}
DictionaryValue ( bool value ) : m_valueType ( GetValueType < bool > ( ) )
{
m_data . m_boolean = value ;
}
DictionaryValue ( size_t value ) : m_valueType ( GetValueType < size_t > ( ) )
{
m_data . m_sizeT = value ;
}
DictionaryValue ( float value ) : m_valueType ( GetValueType < float > ( ) )
{
m_data . m_float = value ;
}
DictionaryValue ( double value ) : m_valueType ( GetValueType < double > ( ) )
{
m_data . m_double = value ;
}
2016-07-23 02:46:09 +03:00
DictionaryValue ( const wchar_t * value )
: DictionaryValue ( std : : wstring ( value ) )
{ }
2016-08-02 17:52:12 +03:00
// Due to SWIG we had to flatten this template for vector<DictionaryValue>
DictionaryValue ( const std : : vector < CNTK : : DictionaryValue > & value ) : m_valueType ( GetValueType < std : : vector < CNTK : : DictionaryValue > > ( ) )
{
AllocateDataPtr ( value ) ;
}
2016-07-13 10:37:06 +03:00
template < typename T >
DictionaryValue ( const T & value ) : m_valueType ( GetValueType < T > ( ) )
{
2016-09-01 15:07:03 +03:00
static_assert ( ( std : : is_same < T , NDShape > : : value | |
2016-08-26 22:14:23 +03:00
std : : is_same < T , Axis > : : value | |
2016-08-31 08:50:25 +03:00
std : : is_same < T , std : : wstring > : : value | |
std : : is_same < T , std : : vector < DictionaryValue > > : : value | |
std : : is_same < T , Dictionary > : : value | |
2016-09-01 15:07:03 +03:00
std : : is_same < T , NDArrayView > : : value ) ,
2016-07-13 10:37:06 +03:00
" Unsupported ValueType " ) ;
AllocateDataPtr ( value ) ;
}
DictionaryValue ( const DictionaryValue & other ) : m_valueType ( Type : : Bool )
{
// The m_valueType must have been set to a non-ptr type to prevent an attempt to interpret
// the underlying underlying uninitialized value as a ptr and free it.
* this = other ;
}
2016-08-18 15:33:38 +03:00
DictionaryValue ( DictionaryValue & & other ) : m_valueType ( Type : : Bool )
{
// The m_valueType must have been set to a non-ptr type to prevent an attempt to interpret
// the underlying underlying uninitialized value as a ptr and free it.
* this = std : : move ( other ) ;
}
2016-07-13 10:37:06 +03:00
DictionaryValue & operator = ( const DictionaryValue & other )
{
if ( this ! = & other )
{
FreeDataPtr ( ) ;
m_valueType = other . m_valueType ;
m_data = other . m_data ;
2016-07-23 02:46:09 +03:00
if ( other . m_valueType = = Type : : String )
2016-08-26 22:14:23 +03:00
AllocateDataPtr ( other . Value < std : : wstring > ( ) ) ;
2016-07-23 02:46:09 +03:00
else if ( other . m_valueType = = Type : : NDShape )
2016-08-26 22:14:23 +03:00
AllocateDataPtr ( other . Value < NDShape > ( ) ) ;
else if ( other . m_valueType = = Type : : Axis )
AllocateDataPtr ( other . Value < Axis > ( ) ) ;
2016-07-13 10:37:06 +03:00
else if ( other . m_valueType = = Type : : Vector )
2016-08-26 22:14:23 +03:00
AllocateDataPtr ( other . Value < std : : vector < DictionaryValue > > ( ) ) ;
2016-07-23 02:46:09 +03:00
else if ( other . m_valueType = = Type : : Dictionary )
2016-08-26 22:14:23 +03:00
AllocateDataPtr ( other . Value < Dictionary > ( ) ) ;
2016-08-18 15:33:38 +03:00
else if ( other . m_valueType = = Type : : NDArrayView )
2016-08-26 22:14:23 +03:00
AllocateDataPtr ( other . Value < NDArrayView > ( ) ) ;
2016-07-13 10:37:06 +03:00
}
return * this ;
}
2016-08-18 15:33:38 +03:00
DictionaryValue & operator = ( DictionaryValue & & other )
{
FreeDataPtr ( ) ;
m_valueType = other . m_valueType ;
m_data = other . m_data ;
if ( other . m_valueType = = Type : : String | |
other . m_valueType = = Type : : NDShape | |
2016-08-26 22:14:23 +03:00
other . m_valueType = = Type : : Axis | |
2016-08-18 15:33:38 +03:00
other . m_valueType = = Type : : Vector | |
other . m_valueType = = Type : : Dictionary | |
other . m_valueType = = Type : : NDArrayView )
{
other . m_data . m_ptr = nullptr ;
2016-07-13 10:37:06 +03:00
}
2016-08-18 15:33:38 +03:00
other . m_valueType = Type : : None ;
2016-07-13 10:37:06 +03:00
return * this ;
}
~ DictionaryValue ( )
{
FreeDataPtr ( ) ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , bool > : : value > : : type * = nullptr >
2016-08-26 22:14:23 +03:00
const T & Value ( ) const
2016-07-13 10:37:06 +03:00
{
VerifyType < T > ( ) ;
return m_data . m_boolean ;
}
2016-08-26 22:14:23 +03:00
template < typename T , typename std : : enable_if < std : : is_same < T , bool > : : value > : : type * = nullptr >
T & Value ( )
{
VerifyType < T > ( ) ;
return m_data . m_boolean ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , size_t > : : value > : : type * = nullptr >
const T & Value ( ) const
{
VerifyType < T > ( ) ;
return m_data . m_sizeT ;
}
2016-07-13 10:37:06 +03:00
template < typename T , typename std : : enable_if < std : : is_same < T , size_t > : : value > : : type * = nullptr >
2016-08-26 22:14:23 +03:00
T & Value ( )
2016-07-13 10:37:06 +03:00
{
VerifyType < T > ( ) ;
return m_data . m_sizeT ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , float > : : value > : : type * = nullptr >
2016-08-26 22:14:23 +03:00
const T & Value ( ) const
{
VerifyType < T > ( ) ;
return m_data . m_float ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , float > : : value > : : type * = nullptr >
T & Value ( )
2016-07-13 10:37:06 +03:00
{
VerifyType < T > ( ) ;
return m_data . m_float ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , double > : : value > : : type * = nullptr >
2016-08-26 22:14:23 +03:00
const T & Value ( ) const
{
VerifyType < T > ( ) ;
return m_data . m_double ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , double > : : value > : : type * = nullptr >
T & Value ( )
2016-07-13 10:37:06 +03:00
{
VerifyType < T > ( ) ;
return m_data . m_double ;
}
2016-07-23 02:46:09 +03:00
template < typename T , typename std : : enable_if < std : : is_same < T , NDShape > : : value | |
2016-08-26 22:14:23 +03:00
std : : is_same < T , Axis > : : value | |
2016-07-23 02:46:09 +03:00
std : : is_same < T , std : : wstring > : : value | |
std : : is_same < T , std : : vector < DictionaryValue > > : : value | |
2016-08-18 15:33:38 +03:00
std : : is_same < T , Dictionary > : : value | |
std : : is_same < T , NDArrayView > : : value > : : type * = nullptr >
2016-08-26 22:14:23 +03:00
const T & Value ( ) const
{
VerifyType < T > ( ) ;
return * ( reinterpret_cast < T * > ( m_data . m_ptr ) ) ;
}
template < typename T , typename std : : enable_if < std : : is_same < T , NDShape > : : value | |
std : : is_same < T , Axis > : : value | |
std : : is_same < T , std : : wstring > : : value | |
std : : is_same < T , std : : vector < DictionaryValue > > : : value | |
std : : is_same < T , Dictionary > : : value | |
std : : is_same < T , NDArrayView > : : value > : : type * = nullptr >
T & Value ( )
2016-07-13 10:37:06 +03:00
{
VerifyType < T > ( ) ;
return * ( reinterpret_cast < T * > ( m_data . m_ptr ) ) ;
}
bool HasValue ( ) const
{
return m_valueType ! = Type : : None ;
}
Type ValueType ( ) const
{
return m_valueType ;
}
2016-08-18 15:33:38 +03:00
CNTK_API bool operator = = ( const DictionaryValue & other ) const ;
CNTK_API bool operator ! = ( const DictionaryValue & other ) const ;
friend CNTK_API std : : istream & operator > > ( std : : istream & stream , DictionaryValue & us ) ;
friend CNTK_API std : : ostream & operator < < ( std : : ostream & stream , const DictionaryValue & us ) ;
2016-07-13 10:37:06 +03:00
private :
template < typename T >
static Type GetValueType ( )
{
2016-09-01 15:07:03 +03:00
static_assert ( ( std : : is_same < T , bool > : : value | |
2016-07-13 10:37:06 +03:00
std : : is_same < T , size_t > : : value | |
std : : is_same < T , float > : : value | |
std : : is_same < T , double > : : value | |
2016-07-31 02:16:41 +03:00
std : : is_same < T , std : : wstring > : : value | |
2016-07-13 10:37:06 +03:00
std : : is_same < T , NDShape > : : value | |
2016-08-26 22:14:23 +03:00
std : : is_same < T , Axis > : : value | |
2016-07-23 02:46:09 +03:00
std : : is_same < T , std : : vector < DictionaryValue > > : : value | |
2016-08-18 15:33:38 +03:00
std : : is_same < T , Dictionary > : : value | |
2016-09-01 15:07:03 +03:00
std : : is_same < T , NDArrayView > : : value ) ,
2016-07-13 10:37:06 +03:00
" Unsupported ValueType " ) ;
if ( std : : is_same < T , bool > : : value ) return Type : : Bool ;
if ( std : : is_same < T , size_t > : : value ) return Type : : SizeT ;
if ( std : : is_same < T , float > : : value ) return Type : : Float ;
if ( std : : is_same < T , double > : : value ) return Type : : Double ;
2016-07-23 02:46:09 +03:00
if ( std : : is_same < T , std : : wstring > : : value ) return Type : : String ;
2016-07-13 10:37:06 +03:00
if ( std : : is_same < T , NDShape > : : value ) return Type : : NDShape ;
2016-08-26 22:14:23 +03:00
if ( std : : is_same < T , Axis > : : value ) return Type : : Axis ;
2016-07-13 10:37:06 +03:00
if ( std : : is_same < T , std : : vector < DictionaryValue > > : : value ) return Type : : Vector ;
2016-07-23 02:46:09 +03:00
if ( std : : is_same < T , Dictionary > : : value ) return Type : : Dictionary ;
2016-08-18 15:33:38 +03:00
if ( std : : is_same < T , NDArrayView > : : value ) return Type : : NDArrayView ;
2016-07-13 10:37:06 +03:00
}
template < typename T >
void VerifyType ( ) const
{
if ( GetValueType < T > ( ) ! = m_valueType )
RuntimeError ( " Reading a DictionaryValue as the wrong type; Reading as type %s when actual type is %s " , typeid ( T ) . name ( ) , DictionaryValue : : TypeName ( m_valueType ) ) ;
}
template < typename T >
2016-07-23 02:46:09 +03:00
CNTK_API void AllocateDataPtr ( const T & value ) ;
2016-07-13 10:37:06 +03:00
template < typename T >
2016-07-23 02:46:09 +03:00
CNTK_API void FreePtrAsType ( ) ;
2016-07-13 10:37:06 +03:00
2016-07-23 02:46:09 +03:00
CNTK_API void FreeDataPtr ( )
{
if ( m_valueType = = Type : : String )
FreePtrAsType < std : : wstring > ( ) ;
else if ( m_valueType = = Type : : NDShape )
FreePtrAsType < NDShape > ( ) ;
2016-08-26 22:14:23 +03:00
else if ( m_valueType = = Type : : Axis )
FreePtrAsType < Axis > ( ) ;
2016-07-23 02:46:09 +03:00
else if ( m_valueType = = Type : : Vector )
FreePtrAsType < std : : vector < DictionaryValue > > ( ) ;
else if ( m_valueType = = Type : : Dictionary )
FreePtrAsType < Dictionary > ( ) ;
2016-08-18 15:33:38 +03:00
else if ( m_valueType = = Type : : Dictionary )
FreePtrAsType < NDArrayView > ( ) ;
2016-07-23 02:46:09 +03:00
}
2016-07-13 10:37:06 +03:00
Type m_valueType ;
union ValueData
{
bool m_boolean ;
size_t m_sizeT ;
float m_float ;
double m_double ;
void * m_ptr ;
} m_data ;
const size_t version = 1 ;
} ;
///
/// A type denoting a dictionary (keyed by Unicode strings) of serializable values (dynamically typed).
///
2016-07-23 02:46:09 +03:00
class Dictionary final
2016-07-13 10:37:06 +03:00
{
2016-07-23 02:46:09 +03:00
friend inline void AddConfigString ( std : : wstringstream & s , const DictionaryValue & value , size_t numIndentationSpaces ) ;
friend class CompositeMinibatchSource ;
2016-07-13 10:37:06 +03:00
public :
2016-07-23 02:46:09 +03:00
CNTK_API Dictionary ( ) ;
CNTK_API ~ Dictionary ( ) ;
2016-07-13 10:37:06 +03:00
2016-07-23 02:46:09 +03:00
CNTK_API Dictionary ( const Dictionary & ) ;
CNTK_API Dictionary & operator = ( const Dictionary & ) ;
2016-07-13 10:37:06 +03:00
2016-07-23 02:46:09 +03:00
CNTK_API Dictionary ( Dictionary & & other ) ;
CNTK_API Dictionary & operator = ( Dictionary & & other ) ;
2016-07-13 10:37:06 +03:00
2016-07-23 02:46:09 +03:00
CNTK_API DictionaryValue & operator [ ] ( const wchar_t * key ) ;
2016-07-13 10:37:06 +03:00
DictionaryValue & operator [ ] ( const std : : wstring & key )
{
return operator [ ] ( key . c_str ( ) ) ;
}
2016-07-23 02:46:09 +03:00
CNTK_API DictionaryValue operator [ ] ( const wchar_t * key ) const ;
2016-07-13 10:37:06 +03:00
DictionaryValue operator [ ] ( const std : : wstring & key ) const
{
return operator [ ] ( key . c_str ( ) ) ;
}
2016-07-23 02:46:09 +03:00
CNTK_API bool Contains ( const wchar_t * key ) const ;
2016-07-13 10:37:06 +03:00
bool Contains ( const std : : wstring & key ) const
{
return Contains ( key . c_str ( ) ) ;
}
2016-08-18 15:33:38 +03:00
CNTK_API bool operator = = ( const Dictionary & other ) const ;
CNTK_API bool operator ! = ( const Dictionary & other ) const ;
2016-07-13 10:37:06 +03:00
2016-08-18 15:33:38 +03:00
friend CNTK_API std : : istream & operator > > ( std : : istream & stream , Dictionary & us ) ;
friend CNTK_API std : : ostream & operator < < ( std : : ostream & stream , const Dictionary & us ) ;
2016-07-13 10:37:06 +03:00
private :
2016-07-23 02:46:09 +03:00
std : : shared_ptr < std : : unordered_map < std : : wstring , DictionaryValue > > m_dictionaryData ;
2016-07-13 10:37:06 +03:00
const size_t version = 1 ;
} ;
///
/// Abstraction for learning a subset of parameters of a learnable function using first order gradient values
/// For e.g momentum, AdaGrad, RMSProp etc. are different types of learners with their own algorithms for
/// learning parameter values using first order gradients.
///
class Learner : public std : : enable_shared_from_this < Learner >
{
public :
//
// Method to update the parameters associated with this learner. By returning false, this method indicates that
// learning has stopped for all of the parameters associated with this learner
//
2016-07-23 02:46:09 +03:00
CNTK_API virtual bool Update ( const std : : unordered_map < Parameter , NDArrayViewPtr > & gradientValues , size_t trainingSampleCount ) = 0 ;
2016-07-13 10:37:06 +03:00
///
/// Returns the set of parameters associated with this learner.
///
2016-07-23 02:46:09 +03:00
const std : : unordered_set < Parameter > & Parameters ( ) const { return m_parameters ; }
2016-07-13 10:37:06 +03:00
///
/// Optionally overridable method to checkpoint the learner's state.
///
2016-07-23 02:46:09 +03:00
// TODO: move the following two methods into ISerializable interface, make
// Learner (and all other entities that need checkpointing capability) implement it.
CNTK_API virtual Dictionary GetCheckpointState ( ) const { return Dictionary ( ) ; }
2016-07-13 10:37:06 +03:00
///
/// Optionally overridable method to restore the learner's state from a previous checkpoint.
///
2016-07-23 02:46:09 +03:00
CNTK_API virtual void RestoreFromCheckpoint ( const Dictionary & /*checkpoint*/ ) { }
2016-07-13 10:37:06 +03:00
2016-08-21 13:49:03 +03:00
///
/// Destruct this Learner.
///
2016-07-23 02:46:09 +03:00
virtual ~ Learner ( ) { }
2016-07-13 10:37:06 +03:00
protected :
2016-07-23 02:46:09 +03:00
Learner ( const std : : unordered_set < Parameter > & parameters )
2016-07-13 10:37:06 +03:00
: m_parameters ( parameters )
2016-07-23 02:46:09 +03:00
{ }
2016-07-13 10:37:06 +03:00
2016-07-23 02:46:09 +03:00
std : : unordered_set < Parameter > m_parameters ;
2016-07-13 10:37:06 +03:00
} ;
2016-08-18 15:33:38 +03:00
///
/// A collection of key-value pairs that represents training parameter schedule in
/// terms of the number of processed samples.
/// This class provides a number of convenience constructors to allow easy conversion
/// from a single value, a vector of values and a list of pairs to the training schedule.
///
template < typename T >
class TrainingParameterSchedule
{
public :
///
/// Create a schedule with a constant parameter value.
///
TrainingParameterSchedule ( T value )
: m_schedule ( { std : : make_pair ( 0 , value ) } ) , m_unit ( 1 )
{ }
///
/// Create a schedule where the parameter changes its value every 'unit' samples:
/// schedule[0] is used for the first 'unit' samples, schedule[1] -- for the second,
/// and so on. The last value is then used repeatedly until the end of training.
///
TrainingParameterSchedule ( const std : : vector < T > & schedule , size_t unit = 1 )
: m_unit ( unit )
{
// TODO: 0 will be used to mean "the entire sweep"
if ( unit = = 0 )
RuntimeError ( " TrainingParameterSchedule::constructor : 'unit' cannot be 0. " ) ;
if ( schedule . size ( ) = = 0 )
RuntimeError ( " TrainingParameterSchedule::constructor : schedule is empty. " ) ;
size_t i = 1 ;
for ( const auto & value : schedule )
{
m_schedule [ m_unit * i + + ] = value ;
}
}
///
/// Create a schedule using the list of key-value pairs, where the key specifies
/// the number of 'units' the parameter should maintain the corresponding value.
/// The value from the last pair is used repeatedly until the end of training.
/// For example, {{1, 0.05}, {2, 0.1}, {1, 0.005}} and unit = 100, corresponds to
/// a schedule where the value of '0.05' is used for the first 100 samples, then
/// '0.1' is used for the second 200 samples, after which the values is switched
/// to '0.005'.
///
TrainingParameterSchedule ( const std : : initializer_list < std : : pair < const size_t , T > > & schedule , size_t unit = 1 )
: m_unit ( unit )
{
// TODO: 0 will be used to mean "the entire sweep"
if ( unit = = 0 )
RuntimeError ( " TrainingParameterSchedule::constructor : 'unit' cannot be 0. " ) ;
if ( schedule . size ( ) = = 0 )
RuntimeError ( " TrainingParameterSchedule::constructor : schedule is empty. " ) ;
size_t i = 0 ;
for ( const auto & it : schedule )
{
if ( it . first = = 0 )
RuntimeError ( " TrainingParameterSchedule::constructor : unit count cannot be 0. " ) ;
i + = it . first ;
m_schedule [ m_unit * i ] = it . second ;
}
}
///
/// Returns a value corresponding to the absolute sample count from the beginning of training.
///
CNTK_API const T & operator [ ] ( size_t samleCount ) const ;
private :
std : : map < size_t , T > m_schedule ;
size_t m_unit ;
} ;
typedef TrainingParameterSchedule < double > LearningRatesPerSample ;
typedef TrainingParameterSchedule < double > MomentumsPerSample ;
2016-07-13 10:37:06 +03:00
///
/// Create an instance of the CNTK built-in SGD learner.
///
CNTK_API LearnerPtr SGDLearner(const std::unordered_set<Parameter>& parameters,
                               const LearningRatesPerSample& learningRates,
                               double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                               bool gradientClippingWithTruncation = true);

///
/// Create an instance of the CNTK built-in Momentum SGD learner.
///
CNTK_API LearnerPtr MomentumSGDLearner(const std::unordered_set<Parameter>& parameters,
                                       const LearningRatesPerSample& learningRates,
                                       const MomentumsPerSample& momentums,
                                       double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                                       bool gradientClippingWithTruncation = true);

///
/// Create an instance of the CNTK built-in Nesterov's accelerated SGD learner.
///
CNTK_API LearnerPtr NesterovLearner(const std::unordered_set<Parameter>& parameters,
                                    const LearningRatesPerSample& learningRates,
                                    const MomentumsPerSample& momentums,
                                    double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                                    bool gradientClippingWithTruncation = true);

///
/// Create an instance of the CNTK built-in FSAdaGrad (improved AdaGrad) learner.
///
CNTK_API LearnerPtr FSAdaGradLearner(const std::unordered_set<Parameter>& parameters,
                                     const LearningRatesPerSample& learningRates,
                                     const MomentumsPerSample& momentums,
                                     double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                                     bool gradientClippingWithTruncation = true);

///
/// Create an instance of the CNTK built-in AdaGrad learner.
///
CNTK_API LearnerPtr AdaGradLearner(const std::unordered_set<Parameter>& parameters,
                                   const LearningRatesPerSample& learningRates,
                                   bool needAveMultiplier = true,
                                   double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                                   bool gradientClippingWithTruncation = true);

///
/// Create an instance of the CNTK built-in RMSProp learner.
///
CNTK_API LearnerPtr RMSPropLearner(const std::unordered_set<Parameter>& parameters,
                                   const LearningRatesPerSample& learningRates,
                                   double gamma,
                                   double inc,
                                   double dec,
                                   double max,
                                   double min,
                                   bool needAveMultiplier = true,
                                   double clippingThresholdPerSample = std::numeric_limits<double>::infinity(),
                                   bool gradientClippingWithTruncation = true);
2016-07-23 02:46:09 +03:00
///
/// Trainer is the top-level abstraction responsible for the orchestration of the training of a model
2016-08-18 15:33:38 +03:00
/// using the specified learners and training data either explicitly supplied as Value objects or from
2016-07-23 02:46:09 +03:00
/// a MinibatchSource object.
///
class Trainer
{
public :
///
/// Construct a Trainer to train the specified 'model' with the specified 'trainingLoss' Variable as the training criterion
/// and using the specified set of 'parameterLearners' for updating the model's parameters using computed gradients.
///
2016-08-31 08:50:25 +03:00
CNTK_API Trainer ( const FunctionPtr & model , const FunctionPtr & lossFunction , const std : : unordered_set < LearnerPtr > & parameterLearners ) ;
///
/// Construct a Trainer to train the specified 'model' with the specified 'trainingLoss' as the training criterion,
/// the specified 'evaluationFunction' as the criterion for evaluating the trained model's quality, and using the specified set
/// of 'parameterLearners' for updating the model's parameters using computed gradients.
///
// TODO: Add overload for multiple evaluation criterion
CNTK_API Trainer ( const FunctionPtr & model , const FunctionPtr & lossFunction , const FunctionPtr & evaluationFunction , const std : : unordered_set < LearnerPtr > & parameterLearners ) ;
2016-07-23 02:46:09 +03:00
///
/// Optimize model parameters using the specified 'arguments' minibatch of training samples.
/// Returns false if all parameter learners indicate end of learning (through their Update method's return value).
///
2016-08-26 22:14:23 +03:00
CNTK_API bool TrainMinibatch ( const std : : unordered_map < Variable , ValuePtr > & arguments , const DeviceDescriptor & computeDevice = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-07-23 02:46:09 +03:00
2016-08-31 08:50:25 +03:00
///
/// Test the model on the specified batch of samples using the evaluation Function specified during construction of the Trainer
/// Returns the average evaluation criterion value per sample for the tested minibatch of samples
///
CNTK_API double TestMinbatch ( const std : : unordered_map < Variable , ValuePtr > & arguments , const DeviceDescriptor & computeDevice = DeviceDescriptor : : UseDefaultDevice ( ) ) ;
2016-09-02 16:12:07 +03:00
///
/// Checkpoint the model and other Trainer state at the specified file location
///
CNTK_API void SaveCheckpoint ( const std : : wstring & modelFilePath ) ;
///
/// Restore the model and trainer state from a previously saved model and checkpoint from the specified file location
///
CNTK_API void RestoreFromCheckpoint ( const std : : wstring & modelFilePath ) ;
2016-07-23 02:46:09 +03:00
///
/// Model being trained by 'this' Trainer.
///
FunctionPtr Model ( ) const { return m_model ; }
///
2016-08-31 08:50:25 +03:00
/// Loss function that is used as the optimization criterion for learning the model's parameters.
2016-07-23 02:46:09 +03:00
///
2016-08-31 08:50:25 +03:00
FunctionPtr LossFunction ( ) const { return m_lossFunction ; }
2016-07-23 02:46:09 +03:00
///
2016-08-31 08:50:25 +03:00
/// Evaluation Function that is used as for the criterion for evaluating the trained model's quality.
2016-07-23 02:46:09 +03:00
///
2016-08-31 08:50:25 +03:00
FunctionPtr EvaluationFunction ( ) const { return m_evaluationFunction ; }
2016-07-23 02:46:09 +03:00
2016-08-26 22:14:23 +03:00
///
2016-08-31 08:50:25 +03:00
/// Returns the average training loss per sample for the last minibatch trained.
2016-08-26 22:14:23 +03:00
///
2016-08-31 08:50:25 +03:00
CNTK_API double PreviousMinibatchLossAverage ( ) const ;
///
/// Returns the average evaluation criterion value per sample for the last minibatch trained.
///
CNTK_API double PreviousMinibatchEvaluationAverage ( ) const ;
///
/// Returns the number of samples in the last minibatch trained with
///
size_t PreviousMinibatchSampleCount ( ) const { return m_prevMinibatchNumSamples ; }
2016-08-26 22:14:23 +03:00
2016-07-23 02:46:09 +03:00
///
/// Learners associated with this Trainer for updating the model's parameters using computed gradients.
///
const std : : unordered_set < LearnerPtr > & ParameterLearners ( ) const { return m_parameterLearners ; }
private :
2016-08-31 08:50:25 +03:00
FunctionPtr m_combinedTrainingFunction ;
2016-07-23 02:46:09 +03:00
FunctionPtr m_model ;
2016-08-31 08:50:25 +03:00
FunctionPtr m_lossFunction ;
FunctionPtr m_evaluationFunction ;
2016-07-23 02:46:09 +03:00
std : : unordered_set < LearnerPtr > m_parameterLearners ;
2016-08-31 08:50:25 +03:00
size_t m_prevMinibatchNumSamples ;
ValuePtr m_prevMinibatchAggregateTrainingLossValue ;
ValuePtr m_prevMinibatchAggregateEvalCriterionValue ;
2016-07-23 02:46:09 +03:00
} ;
///
/// Describes an input stream: its name, element type, storage, etc.
///
2016-08-26 22:14:23 +03:00
struct StreamInformation
2016-07-23 02:46:09 +03:00
{
std : : wstring m_name ; // Unique name of the stream
size_t m_id ; // Unique identifier of the stream
StorageFormat m_storageFormat ; // Storage format of the stream
DataType m_elementType ; // Element type of the stream
NDShape m_sampleLayout ; // Layout of the sample for the stream
} ;
2016-08-26 22:14:23 +03:00
inline bool operator = = ( const StreamInformation & left , const StreamInformation & right )
2016-07-23 02:46:09 +03:00
{
2016-07-31 02:16:41 +03:00
return ( ( left . m_id = = right . m_id ) & &
( left . m_name = = right . m_name ) & &
( left . m_storageFormat = = right . m_storageFormat ) & &
( left . m_elementType = = right . m_elementType ) & &
( left . m_sampleLayout = = right . m_sampleLayout ) ) ;
2016-07-23 02:46:09 +03:00
}
}
namespace std {
2016-08-26 22:14:23 +03:00
template < > struct hash < CNTK : : StreamInformation >
2016-07-23 02:46:09 +03:00
{
2016-08-26 22:14:23 +03:00
size_t operator ( ) ( const CNTK : : StreamInformation & x ) const
2016-07-23 02:46:09 +03:00
{
return std : : hash < size_t > ( ) ( x . m_id ) ;
}
} ;
}
namespace CNTK
{
2016-07-31 02:16:41 +03:00
/// Holds one stream's worth of minibatch data along with its sequence/sample counts.
struct MinibatchData
{
    size_t m_numSequences; // number of sequences in this minibatch
    size_t m_numSamples;   // number of samples in this minibatch
    ValuePtr m_data;       // the minibatch payload as a Value object
};
2016-07-23 02:46:09 +03:00
///
2016-08-18 15:33:38 +03:00
/// Abstraction for generating minibatches of samples for training/evaluation.
2016-07-23 02:46:09 +03:00
///
class MinibatchSource : public std : : enable_shared_from_this < MinibatchSource >
{
public :
///
/// Describes the streams 'this' MinibatchSource produces.
///
2016-08-26 22:14:23 +03:00
    ///
    /// Describes all the data streams this source can produce (names, shapes, storage formats).
    ///
    virtual const std::unordered_set<StreamInformation>& StreamInfos() = 0;

    ///
    /// Reads a minibatch that contains data for all input streams.
    /// The minibatch size is specified in terms of #samples and/or #sequences for the primary input stream; a value of 0 for #samples/#sequences means unspecified.
    /// In case the size is specified in terms of both #sequences and #samples, the smaller of the 2 is taken.
    /// An empty map is returned when the MinibatchSource has no more data to return.
    ///
    virtual const std::unordered_map<StreamInformation, MinibatchData>& GetNextMinibatch(size_t minibatchSizeInSamples,
                                                                                         size_t minibatchSizeInSequences,
                                                                                         const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice()) = 0;

    ///
    /// Destruct this MinibatchSource.
    ///
    virtual ~MinibatchSource() {}

public:
    ///
    /// Gets the description of the stream with given name.
    /// Throws an exception if there are none or multiple streams with this same name.
    ///
    CNTK_API const StreamInformation& StreamInfo(const std::wstring& streamName);

    ///
    /// Gets the description of the stream that matches the attributes (Shape, DataType and StorageFormat) of the specified Variable object
    /// Throws an exception if there are none or multiple streams matching the Variable's attributes
    ///
    CNTK_API const StreamInformation& StreamInfo(const Variable& variableToMatch);

    ///
    /// Reads a minibatch that contains data for all input streams.
    /// The minibatch size is specified in terms of #samples for the primary input stream.
    /// An empty map is returned when the MinibatchSource has no more data to return.
    ///
    CNTK_API const std::unordered_map<StreamInformation, MinibatchData>& GetNextMinibatch(size_t minibatchSizeInSamples, const DeviceDescriptor& device = DeviceDescriptor::UseDefaultDevice());

    // TODO: Methods to save and restore from checkpoints

    // Disallow copy and move construction and assignment
    MinibatchSource(const MinibatchSource&) = delete; MinibatchSource(MinibatchSource&&) = delete; MinibatchSource& operator=(const MinibatchSource&) = delete; MinibatchSource& operator=(MinibatchSource&&) = delete;

protected:
    // Only derived reader implementations may construct a MinibatchSource.
    MinibatchSource() {}
};
///
/// Instantiate the CNTK built-in composite minibatch source.
/// The 'configuration' dictionary carries the reader settings (e.g. deserializers, epochSize) used to construct the source.
///
CNTK_API MinibatchSourcePtr CreateCompositeMinibatchSource(const Dictionary& configuration);
2016-07-31 02:16:41 +03:00
2016-09-04 03:31:02 +03:00
///
/// Configuration for a single input stream of the text format minibatch source:
/// the stream's name, its sample dimension, its storage format, and an optional
/// alias under which the stream appears in the input file.
///
struct StreamConfiguration
{
    StreamConfiguration(const std::wstring& streamName,
                        size_t dim,
                        bool isSparse = false,
                        const std::wstring& streamAlias = L"")
        : m_streamName(streamName),
          m_dim(dim),
          m_isSparse(isSparse),
          m_streamAlias(streamAlias)
    {
    }

    std::wstring m_streamName;  // name the stream is exposed under
    size_t m_dim;               // dimension of a single sample
    bool m_isSparse;            // true => sparse storage, false => dense
    std::wstring m_streamAlias; // alias used in the data file; empty means same as m_streamName
};
///
/// Instantiate the CNTK built-in text format minibatch source.
/// 'dataFilePath' names the CNTKTextFormat input file, 'streamConfigs' describes the
/// streams to expose, and 'epochSize' is the number of samples per epoch
/// (SIZE_MAX => the full sweep).
///
inline MinibatchSourcePtr TextFormatMinibatchSource(const std::wstring& dataFilePath, const std::vector<StreamConfiguration>& streamConfigs, size_t epochSize = SIZE_MAX)
{
    CNTK::Dictionary minibatchSourceConfiguration;
    minibatchSourceConfiguration[L"epochSize"] = epochSize;

    CNTK::Dictionary deserializerConfiguration;
    deserializerConfiguration[L"type"] = L"CNTKTextFormatDeserializer";
    deserializerConfiguration[L"file"] = dataFilePath;

    CNTK::Dictionary inputStreamsConfig;
    // Iterate by const reference: each StreamConfiguration holds two std::wstring
    // members, so iterating (or unpacking members into locals) by value would copy
    // them on every iteration for no benefit.
    for (const auto& streamConfig : streamConfigs)
    {
        CNTK::Dictionary inputStreamConfig;
        inputStreamConfig[L"dim"] = streamConfig.m_dim;
        inputStreamConfig[L"format"] = streamConfig.m_isSparse ? L"sparse" : L"dense";
        if (!streamConfig.m_streamAlias.empty())
            inputStreamConfig[L"alias"] = streamConfig.m_streamAlias;

        inputStreamsConfig[streamConfig.m_streamName] = inputStreamConfig;
    }
    deserializerConfiguration[L"input"] = inputStreamsConfig;
    minibatchSourceConfiguration[L"deserializers"] = std::vector<CNTK::DictionaryValue>({ deserializerConfiguration });

    return CreateCompositeMinibatchSource(minibatchSourceConfiguration);
}
2016-07-31 02:16:41 +03:00
///
/// Compute the per dimension means and variances for each of the specified streams using data from the specified minibatchSource.
/// For each requested stream, the map is filled with a pair of NDArrayViews holding the computed statistics.
/// NOTE(review): the function name says "InvStdDevs" while this comment and the parameter name say "variances" —
/// confirm against the implementation whether the second element of each pair is a variance or an inverse std-dev.
///
CNTK_API void ComputeInputPerDimMeansAndInvStdDevs(const MinibatchSourcePtr& minibatchSource,
                                                   std::unordered_map<StreamInformation, std::pair<NDArrayViewPtr, NDArrayViewPtr>>& computedMeanAndVariances,
                                                   const DeviceDescriptor& device = DeviceDescriptor::CPUDevice());
2016-05-31 16:40:40 +03:00
}