Revert "CNTK v2 library: API changes"
This reverts commit a0d6a0b661602a365b99bbf3fa2de5649431b26f.
Parent: 3499e05518
Commit: 60cf546948
@@ -623,7 +623,7 @@ namespace CNTK
         {
             assert(m_inputs.size() == 2);
             if (m_inputs[0].Shape().TotalSize() != m_inputs[1].Shape().TotalSize())
-                InvalidArgument("ForwardBackward: Operands '%S' and '%S' shapes must have the same total size.", m_inputs[0].AsString().c_str(), m_inputs[1].AsString().c_str());
+                InvalidArgument("ForwardBackward: The shapes of operands '%S' and '%S' must have the same total size.", m_inputs[0].AsString().c_str(), m_inputs[1].AsString().c_str());

             outputShape = {};
             break;
@@ -74,7 +74,7 @@ class MySgdFast(UserLearner):
            shape = result.shape

            static_tensor = result.data.slice_view([0]*len(shape),
-                                                   shape[2:])
+                                                   shape[1:])
            p.set_value(static_tensor)

        return True
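Why shape[1:] is the right extent here: assuming the post-revert data layout, the gradient value carries a single leading length-1 dynamic axis in front of the parameter's static shape, so slice_view must strip exactly one axis before p.set_value() is called. A NumPy analogy of the indexing (not the CNTK API; the shape (1, 25, 50) is illustrative):

    import numpy as np

    # 'result' stands in for the gradient value: a length-1 leading axis
    # followed by the parameter's static shape
    result = np.zeros((1, 25, 50))
    shape = result.shape
    static_tensor = result[0]                  # drop the leading axis only
    assert static_tensor.shape == shape[1:]    # (25, 50) -- shape[2:] would lose a real axis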
@@ -418,3 +418,10 @@ def sanitize_dynamic_axes(axes):
             raise TypeError('type Axis expected, got %s instead' % type(ax))
     axes = tuple(reversed(axes))
     return axes
+
+
+def sanitize_variable_value_dict(var_value_dict):
+    if len(var_value_dict) > 1:
+        return var_value_dict
+    else:
+        return list(var_value_dict.values())[0]
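The re-added helper is self-contained, so its behavior can be checked directly; a runnable sketch (the string keys 'v1', 'v2' are stand-ins for CNTK Variable objects):

    def sanitize_variable_value_dict(var_value_dict):
        if len(var_value_dict) > 1:
            return var_value_dict
        else:
            return list(var_value_dict.values())[0]

    print(sanitize_variable_value_dict({'v1': [1, 2]}))       # [1, 2] -- a single entry is unwrapped
    print(sanitize_variable_value_dict({'v1': 1, 'v2': 2}))   # dict returned unchanged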
@@ -9,7 +9,7 @@ import numpy as np
 import numbers
 from numbers import Number
 from . import sequence
-from .functions import CloneMethod, Function, load_model, UserFunction
+from .functions import CloneMethod, Function, load_model
 from .variables import Variable, Parameter, Constant
 from ..utils import get_data_type
 from cntk.internal import sanitize_input, sanitize_shape, sanitize_axis, sanitize_dynamic_axes
@@ -2506,8 +2506,8 @@ from cntk.device import use_default_device
 from cntk.axis import Axis

 @typemap
-def input(shape, dtype=np.float32, needs_gradient=False, is_sparse=False,
-          dynamic_axes=[Axis.default_batch_axis()], name=''):
+def input_variable(shape, dtype=np.float32, needs_gradient=False, is_sparse=False,
+                   dynamic_axes=Axis.default_input_variable_dynamic_axes(), name=''):
     '''
     It creates an input in the network: a place where data,
     such as features and labels, should be provided.
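With the rename reverted, inputs are declared through input_variable() again, with the default dynamic axes restored to Axis.default_input_variable_dynamic_axes(). A minimal hedged sketch, assuming the post-revert API at this commit re-exports the op at the top level:

    import numpy as np
    import cntk as C

    # a dense 3-dimensional feature input with the default
    # (batch, sequence) dynamic axes restored by this revert
    x = C.input_variable(shape=(3,), dtype=np.float32, name='features')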
@@ -2538,34 +2538,6 @@ def input(shape, dtype=np.float32, needs_gradient=False, is_sparse=False,

     return input_variable(shape, is_sparse, dtype, needs_gradient, name, dynamic_axes)
-
-@typemap
-def input_variable(shape, dtype=np.float32, needs_gradient=False, is_sparse=False,
-                   dynamic_axes=Axis.default_input_variable_dynamic_axes(), name=''):
-    '''
-    DEPRECATED.
-
-    It creates an input in the network: a place where data,
-    such as features and labels, should be provided.
-
-    Args:
-        shape (tuple or int): the shape of the input tensor
-        dtype (type, optional): np.float32 (default) or np.float64
-        needs_gradients (bool, optional): whether to back-propagates to it or not. False by default.
-        is_sparse (bool, optional): whether the variable is sparse (`False` by default)
-        dynamic_axes (list or tuple, default): a list of dynamic axis (e.g., batch axis, time axis)
-        name (str, optional): the name of the Function instance in the network
-
-    Returns:
-        :class:`~cntk.ops.variables.Variable`
-    '''
-    import warnings
-    warnings.warn('This will be removed in future versions. Please use '
-                  'input() or sequence.input() instead.', DeprecationWarning)
-    if (type(dynamic_axes) in (list, tuple)) and (len(dynamic_axes) == 2):
-        return sequence.input(shape, dtype, needs_gradient, is_sparse, dynamic_axes[1], name)
-    else:
-        return input(shape, dtype, needs_gradient, is_sparse, dynamic_axes, name)

 @typemap
 def output_variable(shape, dtype, dynamic_axes, name=''):
     '''
@@ -2595,16 +2567,16 @@ def output_variable(shape, dtype, dynamic_axes, name=''):
     return output_variable(shape, dtype, dynamic_axes, name)

 @typemap
-def forward_declaration(shape=None, dynamic_axes=None, name=''):
+def placeholder_variable(shape=None, dynamic_axes=None, name=''):
     '''
-    It creates a forward declaration of an output variable to be used as a placeholder
-    for a later output variable in the recurrent network. The forward declaration should be later
-    replaced with the actual output variable it represents by calling bind_forward_declaration(s).
+    It creates a placeholder variable that has to be later bound to an actual variable.
+    A common use of this is to serve as a placeholder for a later output variable in a
+    recurrent network, which is replaced with the actual output variable by calling
+    replace_placeholder(s).

     Args:
         shape (tuple or int): the shape of the variable tensor
-        dynamic_axes (list): the list of dynamic axes that the variable uses
-        name (str, optional): the name of the forward declaration variable in the network
+        dynamic_axes (list): the list of dynamic axes that the actual variable uses

     Returns:
         :class:`~cntk.ops.variables.Variable`
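The recurrence use case the docstring describes, as a hedged sketch against the post-revert names placeholder_variable / replace_placeholders (the one-step recurrence itself is illustrative):

    import cntk as C

    x = C.input_variable((2,))                 # carries a sequence axis by default
    prev = C.placeholder_variable(shape=(2,))  # forward reference, bound below
    step = C.plus(x, C.past_value(prev))       # recurrence over the sequence axis
    loop = step.replace_placeholders({prev: step.output})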
@@ -2622,29 +2594,6 @@ def forward_declaration(shape=None, dynamic_axes=None, name=''):
     dynamic_axes = sanitize_dynamic_axes(dynamic_axes)
     return placeholder_variable(shape, name, dynamic_axes)
-
-
-@typemap
-def placeholder_variable(shape=None, dynamic_axes=None, name=''):
-    '''
-    DEPRECATED.
-
-    It creates a variable place holder for recurrence networks, when the network's dynamic axes
-    are unfolded, the place holder will get assigned a variable along the correspondent dynamic axis.
-
-    Args:
-        shape (tuple or int): the shape of the variable tensor
-        dynamic_axes (list): the list of dynamic axes that the actual variable uses
-        name (str, optional): the name of the placeholder variable in the network
-
-    Returns:
-        :class:`~cntk.ops.variables.Variable`
-    '''
-    import warnings
-    warnings.warn('This will be removed in future versions. Please use '
-                  'forward_declaration() instead.', DeprecationWarning)
-    return forward_declaration(shape, dynamic_axes, name)
-

 @typemap
 def parameter(shape=None, init=None, dtype=None, device=None, name=''):
     '''
@@ -2,7 +2,7 @@ from cntk import cntk_py
 from cntk.device import DeviceDescriptor, cpu
 from cntk.utils import variable_value_to_seq, Record, \
     get_python_function_arguments, map_function_arguments
-from cntk.internal import map_if_possible, typemap, sanitize_var_map, sanitize_batch, sanitize_dtype_cntk, _as_tuple
+from cntk.internal import map_if_possible, typemap, sanitize_var_map, sanitize_batch, sanitize_dtype_cntk, _as_tuple, sanitize_variable_value_dict
 from cntk.ops.variables import Variable
 from enum import Enum, unique
@@ -511,11 +511,7 @@ class Function(cntk_py.Function):
         '''

         _, output_map = self.forward(arguments, self.outputs, device=device, as_numpy=as_numpy)

-        if len(output_map) > 1:
-            return output_map
-        else:
-            return list(output_map.values())[0]
-
+        return sanitize_variable_value_dict(output_map)

     @typemap
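What the restored helper means for callers of eval(): one output gives a bare array, several outputs give a dict keyed by output Variable. A hedged sketch, assuming the post-revert API, with input data passed as one sequence per batch entry:

    import numpy as np
    import cntk as C

    x = C.input_variable((2,))
    f = C.plus(x, 1)
    # single-output Function: eval() unwraps the {output: value} dict
    print(f.eval({x: [np.asarray([[1., 2.]], dtype=np.float32)]}))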
@@ -723,17 +719,26 @@ class Function(cntk_py.Function):

         Args:
             at (dict) : mapping of the Function's arguments to values
-            wrt (list optional): list of Variables with respect to which the
+            wrt (list, default `None`): list of Variables with respect to which the
                 gradient will be computed. If omitted, the gradients with
-                respect to all arguments that need gradient will be computed.
-            as_numpy (bool): whether to return the gradients as a NumPy array. Default True.
+                respect to all arguments of this Function that need gradient will be computed.
+            outputs (iterable, optional): outputs (including intermediate outputs in the graph)
+                to fetch values for. If not specified, values for none of the outputs are fetched.
+            device (:class:`~cntk.device.DeviceDescriptor`, default `None`): the device
+                descriptor that contains the type and id of the device on which the
+                computation is performed. If `None`, the default device is used.
+            as_numpy (bool, default `True`): whether to return the gradients as a NumPy array. Default True.
                 Specifying this as False returns a CNTK Value which avoids a
                 costly conversion but returns a somewhat opaque object.

         Returns:
-            dict or NumPy Array: Dict with keys of ``wrt`` variables and gradient values of
-                ``wrt`` variables. A single NumPy array if there is only one gradient value.
-                Each element has the same shape as ``wrt`` including dynamic axes (such as the batch axis).
+            dict or NumPy Array or a tuple of these: Dict with keys of ``wrt`` variables and gradient values of
+                ``wrt`` variables. A single NumPy array if there is only one gradient value.
+                If ``outputs`` were specified (to fetch values for), this method returns a tuple where the 2nd element
+                of the tuple is the ``outputs`` values; a dict with keys of specified ``outputs`` variables and
+                values of computed ``outputs``, or a single NumPy array if there is only one output value.
+                Each element has the same shape as the ``wrt`` or ``outputs`` variables including dynamic axes
+                (such as the batch axis).
         '''
         if device is None:
             device = DeviceDescriptor.use_default_device()
@@ -757,17 +762,10 @@ class Function(cntk_py.Function):
         for k in wrt_map:
             wrt_map[k] = variable_value_to_seq(wrt_map[k], k)

-        if len(wrt_map) > 1:
-            ret_grad = wrt_map
-        else:
-            ret_grad = list(wrt_map.values())[0]
-
         if len(output_map) == 0:
-            return ret_grad
-        elif len(output_map) == 1:
-            return ret_grad, list(output_map.values())[0]
+            return sanitize_variable_value_dict(wrt_map)
         else:
-            return ret_grad, output_map
+            return sanitize_variable_value_dict(wrt_map), sanitize_variable_value_dict(output_map)

     @property
     @typemap
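The restored return conventions of grad() in one hedged sketch, assuming the post-revert signature documented above (shapes and data are illustrative):

    import numpy as np
    import cntk as C

    x = C.input_variable((2,), needs_gradient=True)
    f = C.reduce_sum(C.square(x))
    data = {x: [np.asarray([[1., 2.]], dtype=np.float32)]}

    g = f.grad(data)                            # one wrt variable -> bare NumPy array
    g, out = f.grad(data, outputs=[f.output])   # with outputs -> (grads, outputs) tuple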
@@ -897,45 +895,9 @@ class Function(cntk_py.Function):
         '''
         return super(Function, self).uid()

-    @typemap
-    def bind_forward_declarations(self, bindings):
-        '''
-        In-place bind the specified forward declarations in the Function graph to the
-        specified bindings in the map.
-
-        Args:
-            bindings (dict): map from forward declarations to binding variables
-
-        Returns:
-            :class:`Function`: itself
-        '''
-        bindings = bindings or {}
-        if not isinstance(bindings, dict):
-            raise TypeError("forward declaration bindings map must be a dictionary")
-        return super(Function, self).replace_placeholders(bindings)
-
-    @typemap
-    def bind_forward_declaration(self, binding):
-        '''
-        In-place bind the only forward declaration in the Function graph with the
-        specified binding.
-
-        Args:
-            binding (:class:`~cntk.ops.variables.Variable`): the variable
-                that the forward declaration will be bound to
-
-        Returns:
-            :class:`Function`: itself
-
-        :raises ExceptionType: when the function has multiple forward declarations.
-        '''
-        return super(Function, self).replace_placeholder(binding)
-
     @typemap
     def replace_placeholders(self, substitutions):
         '''
-        DEPRECATED.
-
         In-place replace specified placeholders in the Function graph with the
         specified replacements in the map.

@@ -945,16 +907,14 @@ class Function(cntk_py.Function):
         Returns:
             :class:`Function`: itself
         '''
-        import warnings
-        warnings.warn('This will be removed in future versions. Please use '
-                      'bind_forward_declarations() instead.', DeprecationWarning)
-        return self.bind_forward_declarations(substitutions)
+        substitutions = substitutions or {}
+        if not isinstance(substitutions, dict):
+            raise TypeError("Variable substitution map must be a dictionary")
+        return super(Function, self).replace_placeholders(substitutions)

     @typemap
     def replace_placeholder(self, substitution):
         '''
-        DEPRECATED.
-
         In-place replace the only placeholder in the function graph with the
         specified substitution.

@@ -967,10 +927,7 @@ class Function(cntk_py.Function):

         :raises ExceptionType: when the function has multiple placeholders.
         '''
-        import warnings
-        warnings.warn('This will be removed in future versions. Please use '
-                      'bind_forward_declaration() instead.', DeprecationWarning)
-        return self.bind_forward_declaration(substitution)
+        return super(Function, self).replace_placeholder(substitution)

     @typemap
     def find_all_with_name(self, name):
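With the deprecation shims gone, replace_placeholder(s) are once again the primary binding API. A hedged one-placeholder sketch, assuming the post-revert names:

    import cntk as C

    p = C.placeholder_variable(shape=(2,))
    f = C.plus(p, p)
    # bind the graph's single placeholder to a concrete input
    g = f.replace_placeholder(C.input_variable((2,)))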
@@ -3,35 +3,8 @@
 # for full license information.
 # ==============================================================================

-import numpy as np
 from ...utils import get_data_type
 from cntk.internal import typemap, sanitize_input
-from ...axis import Axis
-
-##########################################################################
-# variable ops
-##########################################################################
-
-@typemap
-def input(shape, dtype=np.float32, needs_gradient=False, is_sparse=False,
-          sequence_axis=Axis.default_dynamic_axis(), name=''):
-    '''
-    It creates an input in the network: a place where data,
-    such as features and labels, should be provided.
-
-    Args:
-        shape (tuple or int): the shape of the input tensor
-        dtype (type, optional): np.float32 (default) or np.float64
-        needs_gradients (bool, optional): whether to back-propagates to it or not. False by default.
-        is_sparse (bool, optional): whether the variable is sparse (`False` by default)
-        dynamic_axes (list or tuple, default): a list of dynamic axis (e.g., batch axis, time axis)
-        name (str, optional): the name of the Function instance in the network
-
-    Returns:
-        :class:`~cntk.ops.variables.Variable`
-    '''
-    from ... import input
-    return input(shape, dtype, needs_gradient, is_sparse, [Axis.default_batch_axis(), sequence_axis], name)

 ##########################################################################
 # sequence ops
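With cntk.ops.sequence.input() removed by this revert, a per-sequence input is declared through input_variable directly; a hedged sketch spelling out the dynamic axes explicitly (equivalent, as far as this diff shows, to the restored default):

    import cntk as C
    from cntk.axis import Axis

    # batch axis plus the default sequence axis, written out explicitly
    x_seq = C.input_variable((3,), dynamic_axes=[Axis.default_batch_axis(),
                                                 Axis.default_dynamic_axis()])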