Make C# samples backward compatible with the Rel2.2 NuGet
This commit is contained in:
Parent
12ba72ea21
Commit
ba5108c460
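
In brief, as the hunks below show: the C# training samples had moved to the new TrainMinibatch overload that takes an explicit isSweepEndInarguments flag, but that overload does not exist in the Rel2.2 NuGet package. This commit restores source compatibility by (1) re-adding the flag-less TrainMinibatch(arguments, device) overload, now marked [Obsolete]; (2) hiding the SWIG-generated Trainer.TrainMinibatch behind a private _TrainMinibatch so both hand-written overloads forward to it; (3) switching the samples back to the flag-less overload under a scoped warning suppression; and (4) rewriting the transfer-learning clone to substitute the new input directly in Clone() instead of going through PlaceholderVariable/ReplacePlaceholders.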
@@ -41,8 +41,10 @@ namespace CNTK.CSTrainingExamples
                 Value features, labels;
                 GenerateValueData(minibatchSize, inputDim, numOutputClasses, out features, out labels, device);
                 //TODO: sweepEnd should be set properly instead of false.
+#pragma warning disable 618
                 trainer.TrainMinibatch(
-                    new Dictionary<Variable, Value>() { { featureVariable, features }, { labelVariable, labels } }, false, device);
+                    new Dictionary<Variable, Value>() { { featureVariable, features }, { labelVariable, labels } }, device);
+#pragma warning restore 618
                 TestHelper.PrintTrainingProgress(trainer, minibatchCount, updatePerMinibatches);
             }

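For context, 618 is CS0618, the C# compiler warning raised when code uses a member marked [Obsolete] with error set to false; the pragma pair scopes the suppression to just the back-compatible call. A minimal standalone illustration (all names here are hypothetical, not from the commit):

    using System;

    class ObsoleteCallDemo
    {
        [Obsolete("Use NewTrain() instead.", false)]  // false => CS0618 warning, not a compile error
        static void OldTrain() { }

        static void Main()
        {
    #pragma warning disable 618   // suppress CS0618 for just this call
            OldTrain();           // compiles cleanly despite the [Obsolete] attribute
    #pragma warning restore 618
        }
    }
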
@@ -168,9 +168,11 @@ namespace CNTK.CSTrainingExamples
                     imageDims, animalModelNumClasses, device, out imageBatch, out labelBatch))
                 {
                     //TODO: sweepEnd should be set properly.
+#pragma warning disable 618
                     trainer.TrainMinibatch(new Dictionary<Variable, Value>() {
                         { imageInput, imageBatch },
-                        { labelInput, labelBatch } }, false, device);
+                        { labelInput, labelBatch } }, device);
+#pragma warning restore 618
                     TestHelper.PrintTrainingProgress(trainer, minibatchCount, 1);
                 }
             }

@@ -279,14 +281,11 @@ namespace CNTK.CSTrainingExamples

             Variable oldFeatureNode = baseModel.Arguments.Single(a => a.Name == featureNodeName);
             Function lastNode = baseModel.FindByName(hiddenNodeName);
-            Variable newFeatureNode = CNTKLib.PlaceholderVariable(featureNodeName);

             // Clone the desired layers with fixed weights
             Function clonedLayer = CNTKLib.AsComposite(lastNode).Clone(
                 ParameterCloningMethod.Freeze,
-                new Dictionary<Variable, Variable>() { { oldFeatureNode, newFeatureNode } });
-
-            clonedLayer.ReplacePlaceholders(new Dictionary<Variable, Variable>() { { newFeatureNode, normalizedFeatureNode } });
+                new Dictionary<Variable, Variable>() { { oldFeatureNode, normalizedFeatureNode } });

             // Add new dense layer for class prediction
             Function clonedModel = TestHelper.Dense(clonedLayer, numClasses, device, Activation.None, outputNodeName);

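The hunk above drops the placeholder indirection: instead of cloning against a temporary PlaceholderVariable and patching it afterwards with ReplacePlaceholders, the frozen clone maps the old feature input straight to normalizedFeatureNode, presumably so the sample sticks to calls the Rel2.2 package already exposes. A hedged side-by-side sketch, reusing the variables from the hunk above:

    // Old route (removed): clone against a placeholder, then patch it afterwards.
    Variable placeholder = CNTKLib.PlaceholderVariable(featureNodeName);
    Function clonedViaPlaceholder = CNTKLib.AsComposite(lastNode).Clone(
        ParameterCloningMethod.Freeze,
        new Dictionary<Variable, Variable>() { { oldFeatureNode, placeholder } });
    clonedViaPlaceholder.ReplacePlaceholders(
        new Dictionary<Variable, Variable>() { { placeholder, normalizedFeatureNode } });

    // New route (kept): substitute the real input directly while cloning.
    Function clonedDirect = CNTKLib.AsComposite(lastNode).Clone(
        ParameterCloningMethod.Freeze,
        new Dictionary<Variable, Variable>() { { oldFeatureNode, normalizedFeatureNode } });
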
@@ -735,6 +735,7 @@ RENAME_AND_MAKE_PRIVATE(CNTK::Function, Save);
 RENAME_AND_MAKE_PRIVATE(CNTK::Function, Clone);
 RENAME_AND_MAKE_PRIVATE(CNTK::Function, Evaluate);
 RENAME_AND_MAKE_PRIVATE(CNTK::Function, FindByName);
+RENAME_AND_MAKE_PRIVATE(CNTK::Trainer, TrainMinibatch);
 // Customize type mapping for modelBuffer, used by Load
 %typemap(ctype) (char* buffer) "char*"
 %typemap(imtype) (char* buffer) "byte[]"

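Judging from its existing uses (the _Clone and _TrainMinibatch calls in the C# hunks below), RENAME_AND_MAKE_PRIVATE hides the raw SWIG-generated binding behind a private, underscore-prefixed name. Adding CNTK::Trainer::TrainMinibatch to the list is what forces the hand-written wrappers in Trainer.cs to call _TrainMinibatch explicitly; without the rename, the wrappers' internal calls would resolve to the still-public generated TrainMinibatch, and the curated overload set would not be the only public surface.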
@@ -151,6 +151,13 @@ namespace CNTK
             return _Clone(ParameterCloningMethod.Share);
         }

+        /// <summary>
+        /// Clones 'this' Function. The parameters of the Function are either cloned, shared or frozen as specified by the parameterCloneMethod argument, and
+        /// any variable replacements requested are applied in the cloned Function instance.
+        /// </summary>
+        /// <param name="parameterCloneMethod">specifies whether the parameters are cloned, shared or frozen in the clone</param>
+        /// <param name="replacements">existing variables to be replaced with new variables</param>
+        /// <returns></returns>
         public Function Clone(ParameterCloningMethod parameterCloneMethod, IDictionary<Variable, Variable> replacements)
         {
             UnorderedMapVariableVariable replacementVector = Helper.AsUnorderedMapVariableVariable(replacements);

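This newly documented overload is the one the transfer-learning sample above relies on. A hedged usage sketch (model, oldInput, and newInput are assumed to exist in the caller's scope):

    // Freeze the parameters and rewire the input in a single Clone call.
    Function frozen = model.Clone(
        ParameterCloningMethod.Freeze,
        new Dictionary<Variable, Variable>() { { oldInput, newInput } });
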
@@ -38,7 +38,7 @@ namespace CNTK
         public bool TrainMinibatch(IDictionary<Variable, MinibatchData> arguments, DeviceDescriptor computeDevice)
         {
             UnorderedMapVariableMinibatchData vectorData = Helper.AsUnorderedMapVariableMinibatchData(arguments);
-            return TrainMinibatch(vectorData, computeDevice);
+            return _TrainMinibatch(vectorData, computeDevice);
         }

         /// <summary>
@@ -47,10 +47,25 @@ namespace CNTK
         /// <param name="arguments">minibatch data as variable value pairs</param>
         /// <param name="computeDevice">device</param>
         /// <returns></returns>
+        [System.Obsolete("TrainMinibatch() without isSweepEndInarguments will be deprecated soon. Please use TrainMinibatch() with isSweepEndInarguments.", false)]
+        public bool TrainMinibatch(IDictionary<Variable, Value> arguments, DeviceDescriptor computeDevice)
+        {
+            UnorderedMapVariableValuePtr mapData = Helper.AsUnorderedMapVariableValue(arguments);
+            bool isSweepEndInarguments = false;
+            return _TrainMinibatch(mapData, isSweepEndInarguments, computeDevice);
+        }
+
+        /// <summary>
+        /// train with a minibatch of data
+        /// </summary>
+        /// <param name="arguments">minibatch data as variable value pairs</param>
+        /// <param name="isSweepEndInarguments">indicates whether the current minibatch data is at the end of one sweep</param>
+        /// <param name="computeDevice">device</param>
+        /// <returns></returns>
         public bool TrainMinibatch(IDictionary<Variable, Value> arguments, bool isSweepEndInarguments, DeviceDescriptor computeDevice)
         {
             UnorderedMapVariableValuePtr mapData = Helper.AsUnorderedMapVariableValue(arguments);
-            return TrainMinibatch(mapData, isSweepEndInarguments, computeDevice);
+            return _TrainMinibatch(mapData, isSweepEndInarguments, computeDevice);
         }
     }
 }

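With both public overloads forwarding to the private, SWIG-generated _TrainMinibatch, code written against either API shape compiles against this assembly. A hedged caller-side sketch (trainer, argumentMap, and device are assumed to exist):

    #pragma warning disable 618
    bool more = trainer.TrainMinibatch(argumentMap, device);           // legacy Rel2.2 signature; sweep end treated as false
    #pragma warning restore 618
    bool moreNew = trainer.TrainMinibatch(argumentMap, true, device);  // current signature with an explicit sweep-end flag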