new NDL for QuickE2E model now compiles (now fails due to mismatching node names for Input)

This commit is contained in:
Frank Seide 2015-08-29 21:37:56 -07:00
Родитель 3548de6a19
Коммит 5d43271b93
2 изменённых файлов: 9 добавлений и 8 удалений

Просмотреть файл

@@ -40,8 +40,8 @@ namespace Microsoft { namespace MSR { namespace BS {
L"Fac(n) = if n > 1 then Fac(n-1) * n else 1 \n"
;
wstring computationNodes = // BUGBUG: optional args not working yet, some scope problem causing a circular reference
L"Parameter(rows, cols, needGradient = true, init = 'uniform'/*|fixedValueor|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', tag='') = new ComputationNode [ operation = 'LearnableParameter' /*plus the function args*/ ]\n"
wstring computationNodes =
L"Parameter(rows, cols, needGradient = true, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', tag='') = new ComputationNode [ operation = 'LearnableParameter' /*plus the function args*/ ]\n"
L"Input(rows, cols, tag='feature') = new ComputationNode [ operation = 'Input' /*plus the function args*/ ]\n"
// ^^ already works; vv not yet working
L"Mean(z, tag='') = new ComputationNode [ operation = 'Mean' ; inputs = z /* ; tag = tag */ ]\n"
@@ -57,8 +57,8 @@ namespace Microsoft { namespace MSR { namespace BS {
L"ErrorPrediction(labels, outZ, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = labels:outZ /* ; tag = tag */ ]\n"
;
wstring commonMacros = // TODO: rename rows and cols to inDim and outDim or vice versa, whichever it is
L"BFF(in, rows, cols) = [ B = Parameter(rows, 1/*init = fixedvalue, value = 0*/) ; W = Parameter(rows, cols) ; z = /*W*in+B*/Log(in) ] \n" // TODO: fix this once we got the ComputationNode type connected correctly
wstring commonMacros =
L"BFF(in, rows, cols) = [ B = Parameter(rows, 1, init = 'fixedValue', value = 0) ; W = Parameter(rows, cols) ; z = W*in+B ] \n"
L"SBFF(in, rows, cols) = [ Eh = Sigmoid(BFF(in, rows, cols).z) ] \n "
L"MeanVarNorm(feat) = PerDimMeanVarNormalization(feat, Mean(feat), InvStdDev(feat)) \n"
L"LogPrior(labels) = Log(Mean(labels)) \n"

Просмотреть файл

@@ -27,13 +27,13 @@ speechTrain=[
applyMeanVarNorm=true
numHiddenLayers = Length(layerSizes)-1
features = Input(layerSizes[0], tag='feature') ; labels = Input(layerSizes[Length(layerSizes)-1], tag='label')
L = Length(layerSizes)-1 // number of model layers
features = Input(layerSizes[0], 1, tag='feature') ; labels = Input(layerSizes[Length(layerSizes)-1], 1, tag='label')
featNorm = if applyMeanVarNorm
then MeanVarNorm(features)
else features
layers = array[1..numHiddenLayers] (layer => if layer > 1 then SBFF(layers[layer-1].Eh, layerSizes[layer], layerSizes[layer-1]) else SBFF(featNorm, layerSizes[layer], layerSizes[layer-1]))
outLayer = BFF(layers[numHiddenLayers].Eh, labelDim, hiddenDim)
layers = array[1..L-1] (layer => if layer > 1 then SBFF(layers[layer-1].Eh, layerSizes[layer], layerSizes[layer-1]) else SBFF(featNorm, layerSizes[layer], layerSizes[layer-1]))
outLayer = BFF(layers[L-1].Eh, layerSizes[L], layerSizes[L-1])
outZ = outLayer.z
CE = if trainingCriterion == 'CE'
then CrossEntropyWithSoftmax(labels, outZ, tag='criterion')
@@ -42,6 +42,7 @@ speechTrain=[
ErrorPrediction(labels, outZ, tag='eval')
else Fail('unknown evalCriterion ' + evalCriterion)
logPrior = LogPrior(labels)
// TODO: how to add a tag to an infix operation?
ScaledLogLikelihood = outZ - logPrior
]