Merge remote-tracking branch 'origin/linux-gcc' into sls
Prepare for merge back to linux-gcc
This commit is contained in:
Commit 8bd3eece79
@ -1,605 +0,0 @@
running on KAISHENGLP1 at 2014/08/28 16:50:28
|
||||
command line options:
|
||||
configFile=C:\dev\cntk3\CheckInSuites\SLU\globals.config+C:\dev\cntk3\CheckInSuites\SLU\rnnlu.config
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> config >>>>>>>>>>>>>>>>>>>>
|
||||
configparameters: rnnlu.config:command=LSTM:LSTMTest
|
||||
configparameters: rnnlu.config:ConfigDir=$WorkDir$\config
|
||||
configparameters: rnnlu.config:DataDir=$WorkDir$
|
||||
configparameters: rnnlu.config:DeviceNumber=-1
|
||||
configparameters: rnnlu.config:ExpDir=c:\temp\exp\atis
|
||||
configparameters: rnnlu.config:LSTM=[
|
||||
action=train
|
||||
makeMode=true
|
||||
minibatchSize=10
|
||||
traceLevel=1
|
||||
deviceId=-1
|
||||
epochSize=4430000
|
||||
SimpleNetworkBuilder=[
|
||||
trainingCriterion=crossentropywithsoftmax
|
||||
evalCriterion=crossentropywithsoftmax
|
||||
defaultHiddenActivity=0.1
|
||||
recurrentLayer=2
|
||||
initValueScale=6.0
|
||||
layerSizes=2832:50:300:127
|
||||
rnnType=LSTM
|
||||
lookupTableOrder=3
|
||||
addPrior=false
|
||||
addDropoutNodes=false
|
||||
applyMeanVarNorm=false
|
||||
uniformInit=true
|
||||
]
|
||||
SGD=[
|
||||
learningRatesPerSample=0.1
|
||||
momentumPerMB=0.90
|
||||
gradientClippingWithTruncation=true
|
||||
clippingThresholdPerSample=15.0
|
||||
maxEpochs=3
|
||||
gradientcheck=false
|
||||
numMBsToShowResult=1000
|
||||
modelPath=$ExpDir$\cntkdebug.dnn
|
||||
loadBestModel=true
|
||||
AutoAdjust=[
|
||||
autoAdjustLR=adjustafterepoch
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
numMiniBatch4LRSearch=100
|
||||
numPrevLearnRates=5
|
||||
numBestSearchEpoch=1
|
||||
]
|
||||
dropoutRate=0
|
||||
]
|
||||
reader=[
|
||||
readerType=LUSequenceReader
|
||||
wordContext=0:1:2
|
||||
randomize=None
|
||||
nbruttsineachrecurrentiter=10
|
||||
wfile=$ExpDir$\sequenceSentence.bin
|
||||
wsize=256
|
||||
wrecords=1000
|
||||
windowSize=10000
|
||||
unk="<unk>"
|
||||
wordmap=$DataDir$\inputmap.txt
|
||||
file=$DataDir$\atis.train.apos.pred.pos.head.IOB.simple
|
||||
features=[
|
||||
dim=0
|
||||
sectionType=data
|
||||
]
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
sectionType=data
|
||||
]
|
||||
labelIn=[
|
||||
dim=1
|
||||
usewordmap=true
|
||||
labelDim=10000
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="BOS"
|
||||
endSequence="EOS"
|
||||
usewordmap=true
|
||||
token=$DataDir$\input.txt
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
wrecords=11
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=Category
|
||||
beginSequence="O"
|
||||
endSequence="O"
|
||||
token=$DataDir$\output.txt
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.out.txt
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
cvReader=[
|
||||
readerType=LUSequenceReader
|
||||
randomize=None
|
||||
wordContext=0:1:2
|
||||
wfile=$ExpDir$\sequenceSentence.valid.bin
|
||||
wsize=256
|
||||
wrecords=1000
|
||||
windowSize=10000
|
||||
unk="<unk>"
|
||||
wordmap=$DataDir$\inputmap.txt
|
||||
file=$DataDir$\atis.dev.IOB.simple
|
||||
features=[
|
||||
dim=0
|
||||
sectionType=data
|
||||
]
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
sectionType=data
|
||||
]
|
||||
labelIn=[
|
||||
dim=1
|
||||
labelDim=10000
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.in.txt
|
||||
labelType=Category
|
||||
beginSequence="BOS"
|
||||
endSequence="EOS"
|
||||
usewordmap=true
|
||||
token=$DataDir$\input.txt
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
wrecords=11
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=Category
|
||||
beginSequence="O"
|
||||
endSequence="O"
|
||||
token=$DataDir$\output.txt
|
||||
labelDim=10000
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.out.txt
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
wrecords=3
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=3
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
]
|
||||
configparameters: rnnlu.config:LSTMTest=[
|
||||
action=write
|
||||
minibatchSize=1
|
||||
traceLevel=1
|
||||
deviceId=-1
|
||||
epochSize=4430000
|
||||
defaultHiddenActivity=0.1
|
||||
modelPath=$ExpDir$\cntkdebug.dnn
|
||||
outputNodeNames=outputs
|
||||
reader=[
|
||||
readerType=LUSequenceReader
|
||||
randomize=None
|
||||
wordContext=0:1:2
|
||||
unk="<unk>"
|
||||
wordmap=$DataDir$\inputmap.txt
|
||||
file=$DataDir$\atis.test.apos.pred.pos.head.IOB.simple
|
||||
wfile=$ExpDir$\sequenceSentence.bin
|
||||
wsize=256
|
||||
wrecords=1000
|
||||
windowSize=10000
|
||||
features=[
|
||||
dim=0
|
||||
sectionType=data
|
||||
]
|
||||
sequence=[
|
||||
dim=1
|
||||
wrecords=2
|
||||
sectionType=data
|
||||
]
|
||||
labelIn=[
|
||||
dim=1
|
||||
labelDim=10000
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.txt
|
||||
labelType=Category
|
||||
beginSequence="BOS"
|
||||
endSequence="EOS"
|
||||
usewordmap=true
|
||||
token=$DataDir$\input.txt
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
wrecords=11
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=11
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
labels=[
|
||||
dim=1
|
||||
labelType=Category
|
||||
beginSequence="BOS"
|
||||
endSequence="EOS"
|
||||
token=$DataDir$\output.txt
|
||||
labelDim=127
|
||||
labelMappingFile=$ExpDir$\sentenceLabels.out.txt
|
||||
elementSize=4
|
||||
sectionType=labels
|
||||
mapping=[
|
||||
wrecords=3
|
||||
elementSize=10
|
||||
sectionType=labelMapping
|
||||
]
|
||||
category=[
|
||||
dim=3
|
||||
sectionType=categoryLabels
|
||||
]
|
||||
]
|
||||
]
|
||||
writer=[
|
||||
writerType=LUSequenceWriter
|
||||
outputs=[
|
||||
file=$OutDir$\output.rec.txt
|
||||
token=$DataDir$\output.txt
|
||||
]
|
||||
]
|
||||
]
|
||||
configparameters: rnnlu.config:NdlDir=$ConfigDir$
|
||||
configparameters: rnnlu.config:OutDir=$ExpDir$
|
||||
configparameters: rnnlu.config:stderr=$ExpDir$\ATIS\log
|
||||
configparameters: rnnlu.config:type=double
|
||||
configparameters: rnnlu.config:WorkDir=.
|
||||
<<<<<<<<<<<<<<<<<<<< config <<<<<<<<<<<<<<<<<<<<
|
||||
command: LSTM LSTMTest
|
||||
precision = double
|
||||
SimpleNetworkBuilder Using CPU
|
||||
reading sequence file .\atis.train.apos.pred.pos.head.IOB.simple
|
||||
reading sequence file .\atis.dev.IOB.simple
|
||||
GetTrainCriterionNodes ...
|
||||
GetEvalCriterionNodes ...
|
||||
nodes in the recurrent loops :
|
||||
AutoName37 AutoName4 AutoName15 AutoName16 AutoName18 AutoName19 AutoName5 AutoName8 AutoName1 AutoName9 AutoName12 AutoName13 AutoName14 AutoName20 AutoName7 AutoName6 AutoName21 AutoName2 AutoName22 AutoName25 AutoName26 AutoName27 AutoName28 AutoName29 AutoName30 AutoName3 AutoName31 AutoName34 AutoName35 AutoName36 AutoName38
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> WXO0 = LearnableParameter
|
||||
Validating --> E0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> LookupTable = LookupTable(E0[50, 944], features[2832, 1])
|
||||
Validating --> AutoName32 = Times(WXO0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bo0 = LearnableParameter
|
||||
Validating --> AutoName33 = Plus(AutoName32[300, 1], bo0[300, 1])
|
||||
Validating --> WHO0 = LearnableParameter
|
||||
Validating --> WCO0 = LearnableParameter
|
||||
Validating --> WXF0 = LearnableParameter
|
||||
Validating --> AutoName23 = Times(WXF0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bf0 = LearnableParameter
|
||||
Validating --> AutoName24 = Plus(AutoName23[300, 1], bf0[300, 1])
|
||||
Validating --> WHF0 = LearnableParameter
|
||||
Validating --> WCF0 = LearnableParameter
|
||||
Validating --> WXI0 = LearnableParameter
|
||||
Validating --> AutoName10 = Times(WXI0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bi0 = LearnableParameter
|
||||
Validating --> AutoName11 = Plus(AutoName10[300, 1], bi0[300, 1])
|
||||
Validating --> WHI0 = LearnableParameter
|
||||
Validating --> WCI0 = LearnableParameter
|
||||
Validating --> WXC0 = LearnableParameter
|
||||
Validating --> AutoName17 = Times(WXC0[300, 150], LookupTable[150, 1])
|
||||
Validating --> WHC0 = LearnableParameter
|
||||
Validating --> bc0 = LearnableParameter
|
||||
Validating --> AutoName3 = Delay(AutoName38[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName31 = Times(WHO0[300, 300], AutoName3[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName34 = Plus(AutoName33[300, 1], AutoName31[300, 1])
|
||||
Validating --> AutoName2 = Delay(AutoName38[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName22 = Times(WHF0[300, 300], AutoName2[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName25 = Plus(AutoName24[300, 1], AutoName22[300, 1])
|
||||
Validating --> AutoName6 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName21 = DiagTimes(WCF0[300, 1], AutoName6[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName26 = Plus(AutoName25[300, 1], AutoName21[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName27 = Sigmoid(AutoName26[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName7 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName28 = ElementTimes(AutoName27[300 {W=0, H=0, C=0}, 1], AutoName7[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName1 = Delay(AutoName38[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName9 = Times(WHI0[300, 300], AutoName1[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName12 = Plus(AutoName11[300, 1], AutoName9[300, 1])
|
||||
Validating --> AutoName5 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName8 = DiagTimes(WCI0[300, 1], AutoName5[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName13 = Plus(AutoName12[300, 1], AutoName8[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName14 = Sigmoid(AutoName13[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName4 = Delay(AutoName38[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName15 = Times(WHC0[300, 300], AutoName4[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName16 = Plus(AutoName15[300, 1], bc0[300, 1])
|
||||
Validating --> AutoName18 = Plus(AutoName17[300, 1], AutoName16[300, 1])
|
||||
Validating --> AutoName19 = Tanh(AutoName18[300, 1])
|
||||
Validating --> AutoName20 = ElementTimes(AutoName14[300 {W=0, H=0, C=0}, 1], AutoName19[300, 1])
|
||||
Validating --> AutoName29 = Plus(AutoName28[300 {W=0, H=0, C=0}, 1], AutoName20[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName30 = DiagTimes(WCO0[300, 1], AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName35 = Plus(AutoName34[300, 1], AutoName30[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName36 = Sigmoid(AutoName35[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName37 = Tanh(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName38 = ElementTimes(AutoName36[300 {W=0, H=0, C=0}, 1], AutoName37[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName39 = Times(W2[127, 300], AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[127, 1], AutoName39[127, 1])
|
||||
|
||||
No PreCompute nodes found, skipping PreCompute step
|
||||
Set Max Temp Mem Size For Convolution Nodes to 0 samples.
|
||||
WARNING: there is no convolution node.
|
||||
Finished Epoch[1]: [Training Set] Train Loss Per Sample = 4.7967326 EvalErr Per Sample = 4.7967326 Ave Learn Rate Per Sample = 0.1000000015 Epoch Time=0.177
|
||||
Final Results: Minibatch[1-11]: Samples Seen = 81 CrossEntropyWithSoftmax/Sample = 4.6260059 CrossEntropyWithSoftmax/Sample = 4.6260059
|
||||
Finished Epoch[1]: [Validation Set] Train Loss Per Sample = 4.6260059 EvalErr Per Sample = 4.6260059
|
||||
Finished Epoch[2]: [Training Set] Train Loss Per Sample = 4.4580467 EvalErr Per Sample = 4.4580467 Ave Learn Rate Per Sample = 0.1000000015 Epoch Time=0.178
|
||||
Final Results: Minibatch[1-11]: Samples Seen = 81 CrossEntropyWithSoftmax/Sample = 4.0801723 CrossEntropyWithSoftmax/Sample = 4.0801723
|
||||
Finished Epoch[2]: [Validation Set] Train Loss Per Sample = 4.0801723 EvalErr Per Sample = 4.0801723
|
||||
Finished Epoch[3]: [Training Set] Train Loss Per Sample = 3.6568716 EvalErr Per Sample = 3.6568716 Ave Learn Rate Per Sample = 0.1000000015 Epoch Time=0.171
|
||||
Final Results: Minibatch[1-11]: Samples Seen = 81 CrossEntropyWithSoftmax/Sample = 2.6959986 CrossEntropyWithSoftmax/Sample = 2.6959986
|
||||
Finished Epoch[3]: [Validation Set] Train Loss Per Sample = 2.6959986 EvalErr Per Sample = 2.6959986
|
||||
reading sequence file .\atis.test.apos.pred.pos.head.IOB.simple
|
||||
nodes in the recurrent loops :
|
||||
AutoName37 AutoName4 AutoName15 AutoName16 AutoName18 AutoName19 AutoName5 AutoName8 AutoName1 AutoName9 AutoName12 AutoName13 AutoName14 AutoName20 AutoName7 AutoName6 AutoName21 AutoName2 AutoName22 AutoName25 AutoName26 AutoName27 AutoName28 AutoName29 AutoName30 AutoName3 AutoName31 AutoName34 AutoName35 AutoName36 AutoName38
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> WXO0 = LearnableParameter
|
||||
Validating --> E0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> LookupTable = LookupTable(E0[50, 944], features[2832, 1])
|
||||
Validating --> AutoName32 = Times(WXO0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bo0 = LearnableParameter
|
||||
Validating --> AutoName33 = Plus(AutoName32[300, 1], bo0[300, 1])
|
||||
Validating --> WHO0 = LearnableParameter
|
||||
Validating --> WCO0 = LearnableParameter
|
||||
Validating --> WXF0 = LearnableParameter
|
||||
Validating --> AutoName23 = Times(WXF0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bf0 = LearnableParameter
|
||||
Validating --> AutoName24 = Plus(AutoName23[300, 1], bf0[300, 1])
|
||||
Validating --> WHF0 = LearnableParameter
|
||||
Validating --> WCF0 = LearnableParameter
|
||||
Validating --> WXI0 = LearnableParameter
|
||||
Validating --> AutoName10 = Times(WXI0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bi0 = LearnableParameter
|
||||
Validating --> AutoName11 = Plus(AutoName10[300, 1], bi0[300, 1])
|
||||
Validating --> WHI0 = LearnableParameter
|
||||
Validating --> WCI0 = LearnableParameter
|
||||
Validating --> WXC0 = LearnableParameter
|
||||
Validating --> AutoName17 = Times(WXC0[300, 150], LookupTable[150, 1])
|
||||
Validating --> WHC0 = LearnableParameter
|
||||
Validating --> bc0 = LearnableParameter
|
||||
Validating --> AutoName3 = Delay(AutoName38[0, 0])
|
||||
Validating --> AutoName31 = Times(WHO0[300, 300], AutoName3[300, 1])
|
||||
Validating --> AutoName34 = Plus(AutoName33[300, 1], AutoName31[300, 1])
|
||||
Validating --> AutoName2 = Delay(AutoName38[0, 0])
|
||||
Validating --> AutoName22 = Times(WHF0[300, 300], AutoName2[300, 1])
|
||||
Validating --> AutoName25 = Plus(AutoName24[300, 1], AutoName22[300, 1])
|
||||
Validating --> AutoName6 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName21 = DiagTimes(WCF0[300, 1], AutoName6[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName26 = Plus(AutoName25[300, 1], AutoName21[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName27 = Sigmoid(AutoName26[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName7 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName28 = ElementTimes(AutoName27[300 {W=0, H=0, C=0}, 1], AutoName7[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName1 = Delay(AutoName38[0, 0])
|
||||
Validating --> AutoName9 = Times(WHI0[300, 300], AutoName1[300, 1])
|
||||
Validating --> AutoName12 = Plus(AutoName11[300, 1], AutoName9[300, 1])
|
||||
Validating --> AutoName5 = Delay(AutoName29[0 {W=0, H=0, C=0}, 0])
|
||||
Validating --> AutoName8 = DiagTimes(WCI0[300, 1], AutoName5[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName13 = Plus(AutoName12[300, 1], AutoName8[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName14 = Sigmoid(AutoName13[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName4 = Delay(AutoName38[0, 0])
|
||||
Validating --> AutoName15 = Times(WHC0[300, 300], AutoName4[300, 1])
|
||||
Validating --> AutoName16 = Plus(AutoName15[300, 1], bc0[300, 1])
|
||||
Validating --> AutoName18 = Plus(AutoName17[300, 1], AutoName16[300, 1])
|
||||
Validating --> AutoName19 = Tanh(AutoName18[300, 1])
|
||||
Validating --> AutoName20 = ElementTimes(AutoName14[300 {W=0, H=0, C=0}, 1], AutoName19[300, 1])
|
||||
Validating --> AutoName29 = Plus(AutoName28[300 {W=0, H=0, C=0}, 1], AutoName20[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName30 = DiagTimes(WCO0[300, 1], AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName35 = Plus(AutoName34[300, 1], AutoName30[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName36 = Sigmoid(AutoName35[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName37 = Tanh(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName38 = ElementTimes(AutoName36[300 {W=0, H=0, C=0}, 1], AutoName37[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName39 = Times(W2[127, 300], AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[127, 1], AutoName39[127, 1])
|
||||
|
||||
|
||||
|
||||
Validating node outputs
|
||||
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> WXO0 = LearnableParameter
|
||||
Validating --> E0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> LookupTable = LookupTable(E0[50, 944], features[2832, 1])
|
||||
Validating --> AutoName32 = Times(WXO0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bo0 = LearnableParameter
|
||||
Validating --> AutoName33 = Plus(AutoName32[300, 1], bo0[300, 1])
|
||||
Validating --> WHO0 = LearnableParameter
|
||||
Validating --> AutoName3 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName31 = Times(WHO0[300, 300], AutoName3[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName34 = Plus(AutoName33[300, 1], AutoName31[300, 1])
|
||||
Validating --> WCO0 = LearnableParameter
|
||||
Validating --> WXF0 = LearnableParameter
|
||||
Validating --> AutoName23 = Times(WXF0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bf0 = LearnableParameter
|
||||
Validating --> AutoName24 = Plus(AutoName23[300, 1], bf0[300, 1])
|
||||
Validating --> WHF0 = LearnableParameter
|
||||
Validating --> AutoName2 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName22 = Times(WHF0[300, 300], AutoName2[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName25 = Plus(AutoName24[300, 1], AutoName22[300, 1])
|
||||
Validating --> WCF0 = LearnableParameter
|
||||
Validating --> AutoName6 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName21 = DiagTimes(WCF0[300, 1], AutoName6[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName26 = Plus(AutoName25[300, 1], AutoName21[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName27 = Sigmoid(AutoName26[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName7 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName28 = ElementTimes(AutoName27[300 {W=0, H=0, C=0}, 1], AutoName7[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> WXI0 = LearnableParameter
|
||||
Validating --> AutoName10 = Times(WXI0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bi0 = LearnableParameter
|
||||
Validating --> AutoName11 = Plus(AutoName10[300, 1], bi0[300, 1])
|
||||
Validating --> WHI0 = LearnableParameter
|
||||
Validating --> AutoName1 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName9 = Times(WHI0[300, 300], AutoName1[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName12 = Plus(AutoName11[300, 1], AutoName9[300, 1])
|
||||
Validating --> WCI0 = LearnableParameter
|
||||
Validating --> AutoName5 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName8 = DiagTimes(WCI0[300, 1], AutoName5[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName13 = Plus(AutoName12[300, 1], AutoName8[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName14 = Sigmoid(AutoName13[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> WXC0 = LearnableParameter
|
||||
Validating --> AutoName17 = Times(WXC0[300, 150], LookupTable[150, 1])
|
||||
Validating --> WHC0 = LearnableParameter
|
||||
Validating --> AutoName4 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName15 = Times(WHC0[300, 300], AutoName4[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> bc0 = LearnableParameter
|
||||
Validating --> AutoName16 = Plus(AutoName15[300, 1], bc0[300, 1])
|
||||
Validating --> AutoName18 = Plus(AutoName17[300, 1], AutoName16[300, 1])
|
||||
Validating --> AutoName19 = Tanh(AutoName18[300, 1])
|
||||
Validating --> AutoName20 = ElementTimes(AutoName14[300 {W=0, H=0, C=0}, 1], AutoName19[300, 1])
|
||||
Validating --> AutoName29 = Plus(AutoName28[300 {W=0, H=0, C=0}, 1], AutoName20[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName30 = DiagTimes(WCO0[300, 1], AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName35 = Plus(AutoName34[300, 1], AutoName30[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName36 = Sigmoid(AutoName35[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName37 = Tanh(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName38 = ElementTimes(AutoName36[300 {W=0, H=0, C=0}, 1], AutoName37[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> outputs = Times(W2[127, 300], AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
|
||||
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> WXO0 = LearnableParameter
|
||||
Validating --> E0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> LookupTable = LookupTable(E0[50, 944], features[2832, 1])
|
||||
Validating --> AutoName32 = Times(WXO0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bo0 = LearnableParameter
|
||||
Validating --> AutoName33 = Plus(AutoName32[300, 1], bo0[300, 1])
|
||||
Validating --> WHO0 = LearnableParameter
|
||||
Validating --> WCO0 = LearnableParameter
|
||||
Validating --> WXF0 = LearnableParameter
|
||||
Validating --> AutoName23 = Times(WXF0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bf0 = LearnableParameter
|
||||
Validating --> AutoName24 = Plus(AutoName23[300, 1], bf0[300, 1])
|
||||
Validating --> WHF0 = LearnableParameter
|
||||
Validating --> WCF0 = LearnableParameter
|
||||
Validating --> WXI0 = LearnableParameter
|
||||
Validating --> AutoName10 = Times(WXI0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bi0 = LearnableParameter
|
||||
Validating --> AutoName11 = Plus(AutoName10[300, 1], bi0[300, 1])
|
||||
Validating --> WHI0 = LearnableParameter
|
||||
Validating --> WCI0 = LearnableParameter
|
||||
Validating --> WXC0 = LearnableParameter
|
||||
Validating --> AutoName17 = Times(WXC0[300, 150], LookupTable[150, 1])
|
||||
Validating --> WHC0 = LearnableParameter
|
||||
Validating --> bc0 = LearnableParameter
|
||||
Validating --> AutoName3 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName31 = Times(WHO0[300, 300], AutoName3[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName34 = Plus(AutoName33[300, 1], AutoName31[300, 1])
|
||||
Validating --> AutoName2 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName22 = Times(WHF0[300, 300], AutoName2[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName25 = Plus(AutoName24[300, 1], AutoName22[300, 1])
|
||||
Validating --> AutoName6 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName21 = DiagTimes(WCF0[300, 1], AutoName6[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName26 = Plus(AutoName25[300, 1], AutoName21[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName27 = Sigmoid(AutoName26[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName7 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName28 = ElementTimes(AutoName27[300 {W=0, H=0, C=0}, 1], AutoName7[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName1 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName9 = Times(WHI0[300, 300], AutoName1[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName12 = Plus(AutoName11[300, 1], AutoName9[300, 1])
|
||||
Validating --> AutoName5 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName8 = DiagTimes(WCI0[300, 1], AutoName5[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName13 = Plus(AutoName12[300, 1], AutoName8[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName14 = Sigmoid(AutoName13[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName4 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName15 = Times(WHC0[300, 300], AutoName4[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName16 = Plus(AutoName15[300, 1], bc0[300, 1])
|
||||
Validating --> AutoName18 = Plus(AutoName17[300, 1], AutoName16[300, 1])
|
||||
Validating --> AutoName19 = Tanh(AutoName18[300, 1])
|
||||
Validating --> AutoName20 = ElementTimes(AutoName14[300 {W=0, H=0, C=0}, 1], AutoName19[300, 1])
|
||||
Validating --> AutoName29 = Plus(AutoName28[300 {W=0, H=0, C=0}, 1], AutoName20[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName30 = DiagTimes(WCO0[300, 1], AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName35 = Plus(AutoName34[300, 1], AutoName30[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName36 = Sigmoid(AutoName35[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName37 = Tanh(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName38 = ElementTimes(AutoName36[300 {W=0, H=0, C=0}, 1], AutoName37[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName39 = Times(W2[127, 300], AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[127, 1], AutoName39[127, 1])
|
||||
|
||||
nodes in the recurrent loops :
|
||||
AutoName37 AutoName4 AutoName15 AutoName16 AutoName18 AutoName19 AutoName5 AutoName8 AutoName1 AutoName9 AutoName12 AutoName13 AutoName14 AutoName20 AutoName7 AutoName6 AutoName21 AutoName2 AutoName22 AutoName25 AutoName26 AutoName27 AutoName28 AutoName29 AutoName30 AutoName3 AutoName31 AutoName34 AutoName35 AutoName36 AutoName38
|
||||
|
||||
Validating node outputs
|
||||
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> WXO0 = LearnableParameter
|
||||
Validating --> E0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> LookupTable = LookupTable(E0[50, 944], features[2832, 1])
|
||||
Validating --> AutoName32 = Times(WXO0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bo0 = LearnableParameter
|
||||
Validating --> AutoName33 = Plus(AutoName32[300, 1], bo0[300, 1])
|
||||
Validating --> WHO0 = LearnableParameter
|
||||
Validating --> WCO0 = LearnableParameter
|
||||
Validating --> WXF0 = LearnableParameter
|
||||
Validating --> AutoName23 = Times(WXF0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bf0 = LearnableParameter
|
||||
Validating --> AutoName24 = Plus(AutoName23[300, 1], bf0[300, 1])
|
||||
Validating --> WHF0 = LearnableParameter
|
||||
Validating --> WCF0 = LearnableParameter
|
||||
Validating --> WXI0 = LearnableParameter
|
||||
Validating --> AutoName10 = Times(WXI0[300, 150], LookupTable[150, 1])
|
||||
Validating --> bi0 = LearnableParameter
|
||||
Validating --> AutoName11 = Plus(AutoName10[300, 1], bi0[300, 1])
|
||||
Validating --> WHI0 = LearnableParameter
|
||||
Validating --> WCI0 = LearnableParameter
|
||||
Validating --> WXC0 = LearnableParameter
|
||||
Validating --> AutoName17 = Times(WXC0[300, 150], LookupTable[150, 1])
|
||||
Validating --> WHC0 = LearnableParameter
|
||||
Validating --> bc0 = LearnableParameter
|
||||
Validating --> AutoName3 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName31 = Times(WHO0[300, 300], AutoName3[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName34 = Plus(AutoName33[300, 1], AutoName31[300, 1])
|
||||
Validating --> AutoName2 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName22 = Times(WHF0[300, 300], AutoName2[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName25 = Plus(AutoName24[300, 1], AutoName22[300, 1])
|
||||
Validating --> AutoName6 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName21 = DiagTimes(WCF0[300, 1], AutoName6[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName26 = Plus(AutoName25[300, 1], AutoName21[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName27 = Sigmoid(AutoName26[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName7 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName28 = ElementTimes(AutoName27[300 {W=0, H=0, C=0}, 1], AutoName7[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName1 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName9 = Times(WHI0[300, 300], AutoName1[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName12 = Plus(AutoName11[300, 1], AutoName9[300, 1])
|
||||
Validating --> AutoName5 = Delay(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName8 = DiagTimes(WCI0[300, 1], AutoName5[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName13 = Plus(AutoName12[300, 1], AutoName8[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName14 = Sigmoid(AutoName13[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName4 = Delay(AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName15 = Times(WHC0[300, 300], AutoName4[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName16 = Plus(AutoName15[300, 1], bc0[300, 1])
|
||||
Validating --> AutoName18 = Plus(AutoName17[300, 1], AutoName16[300, 1])
|
||||
Validating --> AutoName19 = Tanh(AutoName18[300, 1])
|
||||
Validating --> AutoName20 = ElementTimes(AutoName14[300 {W=0, H=0, C=0}, 1], AutoName19[300, 1])
|
||||
Validating --> AutoName29 = Plus(AutoName28[300 {W=0, H=0, C=0}, 1], AutoName20[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName30 = DiagTimes(WCO0[300, 1], AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName35 = Plus(AutoName34[300, 1], AutoName30[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName36 = Sigmoid(AutoName35[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName37 = Tanh(AutoName29[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> AutoName38 = ElementTimes(AutoName36[300 {W=0, H=0, C=0}, 1], AutoName37[300 {W=0, H=0, C=0}, 1])
|
||||
Validating --> outputs = Times(W2[127, 300], AutoName38[300 {W=0, H=0, C=0}, 1])
|
||||
|
||||
Total Samples Evaluated = 91
|
|
@ -1,7 +1,188 @@
|
|||
// CrossProcessMutex.h -- implements a system-wide mutex to allow for system-wide GPU locking
|
||||
|
||||
#pragma once
|
||||
|
||||
#ifdef WIN32
|
||||
#include "CrossProcessMutex_win32.h"
|
||||
#else
|
||||
#include "CrossProcessMutex_linux.h"
|
||||
#endif
|
||||
// implementations differ greatly between Windows and Linux
|
||||
|
||||
#include <cassert>
|
||||
#include <string>
|
||||
|
||||
#ifdef WIN32 // --- Windows version
|
||||
|
||||
#include <Windows.h> // for HANDLE
|
||||
|
||||
class CrossProcessMutex
|
||||
{
|
||||
// no-copying
|
||||
CrossProcessMutex(const CrossProcessMutex&);
|
||||
void operator=(const CrossProcessMutex&);
|
||||
|
||||
std::string m_name; // lock name
|
||||
HANDLE m_handle;
|
||||
public:
|
||||
CrossProcessMutex(const std::string& name)
|
||||
:m_handle(NULL),
|
||||
m_name("Global\\" + name)
|
||||
{
|
||||
}
|
||||
|
||||
// Acquires the mutex. If 'wait' is true and the mutex is held by another process,
// the function waits until the mutex is released.
// Returns true if successful.
|
||||
bool Acquire(bool wait)
|
||||
{
|
||||
assert(m_handle == NULL);
|
||||
m_handle = ::CreateMutexA(NULL/*security attr*/, FALSE/*bInitialOwner*/, m_name.c_str());
|
||||
if (m_handle == NULL)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (::WaitForSingleObject(m_handle, wait ? INFINITE : 0) != WAIT_OBJECT_0)
|
||||
{
|
||||
::CloseHandle(m_handle);
|
||||
m_handle = NULL;
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Releases the mutex
|
||||
void Release()
|
||||
{
|
||||
assert(m_handle != NULL);
|
||||
::ReleaseMutex(m_handle);
|
||||
::CloseHandle(m_handle);
|
||||
m_handle = NULL;
|
||||
}
|
||||
|
||||
~CrossProcessMutex()
|
||||
{
|
||||
if (m_handle != NULL)
|
||||
{
|
||||
Release();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#else // --- Linux version
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/file.h>
|
||||
#include <fcntl.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
#include <signal.h>
|
||||
|
||||
class CrossProcessMutex
|
||||
{
|
||||
// no-copying
|
||||
CrossProcessMutex(const CrossProcessMutex&);
|
||||
void operator=(const CrossProcessMutex&);
|
||||
|
||||
int m_fd; // file descriptor
|
||||
std::string m_fileName; // lock file name
|
||||
struct flock m_lock; // fcntl lock structure
|
||||
|
||||
static void noOpAlarmHandler(int /*signum*/)
|
||||
{
|
||||
// this handler is intentionally a no-op;
// the side effect of executing it is that the fcntl call below
// terminates with EINTR
|
||||
}
|
||||
|
||||
static void setupTimeout(int seconds)
|
||||
{
|
||||
struct sigaction action = {};
|
||||
action.sa_handler = &CrossProcessMutex::noOpAlarmHandler;
|
||||
sigaction(SIGALRM, &action, NULL);
|
||||
alarm(seconds);
|
||||
}
|
||||
|
||||
public:
|
||||
CrossProcessMutex(const std::string& name)
|
||||
:m_fd(-1),
|
||||
m_fileName("/var/lock/" + name)
|
||||
{
|
||||
}
|
||||
|
||||
// Acquires the mutex. If 'wait' is true and the mutex is held by another process,
// the function waits until the mutex is released.
// Returns true if successful.
|
||||
bool Acquire(bool wait)
|
||||
{
|
||||
assert(m_fd == -1);
|
||||
for (;;) {
|
||||
// opening a lock file
|
||||
int fd = open(m_fileName.c_str(), O_WRONLY | O_CREAT, 0666);
|
||||
if (fd < 0) {
|
||||
return false;
|
||||
}
|
||||
// locking it with the fcntl API
|
||||
memset(&m_lock, 0, sizeof(m_lock));
|
||||
m_lock.l_type = F_WRLCK;
|
||||
// BUG: fcntl call with F_SETLKW doesn't always reliably detect when lock is released
|
||||
// As a workaround, use alarm() to interrupt fcntl if it waits for more than 1 second
|
||||
setupTimeout(1);
|
||||
int r = fcntl(fd, wait ? F_SETLKW : F_SETLK, &m_lock);
|
||||
if (errno == EINTR) {
|
||||
sleep(1);
|
||||
// retrying in the case of signal or timeout
|
||||
close(fd);
|
||||
continue;
|
||||
}
|
||||
if (r != 0) {
|
||||
// acquire failed
|
||||
close(fd);
|
||||
return false;
|
||||
}
|
||||
// we own the exclusive lock on the file descriptor, but we need to double-check
|
||||
// that the lock file wasn't deleted and/or re-created;
|
||||
// checking this by comparing inode numbers
|
||||
struct stat before, after;
|
||||
fstat(fd, &before);
|
||||
if (stat(m_fileName.c_str(), &after) != 0 || before.st_ino != after.st_ino)
|
||||
{
|
||||
// we have a race with the 'unlink' call in Release();
// our lock is held on the previous instance of the file;
|
||||
// this is not a problem, we just need to retry locking the new file
|
||||
close(fd);
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
// lock acquired successfully
|
||||
m_fd = fd;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Releases the mutex
|
||||
void Release()
|
||||
{
|
||||
assert(m_fd != -1);
|
||||
// removing file
|
||||
unlink(m_fileName.c_str());
|
||||
// Note: the file is intentionally removed *before* releasing the lock,
// so that the locked file cannot be deleted by a process that does not own the lock
|
||||
m_lock.l_type = F_UNLCK;
|
||||
// Now removing the lock and closing the file descriptor
|
||||
// waiting processes will be notified
|
||||
fcntl(m_fd, F_SETLKW, &m_lock);
|
||||
close(m_fd);
|
||||
m_fd = -1;
|
||||
}
|
||||
|
||||
~CrossProcessMutex()
|
||||
{
|
||||
if (m_fd != -1)
|
||||
{
|
||||
Release();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
#endif
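
A minimal usage sketch of the merged CrossProcessMutex above (hypothetical caller, not part of this commit; the lock name and the idea of guarding GPU selection are illustrative assumptions only):

#include <cstdio>
#include <string>
#include "CrossProcessMutex.h"

// Illustrative only: serialize exclusive use of a device across processes on one machine.
int main()
{
    CrossProcessMutex deviceLock("exclusive_lock_for_device_0"); // assumed lock name
    if (!deviceLock.Acquire(/*wait=*/true)) // block until no other process holds the lock
    {
        fprintf(stderr, "could not acquire the cross-process lock\n");
        return 1;
    }
    // ... exclusive work goes here ...
    deviceLock.Release(); // also released automatically by the destructor if still held
    return 0;
}

On Windows this maps onto a named kernel mutex ("Global\\" + name); on Linux it maps onto an fcntl write lock on a file under /var/lock, as shown in the two branches above.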
|
||||
|
|
|
@ -1,119 +0,0 @@
|
|||
#pragma once
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/file.h>
|
||||
#include <fcntl.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
#include <string>
|
||||
#include <cassert>
|
||||
#include <signal.h>
|
||||
|
||||
class CrossProcessMutex
|
||||
{
|
||||
// no-copying
|
||||
CrossProcessMutex(const CrossProcessMutex&);
|
||||
void operator=(const CrossProcessMutex&);
|
||||
|
||||
int m_fd; // file descriptor
|
||||
std::string m_fileName; // lock file name
|
||||
struct flock m_lock; // fcntl lock structure
|
||||
|
||||
static void noOpAlarmHandler(int /*signum*/)
|
||||
{
|
||||
// this handler is intentionally NO-OP
|
||||
// the side effect of executing this handler
|
||||
// will be a termination of fcntl call below with EINTR
|
||||
}
|
||||
|
||||
static void setupTimeout(int seconds)
|
||||
{
|
||||
struct sigaction action = {};
|
||||
action.sa_handler = &CrossProcessMutex::noOpAlarmHandler;
|
||||
sigaction(SIGALRM, &action, NULL);
|
||||
alarm(seconds);
|
||||
}
|
||||
|
||||
public:
|
||||
CrossProcessMutex(const std::string& name)
|
||||
:m_fd(-1),
|
||||
m_fileName("/var/lock/" + name)
|
||||
{
|
||||
}
|
||||
|
||||
// Acquires the mutex. If 'wait' is true and mutex is acquired by someone else then
|
||||
// function waits until the mutex is released
|
||||
// Returns true if successful
|
||||
bool Acquire(bool wait)
|
||||
{
|
||||
assert(m_fd == -1);
|
||||
for (;;) {
|
||||
// opening a lock file
|
||||
int fd = open(m_fileName.c_str(), O_WRONLY|O_CREAT, 0666);
|
||||
if (fd < 0) {
|
||||
return false;
|
||||
}
|
||||
// locking it with the fcntl API
|
||||
memset(&m_lock, 0, sizeof(m_lock));
|
||||
m_lock.l_type = F_WRLCK;
|
||||
// BUG: fcntl call with F_SETLKW doesn't always reliably detect when lock is released
|
||||
// As a workaround, using alarm() for interrupting fcntl if it waits more than 1 second
|
||||
setupTimeout(1);
|
||||
int r = fcntl(fd, wait ? F_SETLKW : F_SETLK, &m_lock);
|
||||
if (errno == EINTR) {
|
||||
sleep(1);
|
||||
// retrying in the case of signal or timeout
|
||||
close(fd);
|
||||
continue;
|
||||
}
|
||||
if (r != 0) {
|
||||
// acquire failed
|
||||
close(fd);
|
||||
return false;
|
||||
}
|
||||
// we own the exclusive lock on file descriptor, but we need to double-check
|
||||
// that the lock file wasn't deleted and/or re-created;
|
||||
// checking this by comparing inode numbers
|
||||
struct stat before, after;
|
||||
fstat(fd, &before);
|
||||
if (stat(m_fileName.c_str(), &after) != 0 || before.st_ino != after.st_ino)
|
||||
{
|
||||
// we have a race with 'unlink' call in Release()
|
||||
// our lock is held on the previous instance of the file;
|
||||
// this is not a problem, we just need to retry locking the new file
|
||||
close(fd);
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
// lock acquired successfully
|
||||
m_fd = fd;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Releases the mutex
|
||||
void Release()
|
||||
{
|
||||
assert(m_fd != -1);
|
||||
// removing file
|
||||
unlink(m_fileName.c_str());
|
||||
// Note: file is intentionally removed *before* releasing the lock
|
||||
// to ensure that locked file isn't deleted by the non-owner of the lock
|
||||
m_lock.l_type = F_UNLCK;
|
||||
// Now removing the lock and closing the file descriptor
|
||||
// waiting processes will be notified
|
||||
fcntl(m_fd, F_SETLKW, &m_lock);
|
||||
close(m_fd);
|
||||
m_fd = -1;
|
||||
}
|
||||
|
||||
~CrossProcessMutex()
|
||||
{
|
||||
if (m_fd != -1)
|
||||
{
|
||||
Release();
|
||||
}
|
||||
}
|
||||
};
|
|
@ -1,55 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
class CrossProcessMutex
|
||||
{
|
||||
// no-copying
|
||||
CrossProcessMutex(const CrossProcessMutex&);
|
||||
void operator=(const CrossProcessMutex&);
|
||||
|
||||
std::string m_name; // lock name
|
||||
HANDLE m_handle;
|
||||
public:
|
||||
CrossProcessMutex(const std::string& name)
|
||||
:m_handle(NULL),
|
||||
m_name("Global\\" + name)
|
||||
{
|
||||
}
|
||||
|
||||
// Acquires the mutex. If 'wait' is true and mutex is acquired by someone else then
|
||||
// function waits until the mutex is released
|
||||
// Returns true if successful
|
||||
bool Acquire(bool wait)
|
||||
{
|
||||
assert (m_handle == NULL);
|
||||
m_handle = ::CreateMutexA(NULL/*security attr*/, FALSE/*bInitialOwner*/, m_name.c_str());
|
||||
if (m_handle == NULL)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (::WaitForSingleObject(m_handle, wait ? INFINITE : 0) != WAIT_OBJECT_0)
|
||||
{
|
||||
::CloseHandle(m_handle);
|
||||
m_handle = NULL;
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Releases the mutex
|
||||
void Release()
|
||||
{
|
||||
assert (m_handle != NULL);
|
||||
::CloseHandle(m_handle);
|
||||
m_handle = NULL;
|
||||
}
|
||||
|
||||
~CrossProcessMutex()
|
||||
{
|
||||
if (m_handle != NULL)
|
||||
{
|
||||
Release();
|
||||
}
|
||||
}
|
||||
};
|
1388 Common/File.cpp (file diff not shown because of its large size)
@ -425,10 +425,14 @@ void unlinkOrDie (const std::wstring & pathname)
|
|||
void renameOrDie (const std::string & from, const std::string & to)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
if (!MoveFileA (from.c_str(),to.c_str()))
|
||||
RuntimeError("error renaming: %s", GetLastError());
|
||||
#else // TODO: test this
|
||||
if (!rename (from.c_str(), to.c_str()))
|
||||
// deleting the destination file if it exists (to match Linux rename semantics)
|
||||
if (fexists(to.c_str()) && !DeleteFileA(to.c_str()))
|
||||
RuntimeError("error deleting file: '%s': %d", to.c_str(), GetLastError());
|
||||
|
||||
if (!MoveFileA (from.c_str(), to.c_str()))
|
||||
RuntimeError("error renaming file '%s': %d", from.c_str(), GetLastError());
|
||||
#else
|
||||
if (rename (from.c_str(), to.c_str()) != 0)
|
||||
RuntimeError("error renaming file '%s': %s", from.c_str(), strerror(errno));
|
||||
#endif
|
||||
}
|
||||
|
@ -436,8 +440,12 @@ void renameOrDie (const std::string & from, const std::string & to)
|
|||
void renameOrDie (const std::wstring & from, const std::wstring & to)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
// deleting the destination file if it exists (to match Linux rename semantics)
|
||||
if (fexists(to.c_str()) && !DeleteFileW(to.c_str()))
|
||||
RuntimeError("error deleting file '%S': %d", to.c_str(), GetLastError());
|
||||
|
||||
if (!MoveFileW(from.c_str(), to.c_str()))
|
||||
RuntimeError ("error renaming: %s", GetLastError());
|
||||
RuntimeError ("error renaming file '%S': %d", from.c_str(), GetLastError());
|
||||
#else
|
||||
renameOrDie (charpath(from), charpath(to));
|
||||
#endif
|
||||
|
|
|
@ -52,12 +52,12 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_uttDerivBuffer = NULL;
|
||||
m_minibatchBuffer.resize(0);
|
||||
m_minibatchBufferIndex = 0;
|
||||
m_minibatchBufferLeftovers = 0;
|
||||
m_noData = false;
|
||||
m_convertLabelsToTargets = false;
|
||||
m_doSeqTrain = false;
|
||||
m_getMinibatchCopy = false;
|
||||
m_doMinibatchBuffering = false;
|
||||
m_doMinibatchBufferTruncation = false;
|
||||
|
||||
if (readerConfig.Exists("legacyMode"))
|
||||
{
|
||||
|
@ -113,6 +113,17 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
std::string minibatchMode(readerConfig("minibatchMode", "Partial"));
|
||||
m_partialMinibatch = !_stricmp(minibatchMode.c_str(), "Partial");
|
||||
|
||||
// Figures out if we have to do minibatch buffering and how.
|
||||
if (m_doSeqTrain)
|
||||
{
|
||||
m_doMinibatchBuffering = true;
|
||||
if (m_truncated)
|
||||
{
|
||||
m_truncated = false;
|
||||
m_doMinibatchBufferTruncation = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if we are in "write" mode or "train/test" mode.
|
||||
string command(readerConfig("action",L""));
|
||||
if (command == "write")
|
||||
|
@ -826,7 +837,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_minibatchBuffer.clear();
|
||||
m_getMinibatchCopy = false;
|
||||
m_minibatchBufferIndex = 0;
|
||||
m_minibatchBufferLeftovers = 0;
|
||||
m_uttInfo.clear();
|
||||
m_minibatchUttInfo.clear();
|
||||
|
||||
|
@ -1039,10 +1049,29 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
m_minibatchUttInfo.assign(m_numberOfuttsPerMinibatch,
|
||||
std::vector<std::pair<wstring, size_t>>(0));
|
||||
|
||||
// For the moment we don't support the same utterance appearing
// more than once in the same minibatch.
|
||||
m_hasUttInCurrentMinibatch.clear();
|
||||
for (size_t i = 0; i < m_numberOfuttsPerMinibatch; i++)
|
||||
{
|
||||
while (m_hasUttInCurrentMinibatch.find(m_uttInfo[i][0].first)
|
||||
!= m_hasUttInCurrentMinibatch.end())
|
||||
{
|
||||
fprintf(stderr, "WARNING: Utterance \"%S\" already exists "
|
||||
"in the minibatch, skipping it.\n",
|
||||
m_uttInfo[i][0].first.c_str());
|
||||
ReNewBufferForMultiIO(i);
|
||||
}
|
||||
if (m_uttInfo[i].size() > 0)
|
||||
{
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
m_currentMBSize = m_mbSize;
|
||||
do
|
||||
do
|
||||
{
|
||||
// Checks if we have finished all the utterances.
|
||||
if (m_noData)
|
||||
|
@ -1112,7 +1141,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
endFrame = startFrame + m_currentMBSize;
|
||||
bool populateSucc = PopulateUtteranceInMinibatch(matrices, i, startFrame, endFrame, m_currentMBSize);
|
||||
if (m_doMinibatchBuffering && populateSucc) { m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]); }
|
||||
if (m_doMinibatchBuffering && populateSucc)
|
||||
{
|
||||
m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]);
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
m_processedFrame[i] += m_currentMBSize;
|
||||
}
|
||||
else if ((startFrame + m_currentMBSize) == m_toProcess[i])
|
||||
|
@ -1124,7 +1157,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// and it reaches the end of the utterance.
|
||||
// 3. <m_framemode> is true, then we do not have to set
|
||||
// utterance boundary.
|
||||
|
||||
|
||||
// Sets the utterance boundary.
|
||||
if (m_framemode == false)
|
||||
{
|
||||
|
@ -1146,7 +1179,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// next one.
|
||||
endFrame = startFrame + m_currentMBSize;
|
||||
bool populateSucc = PopulateUtteranceInMinibatch(matrices, i, startFrame, endFrame, m_currentMBSize);
|
||||
if (m_doMinibatchBuffering && populateSucc) { m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]); }
|
||||
if (m_doMinibatchBuffering && populateSucc)
|
||||
{
|
||||
m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]);
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
m_processedFrame[i] += m_currentMBSize;
|
||||
bool reNewSucc = ReNewBufferForMultiIO(i);
|
||||
}
|
||||
|
@ -1160,7 +1197,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// 3. <m_framemode> is false, <m_truncated> is false,
|
||||
// then the utterance is too short, we should try to
|
||||
// pull next utterance.
|
||||
|
||||
|
||||
// Checks if we have reached the end of the minibatch.
|
||||
if (startFrame == m_toProcess[i])
|
||||
{
|
||||
|
@ -1171,7 +1208,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
// Populates <NO_INPUT> with real features, the
|
||||
// following implementation is not efficient...
|
||||
assert(m_toProcess[i] > 0);
|
||||
PopulateUtteranceInMinibatch(matrices, i, 0, 1, m_currentMBSize, k);
|
||||
}
|
||||
continue;
|
||||
|
@ -1208,7 +1244,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
endFrame = m_toProcess[i];
|
||||
size_t currentMBFilled = endFrame - startFrame;
|
||||
bool populateSucc = PopulateUtteranceInMinibatch(matrices, i, startFrame, endFrame, m_currentMBSize);
|
||||
if (m_doMinibatchBuffering && populateSucc) { m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]); }
|
||||
if (m_doMinibatchBuffering && populateSucc)
|
||||
{
|
||||
m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]);
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
m_processedFrame[i] += currentMBFilled;
|
||||
bool reNewSucc = ReNewBufferForMultiIO(i);
|
||||
|
||||
|
@ -1223,7 +1263,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_sentenceBegin.SetValue(i, currentMBFilled + m_toProcess[i] - 1, (ElemType)SEQUENCE_END);
|
||||
m_minibatchPackingFlag[currentMBFilled + m_toProcess[i] - 1] |= MinibatchPackingFlag::SequenceEnd;
|
||||
populateSucc = PopulateUtteranceInMinibatch(matrices, i, 0, m_toProcess[i], m_currentMBSize, currentMBFilled);
|
||||
if (m_doMinibatchBuffering && populateSucc) { m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]); }
|
||||
if (m_doMinibatchBuffering && populateSucc)
|
||||
{
|
||||
m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]);
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
assert(m_processedFrame[i] == 0);
|
||||
m_processedFrame[i] = m_toProcess[i];
|
||||
currentMBFilled += m_toProcess[i];
|
||||
|
@ -1235,7 +1279,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (reNewSucc && !m_framemode && m_truncated)
|
||||
{
|
||||
populateSucc = PopulateUtteranceInMinibatch(matrices, i, 0, m_currentMBSize - currentMBFilled, m_currentMBSize, currentMBFilled);
|
||||
if (m_doMinibatchBuffering && populateSucc) { m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]); }
|
||||
if (m_doMinibatchBuffering && populateSucc)
|
||||
{
|
||||
m_minibatchUttInfo[i].push_back(m_uttInfo[i][0]);
|
||||
m_hasUttInCurrentMinibatch[m_uttInfo[i][0].first] = true;
|
||||
}
|
||||
m_processedFrame[i] += m_currentMBSize - currentMBFilled;
|
||||
if (currentMBFilled < m_currentMBSize)
|
||||
{
|
||||
|
@ -1252,7 +1300,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
// Populates <NO_INPUT> with real features, the
|
||||
// following implementation is not efficient...
|
||||
assert(m_toProcess[i] > 0);
|
||||
PopulateUtteranceInMinibatch(matrices, i, 0, 1, m_currentMBSize, k);
|
||||
}
|
||||
}
|
||||
|
@ -1276,23 +1323,15 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (m_getMinibatchCopy == false && m_minibatchBuffer.size() > 0)
|
||||
{
|
||||
m_minibatchBufferIndex = 0;
|
||||
m_minibatchBufferLeftovers = m_minibatchBuffer.size() - 1; // Will pop one more.
|
||||
return true;
|
||||
}
|
||||
|
||||
// If <m_getMinibatchCopy> is true, we first have to re-compute
|
||||
// the likelihood for the frames that are already in the buffer.
|
||||
if (m_getMinibatchCopy == true && m_minibatchBufferLeftovers > 0)
|
||||
if (m_getMinibatchCopy == true
|
||||
&& m_minibatchBufferIndex + 1 < m_minibatchBuffer.size())
|
||||
{
|
||||
if (m_minibatchBufferLeftovers == m_minibatchBuffer.size())
|
||||
{
|
||||
m_minibatchBufferIndex = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
m_minibatchBufferIndex += 1;
|
||||
}
|
||||
m_minibatchBufferLeftovers -= 1;
|
||||
m_minibatchBufferIndex += 1;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -1303,35 +1342,86 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
template<class ElemType>
|
||||
void HTKMLFReader<ElemType>::CopyMinibatchToBuffer()
|
||||
{
|
||||
MinibatchBufferUnit currentMinibatch;
|
||||
|
||||
// Stores variables related to the current minibatch.
|
||||
currentMinibatch.sentenceBegin.SetValue(m_sentenceBegin);
|
||||
currentMinibatch.minibatchPackingFlag = m_minibatchPackingFlag;
|
||||
currentMinibatch.currentMBSize = m_currentMBSize;
|
||||
currentMinibatch.minibatchUttInfo = m_minibatchUttInfo;
|
||||
|
||||
size_t size = m_currentMBSize * m_numberOfuttsPerMinibatch;
|
||||
|
||||
// Copies features.
|
||||
currentMinibatch.features.resize(0);
|
||||
for (size_t i = 0; i < m_featuresBufferMultiIO.size(); ++i)
|
||||
size_t originalMBSize = m_currentMBSize;
|
||||
size_t currentMBSize = m_currentMBSize;
|
||||
size_t numMinibatches = 1;
|
||||
if (m_doMinibatchBufferTruncation)
|
||||
{
|
||||
std::vector<ElemType> tmpFeatures(m_featuresBufferMultiIO[i],
|
||||
m_featuresBufferMultiIO[i] + size * m_featureNameToDimMap[m_featureIdToNameMap[i]]);
|
||||
currentMinibatch.features.push_back(tmpFeatures);
|
||||
currentMBSize = m_mbSize;
|
||||
numMinibatches = (ElemType)originalMBSize / (ElemType)m_mbSize;
|
||||
numMinibatches += (originalMBSize % m_mbSize == 0) ? 0 : 1;
|
||||
}
|
||||
|
||||
// Copies labels.
|
||||
currentMinibatch.labels.resize(0);
|
||||
for (size_t i = 0; i < m_labelsBufferMultiIO.size(); ++i)
|
||||
for (size_t i = 0; i < numMinibatches; ++i)
|
||||
{
|
||||
std::vector<ElemType> tmpLabels(m_labelsBufferMultiIO[i],
|
||||
m_labelsBufferMultiIO[i] + size * m_labelNameToDimMap[m_labelIdToNameMap[i]]);
|
||||
currentMinibatch.labels.push_back(tmpLabels);
|
||||
}
|
||||
MinibatchBufferUnit currentMinibatch;
|
||||
|
||||
m_minibatchBuffer.push_back(currentMinibatch);
|
||||
size_t startIndex = i * currentMBSize;
|
||||
size_t numFrames =
|
||||
(startIndex + currentMBSize <= originalMBSize) ?
|
||||
currentMBSize : (originalMBSize - startIndex);
|
||||
|
||||
// Sets sentence boundary for the current minibatch.
|
||||
currentMinibatch.sentenceBegin.SetValue(
|
||||
m_sentenceBegin.ColumnSlice(startIndex, numFrames));
|
||||
|
||||
// Sets packing flag for the current minibatch.
|
||||
currentMinibatch.minibatchPackingFlag.resize(numFrames);
|
||||
currentMinibatch.minibatchPackingFlag.assign(
|
||||
m_minibatchPackingFlag.begin() + startIndex,
|
||||
m_minibatchPackingFlag.begin() + startIndex + numFrames);
|
||||
|
||||
// Sets the minibatch size for the current minibatch.
|
||||
currentMinibatch.currentMBSize = numFrames;
|
||||
|
||||
// Sets the utterance information for the current minibatch.
|
||||
currentMinibatch.minibatchUttInfo.assign(
|
||||
m_numberOfuttsPerMinibatch,
|
||||
std::vector<std::pair<wstring, size_t>>(0));
|
||||
for (size_t j = 0; j < m_minibatchUttInfo.size(); ++j)
|
||||
{
|
||||
size_t uttStartIndex = 0;
|
||||
for (size_t k = 0; k < m_minibatchUttInfo[j].size(); ++k)
|
||||
{
|
||||
if (startIndex >= uttStartIndex + m_minibatchUttInfo[j][k].second)
|
||||
{
|
||||
uttStartIndex += m_minibatchUttInfo[j][k].second;
|
||||
continue;
|
||||
}
|
||||
if (startIndex + numFrames <= uttStartIndex)
|
||||
{
|
||||
break;
|
||||
}
|
||||
currentMinibatch.minibatchUttInfo[j].push_back(m_minibatchUttInfo[j][k]);
|
||||
uttStartIndex += m_minibatchUttInfo[j][k].second;
|
||||
}
|
||||
}
|
||||
|
||||
size_t startDataCopy = startIndex * m_numberOfuttsPerMinibatch;
|
||||
size_t endDataCopy = (startIndex + numFrames) * m_numberOfuttsPerMinibatch;
|
||||
|
||||
// Copies features.
|
||||
currentMinibatch.features.resize(0);
|
||||
for (size_t i = 0; i < m_featuresBufferMultiIO.size(); ++i)
|
||||
{
|
||||
std::vector<ElemType> tmpFeatures(
|
||||
m_featuresBufferMultiIO[i] + startDataCopy * m_featureNameToDimMap[m_featureIdToNameMap[i]],
|
||||
m_featuresBufferMultiIO[i] + endDataCopy * m_featureNameToDimMap[m_featureIdToNameMap[i]]);
|
||||
currentMinibatch.features.push_back(tmpFeatures);
|
||||
}
|
||||
|
||||
// Copies labels.
|
||||
currentMinibatch.labels.resize(0);
|
||||
for (size_t i = 0; i < m_labelsBufferMultiIO.size(); ++i)
|
||||
{
|
||||
std::vector<ElemType> tmpLabels(
|
||||
m_labelsBufferMultiIO[i] + startDataCopy * m_labelNameToDimMap[m_labelIdToNameMap[i]],
|
||||
m_labelsBufferMultiIO[i] + endDataCopy * m_labelNameToDimMap[m_labelIdToNameMap[i]]);
|
||||
currentMinibatch.labels.push_back(tmpLabels);
|
||||
}
|
||||
|
||||
m_minibatchBuffer.push_back(currentMinibatch);
|
||||
}
|
||||
}
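
The truncation path in CopyMinibatchToBuffer above splits one utterance-length minibatch into chunks of at most m_mbSize frames, with a shorter final chunk. A standalone sketch of that chunking arithmetic, using plain integer division instead of the float cast in the diff (hypothetical helper, for illustration only; printChunks is not part of the reader):

#include <cstdio>
#include <cstddef>

// Illustrative only: mirrors the numMinibatches / startIndex / numFrames computation above.
void printChunks(size_t originalMBSize, size_t mbSize)
{
    size_t numMinibatches = originalMBSize / mbSize + ((originalMBSize % mbSize == 0) ? 0 : 1);
    for (size_t i = 0; i < numMinibatches; ++i)
    {
        size_t startIndex = i * mbSize;
        size_t numFrames = (startIndex + mbSize <= originalMBSize)
            ? mbSize : (originalMBSize - startIndex);
        printf("chunk %zu: frames [%zu, %zu)\n", i, startIndex, startIndex + numFrames);
    }
}
// Example: printChunks(250, 100) prints [0,100), [100,200), [200,250).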
|
||||
|
||||
template<class ElemType>
|
||||
|
@ -1371,24 +1461,47 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_minibatchBuffer[index].labels[id].data(),
|
||||
matrixFlagNormal);
|
||||
}
|
||||
else if (m_doMinibatchBuffering && !m_getMinibatchCopy)
|
||||
else if (m_doMinibatchBuffering)
|
||||
{
|
||||
if (m_nameToTypeMap[iter->first] == InputOutputTypes::readerDeriv)
|
||||
{
|
||||
m_uttDerivBuffer->GetDerivative(
|
||||
m_minibatchUttInfo, m_sentenceBegin,
|
||||
m_minibatchPackingFlag, matrices[iter->first]);
|
||||
if (m_getMinibatchCopy)
|
||||
{
|
||||
if (data.GetNumCols()
|
||||
!= m_currentMBSize * m_numberOfuttsPerMinibatch)
|
||||
{
|
||||
matrices[iter->first]->Resize(data.GetNumRows(),
|
||||
m_currentMBSize
|
||||
* m_numberOfuttsPerMinibatch);
|
||||
}
|
||||
matrices[iter->first]->SetValue(0);
|
||||
}
|
||||
else
|
||||
{
|
||||
m_uttDerivBuffer->GetDerivative(
|
||||
m_minibatchUttInfo, m_sentenceBegin,
|
||||
m_minibatchPackingFlag, matrices[iter->first]);
|
||||
}
|
||||
}
|
||||
else if (m_nameToTypeMap[iter->first] == InputOutputTypes::readerObj)
|
||||
{
|
||||
m_uttDerivBuffer->GetObjective(m_minibatchUttInfo,
|
||||
matrices[iter->first]);
|
||||
if (m_getMinibatchCopy)
|
||||
{
|
||||
if (data.GetNumCols() != 1)
|
||||
{
|
||||
data.Resize(1, 1);
|
||||
}
|
||||
data.SetValue(0);
|
||||
}
|
||||
else
|
||||
{
|
||||
m_uttDerivBuffer->GetObjective(m_minibatchUttInfo,
|
||||
matrices[iter->first]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we are not in the minibatch copy mode, then we can remove the
|
||||
// minibatch from buffer.
|
||||
if (m_getMinibatchCopy == false)
|
||||
{
|
||||
assert(index == 0);
|
||||
|
@ -1424,11 +1537,20 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
if (m_nameToTypeMap[iter->first] == InputOutputTypes::readerDeriv)
|
||||
{
|
||||
data.Resize(data.GetNumRows(), m_currentMBSize);
|
||||
if (data.GetNumCols()
|
||||
!= m_currentMBSize * m_numberOfuttsPerMinibatch)
|
||||
{
|
||||
data.Resize(data.GetNumRows(),
|
||||
m_currentMBSize * m_numberOfuttsPerMinibatch);
|
||||
}
|
||||
data.SetValue(0);
|
||||
}
|
||||
else if (m_nameToTypeMap[iter->first] == InputOutputTypes::readerObj)
|
||||
{
|
||||
if (data.GetNumCols() != 1)
|
||||
{
|
||||
data.Resize(1, 1);
|
||||
}
|
||||
data.SetValue(0);
|
||||
}
|
||||
}
|
||||
|
@ -1452,15 +1574,19 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
success = GetOneMinibatchToTrainOrTestDataBuffer(matrices);
|
||||
if (success)
|
||||
{
|
||||
CopyMinibatchToMatrix(
|
||||
m_currentMBSize * m_numberOfuttsPerMinibatch,
|
||||
m_featuresBufferMultiIO, m_labelsBufferMultiIO, matrices);
|
||||
}
|
||||
|
||||
// Checks if we need to move the current minibatch to buffer.
|
||||
if (success && m_getMinibatchCopy)
|
||||
{
|
||||
CopyMinibatchToBuffer();
|
||||
if (m_getMinibatchCopy)
|
||||
{
|
||||
assert(m_minibatchBuffer.size() == 0);
|
||||
CopyMinibatchToBuffer();
|
||||
CopyMinibatchFromBufferToMatrix(0, matrices);
|
||||
m_minibatchBufferIndex = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
CopyMinibatchToMatrix(
|
||||
m_currentMBSize * m_numberOfuttsPerMinibatch,
|
||||
m_featuresBufferMultiIO, m_labelsBufferMultiIO, matrices);
|
||||
}
|
||||
}
|
||||
|
||||
// If we are in the "copy" mode, and we cannot get a full minibatch,
|
||||
|
@ -1612,6 +1738,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
if (m_noData)
|
||||
{
|
||||
m_currentBufferFrames[i] = 0;
|
||||
m_processedFrame[i] = 0;
|
||||
m_toProcess[i] = 0;
|
||||
m_uttInfo[i].clear();
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1665,8 +1795,9 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
|
||||
// We don't support having two utterances in the same buffer.
if (m_doMinibatchBuffering &&
    m_uttDerivBuffer->HasUtterance(m_uttInfo[i][0].first))
if (m_doMinibatchBuffering
    && m_hasUttInCurrentMinibatch.find(m_uttInfo[i][0].first)
    != m_hasUttInCurrentMinibatch.end())
{
    (*m_mbiter)++;
    if (!(*m_mbiter))
@@ -1674,7 +1805,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
        m_noData = true;
    }
    fprintf(stderr, "WARNING: Utterance \"%S\" already exists in "
        "the buffer, skipping it.\n",
        "the minibatch, skipping it.\n",
        m_uttInfo[i][0].first.c_str());
    return ReNewBufferForMultiIO(i);
}
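The replacement logic above tracks which utterances already appear in the minibatch being assembled (via m_hasUttInCurrentMinibatch) instead of asking the derivative buffer, and skips duplicates. A hedged sketch of the same bookkeeping with the declared unordered_map type (hypothetical free function):

#include <string>
#include <unordered_map>

// Returns true if the utterance was already seen in the current minibatch;
// otherwise records it and returns false. Mirrors the role of m_hasUttInCurrentMinibatch.
bool SeenInCurrentMinibatch(std::unordered_map<std::wstring, bool>& seen,
                            const std::wstring& uttId)
{
    auto inserted = seen.insert({ uttId, true });
    return !inserted.second;  // insert failed => already present => caller skips this utterance
}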
|
||||
|
|
|
@ -42,10 +42,11 @@ private:
|
|||
};
|
||||
bool m_doMinibatchBuffering;
|
||||
bool m_getMinibatchCopy;
|
||||
bool m_doMinibatchBufferTruncation;
|
||||
size_t m_minibatchBufferIndex;
|
||||
size_t m_minibatchBufferLeftovers;
|
||||
std::deque<MinibatchBufferUnit> m_minibatchBuffer;
|
||||
UtteranceDerivativeBuffer<ElemType>* m_uttDerivBuffer;
|
||||
unordered_map<wstring, bool> m_hasUttInCurrentMinibatch;
|
||||
|
||||
// Utterance information.
|
||||
std::vector<std::vector<std::pair<wstring, size_t>>> m_uttInfo;
|
||||
|
|
|
@ -15,8 +15,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_numUttsPerMinibatch = numberOfuttsPerMinibatch;
|
||||
m_needLikelihood = true;
|
||||
m_currentObj = 0;
|
||||
m_minibatchIndex = 1;
|
||||
m_lastCompleteMinibatch.assign(m_numUttsPerMinibatch, 0);
|
||||
m_uttReady.assign(m_numUttsPerMinibatch, false);
|
||||
m_epochEnd = false;
|
||||
m_dimension = 0;
|
||||
}
|
||||
|
@ -148,26 +147,22 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
&m_uttPool[uttID].objective);
|
||||
m_uttPool[uttID].hasDerivative = true;
|
||||
m_uttPool[uttID].progress = 0;
|
||||
if (startFrame + numFrames == currentMBSize)
|
||||
{
|
||||
m_lastCompleteMinibatch[m_uttPool[uttID].streamID]
|
||||
= m_minibatchIndex;
|
||||
}
|
||||
else
|
||||
{
|
||||
m_lastCompleteMinibatch[m_uttPool[uttID].streamID]
|
||||
= m_minibatchIndex - 1;
|
||||
}
|
||||
m_uttReady[m_uttPool[uttID].streamID] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if we are ready to provide derivatives.
m_minCompleteMinibatchIndex = *std::min_element(
    m_lastCompleteMinibatch.begin(), m_lastCompleteMinibatch.end());
m_needLikelihood = (m_minCompleteMinibatchIndex >= 1) ? false : true;
m_minibatchIndex += 1;
m_needLikelihood = false;
for (size_t i = 0; i < m_uttReady.size(); ++i)
{
    if (m_uttReady[i] == false)
    {
        m_needLikelihood = true;
        break;
    }
}
}
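The scan above only asks whether every stream has produced a ready utterance; as long as one stream is not ready, more likelihoods must be read before derivatives can be served. The same condition written compactly (sketch, assuming a vector of ready flags like m_uttReady):

#include <algorithm>
#include <vector>

// True as long as at least one stream is not ready yet.
bool NeedMoreLikelihood(const std::vector<bool>& uttReady)
{
    return !std::all_of(uttReady.begin(), uttReady.end(), [](bool ready) { return ready; });
}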
|
||||
|
||||
// Suppose we have a, b, c 3 streams, the <derivativesOut> should be in the
|
||||
|
@ -245,22 +240,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_currentUttInfo = uttInfo;
|
||||
|
||||
// Checks if we need to read more loglikelihoods.
|
||||
m_needLikelihood = false;
|
||||
m_minCompleteMinibatchIndex -= 1;
|
||||
if (m_minCompleteMinibatchIndex <= 0 && !m_epochEnd)
|
||||
m_needLikelihood = (m_epochEnd || m_uttPool.size() > 0) ? false : true;
|
||||
if (m_needLikelihood == true)
|
||||
{
|
||||
m_needLikelihood = true;
|
||||
m_minibatchIndex = 1;
|
||||
m_lastCompleteMinibatch.assign(m_numUttsPerMinibatch, 0);
|
||||
|
||||
// Un-do the logLikelihood for partial utterances.
|
||||
for (auto iter = m_uttPool.begin(); iter != m_uttPool.end(); ++iter)
|
||||
{
|
||||
if (iter->second.hasDerivative == false)
|
||||
{
|
||||
iter->second.progress = 0;
|
||||
}
|
||||
}
|
||||
m_uttReady.assign(m_numUttsPerMinibatch, false);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -278,7 +261,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
RuntimeError("Current objective does not correspond to the"
|
||||
" minibatch utterance information, perhaps you did not"
|
||||
" run GetObjective() right after GetDerivatives()?");
|
||||
" run GetObjective() right after GetDerivative()?");
|
||||
}
|
||||
|
||||
// Sets the objectives...
|
||||
|
@ -333,12 +316,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
m_needLikelihood = true;
|
||||
m_currentObj = 0;
|
||||
m_minibatchIndex = 1;
|
||||
m_minCompleteMinibatchIndex = 0;
|
||||
m_epochEnd = false;
|
||||
m_lastCompleteMinibatch.assign(m_numUttsPerMinibatch, 0);
|
||||
m_uttPool.clear();
|
||||
m_currentUttInfo.clear();
|
||||
m_uttReady.assign(m_numUttsPerMinibatch, false);
|
||||
}
|
||||
|
||||
template class UtteranceDerivativeBuffer<float>;
|
||||
|
|
|
@ -35,12 +35,10 @@ private:
|
|||
|
||||
bool m_needLikelihood;
|
||||
bool m_epochEnd;
|
||||
int m_minCompleteMinibatchIndex;
|
||||
int m_minibatchIndex;
|
||||
size_t m_numUttsPerMinibatch;
|
||||
size_t m_dimension;
|
||||
ElemType m_currentObj;
|
||||
std::vector<int> m_lastCompleteMinibatch;
|
||||
std::vector<bool> m_uttReady;
|
||||
std::vector<std::vector<std::pair<wstring, size_t>>> m_currentUttInfo;
|
||||
unordered_map<wstring, UtteranceDerivativeUnit> m_uttPool;
|
||||
UtteranceDerivativeComputationInterface<ElemType>* m_derivativeInterface;
|
||||
|
|
|
@ -1,222 +1,220 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{E6F26F9A-FF64-4F0A-B749-CD309EE357EE}</ProjectGuid>
|
||||
<SccProjectName>
|
||||
</SccProjectName>
|
||||
<SccAuxPath>
|
||||
</SccAuxPath>
|
||||
<SccLocalPath>
|
||||
</SccLocalPath>
|
||||
<SccProvider>
|
||||
</SccProvider>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>CNTK</RootNamespace>
|
||||
<ProjectName>CNTK</ProjectName>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings" />
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<IncludePath>c:\Program Files\Microsoft MPI\Inc;..\..\Math\Math;..\..\Common\;..\..\Common\include;$(VCInstallDir)include;$(CUDA_PATH)\include;$(WindowsSDK_IncludePath);</IncludePath>
|
||||
<LibraryPath>c:\Program Files\Microsoft MPI\Lib\amd64;$(SolutionDir)$(Platform)\$(Configuration);$(SolutionDir)..\Common\lib;$(VCInstallDir)lib\amd64;$(WindowsSDK_LibraryPath_x64);$(CUDA_PATH)\lib\$(Platform)</LibraryPath>
|
||||
<CustomBuildAfterTargets>Build</CustomBuildAfterTargets>
|
||||
<IntDir>$(Platform)\$(Configuration)\$(ProjectName)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<IncludePath>c:\Program Files\Microsoft MPI\Inc;..\..\Math\Math;..\..\Common\;..\..\Common\include;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(CUDA_PATH)\include;$(WindowsSDK_IncludePath);</IncludePath>
|
||||
<LibraryPath>c:\Program Files\Microsoft MPI\Lib\amd64;$(SolutionDir)$(Platform)\$(Configuration);$(SolutionDir)..\Common\lib;$(VCInstallDir)lib\amd64;$(WindowsSDK_LibraryPath_x64);$(CUDA_PATH)\lib\$(Platform)</LibraryPath>
|
||||
<CustomBuildAfterTargets>Build</CustomBuildAfterTargets>
|
||||
<ExecutablePath>$(ExecutablePath)</ExecutablePath>
|
||||
<IntDir>$(Platform)\$(Configuration)\$(ProjectName)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_SCL_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<OpenMPSupport>true</OpenMPSupport>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
|
||||
<AdditionalIncludeDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include"</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<AdditionalDependencies>CNTKMath.lib; nvml.lib; kernel32.lib; user32.lib; shell32.lib; %(AdditionalDependencies)</AdditionalDependencies>
|
||||
<AdditionalLibraryDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib"</AdditionalLibraryDirectories>
|
||||
<DelayLoadDLLs>CNTKMath.dll; nvml.dll; cudart64_70.dll</DelayLoadDLLs>
|
||||
</Link>
|
||||
<PostBuildEvent>
|
||||
<Command>xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
|
||||
<Message>Copying NVidia GDK extension DLL to target folder</Message>
|
||||
</PostBuildEvent>
|
||||
<CustomBuildStep>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<Outputs>$(TargetDir)config.txt;$(TargetDir)labels.txt;$(TargetDir)network.txt;$(TargetDir)NdlScript.txt</Outputs>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<TreatOutputAsContent>true</TreatOutputAsContent>
|
||||
<Message>Copy content files to target directory</Message>
|
||||
</CustomBuildStep>
|
||||
<PreBuildEvent>
|
||||
<Command>prebuild.bat</Command>
|
||||
</PreBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
|
||||
<AdditionalOptions>/d2Zi+ %(AdditionalOptions)</AdditionalOptions>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<AdditionalIncludeDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include"</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalDependencies>CNTKMath.lib; nvml.lib; kernel32.lib; user32.lib; shell32.lib; %(AdditionalDependencies)</AdditionalDependencies>
|
||||
<Profile>true</Profile>
|
||||
<DelayLoadDLLs>CNTKMath.dll; nvml.dll; cudart64_70.dll</DelayLoadDLLs>
|
||||
<AdditionalLibraryDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib"</AdditionalLibraryDirectories>
|
||||
</Link>
|
||||
<PostBuildEvent>
|
||||
<Command>xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
|
||||
<Message>Copying NVidia GDK extension DLL to target folder</Message>
|
||||
</PostBuildEvent>
|
||||
<CustomBuildStep>
|
||||
<Command>
|
||||
</Command>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<Outputs>
|
||||
</Outputs>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<TreatOutputAsContent>true</TreatOutputAsContent>
|
||||
<Message>
|
||||
</Message>
|
||||
</CustomBuildStep>
|
||||
<PreBuildEvent>
|
||||
<Command>prebuild.bat</Command>
|
||||
</PreBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<Text Include="DefaultMacros.txt" />
|
||||
<Text Include="modelEditor.txt" />
|
||||
<Text Include="modelEditorFromScratch.txt" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex.h" />
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex_linux.h" />
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex_win32.h" />
|
||||
<ClInclude Include="..\..\Common\Include\basetypes.h" />
|
||||
<ClInclude Include="..\..\Common\Include\Basics.h" />
|
||||
<ClInclude Include="..\..\Common\Include\BestGpu.h" />
|
||||
<ClInclude Include="..\..\Common\Include\commandArgUtil.h" />
|
||||
<ClInclude Include="..\..\Common\Include\DataReader.h" />
|
||||
<ClInclude Include="..\..\Common\Include\DataWriter.h" />
|
||||
<ClInclude Include="..\..\Common\Include\File.h" />
|
||||
<ClInclude Include="..\..\Common\Include\fileutil.h" />
|
||||
<ClInclude Include="..\..\Common\Include\hostname.h" />
|
||||
<ClInclude Include="..\..\Common\Include\minibatchsourcehelpers.h" />
|
||||
<ClInclude Include="..\..\Common\Include\nvml.h" />
|
||||
<ClInclude Include="..\..\Common\Include\TimerUtility.h" />
|
||||
<ClInclude Include="CompositeComputationNodes.h" />
|
||||
<ClInclude Include="ComputationNetwork.h" />
|
||||
<ClInclude Include="ComputationNetworkHelper.h" />
|
||||
<ClInclude Include="ComputationNode.h" />
|
||||
<ClInclude Include="ConvolutionalNodes.h" />
|
||||
<ClInclude Include="DecoderNode.h" />
|
||||
<ClInclude Include="EvaluationCriterionNodes.h" />
|
||||
<ClInclude Include="IComputationNetBuilder.h" />
|
||||
<ClInclude Include="IExecutionEngine.h" />
|
||||
<ClInclude Include="InputAndParamNodes.h" />
|
||||
<ClInclude Include="LinearAlgebraNodes.h" />
|
||||
<ClInclude Include="ModelEditLanguage.h" />
|
||||
<ClInclude Include="MultiNetworksSGD.h" />
|
||||
<ClInclude Include="NDLNetworkBuilder.h" />
|
||||
<ClInclude Include="NDLUtil.h" />
|
||||
<ClInclude Include="NetworkDescriptionLanguage.h" />
|
||||
<ClInclude Include="NonlinearityNodes.h" />
|
||||
<ClInclude Include="RecurrentNodes.h" />
|
||||
<ClInclude Include="SimpleEvaluator.h" />
|
||||
<ClInclude Include="SimpleOutputWriter.h" />
|
||||
<ClInclude Include="SGD.h" />
|
||||
<ClInclude Include="SimpleNetworkBuilder.h" />
|
||||
<ClInclude Include="stdafx.h" />
|
||||
<ClInclude Include="SynchronousExecutionEngine.h" />
|
||||
<ClInclude Include="targetver.h" />
|
||||
<ClInclude Include="TrainingCriterionNodes.h" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\Common\BestGpu.cpp" />
|
||||
<ClCompile Include="..\..\Common\ConfigFile.cpp" />
|
||||
<ClCompile Include="..\..\Common\DataReader.cpp" />
|
||||
<ClCompile Include="..\..\Common\DataWriter.cpp" />
|
||||
<ClCompile Include="..\..\Common\File.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\fileutil.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\TimerUtility.cpp" />
|
||||
<ClCompile Include="CNTK.cpp" />
|
||||
<ClCompile Include="ComputationNode.cpp" />
|
||||
<ClCompile Include="ModelEditLanguage.cpp" />
|
||||
<ClCompile Include="NetworkDescriptionLanguage.cpp" />
|
||||
<ClCompile Include="SimpleNetworkBuilder.cpp" />
|
||||
<ClCompile Include="Profiler.cpp" />
|
||||
<ClCompile Include="stdafx.cpp" />
|
||||
<ClCompile Include="tests.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="prebuild.bat" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets" />
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{E6F26F9A-FF64-4F0A-B749-CD309EE357EE}</ProjectGuid>
|
||||
<SccProjectName>
|
||||
</SccProjectName>
|
||||
<SccAuxPath>
|
||||
</SccAuxPath>
|
||||
<SccLocalPath>
|
||||
</SccLocalPath>
|
||||
<SccProvider>
|
||||
</SccProvider>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>CNTK</RootNamespace>
|
||||
<ProjectName>CNTK</ProjectName>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v120</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings" />
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<IncludePath>c:\Program Files\Microsoft MPI\Inc;..\..\Math\Math;..\..\Common\;..\..\Common\include;$(VCInstallDir)include;$(CUDA_PATH)\include;$(WindowsSDK_IncludePath);</IncludePath>
|
||||
<LibraryPath>c:\Program Files\Microsoft MPI\Lib\amd64;$(SolutionDir)$(Platform)\$(Configuration);$(SolutionDir)..\Common\lib;$(VCInstallDir)lib\amd64;$(WindowsSDK_LibraryPath_x64);$(CUDA_PATH)\lib\$(Platform)</LibraryPath>
|
||||
<CustomBuildAfterTargets>Build</CustomBuildAfterTargets>
|
||||
<IntDir>$(Platform)\$(Configuration)\$(ProjectName)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<IncludePath>c:\Program Files\Microsoft MPI\Inc;..\..\Math\Math;..\..\Common\;..\..\Common\include;$(VCInstallDir)include;$(VCInstallDir)atlmfc\include;$(CUDA_PATH)\include;$(WindowsSDK_IncludePath);</IncludePath>
|
||||
<LibraryPath>c:\Program Files\Microsoft MPI\Lib\amd64;$(SolutionDir)$(Platform)\$(Configuration);$(SolutionDir)..\Common\lib;$(VCInstallDir)lib\amd64;$(WindowsSDK_LibraryPath_x64);$(CUDA_PATH)\lib\$(Platform)</LibraryPath>
|
||||
<CustomBuildAfterTargets>Build</CustomBuildAfterTargets>
|
||||
<ExecutablePath>$(ExecutablePath)</ExecutablePath>
|
||||
<IntDir>$(Platform)\$(Configuration)\$(ProjectName)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_SCL_SECURE_NO_WARNINGS;WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<OpenMPSupport>true</OpenMPSupport>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
|
||||
<AdditionalIncludeDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include"</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<AdditionalDependencies>CNTKMath.lib; kernel32.lib; user32.lib; shell32.lib; %(AdditionalDependencies)</AdditionalDependencies>
|
||||
<AdditionalLibraryDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib"</AdditionalLibraryDirectories>
|
||||
<DelayLoadDLLs>CNTKMath.dll; nvml.dll; cudart64_70.dll</DelayLoadDLLs>
|
||||
</Link>
|
||||
<PostBuildEvent>
|
||||
<Command>if exist "%ProgramW6432%\NVIDIA Corporation\NVSMI" xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
|
||||
<Message>Copying NVidia GDK extension DLL to target folder</Message>
|
||||
</PostBuildEvent>
|
||||
<CustomBuildStep>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<Outputs>$(TargetDir)config.txt;$(TargetDir)labels.txt;$(TargetDir)network.txt;$(TargetDir)NdlScript.txt</Outputs>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<TreatOutputAsContent>true</TreatOutputAsContent>
|
||||
<Message>Copy content files to target directory</Message>
|
||||
</CustomBuildStep>
|
||||
<PreBuildEvent>
|
||||
<Command>prebuild.bat</Command>
|
||||
</PreBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<SDLCheck>true</SDLCheck>
|
||||
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
|
||||
<AdditionalOptions>/d2Zi+ %(AdditionalOptions)</AdditionalOptions>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<AdditionalIncludeDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\include"</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalDependencies>CNTKMath.lib; kernel32.lib; user32.lib; shell32.lib; %(AdditionalDependencies)</AdditionalDependencies>
|
||||
<Profile>true</Profile>
|
||||
<DelayLoadDLLs>CNTKMath.dll; nvml.dll; cudart64_70.dll</DelayLoadDLLs>
|
||||
<AdditionalLibraryDirectories>"c:\Program Files\NVIDIA Corporation\GDK\gdk_win7_amd64_release\nvml\lib"</AdditionalLibraryDirectories>
|
||||
</Link>
|
||||
<PostBuildEvent>
|
||||
<Command>if exist "%ProgramW6432%\NVIDIA Corporation\NVSMI" xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
|
||||
<Message>Copying NVidia GDK extension DLL to target folder</Message>
|
||||
</PostBuildEvent>
|
||||
<CustomBuildStep>
|
||||
<Command>
|
||||
</Command>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<Outputs>
|
||||
</Outputs>
|
||||
</CustomBuildStep>
|
||||
<CustomBuildStep>
|
||||
<TreatOutputAsContent>true</TreatOutputAsContent>
|
||||
<Message>
|
||||
</Message>
|
||||
</CustomBuildStep>
|
||||
<PreBuildEvent>
|
||||
<Command>prebuild.bat</Command>
|
||||
</PreBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<Text Include="DefaultMacros.txt" />
|
||||
<Text Include="modelEditor.txt" />
|
||||
<Text Include="modelEditorFromScratch.txt" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex.h" />
|
||||
<ClInclude Include="..\..\Common\Include\basetypes.h" />
|
||||
<ClInclude Include="..\..\Common\Include\Basics.h" />
|
||||
<ClInclude Include="..\..\Common\Include\BestGpu.h" />
|
||||
<ClInclude Include="..\..\Common\Include\commandArgUtil.h" />
|
||||
<ClInclude Include="..\..\Common\Include\DataReader.h" />
|
||||
<ClInclude Include="..\..\Common\Include\DataWriter.h" />
|
||||
<ClInclude Include="..\..\Common\Include\File.h" />
|
||||
<ClInclude Include="..\..\Common\Include\fileutil.h" />
|
||||
<ClInclude Include="..\..\Common\Include\hostname.h" />
|
||||
<ClInclude Include="..\..\Common\Include\minibatchsourcehelpers.h" />
|
||||
<ClInclude Include="..\..\Common\Include\nvml.h" />
|
||||
<ClInclude Include="..\..\Common\Include\TimerUtility.h" />
|
||||
<ClInclude Include="CompositeComputationNodes.h" />
|
||||
<ClInclude Include="ComputationNetwork.h" />
|
||||
<ClInclude Include="ComputationNetworkHelper.h" />
|
||||
<ClInclude Include="ComputationNode.h" />
|
||||
<ClInclude Include="ConvolutionalNodes.h" />
|
||||
<ClInclude Include="DecoderNode.h" />
|
||||
<ClInclude Include="EvaluationCriterionNodes.h" />
|
||||
<ClInclude Include="IComputationNetBuilder.h" />
|
||||
<ClInclude Include="IExecutionEngine.h" />
|
||||
<ClInclude Include="InputAndParamNodes.h" />
|
||||
<ClInclude Include="LinearAlgebraNodes.h" />
|
||||
<ClInclude Include="ModelEditLanguage.h" />
|
||||
<ClInclude Include="MultiNetworksSGD.h" />
|
||||
<ClInclude Include="NDLNetworkBuilder.h" />
|
||||
<ClInclude Include="NDLUtil.h" />
|
||||
<ClInclude Include="NetworkDescriptionLanguage.h" />
|
||||
<ClInclude Include="NonlinearityNodes.h" />
|
||||
<ClInclude Include="RecurrentNodes.h" />
|
||||
<ClInclude Include="SimpleEvaluator.h" />
|
||||
<ClInclude Include="SimpleOutputWriter.h" />
|
||||
<ClInclude Include="SGD.h" />
|
||||
<ClInclude Include="SimpleNetworkBuilder.h" />
|
||||
<ClInclude Include="stdafx.h" />
|
||||
<ClInclude Include="SynchronousExecutionEngine.h" />
|
||||
<ClInclude Include="targetver.h" />
|
||||
<ClInclude Include="TrainingCriterionNodes.h" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\Common\BestGpu.cpp" />
|
||||
<ClCompile Include="..\..\Common\ConfigFile.cpp" />
|
||||
<ClCompile Include="..\..\Common\DataReader.cpp" />
|
||||
<ClCompile Include="..\..\Common\DataWriter.cpp" />
|
||||
<ClCompile Include="..\..\Common\File.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\fileutil.cpp">
|
||||
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\TimerUtility.cpp" />
|
||||
<ClCompile Include="CNTK.cpp" />
|
||||
<ClCompile Include="ComputationNode.cpp" />
|
||||
<ClCompile Include="ModelEditLanguage.cpp" />
|
||||
<ClCompile Include="NetworkDescriptionLanguage.cpp" />
|
||||
<ClCompile Include="SimpleNetworkBuilder.cpp" />
|
||||
<ClCompile Include="Profiler.cpp" />
|
||||
<ClCompile Include="stdafx.cpp" />
|
||||
<ClCompile Include="tests.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="prebuild.bat" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets" />
|
||||
</Project>
|
|
@ -1,213 +1,207 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\Common\ConfigFile.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\DataReader.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\DataWriter.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\File.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\fileutil.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ModelEditLanguage.cpp">
|
||||
<Filter>Model Editing</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ComputationNode.cpp">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="SimpleNetworkBuilder.cpp">
|
||||
<Filter>Network</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="stdafx.cpp">
|
||||
<Filter>Misc</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="tests.cpp">
|
||||
<Filter>Misc</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="NetworkDescriptionLanguage.cpp">
|
||||
<Filter>Network</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\TimerUtility.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="CNTK.cpp" />
|
||||
<ClCompile Include="..\..\Common\BestGpu.cpp">
|
||||
<Filter>GPU Interfacing</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="Profiler.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\Common\Include\basetypes.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\commandArgUtil.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\fileutil.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\File.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\DataReader.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\DataWriter.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNetwork.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNetworkHelper.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="IComputationNetBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="IExecutionEngine.h">
|
||||
<Filter>Execution Engine</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ModelEditLanguage.h">
|
||||
<Filter>Model Editing</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNode.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NDLNetworkBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NDLUtil.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NetworkDescriptionLanguage.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleEvaluator.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleNetworkBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleOutputWriter.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SGD.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SynchronousExecutionEngine.h">
|
||||
<Filter>Execution Engine</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="stdafx.h">
|
||||
<Filter>Misc</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="targetver.h">
|
||||
<Filter>Misc</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\hostname.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\TimerUtility.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\Basics.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\nvml.h">
|
||||
<Filter>GPU Interfacing</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\minibatchsourcehelpers.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\BestGpu.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="CompositeComputationNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="EvaluationCriterionNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="TrainingCriterionNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NonlinearityNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="LinearAlgebraNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ConvolutionalNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="RecurrentNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="InputAndParamNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="DecoderNode.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="MultiNetworksSGD.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex_linux.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex_win32.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Text Include="modelEditor.txt">
|
||||
<Filter>Model Editing</Filter>
|
||||
</Text>
|
||||
<Text Include="modelEditorFromScratch.txt">
|
||||
<Filter>Model Editing</Filter>
|
||||
</Text>
|
||||
<Text Include="DefaultMacros.txt">
|
||||
<Filter>Misc</Filter>
|
||||
</Text>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Filter Include="Common">
|
||||
<UniqueIdentifier>{b3d05c7b-7bcf-4b12-bcb5-dced86717202}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Common\Include">
|
||||
<UniqueIdentifier>{85226dda-87ba-4da6-af04-563d0ce23b94}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Network">
|
||||
<UniqueIdentifier>{498bb2e9-53de-4955-970e-813e3f21025b}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Model Editing">
|
||||
<UniqueIdentifier>{53c3735f-1374-4044-ab58-8a646c95a5e8}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Execution Engine">
|
||||
<UniqueIdentifier>{3ddfc109-3a90-45f5-91e8-1930759cfe9d}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Nodes">
|
||||
<UniqueIdentifier>{0b366814-48b2-4619-bf92-85ee24e3cbc1}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Misc">
|
||||
<UniqueIdentifier>{3c119a92-ffb2-4850-adae-01778324974d}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="GPU Interfacing">
|
||||
<UniqueIdentifier>{8d99b2cc-5209-40e4-8b4b-a7616973ae3b}</UniqueIdentifier>
|
||||
</Filter>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="prebuild.bat">
|
||||
<Filter>Misc</Filter>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\Common\ConfigFile.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\DataReader.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\DataWriter.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\File.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\fileutil.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ModelEditLanguage.cpp">
|
||||
<Filter>Model Editing</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="ComputationNode.cpp">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="SimpleNetworkBuilder.cpp">
|
||||
<Filter>Network</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="stdafx.cpp">
|
||||
<Filter>Misc</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="tests.cpp">
|
||||
<Filter>Misc</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="NetworkDescriptionLanguage.cpp">
|
||||
<Filter>Network</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="..\..\Common\TimerUtility.cpp">
|
||||
<Filter>Common</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="CNTK.cpp" />
|
||||
<ClCompile Include="..\..\Common\BestGpu.cpp">
|
||||
<Filter>GPU Interfacing</Filter>
|
||||
</ClCompile>
|
||||
<ClCompile Include="Profiler.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\Common\Include\basetypes.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\commandArgUtil.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\fileutil.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\File.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\DataReader.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\DataWriter.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNetwork.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNetworkHelper.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="IComputationNetBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="IExecutionEngine.h">
|
||||
<Filter>Execution Engine</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ModelEditLanguage.h">
|
||||
<Filter>Model Editing</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ComputationNode.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NDLNetworkBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NDLUtil.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NetworkDescriptionLanguage.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleEvaluator.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleNetworkBuilder.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SimpleOutputWriter.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SGD.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="SynchronousExecutionEngine.h">
|
||||
<Filter>Execution Engine</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="stdafx.h">
|
||||
<Filter>Misc</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="targetver.h">
|
||||
<Filter>Misc</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\hostname.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\TimerUtility.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\Basics.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\nvml.h">
|
||||
<Filter>GPU Interfacing</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\minibatchsourcehelpers.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\Include\BestGpu.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="CompositeComputationNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="EvaluationCriterionNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="TrainingCriterionNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="NonlinearityNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="LinearAlgebraNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="ConvolutionalNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="RecurrentNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="InputAndParamNodes.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="DecoderNode.h">
|
||||
<Filter>Nodes</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="MultiNetworksSGD.h">
|
||||
<Filter>Network</Filter>
|
||||
</ClInclude>
|
||||
<ClInclude Include="..\..\Common\CrossProcessMutex.h">
|
||||
<Filter>Common\Include</Filter>
|
||||
</ClInclude>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Text Include="modelEditor.txt">
|
||||
<Filter>Model Editing</Filter>
|
||||
</Text>
|
||||
<Text Include="modelEditorFromScratch.txt">
|
||||
<Filter>Model Editing</Filter>
|
||||
</Text>
|
||||
<Text Include="DefaultMacros.txt">
|
||||
<Filter>Misc</Filter>
|
||||
</Text>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Filter Include="Common">
|
||||
<UniqueIdentifier>{b3d05c7b-7bcf-4b12-bcb5-dced86717202}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Common\Include">
|
||||
<UniqueIdentifier>{85226dda-87ba-4da6-af04-563d0ce23b94}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Network">
|
||||
<UniqueIdentifier>{498bb2e9-53de-4955-970e-813e3f21025b}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Model Editing">
|
||||
<UniqueIdentifier>{53c3735f-1374-4044-ab58-8a646c95a5e8}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Execution Engine">
|
||||
<UniqueIdentifier>{3ddfc109-3a90-45f5-91e8-1930759cfe9d}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Nodes">
|
||||
<UniqueIdentifier>{0b366814-48b2-4619-bf92-85ee24e3cbc1}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="Misc">
|
||||
<UniqueIdentifier>{3c119a92-ffb2-4850-adae-01778324974d}</UniqueIdentifier>
|
||||
</Filter>
|
||||
<Filter Include="GPU Interfacing">
|
||||
<UniqueIdentifier>{8d99b2cc-5209-40e4-8b4b-a7616973ae3b}</UniqueIdentifier>
|
||||
</Filter>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="prebuild.bat">
|
||||
<Filter>Misc</Filter>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
</Project>
|
|
@@ -482,6 +482,16 @@ public:
    }

    void SaveToFile(const std::wstring& fileName, const FileOptions fileFormat = FileOptions::fileOptionsBinary) const
    {
        // Save into a temporary file and then rename it to the requested fileName.
        // This is a standard trick to avoid having corrupted model files if the process dies during writing.
        wstring tmpFileName = fileName + L".tmp";
        SaveToFileImpl(tmpFileName, fileFormat);
        renameOrDie(tmpFileName, fileName);
    }
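The pattern added here (write to fileName + ".tmp", then rename over the target) keeps a readable model on disk even if the process dies mid-write. A hedged standalone sketch of the same trick using only the standard library, not CNTK's File class or renameOrDie helper:

#include <cstdio>
#include <fstream>
#include <stdexcept>
#include <string>

// Write content to a temporary file, then replace the target in one rename step.
// Sketch only; the surrounding code uses its own file abstraction instead.
void SaveAtomically(const std::string& fileName, const std::string& content)
{
    const std::string tmpName = fileName + ".tmp";
    {
        std::ofstream out(tmpName, std::ios::binary);
        if (!out)
            throw std::runtime_error("cannot open " + tmpName);
        out << content;
        out.flush();                  // hand the bytes to the OS before renaming
    }
    std::remove(fileName.c_str());    // needed on Windows, where rename will not overwrite
    if (std::rename(tmpName.c_str(), fileName.c_str()) != 0)
        throw std::runtime_error("rename failed for " + fileName);
}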

private:
    void SaveToFileImpl(const std::wstring& fileName, const FileOptions fileFormat) const
    {
        File fstream(fileName, fileFormat | FileOptions::fileOptionsWrite);
        fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BCN");
@@ -589,8 +599,11 @@ public:
        fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ERootNodes");

        fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ECN");

        fstream.Flush();
    }
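The implementation brackets the network with begin/end section markers ("BCN"/"ECN") and now flushes before returning, so the temporary file is complete before it is renamed. A hedged sketch of the bracketing idea with a plain narrow-character stream (illustrative only; the real code writes wide-string markers through CNTK's File class):

#include <fstream>
#include <string>

// Write one marked section: opening marker, payload, closing marker, then flush
// so the bytes reach the OS before the file is closed or renamed.
void WriteMarkedSection(std::ofstream& out, const std::string& payload)
{
    out << "BCN\n";        // begin-of-network marker
    out << payload << '\n';
    out << "ECN\n";        // end-of-network marker
    out.flush();
}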
|
||||
|
||||
public:
|
||||
void LoadPersistableParametersFromFile(const std::wstring& fileName, const bool requireValidation = true,
|
||||
const FileOptions fileFormat = FileOptions::fileOptionsBinary)
|
||||
{
|
||||
|
@ -854,6 +867,45 @@ public:
|
|||
}
|
||||
}
|
||||
|
||||
    // This is a temporary solution, since some nodes such as Plus can be just an aggregate of two scalar values,
    // in which case the packing info is not available (and not meaningful) for them.
    size_t GetNumSamplesWithLabel(const size_t numAllSamples)
    {
        if (!m_SentenceBoundary.IsEmpty() &&
            m_minibatchPackingFlag.size() != 0)
        {
            size_t numTimeSteps = m_SentenceBoundary.GetNumCols();
            size_t numSequences = m_SentenceBoundary.GetNumRows();

            if (m_minibatchPackingFlag.size() != numTimeSteps)
            {
                LogicError("GetNumSamplesWithLabel(): m_minibatchPackingFlag should have one element for each time step of all streams. Check the feature reader.");
            }

            size_t numSamplesWithoutLabel = 0;

            for (size_t j = 0; j < numTimeSteps; j++)
            {
                if (m_minibatchPackingFlag[j] & MinibatchPackingFlag::NoLabel)
                {
                    for (int i = 0; i < numSequences; i++)
                    {
                        if ((int)(m_SentenceBoundary(i, j)) & NO_LABEL)
                        {
                            numSamplesWithoutLabel++;
                        }
                    }
                }
            }

            return numTimeSteps*numSequences - numSamplesWithoutLabel;
        }
        else
        {
            return numAllSamples;
        }
    }

    // Read a matrix stored in text format from 'filePath' (whitespace-separated columns, newline-separated rows),
    // and return a flat array containing the contents of this file in column-major format.
    // filePath: path to file containing matrix in text format.
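The comment above only describes the file format. As a concrete illustration, here is a hedged sketch of such a reader (whitespace-separated columns, newline-separated rows, returned column-major); it does not attempt to match the actual function's signature or error handling:

#include <fstream>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

// Read a text matrix and return it column-major: element (r, c) lands at c * numRows + r.
std::vector<double> ReadTextMatrixColumnMajor(const std::string& filePath,
                                              size_t& numRows, size_t& numCols)
{
    std::ifstream in(filePath);
    if (!in)
        throw std::runtime_error("cannot open " + filePath);

    std::vector<std::vector<double>> rows;
    std::string line;
    while (std::getline(in, line))
    {
        std::istringstream ls(line);
        std::vector<double> row;
        double v;
        while (ls >> v)
            row.push_back(v);
        if (!row.empty())
            rows.push_back(std::move(row));
    }

    numRows = rows.size();
    numCols = numRows ? rows[0].size() : 0;
    std::vector<double> colMajor(numRows * numCols);
    for (size_t r = 0; r < numRows; ++r)
    {
        if (rows[r].size() != numCols)
            throw std::runtime_error("ragged rows in " + filePath);
        for (size_t c = 0; c < numCols; ++c)
            colMajor[c * numRows + r] = rows[r][c];
    }
    return colMajor;
}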
|
||||
|
@ -1268,14 +1320,22 @@ public:
|
|||
{
|
||||
newNode = new TransposeTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == StrideTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new StrideTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == StrideTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new StrideTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == ElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new ElementTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == RowElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new RowElementTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == ColumnElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new ColumnElementTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == DiagTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new DiagTimesNode<ElemType>(fstream, modelVersion, m_deviceId, nodeName);
|
||||
|
@ -1606,14 +1666,22 @@ public:
|
|||
{
|
||||
newNode = new TransposeTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == StrideTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new StrideTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == StrideTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new StrideTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == ElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new ElementTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == RowElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new RowElementTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == ColumnElementTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new ColumnElementTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
}
|
||||
else if (nodeType == DiagTimesNode<ElemType>::TypeName())
|
||||
{
|
||||
newNode = new DiagTimesNode<ElemType>(m_deviceId, nodeName);
|
||||
|
@ -2110,7 +2178,26 @@ public:
|
|||
return newNode;
|
||||
}
|
||||
|
||||
ComputationNodePtr StrideTimes(const ComputationNodePtr a, const ComputationNodePtr b, const ComputationNodePtr c, const std::wstring nodeName = L"")
|
||||
ComputationNodePtr RowElementTimes(const ComputationNodePtr a,
|
||||
const ComputationNodePtr b,
|
||||
const std::wstring nodeName = L"")
|
||||
{
|
||||
ComputationNodePtr newNode(new RowElementTimesNode<ElemType>(m_deviceId, nodeName));
|
||||
newNode->AttachInputs(a, b);
|
||||
AddNodeToNet(newNode);
|
||||
return newNode;
|
||||
}
|
||||
|
||||
ComputationNodePtr ColumnElementTimes(const ComputationNodePtr a,
|
||||
const ComputationNodePtr b,
|
||||
const std::wstring nodeName = L"")
|
||||
{
|
||||
ComputationNodePtr newNode(new ColumnElementTimesNode<ElemType>(m_deviceId, nodeName));
|
||||
newNode->AttachInputs(a, b);
|
||||
AddNodeToNet(newNode);
|
||||
return newNode;
|
||||
}
|
||||
ComputationNodePtr StrideTimes(const ComputationNodePtr a, const ComputationNodePtr b, const ComputationNodePtr c, const std::wstring nodeName = L"")
|
||||
{
|
||||
ComputationNodePtr newNode(new StrideTimesNode<ElemType>(m_deviceId, nodeName));
|
||||
newNode->AttachInputs(a, b, c);
|
||||
|
|
|
@ -306,6 +306,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return processedExistsNoLabelorFeatureMissing;
|
||||
}
|
||||
|
||||
/*
|
||||
virtual size_t GetNumSamplesWithLabel(const size_t numAllSamples)
|
||||
{
|
||||
if (m_sentenceSeg != nullptr &&
|
||||
|
@ -344,6 +345,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
return numAllSamples;
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
void SetLoopId(const int id)
|
||||
{
|
||||
|
|
|
@ -1269,18 +1269,8 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
// inputIndex == 1 (right) - inputGradientValues[1], inputFunctionValues[0]
|
||||
static void WINAPI ComputeInputPartialS(Matrix<ElemType>& inputFunctionValues, Matrix<ElemType>& inputGradientValues, const Matrix<ElemType>& gradientValues)
|
||||
{
|
||||
size_t gradCol = gradientValues.GetNumCols();
|
||||
size_t inputCol = inputFunctionValues.GetNumCols();
|
||||
inputGradientValues.AddElementProductOf(gradientValues, inputFunctionValues);
|
||||
|
||||
if (gradCol != inputCol && inputCol == 1)
|
||||
{
|
||||
inputGradientValues.SetValue(gradientValues);
|
||||
inputGradientValues.ColumnElementMultiplyWith(inputFunctionValues);
|
||||
}
|
||||
else
|
||||
{
|
||||
inputGradientValues.AddElementProductOf(gradientValues, inputFunctionValues);
|
||||
}
|
||||
#if NANCHECK
|
||||
inputGradientValues.HasNan("ElementTimes");
|
||||
#endif
|
||||
|
@@ -1303,30 +1293,8 @@ namespace Microsoft { namespace MSR { namespace CNTK {

        static void WINAPI EvaluateThisNodeS(Matrix<ElemType>& functionValues, const Matrix<ElemType>& input0, const Matrix<ElemType>& input1)
        {
            size_t rows0 = input0.GetNumRows(), cols0 = input0.GetNumCols();
            size_t rows1 = input1.GetNumRows(), cols1 = input1.GetNumCols();
            if (rows0 == rows1 && cols0 == cols1)
            {
                functionValues.AssignElementProductOf(input0, input1);
            }
            else if ((cols0 == 1 || cols1 == 1) && rows1 == rows0) // col vec with matching rows
            {
                Matrix<ElemType> tmpMat;
                if (cols0 == 1)
                {
                    functionValues.SetValue(input1);
                    functionValues.ColumnElementMultiplyWith(input0);
                }
                else if (cols1 == 1)
                {
                    functionValues.SetValue(input0);
                    functionValues.ColumnElementMultiplyWith(input1);
                }
            }
            else
            {
                throw std::logic_error("The Matrix<ElemType> dimension in the ElementTimes operation does not match.");
            }
            functionValues.AssignElementProductOf(input0, input1);

#if NANCHECK
            functionValues.HasNan("ElementTimes");
#endif
|
||||
|
@@ -1339,29 +1307,22 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (m_children.size() != 2)
|
||||
throw std::logic_error("ElementTimes operation requires two inputs.");
|
||||
|
||||
size_t index = 0;
|
||||
if (Inputs(index)->OperationName() == LearnableParameter<ElemType>::TypeName())
|
||||
//derive number of rows if possible
|
||||
for (size_t index = 0; index < 2; index++)
|
||||
{
|
||||
size_t rows = Inputs(index)->FunctionValues().GetNumRows() == 0? Inputs(1-index)->FunctionValues().GetNumRows() : Inputs(index)->FunctionValues().GetNumRows();
|
||||
size_t cols = Inputs(index)->FunctionValues().GetNumCols() == 0? Inputs(1-index)->FunctionValues().GetNumCols() : Inputs(index)->FunctionValues().GetNumCols();
|
||||
Inputs(index)->FunctionValues().Resize(rows, cols);
|
||||
}
|
||||
|
||||
index = 1;
|
||||
if (Inputs(index)->OperationName() == LearnableParameter<ElemType>::TypeName())
|
||||
{
|
||||
size_t rows = Inputs(index)->FunctionValues().GetNumRows() == 0? Inputs(1-index)->FunctionValues().GetNumRows() : Inputs(index)->FunctionValues().GetNumRows();
|
||||
size_t cols = Inputs(index)->FunctionValues().GetNumCols() == 0? Inputs(1-index)->FunctionValues().GetNumCols() : Inputs(index)->FunctionValues().GetNumCols();
|
||||
Inputs(index)->FunctionValues().Resize(rows, cols);
|
||||
if (Inputs(index)->OperationName() == LearnableParameter<ElemType>::TypeName())
|
||||
{
|
||||
size_t rows = Inputs(index)->FunctionValues().GetNumRows() == 0 ? Inputs(1 - index)->FunctionValues().GetNumRows() : Inputs(index)->FunctionValues().GetNumRows();
|
||||
size_t cols = Inputs(index)->FunctionValues().GetNumCols() == 0 ? Inputs(1 - index)->FunctionValues().GetNumCols() : Inputs(index)->FunctionValues().GetNumCols();
|
||||
Inputs(index)->FunctionValues().Resize(rows, cols);
|
||||
}
|
||||
}
|
||||
|
||||
if (Inputs(0)->FunctionValues().GetNumElements() == 0 || Inputs(1)->FunctionValues().GetNumElements() == 0)
|
||||
throw std::logic_error("ElementTimes operation: one of the operants has 0 element.");
|
||||
|
||||
size_t rows0 = Inputs(0)->FunctionValues().GetNumRows(), cols0 = Inputs(0)->FunctionValues().GetNumCols();
|
||||
size_t rows1 = Inputs(1)->FunctionValues().GetNumRows(), cols1 = Inputs(1)->FunctionValues().GetNumCols();
|
||||
|
||||
if (rows0 != rows1 || (cols0 != cols1 && cols0 != 1 && cols1 != 1))
|
||||
if (Inputs(1)->FunctionValues().GetNumRows() != Inputs(0)->FunctionValues().GetNumRows() ||
|
||||
Inputs(1)->FunctionValues().GetNumCols() != Inputs(0)->FunctionValues().GetNumCols())
|
||||
throw std::logic_error("The Matrix<ElemType> dimension in the ElementTimes operation does not match.");
|
||||
|
||||
FunctionValues().Resize(Inputs(0)->FunctionValues().GetNumRows(), Inputs(0)->FunctionValues().GetNumCols());
|
||||
|
@@ -1387,6 +1348,364 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
template class ElementTimesNode<float>;
|
||||
template class ElementTimesNode<double>;
|
||||
|
||||
template<class ElemType>
|
||||
class RowElementTimesNode : public ComputationNode<ElemType>
|
||||
{
|
||||
UsingComputationNodeMembers;
|
||||
public:
|
||||
RowElementTimesNode(const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"") : ComputationNode<ElemType>(deviceId), m_tempMatrix(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
m_deviceId = deviceId;
|
||||
MoveMatricesToDevice(deviceId);
|
||||
InitRecurrentNode();
|
||||
}
|
||||
|
||||
RowElementTimesNode(File& fstream, const size_t modelVersion, const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"") : ComputationNode<ElemType>(deviceId), m_tempMatrix(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
LoadFromFile(fstream, modelVersion, deviceId);
|
||||
}
|
||||
|
||||
// copy constructor
|
||||
RowElementTimesNode(const RowElementTimesNode<ElemType>* node, const std::wstring& newName, const CopyNodeFlags flags) : ComputationNode<ElemType>(node->m_deviceId), m_tempMatrix(node->m_deviceId)
|
||||
{
|
||||
node->CopyTo(this, newName, flags);
|
||||
}
|
||||
|
||||
virtual ComputationNodePtr Duplicate(const std::wstring& newName, const CopyNodeFlags flags) const
|
||||
{
|
||||
const std::wstring& name = (newName == L"") ? NodeName() : newName;
|
||||
|
||||
ComputationNodePtr node = new RowElementTimesNode<ElemType>(this, name, flags);
|
||||
return node;
|
||||
}
|
||||
|
||||
virtual const std::wstring OperationName() const { return TypeName(); }
|
||||
static const std::wstring TypeName() { return L"RowElementTimes"; }
|
||||
|
||||
virtual void ComputeInputPartial(const size_t inputIndex)
|
||||
{
|
||||
if (inputIndex > 1)
|
||||
throw std::invalid_argument("RowElementTimes operation only takes two inputs.");
|
||||
|
||||
if (inputIndex == 0)
|
||||
{
|
||||
ComputeInputPartialLeftS(Inputs(1)->FunctionValues(), Inputs(0)->GradientValues(), GradientValues(), m_tempMatrix);
|
||||
}
|
||||
else
|
||||
{
|
||||
ComputeInputPartialRightS(Inputs(0)->FunctionValues(), Inputs(1)->GradientValues(), GradientValues(), m_tempMatrix);
|
||||
}
|
||||
}
|
||||
|
||||
virtual void ComputeInputPartial(const size_t inputIndex, const size_t timeIdxInSeq)
|
||||
{
|
||||
if (inputIndex > 1)
|
||||
throw std::invalid_argument("RowElementTimes operation only takes two inputs.");
|
||||
|
||||
Matrix<ElemType> sliceInput0Grad = Inputs(inputIndex)->GradientValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
Matrix<ElemType> sliceOutputGrad = GradientValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
Matrix<ElemType> sliceInput1Value = Inputs(1 - inputIndex)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
if (inputIndex == 0)
|
||||
{
|
||||
ComputeInputPartialLeftS(sliceInput1Value, sliceInput0Grad, sliceOutputGrad, m_tempMatrix);
|
||||
}
|
||||
else
|
||||
{
|
||||
ComputeInputPartialRightS(sliceInput1Value, sliceInput0Grad, sliceOutputGrad, m_tempMatrix);
|
||||
}
|
||||
}
|
||||
|
||||
//left (input 0) is a matrix
|
||||
static void WINAPI ComputeInputPartialLeftS(Matrix<ElemType>& input1FunctionValues,
|
||||
Matrix<ElemType>& input0GradientValues,
|
||||
const Matrix<ElemType>& gradientValues,
|
||||
Matrix<ElemType>& tempMatrix)
|
||||
{
|
||||
tempMatrix.SetValue(gradientValues);
|
||||
tempMatrix.RowElementMultiplyWith(input1FunctionValues);
|
||||
input0GradientValues += tempMatrix;
|
||||
|
||||
#if NANCHECK
|
||||
input0GradientValues.HasNan("RowElementTimes");
|
||||
#endif
|
||||
}
|
||||
|
||||
//right (input 1) is a row vector
|
||||
static void WINAPI ComputeInputPartialRightS(Matrix<ElemType>& input0FunctionValues,
|
||||
Matrix<ElemType>& input1GradientValues,
|
||||
const Matrix<ElemType>& gradientValues,
|
||||
Matrix<ElemType>& tempMatrix)
|
||||
{
|
||||
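// gradient w.r.t. the row vector (input 1): reduce the element-wise product of the incoming gradient and input 0 over rows, giving one partial per column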
tempMatrix.AssignInnerProductOf(gradientValues, input0FunctionValues, true);
|
||||
input1GradientValues += tempMatrix;
|
||||
|
||||
#if NANCHECK
|
||||
input1GradientValues.HasNan("RowElementTimes");
|
||||
#endif
|
||||
}
|
||||
virtual void EvaluateThisNode()
|
||||
{
|
||||
EvaluateThisNodeS(FunctionValues(), Inputs(0)->FunctionValues(), Inputs(1)->FunctionValues());
|
||||
}
|
||||
|
||||
virtual void EvaluateThisNode(const size_t timeIdxInSeq)
|
||||
{
|
||||
Matrix<ElemType> sliceInput0Value = Inputs(0)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
Matrix<ElemType> sliceInput1Value = Inputs(1)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
Matrix<ElemType> sliceOutputValue = m_functionValues.ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
EvaluateThisNodeS(sliceOutputValue, sliceInput0Value, sliceInput1Value);
|
||||
}
|
||||
|
||||
static void WINAPI EvaluateThisNodeS(Matrix<ElemType>& functionValues, const Matrix<ElemType>& input0, const Matrix<ElemType>& input1)
|
||||
{
|
||||
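// forward pass: copy input 0, then multiply each row element-wise by the row vector input 1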
functionValues.SetValue(input0);
|
||||
functionValues.RowElementMultiplyWith(input1);
|
||||
|
||||
#if NANCHECK
|
||||
functionValues.HasNan("RowElementTimes");
|
||||
#endif
|
||||
}
|
||||
|
||||
virtual void Validate()
|
||||
{
|
||||
PrintSelfBeforeValidation();
|
||||
|
||||
if (m_children.size() != 2)
|
||||
throw std::logic_error("RowElementTimes operation requires two inputs.");
|
||||
|
||||
if (Inputs(0)->FunctionValues().GetNumElements() == 0 || Inputs(1)->FunctionValues().GetNumElements() == 0)
|
||||
throw std::logic_error("RowElementTimes operation: one of the operants has 0 element.");
|
||||
|
||||
size_t rows0 = Inputs(0)->FunctionValues().GetNumRows(), cols0 = Inputs(0)->FunctionValues().GetNumCols();
|
||||
size_t rows1 = Inputs(1)->FunctionValues().GetNumRows(), cols1 = Inputs(1)->FunctionValues().GetNumCols();
|
||||
|
||||
if (cols0 != cols1 || rows1 != 1)
|
||||
throw std::logic_error("RowElementTimes: Either the second operand is not a row vector or the number of columns of operands does not match.");
|
||||
|
||||
FunctionValues().Resize(rows0, cols0);
|
||||
InferImageDimsFromInputs();
|
||||
}
|
||||
|
||||
virtual void InferImageDimsFromInputs()
|
||||
{
|
||||
//input 0 is the matrix and input 1 is a row vector
|
||||
InferImageDimsFromInput(0);
|
||||
}
|
||||
|
||||
virtual void AttachInputs(const ComputationNodePtr leftNode, const ComputationNodePtr rightNode)
|
||||
{
|
||||
m_children.resize(2);
|
||||
m_children[0] = leftNode;
|
||||
m_children[1] = rightNode;
|
||||
}
|
||||
|
||||
virtual void MoveMatricesToDevice(const DEVICEID_TYPE deviceId)
|
||||
{
|
||||
ComputationNode<ElemType>::MoveMatricesToDevice(deviceId);
|
||||
|
||||
if (deviceId != AUTOPLACEMATRIX)
|
||||
{
|
||||
if (m_tempMatrix.GetDeviceId() != deviceId)
|
||||
m_tempMatrix.TransferFromDeviceToDevice(m_tempMatrix.GetDeviceId(), deviceId);
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
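// scratch buffer reused when accumulating the gradients above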
Matrix<ElemType> m_tempMatrix;
|
||||
};
|
||||
|
||||
template class RowElementTimesNode<float>;
|
||||
template class RowElementTimesNode<double>;
|
||||
|
||||
template<class ElemType>
|
||||
class ColumnElementTimesNode : public ComputationNode<ElemType>
|
||||
{
|
||||
UsingComputationNodeMembers;
|
||||
public:
|
||||
ColumnElementTimesNode(const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"") : ComputationNode<ElemType>(deviceId), m_tempMatrix(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
m_deviceId = deviceId;
|
||||
MoveMatricesToDevice(deviceId);
|
||||
InitRecurrentNode();
|
||||
}
|
||||
|
||||
ColumnElementTimesNode(File& fstream, const size_t modelVersion, const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"") : ComputationNode<ElemType>(deviceId), m_tempMatrix(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
LoadFromFile(fstream, modelVersion, deviceId);
|
||||
}
|
||||
|
||||
// copy constructor
|
||||
ColumnElementTimesNode(const ColumnElementTimesNode<ElemType>* node, const std::wstring& newName, const CopyNodeFlags flags) : ComputationNode<ElemType>(node->m_deviceId), m_tempMatrix(node->m_deviceId)
|
||||
{
|
||||
node->CopyTo(this, newName, flags);
|
||||
}
|
||||
|
||||
virtual ComputationNodePtr Duplicate(const std::wstring& newName, const CopyNodeFlags flags) const
|
||||
{
|
||||
const std::wstring& name = (newName == L"") ? NodeName() : newName;
|
||||
|
||||
ComputationNodePtr node = new ColumnElementTimesNode<ElemType>(this, name, flags);
|
||||
return node;
|
||||
}
|
||||
|
||||
virtual const std::wstring OperationName() const { return TypeName(); }
|
||||
static const std::wstring TypeName() { return L"ColumnElementTimes"; }
|
||||
|
||||
virtual void ComputeInputPartial(const size_t inputIndex)
|
||||
{
|
||||
if (inputIndex > 1)
|
||||
throw std::invalid_argument("ColumnElementTimes operation only takes two inputs.");
|
||||
|
||||
if (inputIndex == 0)
|
||||
{
|
||||
ComputeInputPartialLeftS(Inputs(1)->FunctionValues(), Inputs(0)->GradientValues(), GradientValues(), m_tempMatrix);
|
||||
}
|
||||
else
|
||||
{
|
||||
ComputeInputPartialRightS(Inputs(0)->FunctionValues(), Inputs(1)->GradientValues(), GradientValues(), m_tempMatrix);
|
||||
}
|
||||
}
|
||||
|
||||
virtual void ComputeInputPartial(const size_t inputIndex, const size_t timeIdxInSeq)
|
||||
{
|
||||
if (inputIndex > 1)
|
||||
throw std::invalid_argument("ColumnElementTimes operation only takes two inputs.");
|
||||
|
||||
Matrix<ElemType> sliceOutputGrad = GradientValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
if (inputIndex == 0)
|
||||
{
|
||||
Matrix<ElemType> sliceInput0Grad = Inputs(0)->GradientValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
ComputeInputPartialLeftS(Inputs(1)->FunctionValues(), sliceInput0Grad, sliceOutputGrad, m_tempMatrix);
|
||||
}
|
||||
else
|
||||
{
|
||||
Matrix<ElemType> sliceInput0Value = Inputs(0)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
ComputeInputPartialRightS(sliceInput0Value, Inputs(1)->GradientValues(), sliceOutputGrad, m_tempMatrix);
|
||||
}
|
||||
}
|
||||
|
||||
//left (input 0) is a matrix
|
||||
static void WINAPI ComputeInputPartialLeftS(Matrix<ElemType>& input1FunctionValues,
|
||||
Matrix<ElemType>& input0GradientValues,
|
||||
const Matrix<ElemType>& gradientValues,
|
||||
Matrix<ElemType>& tempMatrix)
|
||||
{
|
||||
tempMatrix.SetValue(gradientValues);
|
||||
tempMatrix.ColumnElementMultiplyWith(input1FunctionValues);
|
||||
input0GradientValues += tempMatrix;
|
||||
|
||||
#if NANCHECK
|
||||
input0GradientValues.HasNan("ColumnElementTimes");
|
||||
#endif
|
||||
}
|
||||
|
||||
//right (input 1) is a col vector
|
||||
static void WINAPI ComputeInputPartialRightS(Matrix<ElemType>& input0FunctionValues,
|
||||
Matrix<ElemType>& input1GradientValues,
|
||||
const Matrix<ElemType>& gradientValues,
|
||||
Matrix<ElemType>& tempMatrix)
|
||||
{
|
||||
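// gradient w.r.t. the column vector (input 1): reduce the element-wise product of the incoming gradient and input 0 over columns, giving one partial per row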
tempMatrix.AssignInnerProductOf(gradientValues, input0FunctionValues, false);
|
||||
input1GradientValues += tempMatrix;
|
||||
|
||||
#if NANCHECK
|
||||
input1GradientValues.HasNan("ColumnElementTimes");
|
||||
#endif
|
||||
}
|
||||
virtual void EvaluateThisNode()
|
||||
{
|
||||
EvaluateThisNodeS(FunctionValues(), Inputs(0)->FunctionValues(), Inputs(1)->FunctionValues());
|
||||
}
|
||||
|
||||
virtual void EvaluateThisNode(const size_t timeIdxInSeq)
|
||||
{
|
||||
Matrix<ElemType> sliceInput0Value = Inputs(0)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
Matrix<ElemType> sliceOutputValue = m_functionValues.ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
EvaluateThisNodeS(sliceOutputValue, sliceInput0Value, Inputs(1)->FunctionValues());
|
||||
}
|
||||
|
||||
static void WINAPI EvaluateThisNodeS(Matrix<ElemType>& functionValues, const Matrix<ElemType>& input0, const Matrix<ElemType>& input1)
|
||||
{
|
||||
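// forward pass: copy input 0, then multiply each column element-wise by the column vector input 1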
functionValues.SetValue(input0);
|
||||
functionValues.ColumnElementMultiplyWith(input1);
|
||||
|
||||
#if NANCHECK
|
||||
functionValues.HasNan("ColumnElementTimes");
|
||||
#endif
|
||||
}
|
||||
|
||||
virtual void Validate()
|
||||
{
|
||||
PrintSelfBeforeValidation();
|
||||
|
||||
if (m_children.size() != 2)
|
||||
throw std::logic_error("ColumnElementTimes operation requires two inputs.");
|
||||
|
||||
//derive number of rows if possible
|
||||
for (size_t index = 0; index < 2; index++)
|
||||
{
|
||||
if (Inputs(index)->OperationName() == LearnableParameter<ElemType>::TypeName())
|
||||
{
|
||||
size_t rows = Inputs(index)->FunctionValues().GetNumRows() == 0 ? Inputs(1 - index)->FunctionValues().GetNumRows() : Inputs(index)->FunctionValues().GetNumRows();
|
||||
size_t cols = Inputs(index)->FunctionValues().GetNumCols() == 0 ? Inputs(1 - index)->FunctionValues().GetNumCols() : Inputs(index)->FunctionValues().GetNumCols();
|
||||
Inputs(index)->FunctionValues().Resize(rows, cols);
|
||||
}
|
||||
}
|
||||
|
||||
if (Inputs(0)->FunctionValues().GetNumElements() == 0 || Inputs(1)->FunctionValues().GetNumElements() == 0)
|
||||
throw std::logic_error("ColumnElementTimes operation: one of the operants has 0 element.");
|
||||
|
||||
size_t rows0 = Inputs(0)->FunctionValues().GetNumRows(), cols0 = Inputs(0)->FunctionValues().GetNumCols();
|
||||
size_t rows1 = Inputs(1)->FunctionValues().GetNumRows(), cols1 = Inputs(1)->FunctionValues().GetNumCols();
|
||||
|
||||
if (rows0 != rows1 || cols1 != 1)
|
||||
throw std::logic_error("ColumnElementTimes: Either the second operand is not a column vector or the number of rows of operands does not match.");
|
||||
|
||||
FunctionValues().Resize(rows0, cols0);
|
||||
InferImageDimsFromInputs();
|
||||
}
|
||||
|
||||
virtual void InferImageDimsFromInputs()
|
||||
{
|
||||
//input 0 is the matrix and input 1 is a column vector
|
||||
InferImageDimsFromInput(0);
|
||||
}
|
||||
|
||||
virtual void AttachInputs(const ComputationNodePtr leftNode, const ComputationNodePtr rightNode)
|
||||
{
|
||||
m_children.resize(2);
|
||||
m_children[0] = leftNode;
|
||||
m_children[1] = rightNode;
|
||||
}
|
||||
|
||||
virtual void MoveMatricesToDevice(const DEVICEID_TYPE deviceId)
|
||||
{
|
||||
ComputationNode<ElemType>::MoveMatricesToDevice(deviceId);
|
||||
|
||||
if (deviceId != AUTOPLACEMATRIX)
|
||||
{
|
||||
if (m_tempMatrix.GetDeviceId() != deviceId)
|
||||
m_tempMatrix.TransferFromDeviceToDevice(m_tempMatrix.GetDeviceId(), deviceId);
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
Matrix<ElemType> m_tempMatrix;
|
||||
};
|
||||
|
||||
template class ColumnElementTimesNode<float>;
|
||||
template class ColumnElementTimesNode<double>;
|
||||
|
||||
template<class ElemType>
|
||||
class PlusNode : public ComputationNode<ElemType>
|
||||
{
|
||||
|
@@ -3021,14 +3340,14 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
UsingComputationNodeMembers;
|
||||
|
||||
size_t mStrideDim; /// the dimension index on which stride works
|
||||
size_t mStride; /// the stride
|
||||
size_t m_StrideDim; /// the dimension index on which stride works
|
||||
size_t m_Stride; /// the stride
|
||||
|
||||
private:
|
||||
|
||||
void UpdateStride(const Matrix<ElemType>& input1)
|
||||
{
|
||||
mStride = input1.GetNumCols();
|
||||
m_Stride = input1.GetNumCols();
|
||||
}
|
||||
|
||||
public:
|
||||
|
@@ -3037,21 +3356,21 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
m_deviceId = deviceId;
|
||||
MoveMatricesToDevice(deviceId);
|
||||
mStride = 1;
|
||||
m_Stride = 1;
|
||||
InitRecurrentNode();
|
||||
}
|
||||
|
||||
StrideTimesNode(File& fstream, const size_t modelVersion, const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"") : ComputationNode<ElemType>(deviceId)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
mStride = 1;
|
||||
m_Stride = 1;
|
||||
LoadFromFile(fstream, modelVersion, deviceId);
|
||||
}
|
||||
|
||||
// copy constructor
|
||||
StrideTimesNode(const StrideTimesNode<ElemType>* node, const std::wstring& newName, const CopyNodeFlags flags) : ComputationNode<ElemType>(node->m_deviceId)
|
||||
{
|
||||
mStride = 1;
|
||||
m_Stride = 1;
|
||||
node->CopyTo(this, newName, flags);
|
||||
}
|
||||
|
||||
|
@@ -3078,7 +3397,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
Matrix<ElemType> sliceOutputGrad = GradientValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
if (mStrideDim == 1) /// column stride
|
||||
if (m_StrideDim == 1) /// column stride
|
||||
{
|
||||
if (inputIndex == 0) //left derivative
|
||||
{
|
||||
|
@@ -3133,7 +3452,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
}
|
||||
}
|
||||
else if (mStrideDim == 0) /// row stride
|
||||
else if (m_StrideDim == 0) /// row stride
|
||||
{
|
||||
if (inputIndex == 0) //left derivative
|
||||
{
|
||||
|
@@ -3226,12 +3545,12 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
{
|
||||
size_t rows0 = Inputs(0)->FunctionValues().GetNumRows(), cols1 = Inputs(1)->FunctionValues().GetNumCols();
|
||||
UpdateStride(Inputs(1)->FunctionValues());
|
||||
if (mStrideDim == 0)
|
||||
if (m_StrideDim == 0)
|
||||
FunctionValues().Resize(rows0 / m_samplesInRecurrentStep, cols1);
|
||||
if (mStrideDim == 1)
|
||||
if (m_StrideDim == 1)
|
||||
FunctionValues().Resize(rows0, cols1);
|
||||
|
||||
EvaluateThisNodeS(FunctionValues(), Inputs(0)->FunctionValues(), Inputs(1)->FunctionValues(), mStride, mStrideDim);
|
||||
EvaluateThisNodeS(FunctionValues(), Inputs(0)->FunctionValues(), Inputs(1)->FunctionValues(), m_Stride, m_StrideDim);
|
||||
#ifdef DEBUG_DECODER
|
||||
fprintf(stderr, "Times node %ls output norm = %.8e, input(0) norm = %.8e, input(1) norm = %.8e\n", this->NodeName().c_str(), FunctionValues().FrobeniusNorm(),
|
||||
Inputs(0)->FunctionValues().FrobeniusNorm(), Inputs(1)->FunctionValues().FrobeniusNorm());
|
||||
|
@@ -3244,13 +3563,13 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
Matrix<ElemType> sliceInput1Value = Inputs(1)->FunctionValues().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
UpdateStride(sliceInput1Value);
|
||||
if (mStrideDim == 0)
|
||||
if (m_StrideDim == 0)
|
||||
FunctionValues().Resize(rows0 / m_samplesInRecurrentStep, cols1);
|
||||
if (mStrideDim == 1)
|
||||
if (m_StrideDim == 1)
|
||||
FunctionValues().Resize(rows0, cols1);
|
||||
Matrix<ElemType> sliceOutputValue = m_functionValues.ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
|
||||
|
||||
EvaluateThisNodeS(sliceOutputValue, Inputs(0)->FunctionValues(), sliceInput1Value, mStride, mStrideDim);
|
||||
EvaluateThisNodeS(sliceOutputValue, Inputs(0)->FunctionValues(), sliceInput1Value, m_Stride, m_StrideDim);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -3344,30 +3663,30 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
if (Inputs(2)->FunctionValues().GetNumElements() != 1)
|
||||
LogicError("StrideTimes : input(2) should be a single element matrix");
|
||||
|
||||
mStrideDim = (size_t) Inputs(2)->FunctionValues().Get00Element();
|
||||
m_StrideDim = (size_t) Inputs(2)->FunctionValues().Get00Element();
|
||||
size_t rows0 = Inputs(0)->FunctionValues().GetNumRows(), cols0 = Inputs(0)->FunctionValues().GetNumCols();
|
||||
size_t rows1 = Inputs(1)->FunctionValues().GetNumRows(), cols1 = Inputs(1)->FunctionValues().GetNumCols();
|
||||
|
||||
if (mStrideDim != 0 && mStrideDim != 1)
|
||||
if (m_StrideDim != 0 && m_StrideDim != 1)
|
||||
LogicError("StrideTimes : stride dim must be either 0 (row) or 1 (column)");
|
||||
|
||||
if (Inputs(2)->NeedGradient())
|
||||
LogicError("StrideTImes : no gradient update should be on input(2)");
|
||||
|
||||
//cols0 and rows1 may have been changed so don't use them in the following check
|
||||
if (mStrideDim == 0)
|
||||
if (m_StrideDim == 0)
|
||||
{
|
||||
if (rows1 != cols0)
|
||||
LogicError("The Matrix dimension in the StrideTimes operation in dim %d does not match for cols %d in A and rows %d in B.", mStrideDim, cols0, rows1);
|
||||
size_t T1 = rows0 / mStride;
|
||||
LogicError("The Matrix dimension in the StrideTimes operation in dim %d does not match for cols %d in A and rows %d in B.", m_StrideDim, cols0, rows1);
|
||||
size_t T1 = rows0 / m_Stride;
|
||||
FunctionValues().Resize(T1, cols1);
|
||||
}
|
||||
|
||||
//cols0 and rows1 may have been changed so don't use them in the following check
|
||||
if (mStrideDim == 1)
|
||||
if (m_StrideDim == 1)
|
||||
{
|
||||
if (cols0/mStride != rows1)
|
||||
LogicError("The Matrix dimension in the StrideTimes operation in dim %d does not match for cols %d in A and row number %d in B.", mStrideDim, cols0, rows1);
|
||||
if (cols0/m_Stride != rows1)
|
||||
LogicError("The Matrix dimension in the StrideTimes operation in dim %d does not match for cols %d in A and row number %d in B.", m_StrideDim, cols0, rows1);
|
||||
FunctionValues().Resize(rows0, cols1);
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,284 +1,290 @@
|
|||
//
|
||||
// <copyright file="NetworkDescriptionLanguage.cpp" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// NetworkDescriptionLanguage.cpp : Code used to interpret the Network Description Language.
|
||||
//
|
||||
|
||||
#define _CRT_SECURE_NO_WARNINGS // "secure" CRT not available on all platforms --add this at the top of all CPP files that give "function or variable may be unsafe" warnings
|
||||
|
||||
#include "NetworkDescriptionLanguage.h"
|
||||
#include "SynchronousExecutionEngine.h"
|
||||
|
||||
namespace Microsoft { namespace MSR { namespace CNTK {
|
||||
|
||||
// DuplicateNode - Duplicate a node in a macro as needed (it might already exist)
|
||||
// node - node we are duplicating
|
||||
// return - the new duplicated node if it didn't exist, or the previously duplicated node if it already did
|
||||
template <typename ElemType>
|
||||
NDLNode<ElemType>* NDLScript<ElemType>::DuplicateNode(NDLNode<ElemType>* node)
|
||||
{
|
||||
NDLNode<ElemType>* newNode = node->Copy();
|
||||
m_children.push_back(newNode);
|
||||
newNode->SetParentScript(this);
|
||||
return newNode;
|
||||
}
|
||||
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType>::NDLScript(const NDLScript& copyMe) : ConfigParser(copyMe)
|
||||
{
|
||||
m_baseName = copyMe.m_baseName;
|
||||
m_scriptString = copyMe.m_scriptString;
|
||||
m_macroNode = copyMe.m_macroNode;
|
||||
m_noDefinitions = copyMe.m_noDefinitions; // no definitions can be made in this script, interpret all macro/function names as calls
|
||||
m_definingMacro = false; // not defining when expanding macros (only reason to call this method)
|
||||
m_cn = copyMe.m_cn; // computation network to use for backup symbol lookup. Used for MEL where NDL and network nodes are mixed
|
||||
|
||||
// script lines in parsed node order
|
||||
for (NDLNode<ElemType>* node : copyMe.m_script)
|
||||
{
|
||||
// duplicate this node
|
||||
NDLNode<ElemType>* newNode = DuplicateNode(node);
|
||||
AddSymbol(newNode->GetName(), newNode);
|
||||
|
||||
// now get the parameters to the functions added
|
||||
ConfigValue value = newNode->GetParamString();
|
||||
ParseParameters(newNode, value, true /*createNew*/);
|
||||
|
||||
// add it to the new script
|
||||
m_script.push_back(newNode);
|
||||
}
|
||||
|
||||
// now search the symbol table for other symbols that haven't been copied yet
|
||||
// this happens for constants defined in macros and such
|
||||
for (std::pair<std::string, NDLNode<ElemType>*> pair : copyMe.m_symbols)
|
||||
{
|
||||
// if we can't find the symbol in the copied symbol table, copy it here
|
||||
if (m_symbols.find(pair.first) == end(m_symbols))
|
||||
{
|
||||
// duplicate this node
|
||||
NDLNode<ElemType>* newNode = DuplicateNode(pair.second);
|
||||
AddSymbol(pair.first, newNode);
|
||||
// anything that takes parameters should be evaluated in the script loop
|
||||
assert(newNode->GetParamString().empty());
|
||||
}
|
||||
}
|
||||
// NOTE: the child nodes get populated as the nodes are duplicated in the loop above
|
||||
// we shouldn't try to duplicate them separately
|
||||
}
|
||||
|
||||
// copy constructor, creates a new disconnected copy of this node
|
||||
// doesn't copy everything, so use for macro expansion only (it's private)
|
||||
// copyMe - node to copy
|
||||
template <typename ElemType>
|
||||
NDLNode<ElemType>::NDLNode(const NDLNode<ElemType>& copyMe)
|
||||
{
|
||||
m_name = copyMe.m_name; // value on the left of the equals
|
||||
m_value = copyMe.m_value; // value on the right of the equals (CN node name, or value)
|
||||
m_parent = copyMe.m_parent; // parent script
|
||||
m_type = copyMe.m_type; //type of node
|
||||
m_paramString = copyMe.m_paramString; // parameter of a function/array
|
||||
m_paramMacro = copyMe.m_paramMacro; // parameter of a macro (the variables used in the macro definition)
|
||||
// don't copy over the parameters, they will be reparsed after the copy
|
||||
//m_parameters = copyMe.m_parameters; // copy over the parameters straight
|
||||
|
||||
m_eval = nullptr; // pointer to an arbitrary eval structure
|
||||
// script for macro calls, need to expand the macro for each call
|
||||
// if it's not expanded the evalValue will be overwritten on multiple calls to a macro
|
||||
m_script = (copyMe.m_script) ? new NDLScript<ElemType>(*copyMe.m_script) : nullptr;
|
||||
}
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType>::NDLScript(const NDLScript&& moveMe) : ConfigParser(move(moveMe))
|
||||
{
|
||||
m_baseName = move(moveMe.m_baseName);
|
||||
m_scriptString = move(moveMe.m_scriptString);
|
||||
m_script = move(moveMe.m_script); // script lines in parsed node order, macros will have definition followed by body
|
||||
m_symbols = move(moveMe.m_symbols); // symbol table
|
||||
m_macroNode = move(moveMe.m_macroNode); // set when interpreting a macro definition
|
||||
m_noDefinitions = move(moveMe.m_noDefinitions); // no definitions can be made in this script, interpret all macro/function names as calls
|
||||
m_definingMacro = move(moveMe.m_definingMacro);
|
||||
m_children = move(moveMe.m_children); // child nodes. Note that m_script nodes may not be children of this object, they include macro nodes
|
||||
m_cn = move(moveMe.m_cn); // computation network to use for backup symbol lookup. Used for MEL where NDL and network nodes are mixed
|
||||
}
|
||||
|
||||
// EqualInsensitive - check to see if two nodes are equal
|
||||
// string1 - [in,out] string to compare; if the comparison matches case-insensitively but not case-sensitively, string1 is replaced with the case-sensitive version
|
||||
// string2 - second string to compare
|
||||
// alternate - alternate naming of the string
|
||||
// return - true if strings are equal insensitive and modifies string1 to sensitive version if different
|
||||
bool EqualInsensitive(std::wstring& string1, const std::wstring& string2, const wchar_t* alternate/*=NULL*/)
|
||||
{
|
||||
bool equal = !_wcsnicmp(string1.c_str(), string2.c_str(), string1.size()) && string1.size()==string2.size();
|
||||
|
||||
if (!equal && alternate != NULL)
|
||||
equal = !_wcsnicmp(string1.c_str(), alternate, string1.size()) && string1.size()==wcslen(alternate);
|
||||
|
||||
if (equal)
|
||||
string1 = string2;
|
||||
|
||||
return equal;
|
||||
}
|
||||
|
||||
// ++ operator for this enum, so loops work
|
||||
NDLPass &operator++(NDLPass &ndlPass) {
|
||||
assert(ndlPass != ndlPassMax);
|
||||
ndlPass = static_cast<NDLPass>(ndlPass + 1);
|
||||
return ndlPass;
|
||||
}
|
||||
|
||||
// CheckFunction - check to see if we match a function name
|
||||
// string1 - [in,out] string to compare; if the comparison matches and covers at least half of the full node name, string1 is replaced with the full node name
|
||||
// allowUndeterminedVariable - [out] set to true if undetermined variables (symbols yet to be defined) are allowed here
|
||||
// return - true if function name found
|
||||
template <typename ElemType>
|
||||
bool CheckFunction(std::string& p_nodeType, bool* allowUndeterminedVariable)
|
||||
{
|
||||
std::wstring nodeType = msra::strfun::utf16(p_nodeType);
|
||||
bool ret = false;
|
||||
if (allowUndeterminedVariable)
|
||||
*allowUndeterminedVariable = true; // by default we allow undetermined variables
|
||||
if (EqualInsensitive(nodeType, InputValue<ElemType>::TypeName(), L"Input"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, InputValue<ElemType>::SparseTypeName(), L"SparseInput"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LearnableParameter<ElemType>::TypeName(), L"Parameter"))
|
||||
ret = true;
|
||||
//else if (EqualInsensitive(nodeType, SparseLearnableParameter<ElemType>::TypeName(), L"SparseParameter"))
|
||||
// ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"Constant", L"Const"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"ImageInput", L"Image"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"SparseImageInput", L"SparseImage"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SumElementsNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SumColumnElementsNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ScaleNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TransposeNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TransposeTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ElementTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DiagTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosDistanceNode<ElemType>::TypeName(), L"CosDist"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, KhatriRaoProductNode<ElemType>::TypeName(), L"ColumnwiseCrossProduct"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PlusNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MinusNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, NegateNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RectifiedLinearNode<ElemType>::TypeName(), L"ReLU"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SigmoidNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TanhNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ExpNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LogNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosineNode<ElemType>::TypeName(), L"Cos"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SoftmaxNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LogSoftmaxNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SquareErrorNode<ElemType>::TypeName(), L"SE"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CrossEntropyWithSoftmaxNode<ElemType>::TypeName(), L"CEWithSM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CrossEntropyNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ClassBasedCrossEntropyWithSoftmaxNode<ElemType>::TypeName(), L"CBCEWithSM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MatrixL1RegNode<ElemType>::TypeName(), L"L1Reg"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MatrixL2RegNode<ElemType>::TypeName(), L"L2Reg"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PerDimMeanVarNormalizationNode<ElemType>::TypeName(),L"PerDimMVNorm"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PerDimMeanVarDeNormalizationNode<ElemType>::TypeName(),L"PerDimMVDeNorm"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ErrorPredictionNode<ElemType>::TypeName(), L"ClassificationError"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DropoutNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ReshapeNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowRepeatNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MeanNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, InvStdDevNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ConvolutionNode<ElemType>::TypeName(), L"Convolve"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MaxPoolingNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, AveragePoolingNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PastValueNode<ElemType>::TypeName(), L"Delay"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, FutureValueNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowSliceNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowStackNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LookupTableNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, GMMLogLikelihoodNode<ElemType>::TypeName(), L"GMMLL"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosDistanceWithNegativeSamplesNode<ElemType>::TypeName(), L"CosWithNegSamples"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TimeReverseNode<ElemType>::TypeName(), L"TimeReverse"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CRFNode<ElemType>::TypeName(), L"CRF"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DummyCriterionNode<ElemType>::TypeName(), L"DummyCriterion"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ParallelNode<ElemType>::TypeName(), L"Parallel"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LSTMNode<ElemType>::TypeName(), L"LSTM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PairNetworkNode<ElemType>::TypeName(), L"PairNetwork"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, StrideTimesNode<ElemType>::TypeName(), L"StrideTimes"))
|
||||
ret = true;
|
||||
|
||||
// return the actual node name in the parameter if we found something
|
||||
if (ret)
|
||||
{
|
||||
p_nodeType = msra::strfun::utf8(nodeType);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType> NDLScript<ElemType>::s_global("global");
|
||||
|
||||
// declare the static variables from the classes
|
||||
template<> NDLScript<float> NDLScript<float>::s_global{};
|
||||
template<> NDLScript<double> NDLScript<double>::s_global{};
|
||||
|
||||
template<> int NDLNode<float>::s_nameCounter = 0;
|
||||
template<> int NDLNode<double>::s_nameCounter = 0;
|
||||
|
||||
template class NDLNode<float>;
|
||||
template class NDLNode<double>;
|
||||
|
||||
template class NDLScript<float>;
|
||||
template class NDLScript<double>;
|
||||
|
||||
}}}
|
||||
//
|
||||
// <copyright file="NetworkDescriptionLanguage.cpp" company="Microsoft">
|
||||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// </copyright>
|
||||
//
|
||||
// NetworkDescriptionLanguage.cpp : Code used to interpret the Network Description Language.
|
||||
//
|
||||
|
||||
#define _CRT_SECURE_NO_WARNINGS // "secure" CRT not available on all platforms --add this at the top of all CPP files that give "function or variable may be unsafe" warnings
|
||||
|
||||
#include "NetworkDescriptionLanguage.h"
|
||||
#include "SynchronousExecutionEngine.h"
|
||||
|
||||
namespace Microsoft { namespace MSR { namespace CNTK {
|
||||
|
||||
// DuplicateNode - Duplicate a node in a macro as needed (it might already exist)
|
||||
// node - node we are duplicating
|
||||
// return - the new duplicated node if it didn't exist, or the previously duplicated node if it already did
|
||||
template <typename ElemType>
|
||||
NDLNode<ElemType>* NDLScript<ElemType>::DuplicateNode(NDLNode<ElemType>* node)
|
||||
{
|
||||
NDLNode<ElemType>* newNode = node->Copy();
|
||||
m_children.push_back(newNode);
|
||||
newNode->SetParentScript(this);
|
||||
return newNode;
|
||||
}
|
||||
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType>::NDLScript(const NDLScript& copyMe) : ConfigParser(copyMe)
|
||||
{
|
||||
m_baseName = copyMe.m_baseName;
|
||||
m_scriptString = copyMe.m_scriptString;
|
||||
m_macroNode = copyMe.m_macroNode;
|
||||
m_noDefinitions = copyMe.m_noDefinitions; // no definitions can be made in this script, interpret all macro/function names as calls
|
||||
m_definingMacro = false; // not defining when expanding macros (only reason to call this method)
|
||||
m_cn = copyMe.m_cn; // computation network to use for backup symbol lookup. Used for MEL where NDL and network nodes are mixed
|
||||
|
||||
// script lines in parsed node order
|
||||
for (NDLNode<ElemType>* node : copyMe.m_script)
|
||||
{
|
||||
// duplicate this node
|
||||
NDLNode<ElemType>* newNode = DuplicateNode(node);
|
||||
AddSymbol(newNode->GetName(), newNode);
|
||||
|
||||
// now get the parameters to the functions added
|
||||
ConfigValue value = newNode->GetParamString();
|
||||
ParseParameters(newNode, value, true /*createNew*/);
|
||||
|
||||
// add it to the new script
|
||||
m_script.push_back(newNode);
|
||||
}
|
||||
|
||||
// now search the symbol table for other symbols that haven't been copied yet
|
||||
// this happens for constants defined in macros and such
|
||||
for (std::pair<std::string, NDLNode<ElemType>*> pair : copyMe.m_symbols)
|
||||
{
|
||||
// if we can't find the symbol in the copied symbol table, copy it here
|
||||
if (m_symbols.find(pair.first) == end(m_symbols))
|
||||
{
|
||||
// duplicate this node
|
||||
NDLNode<ElemType>* newNode = DuplicateNode(pair.second);
|
||||
AddSymbol(pair.first, newNode);
|
||||
// anything that takes parameters should be evaluated in the script loop
|
||||
assert(newNode->GetParamString().empty());
|
||||
}
|
||||
}
|
||||
// NOTE: the child nodes get populated as the nodes are duplicated in the loop above
|
||||
// we shouldn't try to duplicate them separately
|
||||
}
|
||||
|
||||
// copy constructor, creates a new disconnected copy of this node
|
||||
// doesn't copy everything, so use for macro expansion only (it's private)
|
||||
// copyMe - node to copy
|
||||
template <typename ElemType>
|
||||
NDLNode<ElemType>::NDLNode(const NDLNode<ElemType>& copyMe)
|
||||
{
|
||||
m_name = copyMe.m_name; // value on the left of the equals
|
||||
m_value = copyMe.m_value; // value on the right of the equals (CN node name, or value)
|
||||
m_parent = copyMe.m_parent; // parent script
|
||||
m_type = copyMe.m_type; //type of node
|
||||
m_paramString = copyMe.m_paramString; // parameter of a function/array
|
||||
m_paramMacro = copyMe.m_paramMacro; // parameter of a macro (the variables used in the macro definition)
|
||||
// don't copy over the parameters, they will be reparsed after the copy
|
||||
//m_parameters = copyMe.m_parameters; // copy over the parameters straight
|
||||
|
||||
m_eval = nullptr; // pointer to an arbitrary eval structure
|
||||
// script for macro calls, need to expand the macro for each call
|
||||
// if it's not expanded the evalValue will be overwritten on multiple calls to a macro
|
||||
m_script = (copyMe.m_script) ? new NDLScript<ElemType>(*copyMe.m_script) : nullptr;
|
||||
}
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType>::NDLScript(const NDLScript&& moveMe) : ConfigParser(move(moveMe))
|
||||
{
|
||||
m_baseName = move(moveMe.m_baseName);
|
||||
m_scriptString = move(moveMe.m_scriptString);
|
||||
m_script = move(moveMe.m_script); // script lines in parsed node order, macros will have definition followed by body
|
||||
m_symbols = move(moveMe.m_symbols); // symbol table
|
||||
m_macroNode = move(moveMe.m_macroNode); // set when interpreting a macro definition
|
||||
m_noDefinitions = move(moveMe.m_noDefinitions); // no definitions can be made in this script, interpret all macro/function names as calls
|
||||
m_definingMacro = move(moveMe.m_definingMacro);
|
||||
m_children = move(moveMe.m_children); // child nodes. Note that m_script nodes may not be children of this object, they include macro nodes
|
||||
m_cn = move(moveMe.m_cn); // computation network to use for backup symbol lookup. Used for MEL where NDL and network nodes are mixed
|
||||
}
|
||||
|
||||
// EqualInsensitive - check to see if two nodes are equal
|
||||
// string1 - [in,out] string to compare; if the comparison matches case-insensitively but not case-sensitively, string1 is replaced with the case-sensitive version
|
||||
// string2 - second string to compare
|
||||
// alternate - alternate naming of the string
|
||||
// return - true if strings are equal insensitive and modifies string1 to sensitive version if different
|
||||
bool EqualInsensitive(std::wstring& string1, const std::wstring& string2, const wchar_t* alternate/*=NULL*/)
|
||||
{
|
||||
bool equal = !_wcsnicmp(string1.c_str(), string2.c_str(), string1.size()) && string1.size()==string2.size();
|
||||
|
||||
if (!equal && alternate != NULL)
|
||||
equal = !_wcsnicmp(string1.c_str(), alternate, string1.size()) && string1.size()==wcslen(alternate);
|
||||
|
||||
if (equal)
|
||||
string1 = string2;
|
||||
|
||||
return equal;
|
||||
}
|
||||
|
||||
// ++ operator for this enum, so loops work
|
||||
NDLPass &operator++(NDLPass &ndlPass) {
|
||||
assert(ndlPass != ndlPassMax);
|
||||
ndlPass = static_cast<NDLPass>(ndlPass + 1);
|
||||
return ndlPass;
|
||||
}
|
||||
|
||||
// CheckFunction - check to see if we match a function name
|
||||
// string1 - [in,out] string to compare; if the comparison matches and covers at least half of the full node name, string1 is replaced with the full node name
|
||||
// allowUndeterminedVariable - [out] set to true if undetermined variables (symbols yet to be defined) are allowed here
|
||||
// return - true if function name found
|
||||
template <typename ElemType>
|
||||
bool CheckFunction(std::string& p_nodeType, bool* allowUndeterminedVariable)
|
||||
{
|
||||
std::wstring nodeType = msra::strfun::utf16(p_nodeType);
|
||||
bool ret = false;
|
||||
if (allowUndeterminedVariable)
|
||||
*allowUndeterminedVariable = true; // by default we allow undetermined variables
|
||||
if (EqualInsensitive(nodeType, InputValue<ElemType>::TypeName(), L"Input"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, InputValue<ElemType>::SparseTypeName(), L"SparseInput"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LearnableParameter<ElemType>::TypeName(), L"Parameter"))
|
||||
ret = true;
|
||||
//else if (EqualInsensitive(nodeType, SparseLearnableParameter<ElemType>::TypeName(), L"SparseParameter"))
|
||||
// ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"Constant", L"Const"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"ImageInput", L"Image"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, L"SparseImageInput", L"SparseImage"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SumElementsNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SumColumnElementsNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ScaleNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TransposeNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TransposeTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, StrideTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ElementTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowElementTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ColumnElementTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DiagTimesNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosDistanceNode<ElemType>::TypeName(), L"CosDist"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, KhatriRaoProductNode<ElemType>::TypeName(), L"ColumnwiseCrossProduct"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PlusNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MinusNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, NegateNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RectifiedLinearNode<ElemType>::TypeName(), L"ReLU"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SigmoidNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TanhNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ExpNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LogNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosineNode<ElemType>::TypeName(), L"Cos"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SoftmaxNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LogSoftmaxNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, SquareErrorNode<ElemType>::TypeName(), L"SE"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CrossEntropyWithSoftmaxNode<ElemType>::TypeName(), L"CEWithSM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CrossEntropyNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ClassBasedCrossEntropyWithSoftmaxNode<ElemType>::TypeName(), L"CBCEWithSM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MatrixL1RegNode<ElemType>::TypeName(), L"L1Reg"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MatrixL2RegNode<ElemType>::TypeName(), L"L2Reg"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PerDimMeanVarNormalizationNode<ElemType>::TypeName(),L"PerDimMVNorm"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PerDimMeanVarDeNormalizationNode<ElemType>::TypeName(),L"PerDimMVDeNorm"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ErrorPredictionNode<ElemType>::TypeName(), L"ClassificationError"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DropoutNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ReshapeNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowRepeatNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MeanNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, InvStdDevNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ConvolutionNode<ElemType>::TypeName(), L"Convolve"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, MaxPoolingNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, AveragePoolingNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PastValueNode<ElemType>::TypeName(), L"Delay"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, FutureValueNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowSliceNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, RowStackNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LookupTableNode<ElemType>::TypeName()))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, GMMLogLikelihoodNode<ElemType>::TypeName(), L"GMMLL"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CosDistanceWithNegativeSamplesNode<ElemType>::TypeName(), L"CosWithNegSamples"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, TimeReverseNode<ElemType>::TypeName(), L"TimeReverse"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, CRFNode<ElemType>::TypeName(), L"CRF"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, DummyCriterionNode<ElemType>::TypeName(), L"DummyCriterion"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, ParallelNode<ElemType>::TypeName(), L"Parallel"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, LSTMNode<ElemType>::TypeName(), L"LSTM"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, PairNetworkNode<ElemType>::TypeName(), L"PairNetwork"))
|
||||
ret = true;
|
||||
else if (EqualInsensitive(nodeType, StrideTimesNode<ElemType>::TypeName(), L"StrideTimes"))
|
||||
ret = true;
|
||||
|
||||
// return the actual node name in the parameter if we found something
|
||||
if (ret)
|
||||
{
|
||||
p_nodeType = msra::strfun::utf8(nodeType);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
template <typename ElemType>
|
||||
NDLScript<ElemType> NDLScript<ElemType>::s_global("global");
|
||||
|
||||
// declare the static variables from the classes
|
||||
template<> NDLScript<float> NDLScript<float>::s_global{};
|
||||
template<> NDLScript<double> NDLScript<double>::s_global{};
|
||||
|
||||
template<> int NDLNode<float>::s_nameCounter = 0;
|
||||
template<> int NDLNode<double>::s_nameCounter = 0;
|
||||
|
||||
template class NDLNode<float>;
|
||||
template class NDLNode<double>;
|
||||
|
||||
template class NDLScript<float>;
|
||||
template class NDLScript<double>;
|
||||
|
||||
}}}
|
||||
|
|
|
@@ -6,6 +6,7 @@
|
|||
#include <cassert>
|
||||
#include <stdio.h>
|
||||
#include "Profiler.h"
|
||||
#include "BestGpu.h" // for CPUONLY flag only
|
||||
|
||||
#ifndef CPUONLY
|
||||
#include <cuda_profiler_api.h>
|
||||
|
@@ -15,7 +16,6 @@ void cudaProfilerStart() {}
|
|||
void cudaProfilerStop() {}
|
||||
#endif
|
||||
|
||||
|
||||
Profiler::Profiler(int numSamples)
|
||||
:m_numSamples(numSamples),
|
||||
m_isProfilingActive(false)
|
||||
|
|
|
@@ -32,7 +32,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
public:
|
||||
PastValueNode(const DEVICEID_TYPE deviceId=AUTOPLACEMATRIX, const std::wstring name = L"")
|
||||
: ComputationNode<ElemType>(deviceId), m_pastActivity(deviceId)
|
||||
: ComputationNode<ElemType>(deviceId), m_pastActivity(deviceId), m_boundaryInfo(CPUDEVICE)
|
||||
{
|
||||
m_nodeName = (name == L""? CreateUniqNodeName() : name);
|
||||
m_deviceId = deviceId;
|
||||
|
@@ -47,7 +47,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
|
||||
PastValueNode(File& fstream, const size_t modelVersion, const DEVICEID_TYPE deviceId=AUTOPLACEMATRIX, const std::wstring name = L"")
|
||||
: ComputationNode<ElemType>(deviceId), m_pastActivity(deviceId)
|
||||
: ComputationNode<ElemType>(deviceId), m_pastActivity(deviceId), m_boundaryInfo(CPUDEVICE)
|
||||
{
|
||||
m_nodeName = (name == L""? CreateUniqNodeName() : name);
|
||||
|
||||
|
@@ -439,7 +439,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
|
||||
public:
|
||||
FutureValueNode(const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"")
|
||||
: ComputationNode<ElemType>(deviceId), m_futureActivity(deviceId)
|
||||
: ComputationNode<ElemType>(deviceId), m_futureActivity(deviceId),m_boundaryInfo(CPUDEVICE)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
m_deviceId = deviceId;
|
||||
|
@@ -453,7 +453,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
|
|||
}
|
||||
|
||||
FutureValueNode(File& fstream, const size_t modelVersion, const DEVICEID_TYPE deviceId = AUTOPLACEMATRIX, const std::wstring name = L"")
|
||||
: ComputationNode<ElemType>(deviceId), m_futureActivity(deviceId)
|
||||
: ComputationNode<ElemType>(deviceId), m_futureActivity(deviceId), m_boundaryInfo(CPUDEVICE)
|
||||
{
|
||||
m_nodeName = (name == L"" ? CreateUniqNodeName() : name);
|
||||
|
||||
|
|
|
@@ -247,7 +247,7 @@ public:
|
|||
|
||||
if (doGradientCheck && sizeof(ElemType) != sizeof(double))
|
||||
{
|
||||
LogicError("Gradient check needs to use type = double");
|
||||
LogicError("Gradient check needs to use precision = double");
|
||||
}
|
||||
m_doUnitTest = configSGD("unittest", "false");
|
||||
|
||||
|
@@ -1668,10 +1668,6 @@ protected:
|
|||
else if (!std::isnan(epochCriterion) &&
|
||||
(epochCriterion > (baseCriterion * (ElemType) (1.0 + ((ElemType) m_minibatchSearchCriterionErrorMargin / 100.0)))))
|
||||
{
|
||||
fprintf(stderr, "AdaptiveMinibatchSearch: Search successful!!! Chose new minibatchSize of %d. "
|
||||
"EpochCriterion = %.10g vs BaseCriterion = %.10g\n\n",
|
||||
(int) lastTriedTrialMinibatchSize, lastTriedTrialEpochCriterion, baseCriterion);
|
||||
|
||||
// As soon as we see the Criterion (a measure of error) start to get larger than the
|
||||
// Criterion we started with, we stop.
|
||||
// TODO: if this is too sensitive, we can add a margin on the bases of percentage of
|
||||
|
@ -1682,11 +1678,18 @@ protected:
|
|||
{
|
||||
lastTriedTrialMinibatchSize = trialMinibatchSize;
|
||||
lastTriedTrialEpochCriterion = epochCriterion;
|
||||
fprintf(stderr, "AdaptiveMinibatchSearch: Keep searching... "
|
||||
"EpochCriterion = %.10g vs BaseCriterion = %.10g\n",
|
||||
epochCriterion, baseCriterion);
|
||||
if (trialMinibatchSizeFloat * minibatchSizeTuningFactor <= maxMinibatchSize)
|
||||
{
|
||||
fprintf(stderr, "AdaptiveMinibatchSearch: Keep searching... "
|
||||
"EpochCriterion = %.10g vs BaseCriterion = %.10g\n",
|
||||
epochCriterion, baseCriterion);
|
||||
}
|
||||
}
|
||||
}
|
||||
fprintf(stderr, "AdaptiveMinibatchSearch: Search successful!!! Chose new minibatchSize of %d. "
|
||||
"EpochCriterion = %.10g vs BaseCriterion = %.10g\n\n",
|
||||
(int) lastTriedTrialMinibatchSize, lastTriedTrialEpochCriterion, baseCriterion);
|
||||
|
||||
|
||||
return lastTriedTrialMinibatchSize;
|
||||
}
|
||||
|
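The reshuffled code above changes the search loop so that the "Keep searching" message is only printed when a larger trial size is still within maxMinibatchSize, and the "Search successful" summary is printed once, after the loop, for the last size that passed. A compact sketch of that control flow; EvaluateEpochCriterion and the constants are illustrative stand-ins, not CNTK's implementation:

// Compact sketch of the adaptive minibatch-size search control flow shown above.
#include <cmath>
#include <cstdio>

double EvaluateEpochCriterion(int minibatchSize)
{
    // Toy model: the criterion slowly degrades as the minibatch grows.
    return 1.0 + 0.001 * minibatchSize;
}

int SearchLargestFeasibleMinibatch(int startSize, int maxSize, double baseCriterion,
                                   double errorMarginPercent, double tuningFactor)
{
    double trialSizeFloat = startSize;
    int lastGoodSize = startSize;
    double lastGoodCriterion = baseCriterion;

    while (trialSizeFloat <= maxSize)
    {
        const int trialSize = (int)std::floor(trialSizeFloat);
        const double criterion = EvaluateEpochCriterion(trialSize);

        // Stop as soon as the criterion degrades beyond the allowed margin.
        if (!std::isnan(criterion) &&
            criterion > baseCriterion * (1.0 + errorMarginPercent / 100.0))
            break;

        lastGoodSize = trialSize;
        lastGoodCriterion = criterion;
        if (trialSizeFloat * tuningFactor <= maxSize)
            printf("Keep searching... criterion %.6g vs base %.6g\n", criterion, baseCriterion);
        trialSizeFloat *= tuningFactor;
    }

    printf("Search successful: chose minibatchSize %d (criterion %.6g vs base %.6g)\n",
           lastGoodSize, lastGoodCriterion, baseCriterion);
    return lastGoodSize;
}

int main()
{
    SearchLargestFeasibleMinibatch(64, 1024, 1.05, 2.0, 2.0);
    return 0;
}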
@@ -1856,7 +1859,7 @@ protected:
//for now since we share the same label masking flag we call this on the training
//criterion node only. Later, when we apply different labels on different nodes
//we need to add code to call this function multiple times, one for each criterion node
size_t numSamplesWithLabel = (*criterionNodes)[0]->GetNumSamplesWithLabel(actualMBSize);
size_t numSamplesWithLabel = net.GetNumSamplesWithLabel(actualMBSize);

std::vector<ElemType> mbEvalErrors(numEvalNodes, 0);
for (size_t i = 0; i < numEvalNodes; i++)
@@ -1882,9 +1885,6 @@ protected:
}
}

// Tries to set up derivative features for the next utterance.
AttemptUtteranceDerivativeFeatures(net, trainSetDataReader, FeatureNodes, inputMatrices);

timer.Stop();
numMBsRun++;
if (m_traceLevel > 0)

@@ -1953,6 +1953,9 @@ protected:
// DataEnd does reader specific process if sentence ending is reached
trainSetDataReader->DataEnd(endDataSentence);

// Tries to set up derivative features for the next utterance.
AttemptUtteranceDerivativeFeatures(net, trainSetDataReader, FeatureNodes, inputMatrices);

profiler.NextSample();
}
@@ -2102,30 +2105,40 @@ protected:
const size_t minibatchSize)
{
wstring checkPointFileName = GetCheckPointFileNameForEpoch(int(epoch));
// Saving into temporary file and then renaming it to the checkPointFileName
// This is a standard trick to avoid having corrupted checkpoint files if the process dies during writing
wstring tempFileName = checkPointFileName + L".tmp";

File fstream(checkPointFileName,
FileOptions::fileOptionsBinary | FileOptions::fileOptionsWrite);
fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BCKP");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BLearnRate");
fstream << totalSamplesSeen << learnRatePerSample << prevCriterion;
fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ELearnRate");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BMinibatchSize");
fstream << minibatchSize;
fstream.PutMarker(FileMarker::fileMarkerEndSection, L"EMinibatchSize");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BGradient");

for (auto smoothedGradientIter = smoothedGradients.begin(); smoothedGradientIter != smoothedGradients.end(); smoothedGradientIter++)
{
const Matrix<ElemType>& smoothedGradient = *smoothedGradientIter;
fstream << smoothedGradient;
File fstream(tempFileName,
FileOptions::fileOptionsBinary | FileOptions::fileOptionsWrite);
fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BCKP");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BLearnRate");
fstream << totalSamplesSeen << learnRatePerSample << prevCriterion;
fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ELearnRate");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BMinibatchSize");
fstream << minibatchSize;
fstream.PutMarker(FileMarker::fileMarkerEndSection, L"EMinibatchSize");

fstream.PutMarker(FileMarker::fileMarkerBeginSection, L"BGradient");

for (auto smoothedGradientIter = smoothedGradients.begin(); smoothedGradientIter != smoothedGradients.end(); smoothedGradientIter++)
{
const Matrix<ElemType>& smoothedGradient = *smoothedGradientIter;
fstream << smoothedGradient;
}

fstream.PutMarker(FileMarker::fileMarkerEndSection, L"EGradient");

fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ECKP");

// Ensuring that data is written
fstream.Flush();
}

fstream.PutMarker(FileMarker::fileMarkerEndSection, L"EGradient");

fstream.PutMarker(FileMarker::fileMarkerEndSection, L"ECKP");
renameOrDie(tempFileName, checkPointFileName);
}

bool LoadCheckPointInfo(const size_t epochNumber,
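The checkpoint change above writes the state to checkPointFileName + L".tmp" inside a scope (so the File is flushed and closed), and only then calls renameOrDie to move the temp file over the real checkpoint; a crash mid-write therefore cannot leave a truncated checkpoint behind. A minimal sketch of the same write-then-rename trick using standard C I/O; renameOrDie is re-implemented here purely for illustration:

// Minimal sketch of the temp-file-then-rename checkpoint trick used above.
// Standard C I/O stands in for CNTK's File class.
#include <cstdio>
#include <cstdlib>
#include <string>

static void renameOrDie(const std::string& from, const std::string& to)
{
    // std::rename replaces 'to' atomically on POSIX; on Windows it fails if
    // 'to' exists, so a production version would remove/replace explicitly.
    if (std::rename(from.c_str(), to.c_str()) != 0)
    {
        perror("rename");
        std::exit(EXIT_FAILURE);
    }
}

static void SaveCheckpoint(const std::string& checkPointFileName, double learnRate, int minibatchSize)
{
    const std::string tempFileName = checkPointFileName + ".tmp";
    {
        FILE* f = std::fopen(tempFileName.c_str(), "wb");
        if (!f) { perror("fopen"); std::exit(EXIT_FAILURE); }
        std::fwrite(&learnRate, sizeof(learnRate), 1, f);
        std::fwrite(&minibatchSize, sizeof(minibatchSize), 1, f);
        std::fflush(f);   // make sure the data left the stdio buffer
        std::fclose(f);   // close before renaming, like the scoped File above
    }
    renameOrDie(tempFileName, checkPointFileName);
}

int main()
{
    SaveCheckpoint("model.ckp", 0.1, 10);
    return 0;
}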
@@ -141,7 +141,7 @@ namespace Microsoft {
//for now since we share the same label masking flag we call this on one node only
//Later, when we apply different labels on different nodes
//we need to add code to call this function multiple times, one for each criterion node
size_t numSamplesWithLabel = evalNodes[0]->GetNumSamplesWithLabel(actualMBSize);
size_t numSamplesWithLabel = m_net.GetNumSamplesWithLabel(actualMBSize);
for (int i = 0; i<evalNodes.size(); i++)
{
m_net.Evaluate(evalNodes[i]);
@@ -0,0 +1,53 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html>
<head>
<title>AN4 License Terms</title>

<meta http-equiv="content-type"
content="text/html; charset=ISO-8859-1">
</head>
<body>

<h2>AN4 License Terms</h2>

<p>This audio database is free for use for any purpose (commercial or otherwise)
subject to the restrictions detailed below.</p>

<pre>
/* ====================================================================
* Copyright (c) 1991-2005 Carnegie Mellon University. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* This work was supported in part by funding from the Defense Advanced
* Research Projects Agency and the National Science Foundation of the
* United States of America, and the CMU Sphinx Speech Consortium.
*
* THIS SOFTWARE IS PROVIDED BY CARNEGIE MELLON UNIVERSITY ``AS IS'' AND
* ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY
* NOR ITS EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ====================================================================
*/
</pre>
</body>
</html>
Binary file not shown.
@@ -0,0 +1,3 @@
The contents of this directory are a modified version of the AN4 dataset, pre-processed and optimized for CNTK end-to-end testing.
The AN4 dataset is part of the CMU audio databases located at http://www.speech.cs.cmu.edu/databases/an4
This modified version of the dataset is distributed under the terms of the AN4 license, which can be found in AN4.LICENSE.html
The diff between the files is not shown because of its large size.
@@ -0,0 +1,948 @@
An4/71/71/cen5-fjam-b.mfc=Features/000000000.chunk[0,367]
|
||||
An4/213/213/cen4-fsaf2-b.mfc=Features/000000000.chunk[368,805]
|
||||
An4/513/513/cen7-mgah-b.mfc=Features/000000000.chunk[806,1173]
|
||||
An4/614/614/cen7-mkdb-b.mfc=Features/000000000.chunk[1174,1421]
|
||||
An4/507/507/cen1-mgah-b.mfc=Features/000000000.chunk[1422,1669]
|
||||
An4/693/693/cen8-mmkw-b.mfc=Features/000000000.chunk[1670,2027]
|
||||
An4/918/918/cen4-mtos-b.mfc=Features/000000000.chunk[2028,2335]
|
||||
An4/477/477/an257-mewl-b.mfc=Features/000000000.chunk[2336,2943]
|
||||
An4/454/454/an70-meht-b.mfc=Features/000000000.chunk[2944,3021]
|
||||
An4/254/254/cen6-ftmj-b.mfc=Features/000000000.chunk[3022,3249]
|
||||
An4/946/946/cen6-mwhw-b.mfc=Features/000000000.chunk[3250,3467]
|
||||
An4/122/122/cen4-fkdo-b.mfc=Features/000000000.chunk[3468,3735]
|
||||
An4/181/181/an183-fnsv-b.mfc=Features/000000000.chunk[3736,4093]
|
||||
An4/93/93/cen1-fjmd-b.mfc=Features/000000000.chunk[4094,4251]
|
||||
An4/128/128/an62-flmm2-b.mfc=Features/000000000.chunk[4252,4409]
|
||||
An4/688/688/cen3-mmkw-b.mfc=Features/000000000.chunk[4410,4617]
|
||||
An4/872/872/an332-msrb-b.mfc=Features/000000000.chunk[4618,4985]
|
||||
An4/624/624/cen5-mkem-b.mfc=Features/000000000.chunk[4986,5383]
|
||||
An4/146/146/cen2-flrp-b.mfc=Features/000000000.chunk[5384,5541]
|
||||
An4/198/198/cen2-fplp-b.mfc=Features/000000000.chunk[5542,5969]
|
||||
An4/239/239/cen4-ftal-b.mfc=Features/000000000.chunk[5970,6187]
|
||||
An4/49/49/an291-ffmm-b.mfc=Features/000000000.chunk[6188,6335]
|
||||
An4/306/306/cen7-mbmg-b.mfc=Features/000000000.chunk[6336,6733]
|
||||
An4/252/252/cen4-ftmj-b.mfc=Features/000000000.chunk[6734,7171]
|
||||
An4/800/800/an359-mscg2-b.mfc=Features/000000000.chunk[7172,7509]
|
||||
An4/771/771/an236-mrjc2-b.mfc=Features/000000000.chunk[7510,7597]
|
||||
An4/880/880/cen5-msrb-b.mfc=Features/000000000.chunk[7598,7955]
|
||||
An4/795/795/cen7-mrmg-b.mfc=Features/000000000.chunk[7956,8293]
|
||||
An4/821/821/cen7-msct-b.mfc=Features/000000000.chunk[8294,8611]
|
||||
An4/255/255/cen7-ftmj-b.mfc=Features/000000000.chunk[8612,8949]
|
||||
An4/580/580/an58-mjhp-b.mfc=Features/000000000.chunk[8950,9267]
|
||||
An4/70/70/cen4-fjam-b.mfc=Features/000000000.chunk[9268,9595]
|
||||
An4/528/528/an171-mjda-b.mfc=Features/000000000.chunk[9596,9963]
|
||||
An4/901/901/an35-mtje-b.mfc=Features/000000000.chunk[9964,10371]
|
||||
An4/776/776/cen1-mrjc2-b.mfc=Features/000000000.chunk[10372,10779]
|
||||
An4/908/908/cen7-mtje-b.mfc=Features/000000000.chunk[10780,11257]
|
||||
An4/603/603/an316-mkdb-b.mfc=Features/000000000.chunk[11258,11565]
|
||||
An4/544/544/an20-mjdr-b.mfc=Features/000000000.chunk[11566,11853]
|
||||
An4/243/243/cen8-ftal-b.mfc=Features/000000000.chunk[11854,12071]
|
||||
An4/891/891/cen3-mtcv-b.mfc=Features/000000000.chunk[12072,12269]
|
||||
An4/245/245/an212-ftmj-b.mfc=Features/000000000.chunk[12270,12647]
|
||||
An4/156/156/an119-fmjc-b.mfc=Features/000000000.chunk[12648,13055]
|
||||
An4/446/446/cen5-meab-b.mfc=Features/000000000.chunk[13056,13483]
|
||||
An4/801/801/an360-mscg2-b.mfc=Features/000000000.chunk[13484,13601]
|
||||
An4/538/538/cen6-mjda-b.mfc=Features/000000000.chunk[13602,13799]
|
||||
An4/282/282/an1-mblw-b.mfc=Features/000000000.chunk[13800,13947]
|
||||
An4/589/589/cen7-mjhp-b.mfc=Features/000000000.chunk[13948,14275]
|
||||
An4/710/710/an389-mmtm-b.mfc=Features/000000000.chunk[14276,14603]
|
||||
An4/638/638/cen6-mmaf-b.mfc=Features/000000000.chunk[14604,14811]
|
||||
An4/874/874/an334-msrb-b.mfc=Features/000000000.chunk[14812,15029]
|
||||
An4/40/40/an40-fejs-b.mfc=Features/000000000.chunk[15030,15337]
|
||||
An4/176/176/cen6-fmjd-b.mfc=Features/000000000.chunk[15338,15545]
|
||||
An4/732/732/cen8-mnfe-b.mfc=Features/000000000.chunk[15546,15773]
|
||||
An4/575/575/cen6-mjgk-b.mfc=Features/000000000.chunk[15774,16191]
|
||||
An4/234/234/an329-ftal-b.mfc=Features/000000000.chunk[16192,16429]
|
||||
An4/497/497/cen4-mfaa-b.mfc=Features/000000000.chunk[16430,16687]
|
||||
An4/619/619/an189-mkem-b.mfc=Features/000000000.chunk[16688,16785]
|
||||
An4/303/303/cen4-mbmg-b.mfc=Features/000000000.chunk[16786,17093]
|
||||
An4/502/502/an196-mgah-b.mfc=Features/000000000.chunk[17094,17291]
|
||||
An4/436/436/cen8-mdxs-b.mfc=Features/000000000.chunk[17292,17619]
|
||||
An4/889/889/cen1-mtcv-b.mfc=Features/000000000.chunk[17620,18227]
|
||||
An4/697/697/an384-mmsh-b.mfc=Features/000000000.chunk[18228,18475]
|
||||
An4/413/413/an108-mdxn-b.mfc=Features/000000000.chunk[18476,18643]
|
||||
An4/165/165/cen8-fmjc-b.mfc=Features/000000000.chunk[18644,18901]
|
||||
An4/186/186/cen3-fnsv-b.mfc=Features/000000000.chunk[18902,19149]
|
||||
An4/274/274/cen1-mblb-b.mfc=Features/000000000.chunk[19150,19417]
|
||||
An4/309/309/an202-mcel-b.mfc=Features/000000000.chunk[19418,19525]
|
||||
An4/725/725/cen1-mnfe-b.mfc=Features/000000000.chunk[19526,19783]
|
||||
An4/699/699/cen1-mmsh-b.mfc=Features/000000000.chunk[19784,20051]
|
||||
An4/833/833/cen6-msjm-b.mfc=Features/000000000.chunk[20052,20299]
|
||||
An4/857/857/cen4-mskh-b.mfc=Features/000000000.chunk[20300,20687]
|
||||
An4/734/734/an82-mnjl-b.mfc=Features/000000000.chunk[20688,21025]
|
||||
An4/340/340/cen3-mcfl-b.mfc=Features/000000000.chunk[21026,21263]
|
||||
An4/36/36/an36-fejs-b.mfc=Features/000000000.chunk[21264,21641]
|
||||
An4/690/690/cen5-mmkw-b.mfc=Features/000000000.chunk[21642,22069]
|
||||
An4/545/545/cen1-mjdr-b.mfc=Features/000000000.chunk[22070,22347]
|
||||
An4/115/115/an132-fkdo-b.mfc=Features/000000000.chunk[22348,22505]
|
||||
An4/48/48/cen8-fejs-b.mfc=Features/000000000.chunk[22506,22723]
|
||||
An4/518/518/an249-mjbh-b.mfc=Features/000000000.chunk[22724,22811]
|
||||
An4/89/89/an6-fjmd-b.mfc=Features/000000000.chunk[22812,22889]
|
||||
An4/668/668/an337-mmdg-b.mfc=Features/000000000.chunk[22890,23007]
|
||||
An4/622/622/cen2-mkem-b.mfc=Features/000000000.chunk[23008,23175]
|
||||
An4/8/8/cen5-fash-b.mfc=Features/000000000.chunk[23176,23623]
|
||||
An4/601/601/cen7-mjjs2-b.mfc=Features/000000000.chunk[23624,24051]
|
||||
An4/480/480/an260-mewl-b.mfc=Features/000000000.chunk[24052,24409]
|
||||
An4/182/182/an184-fnsv-b.mfc=Features/000000000.chunk[24410,24497]
|
||||
An4/179/179/an181-fnsv-b.mfc=Features/000000000.chunk[24498,24825]
|
||||
An4/92/92/an9-fjmd-b.mfc=Features/000000000.chunk[24826,25003]
|
||||
An4/164/164/cen7-fmjc-b.mfc=Features/000000000.chunk[25004,25251]
|
||||
An4/16/16/cen2-fbbh-b.mfc=Features/000000000.chunk[25252,25549]
|
||||
An4/657/657/an49-mmap-b.mfc=Features/000000000.chunk[25550,25867]
|
||||
An4/723/723/an349-mnfe-b.mfc=Features/000000000.chunk[25868,26325]
|
||||
An4/700/700/cen2-mmsh-b.mfc=Features/000000000.chunk[26326,26453]
|
||||
An4/675/675/cen4-mmdg-b.mfc=Features/000000000.chunk[26454,26861]
|
||||
An4/386/386/an112-mdcs2-b.mfc=Features/000000000.chunk[26862,27129]
|
||||
An4/152/152/cen8-flrp-b.mfc=Features/000000000.chunk[27130,27347]
|
||||
An4/740/740/cen3-mnjl-b.mfc=Features/000000000.chunk[27348,27465]
|
||||
An4/370/370/cen7-mcsc-b.mfc=Features/000000000.chunk[27466,27783]
|
||||
An4/683/683/an364-mmkw-b.mfc=Features/000000000.chunk[27784,27861]
|
||||
An4/440/440/an139-meab-b.mfc=Features/000000000.chunk[27862,28089]
|
||||
An4/789/789/cen1-mrmg-b.mfc=Features/000000000.chunk[28090,28427]
|
||||
An4/611/611/cen4-mkdb-b.mfc=Features/000000000.chunk[28428,28685]
|
||||
An4/10/10/an86-fbbh-b.mfc=Features/000000000.chunk[28686,29013]
|
||||
An4/343/343/cen6-mcfl-b.mfc=Features/000000000.chunk[29014,29251]
|
||||
An4/438/438/an137-meab-b.mfc=Features/000000000.chunk[29252,29669]
|
||||
An4/456/456/cen2-meht-b.mfc=Features/000000000.chunk[29670,29817]
|
||||
An4/489/489/an161-mfaa-b.mfc=Features/000000000.chunk[29818,30075]
|
||||
An4/53/53/an295-ffmm-b.mfc=Features/000000000.chunk[30076,30363]
|
||||
An4/702/702/cen4-mmsh-b.mfc=Features/000000000.chunk[30364,30681]
|
||||
An4/777/777/cen2-mrjc2-b.mfc=Features/000000000.chunk[30682,30999]
|
||||
An4/873/873/an333-msrb-b.mfc=Features/000000000.chunk[31000,31097]
|
||||
An4/768/768/cen6-mrcb-b.mfc=Features/000000000.chunk[31098,31275]
|
||||
An4/552/552/cen8-mjdr-b.mfc=Features/000000000.chunk[31276,31503]
|
||||
An4/631/631/an54-mmaf-b.mfc=Features/000000000.chunk[31504,31611]
|
||||
An4/476/476/an256-mewl-b.mfc=Features/000000000.chunk[31612,31689]
|
||||
An4/151/151/cen7-flrp-b.mfc=Features/000000000.chunk[31690,31937]
|
||||
An4/920/920/cen6-mtos-b.mfc=Features/000000000.chunk[31938,32145]
|
||||
An4/358/358/cen8-mcrt-b.mfc=Features/000000000.chunk[32146,32463]
|
||||
An4/177/177/cen7-fmjd-b.mfc=Features/000000000.chunk[32464,32761]
|
||||
An4/635/635/cen3-mmaf-b.mfc=Features/000000000.chunk[32762,32929]
|
||||
An4/719/719/cen8-mmtm-b.mfc=Features/000000000.chunk[32930,33207]
|
||||
An4/750/750/cen1-mrab-b.mfc=Features/000000000.chunk[33208,33395]
|
||||
An4/755/755/cen6-mrab-b.mfc=Features/000000000.chunk[33396,33573]
|
||||
An4/721/721/an347-mnfe-b.mfc=Features/000000000.chunk[33574,33661]
|
||||
An4/380/380/cen4-mdcs-b.mfc=Features/000000000.chunk[33662,33909]
|
||||
An4/625/625/cen6-mkem-b.mfc=Features/000000000.chunk[33910,34117]
|
||||
An4/106/106/cen1-fkai-b.mfc=Features/000000000.chunk[34118,34295]
|
||||
An4/658/658/an50-mmap-b.mfc=Features/000000000.chunk[34296,34513]
|
||||
An4/402/402/an210-mdmc-b.mfc=Features/000000000.chunk[34514,35021]
|
||||
An4/192/192/an91-fplp-b.mfc=Features/000000000.chunk[35022,35469]
|
||||
An4/416/416/cen1-mdxn-b.mfc=Features/000000000.chunk[35470,35757]
|
||||
An4/161/161/cen4-fmjc-b.mfc=Features/000000000.chunk[35758,35965]
|
||||
An4/797/797/an356-mscg2-b.mfc=Features/000000000.chunk[35966,36183]
|
||||
An4/433/433/cen5-mdxs-b.mfc=Features/000000000.chunk[36184,36691]
|
||||
An4/57/57/cen4-ffmm-b.mfc=Features/000000000.chunk[36692,37119]
|
||||
An4/157/157/an120-fmjc-b.mfc=Features/000000000.chunk[37120,37347]
|
||||
An4/272/272/an374-mblb-b.mfc=Features/000000000.chunk[37348,37575]
|
||||
An4/549/549/cen5-mjdr-b.mfc=Features/000000000.chunk[37576,37903]
|
||||
An4/41/41/cen1-fejs-b.mfc=Features/000000000.chunk[37904,38341]
|
||||
An4/290/290/cen4-mblw-b.mfc=Features/000000000.chunk[38342,38549]
|
||||
An4/701/701/cen3-mmsh-b.mfc=Features/000000000.chunk[38550,38677]
|
||||
An4/398/398/an206-mdmc-b.mfc=Features/000000000.chunk[38678,39005]
|
||||
An4/640/640/cen8-mmaf-b.mfc=Features/000000000.chunk[39006,39323]
|
||||
An4/904/904/cen3-mtje-b.mfc=Features/000000000.chunk[39324,39541]
|
||||
An4/686/686/cen1-mmkw-b.mfc=Features/000000000.chunk[39542,40039]
|
||||
An4/97/97/cen5-fjmd-b.mfc=Features/000000000.chunk[40040,40397]
|
||||
An4/259/259/an223-fwxs-b.mfc=Features/000000000.chunk[40398,40495]
|
||||
An4/729/729/cen5-mnfe-b.mfc=Features/000000000.chunk[40496,41033]
|
||||
An4/709/709/an388-mmtm-b.mfc=Features/000000000.chunk[41034,41131]
|
||||
An4/692/692/cen7-mmkw-b.mfc=Features/000000000.chunk[41132,41759]
|
||||
An4/2/2/an253-fash-b.mfc=Features/000000000.chunk[41760,41827]
|
||||
An4/39/39/an39-fejs-b.mfc=Features/000000000.chunk[41828,42095]
|
||||
An4/488/488/cen8-mewl-b.mfc=Features/000000000.chunk[42096,42423]
|
||||
An4/411/411/an106-mdxn-b.mfc=Features/000000000.chunk[42424,42601]
|
||||
An4/905/905/cen4-mtje-b.mfc=Features/000000000.chunk[42602,43069]
|
||||
An4/783/783/cen8-mrjc2-b.mfc=Features/000000000.chunk[43070,43417]
|
||||
An4/205/205/an296-fsaf2-b.mfc=Features/000000000.chunk[43418,43705]
|
||||
An4/788/788/an285-mrmg-b.mfc=Features/000000000.chunk[43706,44053]
|
||||
An4/173/173/cen3-fmjd-b.mfc=Features/000000000.chunk[44054,44251]
|
||||
An4/389/389/an115-mdcs2-b.mfc=Features/000000000.chunk[44252,44579]
|
||||
An4/412/412/an107-mdxn-b.mfc=Features/000000000.chunk[44580,44867]
|
||||
An4/69/69/cen3-fjam-b.mfc=Features/000000000.chunk[44868,45045]
|
||||
An4/84/84/cen5-fjdn-b.mfc=Features/000000000.chunk[45046,45273]
|
||||
An4/826/826/an229-msjm-b.mfc=Features/000000000.chunk[45274,45361]
|
||||
An4/722/722/an348-mnfe-b.mfc=Features/000000000.chunk[45362,45589]
|
||||
An4/490/490/an162-mfaa-b.mfc=Features/000000000.chunk[45590,45897]
|
||||
An4/335/335/an263-mcfl-b.mfc=Features/000000000.chunk[45898,46275]
|
||||
An4/854/854/cen1-mskh-b.mfc=Features/000000000.chunk[46276,46503]
|
||||
An4/334/334/an262-mcfl-b.mfc=Features/000000000.chunk[46504,46851]
|
||||
An4/403/403/cen1-mdmc-b.mfc=Features/000000000.chunk[46852,47079]
|
||||
An4/46/46/cen6-fejs-b.mfc=Features/000000000.chunk[47080,47277]
|
||||
An4/154/154/an117-fmjc-b.mfc=Features/000000000.chunk[47278,47595]
|
||||
An4/565/565/cen8-mjes-b.mfc=Features/000000000.chunk[47596,47843]
|
||||
An4/251/251/cen3-ftmj-b.mfc=Features/000000000.chunk[47844,48071]
|
||||
An4/139/139/an21-flrp-b.mfc=Features/000000000.chunk[48072,48479]
|
||||
An4/6/6/cen2-fash-b.mfc=Features/000000000.chunk[48480,48607]
|
||||
An4/76/76/an122-fjdn-b.mfc=Features/000000000.chunk[48608,48765]
|
||||
An4/817/817/cen3-msct-b.mfc=Features/000000000.chunk[48766,48913]
|
||||
An4/328/328/cen4-mcen-b.mfc=Features/000000000.chunk[48914,49161]
|
||||
An4/293/293/cen7-mblw-b.mfc=Features/000000000.chunk[49162,49409]
|
||||
An4/214/214/cen5-fsaf2-b.mfc=Features/000000000.chunk[49410,49797]
|
||||
An4/91/91/an8-fjmd-b.mfc=Features/000000000.chunk[49798,49975]
|
||||
An4/820/820/cen6-msct-b.mfc=Features/000000000.chunk[49976,50213]
|
||||
An4/300/300/cen1-mbmg-b.mfc=Features/000000000.chunk[50214,50491]
|
||||
An4/18/18/cen4-fbbh-b.mfc=Features/000000000.chunk[50492,50829]
|
||||
An4/526/526/cen7-mjbh-b.mfc=Features/000000000.chunk[50830,51067]
|
||||
An4/408/408/cen6-mdmc-b.mfc=Features/000000000.chunk[51068,51285]
|
||||
An4/169/169/an194-fmjd-b.mfc=Features/000000000.chunk[51286,51553]
|
||||
An4/939/939/an154-mwhw-b.mfc=Features/000000000.chunk[51554,51841]
|
||||
An4/931/931/cen4-mtxj-b.mfc=Features/000000000.chunk[51842,52299]
|
||||
An4/758/758/an101-mrcb-b.mfc=Features/000000000.chunk[52300,52647]
|
||||
An4/781/781/cen6-mrjc2-b.mfc=Features/000000000.chunk[52648,52875]
|
||||
An4/321/321/an127-mcen-b.mfc=Features/000000000.chunk[52876,52973]
|
||||
An4/199/199/cen3-fplp-b.mfc=Features/000000000.chunk[52974,53271]
|
||||
An4/494/494/cen1-mfaa-b.mfc=Features/000000000.chunk[53272,53469]
|
||||
An4/560/560/cen3-mjes-b.mfc=Features/000000000.chunk[53470,53547]
|
||||
An4/713/713/cen2-mmtm-b.mfc=Features/000000000.chunk[53548,53855]
|
||||
An4/938/938/an153-mwhw-b.mfc=Features/000000000.chunk[53856,54143]
|
||||
An4/163/163/cen6-fmjc-b.mfc=Features/000000000.chunk[54144,54321]
|
||||
An4/338/338/cen1-mcfl-b.mfc=Features/000000000.chunk[54322,54569]
|
||||
An4/775/775/an240-mrjc2-b.mfc=Features/000000000.chunk[54570,54777]
|
||||
An4/264/264/cen3-fwxs-b.mfc=Features/000000000.chunk[54778,54925]
|
||||
An4/224/224/cen2-fsrb-b.mfc=Features/000000000.chunk[54926,55233]
|
||||
An4/166/166/an191-fmjd-b.mfc=Features/000000000.chunk[55234,55321]
|
||||
An4/80/80/cen1-fjdn-b.mfc=Features/000000000.chunk[55322,55469]
|
||||
An4/426/426/an28-mdxs-b.mfc=Features/000000000.chunk[55470,55577]
|
||||
An4/737/737/an85-mnjl-b.mfc=Features/000000000.chunk[55578,55965]
|
||||
An4/919/919/cen5-mtos-b.mfc=Features/000000000.chunk[55966,56363]
|
||||
An4/102/102/an312-fkai-b.mfc=Features/000000000.chunk[56364,56751]
|
||||
An4/743/743/cen7-mnjl-b.mfc=Features/000000000.chunk[56752,57129]
|
||||
An4/948/948/cen8-mwhw-b.mfc=Features/000000000.chunk[57130,57347]
|
||||
An4/17/17/cen3-fbbh-b.mfc=Features/000000000.chunk[57348,57575]
|
||||
An4/11/11/an87-fbbh-b.mfc=Features/000000000.chunk[57576,57743]
|
||||
An4/344/344/cen7-mcfl-b.mfc=Features/000000000.chunk[57744,58111]
|
||||
An4/359/359/an231-mcsc-b.mfc=Features/000000000.chunk[58112,58329]
|
||||
An4/203/203/cen7-fplp-b.mfc=Features/000000000.chunk[58330,58877]
|
||||
An4/704/704/cen6-mmsh-b.mfc=Features/000000000.chunk[58878,59035]
|
||||
An4/331/331/cen7-mcen-b.mfc=Features/000000000.chunk[59036,59323]
|
||||
An4/736/736/an84-mnjl-b.mfc=Features/000000000.chunk[59324,59511]
|
||||
An4/121/121/cen3-fkdo-b.mfc=Features/000000000.chunk[59512,59769]
|
||||
An4/574/574/cen5-mjgk-b.mfc=Features/000000000.chunk[59770,59977]
|
||||
An4/143/143/an24-flrp-b.mfc=Features/000000000.chunk[59978,60065]
|
||||
An4/209/209/an300-fsaf2-b.mfc=Features/000000000.chunk[60066,60473]
|
||||
An4/367/367/cen4-mcsc-b.mfc=Features/000000000.chunk[60474,60731]
|
||||
An4/38/38/an38-fejs-b.mfc=Features/000000000.chunk[60732,60809]
|
||||
An4/390/390/cen1-mdcs2-b.mfc=Features/000000000.chunk[60810,61057]
|
||||
An4/756/756/cen7-mrab-b.mfc=Features/000000000.chunk[61058,61275]
|
||||
An4/555/555/an158-mjes-b.mfc=Features/000000000.chunk[61276,61613]
|
||||
An4/680/680/an361-mmkw-b.mfc=Features/000000000.chunk[61614,62041]
|
||||
An4/578/578/an56-mjhp-b.mfc=Features/000000000.chunk[62042,62419]
|
||||
An4/655/655/an47-mmap-b.mfc=Features/000000000.chunk[62420,62667]
|
||||
An4/646/646/cen1-mmal-b.mfc=Features/000000000.chunk[62668,63035]
|
||||
An4/720/720/an346-mnfe-b.mfc=Features/000000000.chunk[63036,63453]
|
||||
An4/608/608/cen1-mkdb-b.mfc=Features/000000000.chunk[63454,63721]
|
||||
An4/441/441/an140-meab-b.mfc=Features/000000000.chunk[63722,64299]
|
||||
An4/356/356/cen6-mcrt-b.mfc=Features/000000000.chunk[64300,64547]
|
||||
An4/926/926/an379-mtxj-b.mfc=Features/000000000.chunk[64548,64625]
|
||||
An4/541/541/an16-mjdr-b.mfc=Features/000000000.chunk[64626,64893]
|
||||
An4/195/195/an94-fplp-b.mfc=Features/000000000.chunk[64894,65441]
|
||||
An4/591/591/an176-mjjs2-b.mfc=Features/000000000.chunk[65442,65789]
|
||||
An4/9/9/cen7-fash-b.mfc=Features/000000000.chunk[65790,66037]
|
||||
An4/484/484/cen4-mewl-b.mfc=Features/000000000.chunk[66038,66525]
|
||||
An4/537/537/cen5-mjda-b.mfc=Features/000000000.chunk[66526,66933]
|
||||
An4/242/242/cen7-ftal-b.mfc=Features/000000000.chunk[66934,67171]
|
||||
An4/848/848/cen8-msjr-b.mfc=Features/000000000.chunk[67172,67409]
|
||||
An4/220/220/an168-fsrb-b.mfc=Features/000000000.chunk[67410,67757]
|
||||
An4/906/906/cen5-mtje-b.mfc=Features/000000000.chunk[67758,68185]
|
||||
An4/444/444/cen3-meab-b.mfc=Features/000000000.chunk[68186,68373]
|
||||
An4/88/88/an10-fjmd-b.mfc=Features/000000000.chunk[68374,68531]
|
||||
An4/561/561/cen4-mjes-b.mfc=Features/000000000.chunk[68532,68919]
|
||||
An4/728/728/cen4-mnfe-b.mfc=Features/000000000.chunk[68920,69347]
|
||||
An4/784/784/an281-mrmg-b.mfc=Features/000000000.chunk[69348,69485]
|
||||
An4/55/55/cen2-ffmm-b.mfc=Features/000000000.chunk[69486,69983]
|
||||
An4/593/593/an178-mjjs2-b.mfc=Features/000000000.chunk[69984,70061]
|
||||
An4/327/327/cen3-mcen-b.mfc=Features/000000000.chunk[70062,70309]
|
||||
An4/4/4/an255-fash-b.mfc=Features/000000000.chunk[70310,70567]
|
||||
An4/922/922/cen8-mtos-b.mfc=Features/000000000.chunk[70568,70775]
|
||||
An4/229/229/cen7-fsrb-b.mfc=Features/000000000.chunk[70776,71253]
|
||||
An4/297/297/an268-mbmg-b.mfc=Features/000000000.chunk[71254,71651]
|
||||
An4/215/215/cen6-fsaf2-b.mfc=Features/000000000.chunk[71652,71839]
|
||||
An4/567/567/an217-mjgk-b.mfc=Features/000000000.chunk[71840,71987]
|
||||
An4/96/96/cen4-fjmd-b.mfc=Features/000000000.chunk[71988,72335]
|
||||
An4/846/846/cen6-msjr-b.mfc=Features/000000000.chunk[72336,72543]
|
||||
An4/850/850/an96-mskh-b.mfc=Features/000000000.chunk[72544,72621]
|
||||
An4/492/492/an164-mfaa-b.mfc=Features/000000000.chunk[72622,72859]
|
||||
An4/661/661/cen3-mmap-b.mfc=Features/000000000.chunk[72860,72987]
|
||||
An4/200/200/cen4-fplp-b.mfc=Features/000000000.chunk[72988,73485]
|
||||
An4/82/82/cen3-fjdn-b.mfc=Features/000000000.chunk[73486,73583]
|
||||
An4/936/936/an151-mwhw-b.mfc=Features/000000000.chunk[73584,73891]
|
||||
An4/60/60/cen7-ffmm-b.mfc=Features/000000000.chunk[73892,74379]
|
||||
An4/183/183/an185-fnsv-b.mfc=Features/000000000.chunk[74380,74477]
|
||||
An4/667/667/an336-mmdg-b.mfc=Features/000000000.chunk[74478,74785]
|
||||
An4/576/576/cen7-mjgk-b.mfc=Features/000000000.chunk[74786,74993]
|
||||
An4/212/212/cen3-fsaf2-b.mfc=Features/000000000.chunk[74994,75101]
|
||||
An4/779/779/cen4-mrjc2-b.mfc=Features/000000000.chunk[75102,75449]
|
||||
An4/418/418/cen3-mdxn-b.mfc=Features/000000000.chunk[75450,75637]
|
||||
An4/636/636/cen4-mmaf-b.mfc=Features/000000000.chunk[75638,75935]
|
||||
An4/257/257/an221-fwxs-b.mfc=Features/000000000.chunk[75936,76253]
|
||||
An4/59/59/cen6-ffmm-b.mfc=Features/000000000.chunk[76254,76481]
|
||||
An4/899/899/an33-mtje-b.mfc=Features/000000000.chunk[76482,76879]
|
||||
An4/886/886/an303-mtcv-b.mfc=Features/000000000.chunk[76880,77307]
|
||||
An4/932/932/cen5-mtxj-b.mfc=Features/000000000.chunk[77308,77735]
|
||||
An4/336/336/an264-mcfl-b.mfc=Features/000000000.chunk[77736,77813]
|
||||
An4/877/877/cen2-msrb-b.mfc=Features/000000000.chunk[77814,78051]
|
||||
An4/629/629/an52-mmaf-b.mfc=Features/000000000.chunk[78052,78199]
|
||||
An4/767/767/cen5-mrcb-b.mfc=Features/000000000.chunk[78200,78547]
|
||||
An4/374/374/an243-mdcs-b.mfc=Features/000000000.chunk[78548,78635]
|
||||
An4/437/437/an136-meab-b.mfc=Features/000000000.chunk[78636,79063]
|
||||
An4/202/202/cen6-fplp-b.mfc=Features/000000000.chunk[79064,79451]
|
||||
An4/29/29/cen2-fclc-b.mfc=Features/000000000.chunk[79452,79699]
|
||||
An4/669/669/an338-mmdg-b.mfc=Features/000000000.chunk[79700,80017]
|
||||
An4/216/216/cen7-fsaf2-b.mfc=Features/000000000.chunk[80018,80395]
|
||||
An4/227/227/cen5-fsrb-b.mfc=Features/000000000.chunk[80396,80903]
|
||||
An4/864/864/an278-msmn-b.mfc=Features/000000000.chunk[80904,81311]
|
||||
An4/794/794/cen6-mrmg-b.mfc=Features/000000000.chunk[81312,81549]
|
||||
An4/865/865/an279-msmn-b.mfc=Features/000000000.chunk[81550,81837]
|
||||
An4/111/111/cen6-fkai-b.mfc=Features/000000000.chunk[81838,82015]
|
||||
An4/774/774/an239-mrjc2-b.mfc=Features/000000000.chunk[82016,82293]
|
||||
An4/831/831/cen4-msjm-b.mfc=Features/000000000.chunk[82294,82481]
|
||||
An4/793/793/cen5-mrmg-b.mfc=Features/000000000.chunk[82482,83049]
|
||||
An4/301/301/cen2-mbmg-b.mfc=Features/000000000.chunk[83050,83237]
|
||||
An4/325/325/cen1-mcen-b.mfc=Features/000000000.chunk[83238,83485]
|
||||
An4/210/210/cen1-fsaf2-b.mfc=Features/000000000.chunk[83486,83863]
|
||||
An4/117/117/an134-fkdo-b.mfc=Features/000000000.chunk[83864,83991]
|
||||
An4/388/388/an114-mdcs2-b.mfc=Features/000000000.chunk[83992,84289]
|
||||
An4/718/718/cen7-mmtm-b.mfc=Features/000000000.chunk[84290,84617]
|
||||
An4/174/174/cen4-fmjd-b.mfc=Features/000000000.chunk[84618,84955]
|
||||
An4/652/652/cen7-mmal-b.mfc=Features/000000000.chunk[84956,85233]
|
||||
An4/228/228/cen6-fsrb-b.mfc=Features/000000000.chunk[85234,85451]
|
||||
An4/373/373/an242-mdcs-b.mfc=Features/000000000.chunk[85452,85729]
|
||||
An4/175/175/cen5-fmjd-b.mfc=Features/000000000.chunk[85730,86147]
|
||||
An4/184/184/cen1-fnsv-b.mfc=Features/000000000.chunk[86148,86505]
|
||||
An4/393/393/cen4-mdcs2-b.mfc=Features/000000000.chunk[86506,86723]
|
||||
An4/319/319/cen8-mcel-b.mfc=Features/000000000.chunk[86724,86961]
|
||||
An4/291/291/cen5-mblw-b.mfc=Features/000000000.chunk[86962,87079]
|
||||
An4/584/584/cen2-mjhp-b.mfc=Features/000000000.chunk[87080,87287]
|
||||
An4/827/827/an230-msjm-b.mfc=Features/000000000.chunk[87288,87385]
|
||||
An4/628/628/an51-mmaf-b.mfc=Features/000000000.chunk[87386,87733]
|
||||
An4/295/295/an266-mbmg-b.mfc=Features/000000000.chunk[87734,87901]
|
||||
An4/317/317/cen5-mcel-b.mfc=Features/000000000.chunk[87902,88269]
|
||||
An4/431/431/cen3-mdxs-b.mfc=Features/000000000.chunk[88270,88517]
|
||||
An4/52/52/an294-ffmm-b.mfc=Features/000000000.chunk[88518,88785]
|
||||
An4/491/491/an163-mfaa-b.mfc=Features/000000000.chunk[88786,89023]
|
||||
An4/844/844/cen4-msjr-b.mfc=Features/000000000.chunk[89024,89261]
|
||||
An4/116/116/an133-fkdo-b.mfc=Features/000000000.chunk[89262,89369]
|
||||
An4/61/61/cen8-ffmm-b.mfc=Features/000000000.chunk[89370,89717]
|
||||
An4/118/118/an135-fkdo-b.mfc=Features/000000000.chunk[89718,89995]
|
||||
An4/131/131/an65-flmm2-b.mfc=Features/000000000.chunk[89996,90233]
|
||||
An4/878/878/cen3-msrb-b.mfc=Features/000000000.chunk[90234,90431]
|
||||
An4/352/352/cen2-mcrt-b.mfc=Features/000000000.chunk[90432,90739]
|
||||
An4/132/132/cen1-flmm2-b.mfc=Features/000000000.chunk[90740,91127]
|
||||
An4/230/230/cen8-fsrb-b.mfc=Features/000000000.chunk[91128,91425]
|
||||
An4/933/933/cen6-mtxj-b.mfc=Features/000000000.chunk[91426,91603]
|
||||
An4/535/535/cen3-mjda-b.mfc=Features/000000000.chunk[91604,91741]
|
||||
An4/531/531/an174-mjda-b.mfc=Features/000000000.chunk[91742,91859]
|
||||
An4/525/525/cen6-mjbh-b.mfc=Features/000000000.chunk[91860,92057]
|
||||
An4/74/74/cen8-fjam-b.mfc=Features/000000000.chunk[92058,92265]
|
||||
An4/644/644/an324-mmal-b.mfc=Features/000000000.chunk[92266,92363]
|
||||
An4/240/240/cen5-ftal-b.mfc=Features/000000000.chunk[92364,92601]
|
||||
An4/726/726/cen2-mnfe-b.mfc=Features/000000000.chunk[92602,92809]
|
||||
An4/425/425/an27-mdxs-b.mfc=Features/000000000.chunk[92810,93297]
|
||||
An4/612/612/cen5-mkdb-b.mfc=Features/000000000.chunk[93298,93655]
|
||||
An4/698/698/an385-mmsh-b.mfc=Features/000000000.chunk[93656,93733]
|
||||
An4/787/787/an284-mrmg-b.mfc=Features/000000000.chunk[93734,93881]
|
||||
An4/666/666/cen8-mmap-b.mfc=Features/000000000.chunk[93882,94099]
|
||||
An4/31/31/cen4-fclc-b.mfc=Features/000000000.chunk[94100,94467]
|
||||
An4/470/470/cen3-mema-b.mfc=Features/000000000.chunk[94468,94615]
|
||||
An4/782/782/cen7-mrjc2-b.mfc=Features/000000000.chunk[94616,94943]
|
||||
An4/824/824/an227-msjm-b.mfc=Features/000000000.chunk[94944,95211]
|
||||
An4/287/287/cen1-mblw-b.mfc=Features/000000000.chunk[95212,95379]
|
||||
An4/748/748/an74-mrab-b.mfc=Features/000000000.chunk[95380,95667]
|
||||
An4/241/241/cen6-ftal-b.mfc=Features/000000000.chunk[95668,95835]
|
||||
An4/832/832/cen5-msjm-b.mfc=Features/000000000.chunk[95836,96123]
|
||||
An4/664/664/cen6-mmap-b.mfc=Features/000000000.chunk[96124,96311]
|
||||
An4/347/347/an142-mcrt-b.mfc=Features/000000000.chunk[96312,96509]
|
||||
An4/377/377/cen1-mdcs-b.mfc=Features/000000000.chunk[96510,96757]
|
||||
An4/124/124/cen6-fkdo-b.mfc=Features/000000000.chunk[96758,96995]
|
||||
An4/724/724/an350-mnfe-b.mfc=Features/000000000.chunk[96996,97133]
|
||||
An4/442/442/cen1-meab-b.mfc=Features/000000000.chunk[97134,97601]
|
||||
An4/742/742/cen6-mnjl-b.mfc=Features/000000000.chunk[97602,97809]
|
||||
An4/500/500/cen7-mfaa-b.mfc=Features/000000000.chunk[97810,98047]
|
||||
An4/909/909/cen8-mtje-b.mfc=Features/000000000.chunk[98048,98245]
|
||||
An4/626/626/cen7-mkem-b.mfc=Features/000000000.chunk[98246,98413]
|
||||
An4/627/627/cen8-mkem-b.mfc=Features/000000000.chunk[98414,98671]
|
||||
An4/401/401/an209-mdmc-b.mfc=Features/000000000.chunk[98672,98979]
|
||||
An4/838/838/an353-msjr-b.mfc=Features/000000000.chunk[98980,99057]
|
||||
An4/415/415/an110-mdxn-b.mfc=Features/000000000.chunk[99058,99265]
|
||||
An4/225/225/cen3-fsrb-b.mfc=Features/000000000.chunk[99266,99473]
|
||||
An4/595/595/an180-mjjs2-b.mfc=Features/000000000.chunk[99474,99571]
|
||||
An4/673/673/cen2-mmdg-b.mfc=Features/000000000.chunk[99572,99859]
|
||||
An4/162/162/cen5-fmjc-b.mfc=Features/000000000.chunk[99860,100147]
|
||||
An4/679/679/cen8-mmdg-b.mfc=Features/000000000.chunk[100148,100445]
|
||||
An4/590/590/cen8-mjhp-b.mfc=Features/000000000.chunk[100446,100673]
|
||||
An4/299/299/an270-mbmg-b.mfc=Features/000000000.chunk[100674,100881]
|
||||
An4/805/805/cen4-mscg2-b.mfc=Features/000000000.chunk[100882,101249]
|
||||
An4/197/197/cen1-fplp-b.mfc=Features/000000000.chunk[101250,101807]
|
||||
An4/267/267/cen6-fwxs-b.mfc=Features/000000000.chunk[101808,102115]
|
||||
An4/630/630/an53-mmaf-b.mfc=Features/000000000.chunk[102116,102463]
|
||||
An4/888/888/an305-mtcv-b.mfc=Features/000000000.chunk[102464,102691]
|
||||
An4/812/812/an343-msct-b.mfc=Features/000000000.chunk[102692,102879]
|
||||
An4/233/233/an328-ftal-b.mfc=Features/000000000.chunk[102880,103207]
|
||||
An4/529/529/an172-mjda-b.mfc=Features/000000000.chunk[103208,103425]
|
||||
An4/707/707/an386-mmtm-b.mfc=Features/000000000.chunk[103426,103663]
|
||||
An4/592/592/an177-mjjs2-b.mfc=Features/000000000.chunk[103664,103791]
|
||||
An4/130/130/an64-flmm2-b.mfc=Features/000000000.chunk[103792,103919]
|
||||
An4/310/310/an203-mcel-b.mfc=Features/000000000.chunk[103920,104167]
|
||||
An4/170/170/an195-fmjd-b.mfc=Features/000000000.chunk[104168,104485]
|
||||
An4/119/119/cen1-fkdo-b.mfc=Features/000000000.chunk[104486,104753]
|
||||
An4/345/345/cen8-mcfl-b.mfc=Features/000000000.chunk[104754,105011]
|
||||
An4/365/365/cen2-mcsc-b.mfc=Features/000000000.chunk[105012,105319]
|
||||
An4/735/735/an83-mnjl-b.mfc=Features/000000000.chunk[105320,105467]
|
||||
An4/633/633/cen1-mmaf-b.mfc=Features/000000000.chunk[105468,105795]
|
||||
An4/654/654/an46-mmap-b.mfc=Features/000000000.chunk[105796,105893]
|
||||
An4/149/149/cen5-flrp-b.mfc=Features/000000000.chunk[105894,106341]
|
||||
An4/751/751/cen2-mrab-b.mfc=Features/000000000.chunk[106342,106519]
|
||||
An4/238/238/cen3-ftal-b.mfc=Features/000000000.chunk[106520,106657]
|
||||
An4/360/360/an232-mcsc-b.mfc=Features/000000000.chunk[106658,106745]
|
||||
An4/881/881/cen6-msrb-b.mfc=Features/000000000.chunk[106746,106973]
|
||||
An4/757/757/cen8-mrab-b.mfc=Features/000000000.chunk[106974,107171]
|
||||
An4/400/400/an208-mdmc-b.mfc=Features/000000000.chunk[107172,107549]
|
||||
An4/168/168/an193-fmjd-b.mfc=Features/000000000.chunk[107550,107977]
|
||||
An4/897/897/an31-mtje-b.mfc=Features/000000000.chunk[107978,108345]
|
||||
An4/530/530/an173-mjda-b.mfc=Features/000000000.chunk[108346,108653]
|
||||
An4/566/566/an216-mjgk-b.mfc=Features/000000000.chunk[108654,108911]
|
||||
An4/95/95/cen3-fjmd-b.mfc=Features/000000000.chunk[108912,109009]
|
||||
An4/43/43/cen3-fejs-b.mfc=Features/000000000.chunk[109010,109287]
|
||||
An4/753/753/cen4-mrab-b.mfc=Features/000000000.chunk[109288,109615]
|
||||
An4/405/405/cen3-mdmc-b.mfc=Features/000000000.chunk[109616,109743]
|
||||
An4/66/66/an80-fjam-b.mfc=Features/000000000.chunk[109744,109831]
|
||||
An4/858/858/cen5-mskh-b.mfc=Features/000000000.chunk[109832,110219]
|
||||
An4/852/852/an98-mskh-b.mfc=Features/000000000.chunk[110220,110307]
|
||||
An4/237/237/cen2-ftal-b.mfc=Features/000000000.chunk[110308,110485]
|
||||
An4/602/602/cen8-mjjs2-b.mfc=Features/000000000.chunk[110486,110903]
|
||||
An4/842/842/cen2-msjr-b.mfc=Features/000000000.chunk[110904,111131]
|
||||
An4/13/13/an89-fbbh-b.mfc=Features/000000000.chunk[111132,111769]
|
||||
An4/283/283/an2-mblw-b.mfc=Features/000000000.chunk[111770,111917]
|
||||
An4/460/460/cen6-meht-b.mfc=Features/000000000.chunk[111918,112105]
|
||||
An4/20/20/cen6-fbbh-b.mfc=Features/000000000.chunk[112106,112343]
|
||||
An4/308/308/an201-mcel-b.mfc=Features/000000000.chunk[112344,112451]
|
||||
An4/471/471/cen4-mema-b.mfc=Features/000000000.chunk[112452,112819]
|
||||
An4/546/546/cen2-mjdr-b.mfc=Features/000000000.chunk[112820,113017]
|
||||
An4/468/468/cen1-mema-b.mfc=Features/000000000.chunk[113018,113255]
|
||||
An4/236/236/cen1-ftal-b.mfc=Features/000000000.chunk[113256,113453]
|
||||
An4/372/372/an241-mdcs-b.mfc=Features/000000000.chunk[113454,113691]
|
||||
An4/395/395/cen6-mdcs2-b.mfc=Features/000000000.chunk[113692,113869]
|
||||
An4/945/945/cen5-mwhw-b.mfc=Features/000000000.chunk[113870,114177]
|
||||
An4/754/754/cen5-mrab-b.mfc=Features/000000000.chunk[114178,114445]
|
||||
An4/509/509/cen3-mgah-b.mfc=Features/000000000.chunk[114446,114643]
|
||||
An4/556/556/an159-mjes-b.mfc=Features/000000000.chunk[114644,114721]
|
||||
An4/594/594/an179-mjjs2-b.mfc=Features/000000000.chunk[114722,115229]
|
||||
An4/487/487/cen7-mewl-b.mfc=Features/000000000.chunk[115230,115487]
|
||||
An4/684/684/an365-mmkw-b.mfc=Features/000000000.chunk[115488,115915]
|
||||
An4/855/855/cen2-mskh-b.mfc=Features/000000000.chunk[115916,116113]
|
||||
An4/439/439/an138-meab-b.mfc=Features/000000000.chunk[116114,116401]
|
||||
An4/354/354/cen4-mcrt-b.mfc=Features/000000000.chunk[116402,116879]
|
||||
An4/26/26/an149-fclc-b.mfc=Features/000000000.chunk[116880,117107]
|
||||
An4/588/588/cen6-mjhp-b.mfc=Features/000000000.chunk[117108,117275]
|
||||
An4/823/823/an226-msjm-b.mfc=Features/000000000.chunk[117276,117363]
|
||||
An4/68/68/cen2-fjam-b.mfc=Features/000000000.chunk[117364,117511]
|
||||
An4/101/101/an311-fkai-b.mfc=Features/000000000.chunk[117512,117819]
|
||||
An4/98/98/cen6-fjmd-b.mfc=Features/000000000.chunk[117820,118017]
|
||||
An4/450/450/an66-meht-b.mfc=Features/000000000.chunk[118018,118285]
|
||||
An4/662/662/cen4-mmap-b.mfc=Features/000000000.chunk[118286,118753]
|
||||
An4/452/452/an68-meht-b.mfc=Features/000000000.chunk[118754,118961]
|
||||
An4/687/687/cen2-mmkw-b.mfc=Features/000000000.chunk[118962,119419]
|
||||
An4/218/218/an166-fsrb-b.mfc=Features/000000000.chunk[119420,119667]
|
||||
An4/314/314/cen2-mcel-b.mfc=Features/000000000.chunk[119668,119955]
|
||||
An4/33/33/cen6-fclc-b.mfc=Features/000000000.chunk[119956,120163]
|
||||
An4/424/424/an26-mdxs-b.mfc=Features/000000000.chunk[120164,120281]
|
||||
An4/615/615/cen8-mkdb-b.mfc=Features/000000000.chunk[120282,120529]
|
||||
An4/298/298/an269-mbmg-b.mfc=Features/000000000.chunk[120530,120927]
|
||||
An4/527/527/cen8-mjbh-b.mfc=Features/000000000.chunk[120928,121145]
|
||||
An4/15/15/cen1-fbbh-b.mfc=Features/000000000.chunk[121146,121383]
|
||||
An4/910/910/an366-mtos-b.mfc=Features/000000000.chunk[121384,121661]
|
||||
An4/158/158/cen1-fmjc-b.mfc=Features/000000000.chunk[121662,121949]
|
||||
An4/246/246/an213-ftmj-b.mfc=Features/000000000.chunk[121950,122347]
|
||||
An4/849/849/an100-mskh-b.mfc=Features/000000000.chunk[122348,122425]
|
||||
An4/56/56/cen3-ffmm-b.mfc=Features/000000000.chunk[122426,122623]
|
||||
An4/404/404/cen2-mdmc-b.mfc=Features/000000000.chunk[122624,122871]
|
||||
An4/351/351/cen1-mcrt-b.mfc=Features/000000000.chunk[122872,123289]
|
||||
An4/863/863/an277-msmn-b.mfc=Features/000000000.chunk[123290,123377]
|
||||
An4/322/322/an128-mcen-b.mfc=Features/000000000.chunk[123378,123775]
|
||||
An4/419/419/cen4-mdxn-b.mfc=Features/000000000.chunk[123776,124043]
|
||||
An4/86/86/cen7-fjdn-b.mfc=Features/000000000.chunk[124044,124301]
|
||||
An4/311/311/an204-mcel-b.mfc=Features/000000000.chunk[124302,124399]
|
||||
An4/142/142/an23-flrp-b.mfc=Features/000000000.chunk[124400,124807]
|
||||
An4/25/25/an148-fclc-b.mfc=Features/000000000.chunk[124808,125205]
|
||||
An4/947/947/cen7-mwhw-b.mfc=Features/000000000.chunk[125206,125493]
|
||||
An4/250/250/cen2-ftmj-b.mfc=Features/000000000.chunk[125494,125781]
|
||||
An4/381/381/cen5-mdcs-b.mfc=Features/000000000.chunk[125782,126059]
|
||||
An4/927/927/an380-mtxj-b.mfc=Features/000000000.chunk[126060,126247]
|
||||
An4/941/941/cen1-mwhw-b.mfc=Features/000000000.chunk[126248,126425]
|
||||
An4/769/769/cen7-mrcb-b.mfc=Features/000000000.chunk[126426,126863]
|
||||
An4/685/685/an59-mmkw-b.mfc=Features/000000000.chunk[126864,127411]
|
||||
An4/72/72/cen6-fjam-b.mfc=Features/000000000.chunk[127412,127699]
|
||||
An4/420/420/cen5-mdxn-b.mfc=Features/000000000.chunk[127700,127907]
|
||||
An4/457/457/cen3-meht-b.mfc=Features/000000000.chunk[127908,128095]
|
||||
An4/279/279/cen6-mblb-b.mfc=Features/000000000.chunk[128096,128263]
|
||||
An4/656/656/an48-mmap-b.mfc=Features/000000000.chunk[128264,128481]
|
||||
An4/773/773/an238-mrjc2-b.mfc=Features/000000000.chunk[128482,128559]
|
||||
An4/562/562/cen5-mjes-b.mfc=Features/000000000.chunk[128560,128797]
|
||||
An4/811/811/an342-msct-b.mfc=Features/000000000.chunk[128798,129205]
|
||||
An4/23/23/an146-fclc-b.mfc=Features/000000000.chunk[129206,129543]
|
||||
An4/391/391/cen2-mdcs2-b.mfc=Features/000000000.chunk[129544,129761]
|
||||
An4/172/172/cen2-fmjd-b.mfc=Features/000000000.chunk[129762,130099]
|
||||
An4/185/185/cen2-fnsv-b.mfc=Features/000000000.chunk[130100,130377]
|
||||
An4/78/78/an124-fjdn-b.mfc=Features/000000000.chunk[130378,130645]
|
||||
An4/148/148/cen4-flrp-b.mfc=Features/000000000.chunk[130646,130943]
|
||||
An4/253/253/cen5-ftmj-b.mfc=Features/000000000.chunk[130944,131361]
|
||||
An4/445/445/cen4-meab-b.mfc=Features/000000000.chunk[131362,131749]
|
||||
An4/523/523/cen4-mjbh-b.mfc=Features/000000000.chunk[131750,131907]
|
||||
An4/524/524/cen5-mjbh-b.mfc=Features/000000000.chunk[131908,132325]
|
||||
An4/428/428/an30-mdxs-b.mfc=Features/000000000.chunk[132326,132423]
|
||||
An4/315/315/cen3-mcel-b.mfc=Features/000000000.chunk[132424,132611]
|
||||
An4/281/281/cen8-mblb-b.mfc=Features/000000000.chunk[132612,132819]
|
||||
An4/570/570/an220-mjgk-b.mfc=Features/000000000.chunk[132820,132987]
|
||||
An4/727/727/cen3-mnfe-b.mfc=Features/000000000.chunk[132988,133155]
|
||||
An4/231/231/an326-ftal-b.mfc=Features/000000000.chunk[133156,133273]
|
||||
An4/193/193/an92-fplp-b.mfc=Features/000000000.chunk[133274,133871]
|
||||
An4/892/892/cen4-mtcv-b.mfc=Features/000000000.chunk[133872,134479]
|
||||
An4/834/834/cen7-msjm-b.mfc=Features/000000000.chunk[134480,134747]
|
||||
An4/144/144/an25-flrp-b.mfc=Features/000000000.chunk[134748,135185]
|
||||
An4/828/828/cen1-msjm-b.mfc=Features/000000000.chunk[135186,135413]
|
||||
An4/934/934/cen7-mtxj-b.mfc=Features/000000000.chunk[135414,135661]
|
||||
An4/387/387/an113-mdcs2-b.mfc=Features/000000000.chunk[135662,136139]
|
||||
An4/434/434/cen6-mdxs-b.mfc=Features/000000000.chunk[136140,136377]
|
||||
An4/469/469/cen2-mema-b.mfc=Features/000000000.chunk[136378,136515]
|
||||
An4/232/232/an327-ftal-b.mfc=Features/000000000.chunk[136516,136673]
|
||||
An4/378/378/cen2-mdcs-b.mfc=Features/000000000.chunk[136674,136831]
|
||||
An4/275/275/cen2-mblb-b.mfc=Features/000000000.chunk[136832,137079]
|
||||
An4/837/837/an352-msjr-b.mfc=Features/000000000.chunk[137080,137207]
|
||||
An4/447/447/cen6-meab-b.mfc=Features/000000000.chunk[137208,137425]
|
||||
An4/521/521/cen2-mjbh-b.mfc=Features/000000000.chunk[137426,137573]
|
||||
An4/733/733/an81-mnjl-b.mfc=Features/000000000.chunk[137574,137791]
|
||||
An4/510/510/cen4-mgah-b.mfc=Features/000000000.chunk[137792,138119]
|
||||
An4/276/276/cen3-mblb-b.mfc=Features/000000000.chunk[138120,138227]
|
||||
An4/894/894/cen6-mtcv-b.mfc=Features/000000000.chunk[138228,138465]
|
||||
An4/741/741/cen5-mnjl-b.mfc=Features/000000000.chunk[138466,138783]
|
||||
An4/898/898/an32-mtje-b.mfc=Features/000000000.chunk[138784,138881]
|
||||
An4/532/532/an175-mjda-b.mfc=Features/000000000.chunk[138882,138959]
|
||||
An4/150/150/cen6-flrp-b.mfc=Features/000000000.chunk[138960,139177]
|
||||
An4/280/280/cen7-mblb-b.mfc=Features/000000000.chunk[139178,139555]
|
||||
An4/902/902/cen1-mtje-b.mfc=Features/000000000.chunk[139556,139813]
|
||||
An4/896/896/cen8-mtcv-b.mfc=Features/000000000.chunk[139814,140101]
|
||||
An4/900/900/an34-mtje-b.mfc=Features/000000000.chunk[140102,140209]
|
||||
An4/323/323/an129-mcen-b.mfc=Features/000000000.chunk[140210,140357]
|
||||
An4/579/579/an57-mjhp-b.mfc=Features/000000000.chunk[140358,140595]
|
||||
An4/451/451/an67-meht-b.mfc=Features/000000000.chunk[140596,140673]
|
||||
An4/830/830/cen3-msjm-b.mfc=Features/000000000.chunk[140674,140851]
|
||||
An4/75/75/an121-fjdn-b.mfc=Features/000000000.chunk[140852,140919]
|
||||
An4/194/194/an93-fplp-b.mfc=Features/000000000.chunk[140920,141027]
|
||||
An4/620/620/an190-mkem-b.mfc=Features/000000000.chunk[141028,141295]
|
||||
An4/266/266/cen5-fwxs-b.mfc=Features/000000000.chunk[141296,141713]
|
||||
An4/659/659/cen1-mmap-b.mfc=Features/000000000.chunk[141714,141931]
|
||||
An4/903/903/cen2-mtje-b.mfc=Features/000000000.chunk[141932,142359]
|
||||
An4/189/189/cen6-fnsv-b.mfc=Features/000000000.chunk[142360,142617]
|
||||
An4/271/271/an373-mblb-b.mfc=Features/000000000.chunk[142618,143015]
|
||||
An4/67/67/cen1-fjam-b.mfc=Features/000000000.chunk[143016,143253]
|
||||
An4/219/219/an167-fsrb-b.mfc=Features/000000000.chunk[143254,143511]
|
||||
An4/778/778/cen3-mrjc2-b.mfc=Features/000000000.chunk[143512,143719]
|
||||
An4/814/814/an345-msct-b.mfc=Features/000000000.chunk[143720,144287]
|
||||
An4/829/829/cen2-msjm-b.mfc=Features/000000000.chunk[144288,144525]
|
||||
An4/47/47/cen7-fejs-b.mfc=Features/000000000.chunk[144526,144823]
|
||||
An4/799/799/an358-mscg2-b.mfc=Features/000000000.chunk[144824,145241]
|
||||
An4/804/804/cen3-mscg2-b.mfc=Features/000000000.chunk[145242,145469]
|
||||
An4/329/329/cen5-mcen-b.mfc=Features/000000000.chunk[145470,145887]
|
||||
An4/600/600/cen6-mjjs2-b.mfc=Features/000000000.chunk[145888,146225]
|
||||
An4/876/876/cen1-msrb-b.mfc=Features/000000000.chunk[146226,146493]
|
||||
An4/708/708/an387-mmtm-b.mfc=Features/000000000.chunk[146494,146781]
|
||||
An4/24/24/an147-fclc-b.mfc=Features/000000000.chunk[146782,147069]
|
||||
An4/808/808/cen7-mscg2-b.mfc=Features/000000000.chunk[147070,147457]
|
||||
An4/313/313/cen1-mcel-b.mfc=Features/000000000.chunk[147458,147765]
|
||||
An4/482/482/cen2-mewl-b.mfc=Features/000000000.chunk[147766,147923]
|
||||
An4/51/51/an293-ffmm-b.mfc=Features/000000000.chunk[147924,148261]
|
||||
An4/935/935/cen8-mtxj-b.mfc=Features/000000000.chunk[148262,148529]
|
||||
An4/244/244/an211-ftmj-b.mfc=Features/000000000.chunk[148530,148737]
|
||||
An4/396/396/cen7-mdcs2-b.mfc=Features/000000000.chunk[148738,148945]
|
||||
An4/745/745/an71-mrab-b.mfc=Features/000000000.chunk[148946,149253]
|
||||
An4/569/569/an219-mjgk-b.mfc=Features/000000000.chunk[149254,149331]
|
||||
An4/277/277/cen4-mblb-b.mfc=Features/000000000.chunk[149332,149599]
|
||||
An4/371/371/cen8-mcsc-b.mfc=Features/000000000.chunk[149600,149847]
|
||||
An4/650/650/cen5-mmal-b.mfc=Features/000000000.chunk[149848,150175]
|
||||
An4/135/135/cen4-flmm2-b.mfc=Features/000000000.chunk[150176,150793]
|
||||
An4/206/206/an297-fsaf2-b.mfc=Features/000000000.chunk[150794,151221]
|
||||
An4/294/294/cen8-mblw-b.mfc=Features/000000000.chunk[151222,151419]
|
||||
An4/85/85/cen6-fjdn-b.mfc=Features/000000000.chunk[151420,151567]
|
||||
An4/785/785/an282-mrmg-b.mfc=Features/000000000.chunk[151568,151695]
|
||||
An4/406/406/cen4-mdmc-b.mfc=Features/000000000.chunk[151696,152323]
|
||||
An4/474/474/cen7-mema-b.mfc=Features/000000000.chunk[152324,152561]
|
||||
An4/790/790/cen2-mrmg-b.mfc=Features/000000000.chunk[152562,152919]
|
||||
An4/463/463/an286-mema-b.mfc=Features/000000000.chunk[152920,153027]
|
||||
An4/559/559/cen2-mjes-b.mfc=Features/000000000.chunk[153028,153255]
|
||||
An4/353/353/cen3-mcrt-b.mfc=Features/000000000.chunk[153256,153523]
|
||||
An4/435/435/cen7-mdxs-b.mfc=Features/000000000.chunk[153524,153941]
|
||||
An4/145/145/cen1-flrp-b.mfc=Features/000000000.chunk[153942,154229]
|
||||
An4/278/278/cen5-mblb-b.mfc=Features/000000000.chunk[154230,154557]
|
||||
An4/517/517/an248-mjbh-b.mfc=Features/000000000.chunk[154558,154765]
|
||||
An4/65/65/an79-fjam-b.mfc=Features/000000000.chunk[154766,155073]
|
||||
An4/341/341/cen4-mcfl-b.mfc=Features/000000000.chunk[155074,155411]
|
||||
An4/520/520/cen1-mjbh-b.mfc=Features/000000000.chunk[155412,155679]
|
||||
An4/137/137/cen7-flmm2-b.mfc=Features/000000000.chunk[155680,155967]
|
||||
An4/806/806/cen5-mscg2-b.mfc=Features/000000000.chunk[155968,156415]
|
||||
An4/429/429/cen1-mdxs-b.mfc=Features/000000000.chunk[156416,156933]
|
||||
An4/610/610/cen3-mkdb-b.mfc=Features/000000000.chunk[156934,157151]
|
||||
An4/869/869/cen7-msmn-b.mfc=Features/000000000.chunk[157152,157539]
|
||||
An4/141/141/an22-flrp-b.mfc=Features/000000000.chunk[157540,157637]
|
||||
An4/791/791/cen3-mrmg-b.mfc=Features/000000000.chunk[157638,157805]
|
||||
An4/289/289/cen3-mblw-b.mfc=Features/000000000.chunk[157806,157963]
|
||||
An4/711/711/an390-mmtm-b.mfc=Features/000000000.chunk[157964,158061]
|
||||
An4/432/432/cen4-mdxs-b.mfc=Features/000000000.chunk[158062,158369]
|
||||
An4/350/350/an145-mcrt-b.mfc=Features/000000000.chunk[158370,158557]
|
||||
An4/670/670/an339-mmdg-b.mfc=Features/000000000.chunk[158558,158905]
|
||||
An4/581/581/an59-mjhp-b.mfc=Features/000000000.chunk[158906,159203]
|
||||
An4/461/461/cen7-meht-b.mfc=Features/000000000.chunk[159204,159531]
|
||||
An4/103/103/an313-fkai-b.mfc=Features/000000000.chunk[159532,160009]
|
||||
An4/263/263/cen2-fwxs-b.mfc=Features/000000000.chunk[160010,160317]
|
||||
An4/362/362/an234-mcsc-b.mfc=Features/000000000.chunk[160318,160415]
|
||||
An4/478/478/an258-mewl-b.mfc=Features/000000000.chunk[160416,160633]
|
||||
An4/786/786/an283-mrmg-b.mfc=Features/000000000.chunk[160634,161131]
|
||||
An4/512/512/cen6-mgah-b.mfc=Features/000000000.chunk[161132,161369]
|
||||
An4/847/847/cen7-msjr-b.mfc=Features/000000000.chunk[161370,161647]
|
||||
An4/498/498/cen5-mfaa-b.mfc=Features/000000000.chunk[161648,161975]
|
||||
An4/916/916/cen2-mtos-b.mfc=Features/000000000.chunk[161976,162233]
|
||||
An4/410/410/cen8-mdmc-b.mfc=Features/000000000.chunk[162234,162561]
|
||||
An4/459/459/cen5-meht-b.mfc=Features/000000000.chunk[162562,162859]
|
||||
An4/223/223/cen1-fsrb-b.mfc=Features/000000000.chunk[162860,163137]
|
||||
An4/764/764/cen2-mrcb-b.mfc=Features/000000000.chunk[163138,163325]
|
||||
An4/564/564/cen7-mjes-b.mfc=Features/000000000.chunk[163326,163663]
|
||||
An4/5/5/cen1-fash-b.mfc=Features/000000000.chunk[163664,164011]
|
||||
An4/129/129/an63-flmm2-b.mfc=Features/000000000.chunk[164012,164249]
|
||||
An4/369/369/cen6-mcsc-b.mfc=Features/000000000.chunk[164250,164457]
|
||||
An4/87/87/cen8-fjdn-b.mfc=Features/000000000.chunk[164458,164625]
|
||||
An4/167/167/an192-fmjd-b.mfc=Features/000000000.chunk[164626,165043]
|
||||
An4/598/598/cen4-mjjs2-b.mfc=Features/000000000.chunk[165044,165511]
|
||||
An4/188/188/cen5-fnsv-b.mfc=Features/000000000.chunk[165512,166029]
|
||||
An4/749/749/an75-mrab-b.mfc=Features/000000000.chunk[166030,166347]
|
||||
An4/582/582/an60-mjhp-b.mfc=Features/000000000.chunk[166348,166435]
|
||||
An4/160/160/cen3-fmjc-b.mfc=Features/000000000.chunk[166436,166633]
|
||||
An4/180/180/an182-fnsv-b.mfc=Features/000000000.chunk[166634,166761]
|
||||
An4/682/682/an363-mmkw-b.mfc=Features/000000000.chunk[166762,167379]
|
||||
An4/339/339/cen2-mcfl-b.mfc=Features/000000000.chunk[167380,167907]
|
||||
An4/921/921/cen7-mtos-b.mfc=Features/000000000.chunk[167908,168075]
|
||||
An4/421/421/cen6-mdxn-b.mfc=Features/000000000.chunk[168076,168253]
|
||||
An4/247/247/an214-ftmj-b.mfc=Features/000000000.chunk[168254,168391]
|
||||
An4/815/815/cen1-msct-b.mfc=Features/000000000.chunk[168392,168659]
|
||||
An4/671/671/an340-mmdg-b.mfc=Features/000000000.chunk[168660,168787]
|
||||
An4/616/616/an186-mkem-b.mfc=Features/000000000.chunk[168788,169105]
|
||||
An4/196/196/an95-fplp-b.mfc=Features/000000000.chunk[169106,169733]
|
||||
An4/235/235/an330-ftal-b.mfc=Features/000000000.chunk[169734,169901]
|
||||
An4/268/268/cen7-fwxs-b.mfc=Features/000000000.chunk[169902,170319]
|
||||
An4/506/506/an200-mgah-b.mfc=Features/000000000.chunk[170320,170417]
|
||||
An4/647/647/cen2-mmal-b.mfc=Features/000000000.chunk[170418,170615]
|
||||
An4/127/127/an61-flmm2-b.mfc=Features/000000000.chunk[170616,170873]
|
||||
An4/803/803/cen2-mscg2-b.mfc=Features/000000000.chunk[170874,171151]
|
||||
An4/475/475/cen8-mema-b.mfc=Features/000000000.chunk[171152,171499]
|
||||
An4/472/472/cen5-mema-b.mfc=Features/000000000.chunk[171500,171617]
|
||||
An4/599/599/cen5-mjjs2-b.mfc=Features/000000000.chunk[171618,172245]
|
||||
An4/108/108/cen3-fkai-b.mfc=Features/000000000.chunk[172246,172353]
|
||||
An4/357/357/cen7-mcrt-b.mfc=Features/000000000.chunk[172354,172711]
|
||||
An4/342/342/cen5-mcfl-b.mfc=Features/000000000.chunk[172712,173029]
|
||||
An4/714/714/cen3-mmtm-b.mfc=Features/000000000.chunk[173030,173177]
|
||||
An4/747/747/an73-mrab-b.mfc=Features/000000000.chunk[173178,173325]
|
||||
An4/643/643/an323-mmal-b.mfc=Features/000000000.chunk[173326,173523]
|
||||
An4/99/99/cen7-fjmd-b.mfc=Features/000000000.chunk[173524,173741]
|
||||
An4/503/503/an197-mgah-b.mfc=Features/000000000.chunk[173742,173949]
|
||||
An4/533/533/cen1-mjda-b.mfc=Features/000000000.chunk[173950,174167]
|
||||
An4/691/691/cen6-mmkw-b.mfc=Features/000000000.chunk[174168,174525]
|
||||
An4/305/305/cen6-mbmg-b.mfc=Features/000000000.chunk[174526,174763]
|
||||
An4/191/191/cen8-fnsv-b.mfc=Features/000000000.chunk[174764,175101]
|
||||
An4/772/772/an237-mrjc2-b.mfc=Features/000000000.chunk[175102,175239]
|
||||
An4/273/273/an375-mblb-b.mfc=Features/000000000.chunk[175240,175467]
|
||||
An4/660/660/cen2-mmap-b.mfc=Features/000000000.chunk[175468,175615]
|
||||
An4/540/540/cen8-mjda-b.mfc=Features/000000000.chunk[175616,175903]
|
||||
An4/930/930/cen3-mtxj-b.mfc=Features/000000000.chunk[175904,176001]
|
||||
An4/346/346/an141-mcrt-b.mfc=Features/000000000.chunk[176002,176089]
|
||||
An4/125/125/cen7-fkdo-b.mfc=Features/000000000.chunk[176090,176447]
|
||||
An4/107/107/cen2-fkai-b.mfc=Features/000000000.chunk[176448,176605]
|
||||
An4/504/504/an198-mgah-b.mfc=Features/000000000.chunk[176606,176843]
|
||||
An4/316/316/cen4-mcel-b.mfc=Features/000000000.chunk[176844,177161]
|
||||
An4/840/840/an355-msjr-b.mfc=Features/000000000.chunk[177162,177359]
|
||||
An4/689/689/cen4-mmkw-b.mfc=Features/000000000.chunk[177360,177877]
|
||||
An4/113/113/cen8-fkai-b.mfc=Features/000000000.chunk[177878,178095]
|
||||
An4/548/548/cen4-mjdr-b.mfc=Features/000000000.chunk[178096,178533]
|
||||
An4/915/915/cen1-mtos-b.mfc=Features/000000000.chunk[178534,178741]
|
||||
An4/326/326/cen2-mcen-b.mfc=Features/000000000.chunk[178742,178989]
|
||||
An4/770/770/cen8-mrcb-b.mfc=Features/000000000.chunk[178990,179267]
|
||||
An4/618/618/an188-mkem-b.mfc=Features/000000000.chunk[179268,179515]
|
||||
An4/543/543/an19-mjdr-b.mfc=Features/000000000.chunk[179516,179923]
|
||||
An4/597/597/cen2-mjjs2-b.mfc=Features/000000000.chunk[179924,180391]
|
||||
An4/304/304/cen5-mbmg-b.mfc=Features/000000000.chunk[180392,180779]
|
||||
An4/613/613/cen6-mkdb-b.mfc=Features/000000000.chunk[180780,180947]
|
||||
An4/551/551/cen7-mjdr-b.mfc=Features/000000000.chunk[180948,181235]
|
||||
An4/260/260/an224-fwxs-b.mfc=Features/000000000.chunk[181236,181553]
|
||||
An4/810/810/an341-msct-b.mfc=Features/000000000.chunk[181554,181651]
|
||||
An4/211/211/cen2-fsaf2-b.mfc=Features/000000000.chunk[181652,181959]
|
||||
An4/3/3/an254-fash-b.mfc=Features/000000000.chunk[181960,182047]
|
||||
An4/285/285/an4-mblw-b.mfc=Features/000000000.chunk[182048,182185]
|
||||
An4/651/651/cen6-mmal-b.mfc=Features/000000000.chunk[182186,182343]
|
||||
An4/862/862/an276-msmn-b.mfc=Features/000000000.chunk[182344,182631]
|
||||
An4/296/296/an267-mbmg-b.mfc=Features/000000000.chunk[182632,182969]
|
||||
An4/261/261/an225-fwxs-b.mfc=Features/000000000.chunk[182970,183067]
|
||||
An4/705/705/cen7-mmsh-b.mfc=Features/000000000.chunk[183068,183285]
|
||||
An4/28/28/cen1-fclc-b.mfc=Features/000000000.chunk[183286,183713]
|
||||
An4/382/382/cen6-mdcs-b.mfc=Features/000000000.chunk[183714,183891]
|
||||
An4/765/765/cen3-mrcb-b.mfc=Features/000000000.chunk[183892,184069]
|
||||
An4/499/499/cen6-mfaa-b.mfc=Features/000000000.chunk[184070,184247]
|
||||
An4/7/7/cen4-fash-b.mfc=Features/000000000.chunk[184248,184605]
|
||||
An4/110/110/cen5-fkai-b.mfc=Features/000000000.chunk[184606,184913]
|
||||
An4/893/893/cen5-mtcv-b.mfc=Features/000000000.chunk[184914,185201]
|
||||
An4/677/677/cen6-mmdg-b.mfc=Features/000000000.chunk[185202,185509]
|
||||
An4/204/204/cen8-fplp-b.mfc=Features/000000000.chunk[185510,185897]
|
||||
An4/427/427/an29-mdxs-b.mfc=Features/000000000.chunk[185898,186005]
|
||||
An4/284/284/an3-mblw-b.mfc=Features/000000000.chunk[186006,186133]
|
||||
An4/348/348/an143-mcrt-b.mfc=Features/000000000.chunk[186134,186241]
|
||||
An4/449/449/cen8-meab-b.mfc=Features/000000000.chunk[186242,186549]
|
||||
An4/423/423/cen8-mdxn-b.mfc=Features/000000000.chunk[186550,186817]
|
||||
An4/466/466/an289-mema-b.mfc=Features/000000000.chunk[186818,186985]
|
||||
An4/508/508/cen2-mgah-b.mfc=Features/000000000.chunk[186986,187243]
|
||||
An4/1/1/an251-fash-b.mfc=Features/000000000.chunk[187244,187341]
|
||||
An4/399/399/an207-mdmc-b.mfc=Features/000000000.chunk[187342,187799]
|
||||
An4/21/21/cen7-fbbh-b.mfc=Features/000000000.chunk[187800,188107]
|
||||
An4/573/573/cen4-mjgk-b.mfc=Features/000000000.chunk[188108,188525]
|
||||
An4/706/706/cen8-mmsh-b.mfc=Features/000000000.chunk[188526,188743]
|
||||
An4/609/609/cen2-mkdb-b.mfc=Features/000000000.chunk[188744,188911]
|
||||
An4/839/839/an354-msjr-b.mfc=Features/000000000.chunk[188912,189299]
|
||||
An4/312/312/an205-mcel-b.mfc=Features/000000000.chunk[189300,189657]
|
||||
An4/63/63/an77-fjam-b.mfc=Features/000000000.chunk[189658,189965]
|
||||
An4/678/678/cen7-mmdg-b.mfc=Features/000000000.chunk[189966,190503]
|
||||
An4/761/761/an104-mrcb-b.mfc=Features/000000000.chunk[190504,190601]
|
||||
An4/642/642/an322-mmal-b.mfc=Features/000000000.chunk[190602,190929]
|
||||
An4/262/262/cen1-fwxs-b.mfc=Features/000000000.chunk[190930,191347]
|
||||
An4/171/171/cen1-fmjd-b.mfc=Features/000000000.chunk[191348,191665]
|
||||
An4/114/114/an131-fkdo-b.mfc=Features/000000000.chunk[191666,192263]
|
||||
An4/853/853/an99-mskh-b.mfc=Features/000000000.chunk[192264,192521]
|
||||
An4/333/333/an261-mcfl-b.mfc=Features/000000000.chunk[192522,192619]
|
||||
An4/112/112/cen7-fkai-b.mfc=Features/000000000.chunk[192620,192777]
|
||||
An4/265/265/cen4-fwxs-b.mfc=Features/000000000.chunk[192778,193325]
|
||||
An4/813/813/an344-msct-b.mfc=Features/000000000.chunk[193326,193723]
|
||||
An4/496/496/cen3-mfaa-b.mfc=Features/000000000.chunk[193724,193881]
|
||||
An4/178/178/cen8-fmjd-b.mfc=Features/000000000.chunk[193882,194129]
|
||||
An4/54/54/cen1-ffmm-b.mfc=Features/000000000.chunk[194130,194457]
|
||||
An4/596/596/cen1-mjjs2-b.mfc=Features/000000000.chunk[194458,194895]
|
||||
An4/875/875/an335-msrb-b.mfc=Features/000000000.chunk[194896,195033]
|
||||
An4/430/430/cen2-mdxs-b.mfc=Features/000000000.chunk[195034,195281]
|
||||
An4/928/928/cen1-mtxj-b.mfc=Features/000000000.chunk[195282,195529]
|
||||
An4/493/493/an165-mfaa-b.mfc=Features/000000000.chunk[195530,195807]
|
||||
An4/887/887/an304-mtcv-b.mfc=Features/000000000.chunk[195808,195905]
|
||||
An4/859/859/cen6-mskh-b.mfc=Features/000000000.chunk[195906,196083]
|
||||
An4/883/883/cen8-msrb-b.mfc=Features/000000000.chunk[196084,196301]
|
||||
An4/871/871/an331-msrb-b.mfc=Features/000000000.chunk[196302,196479]
|
||||
An4/836/836/an351-msjr-b.mfc=Features/000000000.chunk[196480,196757]
|
||||
An4/553/553/an156-mjes-b.mfc=Features/000000000.chunk[196758,197075]
|
||||
An4/861/861/cen8-mskh-b.mfc=Features/000000000.chunk[197076,197333]
|
||||
An4/248/248/an215-ftmj-b.mfc=Features/000000000.chunk[197334,197441]
|
||||
An4/77/77/an123-fjdn-b.mfc=Features/000000000.chunk[197442,197569]
|
||||
An4/417/417/cen2-mdxn-b.mfc=Features/000000000.chunk[197570,197787]
|
||||
An4/856/856/cen3-mskh-b.mfc=Features/000000000.chunk[197788,197965]
|
||||
An4/44/44/cen4-fejs-b.mfc=Features/000000000.chunk[197966,198493]
|
||||
An4/645/645/an325-mmal-b.mfc=Features/000000000.chunk[198494,198581]
|
||||
An4/453/453/an69-meht-b.mfc=Features/000000000.chunk[198582,198669]
|
||||
An4/639/639/cen7-mmaf-b.mfc=Features/000000000.chunk[198670,198957]
|
||||
An4/907/907/cen6-mtje-b.mfc=Features/000000000.chunk[198958,199175]
|
||||
An4/330/330/cen6-mcen-b.mfc=Features/000000000.chunk[199176,199373]
|
||||
An4/258/258/an222-fwxs-b.mfc=Features/000000000.chunk[199374,199461]
|
||||
An4/746/746/an72-mrab-b.mfc=Features/000000000.chunk[199462,199549]
|
||||
An4/752/752/cen3-mrab-b.mfc=Features/000000000.chunk[199550,199677]
|
||||
An4/663/663/cen5-mmap-b.mfc=Features/000000000.chunk[199678,199945]
|
||||
An4/843/843/cen3-msjr-b.mfc=Features/000000000.chunk[199946,200083]
|
||||
An4/674/674/cen3-mmdg-b.mfc=Features/000000000.chunk[200084,200321]
|
||||
An4/105/105/an315-fkai-b.mfc=Features/000000000.chunk[200322,200519]
|
||||
An4/604/604/an317-mkdb-b.mfc=Features/000000000.chunk[200520,200657]
|
||||
An4/557/557/an160-mjes-b.mfc=Features/000000000.chunk[200658,201035]
|
||||
An4/320/320/an126-mcen-b.mfc=Features/000000000.chunk[201036,201123]
|
||||
An4/605/605/an318-mkdb-b.mfc=Features/000000000.chunk[201124,201251]
|
||||
An4/731/731/cen7-mnfe-b.mfc=Features/000000000.chunk[201252,201649]
|
||||
An4/467/467/an290-mema-b.mfc=Features/000000000.chunk[201650,201907]
|
||||
An4/368/368/cen5-mcsc-b.mfc=Features/000000000.chunk[201908,202045]
|
||||
An4/672/672/cen1-mmdg-b.mfc=Features/000000000.chunk[202046,202463]
|
||||
An4/744/744/cen8-mnjl-b.mfc=Features/000000000.chunk[202464,202721]
|
||||
An4/37/37/an37-fejs-b.mfc=Features/000000000.chunk[202722,203189]
|
||||
An4/376/376/an245-mdcs-b.mfc=Features/000000000.chunk[203190,203447]
|
||||
An4/64/64/an78-fjam-b.mfc=Features/000000000.chunk[203448,203645]
|
||||
An4/27/27/an150-fclc-b.mfc=Features/000000000.chunk[203646,204073]
|
||||
An4/917/917/cen3-mtos-b.mfc=Features/000000000.chunk[204074,204311]
|
||||
An4/637/637/cen5-mmaf-b.mfc=Features/000000000.chunk[204312,204659]
|
||||
An4/464/464/an287-mema-b.mfc=Features/000000000.chunk[204660,204907]
|
||||
An4/187/187/cen4-fnsv-b.mfc=Features/000000000.chunk[204908,205375]
|
||||
An4/385/385/an111-mdcs2-b.mfc=Features/000000000.chunk[205376,205573]
|
||||
An4/885/885/an302-mtcv-b.mfc=Features/000000000.chunk[205574,205981]
|
||||
An4/914/914/an370-mtos-b.mfc=Features/000000000.chunk[205982,206439]
|
||||
An4/153/153/an116-fmjc-b.mfc=Features/000000000.chunk[206440,206667]
|
||||
An4/375/375/an244-mdcs-b.mfc=Features/000000000.chunk[206668,206915]
|
||||
An4/868/868/cen6-msmn-b.mfc=Features/000000000.chunk[206916,207093]
|
||||
An4/495/495/cen2-mfaa-b.mfc=Features/000000000.chunk[207094,207221]
|
||||
An4/712/712/cen1-mmtm-b.mfc=Features/000000000.chunk[207222,207509]
|
||||
An4/364/364/cen1-mcsc-b.mfc=Features/000000000.chunk[207510,207987]
|
||||
An4/739/739/cen2-mnjl-b.mfc=Features/000000000.chunk[207988,208185]
|
||||
An4/256/256/cen8-ftmj-b.mfc=Features/000000000.chunk[208186,208473]
|
||||
An4/222/222/an170-fsrb-b.mfc=Features/000000000.chunk[208474,208721]
|
||||
An4/332/332/cen8-mcen-b.mfc=Features/000000000.chunk[208722,208979]
|
||||
An4/759/759/an102-mrcb-b.mfc=Features/000000000.chunk[208980,209057]
|
||||
An4/571/571/cen1-mjgk-b.mfc=Features/000000000.chunk[209058,209245]
|
||||
An4/585/585/cen3-mjhp-b.mfc=Features/000000000.chunk[209246,209413]
|
||||
An4/422/422/cen7-mdxn-b.mfc=Features/000000000.chunk[209414,209681]
|
||||
An4/50/50/an292-ffmm-b.mfc=Features/000000000.chunk[209682,209849]
|
||||
An4/483/483/cen3-mewl-b.mfc=Features/000000000.chunk[209850,210167]
|
||||
An4/104/104/an314-fkai-b.mfc=Features/000000000.chunk[210168,210535]
|
||||
An4/641/641/an321-mmal-b.mfc=Features/000000000.chunk[210536,210833]
|
||||
An4/798/798/an357-mscg2-b.mfc=Features/000000000.chunk[210834,210931]
|
||||
An4/42/42/cen2-fejs-b.mfc=Features/000000000.chunk[210932,211159]
|
||||
An4/632/632/an55-mmaf-b.mfc=Features/000000000.chunk[211160,211347]
|
||||
An4/716/716/cen5-mmtm-b.mfc=Features/000000000.chunk[211348,211585]
|
||||
An4/19/19/cen5-fbbh-b.mfc=Features/000000000.chunk[211586,211873]
|
||||
An4/923/923/an376-mtxj-b.mfc=Features/000000000.chunk[211874,212041]
|
||||
An4/890/890/cen2-mtcv-b.mfc=Features/000000000.chunk[212042,212179]
|
||||
An4/825/825/an228-msjm-b.mfc=Features/000000000.chunk[212180,212537]
|
||||
An4/379/379/cen3-mdcs-b.mfc=Features/000000000.chunk[212538,212745]
|
||||
An4/870/870/cen8-msmn-b.mfc=Features/000000000.chunk[212746,213333]
|
||||
An4/623/623/cen4-mkem-b.mfc=Features/000000000.chunk[213334,213461]
|
||||
An4/703/703/cen5-mmsh-b.mfc=Features/000000000.chunk[213462,213679]
|
||||
An4/126/126/cen8-fkdo-b.mfc=Features/000000000.chunk[213680,213937]
|
||||
An4/79/79/an125-fjdn-b.mfc=Features/000000000.chunk[213938,214045]
|
||||
An4/384/384/cen8-mdcs-b.mfc=Features/000000000.chunk[214046,214303]
|
||||
An4/681/681/an362-mmkw-b.mfc=Features/000000000.chunk[214304,214741]
|
||||
An4/913/913/an369-mtos-b.mfc=Features/000000000.chunk[214742,214839]
|
||||
An4/392/392/cen3-mdcs2-b.mfc=Features/000000000.chunk[214840,215007]
|
||||
An4/217/217/cen8-fsaf2-b.mfc=Features/000000000.chunk[215008,215205]
|
||||
An4/409/409/cen7-mdmc-b.mfc=Features/000000000.chunk[215206,215533]
|
||||
An4/515/515/an246-mjbh-b.mfc=Features/000000000.chunk[215534,215621]
|
||||
An4/90/90/an7-fjmd-b.mfc=Features/000000000.chunk[215622,215839]
|
||||
An4/760/760/an103-mrcb-b.mfc=Features/000000000.chunk[215840,216247]
|
||||
An4/62/62/an76-fjam-b.mfc=Features/000000000.chunk[216248,216335]
|
||||
An4/822/822/cen8-msct-b.mfc=Features/000000000.chunk[216336,216563]
|
||||
An4/462/462/cen8-meht-b.mfc=Features/000000000.chunk[216564,216761]
|
||||
An4/292/292/cen6-mblw-b.mfc=Features/000000000.chunk[216762,216929]
|
||||
An4/676/676/cen5-mmdg-b.mfc=Features/000000000.chunk[216930,217477]
|
||||
An4/572/572/cen2-mjgk-b.mfc=Features/000000000.chunk[217478,217695]
|
||||
An4/363/363/an235-mcsc-b.mfc=Features/000000000.chunk[217696,217773]
|
||||
An4/522/522/cen3-mjbh-b.mfc=Features/000000000.chunk[217774,217931]
|
||||
An4/924/924/an377-mtxj-b.mfc=Features/000000000.chunk[217932,218299]
|
||||
An4/816/816/cen2-msct-b.mfc=Features/000000000.chunk[218300,218547]
|
||||
An4/485/485/cen5-mewl-b.mfc=Features/000000000.chunk[218548,218915]
|
||||
An4/621/621/cen1-mkem-b.mfc=Features/000000000.chunk[218916,219183]
|
||||
An4/577/577/cen8-mjgk-b.mfc=Features/000000000.chunk[219184,219391]
|
||||
An4/318/318/cen6-mcel-b.mfc=Features/000000000.chunk[219392,219619]
|
||||
An4/792/792/cen4-mrmg-b.mfc=Features/000000000.chunk[219620,219977]
|
||||
An4/841/841/cen1-msjr-b.mfc=Features/000000000.chunk[219978,220345]
|
||||
An4/763/763/cen1-mrcb-b.mfc=Features/000000000.chunk[220346,220553]
|
||||
An4/458/458/cen4-meht-b.mfc=Features/000000000.chunk[220554,220751]
|
||||
An4/715/715/cen4-mmtm-b.mfc=Features/000000000.chunk[220752,221289]
|
||||
An4/607/607/an320-mkdb-b.mfc=Features/000000000.chunk[221290,221527]
|
||||
An4/208/208/an299-fsaf2-b.mfc=Features/000000000.chunk[221528,221925]
|
||||
An4/134/134/cen3-flmm2-b.mfc=Features/000000000.chunk[221926,222103]
|
||||
An4/649/649/cen4-mmal-b.mfc=Features/000000000.chunk[222104,222391]
|
||||
An4/911/911/an367-mtos-b.mfc=Features/000000000.chunk[222392,222799]
|
||||
An4/730/730/cen6-mnfe-b.mfc=Features/000000000.chunk[222800,223087]
|
||||
An4/349/349/an144-mcrt-b.mfc=Features/000000000.chunk[223088,223485]
|
||||
An4/324/324/an130-mcen-b.mfc=Features/000000000.chunk[223486,223553]
|
||||
An4/501/501/cen8-mfaa-b.mfc=Features/000000000.chunk[223554,223751]
|
||||
An4/226/226/cen4-fsrb-b.mfc=Features/000000000.chunk[223752,224189]
|
||||
An4/547/547/cen3-mjdr-b.mfc=Features/000000000.chunk[224190,224357]
|
||||
An4/414/414/an109-mdxn-b.mfc=Features/000000000.chunk[224358,224625]
|
||||
An4/201/201/cen5-fplp-b.mfc=Features/000000000.chunk[224626,225233]
|
||||
An4/221/221/an169-fsrb-b.mfc=Features/000000000.chunk[225234,225391]
|
||||
An4/12/12/an88-fbbh-b.mfc=Features/000000000.chunk[225392,225859]
|
||||
An4/879/879/cen4-msrb-b.mfc=Features/000000000.chunk[225860,226267]
|
||||
An4/563/563/cen6-mjes-b.mfc=Features/000000000.chunk[226268,226415]
|
||||
An4/123/123/cen5-fkdo-b.mfc=Features/000000000.chunk[226416,226813]
|
||||
An4/207/207/an298-fsaf2-b.mfc=Features/000000000.chunk[226814,226911]
|
||||
An4/617/617/an187-mkem-b.mfc=Features/000000000.chunk[226912,227079]
|
||||
An4/866/866/cen4-msmn-b.mfc=Features/000000000.chunk[227080,227517]
|
||||
An4/943/943/cen3-mwhw-b.mfc=Features/000000000.chunk[227518,227625]
|
||||
An4/542/542/an18-mjdr-b.mfc=Features/000000000.chunk[227626,227783]
|
||||
An4/762/762/an105-mrcb-b.mfc=Features/000000000.chunk[227784,227931]
|
||||
An4/465/465/an288-mema-b.mfc=Features/000000000.chunk[227932,228019]
|
||||
An4/249/249/cen1-ftmj-b.mfc=Features/000000000.chunk[228020,228257]
|
||||
An4/307/307/cen8-mbmg-b.mfc=Features/000000000.chunk[228258,228585]
|
||||
An4/802/802/cen1-mscg2-b.mfc=Features/000000000.chunk[228586,228823]
|
||||
An4/73/73/cen7-fjam-b.mfc=Features/000000000.chunk[228824,229061]
|
||||
An4/554/554/an157-mjes-b.mfc=Features/000000000.chunk[229062,229189]
|
||||
An4/539/539/cen7-mjda-b.mfc=Features/000000000.chunk[229190,229517]
|
||||
An4/505/505/an199-mgah-b.mfc=Features/000000000.chunk[229518,229615]
|
||||
An4/780/780/cen5-mrjc2-b.mfc=Features/000000000.chunk[229616,229993]
|
||||
An4/100/100/cen8-fjmd-b.mfc=Features/000000000.chunk[229994,230211]
|
||||
An4/696/696/an383-mmsh-b.mfc=Features/000000000.chunk[230212,230349]
|
||||
An4/486/486/cen6-mewl-b.mfc=Features/000000000.chunk[230350,230547]
|
||||
An4/32/32/cen5-fclc-b.mfc=Features/000000000.chunk[230548,230975]
|
||||
An4/835/835/cen8-msjm-b.mfc=Features/000000000.chunk[230976,231193]
|
||||
An4/514/514/cen8-mgah-b.mfc=Features/000000000.chunk[231194,231541]
|
||||
An4/694/694/an381-mmsh-b.mfc=Features/000000000.chunk[231542,231779]
|
||||
An4/867/867/cen5-msmn-b.mfc=Features/000000000.chunk[231780,232107]
|
||||
An4/366/366/cen3-mcsc-b.mfc=Features/000000000.chunk[232108,232335]
|
||||
An4/912/912/an368-mtos-b.mfc=Features/000000000.chunk[232336,232753]
|
||||
An4/738/738/cen1-mnjl-b.mfc=Features/000000000.chunk[232754,233161]
|
||||
An4/270/270/an372-mblb-b.mfc=Features/000000000.chunk[233162,233459]
|
||||
An4/155/155/an118-fmjc-b.mfc=Features/000000000.chunk[233460,233707]
|
||||
An4/558/558/cen1-mjes-b.mfc=Features/000000000.chunk[233708,233925]
|
||||
An4/606/606/an319-mkdb-b.mfc=Features/000000000.chunk[233926,234273]
|
||||
An4/819/819/cen5-msct-b.mfc=Features/000000000.chunk[234274,234671]
|
||||
An4/288/288/cen2-mblw-b.mfc=Features/000000000.chunk[234672,234829]
|
||||
An4/120/120/cen2-fkdo-b.mfc=Features/000000000.chunk[234830,235117]
|
||||
An4/536/536/cen4-mjda-b.mfc=Features/000000000.chunk[235118,235695]
|
||||
An4/302/302/cen3-mbmg-b.mfc=Features/000000000.chunk[235696,235843]
|
||||
An4/860/860/cen7-mskh-b.mfc=Features/000000000.chunk[235844,236141]
|
||||
An4/269/269/an371-mblb-b.mfc=Features/000000000.chunk[236142,236509]
|
||||
An4/455/455/cen1-meht-b.mfc=Features/000000000.chunk[236510,236717]
|
||||
An4/286/286/an5-mblw-b.mfc=Features/000000000.chunk[236718,236815]
|
||||
An4/136/136/cen6-flmm2-b.mfc=Features/000000000.chunk[236816,237043]
|
||||
An4/481/481/cen1-mewl-b.mfc=Features/000000000.chunk[237044,237471]
|
||||
An4/58/58/cen5-ffmm-b.mfc=Features/000000000.chunk[237472,237959]
|
||||
An4/583/583/cen1-mjhp-b.mfc=Features/000000000.chunk[237960,238337]
|
||||
An4/534/534/cen2-mjda-b.mfc=Features/000000000.chunk[238338,238555]
|
||||
An4/940/940/an155-mwhw-b.mfc=Features/000000000.chunk[238556,238693]
|
||||
An4/882/882/cen7-msrb-b.mfc=Features/000000000.chunk[238694,239061]
|
||||
An4/473/473/cen6-mema-b.mfc=Features/000000000.chunk[239062,239239]
|
||||
An4/937/937/an152-mwhw-b.mfc=Features/000000000.chunk[239240,239337]
|
||||
An4/94/94/cen2-fjmd-b.mfc=Features/000000000.chunk[239338,239615]
|
||||
An4/83/83/cen4-fjdn-b.mfc=Features/000000000.chunk[239616,239763]
|
||||
An4/568/568/an218-mjgk-b.mfc=Features/000000000.chunk[239764,239851]
|
||||
An4/45/45/cen5-fejs-b.mfc=Features/000000000.chunk[239852,240259]
|
||||
An4/766/766/cen4-mrcb-b.mfc=Features/000000000.chunk[240260,240537]
|
||||
An4/929/929/cen2-mtxj-b.mfc=Features/000000000.chunk[240538,240695]
|
||||
An4/634/634/cen2-mmaf-b.mfc=Features/000000000.chunk[240696,240953]
|
||||
An4/337/337/an265-mcfl-b.mfc=Features/000000000.chunk[240954,241051]
|
||||
An4/884/884/an301-mtcv-b.mfc=Features/000000000.chunk[241052,241429]
|
||||
An4/516/516/an247-mjbh-b.mfc=Features/000000000.chunk[241430,241507]
|
||||
An4/796/796/cen8-mrmg-b.mfc=Features/000000000.chunk[241508,241725]
|
||||
An4/397/397/cen8-mdcs2-b.mfc=Features/000000000.chunk[241726,241973]
|
||||
An4/648/648/cen3-mmal-b.mfc=Features/000000000.chunk[241974,242151]
|
||||
An4/81/81/cen2-fjdn-b.mfc=Features/000000000.chunk[242152,242329]
|
||||
An4/807/807/cen6-mscg2-b.mfc=Features/000000000.chunk[242330,242617]
|
||||
An4/717/717/cen6-mmtm-b.mfc=Features/000000000.chunk[242618,242845]
|
||||
An4/394/394/cen5-mdcs2-b.mfc=Features/000000000.chunk[242846,243113]
|
||||
An4/895/895/cen7-mtcv-b.mfc=Features/000000000.chunk[243114,243461]
|
||||
An4/140/140/an2121-flrp-b.mfc=Features/000000000.chunk[243462,243779]
|
||||
An4/653/653/cen8-mmal-b.mfc=Features/000000000.chunk[243780,243957]
|
||||
An4/355/355/cen5-mcrt-b.mfc=Features/000000000.chunk[243958,244555]
|
||||
An4/159/159/cen2-fmjc-b.mfc=Features/000000000.chunk[244556,244803]
|
||||
An4/443/443/cen2-meab-b.mfc=Features/000000000.chunk[244804,245111]
|
||||
An4/942/942/cen2-mwhw-b.mfc=Features/000000000.chunk[245112,245329]
|
||||
An4/809/809/cen8-mscg2-b.mfc=Features/000000000.chunk[245330,245637]
|
||||
An4/519/519/an250-mjbh-b.mfc=Features/000000000.chunk[245638,245725]
|
||||
An4/944/944/cen4-mwhw-b.mfc=Features/000000000.chunk[245726,246083]
|
||||
An4/190/190/cen7-fnsv-b.mfc=Features/000000000.chunk[246084,246471]
|
||||
An4/925/925/an378-mtxj-b.mfc=Features/000000000.chunk[246472,246619]
|
||||
An4/665/665/cen7-mmap-b.mfc=Features/000000000.chunk[246620,246907]
|
||||
An4/448/448/cen7-meab-b.mfc=Features/000000000.chunk[246908,247345]
|
||||
An4/845/845/cen5-msjr-b.mfc=Features/000000000.chunk[247346,247563]
|
||||
An4/818/818/cen4-msct-b.mfc=Features/000000000.chunk[247564,247821]
|
||||
An4/695/695/an382-mmsh-b.mfc=Features/000000000.chunk[247822,248089]
|
||||
An4/511/511/cen5-mgah-b.mfc=Features/000000000.chunk[248090,248567]
|
||||
An4/479/479/an259-mewl-b.mfc=Features/000000000.chunk[248568,248705]
|
||||
An4/35/35/cen8-fclc-b.mfc=Features/000000000.chunk[248706,248973]
|
||||
An4/109/109/cen4-fkai-b.mfc=Features/000000000.chunk[248974,249221]
|
||||
An4/14/14/an90-fbbh-b.mfc=Features/000000000.chunk[249222,249319]
|
||||
An4/586/586/cen4-mjhp-b.mfc=Features/000000000.chunk[249320,249647]
|
||||
An4/133/133/cen2-flmm2-b.mfc=Features/000000000.chunk[249648,249845]
|
||||
An4/30/30/cen3-fclc-b.mfc=Features/000000000.chunk[249846,250033]
|
||||
An4/383/383/cen7-mdcs-b.mfc=Features/000000000.chunk[250034,250381]
|
||||
An4/34/34/cen7-fclc-b.mfc=Features/000000000.chunk[250382,250679]
|
||||
An4/851/851/an97-mskh-b.mfc=Features/000000000.chunk[250680,250817]
|
||||
An4/147/147/cen3-flrp-b.mfc=Features/000000000.chunk[250818,250975]
|
||||
An4/550/550/cen6-mjdr-b.mfc=Features/000000000.chunk[250976,251143]
|
||||
An4/407/407/cen5-mdmc-b.mfc=Features/000000000.chunk[251144,251521]
|
||||
An4/587/587/cen5-mjhp-b.mfc=Features/000000000.chunk[251522,251799]
|
||||
An4/22/22/cen8-fbbh-b.mfc=Features/000000000.chunk[251800,252077]
|
||||
An4/138/138/cen8-flmm2-b.mfc=Features/000000000.chunk[252078,252655]
|
@ -0,0 +1,132 @@
_ah_[2]
_ah_[3]
_ah_[4]
_hmm_[2]
_hmm_[3]
_hmm_[4]
_noise_[2]
_noise_[3]
_noise_[4]
aa_s2_1
aa_s3_1
aa_s4_1
ae_s2_1
ae_s3_1
ae_s4_1
ah_s2_1
ah_s3_1
ah_s4_1
ao_s2_1
ao_s3_1
ao_s4_1
aw_s2_1
aw_s3_1
aw_s4_1
ax_s2_1
ax_s3_1
ax_s4_1
ay_s2_1
ay_s3_1
ay_s4_1
b_s2_1
b_s3_1
b_s4_1
ch_s2_1
ch_s3_1
ch_s4_1
d_s2_1
d_s3_1
d_s4_1
dh_s2_1
dh_s3_1
dh_s4_1
eh_s2_1
eh_s3_1
eh_s4_1
er_s2_1
er_s3_1
er_s4_1
ey_s2_1
ey_s3_1
ey_s4_1
f_s2_1
f_s3_1
f_s4_1
g_s2_1
g_s3_1
g_s4_1
hh_s2_1
hh_s3_1
hh_s4_1
ih_s2_1
ih_s3_1
ih_s4_1
iy_s2_1
iy_s3_1
iy_s4_1
jh_s2_1
jh_s3_1
jh_s4_1
k_s2_1
k_s3_1
k_s4_1
l_s2_1
l_s3_1
l_s4_1
m_s2_1
m_s3_1
m_s4_1
n_s2_1
n_s3_1
n_s4_1
ng_s2_1
ng_s3_1
ng_s4_1
ow_s2_1
ow_s3_1
ow_s4_1
oy_s2_1
oy_s3_1
oy_s4_1
p_s2_1
p_s3_1
p_s4_1
r_s2_1
r_s3_1
r_s4_1
s_s2_1
s_s3_1
s_s4_1
sh_s2_1
sh_s3_1
sh_s4_1
sil[2]
sil[3]
sil[4]
t_s2_1
t_s3_1
t_s4_1
th_s2_1
th_s3_1
th_s4_1
uh_s2_1
uh_s3_1
uh_s4_1
uw_s2_1
uw_s3_1
uw_s4_1
v_s2_1
v_s3_1
v_s4_1
w_s2_1
w_s3_1
w_s4_1
y_s2_1
y_s3_1
y_s4_1
z_s2_1
z_s3_1
z_s4_1
zh_s2_1
zh_s3_1
zh_s4_1
@ -0,0 +1,758 @@
|
|||
=== Running /home/vlivan/cntk/bin/x86_64.gpu.release.acml/cntk configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
running on localhost at 2015/07/29 19:11:01
|
||||
command line options:
|
||||
configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG (VARIABLES NOT RESOLVED) >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=$DeviceId$
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=$RunDir$/models/cntkSpeech.dnn
|
||||
deviceId=$DeviceId$
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=$DataDir$/glob_0000.mlf
|
||||
labelMappingFile=$DataDir$/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG (VARIABLES NOT RESOLVED) <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=Auto
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> PROCESSED CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
configparameters: cntk.config:command=speechTrain
|
||||
configparameters: cntk.config:DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
configparameters: cntk.config:deviceId=Auto
|
||||
configparameters: cntk.config:parallelTrain=false
|
||||
configparameters: cntk.config:precision=float
|
||||
configparameters: cntk.config:RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
configparameters: cntk.config:speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< PROCESSED CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
command: speechTrain
|
||||
precision = float
|
||||
lsof: WARNING: can't stat() ext4 file system /var/lib/docker/aufs
|
||||
Output information may be incomplete.
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
SimpleNetworkBuilder Using GPU 0
|
||||
reading script file glob_0000.scp ... 948 entries
|
||||
total 132 state names in state list /home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
htkmlfreader: reading MLF file /home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf ...parse the line 55130
|
||||
total 948 entries
|
||||
...............................................................................................feature set 0: 252734 frames in 948 out of 948 utterances
|
||||
label set 0: 129 classes
|
||||
minibatchutterancesource: 948 utterances grouped into 3 chunks, av. chunk size: 316.0 utterances, 84244.7 frames
|
||||
GetTrainCriterionNodes ...
|
||||
GetEvalCriterionNodes ...
|
||||
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 3])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 3])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 3], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 3])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 3], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 3])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 3])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 3], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 3])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 3])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 3], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 3], HLast[132, 3])
|
||||
|
||||
Found 3 PreCompute nodes
|
||||
NodeName: InvStdOfFeatures
|
||||
NodeName: MeanOfFeatures
|
||||
NodeName: Prior
|
||||
minibatchiterator: epoch 0: frames [0..252734] (first utterance at frame 0) with 1 datapasses
|
||||
requiredata: determined feature kind as 33-dimensional 'USER' with frame shift 10.0 ms
|
||||
|
||||
|
||||
Validating node InvStdOfFeatures
|
||||
|
||||
Validating --> features = InputValue
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 64])
|
||||
|
||||
|
||||
|
||||
Validating node MeanOfFeatures
|
||||
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 64])
|
||||
|
||||
|
||||
|
||||
Validating node Prior
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> Prior = Mean(labels[132, 64])
|
||||
|
||||
Set Max Temp Mem Size For Convolution Nodes to 0 samples.
|
||||
Starting Epoch 1: learning rate per sample = 0.015625 momentum = 0.900000
|
||||
minibatchiterator: epoch 0: frames [0..20480] (first utterance at frame 0) with 1 datapasses
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 64])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 64])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 64], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 64])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 64], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 64])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 64])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 64], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 64])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 64])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 64], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 64], HLast[132, 64])
|
||||
|
||||
Epoch[1 of 3]-Minibatch[1-10 of 320]: SamplesSeen = 640; TrainLossPerSample = 4.3213539; EvalErr[0]PerSample = 0.89999998; TotalTime=0.064177; TotalTimePerSample=0.00010027656, SamplesPerSecond=9972
|
||||
Epoch[1 of 3]-Minibatch[11-20 of 320]: SamplesSeen = 640; TrainLossPerSample = 4.1507101; EvalErr[0]PerSample = 0.8671875; TotalTime=0.060664; TotalTimePerSample=9.47875e-05, SamplesPerSecond=10549
|
||||
Epoch[1 of 3]-Minibatch[21-30 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.9990096; EvalErr[0]PerSample = 0.87656248; TotalTime=0.062395; TotalTimePerSample=9.7492187e-05, SamplesPerSecond=10257
|
||||
Epoch[1 of 3]-Minibatch[31-40 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.8694596; EvalErr[0]PerSample = 0.87656248; TotalTime=0.058102; TotalTimePerSample=9.0784375e-05, SamplesPerSecond=11015
|
||||
Epoch[1 of 3]-Minibatch[41-50 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.8021927; EvalErr[0]PerSample = 0.87812501; TotalTime=0.058272; TotalTimePerSample=9.105e-05, SamplesPerSecond=10982
|
||||
Epoch[1 of 3]-Minibatch[51-60 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.7289093; EvalErr[0]PerSample = 0.86874998; TotalTime=0.056752; TotalTimePerSample=8.8675e-05, SamplesPerSecond=11277
|
||||
Epoch[1 of 3]-Minibatch[61-70 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.5618699; EvalErr[0]PerSample = 0.82343751; TotalTime=0.06015; TotalTimePerSample=9.3984375e-05, SamplesPerSecond=10640
|
||||
Epoch[1 of 3]-Minibatch[71-80 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.4279053; EvalErr[0]PerSample = 0.80781251; TotalTime=0.061573; TotalTimePerSample=9.6207812e-05, SamplesPerSecond=10394
|
||||
Epoch[1 of 3]-Minibatch[81-90 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3392854; EvalErr[0]PerSample = 0.7734375; TotalTime=0.057831; TotalTimePerSample=9.0360938e-05, SamplesPerSecond=11066
|
||||
Epoch[1 of 3]-Minibatch[91-100 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3639894; EvalErr[0]PerSample = 0.84375; TotalTime=0.05709; TotalTimePerSample=8.9203125e-05, SamplesPerSecond=11210
|
||||
WARNING: The same matrix with dim [1, 1] has been transferred between different devices for 20 times.
|
||||
Epoch[1 of 3]-Minibatch[101-110 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.2122345; EvalErr[0]PerSample = 0.75312501; TotalTime=0.061065; TotalTimePerSample=9.5414062e-05, SamplesPerSecond=10480
|
||||
Epoch[1 of 3]-Minibatch[111-120 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3126526; EvalErr[0]PerSample = 0.78750002; TotalTime=0.058543; TotalTimePerSample=9.1473437e-05, SamplesPerSecond=10932
|
||||
Epoch[1 of 3]-Minibatch[121-130 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.1408203; EvalErr[0]PerSample = 0.74687499; TotalTime=0.0605; TotalTimePerSample=9.453125e-05, SamplesPerSecond=10578
|
||||
Epoch[1 of 3]-Minibatch[131-140 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.006897; EvalErr[0]PerSample = 0.69687498; TotalTime=0.054623; TotalTimePerSample=8.5348438e-05, SamplesPerSecond=11716
|
||||
Epoch[1 of 3]-Minibatch[141-150 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.0049591; EvalErr[0]PerSample = 0.72343749; TotalTime=0.059955; TotalTimePerSample=9.3679687e-05, SamplesPerSecond=10674
|
||||
Epoch[1 of 3]-Minibatch[151-160 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.9785829; EvalErr[0]PerSample = 0.73906249; TotalTime=0.060773; TotalTimePerSample=9.4957812e-05, SamplesPerSecond=10530
|
||||
Epoch[1 of 3]-Minibatch[161-170 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.8568604; EvalErr[0]PerSample = 0.70781249; TotalTime=0.060235; TotalTimePerSample=9.4117187e-05, SamplesPerSecond=10625
|
||||
Epoch[1 of 3]-Minibatch[171-180 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.6905334; EvalErr[0]PerSample = 0.671875; TotalTime=0.064974; TotalTimePerSample=0.00010152188, SamplesPerSecond=9850
|
||||
Epoch[1 of 3]-Minibatch[181-190 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.7865357; EvalErr[0]PerSample = 0.70468748; TotalTime=0.05438; TotalTimePerSample=8.496875e-05, SamplesPerSecond=11769
|
||||
Epoch[1 of 3]-Minibatch[191-200 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.5770202; EvalErr[0]PerSample = 0.6484375; TotalTime=0.063006; TotalTimePerSample=9.8446875e-05, SamplesPerSecond=10157
|
||||
Epoch[1 of 3]-Minibatch[201-210 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.6157165; EvalErr[0]PerSample = 0.6640625; TotalTime=0.058268; TotalTimePerSample=9.104375e-05, SamplesPerSecond=10983
|
||||
Epoch[1 of 3]-Minibatch[211-220 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.552362; EvalErr[0]PerSample = 0.65781248; TotalTime=0.059349; TotalTimePerSample=9.2732812e-05, SamplesPerSecond=10783
|
||||
Epoch[1 of 3]-Minibatch[221-230 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.4821167; EvalErr[0]PerSample = 0.625; TotalTime=0.061069; TotalTimePerSample=9.5420313e-05, SamplesPerSecond=10479
|
||||
Epoch[1 of 3]-Minibatch[231-240 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3877869; EvalErr[0]PerSample = 0.62812501; TotalTime=0.055723; TotalTimePerSample=8.7067188e-05, SamplesPerSecond=11485
|
||||
Epoch[1 of 3]-Minibatch[241-250 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3690064; EvalErr[0]PerSample = 0.6484375; TotalTime=0.061959; TotalTimePerSample=9.6810937e-05, SamplesPerSecond=10329
|
||||
Epoch[1 of 3]-Minibatch[251-260 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.4396729; EvalErr[0]PerSample = 0.6328125; TotalTime=0.062976; TotalTimePerSample=9.84e-05, SamplesPerSecond=10162
|
||||
Epoch[1 of 3]-Minibatch[261-270 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3028197; EvalErr[0]PerSample = 0.61250001; TotalTime=0.060925; TotalTimePerSample=9.5195312e-05, SamplesPerSecond=10504
|
||||
Epoch[1 of 3]-Minibatch[271-280 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1966858; EvalErr[0]PerSample = 0.55937499; TotalTime=0.060799; TotalTimePerSample=9.4998438e-05, SamplesPerSecond=10526
|
||||
Epoch[1 of 3]-Minibatch[281-290 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.2898011; EvalErr[0]PerSample = 0.60468751; TotalTime=0.055702; TotalTimePerSample=8.7034375e-05, SamplesPerSecond=11489
|
||||
Epoch[1 of 3]-Minibatch[291-300 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1775086; EvalErr[0]PerSample = 0.62187499; TotalTime=0.061515; TotalTimePerSample=9.6117187e-05, SamplesPerSecond=10403
|
||||
Epoch[1 of 3]-Minibatch[301-310 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.2626343; EvalErr[0]PerSample = 0.59687501; TotalTime=0.059247; TotalTimePerSample=9.2573438e-05, SamplesPerSecond=10802
|
||||
Epoch[1 of 3]-Minibatch[311-320 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1507263; EvalErr[0]PerSample = 0.5625; TotalTime=0.059464; TotalTimePerSample=9.29125e-05, SamplesPerSecond=10762
|
||||
Finished Epoch[1]: [Training Set] TrainLossPerSample = 2.9799569; EvalErrPerSample = 0.72216797; Ave LearnRatePerSample = 0.015625; EpochTime=1.913549
|
||||
Starting Epoch 2: learning rate per sample = 0.001953 momentum = 0.656119
|
||||
minibatchiterator: epoch 1: frames [20480..40960] (first utterance at frame 20480) with 1 datapasses
|
||||
Epoch[2 of 3]-Minibatch[1-10 of 80]: SamplesSeen = 2560; TrainLossPerSample = 2.0159853; EvalErr[0]PerSample = 0.54140627; TotalTime=0.100302; TotalTimePerSample=3.9180469e-05, SamplesPerSecond=25522
|
||||
Epoch[2 of 3]-Minibatch[11-20 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9881856; EvalErr[0]PerSample = 0.54296875; TotalTime=0.093995; TotalTimePerSample=3.6716797e-05, SamplesPerSecond=27235
|
||||
Epoch[2 of 3]-Minibatch[21-30 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9869812; EvalErr[0]PerSample = 0.54140627; TotalTime=0.09237; TotalTimePerSample=3.6082031e-05, SamplesPerSecond=27714
|
||||
Epoch[2 of 3]-Minibatch[31-40 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9312614; EvalErr[0]PerSample = 0.5277344; TotalTime=0.092894; TotalTimePerSample=3.6286719e-05, SamplesPerSecond=27558
|
||||
Epoch[2 of 3]-Minibatch[41-50 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9006774; EvalErr[0]PerSample = 0.52656251; TotalTime=0.08927; TotalTimePerSample=3.4871094e-05, SamplesPerSecond=28677
|
||||
Epoch[2 of 3]-Minibatch[51-60 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9711578; EvalErr[0]PerSample = 0.54140627; TotalTime=0.091869; TotalTimePerSample=3.5886328e-05, SamplesPerSecond=27865
|
||||
Epoch[2 of 3]-Minibatch[61-70 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.8951813; EvalErr[0]PerSample = 0.52031249; TotalTime=0.092242; TotalTimePerSample=3.6032031e-05, SamplesPerSecond=27753
|
||||
Epoch[2 of 3]-Minibatch[71-80 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.904506; EvalErr[0]PerSample = 0.53164065; TotalTime=0.094062; TotalTimePerSample=3.6742969e-05, SamplesPerSecond=27216
|
||||
Finished Epoch[2]: [Training Set] TrainLossPerSample = 1.949242; EvalErrPerSample = 0.53417969; Ave LearnRatePerSample = 0.001953125; EpochTime=0.747962
|
||||
Starting Epoch 3: learning rate per sample = 0.000098 momentum = 0.656119
|
||||
minibatchiterator: epoch 2: frames [40960..61440] (first utterance at frame 40960) with 1 datapasses
|
||||
Epoch[3 of 3]-Minibatch[1-10 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8735985; EvalErr[0]PerSample = 0.51933593; TotalTime=0.27124; TotalTimePerSample=2.6488281e-05, SamplesPerSecond=37752
|
||||
Epoch[3 of 3]-Minibatch[11-20 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8665626; EvalErr[0]PerSample = 0.51748049; TotalTime=0.266098; TotalTimePerSample=2.5986133e-05, SamplesPerSecond=38482
|
||||
Finished Epoch[3]: [Training Set] TrainLossPerSample = 1.8700806; EvalErrPerSample = 0.51840824; Ave LearnRatePerSample = 9.765625146e-05; EpochTime=0.539342
|
||||
COMPLETED
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
Linked to libnvidia-ml library at wrong path : /usr/src/gdk/nvml/lib/libnvidia-ml.so.1
|
||||
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
=== Deleting last epoch data
|
||||
==== Re-running from checkpoint
|
||||
running on localhost at 2015/07/29 19:11:07
|
||||
command line options:
|
||||
configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG (VARIABLES NOT RESOLVED) >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=$DeviceId$
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=$RunDir$/models/cntkSpeech.dnn
|
||||
deviceId=$DeviceId$
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=$DataDir$/glob_0000.mlf
|
||||
labelMappingFile=$DataDir$/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG (VARIABLES NOT RESOLVED) <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=Auto
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> PROCESSED CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
configparameters: cntk.config:command=speechTrain
|
||||
configparameters: cntk.config:DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
configparameters: cntk.config:deviceId=Auto
|
||||
configparameters: cntk.config:parallelTrain=false
|
||||
configparameters: cntk.config:precision=float
|
||||
configparameters: cntk.config:RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu
|
||||
configparameters: cntk.config:speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< PROCESSED CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
command: speechTrain
|
||||
precision = float
|
||||
lsof: WARNING: can't stat() ext4 file system /var/lib/docker/aufs
|
||||
Output information may be incomplete.
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
SimpleNetworkBuilder Using GPU 0
|
||||
reading script file glob_0000.scp ... 948 entries
|
||||
total 132 state names in state list /home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
htkmlfreader: reading MLF file /home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf ...parse the line 55130
|
||||
total 948 entries
|
||||
...............................................................................................feature set 0: 252734 frames in 948 out of 948 utterances
|
||||
label set 0: 129 classes
|
||||
minibatchutterancesource: 948 utterances grouped into 3 chunks, av. chunk size: 316.0 utterances, 84244.7 frames
|
||||
Starting from checkpoint. Load Network From File /tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_cpu/models/cntkSpeech.dnn.2.
|
||||
|
||||
|
||||
Printing Gradient Computation Node Order ...
|
||||
|
||||
CrossEntropyWithSoftmax[0, 0] = CrossEntropyWithSoftmax(labels[132, 256], HLast[0, 0])
|
||||
HLast[0, 0] = Plus(W2*H1[0, 0], B2[132, 1])
|
||||
B2[132, 1] = LearnableParameter
|
||||
W2*H1[0, 0] = Times(W2[132, 512], H2[0, 0])
|
||||
H2[0, 0] = Sigmoid(W1*H1+B1[0, 0])
|
||||
W1*H1+B1[0, 0] = Plus(W1*H1[0, 0], B1[512, 1])
|
||||
B1[512, 1] = LearnableParameter
|
||||
W1*H1[0, 0] = Times(W1[512, 512], H1[0, 0])
|
||||
H1[0, 0] = Sigmoid(W0*features+B0[0, 0])
|
||||
W0*features+B0[0, 0] = Plus(W0*features[0, 0], B0[512, 1])
|
||||
B0[512, 1] = LearnableParameter
|
||||
W0*features[0, 0] = Times(W0[512, 363], MVNormalizedFeatures[0, 0])
|
||||
MVNormalizedFeatures[0, 0] = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
InvStdOfFeatures[363, 1] = InvStdDev(features[363, 256])
|
||||
MeanOfFeatures[363, 1] = Mean(features[363, 256])
|
||||
features[363, 256] = InputValue
|
||||
W0[512, 363] = LearnableParameter
|
||||
W1[512, 512] = LearnableParameter
|
||||
W2[132, 512] = LearnableParameter
|
||||
labels[132, 256] = InputValue
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 256], HLast[132, 256])
|
||||
|
||||
|
||||
|
||||
Validating node ScaledLogLikelihood
|
||||
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> labels = InputValue
|
||||
Validating --> Prior = Mean(labels[132, 256])
|
||||
Validating --> LogOfPrior = Log(Prior[132, 1])
|
||||
Validating --> ScaledLogLikelihood = Minus(HLast[132, 256], LogOfPrior[132, 1])
|
||||
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 256], HLast[132, 256])
|
||||
|
||||
GetTrainCriterionNodes ...
|
||||
GetEvalCriterionNodes ...
|
||||
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 256], HLast[132, 256])
|
||||
|
||||
No PreCompute nodes found, skipping PreCompute step
|
||||
Set Max Temp Mem Size For Convolution Nodes to 0 samples.
|
||||
Starting Epoch 3: learning rate per sample = 0.000098 momentum = 0.656119
|
||||
minibatchiterator: epoch 2: frames [40960..61440] (first utterance at frame 40960) with 1 datapasses
|
||||
requiredata: determined feature kind as 33-dimensional 'USER' with frame shift 10.0 ms
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 1024])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 1024])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 1024], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 1024])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 1024], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 1024])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 1024])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 1024], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 1024])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 1024])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 1024], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 1024], HLast[132, 1024])
|
||||
|
||||
Epoch[3 of 3]-Minibatch[1-10 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8735985; EvalErr[0]PerSample = 0.51933593; TotalTime=0.390092; TotalTimePerSample=3.8094922e-05, SamplesPerSecond=26250
|
||||
Epoch[3 of 3]-Minibatch[11-20 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8665626; EvalErr[0]PerSample = 0.51748049; TotalTime=0.261875; TotalTimePerSample=2.557373e-05, SamplesPerSecond=39102
|
||||
Finished Epoch[3]: [Training Set] TrainLossPerSample = 1.8700806; EvalErrPerSample = 0.51840824; Ave LearnRatePerSample = 9.765625146e-05; EpochTime=0.770276
|
||||
COMPLETED
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
Linked to libnvidia-ml library at wrong path : /usr/src/gdk/nvml/lib/libnvidia-ml.so.1
|
||||
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
|
@ -0,0 +1,758 @@
|
|||
=== Running /home/vlivan/cntk/bin/x86_64.gpu.release.acml/cntk configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
running on localhost at 2015/07/29 19:11:08
|
||||
command line options:
|
||||
configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG (VARIABLES NOT RESOLVED) >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=$DeviceId$
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=$RunDir$/models/cntkSpeech.dnn
|
||||
deviceId=$DeviceId$
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=$DataDir$/glob_0000.mlf
|
||||
labelMappingFile=$DataDir$/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG (VARIABLES NOT RESOLVED) <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=Auto
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> PROCESSED CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
configparameters: cntk.config:command=speechTrain
|
||||
configparameters: cntk.config:DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
configparameters: cntk.config:deviceId=Auto
|
||||
configparameters: cntk.config:parallelTrain=false
|
||||
configparameters: cntk.config:precision=float
|
||||
configparameters: cntk.config:RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
configparameters: cntk.config:speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< PROCESSED CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
command: speechTrain
|
||||
precision = float
|
||||
lsof: WARNING: can't stat() ext4 file system /var/lib/docker/aufs
|
||||
Output information may be incomplete.
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
SimpleNetworkBuilder Using GPU 0
|
||||
reading script file glob_0000.scp ... 948 entries
|
||||
total 132 state names in state list /home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
htkmlfreader: reading MLF file /home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf ...parse the line 55130
|
||||
total 948 entries
|
||||
...............................................................................................feature set 0: 252734 frames in 948 out of 948 utterances
|
||||
label set 0: 129 classes
|
||||
minibatchutterancesource: 948 utterances grouped into 3 chunks, av. chunk size: 316.0 utterances, 84244.7 frames
|
||||
GetTrainCriterionNodes ...
|
||||
GetEvalCriterionNodes ...
|
||||
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 3])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 3])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 3], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 3])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 3], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 3])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 3])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 3], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 3])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 3])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 3], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 3], HLast[132, 3])
|
||||
|
||||
Found 3 PreCompute nodes
|
||||
NodeName: InvStdOfFeatures
|
||||
NodeName: MeanOfFeatures
|
||||
NodeName: Prior
|
||||
minibatchiterator: epoch 0: frames [0..252734] (first utterance at frame 0) with 1 datapasses
|
||||
requiredata: determined feature kind as 33-dimensional 'USER' with frame shift 10.0 ms
|
||||
|
||||
|
||||
Validating node InvStdOfFeatures
|
||||
|
||||
Validating --> features = InputValue
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 64])
|
||||
|
||||
|
||||
|
||||
Validating node MeanOfFeatures
|
||||
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 64])
|
||||
|
||||
|
||||
|
||||
Validating node Prior
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> Prior = Mean(labels[132, 64])
|
||||
|
||||
Set Max Temp Mem Size For Convolution Nodes to 0 samples.
|
||||
Starting Epoch 1: learning rate per sample = 0.015625 momentum = 0.900000
|
||||
minibatchiterator: epoch 0: frames [0..20480] (first utterance at frame 0) with 1 datapasses
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 64])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 64])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 64], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 64])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 64], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 64])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 64])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 64], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 64])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 64])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 64], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 64], HLast[132, 64])
|
||||
|
||||
Epoch[1 of 3]-Minibatch[1-10 of 320]: SamplesSeen = 640; TrainLossPerSample = 4.3213539; EvalErr[0]PerSample = 0.89999998; TotalTime=0.064294; TotalTimePerSample=0.00010045938, SamplesPerSecond=9954
|
||||
Epoch[1 of 3]-Minibatch[11-20 of 320]: SamplesSeen = 640; TrainLossPerSample = 4.1507101; EvalErr[0]PerSample = 0.8671875; TotalTime=0.055813; TotalTimePerSample=8.7207812e-05, SamplesPerSecond=11466
|
||||
Epoch[1 of 3]-Minibatch[21-30 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.9990096; EvalErr[0]PerSample = 0.87656248; TotalTime=0.062703; TotalTimePerSample=9.7973437e-05, SamplesPerSecond=10206
|
||||
Epoch[1 of 3]-Minibatch[31-40 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.8694596; EvalErr[0]PerSample = 0.87656248; TotalTime=0.059923; TotalTimePerSample=9.3629687e-05, SamplesPerSecond=10680
|
||||
Epoch[1 of 3]-Minibatch[41-50 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.8021927; EvalErr[0]PerSample = 0.87812501; TotalTime=0.061061; TotalTimePerSample=9.5407812e-05, SamplesPerSecond=10481
|
||||
Epoch[1 of 3]-Minibatch[51-60 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.7289093; EvalErr[0]PerSample = 0.86874998; TotalTime=0.062101; TotalTimePerSample=9.7032813e-05, SamplesPerSecond=10305
|
||||
Epoch[1 of 3]-Minibatch[61-70 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.5618699; EvalErr[0]PerSample = 0.82343751; TotalTime=0.056094; TotalTimePerSample=8.7646875e-05, SamplesPerSecond=11409
|
||||
Epoch[1 of 3]-Minibatch[71-80 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.4279053; EvalErr[0]PerSample = 0.80781251; TotalTime=0.063459; TotalTimePerSample=9.9154687e-05, SamplesPerSecond=10085
|
||||
Epoch[1 of 3]-Minibatch[81-90 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3392854; EvalErr[0]PerSample = 0.7734375; TotalTime=0.062265; TotalTimePerSample=9.7289063e-05, SamplesPerSecond=10278
|
||||
Epoch[1 of 3]-Minibatch[91-100 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3639894; EvalErr[0]PerSample = 0.84375; TotalTime=0.059843; TotalTimePerSample=9.3504687e-05, SamplesPerSecond=10694
|
||||
WARNING: The same matrix with dim [1, 1] has been transferred between different devices for 20 times.
|
||||
Epoch[1 of 3]-Minibatch[101-110 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.2122345; EvalErr[0]PerSample = 0.75312501; TotalTime=0.062375; TotalTimePerSample=9.7460937e-05, SamplesPerSecond=10260
|
||||
Epoch[1 of 3]-Minibatch[111-120 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.3126526; EvalErr[0]PerSample = 0.78750002; TotalTime=0.061085; TotalTimePerSample=9.5445313e-05, SamplesPerSecond=10477
|
||||
Epoch[1 of 3]-Minibatch[121-130 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.1408203; EvalErr[0]PerSample = 0.74687499; TotalTime=0.064562; TotalTimePerSample=0.00010087812, SamplesPerSecond=9912
|
||||
Epoch[1 of 3]-Minibatch[131-140 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.006897; EvalErr[0]PerSample = 0.69687498; TotalTime=0.0575; TotalTimePerSample=8.984375e-05, SamplesPerSecond=11130
|
||||
Epoch[1 of 3]-Minibatch[141-150 of 320]: SamplesSeen = 640; TrainLossPerSample = 3.0049591; EvalErr[0]PerSample = 0.72343749; TotalTime=0.058338; TotalTimePerSample=9.1153125e-05, SamplesPerSecond=10970
|
||||
Epoch[1 of 3]-Minibatch[151-160 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.9785829; EvalErr[0]PerSample = 0.73906249; TotalTime=0.064603; TotalTimePerSample=0.00010094219, SamplesPerSecond=9906
|
||||
Epoch[1 of 3]-Minibatch[161-170 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.8568604; EvalErr[0]PerSample = 0.70781249; TotalTime=0.060368; TotalTimePerSample=9.4325e-05, SamplesPerSecond=10601
|
||||
Epoch[1 of 3]-Minibatch[171-180 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.6905334; EvalErr[0]PerSample = 0.671875; TotalTime=0.059125; TotalTimePerSample=9.2382812e-05, SamplesPerSecond=10824
|
||||
Epoch[1 of 3]-Minibatch[181-190 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.7865357; EvalErr[0]PerSample = 0.70468748; TotalTime=0.056113; TotalTimePerSample=8.7676563e-05, SamplesPerSecond=11405
|
||||
Epoch[1 of 3]-Minibatch[191-200 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.5770202; EvalErr[0]PerSample = 0.6484375; TotalTime=0.060745; TotalTimePerSample=9.4914062e-05, SamplesPerSecond=10535
|
||||
Epoch[1 of 3]-Minibatch[201-210 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.6157165; EvalErr[0]PerSample = 0.6640625; TotalTime=0.059709; TotalTimePerSample=9.3295312e-05, SamplesPerSecond=10718
|
||||
Epoch[1 of 3]-Minibatch[211-220 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.552362; EvalErr[0]PerSample = 0.65781248; TotalTime=0.061917; TotalTimePerSample=9.6745313e-05, SamplesPerSecond=10336
|
||||
Epoch[1 of 3]-Minibatch[221-230 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.4821167; EvalErr[0]PerSample = 0.625; TotalTime=0.053813; TotalTimePerSample=8.4082813e-05, SamplesPerSecond=11893
|
||||
Epoch[1 of 3]-Minibatch[231-240 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3877869; EvalErr[0]PerSample = 0.62812501; TotalTime=0.061932; TotalTimePerSample=9.676875e-05, SamplesPerSecond=10333
|
||||
Epoch[1 of 3]-Minibatch[241-250 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3690064; EvalErr[0]PerSample = 0.6484375; TotalTime=0.059294; TotalTimePerSample=9.2646875e-05, SamplesPerSecond=10793
|
||||
Epoch[1 of 3]-Minibatch[251-260 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.4396729; EvalErr[0]PerSample = 0.6328125; TotalTime=0.060513; TotalTimePerSample=9.4551562e-05, SamplesPerSecond=10576
|
||||
Epoch[1 of 3]-Minibatch[261-270 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.3028197; EvalErr[0]PerSample = 0.61250001; TotalTime=0.06037; TotalTimePerSample=9.4328125e-05, SamplesPerSecond=10601
|
||||
Epoch[1 of 3]-Minibatch[271-280 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1966858; EvalErr[0]PerSample = 0.55937499; TotalTime=0.056485; TotalTimePerSample=8.8257812e-05, SamplesPerSecond=11330
|
||||
Epoch[1 of 3]-Minibatch[281-290 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.2898011; EvalErr[0]PerSample = 0.60468751; TotalTime=0.059356; TotalTimePerSample=9.274375e-05, SamplesPerSecond=10782
|
||||
Epoch[1 of 3]-Minibatch[291-300 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1775086; EvalErr[0]PerSample = 0.62187499; TotalTime=0.059501; TotalTimePerSample=9.2970312e-05, SamplesPerSecond=10756
|
||||
Epoch[1 of 3]-Minibatch[301-310 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.2626343; EvalErr[0]PerSample = 0.59687501; TotalTime=0.064342; TotalTimePerSample=0.00010053437, SamplesPerSecond=9946
|
||||
Epoch[1 of 3]-Minibatch[311-320 of 320]: SamplesSeen = 640; TrainLossPerSample = 2.1507263; EvalErr[0]PerSample = 0.5625; TotalTime=0.064522; TotalTimePerSample=0.00010081563, SamplesPerSecond=9919
|
||||
Finished Epoch[1]: [Training Set] TrainLossPerSample = 2.9799569; EvalErrPerSample = 0.72216797; Ave LearnRatePerSample = 0.015625; EpochTime=1.935613
|
||||
Starting Epoch 2: learning rate per sample = 0.001953 momentum = 0.656119
|
||||
minibatchiterator: epoch 1: frames [20480..40960] (first utterance at frame 20480) with 1 datapasses
|
||||
Epoch[2 of 3]-Minibatch[1-10 of 80]: SamplesSeen = 2560; TrainLossPerSample = 2.0159853; EvalErr[0]PerSample = 0.54140627; TotalTime=0.102487; TotalTimePerSample=4.0033984e-05, SamplesPerSecond=24978
|
||||
Epoch[2 of 3]-Minibatch[11-20 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9881856; EvalErr[0]PerSample = 0.54296875; TotalTime=0.09473; TotalTimePerSample=3.7003906e-05, SamplesPerSecond=27024
|
||||
Epoch[2 of 3]-Minibatch[21-30 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9869812; EvalErr[0]PerSample = 0.54140627; TotalTime=0.091318; TotalTimePerSample=3.5671094e-05, SamplesPerSecond=28033
|
||||
Epoch[2 of 3]-Minibatch[31-40 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9312614; EvalErr[0]PerSample = 0.5277344; TotalTime=0.092408; TotalTimePerSample=3.6096875e-05, SamplesPerSecond=27703
|
||||
Epoch[2 of 3]-Minibatch[41-50 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9006774; EvalErr[0]PerSample = 0.52656251; TotalTime=0.098698; TotalTimePerSample=3.8553906e-05, SamplesPerSecond=25937
|
||||
Epoch[2 of 3]-Minibatch[51-60 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.9711578; EvalErr[0]PerSample = 0.54140627; TotalTime=0.0896; TotalTimePerSample=3.5e-05, SamplesPerSecond=28571
|
||||
Epoch[2 of 3]-Minibatch[61-70 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.8951813; EvalErr[0]PerSample = 0.52031249; TotalTime=0.092477; TotalTimePerSample=3.6123828e-05, SamplesPerSecond=27682
|
||||
Epoch[2 of 3]-Minibatch[71-80 of 80]: SamplesSeen = 2560; TrainLossPerSample = 1.904506; EvalErr[0]PerSample = 0.53164065; TotalTime=0.091179; TotalTimePerSample=3.5616797e-05, SamplesPerSecond=28076
|
||||
Finished Epoch[2]: [Training Set] TrainLossPerSample = 1.949242; EvalErrPerSample = 0.53417969; Ave LearnRatePerSample = 0.001953125; EpochTime=0.753703
|
||||
Starting Epoch 3: learning rate per sample = 0.000098 momentum = 0.656119
|
||||
minibatchiterator: epoch 2: frames [40960..61440] (first utterance at frame 40960) with 1 datapasses
|
||||
Epoch[3 of 3]-Minibatch[1-10 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8735985; EvalErr[0]PerSample = 0.51933593; TotalTime=0.27395; TotalTimePerSample=2.675293e-05, SamplesPerSecond=37379
|
||||
Epoch[3 of 3]-Minibatch[11-20 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8665626; EvalErr[0]PerSample = 0.51748049; TotalTime=0.261453; TotalTimePerSample=2.553252e-05, SamplesPerSecond=39165
|
||||
Finished Epoch[3]: [Training Set] TrainLossPerSample = 1.8700806; EvalErrPerSample = 0.51840824; Ave LearnRatePerSample = 9.765625146e-05; EpochTime=0.537273
|
||||
COMPLETED
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
Linked to libnvidia-ml library at wrong path : /usr/src/gdk/nvml/lib/libnvidia-ml.so.1
|
||||
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
=== Deleting last epoch data
|
||||
==== Re-running from checkpoint
|
||||
running on localhost at 2015/07/29 19:11:14
|
||||
command line options:
|
||||
configFile=/home/vlivan/cntk/Tests/Speech/QuickE2E/cntk.config RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu DataDir=/home/vlivan/cntk/Tests/Speech/Data DeviceId=Auto
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG (VARIABLES NOT RESOLVED) >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=$DeviceId$
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=$RunDir$/models/cntkSpeech.dnn
|
||||
deviceId=$DeviceId$
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=$DataDir$/glob_0000.mlf
|
||||
labelMappingFile=$DataDir$/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG (VARIABLES NOT RESOLVED) <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> RAW CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=Auto
|
||||
parallelTrain=false
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
DeviceId=Auto
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< RAW CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
|
||||
>>>>>>>>>>>>>>>>>>>> PROCESSED CONFIG WITH ALL VARIABLES RESOLVED >>>>>>>>>>>>>>>>>>>>
|
||||
configparameters: cntk.config:command=speechTrain
|
||||
configparameters: cntk.config:DataDir=/home/vlivan/cntk/Tests/Speech/Data
|
||||
configparameters: cntk.config:deviceId=Auto
|
||||
configparameters: cntk.config:parallelTrain=false
|
||||
configparameters: cntk.config:precision=float
|
||||
configparameters: cntk.config:RunDir=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu
|
||||
configparameters: cntk.config:speechTrain=[
|
||||
action=train
|
||||
modelPath=/tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu/models/cntkSpeech.dnn
|
||||
deviceId=Auto
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
labels=[
|
||||
mlfFile=/home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf
|
||||
labelMappingFile=/home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
<<<<<<<<<<<<<<<<<<<< PROCESSED CONFIG WITH ALL VARIABLES RESOLVED <<<<<<<<<<<<<<<<<<<<
|
||||
command: speechTrain
|
||||
precision = float
|
||||
lsof: WARNING: can't stat() ext4 file system /var/lib/docker/aufs
|
||||
Output information may be incomplete.
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
LockDevice: Capture device 0 and lock it for exclusive use
|
||||
SimpleNetworkBuilder Using GPU 0
|
||||
reading script file glob_0000.scp ... 948 entries
|
||||
total 132 state names in state list /home/vlivan/cntk/Tests/Speech/Data/state.list
|
||||
htkmlfreader: reading MLF file /home/vlivan/cntk/Tests/Speech/Data/glob_0000.mlf ...parse the line 55130
|
||||
total 948 entries
|
||||
...............................................................................................feature set 0: 252734 frames in 948 out of 948 utterances
|
||||
label set 0: 129 classes
|
||||
minibatchutterancesource: 948 utterances grouped into 3 chunks, av. chunk size: 316.0 utterances, 84244.7 frames
|
||||
Starting from checkpoint. Load Network From File /tmp/cntk-test-20150729191101.973007/Speech_QuickE2E@release_gpu/models/cntkSpeech.dnn.2.
|
||||
|
||||
|
||||
Printing Gradient Computation Node Order ...
|
||||
|
||||
CrossEntropyWithSoftmax[0, 0] = CrossEntropyWithSoftmax(labels[132, 256], HLast[0, 0])
|
||||
HLast[0, 0] = Plus(W2*H1[0, 0], B2[132, 1])
|
||||
B2[132, 1] = LearnableParameter
|
||||
W2*H1[0, 0] = Times(W2[132, 512], H2[0, 0])
|
||||
H2[0, 0] = Sigmoid(W1*H1+B1[0, 0])
|
||||
W1*H1+B1[0, 0] = Plus(W1*H1[0, 0], B1[512, 1])
|
||||
B1[512, 1] = LearnableParameter
|
||||
W1*H1[0, 0] = Times(W1[512, 512], H1[0, 0])
|
||||
H1[0, 0] = Sigmoid(W0*features+B0[0, 0])
|
||||
W0*features+B0[0, 0] = Plus(W0*features[0, 0], B0[512, 1])
|
||||
B0[512, 1] = LearnableParameter
|
||||
W0*features[0, 0] = Times(W0[512, 363], MVNormalizedFeatures[0, 0])
|
||||
MVNormalizedFeatures[0, 0] = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
InvStdOfFeatures[363, 1] = InvStdDev(features[363, 256])
|
||||
MeanOfFeatures[363, 1] = Mean(features[363, 256])
|
||||
features[363, 256] = InputValue
|
||||
W0[512, 363] = LearnableParameter
|
||||
W1[512, 512] = LearnableParameter
|
||||
W2[132, 512] = LearnableParameter
|
||||
labels[132, 256] = InputValue
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 256], HLast[132, 256])
|
||||
|
||||
|
||||
|
||||
Validating node ScaledLogLikelihood
|
||||
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> labels = InputValue
|
||||
Validating --> Prior = Mean(labels[132, 256])
|
||||
Validating --> LogOfPrior = Log(Prior[132, 1])
|
||||
Validating --> ScaledLogLikelihood = Minus(HLast[132, 256], LogOfPrior[132, 1])
|
||||
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 256], HLast[132, 256])
|
||||
|
||||
GetTrainCriterionNodes ...
|
||||
GetEvalCriterionNodes ...
|
||||
|
||||
|
||||
Validating node CrossEntropyWithSoftmax
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 256])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 256])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 256], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 256])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 256], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 256])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 256])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 256], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 256])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 256])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 256], B2[132, 1])
|
||||
Validating --> CrossEntropyWithSoftmax = CrossEntropyWithSoftmax(labels[132, 256], HLast[132, 256])
|
||||
|
||||
No PreCompute nodes found, skipping PreCompute step
|
||||
Set Max Temp Mem Size For Convolution Nodes to 0 samples.
|
||||
Starting Epoch 3: learning rate per sample = 0.000098 momentum = 0.656119
|
||||
minibatchiterator: epoch 2: frames [40960..61440] (first utterance at frame 40960) with 1 datapasses
|
||||
requiredata: determined feature kind as 33-dimensional 'USER' with frame shift 10.0 ms
|
||||
|
||||
|
||||
Validating node EvalErrorPrediction
|
||||
|
||||
Validating --> labels = InputValue
|
||||
Validating --> W2 = LearnableParameter
|
||||
Validating --> W1 = LearnableParameter
|
||||
Validating --> W0 = LearnableParameter
|
||||
Validating --> features = InputValue
|
||||
Validating --> MeanOfFeatures = Mean(features[363, 1024])
|
||||
Validating --> InvStdOfFeatures = InvStdDev(features[363, 1024])
|
||||
Validating --> MVNormalizedFeatures = PerDimMeanVarNormalization(features[363, 1024], MeanOfFeatures[363, 1], InvStdOfFeatures[363, 1])
|
||||
Validating --> W0*features = Times(W0[512, 363], MVNormalizedFeatures[363, 1024])
|
||||
Validating --> B0 = LearnableParameter
|
||||
Validating --> W0*features+B0 = Plus(W0*features[512, 1024], B0[512, 1])
|
||||
Validating --> H1 = Sigmoid(W0*features+B0[512, 1024])
|
||||
Validating --> W1*H1 = Times(W1[512, 512], H1[512, 1024])
|
||||
Validating --> B1 = LearnableParameter
|
||||
Validating --> W1*H1+B1 = Plus(W1*H1[512, 1024], B1[512, 1])
|
||||
Validating --> H2 = Sigmoid(W1*H1+B1[512, 1024])
|
||||
Validating --> W2*H1 = Times(W2[132, 512], H2[512, 1024])
|
||||
Validating --> B2 = LearnableParameter
|
||||
Validating --> HLast = Plus(W2*H1[132, 1024], B2[132, 1])
|
||||
Validating --> EvalErrorPrediction = ErrorPrediction(labels[132, 1024], HLast[132, 1024])
|
||||
|
||||
Epoch[3 of 3]-Minibatch[1-10 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8735985; EvalErr[0]PerSample = 0.51933593; TotalTime=0.430752; TotalTimePerSample=4.2065625e-05, SamplesPerSecond=23772
|
||||
Epoch[3 of 3]-Minibatch[11-20 of 20]: SamplesSeen = 10240; TrainLossPerSample = 1.8665626; EvalErr[0]PerSample = 0.51748049; TotalTime=0.2702; TotalTimePerSample=2.6386719e-05, SamplesPerSecond=37897
|
||||
Finished Epoch[3]: [Training Set] TrainLossPerSample = 1.8700806; EvalErrPerSample = 0.51840824; Ave LearnRatePerSample = 9.765625146e-05; EpochTime=0.868162
|
||||
COMPLETED
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
Linked to libnvidia-ml library at wrong path : /usr/src/gdk/nvml/lib/libnvidia-ml.so.1
|
||||
|
||||
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
WARNING:
|
||||
|
||||
You should always run with libnvidia-ml.so that is installed with your
|
||||
NVIDIA Display Driver. By default it's installed in /usr/lib and /usr/lib64.
|
||||
libnvidia-ml.so in GDK package is a stub library that is attached only for
|
||||
build purposes (e.g. machine that you build your application doesn't have
|
||||
to have Display Driver installed).
|
||||
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
|
@ -0,0 +1,63 @@
|
|||
precision=float
|
||||
command=speechTrain
|
||||
deviceId=$DeviceId$
|
||||
|
||||
parallelTrain=false
|
||||
|
||||
speechTrain=[
|
||||
action=train
|
||||
modelPath=$RunDir$/models/cntkSpeech.dnn
|
||||
deviceId=$DeviceId$
|
||||
traceLevel=1
|
||||
SimpleNetworkBuilder=[
|
||||
layerSizes=363:512:512:132
|
||||
trainingCriterion=CrossEntropyWithSoftmax
|
||||
evalCriterion=ErrorPrediction
|
||||
layerTypes=Sigmoid
|
||||
initValueScale=1.0
|
||||
applyMeanVarNorm=true
|
||||
uniformInit=true
|
||||
needPrior=true
|
||||
]
|
||||
|
||||
SGD=[
|
||||
epochSize=20480
|
||||
minibatchSize=64:256:1024:
|
||||
learningRatesPerMB=1.0:0.5:0.1
|
||||
numMBsToShowResult=10
|
||||
momentumPerMB=0.9:0.656119
|
||||
dropoutRate=0.0
|
||||
maxEpochs=3
|
||||
keepCheckPointFiles=true
|
||||
|
||||
AutoAdjust=[
|
||||
reduceLearnRateIfImproveLessThan=0
|
||||
loadBestModel=true
|
||||
increaseLearnRateIfImproveMoreThan=1000000000
|
||||
learnRateDecreaseFactor=0.5
|
||||
learnRateIncreaseFactor=1.382
|
||||
autoAdjustLR=AdjustAfterEpoch
|
||||
]
|
||||
clippingThresholdPerSample=1#INF
|
||||
]
|
||||
reader=[
|
||||
readerType=HTKMLFReader
|
||||
readMethod=blockRandomize
|
||||
miniBatchMode=Partial
|
||||
randomize=Auto
|
||||
verbosity=0
|
||||
features=[
|
||||
dim=363
|
||||
type=Real
|
||||
scpFile=glob_0000.scp
|
||||
]
|
||||
|
||||
labels=[
|
||||
mlfFile=$DataDir$/glob_0000.mlf
|
||||
labelMappingFile=$DataDir$/state.list
|
||||
|
||||
labelDim=132
|
||||
labelType=Category
|
||||
]
|
||||
]
|
||||
]
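# Note: the $...$ variables above ($RunDir$, $DataDir$, $DeviceId$) are not defined in this
# file; they are resolved from the command line, e.g.
#   cntk configFile=cntk.config RunDir=<run-dir> DataDir=<data-dir> DeviceId=Auto
# as can be seen in the baseline logs above.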
|
|
@ -0,0 +1,17 @@
|
|||
#!/bin/bash
|
||||
CNTK_BINARY=$TEST_BUILD_LOCATION/x86_64.gpu.$TEST_FLAVOR.acml/cntk
|
||||
if [ "$TEST_DEVICE" == "CPU" ]; then
|
||||
CNTK_DEVICE_ID=-1
|
||||
else
|
||||
CNTK_DEVICE_ID=Auto
|
||||
fi
|
||||
CNTK_ARGS="configFile=$TEST_DIR/cntk.config RunDir=$TEST_RUN_DIR DataDir=$TEST_DATA_DIR DeviceId=$CNTK_DEVICE_ID"
|
||||
MODELS_DIR=$TEST_RUN_DIR/models
|
||||
[ -d $MODELS_DIR ] && rm -rf $MODELS_DIR
|
||||
mkdir -p $MODELS_DIR || exit $?
|
||||
echo === Running $CNTK_BINARY $CNTK_ARGS
|
||||
$CNTK_BINARY $CNTK_ARGS || exit $?
|
||||
echo === Deleting last epoch data
|
||||
rm $TEST_RUN_DIR/models/*.dnn
|
||||
echo ==== Re-running from checkpoint
|
||||
$CNTK_BINARY $CNTK_ARGS || exit $?
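# Example of invoking this script by hand with the environment the test driver exports
# (the values below are placeholders, not taken from any real run):
#   TEST_FLAVOR=release TEST_DEVICE=gpu TEST_BUILD_LOCATION=<build-root> \
#   TEST_DIR=<test-dir> TEST_DATA_DIR=<data-dir> TEST_RUN_DIR=<run-dir> ./run-test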
|
|
@ -0,0 +1,27 @@
|
|||
dataDir: ../Data
|
||||
|
||||
testCases:
|
||||
CNTK Run must be completed:
|
||||
patterns:
|
||||
- ^COMPLETED
|
||||
|
||||
Must train epochs in exactly same order and parameters:
|
||||
patterns:
|
||||
- ^Starting Epoch {{integer}}
|
||||
- learning rate per sample = {{float}}
|
||||
- momentum = {{float}}
|
||||
|
||||
Epochs must be finished with expected results:
|
||||
patterns:
|
||||
- ^Finished Epoch[{{integer}}]
|
||||
- TrainLossPerSample = {{float,tolerance=1%}}
|
||||
- EvalErrPerSample = {{float,tolerance=1%}}
|
||||
- Ave LearnRatePerSample = {{float,tolerance=1%}}
|
||||
|
||||
Per-minibatch training results must match:
|
||||
patterns:
|
||||
- ^ Epoch[{{integer}} of {{integer}}]-Minibatch[{{integer}}-{{integer}} of {{integer}}]
|
||||
- SamplesSeen = {{integer}}
|
||||
- TrainLossPerSample = {{float,tolerance=1%}}
|
||||
- EvalErr[0]PerSample = {{float,tolerance=1%}}
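# Further test cases can be added in the same form; for example (the name and pattern below
# are purely illustrative and not part of this suite):
#
#  Epoch timing must be reported:
#    patterns:
#      - EpochTime={{float}}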
|
||||
|
|
@ -0,0 +1,551 @@
|
|||
#!/usr/bin/env python
|
||||
# ----------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# ---------------------------------------------------------
|
||||
# This is a test driver for running end-to-end CNTK tests
|
||||
#
|
||||
# ----- Running a test and/or updating baselines ------
|
||||
# For instructions see:
|
||||
# ./TestDriver.py --help
|
||||
#
|
||||
# ---- Adding the tests: -------
|
||||
# File system organization:
|
||||
# Each test suite (e.g. Speech) has its own directory inside Tests
|
||||
# Each test (e.g. QuickE2E) has its own directory within its test suite
|
||||
#
|
||||
# Each test directory has the following components:
|
||||
# - testcases.yml - main test configuration file, which defines all test cases
|
||||
# - run-test - the script that actually runs the test
|
||||
# - baseline*.txt - baseline files with the captured expected output of the run-test script
|
||||
#
|
||||
# ----- testcases.yml format -------
|
||||
# dataDir: <path>   # <relative path to the data directory>
|
||||
#
|
||||
# testCases:
|
||||
# <name of the testcase 1>:
|
||||
# patterns:
|
||||
# - <pattern 1> # see pattern language
|
||||
# - <pattern 2>
|
||||
# - .....
|
||||
#
|
||||
# <name of the testcase 2>:
|
||||
# patterns:
|
||||
# - <pattern 1>
|
||||
# - <pattern 2>
|
||||
# - .....
|
||||
# .....
|
||||
#
|
||||
# ----- pattern language --------
|
||||
# Multiple patterns of the same test case match a *single* line of text
|
||||
# A pattern is essentially a substring which has to be found in a line
|
||||
# If a pattern starts with ^, matching is constrained to the beginning of the line
|
||||
#
|
||||
# A pattern can have one or more placeholders wrapped in double curly braces: {{...}}
|
||||
# These placeholders match any text conforming to the type constraint. Available placeholders:
|
||||
# {{integer}} - matches any (positive or negative) integer value
|
||||
# {{float}} - matches any float value
|
||||
# {{float,tolerance=0.00001}} - matches a float value with the given absolute tolerance: 0.00001 in this example
|
||||
# {{float,tolerance=2%}} - matches a float value with the given relative tolerance: 2% in this example
|
||||
#
|
||||
# At runtime patterns are compiled by TestDriver.py to regular expressions
|
||||
#
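# For illustration only (the exact regular expressions are produced by this driver's pattern
# compilation and may differ in detail): a pattern such as
#     ^Finished Epoch[{{integer}}]
# would conceptually become something like
#     re.compile(r"^Finished Epoch\[[-+]?\d+\]")
# while a {{float,tolerance=...}} placeholder additionally records its tolerance so that the
# captured value can be compared numerically against the baseline value rather than textually.
#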
|
||||
# ---- Baseline files ----
|
||||
# The order of searching baseline files depends on the current mode for a given test:
|
||||
#
|
||||
# 1. baseline.<flavor>.<device>.txt
|
||||
# 2. baseline.<flavor>.txt
|
||||
# 3. baseline.<device>.txt
|
||||
# 4. baseline.txt
|
||||
# where <flavor> = { debug | release }
|
||||
# <device> = { cpu | gpu }
|
||||
#
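# A rough sketch (an assumption for clarity, not the literal implementation -- see
# findBaselineFile further below) of the probing order described above:
#     for name in ["baseline." + flavor + "." + device + ".txt",
#                  "baseline." + flavor + ".txt",
#                  "baseline." + device + ".txt",
#                  "baseline.txt"]:
#         if os.path.isfile(os.path.join(testDir, name)): <use this baseline>
#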
|
||||
# ----- Algorithm ------
|
||||
# Baseline verification:
|
||||
# For each testcase
|
||||
# - filter all lines which match
|
||||
# - if no lines are found then abort with an error, since either the baseline and/or the pattern is invalid
|
||||
# Running test:
|
||||
# Run test script (run-test) and capture output:
|
||||
#
|
||||
# For each testcase
|
||||
# - filter all matching lines from baseline
|
||||
# - filter all matching lines from test output
|
||||
# - compare filtered lines one by one, ensuring that substrings defined by patterns are matching
|
||||
#
|
||||
# In practice, TestDriver performs a single pass through the output of run-test, performing real-time
|
||||
# matching against all test cases/patterns simultaneously
|
||||
#
|
||||
|
||||
import sys, os, argparse, traceback, yaml, subprocess, random, re, time
|
||||
|
||||
thisDir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# This class encapsulates an instance of the test
|
||||
class Test:
|
||||
# "Suite/TestName" => instance of Test
|
||||
allTestsIndexedByFullName = {}
|
||||
|
||||
# suite - name of the test suite
|
||||
# name - name of the test
|
||||
# pathToYmlFile - path to the testcases.yml file
|
||||
def __init__(self, suite, name, pathToYmlFile):
|
||||
self.suite = suite
|
||||
self.name = name
|
||||
self.fullName = suite + "/" + name
|
||||
# computing location of test directory (yml file directory)
|
||||
self.testDir = os.path.dirname(pathToYmlFile)
|
||||
# parsing yml file with testcases
|
||||
with open(pathToYmlFile, "r") as f:
|
||||
self.rawYamlData = yaml.safe_load(f.read())
|
||||
|
||||
# finding location of data directory
|
||||
if self.rawYamlData["dataDir"]:
|
||||
self.dataDir = os.path.realpath(os.path.join(self.testDir, self.rawYamlData["dataDir"]))
|
||||
else:
|
||||
self.dataDir = self.testDir
|
||||
|
||||
testCasesYaml = self.rawYamlData["testCases"]
|
||||
self.testCases = []
|
||||
for name in testCasesYaml.keys():
|
||||
try:
|
||||
self.testCases.append(TestCase(name, testCasesYaml[name]))
|
||||
except Exception as e:
|
||||
print >>sys.stderr, "ERROR registering test case: " + name
|
||||
raise
|
||||
|
||||
# Populates Tests.allTestsIndexedByFullName by scanning directory tree
|
||||
# and finding all testcases.yml files
|
||||
@staticmethod
|
||||
def discoverAllTests():
|
||||
for dirName, subdirList, fileList in os.walk(thisDir):
|
||||
if 'testcases.yml' in fileList:
|
||||
testDir = dirName
|
||||
testName = os.path.basename(dirName)
|
||||
suiteDir = os.path.dirname(dirName)
|
||||
# suite name will be derived from the path components
|
||||
suiteName = os.path.relpath(suiteDir, thisDir).replace('\\', '/')
|
||||
try:
|
||||
test = Test(suiteName, testName, dirName + "/testcases.yml")
|
||||
Test.allTestsIndexedByFullName[test.fullName.lower()] = test
|
||||
except Exception as e:
|
||||
print >>sys.stderr, "ERROR registering test: " + dirName
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
# Runs this test
|
||||
# flavor - "debug" or "release"
|
||||
# device - "cpu" or "gpu"
|
||||
# args - command line arguments from argparse
|
||||
# returns an instance of TestRunResult
|
||||
def run(self, flavor, device, args):
|
||||
# Locating and reading baseline file
|
||||
baselineFile = self.findBaselineFile(flavor, device)
|
||||
if baselineFile == None:
|
||||
return TestRunResult.fatalError("Baseline file sanity check", "Can't find baseline file")
|
||||
|
||||
with open(baselineFile, "r") as f:
|
||||
baseline = f.read().split("\n")
|
||||
if args.verbose:
|
||||
print "Baseline:", baselineFile
|
||||
|
||||
# Before running the test, pre-create a TestCaseRunResult object for each test case
|
||||
# and compute filtered lines from baseline file.
|
||||
# Note: some test cases might fail at this time if baseline and/or patterns are inconsistant
|
||||
result = TestRunResult()
|
||||
result.succeeded = True
|
||||
if not args.update_baseline:
|
||||
for testCase in self.testCases:
|
||||
testCaseRunResult = testCase.processBaseline(baseline)
|
||||
if not testCaseRunResult.succeeded:
|
||||
result.succeeded = False
|
||||
result.testCaseRunResults.append(testCaseRunResult)
|
||||
|
||||
# preparing run directory
|
||||
runDir = os.path.join(args.run_dir, "{0}_{1}@{2}_{3}".format(self.suite, self.name, flavor, device))
|
||||
if not os.path.isdir(runDir):
|
||||
os.makedirs(runDir)
|
||||
|
||||
# preparing environment for the test script
|
||||
os.environ["TEST_FLAVOR"] = flavor
|
||||
os.environ["TEST_DEVICE"] = device
|
||||
os.environ["TEST_BUILD_LOCATION"] = args.build_location
|
||||
os.environ["TEST_DIR"] = self.testDir
|
||||
os.environ["TEST_DATA_DIR"] = self.dataDir
|
||||
os.environ["TEST_RUN_DIR"] = runDir
|
||||
# WORKAROUND: changing current dir to the dataDir so relative paths in SCP files work as expected
|
||||
os.chdir(self.dataDir)
|
||||
# Running test script
|
||||
#TODO:port this properly to windows
|
||||
# Writing standard output to the file and to the console (if --verbose)
|
||||
logFile = os.path.join(runDir, "output.txt")
|
||||
allLines = []
|
||||
if args.verbose:
|
||||
print self.fullName + ":>" + logFile
|
||||
with open(logFile, "w") as output:
|
||||
cmdLine = ["bash", "-c", self.testDir + "/run-test 2>&1"]
|
||||
process = subprocess.Popen(cmdLine, stdout=subprocess.PIPE)
|
||||
|
||||
while True:
|
||||
line = process.stdout.readline()
|
||||
if not line:
|
||||
break
|
||||
|
||||
if len(line)>0 and line[-1]=='\n':
|
||||
line=line[:len(line)-1]
|
||||
|
||||
if args.verbose:
|
||||
print self.fullName + ": " + line
|
||||
|
||||
print >>output, line
|
||||
allLines.append(line)
|
||||
output.flush()
|
||||
for testCaseRunResult in result.testCaseRunResults:
|
||||
testCaseRunResult.testCase.processLine(line, testCaseRunResult, args.verbose)
|
||||
|
||||
exitCode = process.wait()
|
||||
success = True
|
||||
|
||||
# checking exit code
|
||||
if exitCode != 0:
|
||||
return TestRunResult.fatalError("Exit code must be 0", "==> got exit code {0} when running: {1}".format(exitCode, " ".join(cmdLine)), logFile = logFile)
|
||||
|
||||
# saving log file path, so it can be reported later
|
||||
result.logFile = logFile
|
||||
|
||||
# finalizing verification - need to check whether we have any unmatched lines
|
||||
for testCaseRunResult in result.testCaseRunResults:
|
||||
testCaseRunResult.testCase.finalize(testCaseRunResult)
|
||||
if not testCaseRunResult.succeeded:
|
||||
result.succeeded = False
|
||||
|
||||
if args.update_baseline and result.succeeded:
|
||||
# When running in --update-baseline mode
|
||||
# verifying that new output is succesfully matching every pattern in the testcases.yml
|
||||
# If this is not the case then baseline update will be rejected
|
||||
for testCase in self.testCases:
|
||||
testCaseRunResult = testCase.processBaseline(allLines)
|
||||
if not testCaseRunResult.succeeded:
|
||||
result.succeeded = False
|
||||
result.testCaseRunResults.append(testCaseRunResult)
|
||||
|
||||
if result.succeeded:
|
||||
if args.verbose:
|
||||
print "Updating baseline file", baselineFile
|
||||
with open(baselineFile, "w") as f:
|
||||
f.write("\n".join(allLines))
|
||||
|
||||
return result
|

  # Finds the location of the baseline file by probing different names in the following order:
  #   baseline.$flavor.$device.txt
  #   baseline.$flavor.txt
  #   baseline.$device.txt
  #   baseline.txt
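  # For example (hypothetical test directory), for flavor="debug" and device="gpu" the
  # candidates are probed in this order:
  #   <testDir>/baseline.debug.gpu.txt
  #   <testDir>/baseline.debug.txt
  #   <testDir>/baseline.gpu.txt
  #   <testDir>/baseline.txt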
  def findBaselineFile(self, flavor, device):
    for f in ["." + flavor.lower(), ""]:
      for d in ["." + device.lower(), ""]:
        candidateName = "baseline" + f + d + ".txt"
        fullPath = os.path.join(self.testDir, candidateName)
        if os.path.isfile(fullPath):
          return fullPath
    return None

# This class encapsulates one test case (from the testcases.yml file)
class TestCase:
  def __init__(self, name, yamlNode):
    self.name = name
    self.patterns = []
    if "patterns" in yamlNode:
      for pattern in yamlNode["patterns"]:
        try:
          self.patterns.append(TestPattern(pattern))
        except Exception as e:
          print >>sys.stderr, "ERROR registering pattern: " + pattern
          raise

  # Processes the baseline file and returns an instance of TestCaseRunResult
  # which is ready to be passed into processLine
  def processBaseline(self, baseline):
    result = TestCaseRunResult(self.name, True)
    result.diagnostics = ""
    result.testCase = self

    # filter all lines of the baseline file, leaving only those which match ALL the patterns
    filteredLines = []
    for line in baseline:
      if all([p.match(line) for p in self.patterns]):
        filteredLines.append(line)
    if len(filteredLines) == 0:
      result.succeeded = False
      result.diagnostics += "Baseline file doesn't have any lines matching all patterns defined in the test case.\n"\
                            "Possible cause: the patterns are wrong and/or the baseline file doesn't have the required line"
    result.expectedLines = filteredLines
    return result

  # Processes one line of the test output and reports the result into the given TestCaseRunResult
  def processLine(self, line, result, verbose):
    if all([p.match(line) for p in self.patterns]):
      if len(result.expectedLines) > 0:
        # we have a matched line in the output and at least one remaining unmatched line in the baseline
        expected = result.expectedLines[0]
        # running the comparison logic for each pattern
        failedPatterns = []
        for p in self.patterns:
          if not p.compare(expected, line):
            result.succeeded = False
            failedPatterns.append(p)

        # in the case of failure - reporting the mismatched lines
        if len(failedPatterns) > 0:
          result.diagnostics += ("Baseline: {0}\n" +
                                 "Output: {1}\n"
                                ).format(expected, line)
          if verbose:
            print "[FAILED]: Testcase", self.name
            print "Baseline:", expected

          # also show all failed patterns
          for p in failedPatterns:
            msg = "Failed pattern: " + p.patternText
            if verbose:
              print msg
            result.diagnostics += msg + "\n"
        # removing this line, since we already matched it (whether successfully or not - doesn't matter)
        del result.expectedLines[0]
      else:
        # we have a matched line in the output, but no remaining unmatched lines in the baseline
        result.succeeded = False
        result.diagnostics += ("Unexpected (extra) line in the output which matches the pattern, but doesn't appear in the baseline file.\n" +
                               "Extra line: {0}"
                              ).format(line)

  # called once for each TestCaseRunResult at the end to check for any remaining unmatched expected lines
  def finalize(self, result):
    if len(result.expectedLines) > 0:
      result.succeeded = False
      result.diagnostics += ("{0} expected lines weren't observed in the output.\n" +
                             "First unmatched: {1}"
                            ).format(len(result.expectedLines), result.expectedLines[0])

# This class encapsulates the parsing and evaluation of the test patterns occurring in the testcases.yml file
class TestPattern:
  # maps a type (specified in {{...}} expressions) to a regular expression
  typeTable = {
    "integer" : r"\s*-?[0-9]+",
    "float" : r"\s*-?([0-9]*\.[0-9]+|[0-9]+)(e[+-]?[0-9]+)?"
  }
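  # For example, the "integer" regex above accepts strings like "42" or " -7", and the
  # "float" regex accepts "0.5", "-3.25" or "1e-4" (the exponent part is optional).
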
  def __init__(self, patternText):
    self.patternText = str(patternText)
    if len(patternText) == 0:
      raise Exception("Empty pattern")
    if patternText[0] == '^':
      patternText = patternText[1:]
      prefix = "^"
    else:
      prefix = ".*?"

    # After parsing, this will be a list of tuples (dataType, tolerance) for each {{...}} section, from left to right
    self.groupInfo = []

    # Transforming our pattern into a single regular expression,
    # processing {{...}} fragments and escaping all regex special characters
    self.regexText = prefix + re.sub(r"(\{\{[^}]+\}\}|[\[\]\.\*\+\{\}\(\)\$\^\\\|\?])", self.patternParse, patternText)
    # Compiling it to perform a check (fail-fast) and for faster matching later
    self.regex = re.compile(self.regexText)
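
    # For illustration (hypothetical pattern text): a pattern such as
    #   Finished Epoch[{{integer}}]: TrainLossPerSample = {{float,tolerance=0.1%}}
    # ends up as a single regex of roughly this shape:
    #   .*?Finished Epoch\[(?P<G0>\s*-?[0-9]+)\]: TrainLossPerSample = (?P<G1>\s*-?([0-9]*\.[0-9]+|[0-9]+)(e[+-]?[0-9]+)?)
    # with self.groupInfo == [("integer", 0.0), ("float", -0.001)]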

  # this is a callback method passed to the re.sub call above - it performs the core parsing logic
  def patternParse(self, match):
    fragment = match.group(1)
    if len(fragment) == 1:
      # this is a special regex character - escape it
      return "\\" + fragment
    else:
      # parsing {{...}} expressions
      m = re.match(r"{{(integer|float)(,tolerance=([-0-9\.e]*)(%?))?}}", fragment)
      dataType = m.group(1)
      if m.group(3):
        tolerance = float(m.group(3))
        if m.group(4) == "%":
          # using a minus sign to indicate that it is a relative value
          tolerance = -tolerance / 100.0
      else:
        tolerance = 0.0
      # saving information about the data type and tolerance
      self.groupInfo.append((dataType, tolerance))
      # converting this to a regex which matches the specific type
      # All {{...}} sections are converted to regex groups named G0, G1, G2, ...
      return "(?P<G{0}>{1})".format(len(self.groupInfo) - 1, TestPattern.typeTable[dataType])

  # Checks whether the given line matches this pattern
  # returns True or False
  def match(self, line):
    return self.regex.match(line) is not None

  # Compares a line from the baseline log and a line from the real output against this pattern
  # returns True or False
  def compare(self, expected, actual):
    em = self.regex.match(expected)
    am = self.regex.match(actual)
    if em is None and am is None:
      return True
    if em is None or am is None:
      return False

    # checking every {{...}} group in turn; any mismatch fails the comparison
    for i in range(0, len(self.groupInfo)):
      dataType, tolerance = self.groupInfo[i]
      groupId = "G" + str(i)
      expectedText = em.group(groupId).strip()
      actualText = am.group(groupId).strip()
      if dataType == "integer":
        if int(expectedText) != int(actualText):
          return False
      elif dataType == "float":
        # a positive tolerance is absolute; a negative one encodes a relative (percentage) tolerance
        epsilon = tolerance if tolerance > 0 else abs(float(expectedText) * tolerance)
        if abs(float(expectedText) - float(actualText)) > epsilon:
          return False
      else:
        return False
    return True
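
  # Worked example (hypothetical numbers): for {{float,tolerance=2%}} the tolerance is stored
  # as -0.02, so comparing an expected value of 1.50 against an actual value of 1.52 uses
  # epsilon = abs(1.50 * -0.02) = 0.03; the difference 0.02 <= 0.03, so the comparison passes.
  # An absolute tolerance such as {{float,tolerance=0.01}} is stored as +0.01 and used directly
  # as epsilon; with no tolerance given, the values must match exactly.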

class TestRunResult:
  def __init__(self):
    self.succeeded = False
    self.testCaseRunResults = [] # list of TestCaseRunResult

  @staticmethod
  def fatalError(name, diagnostics, logFile = None):
    r = TestRunResult()
    r.testCaseRunResults.append(TestCaseRunResult(name, False, diagnostics))
    r.logFile = logFile
    return r

class TestCaseRunResult:
  def __init__(self, testCaseName, succeeded, diagnostics = None):
    self.testCaseName = testCaseName
    self.succeeded = succeeded
    self.diagnostics = diagnostics
    self.expectedLines = [] # list of remaining unmatched expected lines from the baseline file for this test case run

# Lists all available tests
def listCommand(args):
  for t in Test.allTestsIndexedByFullName.values():
    print t.fullName

# Runs the given test(s) or all tests
def runCommand(args):
  if len(args.test) > 0:
    testsToRun = []
    for name in args.test:
      if name.lower() in Test.allTestsIndexedByFullName:
        testsToRun.append(Test.allTestsIndexedByFullName[name.lower()])
      else:
        print >>sys.stderr, "ERROR: test not found", name
        return 1
  else:
    testsToRun = Test.allTestsIndexedByFullName.values()
  devices = ["cpu", "gpu"]
  if args.device:
    args.device = args.device.lower()
    if args.device not in devices:
      print >>sys.stderr, "--device must be one of", devices
      return 1
    devices = [args.device]

  flavors = ["debug", "release"]
  if args.flavor:
    args.flavor = args.flavor.lower()
    if args.flavor not in flavors:
      print >>sys.stderr, "--flavor must be one of", flavors
      return 1
    flavors = [args.flavor]

  print "CNTK Test Driver started"
  print "Running tests: ", " ".join([y.fullName for y in testsToRun])
  print "Build location: ", args.build_location
  print "Run location: ", args.run_dir
  print "Flavors: ", " ".join(flavors)
  print "Devices: ", " ".join(devices)
  if args.update_baseline:
    print "*** Running in automatic baseline update mode ***"
  print ""
  succeededCount, totalCount = 0, 0
  for test in testsToRun:
    for flavor in flavors:
      for device in devices:
        totalCount = totalCount + 1
        # Printing the test which is about to run (without terminating the line)
        sys.stdout.write("Running test {0} ({1} {2}) - ".format(test.fullName, flavor, device))
        # in verbose mode, terminate the line, since there will be a lot of output
        if args.verbose:
          sys.stdout.write("\n")
        sys.stdout.flush()
        # Running the test and collecting its run results
        result = test.run(flavor, device, args)
        if args.verbose:
          # writing the test name one more time (after possibly long verbose output)
          sys.stdout.write("Test finished {0} ({1} {2}) - ".format(test.fullName, flavor, device))
        if result.succeeded:
          succeededCount = succeededCount + 1
          # in non-verbose mode this will be printed on the same line as 'Running test...'
          print "[OK]"
        else:
          print "[FAILED]"
        # Showing per-test-case results:
        for testCaseRunResult in result.testCaseRunResults:
          if testCaseRunResult.succeeded:
            # Printing 'OK' test cases only in verbose mode
            if args.verbose:
              print(" [OK] " + testCaseRunResult.testCaseName)
          else:
            # 'FAILED' + detailed diagnostics with proper indentation
            print(" [FAILED] " + testCaseRunResult.testCaseName)
            if testCaseRunResult.diagnostics:
              for line in testCaseRunResult.diagnostics.split('\n'):
                print "    " + line

        # In non-verbose mode the log wasn't piped to stdout, so show the log file path for convenience
        if not result.succeeded and not args.verbose and result.logFile:
          print "  See log file for details:", result.logFile

  if args.update_baseline:
    print "{0}/{1} baselines updated, {2} failed".format(succeededCount, totalCount, totalCount - succeededCount)
  else:
    print "{0}/{1} tests passed, {2} failed".format(succeededCount, totalCount, totalCount - succeededCount)
  if succeededCount != totalCount:
    sys.exit(10)

# ======================= Entry point =======================
parser = argparse.ArgumentParser(description="TestDriver - CNTK Test Driver")
subparsers = parser.add_subparsers(help="command to execute. Run TestDriver.py <command> --help for command-specific help")
runSubparser = subparsers.add_parser("run", help="run test(s)")
runSubparser.add_argument("test", nargs="*",
                          help="optional test name(s) to run, specified as Suite/TestName. "
                               "Use the list command to list available tests. "
                               "If not specified then all tests will be run.")
# TODO: port paths to Windows
defaultBuildLocation = os.path.realpath(os.path.join(thisDir, "..", "bin"))
runSubparser.add_argument("-b", "--build-location", default=defaultBuildLocation, help="location of the CNTK build to run")
runSubparser.add_argument("-d", "--device", help="cpu|gpu - run on a specific device")
runSubparser.add_argument("-f", "--flavor", help="release|debug - run only a specific flavor")
# TODO: port paths to Windows
defaultRunDir = os.path.join("/tmp", "cntk-test-{0}.{1}".format(time.strftime("%Y%m%d%H%M%S"), random.randint(0, 1000000)))
runSubparser.add_argument("-r", "--run-dir", default=defaultRunDir, help="directory where to store test output, default: a random dir within /tmp")
runSubparser.add_argument("--update-baseline", action='store_true', help="update baseline file(s) instead of matching against them")
runSubparser.add_argument("-v", "--verbose", action='store_true', help="verbose output - dump all output of the test script")

runSubparser.set_defaults(func=runCommand)

listSubparser = subparsers.add_parser("list", help="list available tests")
listSubparser.set_defaults(func=listCommand)

if len(sys.argv) == 1:
  parser.print_help()
  sys.exit(1)

args = parser.parse_args(sys.argv[1:])

# discover all the tests
Test.discoverAllTests()

# execute the command
args.func(args)
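
# Example invocations (the test name below is hypothetical; use the list command to see
# which tests are actually discovered on this machine):
#   python TestDriver.py list
#   python TestDriver.py run Speech/QuickE2E --device gpu --flavor debug -v
#   python TestDriver.py run --update-baseline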
@ -0,0 +1,42 @@
==== AN4 dataset ====

The contents of the Tests/Speech/Data directory are a modified version of the AN4 dataset, pre-processed and optimized for CNTK end-to-end testing.
The AN4 dataset is part of the CMU audio databases located at http://www.speech.cs.cmu.edu/databases/an4
This modified version of the dataset is distributed under the terms of the AN4 license:

/* ====================================================================
* Copyright (c) 1991-2005 Carnegie Mellon University. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* This work was supported in part by funding from the Defense Advanced
* Research Projects Agency and the National Science Foundation of the
* United States of America, and the CMU Sphinx Speech Consortium.
*
* THIS SOFTWARE IS PROVIDED BY CARNEGIE MELLON UNIVERSITY ``AS IS'' AND
* ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY
* NOR ITS EMPLOYEES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ====================================================================
*/