implemented include-path traversal for BrainScript and turned the standard CNTK BrainScript definitions into an include file, CNTK.core.bs;
bug fix for Issue #210: PreComputeNodeBase must infer its layout from its input after loading; ImageReader post-build events now consistently use xcopy
Parent: 2768e7cfb0
Commit: ecc8deb5b5
Makefile | 5
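For illustration only (not part of the commit): after this change, a config that uses the BrainScript network builder picks up the standard macros through an implicit `include 'CNTK.core.bs'`, which DoTrain now prepends and which the lexer resolves by include-path traversal (the directories the config files were read from, then the directory of the CNTK executable). A minimal sketch of such a config, using only macros defined in CNTK.core.bs below; the dimensions and node names are made up for the example:

BrainScriptNetworkBuilder = [
    # hypothetical one-hidden-layer example; Input/SBFF/BFF/CrossEntropyWithSoftmax come from CNTK.core.bs
    featDim = 784 ; hiddenDim = 256 ; labelDim = 10
    features = Input(featDim)                   # tag defaults to 'feature'
    labels   = Input(labelDim, tag='label')
    h  = SBFF(features, hiddenDim, featDim).Eh  # sigmoid hidden layer macro
    z  = BFF(h, labelDim, hiddenDim).z          # affine output layer macro
    ce = CrossEntropyWithSoftmax(labels, z)
    errs = ClassificationError(labels, z)
]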
@@ -574,6 +574,11 @@ $(CNTK): $(BUILDINFO) $(CNTK_OBJ) | $(CNTKMATH_LIB)
@echo building output for $(ARCH) with build type $(BUILDTYPE)
$(CXX) $(LDFLAGS) $(patsubst %,-L%, $(LIBDIR) $(LIBPATH) $(NVMLPATH)) $(patsubst %,$(RPATH)%, $(ORIGINLIBDIR) $(LIBPATH)) -o $@ $^ $(LIBS) -l$(CNTKMATH) -fopenmp

# deployable resources
ALL += $(CNTK).core.bs
$(CNTK).core.bs: $(SOURCEDIR)/CNTK/BrainScript/CNTKCoreLib/CNTK.core.bs
cp -f $^ $@

########################################
# General compile and dependency rules
########################################
@@ -49,9 +49,9 @@ using namespace Microsoft::MSR::CNTK;
// ===========================================================================

// TODO: decide where these should go. Also, do we need three variables?
extern wstring standardFunctions;
extern wstring commonMacros;
extern wstring computationNodes;
//extern wstring standardFunctions;
//extern wstring commonMacros;
//extern wstring computationNodes;

// helper that returns 'float' or 'double' depending on ElemType
template <class ElemType> static const wchar_t* ElemTypeName();
@@ -133,9 +133,15 @@ void DoTrain(const ConfigRecordType& config)
// We prepend a few standard definitions, and also definition of deviceId and precision, which all objects will pull out again when they are being constructed.
// BUGBUG: We are not getting TextLocations right in this way! Do we need to inject location markers into the source? Moot once we fully switch to BS
wstring sourceCode = config.Exists(L"BrainScriptNetworkBuilder") ? config(L"BrainScriptNetworkBuilder") : config(L"ExperimentalNetworkBuilder");
let expr = BS::ParseConfigDictFromString(standardFunctions + computationNodes + commonMacros + msra::strfun::wstrprintf(L"deviceId = %d ; precision = '%ls' ; network = new ComputationNetwork ", (int) deviceId, ElemTypeName<ElemType>()) // TODO: check if typeid needs postprocessing
+ sourceCode,
vector<wstring>()); // source code has the form [ ... ]
//FILE * f = fopen("c:/me/CNTK.core.bs", "wb");
//fprintf(f, "%ls", (standardFunctions + computationNodes + commonMacros).c_str());
//fclose(f);
//wstring boilerplate = standardFunctions + computationNodes + commonMacros;
auto configDirs = ConfigParameters::GetBrainScriptNetworkBuilderIncludePaths();
let expr = BS::ParseConfigDictFromString(L"include \'CNTK.core.bs\'"
+ msra::strfun::wstrprintf(L"deviceId = %d ; precision = '%ls' ; network = new ComputationNetwork ", (int)deviceId, ElemTypeName<ElemType>())
+ sourceCode, // source code has the form [ ... ] with brackets in the string
move(configDirs)); // set include paths to all paths that configs were read from; no additional configurable include paths are supported by BrainScriptNetworkBuilder
createNetworkFn = [expr](DEVICEID_TYPE /*deviceId*/)
{
// evaluate the parse tree--specifically the top-level field 'network'--which will create the network
@@ -3,6 +3,7 @@
#define _CRT_SECURE_NO_WARNINGS // "secure" CRT not available on all platforms --add this at the top of all CPP files that give "function or variable may be unsafe" warnings

#include "BrainScriptParser.h"
#include "File.h"
#include <cstdio>
#include <cstdlib>
#include <cctype>
@@ -32,11 +33,9 @@ SourceFile::SourceFile(wstring location, wstring text)
: path(location), lines(split(text, L"\r\n"))
{
} // from string, e.g. command line
SourceFile::SourceFile(wstring path, const vector<wstring>& includePaths)
SourceFile::SourceFile(wstring path)
: path(path) // from file
{
// ... scan paths
includePaths;
File(path, fileOptionsRead | fileOptionsText).GetLines(lines);
}

@@ -380,6 +379,30 @@ private:
throw LexerException(msg, where.beginLocation);
}

// find a file either at given location or traverse include paths
static wstring FindSourceFile(const wstring& path, const vector<wstring>& includePaths)
{
if (File::Exists(path))
return path;
// non-existent path: scan include paths
// TODO: This is a little weird. Rather, this should be done by the call site.
let fileName = File::FileNameOf(path);
for (let& dir : includePaths)
{
// TODO: We should use the separator that matches the include path.
let newPath = dir + L"/" + fileName;
if (File::Exists(newPath))
return newPath;
}
// not in include path: try EXE directory
let dir = File::DirectoryPathOf(File::GetExecutablePath());
let newPath = dir + L"/" + fileName;
if (File::Exists(newPath))
return newPath;
// not found: return unmodified, let caller fail
return path;
}

Token currentToken;
// consume input characters to form a next token
// - this function mutates the cursor, but does not set currentToken
@@ -405,6 +428,7 @@ private:
{
if (IsInInclude())
{
includePaths.erase(includePaths.begin()); // pop dir of current include file
PopSourceFile();
t = NextToken(); // tail call--the current 't' gets dropped/ignored
t.isLineInitial = true; // eof is a line end
@@ -443,8 +467,9 @@ private:
let nameTok = NextToken(); // must be followed by a string literal
if (nameTok.kind != stringliteral)
Fail(L"'include' must be followed by a quoted string", nameTok);
let path = nameTok.symbol; // TODO: some massaging of the path
PushSourceFile(SourceFile(path, includePaths)); // current cursor is right after the pathname; that's where we will pick up later
let path = FindSourceFile(nameTok.symbol, includePaths);
PushSourceFile(SourceFile(path)); // current cursor is right after the pathname; that's where we will pick up later
includePaths.insert(includePaths.begin(), File::DirectoryPathOf(path));
return NextToken();
}
}
@@ -940,11 +965,12 @@ ExpressionPtr ParseConfigDictFromString(wstring text, vector<wstring>&& includeP
{
return Parse(SourceFile(L"(command line)", text), move(includePaths));
}
ExpressionPtr ParseConfigDictFromFile(wstring path, vector<wstring>&& includePaths)
{
auto sourceFile = SourceFile(path, includePaths);
return Parse(move(sourceFile), move(includePaths));
}
//ExpressionPtr ParseConfigDictFromFile(wstring path, vector<wstring> includePaths)
//{
// auto sourceFile = SourceFile(path); // note: no resolution against include paths done here
// includePaths.insert(includePaths.begin(), File::DirectoryPathOf(path)); // must include our own path for nested include statements
// return Parse(move(sourceFile), move(includePaths));
//}
ExpressionPtr ParseConfigExpression(const wstring& sourceText, vector<wstring>&& includePaths)
{
auto parser = Parser(SourceFile(L"(command line)", sourceText), move(includePaths));
@@ -952,4 +978,5 @@ ExpressionPtr ParseConfigExpression(const wstring& sourceText, vector<wstring>&&
parser.VerifyAtEnd();
return expr;
}
} } } // namespaces

}}}
@@ -23,7 +23,7 @@ struct SourceFile // content of one source file (only in this header because Te
/*const*/ wstring path; // where it came from
/*const*/ vector<wstring> lines; // source code lines
SourceFile(wstring location, wstring text); // from string, e.g. command line
SourceFile(wstring path, const vector<wstring>& includePaths); // from file
SourceFile(wstring path); // from file
};

struct TextLocation // position in the text. Lightweight value struct that we can copy around, even into dictionaries etc., for error messages
@@ -135,6 +135,7 @@ typedef Expression::ExpressionPtr ExpressionPtr; // circumvent some circular def

// access the parser through one of these functions
ExpressionPtr ParseConfigDictFromString(wstring text, vector<wstring>&& includePaths); // parses a list of dictionary members, returns a dictionary expression
ExpressionPtr ParseConfigDictFromFile(wstring path, vector<wstring>&& includePaths); // likewise, but from a file path
//ExpressionPtr ParseConfigDictFromFile(wstring path, vector<wstring> includePaths); // likewise, but from a file path
ExpressionPtr ParseConfigExpression(const wstring& sourceText, vector<wstring>&& includePaths); // parses a single expression from sourceText, which is meant to contain an include statement, hence includePaths
} } } // namespaces

}}}
@@ -0,0 +1,108 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
// CNTK.core.bs -- core BrainScript library including both general and CNTK-specific definitions
//

##############################################################################
# standard functions
##############################################################################

Print(value, format='') = new PrintAction [ what = value /*; how = format*/ ]
Debug(value, say = '', enabled = true) = new Debug [ /*macro arg values*/ ]
Format(value, format) = new StringFunction [ what = 'Format' ; arg = value ; how = format ]
Replace(s, from, to) = new StringFunction [ what = 'Replace' ; arg = s ; replacewhat = from ; withwhat = to ]
Substr(s, begin, num) = new StringFunction [ what = 'Substr' ; arg = s ; pos = begin ; chars = num ]
Chr(c) = new StringFunction [ what = 'Chr' ; arg = c ]
Floor(x) = new NumericFunction [ what = 'Floor' ; arg = x ]
Length(x) = new NumericFunction [ what = 'Length' ; arg = x ]
Ceil(x) = -Floor(-x)
Round(x) = Floor(x+0.5)
Abs(x) = if x >= 0 then x else -x
Sign(x) = if x > 0 then 1 else if x < 0 then -1 else 0
Min(a,b) = if a < b then a else b
Max(a,b) = if a > b then a else b
Fac(n) = if n > 1 then Fac(n-1) * n else 1

##############################################################################
# ComputationNodes
##############################################################################

LearnableParameter(rows, cols, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ dims = (rows : cols) ] /*plus the function args*/ ]
Parameter = LearnableParameter // deprecated
ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedValue|gaussian|fromFile*/, initValueScale = 1, value = 0, initFromFilePath = '', initOnCPUOnly=true, randomSeed=-1, tag='') = new ComputationNode [ operation = 'LearnableParameter' ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
Input(dims, tag='feature') = new ComputationNode [ operation = 'InputValue' ; shape = new TensorShape [ /*dims*/ ] ; isImage = false /*plus the function args*/ ]
SparseInput(dims, tag='feature') = new ComputationNode [ operation = 'SparseInputValue' ; shape = new TensorShape [ /*dims*/ ] ; isImage = false /*plus the function args*/ ]
ImageInput(imageWidth, imageHeight, imageChannels, imageLayout='CHW', tag='feature') = new ComputationNode [ operation = 'InputValue' ; isImage = true /*plus the function args*/ ]
SparseImageInput(imageWidth, imageHeight, imageChannels, imageLayout='CHW', tag='feature') = new ComputationNode [ operation = 'SparseInputValue' ; isImage = true /*plus the function args*/ ]
Constant(val, rows = 1, cols = 1, tag='') = Parameter(rows, cols, learningRateMultiplier = 0, init = 'fixedValue', value = val)
PastValue(dims, input, timeStep = 1, defaultHiddenActivation = 0.1, tag='') = new ComputationNode [ operation = 'PastValue' ; inputs = input ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
FutureValue(dims, input, timeStep = 1, defaultHiddenActivation = 0.1, tag='') = new ComputationNode [ operation = 'FutureValue' ; inputs = input ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
Shift(input, fromOffset, boundaryValue, boundaryMode=-1/*context*/, dim=-1, tag='') = new ComputationNode [ operation = 'Shift' ; inputs = (input : boundaryValue) /*plus the function args*/ ]
RowSlice(startIndex, numRows, input, tag='') = new ComputationNode [ operation = 'RowSlice' ; inputs = input /*plus the function args*/ ]
RowRepeat(input, numRepeats, tag='') = new ComputationNode [ operation = 'RowRepeat' ; inputs = input /*plus the function args*/ ]
RowStack(inputs, tag='') = new ComputationNode [ operation = 'RowStack' /*plus the function args*/ ]
Reshape(input, numRows, imageWidth = 0, imageHeight = 0, imageChannels = 0, tag='') = new ComputationNode [ operation = 'LegacyReshape' ; inputs = input /*plus the function args*/ ]
NewReshape(input, dims, beginDim=0, endDim=0, tag='') = new ComputationNode [ operation = 'Reshape' ; inputs = input ; shape = new TensorShape [ /*dims*/ ] /*plus the function args*/ ]
ReshapeDimension(x, dim, tensorShape) = NewReshape(x, tensorShape, beginDim=dim, endDim=dim + 1)
FlattenDimensions(x, dim, num) = NewReshape(x, 0, beginDim=dim, endDim=dim + num)
SplitDimension(x, dim, N) = ReshapeDimension(x, dim, 0:N)
TransposeDimensions(input, dim1, dim2, tag='') = new ComputationNode [ operation = 'TransposeDimensions' ; inputs = input /*plus the function args*/ ]
Transpose(x) = TransposeDimensions(x, 1, 2)
Times(A, B, outputRank=1, tag='') = new ComputationNode [ operation = 'Times' ; inputs = ( A : B ) /*plus the function args*/ ]
Logistic(label, probability, tag='') = new ComputationNode [ operation = 'Logistic' ; inputs = (label : probability) /*plus the function args*/ ]
WeightedLogistic(label, probability, instanceWeight, tag='') = new ComputationNode [ operation = 'Logistic' ; inputs = (label : probability : instanceWeight) /*plus the function args*/ ]
ReconcileMBLayout(dataInput, layoutInput, tag='') = new ComputationNode [ operation = 'ReconcileMBLayout' ; inputs = (dataInput : layoutInput) /*plus the function args*/ ]
Convolution(weightNode, inputValueNode, kernelWidth, kernelHeight, outputChannels, horizontalSubsample, verticalSubsample, zeroPadding = false, maxTempMemSizeInSamples = 0, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'Convolution' ; inputs = (weightNode : inputValueNode) /*plus the function args*/ ]
MaxPooling(input, windowWidth, windowHeight, horizontalSubsample, verticalSubsample, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'MaxPooling' ; inputs = input /*plus the function args*/ ]
AveragePooling(input, windowWidth, windowHeight, horizontalSubsample, verticalSubsample, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'AveragePooling' ; inputs = input /*plus the function args*/ ]
ColumnwiseCrossProduct = KhatriRaoProduct // deprecated
ClassificationError = ErrorPrediction
Delay = PastValue
BatchNormalization(input, scale, bias, runMean, runInvStdDev, eval, spatial, normalizationTimeConstant = 0, epsilon = 0.00001, useCntkEngine = true, imageLayout='CHW', tag='') = new ComputationNode [ operation = 'BatchNormalization' ; inputs = (input : scale : bias : runMean : runInvStdDev) /*plus the function args*/ ]
ClassBasedCrossEntropyWithSoftmax(labelClassDescriptorVectorSequence, mainInputInfo, mainWeight, classLogProbsBeforeSoftmax, tag='') = new ComputationNode [ operation = 'ClassBasedCrossEntropyWithSoftmax' ; inputs = (labelClassDescriptorVectorSequence : mainInputInfo : mainWeight : classLogProbsBeforeSoftmax) /*plus the function args*/ ]
ColumnElementTimes(aVectorSequence, anotherVectorSequence, tag='') = new ComputationNode [ operation = 'ColumnElementTimes' ; inputs = (aVectorSequence : anotherVectorSequence) /*plus the function args*/ ]
CosDistance(aVectorSequence, anotherVectorSequence, tag='') = new ComputationNode [ operation = 'CosDistance' ; inputs = (aVectorSequence : anotherVectorSequence) /*plus the function args*/ ]
CosDistanceWithNegativeSamples(aVectorSequence, anotherVectorSequence, numShifts, numNegSamples, tag='') = new ComputationNode [ operation = 'CosDistanceWithNegativeSamples' ; inputs = (aVectorSequence : anotherVectorSequence : numShifts : numNegSamples) /*plus the function args*/ ]
Cosine(x, tag='') = new ComputationNode [ operation = 'Cosine' ; inputs = x /*plus the function args*/ ]
CrossEntropy(refProbVectorSequence, outProbVectorSequence, tag='') = new ComputationNode [ operation = 'CrossEntropy' ; inputs = (refProbVectorSequence : outProbVectorSequence) /*plus the function args*/ ]
CrossEntropyWithSoftmax(labelVectorSequence, outProbVectorSequence, tag='') = new ComputationNode [ operation = 'CrossEntropyWithSoftmax' ; inputs = (labelVectorSequence : outProbVectorSequence) /*plus the function args*/ ]
DiagTimes(diagonalMatrixAsColumnVector, matrix, tag='') = new ComputationNode [ operation = 'DiagTimes' ; inputs = (diagonalMatrixAsColumnVector : matrix) /*plus the function args*/ ]
Dropout(activationVectorSequence, tag='') = new ComputationNode [ operation = 'Dropout' ; inputs = activationVectorSequence /*plus the function args*/ ]
ElementTimes(aMatrix, anotherMatrix, tag='') = new ComputationNode [ operation = 'ElementTimes' ; inputs = (aMatrix : anotherMatrix) /*plus the function args*/ ]
ErrorPrediction(labelVectorSequence, outVectorSequence, tag='') = new ComputationNode [ operation = 'ErrorPrediction' ; inputs = (labelVectorSequence : outVectorSequence) /*plus the function args*/ ]
Exp(x, tag='') = new ComputationNode [ operation = 'Exp' ; inputs = x /*plus the function args*/ ]
GMMLogLikelihood(unnormalizedPriorVector, meansAsRows, logStdDevAsRows, dataVectorSequence, tag='') = new ComputationNode [ operation = 'GMMLogLikelihood' ; inputs = (unnormalizedPriorVector : meansAsRows : logStdDevAsRows : dataVectorSequence) /*plus the function args*/ ]
InvStdDev(dataVectorSequence, tag='') = new ComputationNode [ operation = 'InvStdDev' ; inputs = dataVectorSequence /*plus the function args*/ ]
KhatriRaoProduct(leftMatrix, rightMatrix, tag='') = new ComputationNode [ operation = 'KhatriRaoProduct' ; inputs = (leftMatrix : rightMatrix) /*plus the function args*/ ]
Log(x, tag='') = new ComputationNode [ operation = 'Log' ; inputs = x /*plus the function args*/ ]
LogSoftmax(z, tag='') = new ComputationNode [ operation = 'LogSoftmax' ; inputs = z /*plus the function args*/ ]
MatrixL1Reg(matrix, tag='') = new ComputationNode [ operation = 'MatrixL1Reg' ; inputs = matrix /*plus the function args*/ ]
MatrixL2Reg(matrix, tag='') = new ComputationNode [ operation = 'MatrixL2Reg' ; inputs = matrix /*plus the function args*/ ]
Mean(dataVectorSequence, tag='') = new ComputationNode [ operation = 'Mean' ; inputs = dataVectorSequence /*plus the function args*/ ]
Minus(leftMatrix, rightMatrix, tag='') = new ComputationNode [ operation = 'Minus' ; inputs = (leftMatrix : rightMatrix) /*plus the function args*/ ]
Negate(input, tag='') = new ComputationNode [ operation = 'Negate' ; inputs = input /*plus the function args*/ ]
PerDimMeanVarDeNormalization(dataVectorSequence, meanVector, invStdDevVector, tag='') = new ComputationNode [ operation = 'PerDimMeanVarDeNormalization' ; inputs = (dataVectorSequence : meanVector : invStdDevVector) /*plus the function args*/ ]
PerDimMeanVarNormalization(dataVectorSequence, meanVector, invStdDevVector, tag='') = new ComputationNode [ operation = 'PerDimMeanVarNormalization' ; inputs = (dataVectorSequence : meanVector : invStdDevVector) /*plus the function args*/ ]
Plus(leftMatrix, rightMatrix, tag='') = new ComputationNode [ operation = 'Plus' ; inputs = (leftMatrix : rightMatrix) /*plus the function args*/ ]
RectifiedLinear(z, tag='') = new ComputationNode [ operation = 'RectifiedLinear' ; inputs = z /*plus the function args*/ ]
Scale(scalarScalingFactor, matrix, tag='') = new ComputationNode [ operation = 'Scale' ; inputs = (scalarScalingFactor : matrix) /*plus the function args*/ ]
Sigmoid(z, tag='') = new ComputationNode [ operation = 'Sigmoid' ; inputs = z /*plus the function args*/ ]
Softmax(z, tag='') = new ComputationNode [ operation = 'Softmax' ; inputs = z /*plus the function args*/ ]
Hardmax(z, tag='') = new ComputationNode [ operation = 'Hardmax' ; inputs = z /*plus the function args*/ ]
SquareError(aMatrix, anotherMatrix, tag='') = new ComputationNode [ operation = 'SquareError' ; inputs = (aMatrix : anotherMatrix) /*plus the function args*/ ]
SumColumnElements(z, tag='') = new ComputationNode [ operation = 'SumColumnElements' ; inputs = z /*plus the function args*/ ]
SumElements(matrix, tag='') = new ComputationNode [ operation = 'SumElements' ; inputs = matrix /*plus the function args*/ ]
Tanh(z, tag='') = new ComputationNode [ operation = 'Tanh' ; inputs = z /*plus the function args*/ ]
TimeReverse(vectorSequence, tag='') = new ComputationNode [ operation = 'TimeReverse' ; inputs = vectorSequence /*plus the function args*/ ]
TransposeTimes(leftMatrix, rightMatrix, tag='') = new ComputationNode [ operation = 'TransposeTimes' ; inputs = (leftMatrix : rightMatrix) /*plus the function args*/ ]

##############################################################################
# common macros
##############################################################################

BFF(in, rows, cols) = [ B = Parameter(rows, 1, init = 'fixedValue', value = 0) ; W = Parameter(rows, cols) ; z = W*in+B ]
SBFF(in, rows, cols) = [ Eh = Sigmoid(BFF(in, rows, cols).z) ]
MeanVarNorm(feat) = PerDimMeanVarNormalization(feat, Mean(feat), InvStdDev(feat))
LogPrior(labels) = Log(Mean(labels))
@@ -1,3 +1,4 @@
#if 0 // this entire file can be removed once CNTK.core.bs works
// ExperimentalNetworkBuilder.cpp -- interface to new version of NDL (and config) parser --fseide

#define _CRT_NONSTDC_NO_DEPRECATE // make VS accept POSIX functions without _
@@ -130,3 +131,4 @@ L"ParameterTensor(dims, learningRateMultiplier = 1.0, init = 'uniform'/*|fixedVa
//BinaryStandardNode(ParallelNode)
//BinaryStandardNode(StrideTimesNode)
;
#endif
@@ -391,9 +391,9 @@ static wstring PathToBSStringLiteral(const wstring& path) // quote a pathname fo
}

// TODO: decide where these should go. Also, do we need three variables?
extern wstring standardFunctions;
extern wstring commonMacros;
extern wstring computationNodes;
//extern wstring standardFunctions;
//extern wstring commonMacros;
//extern wstring computationNodes;

int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapper that catches & reports Win32 exceptions
{
@@ -435,7 +435,8 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wrapp

// compile the BrainScript
wstring bs = L"[\n";
bs += standardFunctions + computationNodes + commonMacros + L"\n"; // start with standard macros
bs += L"include \'CNTK.core.bs'"; // start with including the standard macros
//bs += standardFunctions + computationNodes + commonMacros + L"\n";
for (const auto& sourceFile : sourceFiles)
bs += L"include " + PathToBSStringLiteral(sourceFile) + L"\n";
bs += L"\n]\n";
@@ -122,11 +122,19 @@
<Link>
<DelayLoadDLLs>Math.dll; msmpi.dll;</DelayLoadDLLs>
</Link>
<PostBuildEvent>
<Message Condition="'$(Configuration)|$(Platform)'=='Release_CpuOnly|x64'">Copying dependencies</Message>
<Command Condition="'$(Configuration)|$(Platform)'=='Release_CpuOnly|x64'">xcopy /I /D /Y $(ProjectDir)BrainScript\CNTKCoreLib\CNTK.core.bs $(TargetDir)</Command>
</PostBuildEvent>
<PostBuildEvent>
<Message Condition="'$(Configuration)|$(Platform)'=='Debug_CpuOnly|x64'">Copying dependencies</Message>
<Command Condition="'$(Configuration)|$(Platform)'=='Debug_CpuOnly|x64'">xcopy /I /D /Y $(ProjectDir)BrainScript\CNTKCoreLib\CNTK.core.bs $(TargetDir)</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="$(GpuBuild)">
<PostBuildEvent>
<Command>if exist "%ProgramW6432%\NVIDIA Corporation\NVSMI" xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
<Message>Copying NVidia GDK extension DLL to target folder</Message>
<Command>xcopy /I /D /Y $(ProjectDir)BrainScript\CNTKCoreLib\CNTK.core.bs $(TargetDir) && if exist "%ProgramW6432%\NVIDIA Corporation\NVSMI" xcopy /I /D /Y "%ProgramW6432%\NVIDIA Corporation\NVSMI\nvml*.dll" $(TargetDir)</Command>
<Message>Copying dependencies</Message>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemGroup>
@@ -211,6 +219,17 @@
<ClCompile Include="tests.cpp" />
</ItemGroup>
<ItemGroup>
<Xml Include="BrainScript\CNTKCoreLib\CNTK.core.bs">
<FileType>Document</FileType>
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</DeploymentContent>
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Debug_CpuOnly|x64'">true</DeploymentContent>
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</DeploymentContent>
<DeploymentContent Condition="'$(Configuration)|$(Platform)'=='Release_CpuOnly|x64'">true</DeploymentContent>
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug_CpuOnly|x64'">false</ExcludedFromBuild>
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release_CpuOnly|x64'">false</ExcludedFromBuild>
</Xml>
<None Include="BrainScript\Doc\BrainScript--extending the CNTK config language, Frank Seide August 2015.pptx" />
<None Include="prebuild.bat" />
</ItemGroup>
@@ -257,6 +257,9 @@
<Filter Include="BrainScript\Doc">
<UniqueIdentifier>{23e7cd74-fd60-4fb4-a925-c3dea584f176}</UniqueIdentifier>
</Filter>
<Filter Include="BrainScript\CNTKCoreLib">
<UniqueIdentifier>{899f31fa-5906-4485-8875-14ad2c43ed8f}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<None Include="prebuild.bat">
@@ -266,4 +269,9 @@
<Filter>BrainScript\Doc</Filter>
</None>
</ItemGroup>
<ItemGroup>
<Xml Include="BrainScript\CNTKCoreLib\CNTK.core.bs">
<Filter>BrainScript\CNTKCoreLib</Filter>
</Xml>
</ItemGroup>
</Project>
@@ -62,6 +62,8 @@ std::string ConfigParameters::ParseCommandLine(int argc, wchar_t* argv[], Config
// if haven't already read this file, read it
resolvedConfigFiles.push_back(filePath);
configString += config.ReadConfigFile(filePath);
// remember all config directories, for use as include paths by BrainScriptNetworkBuilder
GetBrainScriptNetworkBuilderIncludePaths().push_back(File::DirectoryPathOf(msra::strfun::utf16(filePath)));
}
else
RuntimeError("Cannot specify same config file multiple times at the command line.");
@@ -305,4 +307,5 @@ void TrimQuotes(std::string& str)
if (str.front() == '"' && str.back() == '"')
str = str.substr(1, str.size() - 2);
}
} } }

}}}
@@ -17,6 +17,8 @@
#ifdef _WIN32
#define NOMINMAX
#include "Windows.h"
#include <Pathcch.h>
#pragma comment(lib, "Pathcch.lib")
#endif
#ifdef __unix__
#include <unistd.h>
@@ -140,6 +142,63 @@ void File::Init(const wchar_t* filename, int fileOptions)
});
}

// determine the directory for a given pathname
// (wstring only for now; feel free to make this a template if needed)
/*static*/ wstring File::DirectoryPathOf(wstring path)
{
#if WIN32
auto hr = PathCchRemoveFileSpec(&path[0], path.size());
if (hr == S_OK) // done
path.resize(wcslen(&path[0]));
else if (hr == S_FALSE) // nothing to remove: use .
path = L".";
#else
auto pos = path.find_last_of(L"/");
if (pos != path.npos)
path.erase(pos - 1);
else // if no directory path at all, use current directory
return L".";
#endif
return path;
}

// determine the file name for a given pathname
// (wstring only for now; feel free to make this a template if needed)
/*static*/ wstring File::FileNameOf(wstring path)
{
#if WIN32
static const wstring delim = L"\\:/";
#else
static const wstring delim = L"/";
#endif
auto pos = path.find_last_of(delim);
if (pos != path.npos)
return path.substr(pos + 1);
else // no directory path
return path;
}

// get path of current executable
/*static*/ wstring File::GetExecutablePath()
{
#if WIN32
wchar_t path[33000];
if (GetModuleFileNameW(NULL, path, _countof(path)) == 0)
LogicError("GetExecutablePath: GetModuleFileNameW() unexpectedly failed.");
return path;
#else
// from http://stackoverflow.com/questions/4025370/can-an-executable-discover-its-own-path-linux
pid_t pid = getpid();
char path[PATH_MAX];
sprintf(path, "/proc/%d/exe", pid);
char dest[PATH_MAX];
if (readlink(path, dest, PATH_MAX) == -1)
RuntimeError("GetExecutableDirectory: readlink() call failed.");
else
return msra::strfun::utf16(dest);
#endif
}

// skip to given delimiter character
void File::SkipToDelimiter(int delim)
{
@@ -1198,6 +1198,13 @@ public:

static std::string ParseCommandLine(int argc, wchar_t* argv[], ConfigParameters& config);

// support for BrainScriptNetworkBuilder: It needs the config directories in order to know where to include files from.
static vector<wstring>& GetBrainScriptNetworkBuilderIncludePaths()
{
static vector<wstring> allConfigDirs;
return allConfigDirs;
}

// dump for debugging purposes
void dump() const
{
@@ -1547,4 +1554,5 @@ public:
typedef argvector<int> intargvector;
typedef argvector<float> floatargvector;
typedef argvector<std::wstring> stringargvector;
} } }

}}}
@@ -122,13 +122,6 @@ public:
bool IsWhiteSpace(bool skip = false);
int EndOfLineOrEOF(bool skip = false);

// test whether a file exists
template<class String>
static bool Exists(const String& filename);

// make intermediate directories
template<class String>
static void MakeIntermediateDirs(const String& filename);

// TryGetText - for text value, try and get a particular type
// returns - true if value returned, otherwise false, can't parse
@@ -143,6 +136,22 @@ public:
void GetLines(std::vector<std::wstring>& lines);
void GetLines(std::vector<std::string>& lines);

// static helpers
// test whether a file exists
template<class String>
static bool Exists(const String& filename);

// make intermediate directories
template<class String>
static void MakeIntermediateDirs(const String& filename);

// determine the directory and naked file name for a given pathname
static std::wstring DirectoryPathOf(std::wstring path);
static std::wstring FileNameOf(std::wstring path);

// get path of current executable
static std::wstring File::GetExecutablePath();

// put operator for basic types
template <typename T>
File& operator<<(T val)
@@ -40,7 +40,7 @@ public:
// interface through which this node is operated on are these two functions

// check whether node has already undergone precomputation
virtual bool /*IPreComputeNode::*/ HasComputed() const override { m_hasComputed; }
virtual bool /*IPreComputeNode::*/ HasComputed() const override { return m_hasComputed; }

// call this with 'false' at start and with 'true' at end
// This is used for resetting and updating from accumulators.
@@ -89,10 +89,12 @@ public:
InvalidArgument("%ls %ls operation requires its input to come in minibatches of samples.", NodeName().c_str(), OperationName().c_str());
m_pMBLayout = nullptr; // this node does not hold mini-batch data

if (!m_hasComputed) // this node retains state, and state gets destroyed by Resize(), so we must be careful
//if (!m_hasComputed) // this node retains state, and state gets destroyed by Resize(), so we must be careful
SetDims(Input(0)->GetSampleLayout(), false);
else if (!GetSampleLayout().IsElementwiseCompatibleWith(Input(0)->GetSampleLayout()))
InvalidArgument("%ls %ls operation: Precomputed parameter does not match input dimensions.", NodeName().c_str(), OperationName().c_str());
//else if (!GetSampleLayout().IsElementwiseCompatibleWith(Input(0)->GetSampleLayout()))
// InvalidArgument("%ls %ls operation: Precomputed parameter does not match input dimensions.", NodeName().c_str(), OperationName().c_str());
// BUGBUG: Above is a workaround, which may be OK since m_hasComputed getting set requires Validate() to have passed.
// This workaround won't guard against corrupt files.
}

virtual void CopyTo(ComputationNodeBasePtr nodeP, const std::wstring& newName, const CopyNodeFlags flags) const override
@@ -80,10 +80,10 @@
<AdditionalDependencies>ReaderLib.lib;Math.lib;$(OpenCVLib);$(ZipLibs);%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<PostBuildEvent>
<Command>if "$(HasOpenCv)" == "true" xcopy /Y "$(OPENCV_PATH)\x64\vc12\bin\opencv_world300.dll" $(TargetDir)
if "$(UseZip)" == "true" copy /Y "$(ZLIB_PATH)\bin\zip.dll" "$(TargetDir)"
if "$(UseZip)" == "true" if exist "$(ZLIB_PATH)\bin\zlib1.dll" (copy /Y "$(ZLIB_PATH)\bin\zlib1.dll" "$(TargetDir)") else (copy /Y "$(ZLIB_PATH)\bin\zlib.dll" "$(TargetDir)\zlib1.dll")
</Command>
<Command>if "$(HasOpenCv)" == "true" xcopy /I /D /Y "$(OPENCV_PATH)\x64\vc12\bin\opencv_world300.dll" $(TargetDir)
if "$(UseZip)" == "true" xcopy /I /D /Y "$(ZLIB_PATH)\bin\zip.dll" "$(TargetDir)"
if "$(UseZip)" == "true" if exist "$(ZLIB_PATH)\bin\zlib1.dll" (xcopy /I /D /Y "$(ZLIB_PATH)\bin\zlib1.dll" "$(TargetDir)") else (xcopy /I /D /Y "$(ZLIB_PATH)\bin\zlib.dll" "$(TargetDir)\zlib1.dll")
</Command>
<Message>Copying dependencies</Message>
</PostBuildEvent>
</ItemDefinitionGroup>