Undoing backward path fix inherited from another branch

This commit is contained in:
Thilo Will 2016-08-26 11:31:09 +02:00
Parent 3f5f0028aa
Commit 3e41faaa4d
1 changed file: 7 additions and 3 deletions

View file

@ -336,8 +336,11 @@ public:
if (inputIndex == 0) // left derivative
{
// TODO: we might leverage the fact that eg. for dense * sparse input(0)->Gradient() might ideally be represented as block sparse.
// Currently AddMatrixProductOf(..) doesn't support that.
// currently we only support one combination when the input is sparse
// If input data is sparse, then gradient is block sparse.
// BUGBUG: This does not accumulate into the Input(0)->Gradient, which might cause problems elsewhere.
if (Input(1)->Value().GetMatrixType() == SPARSE && Input(0)->Gradient().GetMatrixType() == DENSE && Gradient().GetMatrixType() == DENSE)
Input(0)->Gradient().SwitchToMatrixType(SPARSE, MatrixFormat::matrixFormatSparseBlockCol, false);
auto input0Gradient = OneSampleTensorFor(0, /*gradient=*/true, fr.AllowBroadcast());
auto input1 = OneSampleTensorFor(1, /*gradient=*/false, fr.AllowBroadcast());
auto outputGradient = OneSampleTensorFor(-1, /*gradient=*/true, fr);
@ -345,6 +348,7 @@ public:
}
else if (inputIndex == 1) // right derivative
{
// BUGBUG: Above block has potentially sparse gradient. We should have it here too or remove it completely.
auto input0 = OneSampleTensorFor(0, /*gradient=*/false, fr.AllowBroadcast());
auto input1Gradient = OneSampleTensorFor(1, /*gradient=*/true, fr.AllowBroadcast());
auto outputGradient = OneSampleTensorFor(-1, /*gradient=*/true, fr);