It doesn't make sense to complain about input not being batch-sized if
it isn't even 4-d in the first place.
This commit is contained in:
Jonathan L Long 2014-07-18 16:32:12 -07:00
Parent fc48d5de5c
Commit 32fefa96f9
1 changed file: 4 additions and 4 deletions

View file

@ -57,10 +57,10 @@ def _Net_forward(self, blobs=None, **kwargs):
# Set input according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for in_, blob in kwargs.iteritems():
if blob.shape[0] != self.blobs[in_].num:
raise Exception('Input is not batch sized')
if blob.ndim != 4:
raise Exception('{} blob is not 4-d'.format(in_))
if blob.shape[0] != self.blobs[in_].num:
raise Exception('Input is not batch sized')
self.blobs[in_].data[...] = blob
self._forward()
@ -91,10 +91,10 @@ def _Net_backward(self, diffs=None, **kwargs):
# Set top diffs according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for top, diff in kwargs.iteritems():
if diff.shape[0] != self.blobs[top].num:
raise Exception('Diff is not batch sized')
if diff.ndim != 4:
raise Exception('{} diff is not 4-d'.format(top))
if diff.shape[0] != self.blobs[top].num:
raise Exception('Diff is not batch sized')
self.blobs[top].diff[...] = diff
self._backward()