From 7112dd78f0ea9fcde7021df0e78666ada48f0de3 Mon Sep 17 00:00:00 2001
From: thefiddler
Date: Mon, 15 Jan 2018 18:44:45 +0100
Subject: [PATCH] Avoid crash when linear activation does not have alpha and beta defined (#306)

---
 nnvm/python/nnvm/frontend/keras.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nnvm/python/nnvm/frontend/keras.py b/nnvm/python/nnvm/frontend/keras.py
index 9b3540ae..4314a496 100644
--- a/nnvm/python/nnvm/frontend/keras.py
+++ b/nnvm/python/nnvm/frontend/keras.py
@@ -27,8 +27,10 @@ def _convert_activation(insym, keras_layer, _):
     if act_type == 'linear':
         if isinstance(keras_layer, str):
             return insym
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        beta = keras_layer.beta if hasattr(keras_layer, "beta") else 0
         return _sym.__add_scalar__(_sym.__mul_scalar__(insym, \
-            scalar=keras_layer.alpha), scalar=keras_layer.beta)
+            scalar=alpha), scalar=beta)
     elif act_type == 'softmax':
         return _sym.softmax(insym)
     elif act_type == 'sigmoid':
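
Illustration (not part of the patch): a minimal sketch of the failure mode this change guards against. A plain Keras Activation('linear') layer carries no alpha or beta attributes, so the pre-patch attribute access raised AttributeError; with the fallbacks alpha=1 and beta=0, the linear activation reduces to the identity. The keras.layers import and the layer variable below are illustrative only.

    # Sketch assuming a stock Keras installation; shows the guarded
    # attribute access the patch introduces, outside of NNVM.
    from keras.layers import Activation

    layer = Activation('linear')   # this layer defines no alpha/beta
    alpha = layer.alpha if hasattr(layer, "alpha") else 1   # falls back to 1
    beta = layer.beta if hasattr(layer, "beta") else 0      # falls back to 0
    # linear activation: alpha * x + beta == 1 * x + 0 == x (identity)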