diff --git a/ide/static/js/data.js b/ide/static/js/data.js
index b9bb5d410..41bfb7119 100644
--- a/ide/static/js/data.js
+++ b/ide/static/js/data.js
@@ -3065,6 +3065,36 @@ export default {
},
learn: true
},
+ Linear: { // Only Keras
+ name: 'linear',
+ color: '#009688',
+ endpoint: {
+ src: ['Bottom'],
+ trg: ['Top']
+ },
+ params: {
+ inplace: {
+ name: 'Inplace operation',
+ value: true,
+ type: 'checkbox',
+ required: false
+ },
+ caffe: {
+ name: 'Available Caffe',
+ value: false,
+ type: 'checkbox',
+ required: false
+ }
+ },
+ props: {
+ name: {
+ name: 'Name',
+ value: '',
+ type: 'text'
+ }
+ },
+ learn: false
+ },
/* ********** Utility Layers ********** */
Flatten: {
name: 'flatten',
diff --git a/ide/static/js/pane.js b/ide/static/js/pane.js
index 6090f6996..0b5436407 100644
--- a/ide/static/js/pane.js
+++ b/ide/static/js/pane.js
@@ -322,6 +322,9 @@ class Pane extends React.Component {
Scale
+ Linear
diff --git a/ide/tasks.py b/ide/tasks.py
index ecc46fa0c..24fe8c8df 100644
--- a/ide/tasks.py
+++ b/ide/tasks.py
@@ -78,6 +78,7 @@ def export_keras_json(net, net_name, is_tf, reply_channel):
'TanH': activation,
'Sigmoid': activation,
'HardSigmoid': activation,
+ 'Linear': activation,
'Dropout': dropout,
'Flatten': flatten,
'Reshape': reshape,
diff --git a/keras_app/views/export_json.py b/keras_app/views/export_json.py
index af8913fea..216879ce3 100644
--- a/keras_app/views/export_json.py
+++ b/keras_app/views/export_json.py
@@ -49,6 +49,7 @@ def export_json(request, is_tf=False):
'TanH': activation,
'Sigmoid': activation,
'HardSigmoid': activation,
+ 'Linear': activation,
'Dropout': dropout,
'Flatten': flatten,
'Reshape': reshape,
diff --git a/keras_app/views/import_json.py b/keras_app/views/import_json.py
index d8b919884..561b85e93 100644
--- a/keras_app/views/import_json.py
+++ b/keras_app/views/import_json.py
@@ -62,6 +62,7 @@ def import_json(request):
'tanh': Activation,
'sigmoid': Activation,
'hard_sigmoid': Activation,
+ 'linear': Activation,
'Dropout': Dropout,
'Flatten': Flatten,
'Reshape': Reshape,
diff --git a/keras_app/views/layers_export.py b/keras_app/views/layers_export.py
index 257065610..9d126b911 100644
--- a/keras_app/views/layers_export.py
+++ b/keras_app/views/layers_export.py
@@ -123,6 +123,8 @@ def activation(layer, layer_in, layerId, tensor=True):
out[layerId] = Activation('softsign')
elif (layer['info']['type'] == 'HardSigmoid'):
out[layerId] = Activation('hard_sigmoid')
+ elif (layer['info']['type'] == 'Linear'):
+ out[layerId] = Activation('linear')
if tensor:
out[layerId] = out[layerId](*layer_in)
return out
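
Note on the new export branch above: in Keras, the 'linear' activation is the
identity function, so Activation('linear') simply passes its input through
unchanged. A minimal standalone sketch of that behaviour (plain Keras, not
Fabrik code; the shape and values are only illustrative):

import numpy as np
from keras.models import Sequential
from keras.layers import Activation

# A model consisting of a single 'linear' Activation layer acts as the identity.
model = Sequential()
model.add(Activation('linear', input_shape=(3,)))

x = np.array([[-1.0, 0.0, 2.5]])
print(model.predict(x))  # expected to echo the input: [[-1.   0.   2.5]]
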
diff --git a/keras_app/views/layers_import.py b/keras_app/views/layers_import.py
index 7be30f85c..d32b45d6e 100644
--- a/keras_app/views/layers_import.py
+++ b/keras_app/views/layers_import.py
@@ -41,7 +41,8 @@ def Activation(layer):
'selu': 'SELU',
'softplus': 'Softplus',
'softsign': 'Softsign',
- 'hard_sigmoid': 'HardSigmoid'
+ 'hard_sigmoid': 'HardSigmoid',
+ 'linear': 'Linear'
}
if (layer.__class__.__name__ == 'Activation'):
return jsonLayer(activationMap[layer.activation.func_name], {}, layer)
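
For the import direction, the mapping above is keyed by the name of the Keras
activation function attached to the Activation layer, so 'linear' resolves to
the Fabrik layer type 'Linear'. A hedged sketch of where that name comes from
(plain Keras; the surrounding code reads layer.activation.func_name, the
Python 2 attribute, while on Python 3 the equivalent is __name__):

from keras.layers import Activation

layer = Activation('linear')
# Activation('linear') stores the builtin keras.activations.linear function,
# whose name is the lookup key used in activationMap.
print(layer.activation.__name__)  # 'linear'
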
diff --git a/tests/unit/keras_app/keras_export_test.json b/tests/unit/keras_app/keras_export_test.json
index c778c2045..ca49615d0 100644
--- a/tests/unit/keras_app/keras_export_test.json
+++ b/tests/unit/keras_app/keras_export_test.json
@@ -777,6 +777,21 @@
"inplace": true
}
},
+ "Linear": {
+ "connection": {
+ "input": [
+ "l0"
+ ],
+ "ouput": []
+ },
+ "info": {
+ "phase": null,
+ "type": "Linear"
+ },
+ "params": {
+ "inplace": true
+ }
+ },
"Upsample": {
"info": {
"phase": null,
diff --git a/tests/unit/keras_app/test_views.py b/tests/unit/keras_app/test_views.py
index d5237a7f2..c21b420ff 100644
--- a/tests/unit/keras_app/test_views.py
+++ b/tests/unit/keras_app/test_views.py
@@ -285,6 +285,11 @@ def test_keras_import(self):
model.add(ThresholdedReLU(theta=1, input_shape=(15,)))
model.build()
self.keras_type_test(model, 0, 'ThresholdedReLU')
+ # Linear
+ model = Sequential()
+ model.add(Activation('linear', input_shape=(15,)))
+ model.build()
+ self.keras_type_test(model, 0, 'Linear')
class DropoutImportTest(unittest.TestCase, HelperFunctions):
@@ -943,6 +948,24 @@ def test_keras_export(self):
self.assertEqual(model.layers[1].__class__.__name__, 'Activation')
+class LinearActivationExportTest(unittest.TestCase):
+ def setUp(self):
+ self.client = Client()
+
+ def test_keras_export(self):
+ tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'keras_app',
+ 'keras_export_test.json'), 'r')
+ response = json.load(tests)
+ tests.close()
+ net = yaml.safe_load(json.dumps(response['net']))
+ net = {'l0': net['Input'], 'l1': net['Linear']}
+ net['l0']['connection']['output'].append('l1')
+ inp = data(net['l0'], '', 'l0')['l0']
+ net = activation(net['l1'], [inp], 'l1')
+ model = Model(inp, net['l1'])
+ self.assertEqual(model.layers[1].__class__.__name__, 'Activation')
+
+
class DropoutExportTest(unittest.TestCase):
def setUp(self):
self.client = Client()
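
The new LinearActivationExportTest only checks the layer class on the export
path; as a sanity check that 'linear' also survives a full serialise/rebuild
round trip in Keras itself (which is what the paired import/export mappings
rely on), here is a small sketch using plain Keras, independent of Fabrik:

from keras.models import Sequential, model_from_json
from keras.layers import Activation

model = Sequential()
model.add(Activation('linear', input_shape=(15,)))

# Rebuilding from the JSON config keeps the Activation layer and its 'linear'
# activation, mirroring the import/export pair added in this patch. (On newer
# Keras versions the serialised activation may be wrapped differently.)
rebuilt = model_from_json(model.to_json())
print(rebuilt.layers[0].__class__.__name__)           # 'Activation'
print(rebuilt.layers[0].get_config()['activation'])   # 'linear'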