Skip to content

Commit f9a2412

Browse files
authored
update keras activation parsing, especially leaky relu (#1085)
* update keras activation parsing, especially leaky relu
* fix assert in pytest as in oneAPI branch
1 parent 4518537 commit f9a2412

File tree

3 files changed

+12
-14
lines changed

3 files changed

+12
-14
lines changed

hls4ml/converters/keras/core.py

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -67,7 +67,8 @@ def parse_activation_layer(keras_layer, input_names, input_shapes, data_reader):
6767
layer['class_name'] = 'ELU' # always use ELU type for elu, even if passed as activation
6868

6969
if layer['class_name'] == 'LeakyReLU':
70-
layer['activ_param'] = keras_layer['config'].get('alpha', 0.3)
70+
# the name changes for version 3
71+
layer['activ_param'] = keras_layer['config'].get('negative_slope', keras_layer['config'].get('alpha', 0.3))
7172
elif layer['class_name'] == 'ThresholdedReLU':
7273
layer['activ_param'] = keras_layer['config'].get('theta', 1.0)
7374
elif layer['class_name'] == 'ELU':
@@ -83,6 +84,10 @@ def parse_activation_layer(keras_layer, input_names, input_shapes, data_reader):
8384
layer['class_name'] = 'HardActivation'
8485
if layer['class_name'] == 'Softmax':
8586
layer['axis'] = keras_layer['config'].get('axis', -1)
87+
if layer['class_name'] == 'Activation' and layer['activation'] == 'leaky_relu':
88+
layer['class_name'] = 'LeakyReLU'
89+
# The parameter name changes for API v3; the default is different than in LeakyReLU layer
90+
layer['activ_param'] = keras_layer['config'].get('negative_slope', keras_layer['config'].get('alpha', 0.2))
8691

8792
return layer, [shape for shape in input_shapes[0]]
8893

hls4ml/converters/keras_to_hls.py

Lines changed: 5 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -297,26 +297,18 @@ def parse_keras_model(model_arch, reader):
297297
layer_list.append(layer)
298298
if 'activation' in layer and layer['class_name'] not in activation_layers + recurrent_layers: # + qkeras_layers:
299299
act_layer = {}
300+
act_details = layer['activation']
300301
# Workaround for QKeras activations passed as an argument
301-
if isinstance(layer['activation'], dict):
302-
act_details = layer['activation']
302+
if isinstance(act_details, dict):
303303
act_layer['class_name'] = 'QActivation'
304304
act_layer['config'] = {
305305
'name': layer['name'] + '_' + act_details['class_name'],
306306
'activation': act_details,
307307
}
308-
act_layer, output_shape = layer_handlers['QActivation'](act_layer, None, [output_shape], reader)
309308
else:
310-
act_layer['name'] = layer['name'] + '_' + layer['activation']
311-
act_layer['activation'] = layer['activation']
312-
if 'activ_param' in layer:
313-
act_layer['activ_param'] = layer['activ_param']
314-
act_layer['class_name'] = layer['activation']
315-
elif layer['activation'] == 'softmax':
316-
act_layer['class_name'] = 'Softmax'
317-
act_layer['axis'] = -1
318-
else:
319-
act_layer['class_name'] = 'Activation'
309+
act_layer['class_name'] = 'Activation'
310+
act_layer['config'] = {'name': layer['name'] + '_' + act_details, 'activation': act_details}
311+
act_layer, output_shape = layer_handlers[act_layer['class_name']](act_layer, None, [output_shape], reader)
320312
inputs_map[layer['name']] = act_layer['name']
321313
if output_layers is not None and layer['name'] in output_layers:
322314
output_layers = [act_layer['name'] if name == layer['name'] else name for name in output_layers]

test/pytest/test_activations.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,7 @@
1919
[
2020
(ReLU(), 'relu'),
2121
(LeakyReLU(alpha=1.5), 'leaky_relu'),
22+
(Activation('leaky_relu'), 'leaky_relu_act'),
2223
(ThresholdedReLU(theta=0.75), 'threshold_relu'),
2324
(ELU(alpha=1.25), 'elu'),
2425
(Activation('selu'), 'selu'),

0 commit comments

Comments (0)