@@ -61,17 +61,21 @@ def parse_activation_layer(operation, layer_name, input_names, input_shapes, nod
             layer['class_name'] = 'ThresholdedReLU'
             layer['activation'] = 'ThresholdedReLU'
             if layer['activ_param'] < 0:
-                raise Exception('negative threshold values not supported')
-
-        if hasattr(node, 'dim'):
+                raise Exception('negative threshold values not supported')
+        if hasattr(class_object, 'dim'):
             layer['axis'] = class_object.dim
+        if layer['class_name'] == 'Softmax' and layer['axis'] is None:
+            layer['axis'] = -1
+        if 'IOType' in config:
+            if layer['class_name'] == 'Softmax' and config['IOType'] == 'io_stream' and layer['axis'] != -1:
+                raise Exception('dim needs to be -1 for io_stream')
     else:
         if layer['class_name'] in ['ReLU', 'Sigmoid', 'Tanh']:
             layer['class_name'] = 'Activation'
         if layer['class_name'] == 'LeakyReLU':
             layer['activ_param'] = node.kwargs['negative_slope']
         if layer['class_name'] == 'ELU':
-            layer['activ_param'] = node.kwargs['alpha']
+            layer['activ_param'] = node.kwargs['alpha']
         if layer['class_name'] == 'Threshold':
             layer['activ_param'] = node.args[1]
             if layer['activ_param'] < 0:
@@ -80,7 +84,12 @@ def parse_activation_layer(operation, layer_name, input_names, input_shapes, nod
             layer['activation'] = 'ThresholdedReLU'
         if 'dim' in node.kwargs:
             layer['axis'] = node.kwargs['dim']
-
+        if layer['class_name'] == 'Softmax' and layer['axis'] is None:
+            layer['axis'] = -1
+        if 'IOType' in config:
+            if layer['class_name'] == 'Softmax' and config['IOType'] == 'io_stream' and layer['axis'] != -1:
+                raise Exception('dim needs to be -1 for io_stream')
+
     output_shape = input_shapes[0]
     return layer, output_shape
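Taken together, the two hunks make the same change in both parsing paths (the `call_module` branch that reads attributes from `class_object`, and the functional branch that reads `node.kwargs`): a `Softmax` layer whose `dim` was not specified now defaults to axis -1, and a softmax over any other axis is rejected when the configured `IOType` is `io_stream`. The first hunk also corrects the attribute check to inspect `class_object` (the `torch.nn` module instance, which carries `dim`) rather than the FX `node`. Below is a minimal, self-contained sketch of that validation step; `normalize_softmax_axis` is a hypothetical name and the dicts are simplified stand-ins for the structures the parser builds, not hls4ml's actual API.

```python
def normalize_softmax_axis(layer, config):
    # Hypothetical helper illustrating the logic added in both hunks.
    # No dim given on the PyTorch side: default to the last axis.
    if layer['class_name'] == 'Softmax' and layer['axis'] is None:
        layer['axis'] = -1
    # io_stream streams data along the last dimension, so a softmax over
    # any other axis cannot be realized in that IO mode.
    if 'IOType' in config:
        if layer['class_name'] == 'Softmax' and config['IOType'] == 'io_stream' and layer['axis'] != -1:
            raise Exception('dim needs to be -1 for io_stream')
    return layer

# nn.Softmax() with no dim under io_stream: axis falls back to -1 and passes.
layer = normalize_softmax_axis({'class_name': 'Softmax', 'axis': None}, {'IOType': 'io_stream'})
assert layer['axis'] == -1

# nn.Softmax(dim=1) under io_stream: rejected.
try:
    normalize_softmax_axis({'class_name': 'Softmax', 'axis': 1}, {'IOType': 'io_stream'})
except Exception as e:
    print(e)  # dim needs to be -1 for io_stream
```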