@@ -64,6 +64,7 @@ def test_linear(backend, io_type):
     "activation_function",
     [
         nn.ReLU(),
+        nn.Tanh(),
         nn.LeakyReLU(negative_slope=1.0),
         nn.ELU(alpha=1.0),
         nn.PReLU(init=0.25),
@@ -102,7 +103,7 @@ def test_activations(activation_function, backend, io_type):
 
     assert nNodes - 1 == len(hls_model.get_layers())
 
-    if activation_function.__class__.__name__ == 'ReLU' or activation_function.__class__.__name__ == 'Sigmoid':
+    if activation_function.__class__.__name__ in ['ReLU', 'Sigmoid', 'Tanh']:
         assert list(hls_model.get_layers())[2].attributes['class_name'] == 'Activation'
     elif activation_function.__class__.__name__ == 'Threshold':
         assert list(hls_model.get_layers())[2].attributes['class_name'] == 'ThresholdedReLU'
@@ -118,6 +119,14 @@ def forward(self, x):
         return nn.functional.relu(x)
 
 
+class TanHModel(nn.Module):
+    def __init__(self):
+        super().__init__()
+
+    def forward(self, x):
+        return nn.functional.tanh(x)
+
+
 class LeakyReLuModel(nn.Module):
     def __init__(self):
         super().__init__()
@@ -154,6 +163,7 @@ def forward(self, x):
     "activation_function",
     [
         ReLuModel(),
+        TanHModel(),
         LeakyReLuModel(),
         EluModel(),
         SigmoidModel(),
@@ -172,7 +182,7 @@ def test_activation_functionals(activation_function, backend, io_type):
 
     config = config_from_pytorch_model(model, (1,))
     fn_name = activation_function.__class__.__name__
-    output_dir = str(test_root_path / f'hls4mlprj_pytorch_api_activations_functional_relu_{backend}_{io_type}_{fn_name}')
+    output_dir = str(test_root_path / f'hls4mlprj_pytorch_api_activations_functional_{fn_name}_{backend}_{io_type}')
     hls_model = convert_from_pytorch_model(model, hls_config=config, output_dir=output_dir, backend=backend, io_type=io_type)
     hls_model.compile()
 
@@ -268,7 +278,7 @@ def test_conv1d(padds, backend, io_type):
     act_index = 2
     assert list(hls_model.get_layers())[conv_index].attributes['name'] == convNode.name
     assert list(hls_model.get_layers())[conv_index].attributes['class_name'] == 'Conv1D'
-    assert list(hls_model.get_layers())[act_index].attributes['activation'] == class_object_relu.__class__.__name__
+    assert list(hls_model.get_layers())[act_index].attributes['activation'] == class_object_relu.__class__.__name__.lower()
     if io_type == "io_stream" and (backend == "Vivado" or backend == "Vitis") and padds == 1:
         assert list(hls_model.get_layers())[conv_index].attributes["in_width"] == size_in + 2
     else:
@@ -412,7 +422,9 @@ def test_conv2d(padds, backend, io_type):
     act_index = 2
     assert list(hls_model.get_layers())[conv_index].attributes['name'] == convNode.name
     assert list(hls_model.get_layers())[conv_index].attributes['class_name'] == 'Conv2D'
-    assert list(hls_model.get_layers())[act_index].attributes['activation'] == class_object_relu.__class__.__name__
+    assert (
+        list(hls_model.get_layers())[act_index].attributes['activation'] == class_object_relu.__class__.__name__.lower()
+    )
     assert list(hls_model.get_layers())[conv_index].attributes["in_width"] == size_in_width
     assert list(hls_model.get_layers())[conv_index].attributes["in_height"] == size_in_height
     assert list(hls_model.get_layers())[conv_index].attributes['filt_width'] == class_object_conv.kernel_size[1]
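For reference, a minimal sketch of exercising the new Tanh path outside pytest, mirroring the config_from_pytorch_model / convert_from_pytorch_model calls shown in the hunks above; the output_dir, backend, and io_type values are illustrative, and the top-level hls4ml import paths are assumed to match the ones this test file already uses:

    import torch.nn as nn
    import hls4ml

    # Stand-in for the TanHModel added in this diff
    class TanHModel(nn.Module):
        def forward(self, x):
            return nn.functional.tanh(x)

    model = TanHModel()
    # Same call pattern as in test_activation_functionals
    config = hls4ml.utils.config_from_pytorch_model(model, (1,))
    hls_model = hls4ml.converters.convert_from_pytorch_model(
        model,
        hls_config=config,
        output_dir='hls4mlprj_tanh_sketch',  # illustrative path
        backend='Vivado',
        io_type='io_parallel',
    )
    hls_model.compile()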