Commit 9f5c249

Disable some unsupported layers

1 parent 36cd38c

2 files changed: +11 −3 lines

hls4ml/converters/onnx/core.py

Lines changed: 4 additions & 1 deletion

@@ -23,7 +23,10 @@ def parse_gemm_layer(reader, node, inputs_map, input_shapes, graph, config):
     return layer, output_shape

 #------------------Global paras for activations
-activation_layers = ['Relu', 'Tanh', 'Sigmoid', 'LeakyRelu', 'ThresholdedRelu', 'HardSigmoid', 'Elu', 'Selu', 'PRelu', 'Softmax', 'Softsign', 'Softplus', 'Clip']
+# TODO: repair HardSigmoid support
+# https://github.com/fastmachinelearning/hls4ml/issues/409
+#activation_layers = ['Relu', 'Tanh', 'Sigmoid', 'LeakyRelu', 'ThresholdedRelu', 'HardSigmoid', 'Elu', 'Selu', 'PRelu', 'Softmax', 'Softsign', 'Softplus', 'Clip']
+activation_layers = ['Relu', 'Tanh', 'Sigmoid', 'LeakyRelu', 'ThresholdedRelu', 'Elu', 'Selu', 'PRelu', 'Softmax', 'Softsign', 'Softplus', 'Clip']

 activation_map = {'Relu':'ReLU', 'Tanh':'Activation',
                   'Sigmoid':'Activation', 'LeakyRelu':'LeakyReLU',
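The effect is that HardSigmoid simply disappears from the ONNX whitelist until the linked issue is fixed. A minimal sketch of how a whitelist like this gates node parsing (an illustration only, not hls4ml's actual dispatch code; check_supported is a hypothetical helper):

# Minimal sketch: reject ONNX activation ops dropped from the whitelist.
# Not hls4ml's actual dispatch code; check_supported is hypothetical.
activation_layers = ['Relu', 'Tanh', 'Sigmoid', 'LeakyRelu', 'ThresholdedRelu',
                     'Elu', 'Selu', 'PRelu', 'Softmax', 'Softsign', 'Softplus', 'Clip']

def check_supported(op_type):
    # Fail early for ops the converter no longer claims to handle.
    if op_type not in activation_layers:
        raise RuntimeError('Unsupported ONNX activation: {}'.format(op_type))

check_supported('Relu')         # fine
check_supported('HardSigmoid')  # raises, since this commit disabled it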

hls4ml/converters/pytorch/core.py

Lines changed: 7 additions & 2 deletions
@@ -2,6 +2,8 @@

 from hls4ml.converters.pytorch_to_hls import pytorch_handler

+# TODO: propagate use_bias info properly
+# https://github.com/fastmachinelearning/hls4ml/issues/409
 @pytorch_handler('Linear')
 def parse_linear_layer(pytorch_layer, layer_name, input_shapes, data_reader, config):
     assert('Linear' in pytorch_layer.__class__.__name__)
@@ -15,6 +17,7 @@ def parse_linear_layer(pytorch_layer, layer_name, input_shapes, data_reader, config):
     layer['n_out'] = pytorch_layer.out_features

     #Handling whether bias is used or not
+    assert pytorch_layer.bias is not None, "PyTorch Linear with bias=False not yet supported"
     if pytorch_layer.bias is None:
         layer['use_bias'] = False
     else:
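The new assertion makes the converter fail fast rather than mis-parse a bias-free layer. A quick check with plain PyTorch shows why the guard works: nn.Linear constructed with bias=False leaves its .bias attribute as None.

import torch.nn as nn

# bias=False leaves layer.bias as None, so the converter's new guard
# trips immediately instead of silently producing a broken layer config.
layer = nn.Linear(4, 2, bias=False)
assert layer.bias is not None, "PyTorch Linear with bias=False not yet supported"
# -> AssertionError: PyTorch Linear with bias=False not yet supported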
@@ -24,8 +27,10 @@ def parse_linear_layer(pytorch_layer, layer_name, input_shapes, data_reader, config):

     return layer, output_shape

-
-activation_layers = ['LeakyReLU', 'ThresholdedReLU', 'ELU', 'PReLU', 'Softmax', 'ReLU']
+# TODO: propagate parametrized activation parameters
+# https://github.com/fastmachinelearning/hls4ml/issues/409
+# activation_layers = ['LeakyReLU', 'ThresholdedReLU', 'ELU', 'PReLU', 'Softmax', 'ReLU']
+activation_layers = ['Softmax', 'ReLU']
 @pytorch_handler(*activation_layers)
 def parse_activation_layer(pytorch_layer, layer_name, input_shapes, data_reader, config):

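Shrinking activation_layers to ['Softmax', 'ReLU'] means the parametrized activations are simply never registered, so their conversion no longer reaches the buggy parameter handling. A hedged sketch of the registration pattern that @pytorch_handler(*activation_layers) implies; this is an illustrative re-implementation, not hls4ml's actual code:

# Illustrative re-implementation of the registry pattern behind
# @pytorch_handler(*activation_layers); not hls4ml's actual code.
layer_handlers = {}

def pytorch_handler(*names):
    def decorator(func):
        for name in names:
            layer_handlers[name] = func  # one handler can serve many layer names
        return func
    return decorator

@pytorch_handler('Softmax', 'ReLU')
def parse_activation_layer(pytorch_layer, layer_name, input_shapes, data_reader, config):
    return {'class_name': pytorch_layer.__class__.__name__}, input_shapes[0]

# 'ELU' and the other parametrized activations are no longer registered:
assert 'ReLU' in layer_handlers and 'ELU' not in layer_handlers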
