From 571fd41ae09a638461556cf74dd87253ee5ab264 Mon Sep 17 00:00:00 2001
From: Danielle Ben Bashat
Date: Mon, 16 May 2022 17:37:59 +0300
Subject: [PATCH 1/3] add hard sigmoid convert function

---
 onnx2kerastl/activation_layers.py | 20 ++++++++++++++++++++
 onnx2kerastl/layers.py            |  3 ++-
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/onnx2kerastl/activation_layers.py b/onnx2kerastl/activation_layers.py
index 55be903b..6168435c 100644
--- a/onnx2kerastl/activation_layers.py
+++ b/onnx2kerastl/activation_layers.py
@@ -85,6 +85,26 @@ def convert_sigmoid(node, params, layers, lambda_func, node_name, keras_name):
     layers[node_name] = sigmoid(input_0)
 
 
+def convert_hard_sigmoid(node, params, layers, lambda_func, node_name, keras_name):
+    """
+    Convert Hard Sigmoid activation layer
+    :param node: current operation node
+    :param params: operation attributes
+    :param layers: available keras layers
+    :param lambda_func: function for keras Lambda layer
+    :param node_name: internal converter name
+    :param keras_name: resulting layer name
+    :return: None
+    """
+    if len(node.input) != 1:
+        raise AttributeError('More than 1 input for an activation layer.')
+
+    input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
+
+    hard_sigmoid = keras.layers.Activation('hard_sigmoid', name=keras_name)
+    layers[node_name] = hard_sigmoid(input_0)
+
+
 def convert_tanh(node, params, layers, lambda_func, node_name, keras_name):
     """
     Convert Tanh activation layer
diff --git a/onnx2kerastl/layers.py b/onnx2kerastl/layers.py
index 336e8255..debaada6 100644
--- a/onnx2kerastl/layers.py
+++ b/onnx2kerastl/layers.py
@@ -1,6 +1,6 @@
 from .convolution_layers import convert_conv, convert_convtranspose
 from .activation_layers import convert_relu, convert_elu, convert_lrelu, convert_selu, \
-    convert_sigmoid, convert_tanh, convert_softmax, convert_prelu
+    convert_sigmoid, convert_hard_sigmoid, convert_tanh, convert_softmax, convert_prelu
 from .operation_layers import convert_clip, convert_exp, convert_reduce_sum, convert_reduce_mean, \
     convert_log, convert_pow, convert_sqrt, convert_split, convert_cast, convert_floor, convert_identity, \
     convert_argmax, convert_reduce_l2, convert_reduce_max
@@ -22,6 +22,7 @@
     'Elu': convert_elu,
     'LeakyRelu': convert_lrelu,
     'Sigmoid': convert_sigmoid,
+    'HardSigmoid': convert_hard_sigmoid,
     'Tanh': convert_tanh,
     'Selu': convert_selu,
     'Clip': convert_clip,

From eb8e06f1cfc8df280fa2abc336d38ed016b51072 Mon Sep 17 00:00:00 2001
From: Danielle Ben Bashat
Date: Mon, 16 May 2022 19:07:01 +0300
Subject: [PATCH 2/3] add test to hard sigmoid activation

---
 test/layers/activations/test_hard_sigmoid.py | 46 ++++++++++++++++++++
 1 file changed, 46 insertions(+)
 create mode 100644 test/layers/activations/test_hard_sigmoid.py

diff --git a/test/layers/activations/test_hard_sigmoid.py b/test/layers/activations/test_hard_sigmoid.py
new file mode 100644
index 00000000..82fe085a
--- /dev/null
+++ b/test/layers/activations/test_hard_sigmoid.py
@@ -0,0 +1,46 @@
+import torch.nn as nn
+import numpy as np
+import pytest
+
+from test.utils import convert_and_test
+
+
+class LayerHardSigmoid(nn.Module):
+    """
+    Test for nn.layers based types
+    """
+    def __init__(self):
+        super(LayerHardSigmoid, self).__init__()
+        self.hard_sig = nn.Hardsigmoid()
+
+    def forward(self, x):
+        x = self.hard_sig(x)
+        return x
+
+
+class FHardSigmoid(nn.Module):
+    """
+    Test for nn.functional types
+    """
+    def __init__(self):
+        super(FHardSigmoid, self).__init__()
+
+    def forward(self, x):
+        from torch.nn import functional as F
+        return F.hardsigmoid(x)
+
+
+@pytest.mark.parametrize('change_ordering', [True, False])
+def test_layer_hard_sigmoid(change_ordering):
+    model = LayerHardSigmoid()
+    model.eval()
+    input_np = np.random.uniform(0, 1, (1, 3, 224, 224))
+    error = convert_and_test(model, input_np, verbose=False, change_ordering=change_ordering)
+
+
+@pytest.mark.parametrize('change_ordering', [True, False])
+def test_f_hard_sigmoid(change_ordering):
+    model = FHardSigmoid()
+    model.eval()
+    input_np = np.random.uniform(0, 1, (1, 3, 224, 224))
+    error = convert_and_test(model, input_np, verbose=False, change_ordering=change_ordering)

From cf69f4db73a1c48dab8f76fa1a6d8953e8670ec6 Mon Sep 17 00:00:00 2001
From: Danielle Ben Bashat
Date: Tue, 17 May 2022 15:40:25 +0300
Subject: [PATCH 3/3] wip convert torch func to tf version

---
 onnx2kerastl/activation_layers.py | 5 ++++-
 onnx2kerastl/utils.py             | 3 +++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/onnx2kerastl/activation_layers.py b/onnx2kerastl/activation_layers.py
index 6168435c..336905c8 100644
--- a/onnx2kerastl/activation_layers.py
+++ b/onnx2kerastl/activation_layers.py
@@ -1,3 +1,5 @@
+import torch.nn
+import tensorflow as tf
 from tensorflow import keras
 import logging
 from .utils import ensure_tf_type, ensure_numpy_type
@@ -85,7 +87,7 @@ def convert_sigmoid(node, params, layers, lambda_func, node_name, keras_name):
     layers[node_name] = sigmoid(input_0)
 
 
-def convert_hard_sigmoid(node, params, layers, lambda_func, node_name, keras_name):
+def convert_hard_sigmoid(node, params, layers, lambda_func, node_name, keras_name, alpha=0.167, beta=0.5):
     """
     Convert Hard Sigmoid activation layer
     :param node: current operation node
@@ -101,6 +103,7 @@ def convert_hard_sigmoid(node, params, layers, lambda_func, node_name, keras_nam
 
     input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
 
+    input_0 = tf.multiply(input_0, 5/6)  # torch Hardsigmoid slope is 1/6, Keras hard_sigmoid slope is 0.2, so pre-scale by (1/6)/0.2 == 5/6; TODO: derive this from the node's alpha/beta attributes
     hard_sigmoid = keras.layers.Activation('hard_sigmoid', name=keras_name)
     layers[node_name] = hard_sigmoid(input_0)
 
diff --git a/onnx2kerastl/utils.py b/onnx2kerastl/utils.py
index bc4dca2b..d9769e4f 100644
--- a/onnx2kerastl/utils.py
+++ b/onnx2kerastl/utils.py
@@ -117,3 +117,6 @@ def check_torch_keras_error(model, k_model, input_np, epsilon=1e-5, change_order
             max_error = error
 
     return max_error
+
+
+
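
Note on the 5/6 factor in PATCH 3 (reviewer sketch, not part of the patches above): ONNX HardSigmoid is defined as y = max(0, min(1, alpha * x + beta)). PyTorch's nn.Hardsigmoid exports with alpha = 1/6 and beta = 0.5, while Keras' built-in 'hard_sigmoid' activation is fixed at alpha = 0.2, beta = 0.5, so pre-scaling the input by (1/6) / 0.2 = 5/6 makes the Keras activation reproduce the torch behaviour. A more general conversion could instead apply the node's alpha/beta attributes (the operation attributes passed in as params) directly with a clip, without relying on the fixed Keras activation. The snippet below is a hypothetical standalone sketch of that idea; the helper name is illustrative and not code from this repository.

import numpy as np
import tensorflow as tf


def onnx_hard_sigmoid(x, alpha=0.2, beta=0.5):
    # ONNX HardSigmoid: y = max(0, min(1, alpha * x + beta)); defaults per the ONNX spec.
    return tf.clip_by_value(alpha * x + beta, 0.0, 1.0)


# Sanity check against the PyTorch definition, clip(x / 6 + 0.5, 0, 1):
x = np.linspace(-8.0, 8.0, 101).astype(np.float32)
out = onnx_hard_sigmoid(tf.constant(x), alpha=1.0 / 6.0, beta=0.5).numpy()
ref = np.clip(x / 6.0 + 0.5, 0.0, 1.0)
assert np.allclose(out, ref, atol=1e-6)

Written this way, the converter would not need the hard-coded 5/6 rescaling, and non-default alpha/beta values produced by other frameworks would be handled as well.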