From 3075356525f0f6230c2bb26b5d434f757aa36e7f Mon Sep 17 00:00:00 2001
From: on-jungwoan
Date: Tue, 2 May 2023 22:24:07 +0900
Subject: [PATCH] new implementation of prelu

---
 nobuco/node_converters/activation.py | 27 +++++++++++++++++++++++++++
 nobuco/node_converters/math.py       |  4 ++--
 2 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/nobuco/node_converters/activation.py b/nobuco/node_converters/activation.py
index a64cbd9..0a6806f 100755
--- a/nobuco/node_converters/activation.py
+++ b/nobuco/node_converters/activation.py
@@ -9,11 +9,16 @@
 import torch.nn.functional as F
 from torch import nn

+from tensorflow.python.framework.ops import disable_eager_execution
+
 from nobuco.commons import ChannelOrder, ChannelOrderingStrategy
 from nobuco.converters.channel_ordering import set_channel_order, get_channel_order
 from nobuco.converters.node_converter import converter
 from nobuco.converters.tensor import dim_pytorch2keras

+def prelu(x, weight):
+    x = tf.math.maximum(0, x) + weight * tf.math.minimum(0, x)
+    return x

 def hard_sigmoid_pytorch_compatible(x):
     x = tf.clip_by_value(x/6 + 1/2, clip_value_min=0, clip_value_max=1)
@@ -63,6 +68,28 @@ def func(input):
     return func


+# # please help me
+# @converter(nn.PReLU, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
+# def converter_PReLU(self, input: Tensor):
+#     return keras.layers.PReLU()
+
+
+@converter(F.prelu, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
+def converter_prelu(input: Tensor, alpha: float = 0.0, inplace: bool = False):
+    def func(input, alpha=0.0, inplace=False):
+        # return keras.layers.PReLU(tf.initializers.constant(alpha))(input) # please help me
+        return keras.layers.PReLU(tf.initializers.constant(0))(input)
+    return func
+
+
+@converter(torch.prelu, torch.Tensor.prelu, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
+def converter_prelu(input: Tensor, alpha: float = 0.0, inplace: bool = False):
+    def func(input, alpha=0.0, inplace=False):
+        # return keras.layers.PReLU(tf.initializers.constant(alpha))(input) # please help me
+        return keras.layers.PReLU(tf.initializers.constant(0))(input)
+    return func
+
+
 @converter(nn.LeakyReLU, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
 def converter_LeakyRelu(self, input: Tensor):
     return keras.layers.LeakyReLU(alpha=self.negative_slope)
diff --git a/nobuco/node_converters/math.py b/nobuco/node_converters/math.py
index 9750a09..81eba42 100644
--- a/nobuco/node_converters/math.py
+++ b/nobuco/node_converters/math.py
@@ -59,13 +59,13 @@ def func(input, dim, keepdim=False, *, dtype=None, out=None):
         return out
     return func

-@converter(torch.sin, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
+@converter(torch.sin, torch.Tensor.sin, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
 def converter_sin(input, *args, **kwargs):
     def func(input, *args, **kwargs):
         return tf.math.sin(input)
     return func

-@converter(torch.cos, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
+@converter(torch.cos, torch.Tensor.cos, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
 def converter_cos(input, *args, **kwargs):
     def func(input, *args, **kwargs):
         return tf.math.cos(input)
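
Note on the two converters marked "please help me": as written they discard the learned slope and build keras.layers.PReLU with a constant-0 initializer, which turns PReLU into a plain ReLU. In PyTorch the signature is F.prelu(input, weight) / torch.prelu(input, weight), where weight is a tensor holding one slope or one slope per channel, so the converter can apply the element-wise formula from the prelu() helper added at the top of this patch instead of instantiating a Keras layer. The sketch below is one possible completion, not part of the patch; it assumes nobuco passes the inner func TensorFlow tensors for both arguments and that get_channel_order() / ChannelOrder.PYTORCH can be used to decide which axis the per-channel slope broadcasts over.

import tensorflow as tf
import torch
import torch.nn.functional as F
from torch import Tensor

from nobuco.commons import ChannelOrder, ChannelOrderingStrategy
from nobuco.converters.channel_ordering import get_channel_order
from nobuco.converters.node_converter import converter


@converter(F.prelu, torch.prelu, torch.Tensor.prelu, channel_ordering_strategy=ChannelOrderingStrategy.MINIMUM_TRANSPOSITIONS)
def converter_prelu(input: Tensor, weight: Tensor):
    def func(input, weight):
        n_dims = len(input.shape)
        if n_dims > 1:
            # Per-channel slopes must broadcast over the channel axis:
            # dim 1 while the tensor is still in PyTorch (channels-first) order,
            # the last dim once nobuco has transposed it to channels-last.
            # (Assumes get_channel_order() works on the tensors func receives.)
            channel_dim = 1 if get_channel_order(input) == ChannelOrder.PYTORCH else n_dims - 1
            shape = [1] * n_dims
            shape[channel_dim] = -1
            weight = tf.reshape(weight, shape)
        # Same formula as the prelu() helper above: max(0, x) + weight * min(0, x)
        return tf.math.maximum(0.0, input) + weight * tf.math.minimum(0.0, input)
    return func

With a converter registered for the functional op, the commented-out nn.PReLU converter may not be needed: nn.PReLU calls F.prelu internally, so tracing the functional call should cover the module case as well.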