From eb7d2a871e412f3789971e704246bfc835a19cc0 Mon Sep 17 00:00:00 2001
From: Matt Watson
Date: Mon, 1 Apr 2024 11:36:04 -0700
Subject: [PATCH] Remove unused beta param for silu, use torch op directly

The beta param was only accepted on the tensorflow/torch backends and not
in the `keras.ops` API, nor was it tested. I think it's best just to ditch
it, since no one could be relying on it.
---
 keras/backend/tensorflow/nn.py | 4 ++--
 keras/backend/torch/nn.py      | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/keras/backend/tensorflow/nn.py b/keras/backend/tensorflow/nn.py
index 4d6433aed51e..807f0206439a 100644
--- a/keras/backend/tensorflow/nn.py
+++ b/keras/backend/tensorflow/nn.py
@@ -40,8 +40,8 @@ def softsign(x):
     return tf.nn.softsign(x)
 
 
-def silu(x, beta=1.0):
-    return tf.nn.silu(x, beta=beta)
+def silu(x):
+    return tf.nn.silu(x)
 
 
 def log_sigmoid(x):
diff --git a/keras/backend/torch/nn.py b/keras/backend/torch/nn.py
index c2d1c17e069d..af7aba02ddd0 100644
--- a/keras/backend/torch/nn.py
+++ b/keras/backend/torch/nn.py
@@ -47,9 +47,9 @@ def softsign(x):
     return tnn.softsign(x)
 
 
-def silu(x, beta=1.0):
+def silu(x):
     x = convert_to_tensor(x)
-    return tnn.silu(x) / beta
+    return tnn.silu(x)
 
 
 def log_sigmoid(x):
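
A minimal usage sketch, kept outside the patch itself, assuming the Keras 3 `keras.ops` API and NumPy; after this change `silu` takes only the input tensor, and the explicit x * sigmoid(x) reference below is for illustration only:

    import numpy as np
    from keras import ops

    x = np.array([-1.0, 0.0, 1.0], dtype="float32")

    # silu no longer accepts a beta argument on any backend.
    y = ops.silu(x)

    # SiLU is defined as x * sigmoid(x); this reference should match ops.silu.
    reference = x * (1.0 / (1.0 + np.exp(-x)))
    print(np.allclose(ops.convert_to_numpy(y), reference))  # True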