From d1edae0159139727689071be954e824e4394e49c Mon Sep 17 00:00:00 2001
From: lvyufeng
Date: Fri, 15 Aug 2025 14:15:11 +0800
Subject: [PATCH] fix layer_norm with bias=None

---
 mindnlp/core/nn/functional.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/mindnlp/core/nn/functional.py b/mindnlp/core/nn/functional.py
index 2cb847e2b..68d132536 100644
--- a/mindnlp/core/nn/functional.py
+++ b/mindnlp/core/nn/functional.py
@@ -560,6 +560,10 @@ def softmax(input, dim=-1, *, dtype=None):
     return out
 
 def layer_norm(input, normalized_shape, weight=None, bias=None, eps=1e-5):
+    if weight is None:
+        weight = core.ones(normalized_shape, dtype=input.dtype, device=input.device)
+    if bias is None:
+        bias = core.zeros(normalized_shape, dtype=input.dtype, device=input.device)
     return execute('layer_norm_ext', input, normalized_shape, weight, bias, eps)[0]
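
For reference, a minimal sketch of the call path this patch fixes, assuming
mindnlp's torch-style `core` API. The import layout and the `core.randn`
helper below are assumptions inferred from the patched file
(mindnlp/core/nn/functional.py), not verified against the repo:

    from mindnlp import core
    from mindnlp.core.nn import functional as F

    # Shape (batch, seq_len, hidden); core.randn is assumed to mirror torch.randn.
    x = core.randn(2, 5, 10)

    # Previously, leaving weight/bias as their None defaults forwarded None
    # straight into the 'layer_norm_ext' kernel. With this patch, a missing
    # weight defaults to ones and a missing bias to zeros, i.e. an identity
    # affine transform applied after normalization.
    out = F.layer_norm(x, (10,))
    print(out.shape)  # expected: (2, 5, 10)

This matches the semantics of torch.nn.functional.layer_norm, where
weight=None and bias=None mean "no learned affine", which is numerically
equivalent to weight = 1 and bias = 0.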