From e5bd7d8ef5249eefd92cabecfe9aaa545f3b8c79 Mon Sep 17 00:00:00 2001
From: Gong Baitao
Date: Tue, 11 Apr 2023 11:25:25 +0800
Subject: [PATCH] fits in one line

---
 src/transformers/models/cpmant/modeling_cpmant.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/src/transformers/models/cpmant/modeling_cpmant.py b/src/transformers/models/cpmant/modeling_cpmant.py
index 7fd1e0d0d7ccd7..5aa696b3f27c86 100755
--- a/src/transformers/models/cpmant/modeling_cpmant.py
+++ b/src/transformers/models/cpmant/modeling_cpmant.py
@@ -47,10 +47,7 @@ class CpmAntLayerNorm(nn.Module):
     We use Root Mean Square (RMS) Layer Normalization, please see https://arxiv.org/abs/1910.07467 for details."
     """

-    def __init__(
-        self,
-        config: CpmAntConfig,
-    ):
+    def __init__(self, config: CpmAntConfig):
         super().__init__()

         self.eps = config.eps
@@ -425,10 +422,7 @@ def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:


 class CpmAntSegmentPositionEmbedding(nn.Module):
-    def __init__(
-        self,
-        config: CpmAntConfig,
-    ):
+    def __init__(self, config: CpmAntConfig):
         super().__init__()

         self.num_heads = config.num_attention_heads
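For reference, the class collapsed in the first hunk, `CpmAntLayerNorm`, implements Root Mean Square (RMS) Layer Normalization, the technique its docstring cites (https://arxiv.org/abs/1910.07467). Below is a minimal standalone sketch of that technique, not the CpmAnt implementation itself: the class name `RMSLayerNorm` and the default `eps` value are illustrative, while the `eps` and hidden-size parameters mirror the config fields visible in the diff (`config.eps`, the dimension normalized over).

```python
import torch
import torch.nn as nn


class RMSLayerNorm(nn.Module):
    """Root Mean Square Layer Normalization (https://arxiv.org/abs/1910.07467).

    Unlike standard LayerNorm, RMSNorm skips mean-centering: it rescales the
    input by the root mean square over the last dimension, then applies a
    learned per-feature gain. No bias term is used.
    """

    def __init__(self, hidden_size: int, eps: float = 1e-6):
        # eps default is illustrative; CpmAnt reads it from config.eps
        super().__init__()
        self.eps = eps
        self.weight = nn.Parameter(torch.ones(hidden_size))

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # mean of squares over the feature dimension
        variance = hidden_states.pow(2).mean(dim=-1, keepdim=True)
        # x / sqrt(mean(x^2) + eps), scaled by the learned gain
        return hidden_states * torch.rsqrt(variance + self.eps) * self.weight
```

Because RMSNorm keeps only a single learned gain and an epsilon, everything it needs can come from the model config, which is why the one-line `__init__(self, config: CpmAntConfig)` signature the patch settles on is sufficient.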