From e6a9d9b1c8b4d86c7221f5606e0ff90437abeb4e Mon Sep 17 00:00:00 2001
From: zxcd <228587199@qq.com>
Date: Tue, 21 Feb 2023 11:46:36 +0000
Subject: [PATCH 1/2] remove fluid api and useless import.

---
 paddlespeech/s2t/models/wav2vec2/wav2vec2_ASR.py | 1 -
 paddlespeech/s2t/training/gradclip.py            | 4 ++--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/paddlespeech/s2t/models/wav2vec2/wav2vec2_ASR.py b/paddlespeech/s2t/models/wav2vec2/wav2vec2_ASR.py
index baa7392eb4f..7468fdce01d 100755
--- a/paddlespeech/s2t/models/wav2vec2/wav2vec2_ASR.py
+++ b/paddlespeech/s2t/models/wav2vec2/wav2vec2_ASR.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from collections import defaultdict
-from turtle import Turtle
 from typing import Dict
 from typing import List
 from typing import Tuple
diff --git a/paddlespeech/s2t/training/gradclip.py b/paddlespeech/s2t/training/gradclip.py
index be6fcf5899f..74f1f233133 100644
--- a/paddlespeech/s2t/training/gradclip.py
+++ b/paddlespeech/s2t/training/gradclip.py
@@ -56,14 +56,14 @@ def _dygraph_clip(self, params_grads):
         if len(sum_square_list) == 0:
             return params_grads
 
-        global_norm_var = layers.concat(sum_square_list)
+        global_norm_var = paddle.concat(sum_square_list)
         global_norm_var = paddle.sum(global_norm_var)
         global_norm_var = paddle.sqrt(global_norm_var)
 
         # debug log
         logger.debug(f"Grad Global Norm: {float(global_norm_var)}!!!!")
 
-        max_global_norm = layers.fill_constant(
+        max_global_norm = paddle.full(
             shape=[1], dtype=global_norm_var.dtype, value=self.clip_norm)
         clip_var = paddle.divide(
             x=max_global_norm,

From 0214bdb62bf66fe06c9d5a4e027b2fdcc4c77076 Mon Sep 17 00:00:00 2001
From: zxcd <228587199@qq.com>
Date: Tue, 21 Feb 2023 12:04:50 +0000
Subject: [PATCH 2/2] fix variable name

---
 paddlespeech/s2t/training/gradclip.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/paddlespeech/s2t/training/gradclip.py b/paddlespeech/s2t/training/gradclip.py
index 74f1f233133..06587c749b5 100644
--- a/paddlespeech/s2t/training/gradclip.py
+++ b/paddlespeech/s2t/training/gradclip.py
@@ -64,7 +64,7 @@ def _dygraph_clip(self, params_grads):
         logger.debug(f"Grad Global Norm: {float(global_norm_var)}!!!!")
 
         max_global_norm = paddle.full(
-            shape=[1], dtype=global_norm_var.dtype, value=self.clip_norm)
+            shape=[1], dtype=global_norm_var.dtype, fill_value=self.clip_norm)
         clip_var = paddle.divide(
             x=max_global_norm,
             y=paddle.maximum(x=global_norm_var, y=max_global_norm))
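
Reviewer note, not part of the patch series: the hunks above migrate the global-norm
clipping math from the deprecated fluid-era calls (layers.concat, layers.fill_constant)
to the public paddle namespace, and the follow-up commit corrects the keyword from
value= to fill_value=, matching what paddle.full accepts. Below is a minimal standalone
sketch of the same computation for sanity-checking the swapped calls; sum_square_list
and clip_norm are dummy stand-ins for the values _dygraph_clip builds from real
gradients, and the printed numbers are illustrative only.

import paddle

clip_norm = 1.0  # hypothetical clip threshold standing in for self.clip_norm

# Stand-ins for the per-parameter squared-gradient sums that _dygraph_clip
# accumulates earlier from params_grads (values are illustrative).
sum_square_list = [paddle.to_tensor([4.0]), paddle.to_tensor([9.0])]

# paddle.concat replaces the removed layers.concat.
global_norm_var = paddle.concat(sum_square_list)
global_norm_var = paddle.sum(global_norm_var)
global_norm_var = paddle.sqrt(global_norm_var)  # sqrt(4 + 9) ~= 3.606

# paddle.full replaces layers.fill_constant; note the keyword is fill_value,
# not value, which is exactly what the second commit fixes.
max_global_norm = paddle.full(
    shape=[1], dtype=global_norm_var.dtype, fill_value=clip_norm)

# Scale factor clip_norm / max(global_norm, clip_norm), as in the hunk:
# it is 1.0 when the norm is within bounds, < 1.0 when clipping kicks in.
clip_var = paddle.divide(
    x=max_global_norm,
    y=paddle.maximum(x=global_norm_var, y=max_global_norm))
print(float(clip_var))  # ~= 0.277 here, since the norm exceeds clip_norm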