From a66ae1eed1b2329e5bf413b206f44d61f5167284 Mon Sep 17 00:00:00 2001
From: Edenzzzz
Date: Tue, 2 Apr 2024 18:18:30 +0800
Subject: [PATCH] fix incorrect sharding without ZeRO

---
 colossalai/shardformer/shard/shard_config.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/colossalai/shardformer/shard/shard_config.py b/colossalai/shardformer/shard/shard_config.py
index 646b611932b7..9130caaca3f1 100644
--- a/colossalai/shardformer/shard/shard_config.py
+++ b/colossalai/shardformer/shard/shard_config.py
@@ -74,7 +74,9 @@ def _turn_on_all_optimization(self):
         self.enable_fused_normalization = True
         self.enable_flash_attention = True
         self.enable_jit_fused = True
-        self.enable_sequence_parallelism = True
+        # This can cause non-in-place param sharding when used without ZeRO.
+        # It may also slow down training when the sequence length is small. Please enable it manually.
+        # self.enable_sequence_parallelism = True
         self.enable_sequence_overlap = True
 
     def _infer(self):
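
Note (reviewer sketch, not part of the patch): after this change, enable_all_optimization no longer implies sequence parallelism, so users who want it must opt in explicitly. Below is a minimal sketch using HybridParallelPlugin's public keyword arguments; the concrete tp_size/zero_stage values are illustrative assumptions, not something the patch prescribes.

    # Assumes the distributed process group is already initialized
    # (e.g., via colossalai.launch_from_torch under torchrun).
    from colossalai.booster import Booster
    from colossalai.booster.plugin import HybridParallelPlugin

    plugin = HybridParallelPlugin(
        tp_size=2,                         # SP shards activations along the TP group, so TP must be on
        pp_size=1,
        zero_stage=1,                      # pairing SP with ZeRO avoids the sharding issue noted above
        enable_all_optimization=True,      # fused norm, flash attention, JIT fusion
        enable_sequence_parallelism=True,  # now opt-in rather than implied
        enable_sequence_overlap=True,      # overlap only takes effect when SP is enabled
    )
    booster = Booster(plugin=plugin)

This keeps the default _turn_on_all_optimization path free of the non-in-place param sharding problem, while long-sequence workloads can still enable sequence parallelism deliberately.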