diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 7267d79b3c86c5..549fb0f85ec071 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -2753,6 +2753,7 @@ def save_model(self, output_dir: Optional[str] = None, _internal_call: bool = Fa
             or ShardedDDPOption.ZERO_DP_3 in self.args.sharded_ddp
             or self.fsdp is not None
         ):
+            os.makedirs(output_dir, exist_ok=True)
             state_dict = self.model.state_dict()
 
             if self.args.should_save: