From 1b6cb1eefc140ea2dbd74e23389050ebcb7f417d Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Fri, 13 Dec 2024 13:23:00 -0500
Subject: [PATCH] don't use no_sync when deepspeed doesn't support it for
 certain zero stages (#35157)

* don't use no_sync when deepspeed doesn't support it for certain zero stages

* chore: lint

* fix no_sync context for deepspeed across all zero types

* chore: lint
---
 src/transformers/trainer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 8c224665cc39..616d9ff5c2dc 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -2517,6 +2517,7 @@ def _inner_training_loop(
                 context = (
                     functools.partial(self.accelerator.no_sync, model=model)
                     if i != len(batch_samples) - 1
+                    and self.accelerator.distributed_type != DistributedType.DEEPSPEED
                     else contextlib.nullcontext
                 )
                 with context():
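
Note (not part of the patch): a minimal sketch of the pattern the hunk implements, assuming an accelerate Accelerator instance. The helper name grad_accum_context and its arguments are illustrative, not an existing Trainer API.

import contextlib
import functools

from accelerate import Accelerator
from accelerate.utils import DistributedType


def grad_accum_context(accelerator: Accelerator, model, i: int, num_batches: int):
    # On non-final micro-steps, no_sync() skips the gradient all-reduce so it
    # only runs once per accumulation window. DeepSpeed does not support this
    # for some ZeRO stages (it manages accumulation internally), so the patch
    # falls back to a null context whenever DeepSpeed is the distributed backend.
    if i != num_batches - 1 and accelerator.distributed_type != DistributedType.DEEPSPEED:
        return functools.partial(accelerator.no_sync, model=model)
    return contextlib.nullcontext

This would be used as `with grad_accum_context(accelerator, model, i, len(batch_samples))():` around each micro-batch's forward/backward pass, mirroring `with context():` in the hunk above.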