From 2c887cf8e0cb1ac96d28361ff3235a77f83c36ee Mon Sep 17 00:00:00 2001
From: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
Date: Wed, 7 Jun 2023 15:31:32 -0400
Subject: [PATCH] Do not prepare lr scheduler as it has the right number of steps (#24088)

* Do not prepare lr scheduler as it has the right number of steps

* Trigger CI

* Trigger CI

* Trigger CI

* Add fake comment

* Remove fake comment

* Trigger CI please!
---
 src/transformers/trainer.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 0468b74360ef98..0785e03ac545e0 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -1747,9 +1747,7 @@ def _inner_training_loop(
 
         # prepare using `accelerator` prepare
         if use_accelerator_prepare:
-            model, self.optimizer, self.lr_scheduler = self.accelerator.prepare(
-                self.model, self.optimizer, self.lr_scheduler
-            )
+            model, self.optimizer = self.accelerator.prepare(self.model, self.optimizer)
 
         if self.is_fsdp_enabled:
             self.model = model
@@ -1996,6 +1994,7 @@ def _inner_training_loop(
                         optimizer_was_run = scale_before <= scale_after
                     else:
                         self.optimizer.step()
+                        optimizer_was_run = not self.accelerator.optimizer_step_was_skipped
 
                     if optimizer_was_run:
                         # Delay optimizer scheduling until metrics are generated
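
Below is a minimal, self-contained sketch (not the Trainer code itself) of the pattern this patch adopts: only the model and optimizer go through `accelerator.prepare`, the LR scheduler is left as created with its already-correct number of steps, and the scheduler is advanced only when `accelerator.optimizer_step_was_skipped` is False, i.e. when the optimizer step actually ran. The model, optimizer, scheduler, and data used here are illustrative assumptions, not part of the patch.

import torch
from accelerate import Accelerator

accelerator = Accelerator()

# Hypothetical model and optimizer; any torch module and optimizer would do.
model = torch.nn.Linear(10, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

# The scheduler is built for the real number of update steps and is NOT
# passed to `prepare`, mirroring the patched Trainer call.
lr_scheduler = torch.optim.lr_scheduler.LinearLR(optimizer, total_iters=100)

model, optimizer = accelerator.prepare(model, optimizer)

for _ in range(100):
    inputs = torch.randn(8, 10, device=accelerator.device)
    loss = model(inputs).sum()
    accelerator.backward(loss)
    optimizer.step()
    # Under mixed precision a step may be skipped on overflow; only advance
    # the scheduler when the optimizer really stepped, as the patch does.
    if not accelerator.optimizer_step_was_skipped:
        lr_scheduler.step()
    optimizer.zero_grad()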