Bug Fix: Accelerator scaler set after initi (#363)
The "optimizer.py" script in accelerator has more variables that must be set if "scaler" is not set to None. I just copied the setup script directly from their code-base (link here: https://github.com/huggingface/accelerate/blob/956114ac92cfbdfe0874ca73aa37ac815326f040/src/accelerate/optimizer.py#L63)
gauenk authored Oct 4, 2023
1 parent f9fb8be commit a551a64
Showing 1 changed file with 9 additions and 0 deletions: imagen_pytorch/trainer.py
@@ -491,12 +491,21 @@ def wrap_unet(self, unet_number):
     # hacking accelerator due to not having separate gradscaler per optimizer
 
     def set_accelerator_scaler(self, unet_number):
+        def patch_optimizer_step(accelerated_optimizer, method):
+            def patched_step(*args, **kwargs):
+                accelerated_optimizer._accelerate_step_called = True
+                return method(*args, **kwargs)
+            return patched_step
+
         unet_number = self.validate_unet_number(unet_number)
         scaler = getattr(self, f'scaler{unet_number - 1}')
 
         self.accelerator.scaler = scaler
         for optimizer in self.accelerator._optimizers:
             optimizer.scaler = scaler
+            optimizer._accelerate_step_called = False
+            optimizer._optimizer_original_step_method = optimizer.optimizer.step
+            optimizer._optimizer_patched_step_method = patch_optimizer_step(optimizer, optimizer.optimizer.step)
 
     # helper print
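
For reference, a minimal usage sketch adapted from the project README's unconditional example (the hyperparameters are illustrative, and fp16 = True is assumed from the trainer's mixed-precision support; it is what gives each unet a live GradScaler, the case this fix targets):

    import torch
    from imagen_pytorch import Unet, Imagen, ImagenTrainer

    unet = Unet(dim = 32, dim_mults = (1, 2, 4), layer_attns = False, layer_cross_attns = False)

    imagen = Imagen(
        condition_on_text = False,
        unets = unet,
        image_sizes = 64,
        timesteps = 100
    )

    # fp16 = True enables a per-unet GradScaler
    trainer = ImagenTrainer(imagen, fp16 = True)

    images = torch.randn(4, 3, 64, 64)
    loss = trainer(images, unet_number = 1)
    # update() swaps in unet 1's scaler via set_accelerator_scaler
    # before the optimizer step
    trainer.update(unet_number = 1)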

