Skip to content

Commit

Permalink
use fused Adam on CUDA
Browse files Browse the repository at this point in the history
  • Loading branch information
LoicGrobol committed Nov 17, 2024
1 parent 7596537 commit faad953
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 20 deletions.
File renamed without changes.
20 changes: 0 additions & 20 deletions tox.ini

This file was deleted.

26 changes: 26 additions & 0 deletions tox.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
minversion = "4"
env_list = ["py312", "py311", "py310", "py39"]
isolated_build = true
skip_missing_interpreters = true

[env_run_base]
extras = ["tests"]
commands = [
[
"pytest",
"tests",
"--basetemp",
"{envtmpdir}",
{ replace = "posargs", extend = true },
],
]

[pytest]
script_launch_mode = "subprocess"

[gh-actions]
python = """
3.9: py39
3.10: py310
3.11: py311
3.12: py312"""
1 change: 1 addition & 0 deletions zeldarose/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ def configure_optimizers(self):
optimizer = torch.optim.AdamW(
optimizer_grouped_parameters,
betas=self.training_config.betas,
fused=True,
lr=self.training_config.learning_rate,
eps=self.training_config.epsilon,
weight_decay=decay_rate,
Expand Down
1 change: 1 addition & 0 deletions zeldarose/tasks/rtd.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,6 +347,7 @@ def configure_optimizers(self):
optimizer = torch.optim.AdamW(
optimizer_grouped_parameters,
betas=self.training_config.betas,
fused=True,
lr=self.training_config.learning_rate,
eps=self.training_config.epsilon,
weight_decay=decay_rate,
Expand Down

0 comments on commit faad953

Please sign in to comment.