[Bugfix] Fix AdamG instability (#308)
* fix adamg instability

* add the trailing newline
Vectorrent authored Dec 6, 2024
1 parent 5def5d7 · commit d1db791
Showing 1 changed file with 4 additions and 4 deletions.
pytorch_optimizer/optimizer/adamg.py
@@ -24,9 +24,9 @@ class AdamG(BaseOptimizer):
     def __init__(
         self,
         params: PARAMETERS,
-        lr: float = 1e-3,
+        lr: float = 1.0,
         betas: BETAS = (0.95, 0.999, 0.95),
-        p: float = 0.5,
+        p: float = 0.2,
         q: float = 0.24,
         weight_decay: float = 0.0,
         weight_decouple: bool = False,
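
The two default changes work in tandem: step() caps the effective step at min(lr, 1 / sqrt(step)) (visible in the second hunk below), so the old lr=1e-3 pinned every update to 1e-3 for roughly the first 10^6 steps, while lr=1.0 lets the 1/sqrt(step) schedule take effect from step 1. The new p=0.2 appears to match the value used in the AdamG paper. A small illustrative sketch, just evaluating that min() under both defaults:

    import math

    # Effective step size under the old and new lr defaults, using the
    # step_size rule visible in this diff: min(lr, 1 / sqrt(step)).
    for step in (1, 10, 100, 10_000):
        old = min(1e-3, 1.0 / math.sqrt(step))  # old default lr=1e-3: stuck at 1e-3
        new = min(1.0, 1.0 / math.sqrt(step))   # new default lr=1.0: 1/sqrt(step)
        print(f'step={step:>6}: old={old:.4f}  new={new:.4f}')
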
@@ -88,8 +88,8 @@ def step(self, closure: CLOSURE = None) -> LOSS:
 
             beta1, beta2, beta3 = group['betas']
 
-            bias_correction1: float = 1.0 - self.debias(beta1, group['step'])
-            bias_correction2: float = 1.0 - self.debias(beta2, group['step'])
+            bias_correction1: float = self.debias(beta1, group['step'])
+            bias_correction2: float = self.debias(beta2, group['step'])
             step_size: float = min(group['lr'], 1.0 / math.sqrt(group['step']))
 
             for p in group['params']:
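
This second hunk is the actual instability fix. The change only makes sense if debias(beta, step) already returns the bias-correction factor 1 - beta**step (the convention in pytorch-optimizer's BaseOptimizer helper, assumed here), in which case the old 1.0 - self.debias(...) double-negated it into beta**step, a factor that decays toward zero over training and, when used as a correction denominator as in standard Adam, inflates updates without bound. A minimal sketch of the two values under that assumption:

    # Assumes debias(beta, step) == 1 - beta**step, which is what this fix implies.
    def debias(beta: float, step: int) -> float:
        return 1.0 - beta ** step

    beta1 = 0.95
    for step in (1, 10, 100):
        buggy = 1.0 - debias(beta1, step)  # == beta1**step, decays toward 0
        fixed = debias(beta1, step)        # == 1 - beta1**step, approaches 1
        print(f'step={step:>3}: buggy={buggy:.4f}  fixed={fixed:.4f}')

By step 100 the buggy factor is about 0.0059 versus 0.9941 for the fixed one, so a division by it would scale the update by roughly 170x, consistent with the instability this commit reports fixing.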
