aerinkim / my_adam.py
Adam Implementation from scratch
import torch
from torch.optim import Optimizer


class ADAMOptimizer(Optimizer):
    """
    Implements the Adam algorithm.
    """
    def __init__(self, params, lr=1e-3, betas=(0.9, 0.99), eps=1e-8, weight_decay=0):
        # Note: the canonical Adam default for the second-moment decay is
        # betas=(0.9, 0.999); this gist uses 0.99.
        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
        super(ADAMOptimizer, self).__init__(params, defaults)
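
    # ---------------------------------------------------------------------
    # The gist is truncated here; what follows is a hedged sketch of the
    # missing update step, not the author's exact code. It applies the
    # standard Adam rule with bias correction:
    #   m_t = beta1 * m_{t-1} + (1 - beta1) * g_t
    #   v_t = beta2 * v_{t-1} + (1 - beta2) * g_t^2
    #   p  -= lr * (m_t / (1 - beta1^t)) / (sqrt(v_t / (1 - beta2^t)) + eps)
    def step(self, closure=None):
        """Performs a single optimization step."""
        loss = None
        if closure is not None:
            loss = closure()

        for group in self.param_groups:
            beta1, beta2 = group['betas']
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data

                # Lazily initialize per-parameter state on the first step.
                state = self.state[p]
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p.data)     # m_t
                    state['exp_avg_sq'] = torch.zeros_like(p.data)  # v_t
                state['step'] += 1

                # Classic (non-decoupled) L2 weight decay folds into the gradient.
                if group['weight_decay'] != 0:
                    grad = grad.add(p.data, alpha=group['weight_decay'])

                # Update biased first and second moment estimates in place.
                exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
                exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1)
                exp_avg_sq.mul_(beta2).addcmul_(grad, grad, value=1 - beta2)

                # Bias-correct both moments, then apply the parameter update.
                bias_correction1 = 1 - beta1 ** state['step']
                bias_correction2 = 1 - beta2 ** state['step']
                denom = (exp_avg_sq / bias_correction2).sqrt().add_(group['eps'])
                p.data.addcdiv_(exp_avg / bias_correction1, denom, value=-group['lr'])

        return loss

    # Usage sketch (model, data, criterion, and target are hypothetical
    # placeholders, not names from the gist):
    #   optimizer = ADAMOptimizer(model.parameters(), lr=1e-3)
    #   loss = criterion(model(data), target)
    #   optimizer.zero_grad(); loss.backward(); optimizer.step()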