@@ -3,6 +3,7 @@ from .adamw import AdamW
from .adafactor import Adafactor
from .adahessian import Adahessian
from .lookahead import Lookahead
+from .madgrad import MADGRAD
from .nadam import Nadam
from .nvnovograd import NvNovoGrad
from .radam import RAdam
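
With the export above in place, MADGRAD becomes importable directly from `timm.optim`. A minimal usage sketch, assuming the class keeps the constructor signature of the upstream facebookresearch implementation (`lr`, `momentum`, `weight_decay`, `eps`); the model and hyperparameters here are illustrative only:

```python
import torch
import torch.nn as nn
from timm.optim import MADGRAD

model = nn.Linear(16, 4)
# Illustrative hyperparameters; defaults mirror the upstream implementation.
optimizer = MADGRAD(model.parameters(), lr=1e-2, momentum=0.9, weight_decay=0.0)

x, target = torch.randn(8, 16), torch.randn(8, 4)
loss = nn.functional.mse_loss(model(x), target)
loss.backward()
optimizer.step()
optimizer.zero_grad()
```
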
@@ -1,3 +1,9 @@
""" PyTorch MADGRAD optimizer
MADGRAD: https://arxiv.org/abs/2101.11075
Code from: https://github.com/facebookresearch/madgrad
"""
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
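
For context on what the optimizer referenced by this docstring does, the core dense update from the MADGRAD paper (dual averaging with a cube-root denominator plus momentum) can be sketched as below. This is a simplified illustration, not the timm or facebookresearch implementation; `madgrad_step` and the state keys are hypothetical names, and weight decay, sparse gradients, and the momentum-free path are omitted.

```python
import torch

def madgrad_step(p, grad, state, lr=1e-2, momentum=0.9, eps=1e-6):
    """One simplified MADGRAD update for a single dense parameter tensor.

    Call under torch.no_grad() if p requires grad; this sketch mutates p in place.
    """
    if 'x0' not in state:
        state['x0'] = p.detach().clone()             # initial iterate x_0
        state['s'] = torch.zeros_like(p)             # running sum of scaled gradients
        state['grad_sum_sq'] = torch.zeros_like(p)   # running sum of scaled squared gradients
        state['k'] = 0

    lamb = lr * (state['k'] + 1) ** 0.5              # lambda_k = lr * sqrt(k + 1)
    state['s'].add_(grad, alpha=lamb)
    state['grad_sum_sq'].addcmul_(grad, grad, value=lamb)

    rms = state['grad_sum_sq'].pow(1.0 / 3).add_(eps)  # cube-root denominator
    z = state['x0'] - state['s'] / rms                 # dual-averaging iterate z_{k+1}
    ck = 1.0 - momentum
    p.mul_(1.0 - ck).add_(z, alpha=ck)                 # x_{k+1} = (1 - c) x_k + c z_{k+1}

    state['k'] += 1
```
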
@@ -1,12 +1,11 @@
""" Optimizer Factory w/ Custom Weight Decay
-Hacked together by / Copyright 2020 Ross Wightman
+Hacked together by / Copyright 2021 Ross Wightman
"""
from typing import Optional
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim.optimizer import required
from .adabelief import AdaBelief