A few optimizer comments, dead import, missing import

pull/813/head
Ross Wightman 3 years ago
parent 959eaff121
commit 8a9eca5157

timm/optim/__init__.py
@@ -3,6 +3,7 @@ from .adamw import AdamW
 from .adafactor import Adafactor
 from .adahessian import Adahessian
 from .lookahead import Lookahead
+from .madgrad import MADGRAD
 from .nadam import Nadam
 from .nvnovograd import NvNovoGrad
 from .radam import RAdam
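
With the missing export added, MADGRAD can be imported straight from timm.optim. A minimal usage sketch, assuming the constructor keeps the upstream facebookresearch/madgrad defaults (the exact keyword names are an assumption, not verified against this branch):

import torch
import torch.nn as nn

from timm.optim import MADGRAD

model = nn.Linear(10, 2)
# Keyword names/defaults mirror the upstream facebookresearch/madgrad
# signature; treat them as an assumption, not a guarantee for this branch.
optimizer = MADGRAD(model.parameters(), lr=1e-2, momentum=0.9, weight_decay=0.0, eps=1e-6)

for _ in range(3):
    optimizer.zero_grad()
    loss = model(torch.randn(4, 10)).sum()  # toy loss just to produce gradients
    loss.backward()
    optimizer.step()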

timm/optim/madgrad.py
@@ -1,3 +1,9 @@
+""" PyTorch MADGRAD optimizer
+
+MADGRAD: https://arxiv.org/abs/2101.11075
+
+Code from: https://github.com/facebookresearch/madgrad
+"""
 # Copyright (c) Facebook, Inc. and its affiliates.
 #
 # This source code is licensed under the MIT license found in the
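
The new header points readers at the paper and the reference implementation. As a reading aid only, here is a toy paraphrase of the paper's dual-averaging update (Algorithm 1); all names are illustrative and this is not the module's actual kernel:

import torch

def madgrad_step(x0, x, grad_sum, grad_sq_sum, grad, step,
                 lr=1e-2, momentum=0.9, eps=1e-6):
    # lambda_k = lr * sqrt(k + 1) -- AdaGrad-style growing step weight
    lamb = lr * (step + 1) ** 0.5
    grad_sum = grad_sum + lamb * grad                 # s_{k+1}: weighted grad sum
    grad_sq_sum = grad_sq_sum + lamb * grad * grad    # nu_{k+1}: weighted sq-grad sum
    # dual-averaged iterate, cube-root denominator per the paper
    z = x0 - grad_sum / (grad_sq_sum.pow(1.0 / 3) + eps)
    ck = 1.0 - momentum                               # paper's c_{k+1}
    x = (1.0 - ck) * x + ck * z                       # momentum as iterate averaging
    return x, grad_sum, grad_sq_sum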

timm/optim/optim_factory.py
@@ -1,12 +1,11 @@
 """ Optimizer Factory w/ Custom Weight Decay
-Hacked together by / Copyright 2020 Ross Wightman
+Hacked together by / Copyright 2021 Ross Wightman
 """
 from typing import Optional

 import torch
 import torch.nn as nn
 import torch.optim as optim
-from torch.optim.optimizer import required

 from .adabelief import AdaBelief
 from .adafactor import Adafactor
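
The dropped torch.optim.optimizer import was dead code: required is never referenced in the factory, which dispatches on an opt string instead. A hedged sketch of going through the factory, assuming create_optimizer(args, model) still reads opt, lr, weight_decay, and momentum off an argparse-style namespace, and that 'madgrad' is registered as an opt string on this branch:

from types import SimpleNamespace

import torch.nn as nn

from timm.optim import create_optimizer

model = nn.Linear(10, 2)
# Field names follow timm's train-script argparse conventions; whether
# 'madgrad' is accepted here is an assumption about this branch.
args = SimpleNamespace(opt='madgrad', lr=1e-2, weight_decay=1e-4, momentum=0.9)
optimizer = create_optimizer(args, model)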
