From 4fae5f4c0e44abf9c0a02f81fb795100bf6e4100 Mon Sep 17 00:00:00 2001
From: Sangdoo Yun
Date: Wed, 22 Jul 2020 11:04:01 +0900
Subject: [PATCH] Update optim_factory.py

Add `adamp` optimizer
---
 timm/optim/optim_factory.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/timm/optim/optim_factory.py b/timm/optim/optim_factory.py
index d97887d5..f3a6deb0 100644
--- a/timm/optim/optim_factory.py
+++ b/timm/optim/optim_factory.py
@@ -1,6 +1,6 @@
 import torch
 from torch import optim as optim
-from timm.optim import Nadam, RMSpropTF, AdamW, RAdam, NovoGrad, NvNovoGrad, Lookahead
+from timm.optim import Nadam, RMSpropTF, AdamW, RAdam, NovoGrad, NvNovoGrad, Lookahead, AdamP
 try:
     from apex.optimizers import FusedNovoGrad, FusedAdam, FusedLAMB, FusedSGD
     has_apex = True
@@ -60,6 +60,10 @@ def create_optimizer(args, model, filter_bias_and_bn=True):
     elif opt_lower == 'radam':
         optimizer = RAdam(
             parameters, lr=args.lr, weight_decay=weight_decay, eps=args.opt_eps)
+    elif opt_lower == 'adamp':
+        optimizer = AdamP(
+            parameters, lr=args.lr, weight_decay=weight_decay, eps=args.opt_eps,
+            delta=0.1, wd_ratio=0.01, nesterov=True)
     elif opt_lower == 'adadelta':
         optimizer = optim.Adadelta(
             parameters, lr=args.lr, weight_decay=weight_decay, eps=args.opt_eps)
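
With this patch, selecting `--opt adamp` routes optimizer creation through the new branch: `lr`, `eps`, and `weight_decay` still come from the usual args, while the AdamP-specific knobs are fixed in the factory (delta=0.1, wd_ratio=0.01, nesterov=True). A minimal usage sketch follows; it assumes an argparse-style namespace carrying the fields `create_optimizer` reads, and the exact field set and the `resnet18` model name are illustrative assumptions, not part of the patch.

# Minimal sketch of driving the new 'adamp' branch via the factory.
# Assumption: args only needs the fields the factory reads (opt, lr,
# opt_eps, weight_decay; momentum is included for completeness but is
# only consulted by the SGD-style branches).
from types import SimpleNamespace

import timm
from timm.optim.optim_factory import create_optimizer

model = timm.create_model('resnet18')
args = SimpleNamespace(
    opt='adamp',        # selects the new AdamP branch added by this patch
    lr=1e-3,
    opt_eps=1e-8,
    weight_decay=1e-2,
    momentum=0.9,
)
optimizer = create_optimizer(args, model)
print(type(optimizer).__name__)  # expected: AdamP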