Update lamb.py comment

pull/816/head
Ross Wightman 3 years ago committed by GitHub
parent 4d284017b8
commit 8f68193c91

@@ -169,7 +169,7 @@ class Lamb(Optimizer):
     trust_ratio = one_tensor
     if weight_decay != 0 or group['use_nvlamb']:
         # Layer adaptation. By default, skip layer adaptation on parameters that are
-        # excluded from weight norm, unless use_nvlamb == True, then always enabled.
+        # excluded from weight decay, unless use_nvlamb == True, then always enabled.
         w_norm = p.data.norm(2.0)
         g_norm = update.norm(2.0)
         trust_ratio = torch.where(
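The hunk ends mid-statement where the trust ratio is computed. Below is a minimal sketch of the LAMB layer-adaptation step for context; the standalone function and the construction of one_tensor are assumptions for illustration, not timm's exact code. Only w_norm, g_norm, and the torch.where fallback appear in the diff itself.

import torch

def apply_trust_ratio(p: torch.Tensor, update: torch.Tensor) -> torch.Tensor:
    # Assumed helpers: `p` is the parameter tensor, `update` is the already-computed
    # Adam-style update, `one_tensor` is a scalar 1.0 on the same device/dtype.
    one_tensor = torch.ones((), device=p.device, dtype=p.dtype)
    w_norm = p.data.norm(2.0)   # L2 norm of the layer's weights
    g_norm = update.norm(2.0)   # L2 norm of the proposed update
    # Scale the update by w_norm / g_norm, falling back to 1.0 when either
    # norm is zero so such layers are left unscaled.
    trust_ratio = torch.where(
        w_norm > 0,
        torch.where(g_norm > 0, w_norm / g_norm, one_tensor),
        one_tensor,
    )
    return update * trust_ratio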
