From 298fba09ac7938a60a530f564bdf74cc70651768 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 17 Jul 2020 18:41:37 -0700
Subject: [PATCH] Back out some activation hacks trialing upcoming pytorch changes

---
 timm/models/layers/activations.py | 2 +-
 timm/models/layers/create_act.py  | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/timm/models/layers/activations.py b/timm/models/layers/activations.py
index c1066b7b..71904935 100644
--- a/timm/models/layers/activations.py
+++ b/timm/models/layers/activations.py
@@ -82,7 +82,7 @@ class HardSwish(nn.Module):
         self.inplace = inplace
 
     def forward(self, x):
-        return F.hardswish(x) #hard_swish(x, self.inplace)
+        return hard_swish(x, self.inplace)
 
 
 def hard_sigmoid(x, inplace: bool = False):
diff --git a/timm/models/layers/create_act.py b/timm/models/layers/create_act.py
index bf4ad119..6404d62f 100644
--- a/timm/models/layers/create_act.py
+++ b/timm/models/layers/create_act.py
@@ -39,7 +39,7 @@ _ACT_FN_ME = dict(
 )
 
 _ACT_LAYER_DEFAULT = dict(
-    swish=Swish, #nn.SiLU, #
+    swish=Swish,
     mish=Mish,
     relu=nn.ReLU,
     relu6=nn.ReLU6,
@@ -56,7 +56,7 @@ _ACT_LAYER_DEFAULT = dict(
 )
 
 _ACT_LAYER_JIT = dict(
-    #swish=SwishJit,
+    swish=SwishJit,
     mish=MishJit,
     hard_sigmoid=HardSigmoidJit,
     hard_swish=HardSwishJit,
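
Note: the restored HardSwish.forward calls the module-level hard_swish helper rather than the F.hardswish / nn.SiLU natives that were being trialed against upcoming PyTorch releases. For reference, that helper is roughly the following (a sketch of timm's activations.py around this commit, not part of this patch; the exact body may differ):

    import torch.nn.functional as F


    def hard_swish(x, inplace: bool = False):
        # x * relu6(x + 3) / 6; mutates x when inplace=True
        inner = F.relu6(x + 3.).div_(6.)
        return x.mul_(inner) if inplace else x.mul(inner)

The create_act.py changes keep the custom Swish as the default layer and re-enable SwishJit in the scripted-activation table, so activation lookup does not depend on the not-yet-released native ops.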