Back out some activation hacks trialing upcoming pytorch changes

pull/175/head
Ross Wightman 4 years ago
parent 3b9004bef9
commit 298fba09ac

@@ -82,7 +82,7 @@ class HardSwish(nn.Module):
         self.inplace = inplace
 
     def forward(self, x):
-        return F.hardswish(x)  #hard_swish(x, self.inplace)
+        return hard_swish(x, self.inplace)
 
 
 def hard_sigmoid(x, inplace: bool = False):

@@ -39,7 +39,7 @@ _ACT_FN_ME = dict(
 )
 
 _ACT_LAYER_DEFAULT = dict(
-    swish=Swish,  #nn.SiLU, #
+    swish=Swish,
     mish=Mish,
     relu=nn.ReLU,
     relu6=nn.ReLU6,
@@ -56,7 +56,7 @@ _ACT_LAYER_DEFAULT = dict(
 )
 
 _ACT_LAYER_JIT = dict(
-    #swish=SwishJit,
+    swish=SwishJit,
     mish=MishJit,
     hard_sigmoid=HardSigmoidJit,
     hard_swish=HardSwishJit,
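
For context, the trial being backed out had swapped timm's hand-written activations for the native F.hardswish / nn.SiLU ops added in newer PyTorch. Below is a minimal sketch of how such a swap can be gated on op availability rather than hard-coded; the _has_hardswish/_has_silu guards and the small registry are illustrative assumptions, not timm's actual code.

import torch.nn as nn
import torch.nn.functional as F

# Feature checks: F.hardswish and nn.SiLU only exist in newer PyTorch releases
# (hypothetical guard names used for this sketch).
_has_hardswish = hasattr(F, 'hardswish')
_has_silu = hasattr(nn, 'SiLU')


def hard_swish(x, inplace: bool = False):
    # Hand-rolled fallback: hard_swish(x) = x * relu6(x + 3) / 6
    inner = F.relu6(x + 3.).div_(6.)
    return x.mul_(inner) if inplace else x.mul(inner)


class Swish(nn.Module):
    def __init__(self, inplace: bool = False):
        super(Swish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        # swish / SiLU: x * sigmoid(x)
        return x.mul_(x.sigmoid()) if self.inplace else x.mul(x.sigmoid())


class HardSwish(nn.Module):
    def __init__(self, inplace: bool = False):
        super(HardSwish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        # Prefer the native op when available, otherwise use the fallback.
        if _has_hardswish:
            return F.hardswish(x, inplace=self.inplace)
        return hard_swish(x, self.inplace)


# Registry sketch: pick the native module when present, the fallback otherwise.
# (The conditional expression only touches nn.SiLU when _has_silu is True.)
_ACT_LAYER_DEFAULT = dict(
    swish=nn.SiLU if _has_silu else Swish,
    hard_swish=HardSwish,
)

With a guard like this the same code runs on PyTorch versions with and without the native ops, which is roughly what the reverted hack was probing by hard-coding the new ops in place.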
