Back out some activation hacks trialing upcoming pytorch changes

pull/175/head
Ross Wightman 4 years ago
parent 3b9004bef9
commit 298fba09ac

@@ -82,7 +82,7 @@ class HardSwish(nn.Module):
         self.inplace = inplace

     def forward(self, x):
-        return F.hardswish(x)  # hard_swish(x, self.inplace)
+        return hard_swish(x, self.inplace)


 def hard_sigmoid(x, inplace: bool = False):

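For reference, the hard_swish call restored on the + line is the standard hard-swish formulation, x * relu6(x + 3) / 6. A minimal sketch of such a function, assuming the inplace flag mirrors the self.inplace attribute seen in the hunk above:

import torch
import torch.nn.functional as F

def hard_swish(x: torch.Tensor, inplace: bool = False) -> torch.Tensor:
    # hard-swish: x * relu6(x + 3) / 6; the inner term is a cheap
    # piecewise-linear approximation of sigmoid(x)
    inner = F.relu6(x + 3.).div_(6.)
    # mutate x in place when requested (assumed from the inplace arg)
    return x.mul_(inner) if inplace else x.mul(inner)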
@@ -39,7 +39,7 @@ _ACT_FN_ME = dict(
 )

 _ACT_LAYER_DEFAULT = dict(
-    swish=Swish,  # nn.SiLU, #
+    swish=Swish,
     mish=Mish,
     relu=nn.ReLU,
     relu6=nn.ReLU6,
@@ -56,7 +56,7 @@ _ACT_LAYER_DEFAULT = dict(
 )

 _ACT_LAYER_JIT = dict(
-    #swish=SwishJit,
+    swish=SwishJit,
     mish=MishJit,
     hard_sigmoid=HardSigmoidJit,
     hard_swish=HardSwishJit,

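The _ACT_LAYER_JIT table maps activation names to torch.jit.script-compiled variants. A rough sketch of what the re-enabled SwishJit entry resolves to; swish is x * sigmoid(x), but the exact bodies below are assumptions based on the names in the registry:

import torch
import torch.nn as nn

@torch.jit.script
def swish_jit(x):
    # swish / SiLU: x * sigmoid(x)
    return x.mul(x.sigmoid())

class SwishJit(nn.Module):
    def __init__(self, inplace: bool = False):
        # inplace accepted for interface compatibility; the scripted
        # elementwise fn does not mutate its input (assumed)
        super(SwishJit, self).__init__()

    def forward(self, x):
        return swish_jit(x)

Scripting the elementwise function lets the JIT fuse the pointwise ops into a single kernel, which is presumably why these variants were kept around while the native nn.SiLU / F.hardswish equivalents were still upcoming in PyTorch.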