Merge branch 'master' into eca-weights

pull/413/head
Ross Wightman authored 4 years ago, committed by GitHub
commit a39c3ee216

@@ -10,7 +10,7 @@
 * Remove separate tiered (`t`) vs tiered_narrow (`tn`) ResNet model defs, all `tn` changed to `t` and `t` models removed (`seresnext26t_32x4d` only model w/ weights that was removed).
 * Support model default_cfgs with separate train vs test resolution `test_input_size`
-### Jan 30, 2012
+### Jan 30, 2021
 * Add initial "Normalization Free" NF-RegNet-B* and NF-ResNet model definitions based on [paper](https://arxiv.org/abs/2101.08692)
 ### Jan 25, 2021
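
For context on the `test_input_size` bullet: it lets a model's default_cfg declare a larger eval resolution than the one used for training. A rough sketch of what such an entry could look like (only the `input_size`/`test_input_size` key names come from the changelog; the model name and concrete resolutions here are hypothetical):

```python
# Hypothetical default_cfg entry; key names from the changelog bullet above,
# model name and resolutions invented for illustration.
default_cfgs = {
    'resnet_example': {
        'input_size': (3, 224, 224),       # resolution used during training
        'test_input_size': (3, 288, 288),  # larger resolution used for eval
    },
}
```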

@@ -30,6 +30,9 @@ class SwishJitAutoFn(torch.autograd.Function):
     Inspired by conversation btw Jeremy Howard & Adam Paszke
     https://twitter.com/jeremyphoward/status/1188251041835315200
     """
+    @staticmethod
+    def symbolic(g, x):
+        return g.op("Mul", x, g.op("Sigmoid", x))
 
     @staticmethod
     def forward(ctx, x):
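
Both `symbolic` additions rely on the same mechanism: when `torch.onnx.export` traces a model and reaches a custom `torch.autograd.Function`, it calls the function's `symbolic` staticmethod to emit ONNX graph nodes instead of trying (and failing) to export the Python `forward`. A minimal self-contained sketch of that pattern, using hypothetical `MySwishFn`/`MySwish` names rather than timm's actual classes:

```python
import torch

class MySwishFn(torch.autograd.Function):
    @staticmethod
    def symbolic(g, x):
        # Consulted by the ONNX exporter: emit Sigmoid + Mul nodes
        # instead of tracing through forward().
        return g.op("Mul", x, g.op("Sigmoid", x))

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return x.mul(torch.sigmoid(x))

    @staticmethod
    def backward(ctx, grad_output):
        # d/dx [x * sigmoid(x)] = sigmoid(x) * (1 + x * (1 - sigmoid(x)))
        x = ctx.saved_tensors[0]
        x_sigmoid = torch.sigmoid(x)
        return grad_output * (x_sigmoid * (1 + x * (1 - x_sigmoid)))

class MySwish(torch.nn.Module):
    def forward(self, x):
        return MySwishFn.apply(x)

# Without symbolic() this export fails on the custom op; with it, the
# exporter writes plain Sigmoid/Mul nodes into the graph.
torch.onnx.export(MySwish(), torch.randn(1, 8), "swish.onnx")
```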
@@ -152,6 +155,13 @@ class HardSwishJitAutoFn(torch.autograd.Function):
         x = ctx.saved_tensors[0]
         return hard_swish_jit_bwd(x, grad_output)
 
+    @staticmethod
+    def symbolic(g, self):
+        input = g.op("Add", self, g.op('Constant', value_t=torch.tensor(3, dtype=torch.float)))
+        hardtanh_ = g.op("Clip", input, g.op('Constant', value_t=torch.tensor(0, dtype=torch.float)), g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))
+        hardtanh_ = g.op("Div", hardtanh_, g.op('Constant', value_t=torch.tensor(6, dtype=torch.float)))
+        return g.op("Mul", self, hardtanh_)
+
 
 def hard_swish_me(x, inplace=False):
     return HardSwishJitAutoFn.apply(x)
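
The HardSwish symbolic spells the activation out as `x * clip(x + 3, 0, 6) / 6` from the primitive `Add`/`Clip`/`Div`/`Mul` ops, since ONNX opsets of this era had no dedicated HardSwish operator. A quick illustrative equivalence check (not part of the commit):

```python
import torch
import torch.nn.functional as F

x = torch.linspace(-8, 8, steps=1001)
# Same expression the symbolic() above builds as ONNX nodes.
onnx_style = x * torch.clamp(x + 3, 0, 6) / 6
assert torch.allclose(onnx_style, F.hardswish(x))
```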
