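"""Consistency tests for timm activation layers built via create_act_layer.

Each test runs the same random input through a small MLP under the default,
scriptable, and no_jit layer configurations and checks the outputs match.
"""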
import torch
import torch.nn as nn

from timm.layers import create_act_layer, set_layer_config
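# Small two-layer MLP fixture; its activation is created through timm's
# create_act_layer factory so it can be swapped per test.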
class MLP(nn.Module):
    def __init__(self, act_layer="relu", inplace=True):
        super(MLP, self).__init__()
        self.fc1 = nn.Linear(1000, 100)
        self.act = create_act_layer(act_layer, inplace=inplace)
        self.fc2 = nn.Linear(100, 10)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.fc2(x)
        return x
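# Runs one random batch through the MLP with the given activation under the
# default, scriptable, and no_jit layer configs and asserts the scalar
# outputs agree.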
def _run_act_layer_grad(act_type, inplace=True):
    x = torch.rand(10, 1000) * 10
    m = MLP(act_layer=act_type, inplace=inplace)

    def _run(x, act_layer=''):
        if act_layer:
            # replace act layer if set
            m.act = create_act_layer(act_layer, inplace=inplace)
        out = m(x)
        # reduce to a scalar so results can be compared with torch.isclose
        l = (out - 0).pow(2).sum()
        return l

    out_me = _run(x)

    # re-run with scriptable (TorchScript-friendly) layer implementations
    with set_layer_config(scriptable=True):
        out_jit = _run(x, act_type)

    assert torch.isclose(out_jit, out_me)

    # re-run with JIT disabled, i.e. the plain PyTorch implementations
    with set_layer_config(no_jit=True):
        out_basic = _run(x, act_type)

    assert torch.isclose(out_basic, out_jit)
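# Each activation is checked repeatedly with fresh random inputs.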
def test_swish_grad():
    for _ in range(100):
        _run_act_layer_grad('swish')


def test_mish_grad():
    for _ in range(100):
        _run_act_layer_grad('mish')


def test_hard_sigmoid_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_sigmoid', inplace=None)


def test_hard_swish_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_swish')


def test_hard_mish_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_mish')