Compare commits

...

3 Commits

Author          SHA1         Message                                                  Date
Ross Wightman   7f5393f94d   maxxvit type                                             1 year ago
Ross Wightman   8968a03ed4   More kwarg handling tweaks, maxvit_base_rw def added     1 year ago
Ross Wightman   bd39f677c5   Improving kwarg merging in more models                   1 year ago
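
The common thread across these commits is the kwarg-merging pattern in the model entrypoints: architecture defaults are collected into a plain dict first, and caller kwargs are merged last via `**dict(model_args, **kwargs)`. A minimal sketch of the pattern (illustrative names, not timm source); with the old form, any caller key that collided with a baked-in default raised a duplicate-keyword `TypeError`, whereas the new form lets the caller override it:

```python
# Minimal sketch of the merging pattern (illustrative names, not timm source).
def _create_model(variant, pretrained=False, **kwargs):
    # stand-in for builders such as _create_resnet / _create_dpn
    return variant, pretrained, kwargs

def resnet18(pretrained=False, **kwargs):
    model_args = dict(block='BasicBlock', layers=[2, 2, 2, 2])  # architecture defaults
    # dict(model_args, **kwargs): caller kwargs win on any overlapping key
    return _create_model('resnet18', pretrained, **dict(model_args, **kwargs))

print(resnet18(layers=[1, 1, 1, 1]))
# the old dict(block=..., layers=[2, 2, 2, 2], **kwargs) form would instead raise
# TypeError: got multiple values for keyword argument 'layers'
```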

@ -12,7 +12,7 @@ import torch.utils.checkpoint as cp
from torch.jit.annotations import List
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.layers import BatchNormAct2d, create_norm_act_layer, BlurPool2d, create_classifier
from timm.layers import BatchNormAct2d, get_norm_act_layer, BlurPool2d, create_classifier
from ._builder import build_model_with_cfg
from ._manipulate import MATCH_PREV_GROUP
from ._registry import register_model
@ -115,8 +115,15 @@ class DenseBlock(nn.ModuleDict):
_version = 2
def __init__(
self, num_layers, num_input_features, bn_size, growth_rate, norm_layer=BatchNormAct2d,
drop_rate=0., memory_efficient=False):
self,
num_layers,
num_input_features,
bn_size,
growth_rate,
norm_layer=BatchNormAct2d,
drop_rate=0.,
memory_efficient=False,
):
super(DenseBlock, self).__init__()
for i in range(num_layers):
layer = DenseLayer(
@ -165,12 +172,25 @@ class DenseNet(nn.Module):
"""
def __init__(
self, growth_rate=32, block_config=(6, 12, 24, 16), num_classes=1000, in_chans=3, global_pool='avg',
bn_size=4, stem_type='', norm_layer=BatchNormAct2d, aa_layer=None, drop_rate=0,
memory_efficient=False, aa_stem_only=True):
self,
growth_rate=32,
block_config=(6, 12, 24, 16),
num_classes=1000,
in_chans=3,
global_pool='avg',
bn_size=4,
stem_type='',
act_layer='relu',
norm_layer='batchnorm2d',
aa_layer=None,
drop_rate=0,
memory_efficient=False,
aa_stem_only=True,
):
self.num_classes = num_classes
self.drop_rate = drop_rate
super(DenseNet, self).__init__()
norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer)
# Stem
deep_stem = 'deep' in stem_type # 3x3 deep stem
@ -226,8 +246,11 @@ class DenseNet(nn.Module):
dict(num_chs=num_features, reduction=current_stride, module='features.' + module_name)]
current_stride *= 2
trans = DenseTransition(
num_input_features=num_features, num_output_features=num_features // 2,
norm_layer=norm_layer, aa_layer=transition_aa_layer)
num_input_features=num_features,
num_output_features=num_features // 2,
norm_layer=norm_layer,
aa_layer=transition_aa_layer,
)
self.features.add_module(f'transition{i + 1}', trans)
num_features = num_features // 2
@ -322,8 +345,8 @@ def densenetblur121d(pretrained=False, **kwargs):
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
model = _create_densenet(
'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, stem_type='deep',
aa_layer=BlurPool2d, **kwargs)
'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained,
stem_type='deep', aa_layer=BlurPool2d, **kwargs)
return model
@ -382,11 +405,9 @@ def densenet264(pretrained=False, **kwargs):
def densenet264d_iabn(pretrained=False, **kwargs):
r"""Densenet-264 model with deep stem and Inplace-ABN
"""
def norm_act_fn(num_features, **kwargs):
return create_norm_act_layer('iabn', num_features, act_layer='leaky_relu', **kwargs)
model = _create_densenet(
'densenet264d_iabn', growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep',
norm_layer=norm_act_fn, pretrained=pretrained, **kwargs)
norm_layer='iabn', act_layer='leaky_relu', pretrained=pretrained, **kwargs)
return model
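
The DenseNet changes above swap the local `create_norm_act_layer('iabn', ...)` closure for string `norm_layer` / `act_layer` arguments resolved through `get_norm_act_layer`. A rough sketch of that resolution, assuming a timm version that exports `get_norm_act_layer` from `timm.layers`:

```python
# Hedged sketch; assumes a timm version exporting get_norm_act_layer from timm.layers.
import torch
from timm.layers import get_norm_act_layer

# string specs resolve to a fused norm + activation class, as DenseNet.__init__ now
# does with norm_layer='batchnorm2d', act_layer='relu'
norm_act = get_norm_act_layer('batchnorm2d', act_layer='relu')
layer = norm_act(64)                              # BatchNormAct2d over 64 channels
print(layer(torch.randn(2, 64, 8, 8)).shape)      # torch.Size([2, 64, 8, 8])
```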

@ -15,7 +15,7 @@ import torch.nn as nn
import torch.nn.functional as F
from timm.data import IMAGENET_DPN_MEAN, IMAGENET_DPN_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier
from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier, get_norm_act_layer
from ._builder import build_model_with_cfg
from ._registry import register_model
@ -33,6 +33,7 @@ def _cfg(url='', **kwargs):
default_cfgs = {
'dpn48b': _cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
'dpn68': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68-66bebafa7.pth'),
'dpn68b': _cfg(
@ -82,7 +83,16 @@ class BnActConv2d(nn.Module):
class DualPathBlock(nn.Module):
def __init__(
self, in_chs, num_1x1_a, num_3x3_b, num_1x1_c, inc, groups, block_type='normal', b=False):
self,
in_chs,
num_1x1_a,
num_3x3_b,
num_1x1_c,
inc,
groups,
block_type='normal',
b=False,
):
super(DualPathBlock, self).__init__()
self.num_1x1_c = num_1x1_c
self.inc = inc
@ -167,16 +177,31 @@ class DualPathBlock(nn.Module):
class DPN(nn.Module):
def __init__(
self, small=False, num_init_features=64, k_r=96, groups=32, global_pool='avg',
b=False, k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), output_stride=32,
num_classes=1000, in_chans=3, drop_rate=0., fc_act_layer=nn.ELU):
self,
k_sec=(3, 4, 20, 3),
inc_sec=(16, 32, 24, 128),
k_r=96,
groups=32,
num_classes=1000,
in_chans=3,
output_stride=32,
global_pool='avg',
small=False,
num_init_features=64,
b=False,
drop_rate=0.,
norm_layer='batchnorm2d',
act_layer='relu',
fc_act_layer='elu',
):
super(DPN, self).__init__()
self.num_classes = num_classes
self.drop_rate = drop_rate
self.b = b
assert output_stride == 32 # FIXME look into dilation support
norm_layer = partial(BatchNormAct2d, eps=.001)
fc_norm_layer = partial(BatchNormAct2d, eps=.001, act_layer=fc_act_layer, inplace=False)
norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=act_layer), eps=.001)
fc_norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=fc_act_layer), eps=.001, inplace=False)
bw_factor = 1 if small else 4
blocks = OrderedDict()
@ -291,49 +316,57 @@ def _create_dpn(variant, pretrained=False, **kwargs):
**kwargs)
@register_model
def dpn48b(pretrained=False, **kwargs):
model_kwargs = dict(
small=True, num_init_features=10, k_r=128, groups=32,
b=True, k_sec=(3, 4, 6, 3), inc_sec=(16, 32, 32, 64), act_layer='silu')
return _create_dpn('dpn48b', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn68(pretrained=False, **kwargs):
model_kwargs = dict(
small=True, num_init_features=10, k_r=128, groups=32,
k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs)
return _create_dpn('dpn68', pretrained=pretrained, **model_kwargs)
k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64))
return _create_dpn('dpn68', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn68b(pretrained=False, **kwargs):
model_kwargs = dict(
small=True, num_init_features=10, k_r=128, groups=32,
b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs)
return _create_dpn('dpn68b', pretrained=pretrained, **model_kwargs)
b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64))
return _create_dpn('dpn68b', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn92(pretrained=False, **kwargs):
model_kwargs = dict(
num_init_features=64, k_r=96, groups=32,
k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), **kwargs)
return _create_dpn('dpn92', pretrained=pretrained, **model_kwargs)
k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128))
return _create_dpn('dpn92', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn98(pretrained=False, **kwargs):
model_kwargs = dict(
num_init_features=96, k_r=160, groups=40,
k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128), **kwargs)
return _create_dpn('dpn98', pretrained=pretrained, **model_kwargs)
k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128))
return _create_dpn('dpn98', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn131(pretrained=False, **kwargs):
model_kwargs = dict(
num_init_features=128, k_r=160, groups=40,
k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128), **kwargs)
return _create_dpn('dpn131', pretrained=pretrained, **model_kwargs)
k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128))
return _create_dpn('dpn131', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
def dpn107(pretrained=False, **kwargs):
model_kwargs = dict(
num_init_features=128, k_r=200, groups=50,
k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128), **kwargs)
return _create_dpn('dpn107', pretrained=pretrained, **model_kwargs)
k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128))
return _create_dpn('dpn107', pretrained=pretrained, **dict(model_kwargs, **kwargs))
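
With `norm_layer` / `act_layer` / `fc_act_layer` now plumbed through `DPN.__init__` and caller kwargs merged last in the entrypoints, the activation can be overridden at create time (as the new `dpn48b` def does internally with `act_layer='silu'`). A hedged sketch, assuming a checkout containing these commits:

```python
# Hedged sketch; assumes a timm checkout containing these commits.
import timm

# the new act_layer argument plus last-wins kwarg merging allow an activation
# override at create time, mirroring what the dpn48b def does internally
model = timm.create_model('dpn68b', pretrained=False, act_layer='silu')
print(type(model).__name__)  # DPN
```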

@ -1116,6 +1116,26 @@ class NormMlpHead(nn.Module):
return x
def _overlay_kwargs(cfg: MaxxVitCfg, **kwargs):
transformer_kwargs = {}
conv_kwargs = {}
base_kwargs = {}
for k, v in kwargs.items():
if k.startswith('transformer_'):
transformer_kwargs[k.replace('transformer_', '')] = v
elif k.startswith('conv_'):
conv_kwargs[k.replace('conv_', '')] = v
else:
base_kwargs[k] = v
cfg = replace(
cfg,
transformer_cfg=replace(cfg.transformer_cfg, **transformer_kwargs),
conv_cfg=replace(cfg.conv_cfg, **conv_kwargs),
**base_kwargs
)
return cfg
class MaxxVit(nn.Module):
""" CoaTNet + MaxVit base model.
@ -1130,10 +1150,13 @@ class MaxxVit(nn.Module):
num_classes: int = 1000,
global_pool: str = 'avg',
drop_rate: float = 0.,
drop_path_rate: float = 0.
drop_path_rate: float = 0.,
**kwargs,
):
super().__init__()
img_size = to_2tuple(img_size)
if kwargs:
cfg = _overlay_kwargs(cfg, **kwargs)
transformer_cfg = cfg_window_size(cfg.transformer_cfg, img_size)
self.num_classes = num_classes
self.global_pool = global_pool
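
The `_overlay_kwargs` helper added above routes `transformer_`- and `conv_`-prefixed kwargs into the nested config dataclasses before `MaxxVit` consumes the cfg. A self-contained sketch of that routing (the dataclass fields here are illustrative stand-ins, not the real `MaxxVitCfg` schema):

```python
# Self-contained sketch of prefix-routed config overlay; field names are illustrative,
# not the actual MaxxVitCfg / MaxxVitTransformerCfg fields.
from dataclasses import dataclass, field, replace

@dataclass
class TransformerCfg:
    depth: int = 12
    window_size: int = 7

@dataclass
class ModelCfg:
    embed_dim: int = 768
    transformer_cfg: TransformerCfg = field(default_factory=TransformerCfg)

def overlay_kwargs(cfg: ModelCfg, **kwargs) -> ModelCfg:
    transformer_kwargs, base_kwargs = {}, {}
    for k, v in kwargs.items():
        if k.startswith('transformer_'):
            transformer_kwargs[k[len('transformer_'):]] = v   # route into the nested cfg
        else:
            base_kwargs[k] = v                                 # top-level field override
    return replace(
        cfg,
        transformer_cfg=replace(cfg.transformer_cfg, **transformer_kwargs),
        **base_kwargs,
    )

print(overlay_kwargs(ModelCfg(), embed_dim=512, transformer_window_size=8))
# ModelCfg(embed_dim=512, transformer_cfg=TransformerCfg(depth=12, window_size=8))
```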
@ -1657,6 +1680,26 @@ model_cfgs = dict(
init_values=1e-6,
),
),
maxvit_rmlp_base_rw_224=MaxxVitCfg(
embed_dim=(96, 192, 384, 768),
depths=(2, 6, 14, 2),
block_type=('M',) * 4,
stem_width=(32, 64),
head_hidden_size=768,
**_rw_max_cfg(
rel_pos_type='mlp',
),
),
maxvit_rmlp_base_rw_384=MaxxVitCfg(
embed_dim=(96, 192, 384, 768),
depths=(2, 6, 14, 2),
block_type=('M',) * 4,
stem_width=(32, 64),
head_hidden_size=768,
**_rw_max_cfg(
rel_pos_type='mlp',
),
),
maxvit_tiny_pm_256=MaxxVitCfg(
embed_dim=(64, 128, 256, 512),
@ -1839,6 +1882,12 @@ default_cfgs = generate_default_cfgs({
'maxvit_rmlp_small_rw_256': _cfg(
url='',
input_size=(3, 256, 256), pool_size=(8, 8)),
'maxvit_rmlp_base_rw_224': _cfg(
url='',
),
'maxvit_rmlp_base_rw_384': _cfg(
url='',
input_size=(3, 384, 384), pool_size=(12, 12)),
'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
@ -2068,6 +2117,16 @@ def maxvit_rmlp_small_rw_256(pretrained=False, **kwargs):
return _create_maxxvit('maxvit_rmlp_small_rw_256', pretrained=pretrained, **kwargs)
@register_model
def maxvit_rmlp_base_rw_224(pretrained=False, **kwargs):
return _create_maxxvit('maxvit_rmlp_base_rw_224', pretrained=pretrained, **kwargs)
@register_model
def maxvit_rmlp_base_rw_384(pretrained=False, **kwargs):
return _create_maxxvit('maxvit_rmlp_base_rw_384', pretrained=pretrained, **kwargs)
@register_model
def maxvit_tiny_pm_256(pretrained=False, **kwargs):
return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs)
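
The new `maxvit_rmlp_base_rw_224` / `maxvit_rmlp_base_rw_384` defs register with empty weight URLs, so they build untrained. A hedged usage sketch, assuming a checkout that includes these commits:

```python
# Hedged usage sketch; assumes a timm checkout containing these commits. No pretrained
# weights ship in this diff (url=''), so pretrained stays False.
import timm
import torch

model = timm.create_model('maxvit_rmlp_base_rw_224', pretrained=False)
logits = model(torch.randn(1, 3, 224, 224))
print(logits.shape)  # torch.Size([1, 1000])
```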

@ -266,9 +266,16 @@ class MobileVitBlock(nn.Module):
self.transformer = nn.Sequential(*[
TransformerBlock(
transformer_dim, mlp_ratio=mlp_ratio, num_heads=num_heads, qkv_bias=True,
attn_drop=attn_drop, drop=drop, drop_path=drop_path_rate,
act_layer=layers.act, norm_layer=transformer_norm_layer)
transformer_dim,
mlp_ratio=mlp_ratio,
num_heads=num_heads,
qkv_bias=True,
attn_drop=attn_drop,
drop=drop,
drop_path=drop_path_rate,
act_layer=layers.act,
norm_layer=transformer_norm_layer,
)
for _ in range(transformer_depth)
])
self.norm = transformer_norm_layer(transformer_dim)

@ -156,8 +156,8 @@ def res2net50_26w_4s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4), **kwargs)
return _create_res2net('res2net50_26w_4s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4))
return _create_res2net('res2net50_26w_4s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -167,8 +167,8 @@ def res2net101_26w_4s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4), **kwargs)
return _create_res2net('res2net101_26w_4s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4))
return _create_res2net('res2net101_26w_4s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -178,8 +178,8 @@ def res2net50_26w_6s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6), **kwargs)
return _create_res2net('res2net50_26w_6s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6))
return _create_res2net('res2net50_26w_6s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -189,8 +189,8 @@ def res2net50_26w_8s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8), **kwargs)
return _create_res2net('res2net50_26w_8s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8))
return _create_res2net('res2net50_26w_8s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -200,8 +200,8 @@ def res2net50_48w_2s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2), **kwargs)
return _create_res2net('res2net50_48w_2s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2))
return _create_res2net('res2net50_48w_2s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -211,8 +211,8 @@ def res2net50_14w_8s(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8), **kwargs)
return _create_res2net('res2net50_14w_8s', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8))
return _create_res2net('res2net50_14w_8s', pretrained, **dict(model_args, **kwargs))
@register_model
@ -222,5 +222,5 @@ def res2next50(pretrained=False, **kwargs):
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model_args = dict(
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4), **kwargs)
return _create_res2net('res2next50', pretrained, **model_args)
block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4))
return _create_res2net('res2next50', pretrained, **dict(model_args, **kwargs))

@ -163,8 +163,8 @@ def resnest14d(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[1, 1, 1, 1],
stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest14d', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest14d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -174,8 +174,8 @@ def resnest26d(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[2, 2, 2, 2],
stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest26d', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest26d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -186,8 +186,8 @@ def resnest50d(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 4, 6, 3],
stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest50d', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest50d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -198,8 +198,8 @@ def resnest101e(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 4, 23, 3],
stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest101e', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest101e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -210,8 +210,8 @@ def resnest200e(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 24, 36, 3],
stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest200e', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest200e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -222,8 +222,8 @@ def resnest269e(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 30, 48, 8],
stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
return _create_resnest('resnest269e', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=2, avd=True, avd_first=False))
return _create_resnest('resnest269e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -233,8 +233,8 @@ def resnest50d_4s2x40d(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 4, 6, 3],
stem_type='deep', stem_width=32, avg_down=True, base_width=40, cardinality=2,
block_args=dict(radix=4, avd=True, avd_first=True), **kwargs)
return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=4, avd=True, avd_first=True))
return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
@register_model
@ -244,5 +244,5 @@ def resnest50d_1s4x24d(pretrained=False, **kwargs):
model_kwargs = dict(
block=ResNestBottleneck, layers=[3, 4, 6, 3],
stem_type='deep', stem_width=32, avg_down=True, base_width=24, cardinality=4,
block_args=dict(radix=1, avd=True, avd_first=True), **kwargs)
return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **model_kwargs)
block_args=dict(radix=1, avd=True, avd_first=True))
return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **dict(model_kwargs, **kwargs))

@ -704,7 +704,7 @@ class ResNet(nn.Module):
self.num_classes = num_classes
self.drop_rate = drop_rate
self.grad_checkpointing = False
act_layer = get_act_layer(act_layer)
norm_layer = get_norm_layer(norm_layer)
@ -845,77 +845,72 @@ def _create_resnet(variant, pretrained=False, **kwargs):
def resnet10t(pretrained=False, **kwargs):
"""Constructs a ResNet-10-T model.
"""
model_args = dict(
block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs)
return _create_resnet('resnet10t', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True)
return _create_resnet('resnet10t', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet14t(pretrained=False, **kwargs):
"""Constructs a ResNet-14-T model.
"""
model_args = dict(
block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs)
return _create_resnet('resnet14t', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True)
return _create_resnet('resnet14t', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
"""
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs)
return _create_resnet('resnet18', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2])
return _create_resnet('resnet18', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet18d(pretrained=False, **kwargs):
"""Constructs a ResNet-18-D model.
"""
model_args = dict(
block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet18d', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet18d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model.
"""
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('resnet34', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3])
return _create_resnet('resnet34', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet34d(pretrained=False, **kwargs):
"""Constructs a ResNet-34-D model.
"""
model_args = dict(
block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet34d', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet34d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet26(pretrained=False, **kwargs):
"""Constructs a ResNet-26 model.
"""
model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], **kwargs)
return _create_resnet('resnet26', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2])
return _create_resnet('resnet26', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet26t(pretrained=False, **kwargs):
"""Constructs a ResNet-26-T model.
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs)
return _create_resnet('resnet26t', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True)
return _create_resnet('resnet26t', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet26d(pretrained=False, **kwargs):
"""Constructs a ResNet-26-D model.
"""
model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet26d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet26d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -923,83 +918,79 @@ def resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('resnet50', pretrained, **model_args)
return _create_resnet('resnet50', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet50d(pretrained=False, **kwargs) -> ResNet:
"""Constructs a ResNet-50-D model.
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet50d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet50d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet50t(pretrained=False, **kwargs):
"""Constructs a ResNet-50-T model.
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs)
return _create_resnet('resnet50t', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True)
return _create_resnet('resnet50t', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs)
return _create_resnet('resnet101', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3])
return _create_resnet('resnet101', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet101d(pretrained=False, **kwargs):
"""Constructs a ResNet-101-D model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet101d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet101d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model.
"""
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs)
return _create_resnet('resnet152', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3])
return _create_resnet('resnet152', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet152d(pretrained=False, **kwargs):
"""Constructs a ResNet-152-D model.
"""
model_args = dict(
block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet152d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet152d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet200(pretrained=False, **kwargs):
"""Constructs a ResNet-200 model.
"""
model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], **kwargs)
return _create_resnet('resnet200', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3])
return _create_resnet('resnet200', pretrained, **dict(model_args, **kwargs))
@register_model
def resnet200d(pretrained=False, **kwargs):
"""Constructs a ResNet-200-D model.
"""
model_args = dict(
block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnet200d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnet200d', pretrained, **dict(model_args, **kwargs))
@register_model
def tv_resnet34(pretrained=False, **kwargs):
"""Constructs a ResNet-34 model with original Torchvision weights.
"""
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('tv_resnet34', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3])
return _create_resnet('tv_resnet34', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1007,23 +998,23 @@ def tv_resnet50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model with original Torchvision weights.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('tv_resnet50', pretrained, **model_args)
return _create_resnet('tv_resnet50', pretrained, **dict(model_args, **kwargs))
@register_model
def tv_resnet101(pretrained=False, **kwargs):
"""Constructs a ResNet-101 model w/ Torchvision pretrained weights.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs)
return _create_resnet('tv_resnet101', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3])
return _create_resnet('tv_resnet101', pretrained, **dict(model_args, **kwargs))
@register_model
def tv_resnet152(pretrained=False, **kwargs):
"""Constructs a ResNet-152 model w/ Torchvision pretrained weights.
"""
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs)
return _create_resnet('tv_resnet152', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3])
return _create_resnet('tv_resnet152', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1034,8 +1025,8 @@ def wide_resnet50_2(pretrained=False, **kwargs):
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128, **kwargs)
return _create_resnet('wide_resnet50_2', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128)
return _create_resnet('wide_resnet50_2', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1045,8 +1036,8 @@ def wide_resnet101_2(pretrained=False, **kwargs):
which is twice larger in every block. The number of channels in outer 1x1
convolutions is the same.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128, **kwargs)
return _create_resnet('wide_resnet101_2', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128)
return _create_resnet('wide_resnet101_2', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1061,8 +1052,8 @@ def resnet50_gn(pretrained=False, **kwargs):
def resnext50_32x4d(pretrained=False, **kwargs):
"""Constructs a ResNeXt50-32x4d model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('resnext50_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4)
return _create_resnet('resnext50_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1071,40 +1062,40 @@ def resnext50d_32x4d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4,
stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnext50d_32x4d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnext50d_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnext101_32x4d(pretrained=False, **kwargs):
"""Constructs a ResNeXt-101 32x4d model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('resnext101_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4)
return _create_resnet('resnext101_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnext101_32x8d(pretrained=False, **kwargs):
"""Constructs a ResNeXt-101 32x8d model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs)
return _create_resnet('resnext101_32x8d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8)
return _create_resnet('resnext101_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnext101_64x4d(pretrained=False, **kwargs):
"""Constructs a ResNeXt101-64x4d model.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4, **kwargs)
return _create_resnet('resnext101_64x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4)
return _create_resnet('resnext101_64x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def tv_resnext50_32x4d(pretrained=False, **kwargs):
"""Constructs a ResNeXt50-32x4d model with original Torchvision weights.
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('tv_resnext50_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4)
return _create_resnet('tv_resnext50_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1114,8 +1105,8 @@ def ig_resnext101_32x8d(pretrained=False, **kwargs):
`"Exploring the Limits of Weakly Supervised Pretraining" <https://arxiv.org/abs/1805.00932>`_
Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs)
return _create_resnet('ig_resnext101_32x8d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8)
return _create_resnet('ig_resnext101_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1125,8 +1116,8 @@ def ig_resnext101_32x16d(pretrained=False, **kwargs):
`"Exploring the Limits of Weakly Supervised Pretraining" <https://arxiv.org/abs/1805.00932>`_
Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs)
return _create_resnet('ig_resnext101_32x16d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16)
return _create_resnet('ig_resnext101_32x16d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1136,8 +1127,8 @@ def ig_resnext101_32x32d(pretrained=False, **kwargs):
`"Exploring the Limits of Weakly Supervised Pretraining" <https://arxiv.org/abs/1805.00932>`_
Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32, **kwargs)
return _create_resnet('ig_resnext101_32x32d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32)
return _create_resnet('ig_resnext101_32x32d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1147,8 +1138,8 @@ def ig_resnext101_32x48d(pretrained=False, **kwargs):
`"Exploring the Limits of Weakly Supervised Pretraining" <https://arxiv.org/abs/1805.00932>`_
Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48, **kwargs)
return _create_resnet('ig_resnext101_32x48d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48)
return _create_resnet('ig_resnext101_32x48d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1157,8 +1148,8 @@ def ssl_resnet18(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs)
return _create_resnet('ssl_resnet18', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2])
return _create_resnet('ssl_resnet18', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1168,7 +1159,7 @@ def ssl_resnet50(pretrained=False, **kwargs):
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('ssl_resnet50', pretrained, **model_args)
return _create_resnet('ssl_resnet50', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1177,8 +1168,8 @@ def ssl_resnext50_32x4d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('ssl_resnext50_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4)
return _create_resnet('ssl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1187,8 +1178,8 @@ def ssl_resnext101_32x4d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('ssl_resnext101_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4)
return _create_resnet('ssl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1197,8 +1188,8 @@ def ssl_resnext101_32x8d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs)
return _create_resnet('ssl_resnext101_32x8d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8)
return _create_resnet('ssl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1207,8 +1198,8 @@ def ssl_resnext101_32x16d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs)
return _create_resnet('ssl_resnext101_32x16d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16)
return _create_resnet('ssl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1218,8 +1209,8 @@ def swsl_resnet18(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs)
return _create_resnet('swsl_resnet18', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2])
return _create_resnet('swsl_resnet18', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1230,7 +1221,7 @@ def swsl_resnet50(pretrained=False, **kwargs):
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs)
return _create_resnet('swsl_resnet50', pretrained, **model_args)
return _create_resnet('swsl_resnet50', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1240,8 +1231,8 @@ def swsl_resnext50_32x4d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('swsl_resnext50_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4)
return _create_resnet('swsl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1251,8 +1242,8 @@ def swsl_resnext101_32x4d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs)
return _create_resnet('swsl_resnext101_32x4d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4)
return _create_resnet('swsl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1262,8 +1253,8 @@ def swsl_resnext101_32x8d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs)
return _create_resnet('swsl_resnext101_32x8d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8)
return _create_resnet('swsl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1273,8 +1264,8 @@ def swsl_resnext101_32x16d(pretrained=False, **kwargs):
`"Billion-scale Semi-Supervised Learning for Image Classification" <https://arxiv.org/abs/1905.00546>`_
Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs)
return _create_resnet('swsl_resnext101_32x16d', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16)
return _create_resnet('swsl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1285,8 +1276,8 @@ def ecaresnet26t(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32,
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet26t', pretrained, **model_args)
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet26t', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1295,8 +1286,8 @@ def ecaresnet50d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet50d', pretrained, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet50d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1306,8 +1297,8 @@ def ecaresnet50d_pruned(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs))
@register_model
@ -1317,8 +1308,8 @@ def ecaresnet50t(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32,
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet50t', pretrained, **model_args)
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet50t', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1327,8 +1318,8 @@ def ecaresnetlight(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[1, 1, 11, 3], stem_width=32, avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnetlight', pretrained, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnetlight', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1337,8 +1328,8 @@ def ecaresnet101d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet101d', pretrained, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet101d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1348,8 +1339,8 @@ def ecaresnet101d_pruned(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs))
@register_model
@ -1358,8 +1349,8 @@ def ecaresnet200d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet200d', pretrained, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet200d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1368,8 +1359,8 @@ def ecaresnet269d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnet269d', pretrained, **model_args)
block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnet269d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1380,8 +1371,8 @@ def ecaresnext26t_32x4d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32,
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnext26t_32x4d', pretrained, **model_args)
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnext26t_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1392,54 +1383,54 @@ def ecaresnext50t_32x4d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32,
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs)
return _create_resnet('ecaresnext50t_32x4d', pretrained, **model_args)
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'))
return _create_resnet('ecaresnext50t_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet18(pretrained=False, **kwargs):
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet18', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se'))
return _create_resnet('seresnet18', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet34(pretrained=False, **kwargs):
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet34', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'))
return _create_resnet('seresnet34', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet50(pretrained=False, **kwargs):
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet50', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'))
return _create_resnet('seresnet50', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet50t(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet50t', pretrained, **model_args)
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered',
avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnet50t', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet101(pretrained=False, **kwargs):
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet101', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se'))
return _create_resnet('seresnet101', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet152(pretrained=False, **kwargs):
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet152', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se'))
return _create_resnet('seresnet152', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnet152d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet152d', pretrained, **model_args)
block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep',
avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnet152d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1447,9 +1438,9 @@ def seresnet200d(pretrained=False, **kwargs):
"""Constructs a ResNet-200-D model with SE attn.
"""
model_args = dict(
block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet200d', pretrained, **model_args)
block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep',
avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnet200d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1457,9 +1448,9 @@ def seresnet269d(pretrained=False, **kwargs):
"""Constructs a ResNet-269-D model with SE attn.
"""
model_args = dict(
block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnet269d', pretrained, **model_args)
block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep',
avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnet269d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1470,8 +1461,8 @@ def seresnext26d_32x4d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32,
stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext26d_32x4d', pretrained, **model_args)
stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnext26d_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1482,8 +1473,8 @@ def seresnext26t_32x4d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32,
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext26t_32x4d', pretrained, **model_args)
stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnext26t_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1499,24 +1490,24 @@ def seresnext26tn_32x4d(pretrained=False, **kwargs):
def seresnext50_32x4d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext50_32x4d', pretrained, **model_args)
block_args=dict(attn_layer='se'))
return _create_resnet('seresnext50_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnext101_32x4d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext101_32x4d', pretrained, **model_args)
block_args=dict(attn_layer='se'))
return _create_resnet('seresnext101_32x4d', pretrained, **dict(model_args, **kwargs))
@register_model
def seresnext101_32x8d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext101_32x8d', pretrained, **model_args)
block_args=dict(attn_layer='se'))
return _create_resnet('seresnext101_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1524,32 +1515,32 @@ def seresnext101d_32x8d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8,
stem_width=32, stem_type='deep', avg_down=True,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnext101d_32x8d', pretrained, **model_args)
block_args=dict(attn_layer='se'))
return _create_resnet('seresnext101d_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
def senet154(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 8, 36, 3], cardinality=64, base_width=4, stem_type='deep',
down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('senet154', pretrained, **model_args)
down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se'))
return _create_resnet('senet154', pretrained, **dict(model_args, **kwargs))
@register_model
def resnetblur18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model with blur anti-aliasing
"""
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d, **kwargs)
return _create_resnet('resnetblur18', pretrained, **model_args)
model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d)
return _create_resnet('resnetblur18', pretrained, **dict(model_args, **kwargs))
@register_model
def resnetblur50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model with blur anti-aliasing
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d, **kwargs)
return _create_resnet('resnetblur50', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d)
return _create_resnet('resnetblur50', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1558,8 +1549,8 @@ def resnetblur50d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d,
stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnetblur50d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnetblur50d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1568,16 +1559,25 @@ def resnetblur101d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=BlurPool2d,
stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnetblur101d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnetblur101d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnetaa34d(pretrained=False, **kwargs):
"""Constructs a ResNet-34-D model w/ avgpool anti-aliasing
"""
model_args = dict(
block=BasicBlock, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnetaa34d', pretrained, **dict(model_args, **kwargs))
@register_model
def resnetaa50(pretrained=False, **kwargs):
"""Constructs a ResNet-50 model with avgpool anti-aliasing
"""
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, **kwargs)
return _create_resnet('resnetaa50', pretrained, **model_args)
model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d)
return _create_resnet('resnetaa50', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1586,8 +1586,8 @@ def resnetaa50d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d,
stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnetaa50d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnetaa50d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1596,8 +1596,8 @@ def resnetaa101d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=nn.AvgPool2d,
stem_width=32, stem_type='deep', avg_down=True, **kwargs)
return _create_resnet('resnetaa101d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True)
return _create_resnet('resnetaa101d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1606,8 +1606,8 @@ def seresnetaa50d(pretrained=False, **kwargs):
"""
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d,
stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnetaa50d', pretrained, **model_args)
stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'))
return _create_resnet('seresnetaa50d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1617,8 +1617,8 @@ def seresnextaa101d_32x8d(pretrained=False, **kwargs):
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8,
stem_width=32, stem_type='deep', avg_down=True, aa_layer=nn.AvgPool2d,
block_args=dict(attn_layer='se'), **kwargs)
return _create_resnet('seresnextaa101d_32x8d', pretrained, **model_args)
block_args=dict(attn_layer='se'))
return _create_resnet('seresnextaa101d_32x8d', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1630,8 +1630,8 @@ def resnetrs50(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs50', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs50', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1643,8 +1643,8 @@ def resnetrs101(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs101', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs101', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1656,8 +1656,8 @@ def resnetrs152(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs152', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs152', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1669,8 +1669,8 @@ def resnetrs200(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs200', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs200', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1682,8 +1682,8 @@ def resnetrs270(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[4, 29, 53, 4], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs270', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs270', pretrained, **dict(model_args, **kwargs))
@ -1696,8 +1696,8 @@ def resnetrs350(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[4, 36, 72, 4], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs350', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs350', pretrained, **dict(model_args, **kwargs))
@register_model
@ -1709,5 +1709,5 @@ def resnetrs420(pretrained=False, **kwargs):
attn_layer = partial(get_attn('se'), rd_ratio=0.25)
model_args = dict(
block=Bottleneck, layers=[4, 44, 87, 4], stem_width=32, stem_type='deep', replace_stem_pool=True,
avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs)
return _create_resnet('resnetrs420', pretrained, **model_args)
avg_down=True, block_args=dict(attn_layer=attn_layer))
return _create_resnet('resnetrs420', pretrained, **dict(model_args, **kwargs))
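Note on the pattern applied throughout this file: `**kwargs` is moved out of the per-model `model_args` dict and merged at the call site with `dict(model_args, **kwargs)`, so caller-supplied kwargs override the variant defaults instead of colliding with them. A minimal sketch of that behavior (the `toy_*` names below are illustrative, not part of timm):

def _create_toy(variant, pretrained, **kwargs):
    # Stand-in for _create_resnet(); just echoes the merged arguments.
    return variant, pretrained, kwargs

def toy_resnet50d(pretrained=False, **kwargs):
    # Variant defaults live in model_args; the caller's kwargs are merged last, so they win.
    model_args = dict(layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True)
    return _create_toy('toy_resnet50d', pretrained, **dict(model_args, **kwargs))

# With the old form, model_args = dict(..., stem_type='deep', **kwargs) raises
# "TypeError: dict() got multiple values for keyword argument 'stem_type'"
# as soon as a caller passes stem_type. The merged form simply overrides the default:
_, _, merged = toy_resnet50d(stem_type='')
assert merged['stem_type'] == ''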

@ -746,86 +746,83 @@ def resnetv2_152x2_bit_teacher_384(pretrained=False, **kwargs):
@register_model
def resnetv2_50(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50', pretrained=pretrained,
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs)
model_args = dict(layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
return _create_resnetv2('resnetv2_50', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_50d(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50d', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_50d', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_50t(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50t', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
stem_type='tiered', avg_down=True, **kwargs)
stem_type='tiered', avg_down=True)
return _create_resnetv2('resnetv2_50t', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_101(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_101', pretrained=pretrained,
layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs)
model_args = dict(layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
return _create_resnetv2('resnetv2_101', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_101d(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_101d', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_101d', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_152(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_152', pretrained=pretrained,
layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs)
model_args = dict(layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d)
return _create_resnetv2('resnetv2_152', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_152d(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_152d', pretrained=pretrained,
model_args = dict(
layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_152d', pretrained=pretrained, **dict(model_args, **kwargs))
# Experimental configs (may change / be removed)
@register_model
def resnetv2_50d_gn(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50d_gn', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=GroupNormAct,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_50d_gn', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_50d_evob(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50d_evob', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dB0,
stem_type='deep', avg_down=True, zero_init_last=True, **kwargs)
stem_type='deep', avg_down=True, zero_init_last=True)
return _create_resnetv2('resnetv2_50d_evob', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_50d_evos(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50d_evos', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dS0,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_50d_evos', pretrained=pretrained, **dict(model_args, **kwargs))
@register_model
def resnetv2_50d_frn(pretrained=False, **kwargs):
return _create_resnetv2(
'resnetv2_50d_frn', pretrained=pretrained,
model_args = dict(
layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=FilterResponseNormTlu2d,
stem_type='deep', avg_down=True, **kwargs)
stem_type='deep', avg_down=True)
return _create_resnetv2('resnetv2_50d_frn', pretrained=pretrained, **dict(model_args, **kwargs))
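The resnetv2 entrypoints get the same treatment, additionally collecting the previously inline call arguments into a `model_args` dict first. A quick, self-contained check of the merge precedence these entrypoints now rely on (values below are illustrative, not actual model defaults):

# dict(model_args, **kwargs): the right-hand kwargs override the defaults on the left.
model_args = dict(layers=[3, 4, 6, 3], stem_type='deep', avg_down=True)
kwargs = dict(stem_type='tiered', num_classes=10)
merged = dict(model_args, **kwargs)
assert merged == dict(layers=[3, 4, 6, 3], stem_type='tiered', avg_down=True, num_classes=10)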
