|
|
|
@ -23,6 +23,10 @@ An implementation of EfficienNet that covers variety of related models with effi
|
|
|
|
|
* Single-Path NAS Pixel1
|
|
|
|
|
- Single-Path NAS: Designing Hardware-Efficient ConvNets - https://arxiv.org/abs/1904.02877
|
|
|
|
|
|
|
|
|
|
* TinyNet
|
|
|
|
|
- Model Rubik's Cube: Twisting Resolution, Depth and Width for TinyNets - https://arxiv.org/abs/2010.14819
|
|
|
|
|
- Definitions & weights borrowed from https://github.com/huawei-noah/CV-Backbones/tree/master/tinynet_pytorch
|
|
|
|
|
|
|
|
|
|
* And likely more...
|
|
|
|
|
|
|
|
|
|
The majority of the above models (EfficientNet*, MixNet, MnasNet) and original weights were made available
|
|
|
|
@ -427,11 +431,27 @@ default_cfgs = {
|
|
|
|
|
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth'),
|
|
|
|
|
'tf_mixnet_l': _cfg(
|
|
|
|
|
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth'),
|
|
|
|
|
|
|
|
|
|
"tinynet_a": _cfg(
|
|
|
|
|
input_size=(3, 192, 192), pool_size=(6, 6), # int(224 * 0.86)
|
|
|
|
|
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_a.pth'),
|
|
|
|
|
"tinynet_b": _cfg(
|
|
|
|
|
input_size=(3, 188, 188), pool_size=(6, 6), # int(224 * 0.84)
|
|
|
|
|
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_b.pth'),
|
|
|
|
|
"tinynet_c": _cfg(
|
|
|
|
|
input_size=(3, 184, 184), pool_size=(6, 6), # int(224 * 0.825)
|
|
|
|
|
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_c.pth'),
|
|
|
|
|
"tinynet_d": _cfg(
|
|
|
|
|
input_size=(3, 152, 152), pool_size=(5, 5), # int(224 * 0.68)
|
|
|
|
|
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_d.pth'),
|
|
|
|
|
"tinynet_e": _cfg(
|
|
|
|
|
input_size=(3, 106, 106), pool_size=(4, 4), # int(224 * 0.475)
|
|
|
|
|
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_e.pth'),
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class EfficientNet(nn.Module):
|
|
|
|
|
""" (Generic) EfficientNet
|
|
|
|
|
""" EfficientNet
|
|
|
|
|
|
|
|
|
|
A flexible and performant PyTorch implementation of efficient network architectures, including:
|
|
|
|
|
* EfficientNet-V2 Small, Medium, Large, XL & B0-B3
|
|
|
|
@ -443,7 +463,7 @@ class EfficientNet(nn.Module):
|
|
|
|
|
* MobileNet-V2
|
|
|
|
|
* FBNet C
|
|
|
|
|
* Single-Path NAS Pixel1
|
|
|
|
|
|
|
|
|
|
* TinyNet
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
def __init__(self, block_args, num_classes=1000, num_features=1280, in_chans=3, stem_size=32, fix_stem=False,
|
|
|
|
@ -1160,6 +1180,31 @@ def _gen_mixnet_m(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrai
|
|
|
|
|
return model
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _gen_tinynet(
    variant, model_width=1.0, depth_multiplier=1.0, pretrained=False, **kwargs
):
    """Creates a TinyNet model.

    TinyNets share one block layout (the EfficientNet-B0 MBConv stack, per the
    TinyNet paper referenced in the module header) and differ only in the
    width/depth scaling applied on top of it.

    Args:
        variant: registered model name used for config / weight lookup.
        model_width: channel (width) multiplier.
        depth_multiplier: block-repeat (depth) multiplier.
        pretrained: load pretrained weights when True.
        **kwargs: extra model args; ``act_layer`` / BN args / ``norm_layer``
            are consumed here, the remainder is forwarded to model creation.
    """
    # MBConv block stack with squeeze-excite; repeats are scaled by
    # depth_multiplier (rounded) in decode_arch_def below.
    arch_def = [
        ['ds_r1_k3_s1_e1_c16_se0.25'],
        ['ir_r2_k3_s2_e6_c24_se0.25'],
        ['ir_r2_k5_s2_e6_c40_se0.25'],
        ['ir_r3_k3_s2_e6_c80_se0.25'],
        ['ir_r3_k5_s1_e6_c112_se0.25'],
        ['ir_r4_k5_s2_e6_c192_se0.25'],
        ['ir_r1_k3_s1_e6_c320_se0.25'],
    ]
    # NOTE: resolve_act_layer / resolve_bn_args / the norm_layer pop mutate
    # kwargs; keep this evaluation order so **kwargs below sees the same
    # residual keys as before.
    act_layer = resolve_act_layer(kwargs, 'swish')
    norm_layer = kwargs.pop('norm_layer', None) or partial(nn.BatchNorm2d, **resolve_bn_args(kwargs))
    model_kwargs = dict(
        block_args=decode_arch_def(arch_def, depth_multiplier, depth_trunc='round'),
        # head width is floored at 1280 even when model_width < 1.0
        num_features=max(1280, round_channels(1280, model_width, 8, None)),
        stem_size=32,
        fix_stem=True,  # stem channels are not scaled by model_width
        round_chs_fn=partial(round_channels, multiplier=model_width),
        act_layer=act_layer,
        norm_layer=norm_layer,
        **kwargs,
    )
    return _create_effnet(variant, pretrained, **model_kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
|
|
|
|
|
def mnasnet_050(pretrained=False, **kwargs):
|
|
|
|
|
""" MNASNet B1, depth multiplier of 0.5. """
|
|
|
|
@ -2298,3 +2343,33 @@ def tf_mixnet_l(pretrained=False, **kwargs):
|
|
|
|
|
model = _gen_mixnet_m(
|
|
|
|
|
'tf_mixnet_l', channel_multiplier=1.3, pretrained=pretrained, **kwargs)
|
|
|
|
|
return model
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
def tinynet_a(pretrained=False, **kwargs):
    """TinyNet-A (width 1.0, depth 1.2; default cfg trains at 192x192)."""
    return _gen_tinynet(
        'tinynet_a', model_width=1.0, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
def tinynet_b(pretrained=False, **kwargs):
    """TinyNet-B (width 0.75, depth 1.1; default cfg trains at 188x188)."""
    return _gen_tinynet(
        'tinynet_b', model_width=0.75, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
def tinynet_c(pretrained=False, **kwargs):
    """TinyNet-C (width 0.54, depth 0.85; default cfg trains at 184x184)."""
    return _gen_tinynet(
        'tinynet_c', model_width=0.54, depth_multiplier=0.85, pretrained=pretrained, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
def tinynet_d(pretrained=False, **kwargs):
    """TinyNet-D (width 0.54, depth 0.695; default cfg trains at 152x152)."""
    return _gen_tinynet(
        'tinynet_d', model_width=0.54, depth_multiplier=0.695, pretrained=pretrained, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@register_model
def tinynet_e(pretrained=False, **kwargs):
    """TinyNet-E (width 0.51, depth 0.6; default cfg trains at 106x106)."""
    return _gen_tinynet(
        'tinynet_e', model_width=0.51, depth_multiplier=0.6, pretrained=pretrained, **kwargs)
|
|
|
|
|