From 3bd96609c8e1b6e4690410eb2ca4ecea8b36a5aa Mon Sep 17 00:00:00 2001
From: Fredo Guan
Date: Tue, 6 Dec 2022 17:19:25 -0800
Subject: [PATCH 01/34] Davit (#1)

Implement the davit model from https://arxiv.org/abs/2204.03645 and
https://github.com/dingmyu/davit

---
 timm/models/__init__.py |   1 +
 timm/models/davit.py    | 624 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 625 insertions(+)
 create mode 100644 timm/models/davit.py

diff --git a/timm/models/__init__.py b/timm/models/__init__.py
index 301186dd..b1f82789 100644
--- a/timm/models/__init__.py
+++ b/timm/models/__init__.py
@@ -8,6 +8,7 @@ from .convmixer import *
 from .convnext import *
 from .crossvit import *
 from .cspnet import *
+from .davit import *
 from .deit import *
 from .densenet import *
 from .dla import *
diff --git a/timm/models/davit.py b/timm/models/davit.py
new file mode 100644
index 00000000..444f21f3
--- /dev/null
+++ b/timm/models/davit.py
@@ -0,0 +1,624 @@
+""" DaViT: Dual Attention Vision Transformers
+
+As described in https://arxiv.org/abs/2204.03645
+
+Input size invariant transformer architecture that combines channel and spatial
+attention in each block. The attention mechanisms used are linear in complexity.
+
+DaViT model defs and weights adapted from https://github.com/dingmyu/davit, original copyright below
+
+
+
+
+"""
+# Copyright (c) 2022 Mingyu Ding
+# All rights reserved.
+# This source code is licensed under the MIT license
+
+import itertools
+from typing import Tuple
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from .helpers import build_model_with_cfg
+from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
+from .layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, ClassifierHead, Mlp
+from collections import OrderedDict
+import torch.utils.checkpoint as checkpoint
+from .pretrained import generate_default_cfgs
+from .registry import register_model
+
+
+__all__ = ['DaViT']
+
+
+
+
+class MySequential(nn.Sequential):
+    def forward(self, *inputs):
+        for module in self._modules.values():
+            if type(inputs) == tuple:
+                inputs = module(*inputs)
+            else:
+                inputs = module(inputs)
+        return inputs
+
+
+class ConvPosEnc(nn.Module):
+    def __init__(self, dim, k=3, act=False, normtype=False):
+        super(ConvPosEnc, self).__init__()
+        self.proj = nn.Conv2d(dim,
+                              dim,
+                              to_2tuple(k),
+                              to_2tuple(1),
+                              to_2tuple(k // 2),
+                              groups=dim)
+        self.normtype = normtype
+        if self.normtype == 'batch':
+            self.norm = nn.BatchNorm2d(dim)
+        elif self.normtype == 'layer':
+            self.norm = nn.LayerNorm(dim)
+        self.activation = nn.GELU() if act else nn.Identity()
+
+    def forward(self, x, size: Tuple[int, int]):
+        B, N, C = x.shape
+        H, W = size
+        assert N == H * W
+
+        feat = x.transpose(1, 2).view(B, C, H, W)
+        feat = self.proj(feat)
+        if self.normtype == 'batch':
+            feat = self.norm(feat).flatten(2).transpose(1, 2)
+        elif self.normtype == 'layer':
+            feat = self.norm(feat.flatten(2).transpose(1, 2))
+        else:
+            feat = feat.flatten(2).transpose(1, 2)
+        x = x + self.activation(feat)
+        return x
+
+
+class PatchEmbed(nn.Module):
+    """ Size-agnostic implementation of 2D image to patch embedding,
+        allowing input size to be adjusted during model forward operation
+    """
+
+    def __init__(
+            self,
+            patch_size=16,
+            in_chans=3,
+            embed_dim=96,
+            overlapped=False):
+        super().__init__()
+        patch_size = to_2tuple(patch_size)
+        self.patch_size = patch_size
+
+        if patch_size[0] == 4:
+            self.proj = nn.Conv2d(
+                in_chans,
+                embed_dim,
+                kernel_size=(7, 7),
+                stride=patch_size,
+                padding=(3, 
3)) + self.norm = nn.LayerNorm(embed_dim) + if patch_size[0] == 2: + kernel = 3 if overlapped else 2 + pad = 1 if overlapped else 0 + self.proj = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=to_2tuple(kernel), + stride=patch_size, + padding=to_2tuple(pad)) + self.norm = nn.LayerNorm(in_chans) + + def forward(self, x, size): + H, W = size + dim = len(x.shape) + if dim == 3: + B, HW, C = x.shape + x = self.norm(x) + x = x.reshape(B, + H, + W, + C).permute(0, 3, 1, 2).contiguous() + + B, C, H, W = x.shape + if W % self.patch_size[1] != 0: + x = F.pad(x, (0, self.patch_size[1] - W % self.patch_size[1])) + if H % self.patch_size[0] != 0: + x = F.pad(x, (0, 0, 0, self.patch_size[0] - H % self.patch_size[0])) + + x = self.proj(x) + newsize = (x.size(2), x.size(3)) + x = x.flatten(2).transpose(1, 2) + if dim == 4: + x = self.norm(x) + return x, newsize + + +class ChannelAttention(nn.Module): + + def __init__(self, dim, num_heads=8, qkv_bias=False): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + def forward(self, x): + B, N, C = x.shape + + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + k = k * self.scale + attention = k.transpose(-1, -2) @ v + attention = attention.softmax(dim=-1) + x = (attention @ q.transpose(-1, -2)).transpose(-1, -2) + x = x.transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + return x + + +class ChannelBlock(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, + ffn=True, cpe_act=False): + super().__init__() + + self.cpe = nn.ModuleList([ConvPosEnc(dim=dim, k=3, act=cpe_act), + ConvPosEnc(dim=dim, k=3, act=cpe_act)]) + self.ffn = ffn + self.norm1 = norm_layer(dim) + self.attn = ChannelAttention(dim, num_heads=num_heads, qkv_bias=qkv_bias) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + if self.ffn: + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer) + + def forward(self, x, size): + x = self.cpe[0](x, size) + cur = self.norm1(x) + cur = self.attn(cur) + x = x + self.drop_path(cur) + + x = self.cpe[1](x, size) + if self.ffn: + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x, size + + +def window_partition(x, window_size: int): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows + + +def window_reverse(windows, window_size: int, H: int, W: int): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + r""" Window based multi-head self attention (W-MSA) module with relative position bias. 
+ It supports both of shifted and non-shifted window. + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + """ + + def __init__(self, dim, window_size, num_heads, qkv_bias=True): + + super().__init__() + self.dim = dim + self.window_size = window_size + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x): + B_, N, C = x.shape + + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + attn = self.softmax(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + return x + + +class SpatialBlock(nn.Module): + r""" Windows Block. + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): Window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__(self, dim, num_heads, window_size=7, + mlp_ratio=4., qkv_bias=True, drop_path=0., + act_layer=nn.GELU, norm_layer=nn.LayerNorm, + ffn=True, cpe_act=False): + super().__init__() + self.dim = dim + self.ffn = ffn + self.num_heads = num_heads + self.window_size = window_size + self.mlp_ratio = mlp_ratio + self.cpe = nn.ModuleList([ConvPosEnc(dim=dim, k=3, act=cpe_act), + ConvPosEnc(dim=dim, k=3, act=cpe_act)]) + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias) + + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + + if self.ffn: + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer) + + def forward(self, x, size): + H, W = size + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + + shortcut = self.cpe[0](x, size) + x = self.norm1(shortcut) + x = x.view(B, H, W, C) + + pad_l = pad_t = 0 + pad_r = (self.window_size - W % self.window_size) % self.window_size + pad_b = (self.window_size - H % self.window_size) % self.window_size + x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b)) + _, Hp, Wp, _ = x.shape + + x_windows = window_partition(x, self.window_size) + x_windows = x_windows.view(-1, self.window_size * self.window_size, C) + + # W-MSA/SW-MSA + attn_windows = self.attn(x_windows) + + # merge windows + attn_windows = attn_windows.view(-1, + self.window_size, + self.window_size, + C) + x = window_reverse(attn_windows, self.window_size, Hp, Wp) + + if pad_r > 0 or pad_b > 0: + x = x[:, :H, :W, :].contiguous() + + x = x.view(B, H * W, C) + x = shortcut + self.drop_path(x) + + x = self.cpe[1](x, size) + if self.ffn: + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x, size + + + +class DaViT(nn.Module): + r""" Dual Attention Transformer + Args: + patch_size (int | tuple(int)): Patch size. Default: 4 + in_chans (int): Number of input image channels. Default: 3 + embed_dims (tuple(int)): Patch embedding dimension. Default: (64, 128, 192, 256) + num_heads (tuple(int)): Number of attention heads in different layers. Default: (4, 8, 12, 16) + window_size (int): Window size. Default: 7 + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4 + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + drop_path_rate (float): Stochastic depth rate. Default: 0.1 + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. 
+ """ + + def __init__( + self, + in_chans=3, + depths=(1, 1, 3, 1), + patch_size=4, + embed_dims=(96, 192, 384, 768), + num_heads=(3, 6, 12, 24), + window_size=7, + mlp_ratio=4., + qkv_bias=True, + drop_path_rate=0.1, + norm_layer=nn.LayerNorm, + attention_types=('spatial', 'channel'), + ffn=True, + overlapped_patch=False, + cpe_act=False, + drop_rate=0., + attn_drop_rate=0., + img_size=224, + num_classes=1000, + global_pool='avg' + ): + super().__init__() + + architecture = [[index] * item for index, item in enumerate(depths)] + self.architecture = architecture + self.embed_dims = embed_dims + self.num_heads = num_heads + self.num_stages = len(self.embed_dims) + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, 2 * len(list(itertools.chain(*self.architecture))))] + assert self.num_stages == len(self.num_heads) == (sorted(list(itertools.chain(*self.architecture)))[-1] + 1) + + self.num_classes = num_classes + self.num_features = embed_dims[-1] + self.drop_rate=drop_rate + self.grad_checkpointing = False + + + self.patch_embeds = nn.ModuleList([ + PatchEmbed(patch_size=patch_size if i == 0 else 2, + in_chans=in_chans if i == 0 else self.embed_dims[i - 1], + embed_dim=self.embed_dims[i], + overlapped=overlapped_patch) + for i in range(self.num_stages)]) + + main_blocks = [] + for block_id, block_param in enumerate(self.architecture): + layer_offset_id = len(list(itertools.chain(*self.architecture[:block_id]))) + + block = nn.ModuleList([ + MySequential(*[ + ChannelBlock( + dim=self.embed_dims[item], + num_heads=self.num_heads[item], + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id], + norm_layer=nn.LayerNorm, + ffn=ffn, + cpe_act=cpe_act + ) if attention_type == 'channel' else + SpatialBlock( + dim=self.embed_dims[item], + num_heads=self.num_heads[item], + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id], + norm_layer=nn.LayerNorm, + ffn=ffn, + cpe_act=cpe_act, + window_size=window_size, + ) if attention_type == 'spatial' else None + for attention_id, attention_type in enumerate(attention_types)] + ) for layer_id, item in enumerate(block_param) + ]) + main_blocks.append(block) + self.main_blocks = nn.ModuleList(main_blocks) + + ''' + # layer norms for pyramid feature extraction + # + # TODO implement pyramid feature extraction + # + # davit should be a good transformer candidate, since the only official implementation + # is for segmentation and detection + for i_layer in range(self.num_stages): + layer = norm_layer(self.embed_dims[i_layer]) + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + ''' + self.norms = norm_layer(self.num_features) + self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + + + @torch.jit.ignore + def set_grad_checkpointing(self, enable=True): + self.grad_checkpointing = enable + + @torch.jit.ignore + def get_classifier(self): + return self.head.fc + + def reset_classifier(self, num_classes, global_pool=None): + self.num_classes = num_classes + if global_pool is None: + global_pool = self.head.global_pool.pool_type + self.head = ClassifierHead(self.num_features, 
num_classes, pool_type=global_pool, drop_rate=self.drop_rate) + + + def forward_features_full(self, x): + x, size = self.patch_embeds[0](x, (x.size(2), x.size(3))) + features = [x] + sizes = [size] + branches = [0] + + for block_index, block_param in enumerate(self.architecture): + branch_ids = sorted(set(block_param)) + for branch_id in branch_ids: + if branch_id not in branches: + x, size = self.patch_embeds[branch_id](features[-1], sizes[-1]) + features.append(x) + sizes.append(size) + branches.append(branch_id) + for layer_index, branch_id in enumerate(block_param): + if self.grad_checkpointing and not torch.jit.is_scripting(): + features[branch_id], _ = checkpoint.checkpoint(self.main_blocks[block_index][layer_index], features[branch_id], sizes[branch_id]) + else: + features[branch_id], _ = self.main_blocks[block_index][layer_index](features[branch_id], sizes[branch_id]) + ''' + # pyramid feature norm logic, no weights for these extra norm layers from pretrained classification model + outs = [] + for i in range(self.num_stages): + norm_layer = getattr(self, f'norm{i}') + x_out = norm_layer(features[i]) + H, W = sizes[i] + out = x_out.view(-1, H, W, self.embed_dims[i]).permute(0, 3, 1, 2).contiguous() + outs.append(out) + ''' + # non-normalized pyramid features + corresponding sizes + return tuple(features), tuple(sizes) + + def forward_features(self, x): + x, sizes = self.forward_features_full(x) + # take final feature and norm + x = self.norms(x[-1]) + H, W = sizes[-1] + x = x.view(-1, H, W, self.embed_dims[-1]).permute(0, 3, 1, 2).contiguous() + #print(x.shape) + return x + + def forward_head(self, x, pre_logits: bool = False): + + return self.head(x, pre_logits=pre_logits) + + def forward(self, x): + x = self.forward_features(x) + x = self.forward_head(x) + return x + +def checkpoint_filter_fn(state_dict, model): + """ Remap MSFT checkpoints -> timm """ + if 'head.norm.weight' in state_dict: + return state_dict # non-MSFT checkpoint + + if 'state_dict' in state_dict: + state_dict = state_dict['state_dict'] + + out_dict = {} + import re + for k, v in state_dict.items(): + + k = k.replace('head.', 'head.fc.') + out_dict[k] = v + return out_dict + + + +def _create_davit(variant, pretrained=False, **kwargs): + model = build_model_with_cfg(DaViT, variant, pretrained, + pretrained_filter_fn=checkpoint_filter_fn, **kwargs) + return model + + + +def _cfg(url='', **kwargs): # not sure how this should be set up + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), + 'crop_pct': 0.875, 'interpolation': 'bilinear', + 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, + 'first_conv': 'patch_embeds.0.proj', 'classifier': 'head.fc', + **kwargs + } + + + +default_cfgs = generate_default_cfgs({ + +'davit_tiny.msft_in1k': _cfg( + url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_tiny_ed28dd55.pth.tar"), +'davit_small.msft_in1k': _cfg( + url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_small_d1ecf281.pth.tar"), +'davit_base.msft_in1k': _cfg( + url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_base_67d9ac26.pth.tar"), +}) + + + +@register_model +def davit_tiny(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 3, 1), embed_dims=(96, 192, 384, 768), + num_heads=(3, 6, 12, 24), **kwargs) + return _create_davit('davit_tiny', pretrained=pretrained, **model_kwargs) + +@register_model +def 
davit_small(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(96, 192, 384, 768), + num_heads=(3, 6, 12, 24), **kwargs) + return _create_davit('davit_small', pretrained=pretrained, **model_kwargs) + +@register_model +def davit_base(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(128, 256, 512, 1024), + num_heads=(4, 8, 16, 32), **kwargs) + return _create_davit('davit_base', pretrained=pretrained, **model_kwargs) + +''' models without weights +# TODO contact authors to get larger pretrained models +@register_model +def davit_large(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(192, 384, 768, 1536), + num_heads=(6, 12, 24, 48), **kwargs) + return _create_davit('davit_large', pretrained=pretrained, **model_kwargs) + +@register_model +def davit_huge(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(256, 512, 1024, 2048), + num_heads=(8, 16, 32, 64), **kwargs) + return _create_davit('davit_huge', pretrained=pretrained, **model_kwargs) + +@register_model +def davit_giant(pretrained=False, **kwargs): + model_kwargs = dict(depths=(1, 1, 12, 3), embed_dims=(384, 768, 1536, 3072), + num_heads=(12, 24, 48, 96), **kwargs) + return _create_davit('davit_giant', pretrained=pretrained, **model_kwargs) +''' \ No newline at end of file From edea013dd15a7d9634bf619233c2860dfbe3c4fc Mon Sep 17 00:00:00 2001 From: Fredo Guan Date: Fri, 9 Dec 2022 02:53:21 -0800 Subject: [PATCH 02/34] Davit std (#3) Davit with all features working --- tests/test_models.py | 2 + timm/models/davit.py | 240 +++++++++++++++++++++++-------------------- 2 files changed, 129 insertions(+), 113 deletions(-) diff --git a/tests/test_models.py b/tests/test_models.py index 87d75cbd..97872fde 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -27,7 +27,9 @@ NON_STD_FILTERS = [ 'vit_*', 'tnt_*', 'pit_*', 'swin_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*', 'convit_*', 'levit*', 'visformer*', 'deit*', 'jx_nest_*', 'nest_*', 'xcit_*', 'crossvit_*', 'beit*', 'poolformer_*', 'volo_*', 'sequencer2d_*', 'swinv2_*', 'pvt_v2*', 'mvitv2*', 'gcvit*', 'efficientformer*', + 'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', 'eva_*' + ] NUM_NON_STD = len(NON_STD_FILTERS) diff --git a/timm/models/davit.py b/timm/models/davit.py index 444f21f3..eda928e4 100644 --- a/timm/models/davit.py +++ b/timm/models/davit.py @@ -7,47 +7,34 @@ attention in each block. The attention mechanisms used are linear in complexity. DaViT model defs and weights adapted from https://github.com/dingmyu/davit, original copyright below - - - - """ # Copyright (c) 2022 Mingyu Ding # All rights reserved. 
# This source code is licensed under the MIT license import itertools -from typing import Tuple +from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, overload, Tuple, TypeVar, Union, List +from collections import OrderedDict import torch import torch.nn as nn import torch.nn.functional as F -from .helpers import build_model_with_cfg -from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from .layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, ClassifierHead, Mlp -from collections import OrderedDict +from torch import Tensor import torch.utils.checkpoint as checkpoint + +from .features import FeatureInfo +from .fx_features import register_notrace_function, register_notrace_module +from .helpers import build_model_with_cfg, pretrained_cfg_for_features +from .layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, ClassifierHead, Mlp from .pretrained import generate_default_cfgs from .registry import register_model - +from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD __all__ = ['DaViT'] - - - -class MySequential(nn.Sequential): - def forward(self, *inputs): - for module in self._modules.values(): - if type(inputs) == tuple: - inputs = module(*inputs) - else: - inputs = module(inputs) - return inputs - - class ConvPosEnc(nn.Module): - def __init__(self, dim, k=3, act=False, normtype=False): + def __init__(self, dim : int, k : int=3, act : bool=False, normtype : str='none'): + super(ConvPosEnc, self).__init__() self.proj = nn.Conv2d(dim, dim, @@ -56,16 +43,16 @@ class ConvPosEnc(nn.Module): to_2tuple(k // 2), groups=dim) self.normtype = normtype + self.norm = nn.Identity() if self.normtype == 'batch': self.norm = nn.BatchNorm2d(dim) elif self.normtype == 'layer': self.norm = nn.LayerNorm(dim) self.activation = nn.GELU() if act else nn.Identity() - def forward(self, x, size: Tuple[int, int]): + def forward(self, x : Tensor, size: Tuple[int, int]): B, N, C = x.shape H, W = size - assert N == H * W feat = x.transpose(1, 2).view(B, C, H, W) feat = self.proj(feat) @@ -77,8 +64,11 @@ class ConvPosEnc(nn.Module): feat = feat.flatten(2).transpose(1, 2) x = x + self.activation(feat) return x + - +# reason: dim in control sequence +# FIXME reimplement to allow tracing +@register_notrace_module class PatchEmbed(nn.Module): """ Size-agnostic implementation of 2D image to patch embedding, allowing input size to be adjusted during model forward operation @@ -113,9 +103,10 @@ class PatchEmbed(nn.Module): padding=to_2tuple(pad)) self.norm = nn.LayerNorm(in_chans) - def forward(self, x, size): + + def forward(self, x : Tensor, size: Tuple[int, int]): H, W = size - dim = len(x.shape) + dim = x.dim() if dim == 3: B, HW, C = x.shape x = self.norm(x) @@ -149,7 +140,7 @@ class ChannelAttention(nn.Module): self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) self.proj = nn.Linear(dim, dim) - def forward(self, x): + def forward(self, x : Tensor): B, N, C = x.shape qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) @@ -186,7 +177,8 @@ class ChannelBlock(nn.Module): hidden_features=mlp_hidden_dim, act_layer=act_layer) - def forward(self, x, size): + + def forward(self, x : Tensor, size: Tuple[int, int]): x = self.cpe[0](x, size) cur = self.norm1(x) cur = self.attn(cur) @@ -198,7 +190,7 @@ class ChannelBlock(nn.Module): return x, size -def window_partition(x, window_size: int): +def window_partition(x : Tensor, window_size: int): """ Args: x: (B, H, W, C) @@ -211,8 +203,8 @@ def window_partition(x, 
window_size: int): windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) return windows - -def window_reverse(windows, window_size: int, H: int, W: int): +@register_notrace_function # reason: int argument is a Proxy +def window_reverse(windows : Tensor, window_size: int, H: int, W: int): """ Args: windows: (num_windows*B, window_size, window_size, C) @@ -222,6 +214,7 @@ def window_reverse(windows, window_size: int, H: int, W: int): Returns: x: (B, H, W, C) """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) @@ -252,7 +245,7 @@ class WindowAttention(nn.Module): self.softmax = nn.Softmax(dim=-1) - def forward(self, x): + def forward(self, x : Tensor): B_, N, C = x.shape qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) @@ -310,10 +303,11 @@ class SpatialBlock(nn.Module): hidden_features=mlp_hidden_dim, act_layer=act_layer) - def forward(self, x, size): + + def forward(self, x : Tensor, size: Tuple[int, int]): + H, W = size B, L, C = x.shape - assert L == H * W, "input feature has wrong size" shortcut = self.cpe[0](x, size) x = self.norm1(shortcut) @@ -338,8 +332,8 @@ class SpatialBlock(nn.Module): C) x = window_reverse(attn_windows, self.window_size, Hp, Wp) - if pad_r > 0 or pad_b > 0: - x = x[:, :H, :W, :].contiguous() + #if pad_r > 0 or pad_b > 0: + x = x[:, :H, :W, :].contiguous() x = x.view(B, H * W, C) x = shortcut + self.drop_path(x) @@ -352,12 +346,17 @@ class SpatialBlock(nn.Module): class DaViT(nn.Module): - r""" Dual Attention Transformer + r""" DaViT + A PyTorch implementation of `DaViT: Dual Attention Vision Transformers` - https://arxiv.org/abs/2204.03645 + Supports arbitrary input sizes and pyramid feature extraction + Args: - patch_size (int | tuple(int)): Patch size. Default: 4 in_chans (int): Number of input image channels. Default: 3 - embed_dims (tuple(int)): Patch embedding dimension. Default: (64, 128, 192, 256) - num_heads (tuple(int)): Number of attention heads in different layers. Default: (4, 8, 12, 16) + num_classes (int): Number of classes for classification head. Default: 1000 + depths (tuple(int)): Number of blocks in each stage. Default: (1, 1, 3, 1) + patch_size (int | tuple(int)): Patch size. Default: 4 + embed_dims (tuple(int)): Patch embedding dimension. Default: (96, 192, 384, 768) + num_heads (tuple(int)): Number of attention heads in different layers. Default: (3, 6, 12, 24) window_size (int): Window size. Default: 7 mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4 qkv_bias (bool): If True, add a learnable bias to query, key, value. 
Default: True @@ -383,7 +382,6 @@ class DaViT(nn.Module): cpe_act=False, drop_rate=0., attn_drop_rate=0., - img_size=224, num_classes=1000, global_pool='avg' ): @@ -401,7 +399,7 @@ class DaViT(nn.Module): self.num_features = embed_dims[-1] self.drop_rate=drop_rate self.grad_checkpointing = False - + self.feature_info = [] self.patch_embeds = nn.ModuleList([ PatchEmbed(patch_size=patch_size if i == 0 else 2, @@ -410,12 +408,12 @@ class DaViT(nn.Module): overlapped=overlapped_patch) for i in range(self.num_stages)]) - main_blocks = [] - for block_id, block_param in enumerate(self.architecture): - layer_offset_id = len(list(itertools.chain(*self.architecture[:block_id]))) + self.stages = nn.ModuleList() + for stage_id, stage_param in enumerate(self.architecture): + layer_offset_id = len(list(itertools.chain(*self.architecture[:stage_id]))) - block = nn.ModuleList([ - MySequential(*[ + stage = nn.ModuleList([ + nn.ModuleList([ ChannelBlock( dim=self.embed_dims[item], num_heads=self.num_heads[item], @@ -438,27 +436,17 @@ class DaViT(nn.Module): window_size=window_size, ) if attention_type == 'spatial' else None for attention_id, attention_type in enumerate(attention_types)] - ) for layer_id, item in enumerate(block_param) + ) for layer_id, item in enumerate(stage_param) ]) - main_blocks.append(block) - self.main_blocks = nn.ModuleList(main_blocks) - - ''' - # layer norms for pyramid feature extraction - # - # TODO implement pyramid feature extraction - # - # davit should be a good transformer candidate, since the only official implementation - # is for segmentation and detection - for i_layer in range(self.num_stages): - layer = norm_layer(self.embed_dims[i_layer]) - layer_name = f'norm{i_layer}' - self.add_module(layer_name, layer) - ''' + + self.stages.add_module(f'stage_{stage_id}', stage) + self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.stage_{stage_id}')] + self.norms = norm_layer(self.num_features) self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate) self.apply(self._init_weights) - + + def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight, std=.02) @@ -467,9 +455,7 @@ class DaViT(nn.Module): elif isinstance(m, nn.LayerNorm): nn.init.constant_(m.bias, 0) nn.init.constant_(m.weight, 1.0) - - - + @torch.jit.ignore def set_grad_checkpointing(self, enable=True): self.grad_checkpointing = enable @@ -485,55 +471,67 @@ class DaViT(nn.Module): self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate) - def forward_features_full(self, x): - x, size = self.patch_embeds[0](x, (x.size(2), x.size(3))) + def forward_network(self, x): + size: Tuple[int, int] = (x.size(2), x.size(3)) features = [x] sizes = [size] - branches = [0] - - for block_index, block_param in enumerate(self.architecture): - branch_ids = sorted(set(block_param)) - for branch_id in branch_ids: - if branch_id not in branches: - x, size = self.patch_embeds[branch_id](features[-1], sizes[-1]) - features.append(x) - sizes.append(size) - branches.append(branch_id) - for layer_index, branch_id in enumerate(block_param): - if self.grad_checkpointing and not torch.jit.is_scripting(): - features[branch_id], _ = checkpoint.checkpoint(self.main_blocks[block_index][layer_index], features[branch_id], sizes[branch_id]) - else: - features[branch_id], _ = self.main_blocks[block_index][layer_index](features[branch_id], sizes[branch_id]) - ''' - # pyramid feature norm logic, no 
weights for these extra norm layers from pretrained classification model + + for patch_layer, stage in zip(self.patch_embeds, self.stages): + features[-1], sizes[-1] = patch_layer(features[-1], sizes[-1]) + for _, block in enumerate(stage): + for _, layer in enumerate(block): + if self.grad_checkpointing and not torch.jit.is_scripting(): + features[-1], sizes[-1] = checkpoint.checkpoint(layer, features[-1], sizes[-1]) + else: + features[-1], sizes[-1] = layer(features[-1], sizes[-1]) + + # don't append outputs of last stage, since they are already there + if(len(features) < self.num_stages): + features.append(features[-1]) + sizes.append(sizes[-1]) + + + # non-normalized pyramid features + corresponding sizes + return features, sizes + + def forward_pyramid_features(self, x) -> List[Tensor]: + x, sizes = self.forward_network(x) outs = [] - for i in range(self.num_stages): - norm_layer = getattr(self, f'norm{i}') - x_out = norm_layer(features[i]) + for i, out in enumerate(x): H, W = sizes[i] - out = x_out.view(-1, H, W, self.embed_dims[i]).permute(0, 3, 1, 2).contiguous() - outs.append(out) - ''' - # non-normalized pyramid features + corresponding sizes - return tuple(features), tuple(sizes) + outs.append(out.view(-1, H, W, self.embed_dims[i]).permute(0, 3, 1, 2).contiguous()) + return outs + def forward_features(self, x): - x, sizes = self.forward_features_full(x) + x, sizes = self.forward_network(x) # take final feature and norm x = self.norms(x[-1]) H, W = sizes[-1] x = x.view(-1, H, W, self.embed_dims[-1]).permute(0, 3, 1, 2).contiguous() - #print(x.shape) return x def forward_head(self, x, pre_logits: bool = False): - return self.head(x, pre_logits=pre_logits) - def forward(self, x): + def forward_classifier(self, x): x = self.forward_features(x) x = self.forward_head(x) return x + + def forward(self, x): + return self.forward_classifier(x) + + +class DaViTFeatures(DaViT): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.feature_info = FeatureInfo(self.feature_info, kwargs.get('out_indices', (0, 1, 2, 3))) + + def forward(self, x) -> List[Tensor]: + return self.forward_pyramid_features(x) + def checkpoint_filter_fn(state_dict, model): """ Remap MSFT checkpoints -> timm """ @@ -542,11 +540,10 @@ def checkpoint_filter_fn(state_dict, model): if 'state_dict' in state_dict: state_dict = state_dict['state_dict'] - + out_dict = {} - import re for k, v in state_dict.items(): - + k = k.replace('main_blocks.', 'stages.stage_') k = k.replace('head.', 'head.fc.') out_dict[k] = v return out_dict @@ -554,8 +551,25 @@ def checkpoint_filter_fn(state_dict, model): def _create_davit(variant, pretrained=False, **kwargs): - model = build_model_with_cfg(DaViT, variant, pretrained, - pretrained_filter_fn=checkpoint_filter_fn, **kwargs) + model_cls = DaViT + features_only = False + kwargs_filter = None + default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1)))) + out_indices = kwargs.pop('out_indices', default_out_indices) + if kwargs.pop('features_only', False): + model_cls = DaViTFeatures + kwargs_filter = ('num_classes', 'global_pool') + features_only = True + model = build_model_with_cfg( + model_cls, + variant, + pretrained, + pretrained_filter_fn=checkpoint_filter_fn, + feature_cfg=dict(flatten_sequential=True, out_indices=out_indices), + **kwargs) + if features_only: + model.pretrained_cfg = pretrained_cfg_for_features(model.default_cfg) + model.default_cfg = model.pretrained_cfg # backwards compat return model @@ -573,13 +587,13 @@ def 
_cfg(url='', **kwargs):  # not sure how this should be set up
 
 
 default_cfgs = generate_default_cfgs({
-
-'davit_tiny.msft_in1k': _cfg(
-    url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_tiny_ed28dd55.pth.tar"),
-'davit_small.msft_in1k': _cfg(
-    url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_small_d1ecf281.pth.tar"),
-'davit_base.msft_in1k': _cfg(
-    url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_base_67d9ac26.pth.tar"),
+    # official microsoft weights from https://github.com/dingmyu/davit
+    'davit_tiny.msft_in1k': _cfg(
+        url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_tiny_ed28dd55.pth.tar"),
+    'davit_small.msft_in1k': _cfg(
+        url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_small_d1ecf281.pth.tar"),
+    'davit_base.msft_in1k': _cfg(
+        url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_base_67d9ac26.pth.tar"),
 })
 
 

From c43340ddd4e45e728781f8c06d9ebd589894e62c Mon Sep 17 00:00:00 2001
From: Fredo Guan
Date: Sun, 11 Dec 2022 03:03:22 -0800
Subject: [PATCH 03/34] Davit std (#5)

* Update davit.py (long run of incremental 'Update davit.py' revisions, squashed)

* Update test_models.py

* starting point

* Davit revised (#4)

* Update davit.py clean up

* Update test_models.py

---
 tests/test_models.py |   4 +-
 timm/models/davit.py | 340 +++++++++++++++++++++++--------------------
 2 files changed, 181 insertions(+), 163 deletions(-)

diff --git a/tests/test_models.py b/tests/test_models.py
index 97872fde..008d87b7 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -40,7 +40,7 @@ if 'GITHUB_ACTIONS' in os.environ:
         '*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm', '*101x3_bitm', '*50x3_bitm',
         '*nfnet_f3*', '*nfnet_f4*', '*nfnet_f5*', '*nfnet_f6*', '*nfnet_f7*', '*efficientnetv2_xl*',
         '*resnetrs350*', '*resnetrs420*', 'xcit_large_24_p8*', 'vit_huge*', 'vit_gi*', 'swin*huge*',
-        'swin*giant*']
+        'swin*giant*', 'davit*giant', 'davit*huge']
     NON_STD_EXCLUDE_FILTERS = ['vit_huge*', 'vit_gi*', 'swin*giant*', 'eva_giant*']
 else:
     EXCLUDE_FILTERS = []
@@ -271,7 +271,7 @@ if 'GITHUB_ACTIONS' not in os.environ:
 EXCLUDE_JIT_FILTERS = [
     '*iabn*', 'tresnet*',  # models using inplace abn unlikely to ever be scriptable
- 'dla*', 'hrnet*', 'ghostnet*', # hopefully fix at some point + 'dla*', 'hrnet*', 'ghostnet*' # hopefully fix at some point 'vit_large_*', 'vit_huge_*', 'vit_gi*', ] diff --git a/timm/models/davit.py b/timm/models/davit.py index eda928e4..e551cc61 100644 --- a/timm/models/davit.py +++ b/timm/models/davit.py @@ -12,8 +12,10 @@ DaViT model defs and weights adapted from https://github.com/dingmyu/davit, orig # All rights reserved. # This source code is licensed under the MIT license +# FIXME remove unused imports + import itertools -from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, overload, Tuple, TypeVar, Union, List +from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, overload, Tuple, TypeVar, Union from collections import OrderedDict import torch @@ -32,6 +34,7 @@ from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD __all__ = ['DaViT'] + class ConvPosEnc(nn.Module): def __init__(self, dim : int, k : int=3, act : bool=False, normtype : str='none'): @@ -50,25 +53,21 @@ class ConvPosEnc(nn.Module): self.norm = nn.LayerNorm(dim) self.activation = nn.GELU() if act else nn.Identity() - def forward(self, x : Tensor, size: Tuple[int, int]): - B, N, C = x.shape - H, W = size + def forward(self, x : Tensor): + B, C, H, W = x.shape - feat = x.transpose(1, 2).view(B, C, H, W) - feat = self.proj(feat) + #feat = x.transpose(1, 2).view(B, C, H, W) + feat = self.proj(x) if self.normtype == 'batch': feat = self.norm(feat).flatten(2).transpose(1, 2) elif self.normtype == 'layer': feat = self.norm(feat.flatten(2).transpose(1, 2)) else: feat = feat.flatten(2).transpose(1, 2) - x = x + self.activation(feat) + x = x + self.activation(feat).transpose(1, 2).view(B, C, H, W) return x - -# reason: dim in control sequence -# FIXME reimplement to allow tracing -@register_notrace_module + class PatchEmbed(nn.Module): """ Size-agnostic implementation of 2D image to patch embedding, allowing input size to be adjusted during model forward operation @@ -76,13 +75,15 @@ class PatchEmbed(nn.Module): def __init__( self, - patch_size=16, + patch_size=4, in_chans=3, embed_dim=96, overlapped=False): super().__init__() patch_size = to_2tuple(patch_size) self.patch_size = patch_size + self.in_chans = in_chans + self.embed_dim = embed_dim if patch_size[0] == 4: self.proj = nn.Conv2d( @@ -104,31 +105,20 @@ class PatchEmbed(nn.Module): self.norm = nn.LayerNorm(in_chans) - def forward(self, x : Tensor, size: Tuple[int, int]): - H, W = size - dim = x.dim() - if dim == 3: - B, HW, C = x.shape - x = self.norm(x) - x = x.reshape(B, - H, - W, - C).permute(0, 3, 1, 2).contiguous() - + def forward(self, x : Tensor): B, C, H, W = x.shape - if W % self.patch_size[1] != 0: - x = F.pad(x, (0, self.patch_size[1] - W % self.patch_size[1])) - if H % self.patch_size[0] != 0: - x = F.pad(x, (0, 0, 0, self.patch_size[0] - H % self.patch_size[0])) + if self.norm.normalized_shape[0] == self.in_chans: + x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + + x = F.pad(x, (0, (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1])) + x = F.pad(x, (0, 0, 0, (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0])) x = self.proj(x) - newsize = (x.size(2), x.size(3)) - x = x.flatten(2).transpose(1, 2) - if dim == 4: - x = self.norm(x) - return x, newsize - + if self.norm.normalized_shape[0] == self.embed_dim: + x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + return x + class ChannelAttention(nn.Module): def __init__(self, dim, num_heads=8, qkv_bias=False): @@ 
-153,7 +143,7 @@ class ChannelAttention(nn.Module): x = x.transpose(1, 2).reshape(B, N, C) x = self.proj(x) return x - + class ChannelBlock(nn.Module): @@ -162,13 +152,13 @@ class ChannelBlock(nn.Module): ffn=True, cpe_act=False): super().__init__() - self.cpe = nn.ModuleList([ConvPosEnc(dim=dim, k=3, act=cpe_act), - ConvPosEnc(dim=dim, k=3, act=cpe_act)]) + self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act) self.ffn = ffn self.norm1 = norm_layer(dim) self.attn = ChannelAttention(dim, num_heads=num_heads, qkv_bias=qkv_bias) self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() - + self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act) + if self.ffn: self.norm2 = norm_layer(dim) mlp_hidden_dim = int(dim * mlp_ratio) @@ -178,17 +168,23 @@ class ChannelBlock(nn.Module): act_layer=act_layer) - def forward(self, x : Tensor, size: Tuple[int, int]): - x = self.cpe[0](x, size) + def forward(self, x : Tensor): + + B, C, H, W = x.shape + + x = self.cpe1(x).flatten(2).transpose(1, 2) + cur = self.norm1(x) cur = self.attn(cur) x = x + self.drop_path(cur) - x = self.cpe[1](x, size) + x = self.cpe2(x.transpose(1, 2).view(B, C, H, W)).flatten(2).transpose(1, 2) if self.ffn: x = x + self.drop_path(self.mlp(self.norm2(x))) - return x, size - + + x = x.transpose(1, 2).view(B, C, H, W) + + return x def window_partition(x : Tensor, window_size: int): """ @@ -283,9 +279,8 @@ class SpatialBlock(nn.Module): self.num_heads = num_heads self.window_size = window_size self.mlp_ratio = mlp_ratio - self.cpe = nn.ModuleList([ConvPosEnc(dim=dim, k=3, act=cpe_act), - ConvPosEnc(dim=dim, k=3, act=cpe_act)]) - + + self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act) self.norm1 = norm_layer(dim) self.attn = WindowAttention( dim, @@ -294,7 +289,8 @@ class SpatialBlock(nn.Module): qkv_bias=qkv_bias) self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() - + self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act) + if self.ffn: self.norm2 = norm_layer(dim) mlp_hidden_dim = int(dim * mlp_ratio) @@ -304,12 +300,11 @@ class SpatialBlock(nn.Module): act_layer=act_layer) - def forward(self, x : Tensor, size: Tuple[int, int]): + def forward(self, x : Tensor): + B, C, H, W = x.shape - H, W = size - B, L, C = x.shape - shortcut = self.cpe[0](x, size) + shortcut = self.cpe1(x).flatten(2).transpose(1, 2) x = self.norm1(shortcut) x = x.view(B, H, W, C) @@ -338,11 +333,92 @@ class SpatialBlock(nn.Module): x = x.view(B, H * W, C) x = shortcut + self.drop_path(x) - x = self.cpe[1](x, size) + x = self.cpe2(x.transpose(1, 2).view(B, C, H, W)).flatten(2).transpose(1, 2) if self.ffn: x = x + self.drop_path(self.mlp(self.norm2(x))) - return x, size + + x = x.transpose(1, 2).view(B, C, H, W) + + return x + +class DaViTStage(nn.Module): + def __init__( + self, + in_chs, + out_chs, + depth = 1, + patch_size = 4, + overlapped_patch = False, + attention_types = ('spatial', 'channel'), + num_heads = 3, + window_size = 7, + mlp_ratio = 4, + qkv_bias = True, + drop_path_rates = (0, 0), + norm_layer = nn.LayerNorm, + ffn = True, + cpe_act = False + ): + super().__init__() + + self.grad_checkpointing = False + + # patch embedding layer at the beginning of each stage + self.patch_embed = PatchEmbed( + patch_size=patch_size, + in_chans=in_chs, + embed_dim=out_chs, + overlapped=overlapped_patch + ) + ''' + repeating alternating attention blocks in each stage + default: (spatial -> channel) x depth + + potential opportunity to integrate with a more general version of ByobNet/ByoaNet + since the logic is similar + ''' + stage_blocks = [] + for block_idx in range(depth): + + dual_attention_block = [] + + for attention_id, attention_type in enumerate(attention_types): + if attention_type == 'spatial': + dual_attention_block.append(SpatialBlock( + dim=out_chs, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], + norm_layer=nn.LayerNorm, + ffn=ffn, + cpe_act=cpe_act, + window_size=window_size, + )) + elif attention_type == 'channel': + dual_attention_block.append(ChannelBlock( + dim=out_chs, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], + norm_layer=nn.LayerNorm, + ffn=ffn, + cpe_act=cpe_act + )) + + stage_blocks.append(nn.Sequential(*dual_attention_block)) + + self.blocks = nn.Sequential(*stage_blocks) + + def forward(self, x : Tensor): + x = self.patch_embed(x) + if self.grad_checkpointing and not torch.jit.is_scripting(): + x = checkpoint_seq(self.blocks, x) + else: + x = self.blocks(x) + return x class DaViT(nn.Module): @@ -392,7 +468,7 @@ class DaViT(nn.Module): self.embed_dims = embed_dims self.num_heads = num_heads self.num_stages = len(self.embed_dims) - dpr = [x.item() for x in torch.linspace(0, drop_path_rate, 2 * len(list(itertools.chain(*self.architecture))))] + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, len(attention_types) * len(list(itertools.chain(*self.architecture))))] assert self.num_stages == len(self.num_heads) == (sorted(list(itertools.chain(*self.architecture)))[-1] + 1) self.num_classes = num_classes @@ -401,51 +477,37 @@ class DaViT(nn.Module): self.grad_checkpointing = False self.feature_info = [] - self.patch_embeds = nn.ModuleList([ - PatchEmbed(patch_size=patch_size if i == 0 else 2, - in_chans=in_chans if i == 0 else 
self.embed_dims[i - 1], - embed_dim=self.embed_dims[i], - overlapped=overlapped_patch) - for i in range(self.num_stages)]) - - self.stages = nn.ModuleList() - for stage_id, stage_param in enumerate(self.architecture): - layer_offset_id = len(list(itertools.chain(*self.architecture[:stage_id]))) - - stage = nn.ModuleList([ - nn.ModuleList([ - ChannelBlock( - dim=self.embed_dims[item], - num_heads=self.num_heads[item], - mlp_ratio=mlp_ratio, - qkv_bias=qkv_bias, - drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id], - norm_layer=nn.LayerNorm, - ffn=ffn, - cpe_act=cpe_act - ) if attention_type == 'channel' else - SpatialBlock( - dim=self.embed_dims[item], - num_heads=self.num_heads[item], - mlp_ratio=mlp_ratio, - qkv_bias=qkv_bias, - drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id], - norm_layer=nn.LayerNorm, - ffn=ffn, - cpe_act=cpe_act, - window_size=window_size, - ) if attention_type == 'spatial' else None - for attention_id, attention_type in enumerate(attention_types)] - ) for layer_id, item in enumerate(stage_param) - ]) + stages = [] + + for stage_id in range(self.num_stages): + stage_drop_rates = dpr[len(attention_types) * sum(depths[:stage_id]):len(attention_types) * sum(depths[:stage_id + 1])] + + stage = DaViTStage( + in_chans if stage_id == 0 else embed_dims[stage_id - 1], + embed_dims[stage_id], + depth = depths[stage_id], + patch_size = patch_size if stage_id == 0 else 2, + overlapped_patch = overlapped_patch, + attention_types = attention_types, + num_heads = num_heads[stage_id], + window_size = window_size, + mlp_ratio = mlp_ratio, + qkv_bias = qkv_bias, + drop_path_rates = stage_drop_rates, + norm_layer = nn.LayerNorm, + ffn = ffn, + cpe_act = cpe_act + ) - self.stages.add_module(f'stage_{stage_id}', stage) - self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.stage_{stage_id}')] - + stages.append(stage) + self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.{stage_id}')] + + + self.stages = nn.Sequential(*stages) + self.norms = norm_layer(self.num_features) self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate) self.apply(self._init_weights) - def _init_weights(self, m): if isinstance(m, nn.Linear): @@ -469,46 +531,13 @@ class DaViT(nn.Module): if global_pool is None: global_pool = self.head.global_pool.pool_type self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate) - - - def forward_network(self, x): - size: Tuple[int, int] = (x.size(2), x.size(3)) - features = [x] - sizes = [size] - - for patch_layer, stage in zip(self.patch_embeds, self.stages): - features[-1], sizes[-1] = patch_layer(features[-1], sizes[-1]) - for _, block in enumerate(stage): - for _, layer in enumerate(block): - if self.grad_checkpointing and not torch.jit.is_scripting(): - features[-1], sizes[-1] = checkpoint.checkpoint(layer, features[-1], sizes[-1]) - else: - features[-1], sizes[-1] = layer(features[-1], sizes[-1]) - - # don't append outputs of last stage, since they are already there - if(len(features) < self.num_stages): - features.append(features[-1]) - sizes.append(sizes[-1]) - - - # non-normalized pyramid features + corresponding sizes - return features, sizes - - def forward_pyramid_features(self, x) -> List[Tensor]: - x, sizes = self.forward_network(x) - outs = [] - for i, out in enumerate(x): - H, W = sizes[i] - outs.append(out.view(-1, H, W, self.embed_dims[i]).permute(0, 3, 1, 
2).contiguous()) - - return outs def forward_features(self, x): - x, sizes = self.forward_network(x) + x = self.stages(x) # take final feature and norm - x = self.norms(x[-1]) - H, W = sizes[-1] - x = x.view(-1, H, W, self.embed_dims[-1]).permute(0, 3, 1, 2).contiguous() + x = self.norms(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + #H, W = sizes[-1] + #x = x.view(-1, H, W, self.embed_dims[-1]).permute(0, 3, 1, 2).contiguous() return x def forward_head(self, x, pre_logits: bool = False): @@ -521,17 +550,6 @@ class DaViT(nn.Module): def forward(self, x): return self.forward_classifier(x) - - -class DaViTFeatures(DaViT): - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.feature_info = FeatureInfo(self.feature_info, kwargs.get('out_indices', (0, 1, 2, 3))) - - def forward(self, x) -> List[Tensor]: - return self.forward_pyramid_features(x) - def checkpoint_filter_fn(state_dict, model): """ Remap MSFT checkpoints -> timm """ @@ -541,38 +559,36 @@ def checkpoint_filter_fn(state_dict, model): if 'state_dict' in state_dict: state_dict = state_dict['state_dict'] + import re out_dict = {} for k, v in state_dict.items(): - k = k.replace('main_blocks.', 'stages.stage_') + k = re.sub(r'patch_embeds.([0-9]+)', r'stages.\1.patch_embed', k) + k = re.sub(r'main_blocks.([0-9]+)', r'stages.\1.blocks', k) k = k.replace('head.', 'head.fc.') + k = k.replace('cpe.0', 'cpe1') + k = k.replace('cpe.1', 'cpe2') out_dict[k] = v return out_dict - - + def _create_davit(variant, pretrained=False, **kwargs): - model_cls = DaViT - features_only = False - kwargs_filter = None + + + default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1)))) out_indices = kwargs.pop('out_indices', default_out_indices) - if kwargs.pop('features_only', False): - model_cls = DaViTFeatures - kwargs_filter = ('num_classes', 'global_pool') - features_only = True + model = build_model_with_cfg( - model_cls, + DaViT, variant, pretrained, pretrained_filter_fn=checkpoint_filter_fn, feature_cfg=dict(flatten_sequential=True, out_indices=out_indices), **kwargs) - if features_only: - model.pretrained_cfg = pretrained_cfg_for_features(model.default_cfg) - model.default_cfg = model.pretrained_cfg # backwards compat - return model - + return model + + def _cfg(url='', **kwargs): # not sure how this should be set up return { @@ -580,7 +596,7 @@ def _cfg(url='', **kwargs): # not sure how this should be set up 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), 'crop_pct': 0.875, 'interpolation': 'bilinear', 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, - 'first_conv': 'patch_embeds.0.proj', 'classifier': 'head.fc', + 'first_conv': 'stages.0.patch_embed.proj', 'classifier': 'head.fc', **kwargs } @@ -594,6 +610,9 @@ default_cfgs = generate_default_cfgs({ url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_small_d1ecf281.pth.tar"), 'davit_base.msft_in1k': _cfg( url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_base_67d9ac26.pth.tar"), + 'davit_large': _cfg(), + 'davit_huge': _cfg(), + 'davit_giant': _cfg(), }) @@ -616,7 +635,7 @@ def davit_base(pretrained=False, **kwargs): num_heads=(4, 8, 16, 32), **kwargs) return _create_davit('davit_base', pretrained=pretrained, **model_kwargs) -''' models without weights + # TODO contact authors to get larger pretrained models @register_model def davit_large(pretrained=False, **kwargs): @@ -635,4 +654,3 @@ def davit_giant(pretrained=False, **kwargs): 
model_kwargs = dict(depths=(1, 1, 12, 3), embed_dims=(384, 768, 1536, 3072), num_heads=(12, 24, 48, 96), **kwargs) return _create_davit('davit_giant', pretrained=pretrained, **model_kwargs) -''' \ No newline at end of file From 10b3f696b4913132fac6c3d7d708405b92319818 Mon Sep 17 00:00:00 2001 From: Fredo Guan Date: Fri, 16 Dec 2022 21:50:28 -0800 Subject: [PATCH 04/34] Davit std (#6) Separate patch_embed module --- timm/models/davit.py | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/timm/models/davit.py b/timm/models/davit.py index e551cc61..0ccd2ae0 100644 --- a/timm/models/davit.py +++ b/timm/models/davit.py @@ -12,25 +12,21 @@ DaViT model defs and weights adapted from https://github.com/dingmyu/davit, orig # All rights reserved. # This source code is licensed under the MIT license -# FIXME remove unused imports - import itertools -from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, overload, Tuple, TypeVar, Union -from collections import OrderedDict import torch import torch.nn as nn import torch.nn.functional as F from torch import Tensor -import torch.utils.checkpoint as checkpoint - -from .features import FeatureInfo -from .fx_features import register_notrace_function, register_notrace_module -from .helpers import build_model_with_cfg, pretrained_cfg_for_features -from .layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, ClassifierHead, Mlp -from .pretrained import generate_default_cfgs -from .registry import register_model + from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from timm.layers import DropPath, to_2tuple, trunc_normal_, ClassifierHead, Mlp +from ._builder import build_model_with_cfg +from ._features import FeatureInfo +from ._features_fx import register_notrace_function +from ._manipulate import checkpoint_seq +from ._pretrained import generate_default_cfgs +from ._registry import register_model __all__ = ['DaViT'] @@ -391,7 +387,7 @@ class DaViTStage(nn.Module): mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], - norm_layer=nn.LayerNorm, + norm_layer=norm_layer, ffn=ffn, cpe_act=cpe_act, window_size=window_size, @@ -403,7 +399,7 @@ class DaViTStage(nn.Module): mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], - norm_layer=nn.LayerNorm, + norm_layer=norm_layer, ffn=ffn, cpe_act=cpe_act )) @@ -476,7 +472,8 @@ class DaViT(nn.Module): self.drop_rate=drop_rate self.grad_checkpointing = False self.feature_info = [] - + + self.patch_embed = None stages = [] for stage_id in range(self.num_stages): @@ -499,6 +496,10 @@ class DaViT(nn.Module): cpe_act = cpe_act ) + if stage_id == 0: + self.patch_embed = stage.patch_embed + stage.patch_embed = nn.Identity() + stages.append(stage) self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.{stage_id}')] @@ -533,6 +534,7 @@ class DaViT(nn.Module): self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate) def forward_features(self, x): + x = self.patch_embed(x) x = self.stages(x) # take final feature and norm x = self.norms(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) @@ -562,8 +564,10 @@ def checkpoint_filter_fn(state_dict, model): import re out_dict = {} for k, v in state_dict.items(): + k = re.sub(r'patch_embeds.([0-9]+)', r'stages.\1.patch_embed', k) k = re.sub(r'main_blocks.([0-9]+)', 
r'stages.\1.blocks', k) + k = k.replace('stages.0.patch_embed', 'patch_embed') k = k.replace('head.', 'head.fc.') k = k.replace('cpe.0', 'cpe1') k = k.replace('cpe.1', 'cpe2') @@ -596,12 +600,13 @@ def _cfg(url='', **kwargs): # not sure how this should be set up 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), 'crop_pct': 0.875, 'interpolation': 'bilinear', 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, - 'first_conv': 'stages.0.patch_embed.proj', 'classifier': 'head.fc', + 'first_conv': 'patch_embed.proj', 'classifier': 'head.fc', **kwargs } +# TODO contact authors to get larger pretrained models default_cfgs = generate_default_cfgs({ # official microsoft weights from https://github.com/dingmyu/davit 'davit_tiny.msft_in1k': _cfg( @@ -635,8 +640,6 @@ def davit_base(pretrained=False, **kwargs): num_heads=(4, 8, 16, 32), **kwargs) return _create_davit('davit_base', pretrained=pretrained, **model_kwargs) - -# TODO contact authors to get larger pretrained models @register_model def davit_large(pretrained=False, **kwargs): model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(192, 384, 768, 1536), From add3fb864e51e489371753082b84372060ca3e19 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 5 Jan 2023 17:50:11 -0800 Subject: [PATCH 05/34] Working on improved model card template for push_to_hf_hub --- timm/models/_hub.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/timm/models/_hub.py b/timm/models/_hub.py index 7c64df0b..df1a1ef7 100644 --- a/timm/models/_hub.py +++ b/timm/models/_hub.py @@ -209,6 +209,7 @@ def push_to_hf_hub( private: bool = False, create_pr: bool = False, model_config: Optional[dict] = None, + model_card: Optional[dict] = None, ): # Create repo if it doesn't exist yet repo_url = create_repo(repo_id, token=token, private=private, exist_ok=True) @@ -232,9 +233,23 @@ def push_to_hf_hub( # Add readme if it does not exist if not has_readme: + model_card = model_card or {} model_name = repo_id.split('/')[-1] readme_path = Path(tmpdir) / "README.md" - readme_text = f'---\ntags:\n- image-classification\n- timm\nlibrary_tag: timm\n---\n# Model card for {model_name}' + readme_text = "---\n" + readme_text += "tags:\n- image-classification\n- timm\n" + readme_text += "library_tag: timm\n" + readme_text += f"license: {model_card.get('license', 'apache-2.0')}\n" + readme_text += "---\n" + readme_text += f"# Model card for {model_name}\n" + if 'description' in model_card: + readme_text += f"\n{model_card['description']}\n" + if 'details' in model_card: + readme_text += f"\n## Model Details\n" + for k, v in model_card['details'].items(): + readme_text += f"- **{k}:** {v}\n" + if 'citation' in model_card: + readme_text += f"\n## Citation\n```\n{model_card['citation']}```\n" readme_path.write_text(readme_text) # Upload model and return From e861b74cf805232204c4645f68bfb43333e4d0a5 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 6 Jan 2023 12:01:43 -0800 Subject: [PATCH 06/34] Pass through --model-kwargs (and --opt-kwargs for train) from command line through to model __init__. Update some models to improve arg overlay. Cleanup along the way. 
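An example of the new overlay path (hypothetical values; ParseKwargs collects space-separated key=value pairs into a dict that is expanded into create_model / create_optimizer_v2):

    python train.py /imagenet --model regnety_004 --model-kwargs act_layer=silu
    python train.py /imagenet --model resnet50 --opt adamw --opt-kwargs eps=1e-6

The CLI form is equivalent to passing the kwargs in Python. A minimal sketch, assuming a cfg-based model (byobnet, cspnet, nfnet, regnet) where extra kwargs are overlaid onto the architecture config via dataclasses.replace(cfg, **kwargs):

    from timm.models import create_model
    # extra kwargs flow through create_model into the model __init__
    model = create_model('regnety_004', act_layer='silu')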
--- benchmark.py | 26 +-- inference.py | 14 +- timm/models/byobnet.py | 24 ++- timm/models/convnext.py | 36 +++-- timm/models/cspnet.py | 35 ++-- timm/models/nfnet.py | 156 +++++++++++++++--- timm/models/regnet.py | 105 ++++++++++-- timm/models/resnet.py | 78 +++++---- timm/models/resnetv2.py | 73 +++++++-- timm/models/vision_transformer.py | 220 +++++++++++++------------ timm/models/vovnet.py | 74 +++++++-- timm/utils/__init__.py | 2 +- timm/utils/misc.py | 14 ++ train.py | 258 +++++++++++++++--------------- validate.py | 20 ++- 15 files changed, 775 insertions(+), 360 deletions(-) diff --git a/benchmark.py b/benchmark.py index 58435ff8..2cce3e2c 100755 --- a/benchmark.py +++ b/benchmark.py @@ -22,7 +22,7 @@ from timm.data import resolve_data_config from timm.layers import set_fast_norm from timm.models import create_model, is_model, list_models from timm.optim import create_optimizer_v2 -from timm.utils import setup_default_logging, set_jit_fuser, decay_batch_step, check_batch_size_retry +from timm.utils import setup_default_logging, set_jit_fuser, decay_batch_step, check_batch_size_retry, ParseKwargs has_apex = False try: @@ -108,12 +108,15 @@ parser.add_argument('--grad-checkpointing', action='store_true', default=False, help='Enable gradient checkpointing through model blocks/stages') parser.add_argument('--amp', action='store_true', default=False, help='use PyTorch Native AMP for mixed precision training. Overrides --precision arg.') +parser.add_argument('--amp-dtype', default='float16', type=str, + help='lower precision AMP dtype (default: float16). Overrides --precision arg if args.amp True.') parser.add_argument('--precision', default='float32', type=str, help='Numeric precision. One of (amp, float32, float16, bfloat16, tf32)') parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')") parser.add_argument('--fast-norm', default=False, action='store_true', help='enable experimental fast-norm') +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) # codegen (model compilation) options scripting_group = parser.add_mutually_exclusive_group() @@ -124,7 +127,6 @@ scripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None scripting_group.add_argument('--aot-autograd', default=False, action='store_true', help="Enable AOT Autograd optimization.") - # train optimizer parameters parser.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER', help='Optimizer (default: "sgd")') @@ -168,19 +170,21 @@ def count_params(model: nn.Module): def resolve_precision(precision: str): - assert precision in ('amp', 'float16', 'bfloat16', 'float32') - use_amp = False + assert precision in ('amp', 'amp_bfloat16', 'float16', 'bfloat16', 'float32') + amp_dtype = None # amp disabled model_dtype = torch.float32 data_dtype = torch.float32 if precision == 'amp': - use_amp = True + amp_dtype = torch.float16 + elif precision == 'amp_bfloat16': + amp_dtype = torch.bfloat16 elif precision == 'float16': model_dtype = torch.float16 data_dtype = torch.float16 elif precision == 'bfloat16': model_dtype = torch.bfloat16 data_dtype = torch.bfloat16 - return use_amp, model_dtype, data_dtype + return amp_dtype, model_dtype, data_dtype def profile_deepspeed(model, input_size=(3, 224, 224), batch_size=1, detailed=False): @@ -228,9 +232,12 @@ class BenchmarkRunner: self.model_name = model_name self.detail = detail self.device = device - self.use_amp, self.model_dtype, self.data_dtype = resolve_precision(precision) + self.amp_dtype, self.model_dtype, self.data_dtype = resolve_precision(precision) self.channels_last = kwargs.pop('channels_last', False) - self.amp_autocast = partial(torch.cuda.amp.autocast, dtype=torch.float16) if self.use_amp else suppress + if self.amp_dtype is not None: + self.amp_autocast = partial(torch.cuda.amp.autocast, dtype=self.amp_dtype) + else: + self.amp_autocast = suppress if fuser: set_jit_fuser(fuser) @@ -243,6 +250,7 @@ class BenchmarkRunner: drop_rate=kwargs.pop('drop', 0.), drop_path_rate=kwargs.pop('drop_path', None), drop_block_rate=kwargs.pop('drop_block', None), + **kwargs.pop('model_kwargs', {}), ) self.model.to( device=self.device, @@ -560,7 +568,7 @@ def _try_run( def benchmark(args): if args.amp: _logger.warning("Overriding precision to 'amp' since --amp flag set.") - args.precision = 'amp' + args.precision = 'amp' if args.amp_dtype == 'float16' else '_'.join(['amp', args.amp_dtype]) _logger.info(f'Benchmarking in {args.precision} precision. ' f'{"NHWC" if args.channels_last else "NCHW"} layout. 
' f'torchscript {"enabled" if args.torchscript else "disabled"}') diff --git a/inference.py b/inference.py index 1509b323..cfbe62d1 100755 --- a/inference.py +++ b/inference.py @@ -20,7 +20,7 @@ import torch from timm.data import create_dataset, create_loader, resolve_data_config from timm.layers import apply_test_time_pool from timm.models import create_model -from timm.utils import AverageMeter, setup_default_logging, set_jit_fuser +from timm.utils import AverageMeter, setup_default_logging, set_jit_fuser, ParseKwargs try: from apex import amp @@ -72,6 +72,8 @@ parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N', help='mini-batch size (default: 256)') parser.add_argument('--img-size', default=None, type=int, metavar='N', help='Input image dimension, uses model default if empty') +parser.add_argument('--in-chans', type=int, default=None, metavar='N', + help='Image input channels (default: None => 3)') parser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') parser.add_argument('--use-train-size', action='store_true', default=False, @@ -110,6 +112,7 @@ parser.add_argument('--amp-dtype', default='float16', type=str, help='lower precision AMP dtype (default: float16)') parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) scripting_group = parser.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', default=False, action='store_true', @@ -170,12 +173,19 @@ def main(): set_jit_fuser(args.fuser) # create model + in_chans = 3 + if args.in_chans is not None: + in_chans = args.in_chans + elif args.input_size is not None: + in_chans = args.input_size[0] + model = create_model( args.model, num_classes=args.num_classes, - in_chans=3, + in_chans=in_chans, pretrained=args.pretrained, checkpoint_path=args.checkpoint, + **args.model_kwargs, ) if args.num_classes is None: assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' diff --git a/timm/models/byobnet.py b/timm/models/byobnet.py index 15f78044..1c7f1137 100644 --- a/timm/models/byobnet.py +++ b/timm/models/byobnet.py @@ -218,7 +218,10 @@ def _rep_vgg_bcfg(d=(4, 6, 16, 1), wf=(1., 1., 1., 1.), groups=0): def interleave_blocks( - types: Tuple[str, str], d, every: Union[int, List[int]] = 1, first: bool = False, **kwargs + types: Tuple[str, str], d, + every: Union[int, List[int]] = 1, + first: bool = False, + **kwargs, ) -> Tuple[ByoBlockCfg]: """ interleave 2 block types in stack """ @@ -1587,15 +1590,32 @@ class ByobNet(nn.Module): in_chans=3, global_pool='avg', output_stride=32, - zero_init_last=True, img_size=None, drop_rate=0., drop_path_rate=0., + zero_init_last=True, + **kwargs, ): + """ + + Args: + cfg (ByoModelCfg): Model architecture configuration + num_classes (int): Number of classifier classes (default: 1000) + in_chans (int): Number of input channels (default: 3) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + img_size (Union[int, Tuple[int]]): Image size for fixed image size models (i.e. self-attn) + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) 
+ zero_init_last (bool): Zero-init last weight of residual path + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate self.grad_checkpointing = False + + cfg = replace(cfg, **kwargs) # overlay kwargs onto cfg layers = get_layer_fns(cfg) if cfg.fixed_input_size: assert img_size is not None, 'img_size argument is required for fixed input size model' diff --git a/timm/models/convnext.py b/timm/models/convnext.py index e9214429..e799a7de 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -167,7 +167,7 @@ class ConvNeXtStage(nn.Module): conv_bias=conv_bias, use_grn=use_grn, act_layer=act_layer, - norm_layer=norm_layer if conv_mlp else norm_layer_cl + norm_layer=norm_layer if conv_mlp else norm_layer_cl, )) in_chs = out_chs self.blocks = nn.Sequential(*stage_blocks) @@ -184,16 +184,6 @@ class ConvNeXt(nn.Module): r""" ConvNeXt A PyTorch impl of : `A ConvNet for the 2020s` - https://arxiv.org/pdf/2201.03545.pdf - - Args: - in_chans (int): Number of input image channels. Default: 3 - num_classes (int): Number of classes for classification head. Default: 1000 - depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3] - dims (tuple(int)): Feature dimension at each stage. Default: [96, 192, 384, 768] - drop_rate (float): Head dropout rate - drop_path_rate (float): Stochastic depth rate. Default: 0. - ls_init_value (float): Init value for Layer Scale. Default: 1e-6. - head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1. """ def __init__( @@ -218,6 +208,28 @@ class ConvNeXt(nn.Module): drop_rate=0., drop_path_rate=0., ): + """ + Args: + in_chans (int): Number of input image channels (default: 3) + num_classes (int): Number of classes for classification head (default: 1000) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + depths (tuple(int)): Number of blocks at each stage. (default: [3, 3, 9, 3]) + dims (tuple(int)): Feature dimension at each stage. (default: [96, 192, 384, 768]) + kernel_sizes (Union[int, List[int]]): Depthwise convolution kernel-sizes for each stage (default: 7) + ls_init_value (float): Init value for Layer Scale (default: 1e-6) + stem_type (str): Type of stem (default: 'patch') + patch_size (int): Stem patch size for patch stem (default: 4) + head_init_scale (float): Init scaling value for classifier weights and biases (default: 1) + head_norm_first (bool): Apply normalization before global pool + head (default: False) + conv_mlp (bool): Use 1x1 conv in MLP, improves speed for small networks w/ chan last (default: False) + conv_bias (bool): Use bias layers w/ all convolutions (default: True) + use_grn (bool): Use Global Response Norm (ConvNeXt-V2) in MLP (default: False) + act_layer (Union[str, nn.Module]): Activation Layer + norm_layer (Union[str, nn.Module]): Normalization Layer + drop_rate (float): Head dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth rate (default: 0.) 
+ """ super().__init__() assert output_stride in (8, 16, 32) kernel_sizes = to_ntuple(4)(kernel_sizes) @@ -279,7 +291,7 @@ class ConvNeXt(nn.Module): use_grn=use_grn, act_layer=act_layer, norm_layer=norm_layer, - norm_layer_cl=norm_layer_cl + norm_layer_cl=norm_layer_cl, )) prev_chs = out_chs # NOTE feature_info use currently assumes stage 0 == stride 1, rest are stride 2 diff --git a/timm/models/cspnet.py b/timm/models/cspnet.py index 280f929e..26ec54d9 100644 --- a/timm/models/cspnet.py +++ b/timm/models/cspnet.py @@ -12,7 +12,7 @@ Reference impl via darknet cfg files at https://github.com/WongKinYiu/CrossStage Hacked together by / Copyright 2020 Ross Wightman """ -from dataclasses import dataclass, asdict +from dataclasses import dataclass, asdict, replace from functools import partial from typing import Any, Dict, Optional, Tuple, Union @@ -518,7 +518,7 @@ class CrossStage(nn.Module): cross_linear=False, block_dpr=None, block_fn=BottleneckBlock, - **block_kwargs + **block_kwargs, ): super(CrossStage, self).__init__() first_dilation = first_dilation or dilation @@ -558,7 +558,7 @@ class CrossStage(nn.Module): bottle_ratio=bottle_ratio, groups=groups, drop_path=block_dpr[i] if block_dpr is not None else 0., - **block_kwargs + **block_kwargs, )) prev_chs = block_out_chs @@ -597,7 +597,7 @@ class CrossStage3(nn.Module): cross_linear=False, block_dpr=None, block_fn=BottleneckBlock, - **block_kwargs + **block_kwargs, ): super(CrossStage3, self).__init__() first_dilation = first_dilation or dilation @@ -635,7 +635,7 @@ class CrossStage3(nn.Module): bottle_ratio=bottle_ratio, groups=groups, drop_path=block_dpr[i] if block_dpr is not None else 0., - **block_kwargs + **block_kwargs, )) prev_chs = block_out_chs @@ -668,7 +668,7 @@ class DarkStage(nn.Module): avg_down=False, block_fn=BottleneckBlock, block_dpr=None, - **block_kwargs + **block_kwargs, ): super(DarkStage, self).__init__() first_dilation = first_dilation or dilation @@ -715,7 +715,7 @@ def create_csp_stem( padding='', act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, - aa_layer=None + aa_layer=None, ): stem = nn.Sequential() feature_info = [] @@ -738,7 +738,7 @@ def create_csp_stem( stride=conv_stride, padding=padding if i == 0 else '', act_layer=act_layer, - norm_layer=norm_layer + norm_layer=norm_layer, )) stem_stride *= conv_stride prev_chs = chs @@ -800,7 +800,7 @@ def create_csp_stages( cfg: CspModelCfg, drop_path_rate: float, output_stride: int, - stem_feat: Dict[str, Any] + stem_feat: Dict[str, Any], ): cfg_dict = asdict(cfg.stages) num_stages = len(cfg.stages.depth) @@ -868,12 +868,27 @@ class CspNet(nn.Module): global_pool='avg', drop_rate=0., drop_path_rate=0., - zero_init_last=True + zero_init_last=True, + **kwargs, ): + """ + Args: + cfg (CspModelCfg): Model architecture configuration + in_chans (int): Number of input channels (default: 3) + num_classes (int): Number of classifier classes (default: 1000) + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + global_pool (str): Global pooling type (default: 'avg') + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) 
+ zero_init_last (bool): Zero-init last weight of residual path + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate assert output_stride in (8, 16, 32) + + cfg = replace(cfg, **kwargs) # overlay kwargs onto cfg layer_args = dict( act_layer=cfg.act_layer, norm_layer=cfg.norm_layer, diff --git a/timm/models/nfnet.py b/timm/models/nfnet.py index 48f91b35..f9a90ab3 100644 --- a/timm/models/nfnet.py +++ b/timm/models/nfnet.py @@ -17,7 +17,7 @@ Status: Hacked together by / copyright Ross Wightman, 2021. """ from collections import OrderedDict -from dataclasses import dataclass +from dataclasses import dataclass, replace from functools import partial from typing import Tuple, Optional @@ -159,11 +159,25 @@ class NfCfg: def _nfres_cfg( - depths, channels=(256, 512, 1024, 2048), group_size=None, act_layer='relu', attn_layer=None, attn_kwargs=None): + depths, + channels=(256, 512, 1024, 2048), + group_size=None, + act_layer='relu', + attn_layer=None, + attn_kwargs=None, +): attn_kwargs = attn_kwargs or {} cfg = NfCfg( - depths=depths, channels=channels, stem_type='7x7_pool', stem_chs=64, bottle_ratio=0.25, - group_size=group_size, act_layer=act_layer, attn_layer=attn_layer, attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='7x7_pool', + stem_chs=64, + bottle_ratio=0.25, + group_size=group_size, + act_layer=act_layer, + attn_layer=attn_layer, + attn_kwargs=attn_kwargs, + ) return cfg @@ -171,28 +185,70 @@ def _nfreg_cfg(depths, channels=(48, 104, 208, 440)): num_features = 1280 * channels[-1] // 440 attn_kwargs = dict(rd_ratio=0.5) cfg = NfCfg( - depths=depths, channels=channels, stem_type='3x3', group_size=8, width_factor=0.75, bottle_ratio=2.25, - num_features=num_features, reg=True, attn_layer='se', attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='3x3', + group_size=8, + width_factor=0.75, + bottle_ratio=2.25, + num_features=num_features, + reg=True, + attn_layer='se', + attn_kwargs=attn_kwargs, + ) return cfg def _nfnet_cfg( - depths, channels=(256, 512, 1536, 1536), group_size=128, bottle_ratio=0.5, feat_mult=2., - act_layer='gelu', attn_layer='se', attn_kwargs=None): + depths, + channels=(256, 512, 1536, 1536), + group_size=128, + bottle_ratio=0.5, + feat_mult=2., + act_layer='gelu', + attn_layer='se', + attn_kwargs=None, +): num_features = int(channels[-1] * feat_mult) attn_kwargs = attn_kwargs if attn_kwargs is not None else dict(rd_ratio=0.5) cfg = NfCfg( - depths=depths, channels=channels, stem_type='deep_quad', stem_chs=128, group_size=group_size, - bottle_ratio=bottle_ratio, extra_conv=True, num_features=num_features, act_layer=act_layer, - attn_layer=attn_layer, attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='deep_quad', + stem_chs=128, + group_size=group_size, + bottle_ratio=bottle_ratio, + extra_conv=True, + num_features=num_features, + act_layer=act_layer, + attn_layer=attn_layer, + attn_kwargs=attn_kwargs, + ) return cfg -def _dm_nfnet_cfg(depths, channels=(256, 512, 1536, 1536), act_layer='gelu', skipinit=True): +def _dm_nfnet_cfg( + depths, + channels=(256, 512, 1536, 1536), + act_layer='gelu', + skipinit=True, +): cfg = NfCfg( - depths=depths, channels=channels, stem_type='deep_quad', stem_chs=128, group_size=128, - bottle_ratio=0.5, extra_conv=True, gamma_in_act=True, same_padding=True, skipinit=skipinit, - num_features=int(channels[-1] * 2.0), act_layer=act_layer, attn_layer='se', attn_kwargs=dict(rd_ratio=0.5)) + 
depths=depths, + channels=channels, + stem_type='deep_quad', + stem_chs=128, + group_size=128, + bottle_ratio=0.5, + extra_conv=True, + gamma_in_act=True, + same_padding=True, + skipinit=skipinit, + num_features=int(channels[-1] * 2.0), + act_layer=act_layer, + attn_layer='se', + attn_kwargs=dict(rd_ratio=0.5), + ) return cfg @@ -278,7 +334,14 @@ def act_with_gamma(act_type, gamma: float = 1.): class DownsampleAvg(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, conv_layer=ScaledStdConv2d): + self, + in_chs, + out_chs, + stride=1, + dilation=1, + first_dilation=None, + conv_layer=ScaledStdConv2d, + ): """ AvgPool Downsampling as in 'D' ResNet variants. Support for dilation.""" super(DownsampleAvg, self).__init__() avg_stride = stride if dilation == 1 else 1 @@ -299,9 +362,26 @@ class NormFreeBlock(nn.Module): """ def __init__( - self, in_chs, out_chs=None, stride=1, dilation=1, first_dilation=None, - alpha=1.0, beta=1.0, bottle_ratio=0.25, group_size=None, ch_div=1, reg=True, extra_conv=False, - skipinit=False, attn_layer=None, attn_gain=2.0, act_layer=None, conv_layer=None, drop_path_rate=0.): + self, + in_chs, + out_chs=None, + stride=1, + dilation=1, + first_dilation=None, + alpha=1.0, + beta=1.0, + bottle_ratio=0.25, + group_size=None, + ch_div=1, + reg=True, + extra_conv=False, + skipinit=False, + attn_layer=None, + attn_gain=2.0, + act_layer=None, + conv_layer=None, + drop_path_rate=0., + ): super().__init__() first_dilation = first_dilation or dilation out_chs = out_chs or in_chs @@ -316,7 +396,13 @@ class NormFreeBlock(nn.Module): if in_chs != out_chs or stride != 1 or dilation != first_dilation: self.downsample = DownsampleAvg( - in_chs, out_chs, stride=stride, dilation=dilation, first_dilation=first_dilation, conv_layer=conv_layer) + in_chs, + out_chs, + stride=stride, + dilation=dilation, + first_dilation=first_dilation, + conv_layer=conv_layer, + ) else: self.downsample = None @@ -452,14 +538,33 @@ class NormFreeNet(nn.Module): for what it is/does. Approx 8-10% throughput loss. """ def __init__( - self, cfg: NfCfg, num_classes=1000, in_chans=3, global_pool='avg', output_stride=32, - drop_rate=0., drop_path_rate=0. + self, + cfg: NfCfg, + num_classes=1000, + in_chans=3, + global_pool='avg', + output_stride=32, + drop_rate=0., + drop_path_rate=0., + **kwargs, ): + """ + Args: + cfg (NfCfg): Model architecture configuration + num_classes (int): Number of classifier classes (default: 1000) + in_chans (int): Number of input channels (default: 3) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate self.grad_checkpointing = False + cfg = replace(cfg, **kwargs) assert cfg.act_layer in _nonlin_gamma, f"Please add non-linearity constants for activation ({cfg.act_layer})." 
conv_layer = ScaledStdConv2dSame if cfg.same_padding else ScaledStdConv2d if cfg.gamma_in_act: @@ -472,7 +577,12 @@ class NormFreeNet(nn.Module): stem_chs = make_divisible((cfg.stem_chs or cfg.channels[0]) * cfg.width_factor, cfg.ch_div) self.stem, stem_stride, stem_feat = create_stem( - in_chans, stem_chs, cfg.stem_type, conv_layer=conv_layer, act_layer=act_layer) + in_chans, + stem_chs, + cfg.stem_type, + conv_layer=conv_layer, + act_layer=act_layer, + ) self.feature_info = [stem_feat] drop_path_rates = [x.tolist() for x in torch.linspace(0, drop_path_rate, sum(cfg.depths)).split(cfg.depths)] diff --git a/timm/models/regnet.py b/timm/models/regnet.py index e1cc821b..9d2528f6 100644 --- a/timm/models/regnet.py +++ b/timm/models/regnet.py @@ -14,7 +14,7 @@ Weights from original impl have been modified Hacked together by / Copyright 2020 Ross Wightman """ import math -from dataclasses import dataclass +from dataclasses import dataclass, replace from functools import partial from typing import Optional, Union, Callable @@ -237,7 +237,15 @@ def downsample_avg(in_chs, out_chs, kernel_size=1, stride=1, dilation=1, norm_la def create_shortcut( - downsample_type, in_chs, out_chs, kernel_size, stride, dilation=(1, 1), norm_layer=None, preact=False): + downsample_type, + in_chs, + out_chs, + kernel_size, + stride, + dilation=(1, 1), + norm_layer=None, + preact=False, +): assert downsample_type in ('avg', 'conv1x1', '', None) if in_chs != out_chs or stride != 1 or dilation[0] != dilation[1]: dargs = dict(stride=stride, dilation=dilation[0], norm_layer=norm_layer, preact=preact) @@ -259,9 +267,21 @@ class Bottleneck(nn.Module): """ def __init__( - self, in_chs, out_chs, stride=1, dilation=(1, 1), bottle_ratio=1, group_size=1, se_ratio=0.25, - downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, - drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + stride=1, + dilation=(1, 1), + bottle_ratio=1, + group_size=1, + se_ratio=0.25, + downsample='conv1x1', + linear_out=False, + act_layer=nn.ReLU, + norm_layer=nn.BatchNorm2d, + drop_block=None, + drop_path_rate=0., + ): super(Bottleneck, self).__init__() act_layer = get_act_layer(act_layer) bottleneck_chs = int(round(out_chs * bottle_ratio)) @@ -307,9 +327,21 @@ class PreBottleneck(nn.Module): """ def __init__( - self, in_chs, out_chs, stride=1, dilation=(1, 1), bottle_ratio=1, group_size=1, se_ratio=0.25, - downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, - drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + stride=1, + dilation=(1, 1), + bottle_ratio=1, + group_size=1, + se_ratio=0.25, + downsample='conv1x1', + linear_out=False, + act_layer=nn.ReLU, + norm_layer=nn.BatchNorm2d, + drop_block=None, + drop_path_rate=0., + ): super(PreBottleneck, self).__init__() norm_act_layer = get_norm_act_layer(norm_layer, act_layer) bottleneck_chs = int(round(out_chs * bottle_ratio)) @@ -353,8 +385,16 @@ class RegStage(nn.Module): """Stage (sequence of blocks w/ the same output shape).""" def __init__( - self, depth, in_chs, out_chs, stride, dilation, - drop_path_rates=None, block_fn=Bottleneck, **block_kwargs): + self, + depth, + in_chs, + out_chs, + stride, + dilation, + drop_path_rates=None, + block_fn=Bottleneck, + **block_kwargs, + ): super(RegStage, self).__init__() self.grad_checkpointing = False @@ -367,8 +407,13 @@ class RegStage(nn.Module): name = "b{}".format(i + 1) self.add_module( name, block_fn( - block_in_chs, out_chs, stride=block_stride, 
dilation=block_dilation, - drop_path_rate=dpr, **block_kwargs) + block_in_chs, + out_chs, + stride=block_stride, + dilation=block_dilation, + drop_path_rate=dpr, + **block_kwargs, + ) ) first_dilation = dilation @@ -389,12 +434,35 @@ class RegNet(nn.Module): """ def __init__( - self, cfg: RegNetCfg, in_chans=3, num_classes=1000, output_stride=32, global_pool='avg', - drop_rate=0., drop_path_rate=0., zero_init_last=True): + self, + cfg: RegNetCfg, + in_chans=3, + num_classes=1000, + output_stride=32, + global_pool='avg', + drop_rate=0., + drop_path_rate=0., + zero_init_last=True, + **kwargs, + ): + """ + + Args: + cfg (RegNetCfg): Model architecture configuration + in_chans (int): Number of input channels (default: 3) + num_classes (int): Number of classifier classes (default: 1000) + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + global_pool (str): Global pooling type (default: 'avg') + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + zero_init_last (bool): Zero-init last weight of residual path + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate assert output_stride in (8, 16, 32) + cfg = replace(cfg, **kwargs) # update cfg with extra passed kwargs # Construct the stem stem_width = cfg.stem_width @@ -461,8 +529,12 @@ class RegNet(nn.Module): dict(zip(arg_names, params)) for params in zip(stage_widths, stage_strides, stage_dilations, stage_depths, stage_br, stage_gs, stage_dpr)] common_args = dict( - downsample=cfg.downsample, se_ratio=cfg.se_ratio, linear_out=cfg.linear_out, - act_layer=cfg.act_layer, norm_layer=cfg.norm_layer) + downsample=cfg.downsample, + se_ratio=cfg.se_ratio, + linear_out=cfg.linear_out, + act_layer=cfg.act_layer, + norm_layer=cfg.norm_layer, + ) return per_stage_args, common_args @torch.jit.ignore @@ -518,7 +590,6 @@ def _init_weights(module, name='', zero_init_last=False): def _filter_fn(state_dict): - """ convert patch embedding weight from manual patchify + linear proj to conv""" if 'classy_state_dict' in state_dict: import re state_dict = state_dict['classy_state_dict']['base_model']['model'] diff --git a/timm/models/resnet.py b/timm/models/resnet.py index 2976c1f9..a783e3e1 100644 --- a/timm/models/resnet.py +++ b/timm/models/resnet.py @@ -16,7 +16,7 @@ import torch.nn.functional as F from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from timm.layers import DropBlock2d, DropPath, AvgPool2dSame, BlurPool2d, GroupNorm, create_attn, get_attn, \ - create_classifier + get_act_layer, get_norm_layer, create_classifier from ._builder import build_model_with_cfg from ._manipulate import checkpoint_seq from ._registry import register_model, model_entrypoint @@ -500,7 +500,14 @@ class Bottleneck(nn.Module): def downsample_conv( - in_channels, out_channels, kernel_size, stride=1, dilation=1, first_dilation=None, norm_layer=None): + in_channels, + out_channels, + kernel_size, + stride=1, + dilation=1, + first_dilation=None, + norm_layer=None, +): norm_layer = norm_layer or nn.BatchNorm2d kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size first_dilation = (first_dilation or dilation) if kernel_size > 1 else 1 @@ -514,7 +521,14 @@ def downsample_conv( def downsample_avg( - in_channels, out_channels, kernel_size, stride=1, dilation=1, first_dilation=None, norm_layer=None): + in_channels, + out_channels, + kernel_size, + stride=1, + dilation=1, + 
first_dilation=None, + norm_layer=None, +): norm_layer = norm_layer or nn.BatchNorm2d avg_stride = stride if dilation == 1 else 1 if stride == 1 and dilation == 1: @@ -627,31 +641,6 @@ class ResNet(nn.Module): SENet-154 - 3 layer deep 3x3 stem (same as v1c-v1s), stem_width = 64, cardinality=64, reduction by 2 on width of first bottleneck convolution, 3x3 downsample convs after first block - - Parameters - ---------- - block : Block, class for the residual block. Options are BasicBlockGl, BottleneckGl. - layers : list of int, number of layers in each block - num_classes : int, default 1000, number of classification classes. - in_chans : int, default 3, number of input (color) channels. - output_stride : int, default 32, output stride of the network, 32, 16, or 8. - global_pool : str, Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' - cardinality : int, default 1, number of convolution groups for 3x3 conv in Bottleneck. - base_width : int, default 64, factor determining bottleneck channels. `planes * base_width / 64 * cardinality` - stem_width : int, default 64, number of channels in stem convolutions - stem_type : str, default '' - The type of stem: - * '', default - a single 7x7 conv with a width of stem_width - * 'deep' - three 3x3 convolution layers of widths stem_width, stem_width, stem_width * 2 - * 'deep_tiered' - three 3x3 conv layers of widths stem_width//4 * 3, stem_width, stem_width * 2 - block_reduce_first : int, default 1 - Reduction factor for first convolution output width of residual blocks, 1 for all archs except senets, where 2 - down_kernel_size : int, default 1, kernel size of residual block downsample path, 1x1 for most, 3x3 for senets - avg_down : bool, default False, use average pooling for projection skip connection between stages/downsample. - act_layer : nn.Module, activation layer - norm_layer : nn.Module, normalization layer - aa_layer : nn.Module, anti-aliasing layer - drop_rate : float, default 0. Dropout probability before classifier, for training """ def __init__( @@ -679,6 +668,36 @@ class ResNet(nn.Module): zero_init_last=True, block_args=None, ): + """ + Args: + block (nn.Module): class for the residual block. Options are BasicBlock, Bottleneck. + layers (List[int]) : number of layers in each block + num_classes (int): number of classification classes (default 1000) + in_chans (int): number of input (color) channels. (default 3) + output_stride (int): output stride of the network, 32, 16, or 8. (default 32) + global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg') + cardinality (int): number of convolution groups for 3x3 conv in Bottleneck. (default 1) + base_width (int): bottleneck channels factor. 
`planes * base_width / 64 * cardinality` (default 64) + stem_width (int): number of channels in stem convolutions (default 64) + stem_type (str): The type of stem (default ''): + * '', default - a single 7x7 conv with a width of stem_width + * 'deep' - three 3x3 convolution layers of widths stem_width, stem_width, stem_width * 2 + * 'deep_tiered' - three 3x3 conv layers of widths stem_width//4 * 3, stem_width, stem_width * 2 + replace_stem_pool (bool): replace stem max-pooling layer with a 3x3 stride-2 convolution + block_reduce_first (int): Reduction factor for first convolution output width of residual blocks, + 1 for all archs except senets, where 2 (default 1) + down_kernel_size (int): kernel size of residual block downsample path, + 1x1 for most, 3x3 for senets (default: 1) + avg_down (bool): use avg pooling for projection skip connection between stages/downsample (default False) + act_layer (str, nn.Module): activation layer + norm_layer (str, nn.Module): normalization layer + aa_layer (nn.Module): anti-aliasing layer + drop_rate (float): Dropout probability before classifier, for training (default 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default 0.) + drop_block_rate (float): Drop block rate (default 0.) + zero_init_last (bool): zero-init the last weight in residual path (usually last BN affine weight) + block_args (dict): Extra kwargs to pass through to block module + """ super(ResNet, self).__init__() block_args = block_args or dict() assert output_stride in (8, 16, 32) @@ -686,6 +705,9 @@ class ResNet(nn.Module): self.drop_rate = drop_rate self.grad_checkpointing = False + act_layer = get_act_layer(act_layer) + norm_layer = get_norm_layer(norm_layer) + # Stem deep_stem = 'deep' in stem_type inplanes = stem_width * 2 if deep_stem else 64 diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py index a55f48ac..d696b291 100644 --- a/timm/models/resnetv2.py +++ b/timm/models/resnetv2.py @@ -37,7 +37,7 @@ import torch.nn as nn from timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD from timm.layers import GroupNormAct, BatchNormAct2d, EvoNorm2dB0, EvoNorm2dS0, FilterResponseNormTlu2d, \ - ClassifierHead, DropPath, AvgPool2dSame, create_pool2d, StdConv2d, create_conv2d + ClassifierHead, DropPath, AvgPool2dSame, create_pool2d, StdConv2d, create_conv2d, get_act_layer, get_norm_act_layer from ._builder import build_model_with_cfg from ._manipulate import checkpoint_seq, named_apply, adapt_input_conv from ._registry import register_model @@ -276,8 +276,16 @@ class Bottleneck(nn.Module): class DownsampleConv(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, preact=True, - conv_layer=None, norm_layer=None): + self, + in_chs, + out_chs, + stride=1, + dilation=1, + first_dilation=None, + preact=True, + conv_layer=None, + norm_layer=None, + ): super(DownsampleConv, self).__init__() self.conv = conv_layer(in_chs, out_chs, 1, stride=stride) self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False) @@ -288,8 +296,16 @@ class DownsampleConv(nn.Module): class DownsampleAvg(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, - preact=True, conv_layer=None, norm_layer=None): + self, + in_chs, + out_chs, + stride=1, + dilation=1, + first_dilation=None, + preact=True, + conv_layer=None, + norm_layer=None, + ): """ AvgPool Downsampling as in 'D' ResNet variants. 
This is not in RegNet space but I might experiment.""" super(DownsampleAvg, self).__init__() avg_stride = stride if dilation == 1 else 1 @@ -334,9 +350,18 @@ class ResNetStage(nn.Module): drop_path_rate = block_dpr[block_idx] if block_dpr else 0. stride = stride if block_idx == 0 else 1 self.blocks.add_module(str(block_idx), block_fn( - prev_chs, out_chs, stride=stride, dilation=dilation, bottle_ratio=bottle_ratio, groups=groups, - first_dilation=first_dilation, proj_layer=proj_layer, drop_path_rate=drop_path_rate, - **layer_kwargs, **block_kwargs)) + prev_chs, + out_chs, + stride=stride, + dilation=dilation, + bottle_ratio=bottle_ratio, + groups=groups, + first_dilation=first_dilation, + proj_layer=proj_layer, + drop_path_rate=drop_path_rate, + **layer_kwargs, + **block_kwargs, + )) prev_chs = out_chs first_dilation = dilation proj_layer = None @@ -413,21 +438,49 @@ class ResNetV2(nn.Module): avg_down=False, preact=True, act_layer=nn.ReLU, - conv_layer=StdConv2d, norm_layer=partial(GroupNormAct, num_groups=32), + conv_layer=StdConv2d, drop_rate=0., drop_path_rate=0., zero_init_last=False, ): + """ + Args: + layers (List[int]): number of layers in each block + channels (List[int]): number of channels in each block + num_classes (int): number of classification classes (default 1000) + in_chans (int): number of input (color) channels. (default 3) + global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg') + output_stride (int): output stride of the network, 32, 16, or 8. (default 32) + width_factor (int): channel (width) multiplication factor + stem_chs (int): stem width (default: 64) + stem_type (str): stem type (default: '' == 7x7) + avg_down (bool): average pooling in residual downsampling (default: False) + preact (bool): pre-activation (default: True) + act_layer (Union[str, nn.Module]): activation layer + norm_layer (Union[str, nn.Module]): normalization layer + conv_layer (nn.Module): convolution module + drop_rate: classifier dropout rate (default: 0.) + drop_path_rate: stochastic depth rate (default: 0.) 
+ zero_init_last: zero-init last weight in residual path (default: False) + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate wf = width_factor + norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer) + act_layer = get_act_layer(act_layer) self.feature_info = [] stem_chs = make_div(stem_chs * wf) self.stem = create_resnetv2_stem( - in_chans, stem_chs, stem_type, preact, conv_layer=conv_layer, norm_layer=norm_layer) + in_chans, + stem_chs, + stem_type, + preact, + conv_layer=conv_layer, + norm_layer=norm_layer, + ) stem_feat = ('stem.conv3' if is_stem_deep(stem_type) else 'stem.conv') if preact else 'stem.norm' self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module=stem_feat)) diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index d6865549..9441a3b2 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -1152,8 +1152,8 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs): def vit_tiny_patch16_224(pretrained=False, **kwargs): """ ViT-Tiny (Vit-Ti/16) """ - model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) - model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3) + model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1161,8 +1161,8 @@ def vit_tiny_patch16_224(pretrained=False, **kwargs): def vit_tiny_patch16_384(pretrained=False, **kwargs): """ ViT-Tiny (Vit-Ti/16) @ 384x384. """ - model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) - model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3) + model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1170,8 +1170,8 @@ def vit_tiny_patch16_384(pretrained=False, **kwargs): def vit_small_patch32_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/32) """ - model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1179,8 +1179,8 @@ def vit_small_patch32_224(pretrained=False, **kwargs): def vit_small_patch32_384(pretrained=False, **kwargs): """ ViT-Small (ViT-S/32) at 384x384. 
""" - model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1188,8 +1188,8 @@ def vit_small_patch32_384(pretrained=False, **kwargs): def vit_small_patch16_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/16) """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1197,8 +1197,8 @@ def vit_small_patch16_224(pretrained=False, **kwargs): def vit_small_patch16_384(pretrained=False, **kwargs): """ ViT-Small (ViT-S/16) """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1206,8 +1206,8 @@ def vit_small_patch16_384(pretrained=False, **kwargs): def vit_small_patch8_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/8) """ - model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch8_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch8_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1216,8 +1216,8 @@ def vit_base_patch32_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1226,8 +1226,8 @@ def vit_base_patch32_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1236,8 +1236,8 @@ def vit_base_patch16_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). 
ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1246,8 +1246,8 @@ def vit_base_patch16_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1256,8 +1256,8 @@ def vit_base_patch8_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1265,8 +1265,8 @@ def vit_base_patch8_224(pretrained=False, **kwargs): def vit_large_patch32_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. """ - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1275,8 +1275,8 @@ def vit_large_patch32_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1285,8 +1285,8 @@ def vit_large_patch16_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. 
""" - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1295,8 +1295,8 @@ def vit_large_patch16_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1304,8 +1304,8 @@ def vit_large_patch16_384(pretrained=False, **kwargs): def vit_large_patch14_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) """ - model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1313,8 +1313,8 @@ def vit_large_patch14_224(pretrained=False, **kwargs): def vit_huge_patch14_224(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). 
""" - model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16) + model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1322,8 +1322,8 @@ def vit_huge_patch14_224(pretrained=False, **kwargs): def vit_giant_patch14_224(pretrained=False, **kwargs): """ ViT-Giant (little-g) model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 """ - model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16) + model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1331,8 +1331,9 @@ def vit_giant_patch14_224(pretrained=False, **kwargs): def vit_gigantic_patch14_224(pretrained=False, **kwargs): """ ViT-Gigantic (big-G) model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 """ - model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_gigantic_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16) + model = _create_vision_transformer( + 'vit_gigantic_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1341,8 +1342,9 @@ def vit_base_patch16_224_miil(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). 
Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs) - model = _create_vision_transformer('vit_base_patch16_224_miil', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False) + model = _create_vision_transformer( + 'vit_base_patch16_224_miil', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1352,8 +1354,9 @@ def vit_medium_patch16_gap_240(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_240', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_240', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1363,8 +1366,9 @@ def vit_medium_patch16_gap_256(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_256', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_256', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1374,8 +1378,9 @@ def vit_medium_patch16_gap_384(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_384', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1384,9 +1389,9 @@ def vit_base_patch16_gap_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) w/o class token, w/ avg-pool @ 256x256 """ model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=16, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_base_patch16_gap_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=768, depth=12, num_heads=16, class_token=False, global_pool='avg', fc_norm=False) + model = _create_vision_transformer( + 'vit_base_patch16_gap_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1395,8 +1400,9 @@ def vit_base_patch32_clip_224(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 224x224 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_224', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return 
model @@ -1405,8 +1411,9 @@ def vit_base_patch32_clip_384(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 384x384 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_384', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1415,8 +1422,9 @@ def vit_base_patch32_clip_448(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 448x448 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_448', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_448', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1424,9 +1432,9 @@ def vit_base_patch32_clip_448(pretrained=False, **kwargs): def vit_base_patch16_clip_224(pretrained=False, **kwargs): """ ViT-B/16 CLIP image tower """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch16_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch16_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1434,9 +1442,9 @@ def vit_base_patch16_clip_224(pretrained=False, **kwargs): def vit_base_patch16_clip_384(pretrained=False, **kwargs): """ ViT-B/16 CLIP image tower @ 384x384 """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch16_clip_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch16_clip_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1444,9 +1452,9 @@ def vit_base_patch16_clip_384(pretrained=False, **kwargs): def vit_large_patch14_clip_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) CLIP image tower """ - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_large_patch14_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_large_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1454,9 +1462,9 @@ def vit_large_patch14_clip_224(pretrained=False, **kwargs): def vit_large_patch14_clip_336(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) CLIP image tower @ 336x336 """ - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = 
_create_vision_transformer('vit_large_patch14_clip_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_large_patch14_clip_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1464,9 +1472,9 @@ def vit_large_patch14_clip_336(pretrained=False, **kwargs): def vit_huge_patch14_clip_224(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) CLIP image tower. """ - model_kwargs = dict( - patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_huge_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1474,9 +1482,9 @@ def vit_huge_patch14_clip_224(pretrained=False, **kwargs): def vit_huge_patch14_clip_336(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) CLIP image tower @ 336x336 """ - model_kwargs = dict( - patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_clip_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_huge_patch14_clip_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1486,9 +1494,9 @@ def vit_giant_patch14_clip_224(pretrained=False, **kwargs): Pretrained weights from CLIP image tower. 
""" model_kwargs = dict( - patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, - pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_giant_patch14_clip_224', pretrained=pretrained, **model_kwargs) + patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_giant_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1498,8 +1506,9 @@ def vit_giant_patch14_clip_224(pretrained=False, **kwargs): def vit_base_patch32_plus_256(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32+) """ - model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_base_patch32_plus_256', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5) + model = _create_vision_transformer( + 'vit_base_patch32_plus_256', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1507,8 +1516,9 @@ def vit_base_patch32_plus_256(pretrained=False, **kwargs): def vit_base_patch16_plus_240(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16+) """ - model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_base_patch16_plus_240', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5) + model = _create_vision_transformer( + 'vit_base_patch16_plus_240', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1517,9 +1527,10 @@ def vit_base_patch16_rpn_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) w/ residual post-norm """ model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, class_token=False, - block_fn=ResPostBlock, global_pool=kwargs.pop('global_pool', 'avg'), **kwargs) - model = _create_vision_transformer('vit_base_patch16_rpn_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, + class_token=False, block_fn=ResPostBlock, global_pool='avg') + model = _create_vision_transformer( + 'vit_base_patch16_rpn_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1529,8 +1540,9 @@ def vit_small_patch16_36x1_224(pretrained=False, **kwargs): Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795 Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow. """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_small_patch16_36x1_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5) + model = _create_vision_transformer( + 'vit_small_patch16_36x1_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1541,8 +1553,9 @@ def vit_small_patch16_18x2_224(pretrained=False, **kwargs): Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow. 
""" model_kwargs = dict( - patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock, **kwargs) - model = _create_vision_transformer('vit_small_patch16_18x2_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock) + model = _create_vision_transformer( + 'vit_small_patch16_18x2_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1551,27 +1564,26 @@ def vit_base_patch16_18x2_224(pretrained=False, **kwargs): """ ViT-Base w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove. Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795 """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock, **kwargs) - model = _create_vision_transformer('vit_base_patch16_18x2_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock) + model = _create_vision_transformer( + 'vit_base_patch16_18x2_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @register_model def eva_large_patch14_196(pretrained=False, **kwargs): """ EVA-large model https://arxiv.org/abs/2211.07636 /via MAE MIM pretrain""" - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg', **kwargs) - model = _create_vision_transformer('eva_large_patch14_196', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg') + model = _create_vision_transformer( + 'eva_large_patch14_196', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @register_model def eva_large_patch14_336(pretrained=False, **kwargs): """ EVA-large model https://arxiv.org/abs/2211.07636 via MAE MIM pretrain""" - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg', **kwargs) - model = _create_vision_transformer('eva_large_patch14_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg') + model = _create_vision_transformer('eva_large_patch14_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1579,8 +1591,8 @@ def eva_large_patch14_336(pretrained=False, **kwargs): def flexivit_small(pretrained=False, **kwargs): """ FlexiViT-Small """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True, **kwargs) - model = _create_vision_transformer('flexivit_small', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True) + model = _create_vision_transformer('flexivit_small', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1588,8 +1600,8 @@ def flexivit_small(pretrained=False, **kwargs): def flexivit_base(pretrained=False, **kwargs): """ FlexiViT-Base """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True, **kwargs) - model = _create_vision_transformer('flexivit_base', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True) + model = _create_vision_transformer('flexivit_base', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1597,6 
+1609,6 @@ def flexivit_base(pretrained=False, **kwargs): def flexivit_large(pretrained=False, **kwargs): """ FlexiViT-Large """ - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True, **kwargs) - model = _create_vision_transformer('flexivit_large', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True) + model = _create_vision_transformer('flexivit_large', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model diff --git a/timm/models/vovnet.py b/timm/models/vovnet.py index bf0e4f89..8aea5802 100644 --- a/timm/models/vovnet.py +++ b/timm/models/vovnet.py @@ -181,8 +181,18 @@ class SequentialAppendList(nn.Sequential): class OsaBlock(nn.Module): def __init__( - self, in_chs, mid_chs, out_chs, layer_per_block, residual=False, - depthwise=False, attn='', norm_layer=BatchNormAct2d, act_layer=nn.ReLU, drop_path=None): + self, + in_chs, + mid_chs, + out_chs, + layer_per_block, + residual=False, + depthwise=False, + attn='', + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_path=None, + ): super(OsaBlock, self).__init__() self.residual = residual @@ -232,9 +242,20 @@ class OsaBlock(nn.Module): class OsaStage(nn.Module): def __init__( - self, in_chs, mid_chs, out_chs, block_per_stage, layer_per_block, downsample=True, - residual=True, depthwise=False, attn='ese', norm_layer=BatchNormAct2d, act_layer=nn.ReLU, - drop_path_rates=None): + self, + in_chs, + mid_chs, + out_chs, + block_per_stage, + layer_per_block, + downsample=True, + residual=True, + depthwise=False, + attn='ese', + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_path_rates=None, + ): super(OsaStage, self).__init__() self.grad_checkpointing = False @@ -270,16 +291,38 @@ class OsaStage(nn.Module): class VovNet(nn.Module): def __init__( - self, cfg, in_chans=3, num_classes=1000, global_pool='avg', drop_rate=0., stem_stride=4, - output_stride=32, norm_layer=BatchNormAct2d, act_layer=nn.ReLU, drop_path_rate=0.): - """ VovNet (v2) + self, + cfg, + in_chans=3, + num_classes=1000, + global_pool='avg', + output_stride=32, + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_rate=0., + drop_path_rate=0., + **kwargs, + ): + """ + Args: + cfg (dict): Model architecture configuration + in_chans (int): Number of input channels (default: 3) + num_classes (int): Number of classifier classes (default: 1000) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + norm_layer (Union[str, nn.Module]): normalization layer + act_layer (Union[str, nn.Module]): activation layer + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) 
+ kwargs (dict): Extra kwargs overlaid onto cfg """ super(VovNet, self).__init__() self.num_classes = num_classes self.drop_rate = drop_rate - assert stem_stride in (4, 2) assert output_stride == 32 # FIXME support dilation + cfg = dict(cfg, **kwargs) + stem_stride = cfg.get("stem_stride", 4) stem_chs = cfg["stem_chs"] stage_conv_chs = cfg["stage_conv_chs"] stage_out_chs = cfg["stage_out_chs"] @@ -307,9 +350,15 @@ class VovNet(nn.Module): for i in range(4): # num_stages downsample = stem_stride == 2 or i > 0 # first stage has no stride/downsample if stem_stride is 4 stages += [OsaStage( - in_ch_list[i], stage_conv_chs[i], stage_out_chs[i], block_per_stage[i], layer_per_block, - downsample=downsample, drop_path_rates=stage_dpr[i], **stage_args) - ] + in_ch_list[i], + stage_conv_chs[i], + stage_out_chs[i], + block_per_stage[i], + layer_per_block, + downsample=downsample, + drop_path_rates=stage_dpr[i], + **stage_args, + )] self.num_features = stage_out_chs[i] current_stride *= 2 if downsample else 1 self.feature_info += [dict(num_chs=self.num_features, reduction=current_stride, module=f'stages.{i}')] @@ -324,7 +373,6 @@ class VovNet(nn.Module): elif isinstance(m, nn.Linear): nn.init.zeros_(m.bias) - @torch.jit.ignore def group_matcher(self, coarse=False): return dict( diff --git a/timm/utils/__init__.py b/timm/utils/__init__.py index a9ff0c78..7727adff 100644 --- a/timm/utils/__init__.py +++ b/timm/utils/__init__.py @@ -8,7 +8,7 @@ from .distributed import distribute_bn, reduce_tensor, init_distributed_device,\ from .jit import set_jit_legacy, set_jit_fuser from .log import setup_default_logging, FormatterNoInfo from .metrics import AverageMeter, accuracy -from .misc import natural_key, add_bool_arg +from .misc import natural_key, add_bool_arg, ParseKwargs from .model import unwrap_model, get_state_dict, freeze, unfreeze from .model_ema import ModelEma, ModelEmaV2 from .random import random_seed diff --git a/timm/utils/misc.py b/timm/utils/misc.py index 39c0097c..326a50f7 100644 --- a/timm/utils/misc.py +++ b/timm/utils/misc.py @@ -2,6 +2,8 @@ Hacked together by / Copyright 2020 Ross Wightman """ +import argparse +import ast import re @@ -16,3 +18,15 @@ def add_bool_arg(parser, name, default=False, help=''): group.add_argument('--' + name, dest=dest_name, action='store_true', help=help) group.add_argument('--no-' + name, dest=dest_name, action='store_false', help=help) parser.set_defaults(**{dest_name: default}) + + +class ParseKwargs(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + kw = {} + for value in values: + key, value = value.split('=') + try: + kw[key] = ast.literal_eval(value) + except ValueError: + kw[key] = str(value) # fallback to string (avoid need to escape on command line) + setattr(namespace, self.dest, kw) diff --git a/train.py b/train.py index e51d7c90..9f450ab8 100755 --- a/train.py +++ b/train.py @@ -89,56 +89,58 @@ parser.add_argument('--data-dir', metavar='DIR', parser.add_argument('--dataset', metavar='NAME', default='', help='dataset type + name ("/") (default: ImageFolder or ImageTar if empty)') group.add_argument('--train-split', metavar='NAME', default='train', - help='dataset train split (default: train)') + help='dataset train split (default: train)') group.add_argument('--val-split', metavar='NAME', default='validation', - help='dataset validation split (default: validation)') + help='dataset validation split (default: validation)') group.add_argument('--dataset-download', action='store_true', default=False, - 
help='Allow download of dataset for torch/ and tfds/ datasets that support it.') + help='Allow download of dataset for torch/ and tfds/ datasets that support it.') group.add_argument('--class-map', default='', type=str, metavar='FILENAME', - help='path to class to idx mapping file (default: "")') + help='path to class to idx mapping file (default: "")') # Model parameters group = parser.add_argument_group('Model parameters') group.add_argument('--model', default='resnet50', type=str, metavar='MODEL', - help='Name of model to train (default: "resnet50")') + help='Name of model to train (default: "resnet50")') group.add_argument('--pretrained', action='store_true', default=False, - help='Start with pretrained version of specified network (if avail)') + help='Start with pretrained version of specified network (if avail)') group.add_argument('--initial-checkpoint', default='', type=str, metavar='PATH', - help='Initialize model from this checkpoint (default: none)') + help='Initialize model from this checkpoint (default: none)') group.add_argument('--resume', default='', type=str, metavar='PATH', - help='Resume full model and optimizer state from checkpoint (default: none)') + help='Resume full model and optimizer state from checkpoint (default: none)') group.add_argument('--no-resume-opt', action='store_true', default=False, - help='prevent resume of optimizer state when resuming model') + help='prevent resume of optimizer state when resuming model') group.add_argument('--num-classes', type=int, default=None, metavar='N', - help='number of label classes (Model default if None)') + help='number of label classes (Model default if None)') group.add_argument('--gp', default=None, type=str, metavar='POOL', - help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.') + help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.') group.add_argument('--img-size', type=int, default=None, metavar='N', - help='Image size (default: None => model default)') + help='Image size (default: None => model default)') group.add_argument('--in-chans', type=int, default=None, metavar='N', - help='Image input channels (default: None => 3)') + help='Image input channels (default: None => 3)') group.add_argument('--input-size', default=None, nargs=3, type=int, - metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') + metavar='N N N', + help='Input all image dimensions (d h w, e.g. 
--input-size 3 224 224), uses model default if empty') group.add_argument('--crop-pct', default=None, type=float, - metavar='N', help='Input image center crop percent (for validation only)') + metavar='N', help='Input image center crop percent (for validation only)') group.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN', - help='Override mean pixel value of dataset') + help='Override mean pixel value of dataset') group.add_argument('--std', type=float, nargs='+', default=None, metavar='STD', - help='Override std deviation of dataset') + help='Override std deviation of dataset') group.add_argument('--interpolation', default='', type=str, metavar='NAME', - help='Image resize interpolation type (overrides model)') + help='Image resize interpolation type (overrides model)') group.add_argument('-b', '--batch-size', type=int, default=128, metavar='N', - help='Input batch size for training (default: 128)') + help='Input batch size for training (default: 128)') group.add_argument('-vb', '--validation-batch-size', type=int, default=None, metavar='N', - help='Validation batch size override (default: None)') + help='Validation batch size override (default: None)') group.add_argument('--channels-last', action='store_true', default=False, - help='Use channels_last memory layout') + help='Use channels_last memory layout') group.add_argument('--fuser', default='', type=str, - help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") + help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") group.add_argument('--grad-checkpointing', action='store_true', default=False, - help='Enable gradient checkpointing through model blocks/stages') + help='Enable gradient checkpointing through model blocks/stages') group.add_argument('--fast-norm', default=False, action='store_true', - help='enable experimental fast-norm') + help='enable experimental fast-norm') +group.add_argument('--model-kwargs', nargs='*', default={}, action=utils.ParseKwargs) scripting_group = group.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', dest='torchscript', action='store_true', @@ -151,199 +153,200 @@ scripting_group.add_argument('--aot-autograd', default=False, action='store_true # Optimizer parameters group = parser.add_argument_group('Optimizer parameters') group.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER', - help='Optimizer (default: "sgd")') + help='Optimizer (default: "sgd")') group.add_argument('--opt-eps', default=None, type=float, metavar='EPSILON', - help='Optimizer Epsilon (default: None, use opt default)') + help='Optimizer Epsilon (default: None, use opt default)') group.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA', - help='Optimizer Betas (default: None, use opt default)') + help='Optimizer Betas (default: None, use opt default)') group.add_argument('--momentum', type=float, default=0.9, metavar='M', - help='Optimizer momentum (default: 0.9)') + help='Optimizer momentum (default: 0.9)') group.add_argument('--weight-decay', type=float, default=2e-5, - help='weight decay (default: 2e-5)') + help='weight decay (default: 2e-5)') group.add_argument('--clip-grad', type=float, default=None, metavar='NORM', - help='Clip gradient norm (default: None, no clipping)') + help='Clip gradient norm (default: None, no clipping)') group.add_argument('--clip-mode', type=str, default='norm', - help='Gradient clipping mode. One of ("norm", "value", "agc")') + help='Gradient clipping mode. 
One of ("norm", "value", "agc")') group.add_argument('--layer-decay', type=float, default=None, - help='layer-wise learning rate decay (default: None)') + help='layer-wise learning rate decay (default: None)') +group.add_argument('--opt-kwargs', nargs='*', default={}, action=utils.ParseKwargs) # Learning rate schedule parameters group = parser.add_argument_group('Learning rate schedule parameters') group.add_argument('--sched', type=str, default='cosine', metavar='SCHEDULER', - help='LR scheduler (default: "step"') + help='LR scheduler (default: "step"') group.add_argument('--sched-on-updates', action='store_true', default=False, - help='Apply LR scheduler step on update instead of epoch end.') + help='Apply LR scheduler step on update instead of epoch end.') group.add_argument('--lr', type=float, default=None, metavar='LR', - help='learning rate, overrides lr-base if set (default: None)') + help='learning rate, overrides lr-base if set (default: None)') group.add_argument('--lr-base', type=float, default=0.1, metavar='LR', - help='base learning rate: lr = lr_base * global_batch_size / base_size') + help='base learning rate: lr = lr_base * global_batch_size / base_size') group.add_argument('--lr-base-size', type=int, default=256, metavar='DIV', - help='base learning rate batch size (divisor, default: 256).') + help='base learning rate batch size (divisor, default: 256).') group.add_argument('--lr-base-scale', type=str, default='', metavar='SCALE', - help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') + help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') group.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct', - help='learning rate noise on/off epoch percentages') + help='learning rate noise on/off epoch percentages') group.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT', - help='learning rate noise limit percent (default: 0.67)') + help='learning rate noise limit percent (default: 0.67)') group.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV', - help='learning rate noise std-dev (default: 1.0)') + help='learning rate noise std-dev (default: 1.0)') group.add_argument('--lr-cycle-mul', type=float, default=1.0, metavar='MULT', - help='learning rate cycle len multiplier (default: 1.0)') + help='learning rate cycle len multiplier (default: 1.0)') group.add_argument('--lr-cycle-decay', type=float, default=0.5, metavar='MULT', - help='amount to decay each learning rate cycle (default: 0.5)') + help='amount to decay each learning rate cycle (default: 0.5)') group.add_argument('--lr-cycle-limit', type=int, default=1, metavar='N', - help='learning rate cycle limit, cycles enabled if > 1') + help='learning rate cycle limit, cycles enabled if > 1') group.add_argument('--lr-k-decay', type=float, default=1.0, - help='learning rate k-decay for cosine/poly (default: 1.0)') + help='learning rate k-decay for cosine/poly (default: 1.0)') group.add_argument('--warmup-lr', type=float, default=1e-5, metavar='LR', - help='warmup learning rate (default: 1e-5)') + help='warmup learning rate (default: 1e-5)') group.add_argument('--min-lr', type=float, default=0, metavar='LR', - help='lower lr bound for cyclic schedulers that hit 0 (default: 0)') + help='lower lr bound for cyclic schedulers that hit 0 (default: 0)') group.add_argument('--epochs', type=int, default=300, metavar='N', - help='number of epochs to train (default: 300)') + help='number of epochs 
to train (default: 300)') group.add_argument('--epoch-repeats', type=float, default=0., metavar='N', - help='epoch repeat multiplier (number of times to repeat dataset epoch per train epoch).') + help='epoch repeat multiplier (number of times to repeat dataset epoch per train epoch).') group.add_argument('--start-epoch', default=None, type=int, metavar='N', - help='manual epoch number (useful on restarts)') + help='manual epoch number (useful on restarts)') group.add_argument('--decay-milestones', default=[90, 180, 270], type=int, nargs='+', metavar="MILESTONES", - help='list of decay epoch indices for multistep lr. must be increasing') + help='list of decay epoch indices for multistep lr. must be increasing') group.add_argument('--decay-epochs', type=float, default=90, metavar='N', - help='epoch interval to decay LR') + help='epoch interval to decay LR') group.add_argument('--warmup-epochs', type=int, default=5, metavar='N', - help='epochs to warmup LR, if scheduler supports') + help='epochs to warmup LR, if scheduler supports') group.add_argument('--warmup-prefix', action='store_true', default=False, - help='Exclude warmup period from decay schedule.'), + help='Exclude warmup period from decay schedule.'), group.add_argument('--cooldown-epochs', type=int, default=0, metavar='N', - help='epochs to cooldown LR at min_lr, after cyclic schedule ends') + help='epochs to cooldown LR at min_lr, after cyclic schedule ends') group.add_argument('--patience-epochs', type=int, default=10, metavar='N', - help='patience epochs for Plateau LR scheduler (default: 10)') + help='patience epochs for Plateau LR scheduler (default: 10)') group.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE', - help='LR decay rate (default: 0.1)') + help='LR decay rate (default: 0.1)') # Augmentation & regularization parameters group = parser.add_argument_group('Augmentation and regularization parameters') group.add_argument('--no-aug', action='store_true', default=False, - help='Disable all training augmentation, override other train aug args') + help='Disable all training augmentation, override other train aug args') group.add_argument('--scale', type=float, nargs='+', default=[0.08, 1.0], metavar='PCT', - help='Random resize scale (default: 0.08 1.0)') -group.add_argument('--ratio', type=float, nargs='+', default=[3./4., 4./3.], metavar='RATIO', - help='Random resize aspect ratio (default: 0.75 1.33)') + help='Random resize scale (default: 0.08 1.0)') +group.add_argument('--ratio', type=float, nargs='+', default=[3. / 4., 4. / 3.], metavar='RATIO', + help='Random resize aspect ratio (default: 0.75 1.33)') group.add_argument('--hflip', type=float, default=0.5, - help='Horizontal flip training aug probability') + help='Horizontal flip training aug probability') group.add_argument('--vflip', type=float, default=0., - help='Vertical flip training aug probability') + help='Vertical flip training aug probability') group.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT', - help='Color jitter factor (default: 0.4)') + help='Color jitter factor (default: 0.4)') group.add_argument('--aa', type=str, default=None, metavar='NAME', - help='Use AutoAugment policy. "v0" or "original". (default: None)'), + help='Use AutoAugment policy. "v0" or "original". 
(default: None)'), group.add_argument('--aug-repeats', type=float, default=0, - help='Number of augmentation repetitions (distributed training only) (default: 0)') + help='Number of augmentation repetitions (distributed training only) (default: 0)') group.add_argument('--aug-splits', type=int, default=0, - help='Number of augmentation splits (default: 0, valid: 0 or >=2)') + help='Number of augmentation splits (default: 0, valid: 0 or >=2)') group.add_argument('--jsd-loss', action='store_true', default=False, - help='Enable Jensen-Shannon Divergence + CE loss. Use with `--aug-splits`.') + help='Enable Jensen-Shannon Divergence + CE loss. Use with `--aug-splits`.') group.add_argument('--bce-loss', action='store_true', default=False, - help='Enable BCE loss w/ Mixup/CutMix use.') + help='Enable BCE loss w/ Mixup/CutMix use.') group.add_argument('--bce-target-thresh', type=float, default=None, - help='Threshold for binarizing softened BCE targets (default: None, disabled)') + help='Threshold for binarizing softened BCE targets (default: None, disabled)') group.add_argument('--reprob', type=float, default=0., metavar='PCT', - help='Random erase prob (default: 0.)') + help='Random erase prob (default: 0.)') group.add_argument('--remode', type=str, default='pixel', - help='Random erase mode (default: "pixel")') + help='Random erase mode (default: "pixel")') group.add_argument('--recount', type=int, default=1, - help='Random erase count (default: 1)') + help='Random erase count (default: 1)') group.add_argument('--resplit', action='store_true', default=False, - help='Do not random erase first (clean) augmentation split') + help='Do not random erase first (clean) augmentation split') group.add_argument('--mixup', type=float, default=0.0, - help='mixup alpha, mixup enabled if > 0. (default: 0.)') + help='mixup alpha, mixup enabled if > 0. (default: 0.)') group.add_argument('--cutmix', type=float, default=0.0, - help='cutmix alpha, cutmix enabled if > 0. (default: 0.)') + help='cutmix alpha, cutmix enabled if > 0. (default: 0.)') group.add_argument('--cutmix-minmax', type=float, nargs='+', default=None, - help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)') + help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)') group.add_argument('--mixup-prob', type=float, default=1.0, - help='Probability of performing mixup or cutmix when either/both is enabled') + help='Probability of performing mixup or cutmix when either/both is enabled') group.add_argument('--mixup-switch-prob', type=float, default=0.5, - help='Probability of switching to cutmix when both mixup and cutmix enabled') + help='Probability of switching to cutmix when both mixup and cutmix enabled') group.add_argument('--mixup-mode', type=str, default='batch', - help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"') + help='How to apply mixup/cutmix params. 
Per "batch", "pair", or "elem"') group.add_argument('--mixup-off-epoch', default=0, type=int, metavar='N', - help='Turn off mixup after this epoch, disabled if 0 (default: 0)') + help='Turn off mixup after this epoch, disabled if 0 (default: 0)') group.add_argument('--smoothing', type=float, default=0.1, - help='Label smoothing (default: 0.1)') + help='Label smoothing (default: 0.1)') group.add_argument('--train-interpolation', type=str, default='random', - help='Training interpolation (random, bilinear, bicubic default: "random")') + help='Training interpolation (random, bilinear, bicubic default: "random")') group.add_argument('--drop', type=float, default=0.0, metavar='PCT', - help='Dropout rate (default: 0.)') + help='Dropout rate (default: 0.)') group.add_argument('--drop-connect', type=float, default=None, metavar='PCT', - help='Drop connect rate, DEPRECATED, use drop-path (default: None)') + help='Drop connect rate, DEPRECATED, use drop-path (default: None)') group.add_argument('--drop-path', type=float, default=None, metavar='PCT', - help='Drop path rate (default: None)') + help='Drop path rate (default: None)') group.add_argument('--drop-block', type=float, default=None, metavar='PCT', - help='Drop block rate (default: None)') + help='Drop block rate (default: None)') # Batch norm parameters (only works with gen_efficientnet based models currently) group = parser.add_argument_group('Batch norm parameters', 'Only works with gen_efficientnet based models currently.') group.add_argument('--bn-momentum', type=float, default=None, - help='BatchNorm momentum override (if not None)') + help='BatchNorm momentum override (if not None)') group.add_argument('--bn-eps', type=float, default=None, - help='BatchNorm epsilon override (if not None)') + help='BatchNorm epsilon override (if not None)') group.add_argument('--sync-bn', action='store_true', - help='Enable NVIDIA Apex or Torch synchronized BatchNorm.') + help='Enable NVIDIA Apex or Torch synchronized BatchNorm.') group.add_argument('--dist-bn', type=str, default='reduce', - help='Distribute BatchNorm stats between nodes after each epoch ("broadcast", "reduce", or "")') + help='Distribute BatchNorm stats between nodes after each epoch ("broadcast", "reduce", or "")') group.add_argument('--split-bn', action='store_true', - help='Enable separate BN layers per augmentation split.') + help='Enable separate BN layers per augmentation split.') # Model Exponential Moving Average group = parser.add_argument_group('Model exponential moving average parameters') group.add_argument('--model-ema', action='store_true', default=False, - help='Enable tracking moving average of model weights') + help='Enable tracking moving average of model weights') group.add_argument('--model-ema-force-cpu', action='store_true', default=False, - help='Force ema to be tracked on CPU, rank=0 node only. Disables EMA validation.') + help='Force ema to be tracked on CPU, rank=0 node only. 
Disables EMA validation.') + help='Force ema to be tracked on CPU, rank=0 node only. Disables EMA validation.') group.add_argument('--model-ema-decay', type=float, default=0.9998, - help='decay factor for model weights moving average (default: 0.9998)') + help='decay factor for model weights moving average (default: 0.9998)') # Misc group = parser.add_argument_group('Miscellaneous parameters') group.add_argument('--seed', type=int, default=42, metavar='S', - help='random seed (default: 42)') + help='random seed (default: 42)') group.add_argument('--worker-seeding', type=str, default='all', - help='worker seed mode (default: all)') + help='worker seed mode (default: all)') group.add_argument('--log-interval', type=int, default=50, metavar='N', - help='how many batches to wait before logging training status') + help='how many batches to wait before logging training status') group.add_argument('--recovery-interval', type=int, default=0, metavar='N', - help='how many batches to wait before writing recovery checkpoint') + help='how many batches to wait before writing recovery checkpoint') group.add_argument('--checkpoint-hist', type=int, default=10, metavar='N', - help='number of checkpoints to keep (default: 10)') + help='number of checkpoints to keep (default: 10)') group.add_argument('-j', '--workers', type=int, default=4, metavar='N', - help='how many training processes to use (default: 4)') + help='how many training processes to use (default: 4)') group.add_argument('--save-images', action='store_true', default=False, - help='save images of input bathes every log interval for debugging') + help='save images of input batches every log interval for debugging') group.add_argument('--amp', action='store_true', default=False, - help='use NVIDIA Apex AMP or Native AMP for mixed precision training') + help='use NVIDIA Apex AMP or Native AMP for mixed precision training') group.add_argument('--amp-dtype', default='float16', type=str, - help='lower precision AMP dtype (default: float16)') + help='lower precision AMP dtype (default: float16)') group.add_argument('--amp-impl', default='native', type=str, - help='AMP impl to use, "native" or "apex" (default: native)') + help='AMP impl to use, "native" or "apex" (default: native)') group.add_argument('--no-ddp-bb', action='store_true', default=False, - help='Force broadcast buffers for native DDP to off.') + help='Force broadcast buffers for native DDP to off.') group.add_argument('--pin-mem', action='store_true', default=False, - help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.') + help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.') group.add_argument('--no-prefetcher', action='store_true', default=False, - help='disable fast prefetcher') + help='disable fast prefetcher') group.add_argument('--output', default='', type=str, metavar='PATH', - help='path to output folder (default: none, current dir)') + help='path to output folder (default: none, current dir)') group.add_argument('--experiment', default='', type=str, metavar='NAME', - help='name of train experiment, name of sub-folder for output') + help='name of train experiment, name of sub-folder for output') group.add_argument('--eval-metric', default='top1', type=str, metavar='EVAL_METRIC', - help='Best metric (default: "top1"') + help='Best metric (default: "top1")') group.add_argument('--tta', type=int, default=0, metavar='N', - help='Test/inference time augmentation (oversampling) factor. 
0=None (default: 0)') group.add_argument("--local_rank", default=0, type=int) group.add_argument('--use-multi-epochs-loader', action='store_true', default=False, - help='use the multi-epochs-loader to save time at the beginning of every epoch') + help='use the multi-epochs-loader to save time at the beginning of every epoch') group.add_argument('--log-wandb', action='store_true', default=False, - help='log training and validation metrics to wandb') + help='log training and validation metrics to wandb') def _parse_args(): @@ -371,8 +374,6 @@ def main(): torch.backends.cuda.matmul.allow_tf32 = True torch.backends.cudnn.benchmark = True - if args.data and not args.data_dir: - args.data_dir = args.data args.prefetcher = not args.no_prefetcher device = utils.init_distributed_device(args) if args.distributed: @@ -383,14 +384,6 @@ def main(): _logger.info(f'Training with a single process on 1 device ({args.device}).') assert args.rank >= 0 - if utils.is_primary(args) and args.log_wandb: - if has_wandb: - wandb.init(project=args.experiment, config=args) - else: - _logger.warning( - "You've requested to log metrics to wandb but package not found. " - "Metrics not being logged to wandb, try `pip install wandb`") - # resolve AMP arguments based on PyTorch / Apex availability use_amp = None amp_dtype = torch.float16 @@ -432,6 +425,7 @@ def main(): bn_eps=args.bn_eps, scriptable=args.torchscript, checkpoint_path=args.initial_checkpoint, + **args.model_kwargs, ) if args.num_classes is None: assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' @@ -504,7 +498,11 @@ def main(): f'Learning rate ({args.lr}) calculated from base learning rate ({args.lr_base}) ' f'and global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.') - optimizer = create_optimizer_v2(model, **optimizer_kwargs(cfg=args)) + optimizer = create_optimizer_v2( + model, + **optimizer_kwargs(cfg=args), + **args.opt_kwargs, + ) # setup automatic mixed-precision (AMP) loss scaling and op casting amp_autocast = suppress # do nothing @@ -559,6 +557,8 @@ def main(): # NOTE: EMA model does not need to be wrapped by DDP # create the train and eval datasets + if args.data and not args.data_dir: + args.data_dir = args.data dataset_train = create_dataset( args.dataset, root=args.data_dir, @@ -712,6 +712,14 @@ def main(): with open(os.path.join(output_dir, 'args.yaml'), 'w') as f: f.write(args_text) + if utils.is_primary(args) and args.log_wandb: + if has_wandb: + wandb.init(project=args.experiment, config=args) + else: + _logger.warning( + "You've requested to log metrics to wandb but package not found. 
" + "Metrics not being logged to wandb, try `pip install wandb`") + # setup learning rate schedule and starting epoch updates_per_epoch = len(loader_train) lr_scheduler, num_epochs = create_scheduler_v2( diff --git a/validate.py b/validate.py index 4669fbac..b606103d 100755 --- a/validate.py +++ b/validate.py @@ -26,7 +26,7 @@ from timm.data import create_dataset, create_loader, resolve_data_config, RealLa from timm.layers import apply_test_time_pool, set_fast_norm from timm.models import create_model, load_checkpoint, is_model, list_models from timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_fuser, \ - decay_batch_step, check_batch_size_retry + decay_batch_step, check_batch_size_retry, ParseKwargs try: from apex import amp @@ -71,6 +71,8 @@ parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N', help='mini-batch size (default: 256)') parser.add_argument('--img-size', default=None, type=int, metavar='N', help='Input image dimension, uses model default if empty') +parser.add_argument('--in-chans', type=int, default=None, metavar='N', + help='Image input channels (default: None => 3)') parser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') parser.add_argument('--use-train-size', action='store_true', default=False, @@ -123,6 +125,8 @@ parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") parser.add_argument('--fast-norm', default=False, action='store_true', help='enable experimental fast-norm') +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) + scripting_group = parser.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', default=False, action='store_true', @@ -181,13 +185,20 @@ def validate(args): set_fast_norm() # create model + in_chans = 3 + if args.in_chans is not None: + in_chans = args.in_chans + elif args.input_size is not None: + in_chans = args.input_size[0] + model = create_model( args.model, pretrained=args.pretrained, num_classes=args.num_classes, - in_chans=3, + in_chans=in_chans, global_pool=args.gp, scriptable=args.torchscript, + **args.model_kwargs, ) if args.num_classes is None: assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' 
@@ -232,8 +243,9 @@ def validate(args): criterion = nn.CrossEntropyLoss().to(device) + root_dir = args.data or args.data_dir dataset = create_dataset( - root=args.data, + root=root_dir, name=args.dataset, split=args.split, download=args.dataset_download, @@ -389,7 +401,7 @@ def main(): if args.model == 'all': # validate all models in a list of names with pretrained checkpoints args.pretrained = True - model_names = list_models(pretrained=True, exclude_filters=['*_in21k', '*_in22k', '*_dino']) + model_names = list_models('convnext*', pretrained=True, exclude_filters=['*_in21k', '*_in22k', '*in12k', '*_dino', '*fcmae']) model_cfgs = [(n, '') for n in model_names] elif not is_model(args.model): # model name doesn't exist, try as wildcard filter From 60ebb6cefaec745ef6d118085b7a4b29693ee1cc Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 6 Jan 2023 14:35:26 -0800 Subject: [PATCH 07/34] Re-order vit pretrained entries for more sensible default weights (no .tag specified) --- timm/models/vision_transformer.py | 178 +++++++++++++++--------------- 1 file changed, 88 insertions(+), 90 deletions(-) diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index 9441a3b2..8ffb1200 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -697,6 +697,13 @@ def _cfg(url='', **kwargs): default_cfgs = generate_default_cfgs({ + # re-finetuned augreg 21k FT on in1k weights + 'vit_base_patch16_224.augreg2_in21k_ft_in1k': _cfg( + hf_hub_id='timm/'), + 'vit_base_patch16_384.augreg2_in21k_ft_in1k': _cfg(), + 'vit_base_patch8_224.augreg2_in21k_ft_in1k': _cfg( + hf_hub_id='timm/'), + # How to train your ViT (augreg) weights, pretrained on 21k FT on in1k 'vit_tiny_patch16_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', @@ -751,13 +758,6 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), - # re-finetuned augreg 21k FT on in1k weights - 'vit_base_patch16_224.augreg2_in21k_ft_in1k': _cfg( - hf_hub_id='timm/'), - 'vit_base_patch16_384.augreg2_in21k_ft_in1k': _cfg(), - 'vit_base_patch8_224.augreg2_in21k_ft_in1k': _cfg( - hf_hub_id='timm/'), - # patch models (weights from official Google JAX impl) pretrained on in21k FT on in1k 'vit_base_patch16_224.orig_in21k_ft_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth', @@ -802,7 +802,6 @@ default_cfgs = generate_default_cfgs({ 'vit_giant_patch14_224.untrained': _cfg(url=''), 'vit_gigantic_patch14_224.untrained': _cfg(url=''), - # patch models, imagenet21k (weights from official Google JAX impl) 'vit_large_patch32_224.orig_in21k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', @@ -869,7 +868,6 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), - # ViT ImageNet-21K-P pretraining by MILL 'vit_base_patch16_224_miil.in21k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/vit_base_patch16_224_in21k_miil-887286df.pth', @@ -880,7 +878,7 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear'), - # custom timm variants + # Custom timm variants 
'vit_base_patch16_rpn_224.in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth', hf_hub_id='timm/'), @@ -896,52 +894,6 @@ default_cfgs = generate_default_cfgs({ 'vit_base_patch16_gap_224': _cfg(), # CLIP pretrained image tower and related fine-tuned weights - 'vit_base_patch32_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-B-32-laion2B-s34B-b79K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_base_patch16_clip_224.laion2b': _cfg( - #hf_hub_id='laion/CLIP-ViT-B-16-laion2B-s34B-b88K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512), - 'vit_large_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-L-14-laion2B-s32B-b82K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=768), - 'vit_huge_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-H-14-laion2B-s32B-b79K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), - 'vit_giant_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-g-14-laion2B-s12B-b42K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), - - 'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), - 'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), - 'vit_large_patch14_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0), - 'vit_large_patch14_clip_336.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_huge_patch14_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_huge_patch14_clip_336.laion2b_ft_in1k': _cfg( - hf_hub_id='', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_base_patch32_clip_224.laion2b_ft_in12k_in1k': _cfg( hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), @@ -973,28 +925,52 @@ default_cfgs = generate_default_cfgs({ mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_base_patch32_clip_224.laion2b_ft_in12k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), - 'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg( + 'vit_base_patch32_clip_224.openai_ft_in12k_in1k': _cfg( + # hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k_in1k', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), + 'vit_base_patch32_clip_384.openai_ft_in12k_in1k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), - 'vit_large_patch14_clip_224.laion2b_ft_in12k': _cfg( + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=0.95, 
input_size=(3, 384, 384), crop_mode='squash'), + 'vit_base_patch16_clip_224.openai_ft_in12k_in1k': _cfg( hf_hub_id='timm/', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=11821), - 'vit_huge_patch14_clip_224.laion2b_ft_in12k': _cfg( + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95), + 'vit_base_patch16_clip_384.openai_ft_in12k_in1k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'), + 'vit_large_patch14_clip_224.openai_ft_in12k_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + 'vit_large_patch14_clip_336.openai_ft_in12k_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_base_patch32_clip_224.openai': _cfg( + 'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_base_patch16_clip_224.openai': _cfg( + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), + 'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_large_patch14_clip_224.openai': _cfg( + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + 'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768), + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), + 'vit_large_patch14_clip_224.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0), + 'vit_large_patch14_clip_336.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), + 'vit_huge_patch14_clip_224.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + 'vit_huge_patch14_clip_336.laion2b_ft_in1k': _cfg( + hf_hub_id='', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), 'vit_base_patch32_clip_224.openai_ft_in1k': _cfg( hf_hub_id='timm/', @@ -1010,30 +986,21 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_base_patch32_clip_224.openai_ft_in12k_in1k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), - 'vit_base_patch32_clip_384.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'), - 'vit_base_patch16_clip_224.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95), - 'vit_base_patch16_clip_384.openai_ft_in12k_in1k': _cfg( + 'vit_base_patch32_clip_224.laion2b_ft_in12k': _cfg( + #hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), + 'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'), - 'vit_large_patch14_clip_224.openai_ft_in12k_in1k': 
_cfg( + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), + 'vit_large_patch14_clip_224.laion2b_ft_in12k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_large_patch14_clip_336.openai_ft_in12k_in1k': _cfg( + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=11821), + 'vit_huge_patch14_clip_224.laion2b_ft_in12k': _cfg( hf_hub_id='timm/', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), 'vit_base_patch32_clip_224.openai_ft_in12k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k', + # hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), 'vit_base_patch16_clip_224.openai_ft_in12k': _cfg( hf_hub_id='timm/', @@ -1042,6 +1009,37 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), + 'vit_base_patch32_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-B-32-laion2B-s34B-b79K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_base_patch16_clip_224.laion2b': _cfg( + # hf_hub_id='laion/CLIP-ViT-B-16-laion2B-s34B-b88K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512), + 'vit_large_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-L-14-laion2B-s32B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=768), + 'vit_huge_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-H-14-laion2B-s32B-b79K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), + 'vit_giant_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-g-14-laion2B-s12B-b42K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), + + 'vit_base_patch32_clip_224.openai': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_base_patch16_clip_224.openai': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_large_patch14_clip_224.openai': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768), + # experimental (may be removed) 'vit_base_patch32_plus_256': _cfg(url='', input_size=(3, 256, 256), crop_pct=0.95), 'vit_base_patch16_plus_240': _cfg(url='', input_size=(3, 240, 240), crop_pct=0.95), From ae9153052f37d4e237a45463b46d3f8f8106267b Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 6 Jan 2023 17:17:35 -0800 Subject: [PATCH 08/34] Update version.py --- timm/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/version.py b/timm/version.py index b110e6cc..85eb542a 100644 --- a/timm/version.py +++ b/timm/version.py @@ -1 +1 @@ -__version__ = '0.8.4dev0' +__version__ = '0.8.5dev0' From d2ef5a3a94fd42fc71679a0483f4f0c643aaed04 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 6 Jan 2023 21:38:40 -0800 Subject: [PATCH 09/34] Update README.md --- README.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 
459b70c1..7789e5d7 100644 --- a/README.md +++ b/README.md @@ -21,12 +21,14 @@ And a big thanks to all GitHub sponsors who helped with some of my costs before ## What's New -### 🤗 Survey: Feedback Appreciated 🤗 - -For a few months now, `timm` has been part of the Hugging Face ecosystem. Yearly, we survey users of our tools to see what we could do better, what we need to continue doing, or what we need to stop doing. - -If you have a couple of minutes and want to participate in shaping the future of the ecosystem, please share your thoughts: -[**hf.co/oss-survey**](https://hf.co/oss-survey) 🙏 +* ❗Updates after Oct 10, 2022 are available in 0.8.x pre-releases (`pip install --pre timm`) or cloning main❗ +* Stable releases are 0.6.x and available by normal pip install or clone from [0.6.x](https://github.com/rwightman/pytorch-image-models/tree/0.6.x) branch. + +### Jan 6, 2023 +* Finally got around to adding `--model-kwargs` and `--opt-kwargs` to scripts to pass through rare args directly to model classes from cmd line + * `train.py /imagenet --model resnet50 --amp --model-kwargs output_stride=16 act_layer=silu` + * `train.py /imagenet --model vit_base_patch16_clip_224 --amp --model-kwargs img_size=240 patch_size=12` +* Cleanup some popular models to better support arg passthrough / merge with model configs, more to go. ### Jan 5, 2023 * ConvNeXt-V2 models and weights added to existing `convnext.py` From 94a91598c30125f6d95b08c7f2edcafd72b7dfa6 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 6 Jan 2023 21:39:25 -0800 Subject: [PATCH 10/34] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7789e5d7..e52996f5 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ And a big thanks to all GitHub sponsors who helped with some of my costs before ### Jan 6, 2023 * Finally got around to adding `--model-kwargs` and `--opt-kwargs` to scripts to pass through rare args directly to model classes from cmd line * `train.py /imagenet --model resnet50 --amp --model-kwargs output_stride=16 act_layer=silu` - * `train.py /imagenet --model vit_base_patch16_clip_224 --amp --model-kwargs img_size=240 patch_size=12` + * `train.py /imagenet --model vit_base_patch16_clip_224 --img-size 240 --amp --model-kwargs img_size=240 patch_size=12` * Cleanup some popular models to better support arg passthrough / merge with model configs, more to go. 
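For illustration, a minimal sketch of what the kwargs passthrough amounts to once the command line is parsed. The `parse_kwargs` helper below is hypothetical, written only for this example; the actual argument parsing in `train.py` may differ in its details:

```python
from ast import literal_eval

import timm


def parse_kwargs(pairs):
    # Hypothetical helper: turn ['output_stride=16', 'act_layer=silu']
    # into {'output_stride': 16, 'act_layer': 'silu'}.
    kwargs = {}
    for pair in pairs:
        key, value = pair.split('=', 1)
        try:
            value = literal_eval(value)  # '16' -> 16; 'silu' fails to parse and stays a string
        except (ValueError, SyntaxError):
            pass
        kwargs[key] = value
    return kwargs


# Equivalent of: train.py /imagenet --model resnet50 --model-kwargs output_stride=16 act_layer=silu
model = timm.create_model('resnet50', pretrained=False, **parse_kwargs(['output_stride=16', 'act_layer=silu']))
```

On the model side, the cleaned-up entry points merge per-model defaults with user overrides via `dict(model_args, **kwargs)`, so values passed through from the command line take precedence over each model's built-in config.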
### Jan 5, 2023 From c0d7388a1b2880f37aa22a594cb47be0351024d6 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 7 Jan 2023 16:29:12 -0800 Subject: [PATCH 11/34] Improving kwarg merging in more models --- timm/models/dpn.py | 71 +++++--- timm/models/maxxvit.py | 25 ++- timm/models/res2net.py | 28 ++-- timm/models/resnest.py | 32 ++-- timm/models/resnet.py | 372 ++++++++++++++++++++--------------------- 5 files changed, 292 insertions(+), 236 deletions(-) diff --git a/timm/models/dpn.py b/timm/models/dpn.py index 87bd918f..6dbabad2 100644 --- a/timm/models/dpn.py +++ b/timm/models/dpn.py @@ -15,7 +15,7 @@ import torch.nn as nn import torch.nn.functional as F from timm.data import IMAGENET_DPN_MEAN, IMAGENET_DPN_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier +from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier, get_norm_act_layer from ._builder import build_model_with_cfg from ._registry import register_model @@ -33,6 +33,7 @@ def _cfg(url='', **kwargs): default_cfgs = { + 'dpn48b': _cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'dpn68': _cfg( url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68-66bebafa7.pth'), 'dpn68b': _cfg( @@ -82,7 +83,16 @@ class BnActConv2d(nn.Module): class DualPathBlock(nn.Module): def __init__( - self, in_chs, num_1x1_a, num_3x3_b, num_1x1_c, inc, groups, block_type='normal', b=False): + self, + in_chs, + num_1x1_a, + num_3x3_b, + num_1x1_c, + inc, + groups, + block_type='normal', + b=False, + ): super(DualPathBlock, self).__init__() self.num_1x1_c = num_1x1_c self.inc = inc @@ -167,16 +177,31 @@ class DualPathBlock(nn.Module): class DPN(nn.Module): def __init__( - self, small=False, num_init_features=64, k_r=96, groups=32, global_pool='avg', - b=False, k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), output_stride=32, - num_classes=1000, in_chans=3, drop_rate=0., fc_act_layer=nn.ELU): + self, + num_classes=1000, + in_chans=3, + output_stride=32, + global_pool='avg', + k_sec=(3, 4, 20, 3), + inc_sec=(16, 32, 24, 128), + k_r=96, + groups=32, + small=False, + num_init_features=64, + b=False, + drop_rate=0., + norm_layer='batchnorm2d', + act_layer='relu', + fc_act_layer=nn.ELU, + ): super(DPN, self).__init__() self.num_classes = num_classes self.drop_rate = drop_rate self.b = b assert output_stride == 32 # FIXME look into dilation support - norm_layer = partial(BatchNormAct2d, eps=.001) - fc_norm_layer = partial(BatchNormAct2d, eps=.001, act_layer=fc_act_layer, inplace=False) + + norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=act_layer), eps=.001) + fc_norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=fc_act_layer), eps=.001, inplace=False) bw_factor = 1 if small else 4 blocks = OrderedDict() @@ -291,49 +316,57 @@ def _create_dpn(variant, pretrained=False, **kwargs): **kwargs) +@register_model +def dpn48b(pretrained=False, **kwargs): + model_kwargs = dict( + small=True, num_init_features=10, k_r=128, groups=32, + b=True, k_sec=(3, 4, 6, 3), inc_sec=(16, 32, 32, 64), act_layer='silu') + return _create_dpn('dpn48b', pretrained=pretrained, **dict(model_kwargs, **kwargs)) + + @register_model def dpn68(pretrained=False, **kwargs): model_kwargs = dict( small=True, num_init_features=10, k_r=128, groups=32, - k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs) - return _create_dpn('dpn68', pretrained=pretrained, **model_kwargs) + k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 
64)) + return _create_dpn('dpn68', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn68b(pretrained=False, **kwargs): model_kwargs = dict( small=True, num_init_features=10, k_r=128, groups=32, - b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs) - return _create_dpn('dpn68b', pretrained=pretrained, **model_kwargs) + b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64)) + return _create_dpn('dpn68b', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn92(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=64, k_r=96, groups=32, - k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), **kwargs) - return _create_dpn('dpn92', pretrained=pretrained, **model_kwargs) + k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128)) + return _create_dpn('dpn92', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn98(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=96, k_r=160, groups=40, - k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128), **kwargs) - return _create_dpn('dpn98', pretrained=pretrained, **model_kwargs) + k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128)) + return _create_dpn('dpn98', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn131(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=128, k_r=160, groups=40, - k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128), **kwargs) - return _create_dpn('dpn131', pretrained=pretrained, **model_kwargs) + k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128)) + return _create_dpn('dpn131', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn107(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=128, k_r=200, groups=50, - k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128), **kwargs) - return _create_dpn('dpn107', pretrained=pretrained, **model_kwargs) + k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128)) + return _create_dpn('dpn107', pretrained=pretrained, **dict(model_kwargs, **kwargs)) diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py index 1170e7e3..d68301b1 100644 --- a/timm/models/maxxvit.py +++ b/timm/models/maxxvit.py @@ -1116,6 +1116,26 @@ class NormMlpHead(nn.Module): return x +def _overlay_kwargs(cfg: MaxxVitCfg, **kwargs): + transformer_kwargs = {} + conv_kwargs = {} + base_kwargs = {} + for k, v in kwargs.items(): + if k.startswith('transformer_'): + transformer_kwargs[k.replace('transformer_', '')] = v + elif k.startswith('conv_'): + conv_kwargs[k.replace('conv_', '')] = v + else: + base_kwargs[k] = v + cfg = replace( + cfg, + transformer_cfg=replace(cfg.transformer_cfg, **transformer_kwargs), + conv_cfg=replace(cfg.conv_cfg, **conv_kwargs), + **base_kwargs + ) + return cfg + + class MaxxVit(nn.Module): """ CoaTNet + MaxVit base model. @@ -1130,10 +1150,13 @@ class MaxxVit(nn.Module): num_classes: int = 1000, global_pool: str = 'avg', drop_rate: float = 0., - drop_path_rate: float = 0. 
+ drop_path_rate: float = 0., + **kwargs, ): super().__init__() img_size = to_2tuple(img_size) + if kwargs: + cfg = _overlay_kwargs(cfg, **kwargs) transformer_cfg = cfg_window_size(cfg.transformer_cfg, img_size) self.num_classes = num_classes self.global_pool = global_pool diff --git a/timm/models/res2net.py b/timm/models/res2net.py index 607ba722..29a49953 100644 --- a/timm/models/res2net.py +++ b/timm/models/res2net.py @@ -156,8 +156,8 @@ def res2net50_26w_4s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4), **kwargs) - return _create_res2net('res2net50_26w_4s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4)) + return _create_res2net('res2net50_26w_4s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -167,8 +167,8 @@ def res2net101_26w_4s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4), **kwargs) - return _create_res2net('res2net101_26w_4s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4)) + return _create_res2net('res2net101_26w_4s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -178,8 +178,8 @@ def res2net50_26w_6s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6), **kwargs) - return _create_res2net('res2net50_26w_6s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6)) + return _create_res2net('res2net50_26w_6s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -189,8 +189,8 @@ def res2net50_26w_8s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8), **kwargs) - return _create_res2net('res2net50_26w_8s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8)) + return _create_res2net('res2net50_26w_8s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -200,8 +200,8 @@ def res2net50_48w_2s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2), **kwargs) - return _create_res2net('res2net50_48w_2s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2)) + return _create_res2net('res2net50_48w_2s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -211,8 +211,8 @@ def res2net50_14w_8s(pretrained=False, **kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8), **kwargs) - return _create_res2net('res2net50_14w_8s', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8)) + return _create_res2net('res2net50_14w_8s', pretrained, **dict(model_args, **kwargs)) @register_model @@ -222,5 +222,5 @@ def res2next50(pretrained=False, 
**kwargs): pretrained (bool): If True, returns a model pre-trained on ImageNet """ model_args = dict( - block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4), **kwargs) - return _create_res2net('res2next50', pretrained, **model_args) + block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4)) + return _create_res2net('res2next50', pretrained, **dict(model_args, **kwargs)) diff --git a/timm/models/resnest.py b/timm/models/resnest.py index 853ee1d0..38303f9c 100644 --- a/timm/models/resnest.py +++ b/timm/models/resnest.py @@ -163,8 +163,8 @@ def resnest14d(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[1, 1, 1, 1], stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest14d', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest14d', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -174,8 +174,8 @@ def resnest26d(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[2, 2, 2, 2], stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest26d', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest26d', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -186,8 +186,8 @@ def resnest50d(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 4, 6, 3], stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest50d', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest50d', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -198,8 +198,8 @@ def resnest101e(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 4, 23, 3], stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest101e', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest101e', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -210,8 +210,8 @@ def resnest200e(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 24, 36, 3], stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest200e', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest200e', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -222,8 +222,8 @@ def resnest269e(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 30, 48, 8], stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1, - block_args=dict(radix=2, avd=True, avd_first=False), **kwargs) - return _create_resnest('resnest269e', pretrained=pretrained, **model_kwargs) + 
block_args=dict(radix=2, avd=True, avd_first=False)) + return _create_resnest('resnest269e', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -233,8 +233,8 @@ def resnest50d_4s2x40d(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 4, 6, 3], stem_type='deep', stem_width=32, avg_down=True, base_width=40, cardinality=2, - block_args=dict(radix=4, avd=True, avd_first=True), **kwargs) - return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=4, avd=True, avd_first=True)) + return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model @@ -244,5 +244,5 @@ def resnest50d_1s4x24d(pretrained=False, **kwargs): model_kwargs = dict( block=ResNestBottleneck, layers=[3, 4, 6, 3], stem_type='deep', stem_width=32, avg_down=True, base_width=24, cardinality=4, - block_args=dict(radix=1, avd=True, avd_first=True), **kwargs) - return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **model_kwargs) + block_args=dict(radix=1, avd=True, avd_first=True)) + return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **dict(model_kwargs, **kwargs)) diff --git a/timm/models/resnet.py b/timm/models/resnet.py index a783e3e1..b3c041c4 100644 --- a/timm/models/resnet.py +++ b/timm/models/resnet.py @@ -704,7 +704,7 @@ class ResNet(nn.Module): self.num_classes = num_classes self.drop_rate = drop_rate self.grad_checkpointing = False - + act_layer = get_act_layer(act_layer) norm_layer = get_norm_layer(norm_layer) @@ -845,77 +845,72 @@ def _create_resnet(variant, pretrained=False, **kwargs): def resnet10t(pretrained=False, **kwargs): """Constructs a ResNet-10-T model. """ - model_args = dict( - block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet10t', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet10t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet14t(pretrained=False, **kwargs): """Constructs a ResNet-14-T model. """ - model_args = dict( - block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet14t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet14t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet18(pretrained=False, **kwargs): """Constructs a ResNet-18 model. """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('resnet18', pretrained, **dict(model_args, **kwargs)) @register_model def resnet18d(pretrained=False, **kwargs): """Constructs a ResNet-18-D model. """ - model_args = dict( - block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet18d', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet18d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet34(pretrained=False, **kwargs): """Constructs a ResNet-34 model. 
""" - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('resnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3]) + return _create_resnet('resnet34', pretrained, **dict(model_args, **kwargs)) @register_model def resnet34d(pretrained=False, **kwargs): """Constructs a ResNet-34-D model. """ - model_args = dict( - block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet34d', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet34d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26(pretrained=False, **kwargs): """Constructs a ResNet-26 model. """ - model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('resnet26', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2]) + return _create_resnet('resnet26', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26t(pretrained=False, **kwargs): """Constructs a ResNet-26-T model. """ - model_args = dict( - block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet26t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet26t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26d(pretrained=False, **kwargs): """Constructs a ResNet-26-D model. """ - model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet26d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet26d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -923,83 +918,79 @@ def resnet50(pretrained=False, **kwargs): """Constructs a ResNet-50 model. """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('resnet50', pretrained, **model_args) + return _create_resnet('resnet50', pretrained, **dict(model_args, **kwargs)) @register_model def resnet50d(pretrained=False, **kwargs) -> ResNet: """Constructs a ResNet-50-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet50d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet50d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet50t(pretrained=False, **kwargs): """Constructs a ResNet-50-T model. """ - model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet50t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet50t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet101(pretrained=False, **kwargs): """Constructs a ResNet-101 model. 
""" - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs) - return _create_resnet('resnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3]) + return _create_resnet('resnet101', pretrained, **dict(model_args, **kwargs)) @register_model def resnet101d(pretrained=False, **kwargs): """Constructs a ResNet-101-D model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet101d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet101d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet152(pretrained=False, **kwargs): """Constructs a ResNet-152 model. """ - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs) - return _create_resnet('resnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3]) + return _create_resnet('resnet152', pretrained, **dict(model_args, **kwargs)) @register_model def resnet152d(pretrained=False, **kwargs): """Constructs a ResNet-152-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet152d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet152d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet200(pretrained=False, **kwargs): """Constructs a ResNet-200 model. """ - model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], **kwargs) - return _create_resnet('resnet200', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3]) + return _create_resnet('resnet200', pretrained, **dict(model_args, **kwargs)) @register_model def resnet200d(pretrained=False, **kwargs): """Constructs a ResNet-200-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet200d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet200d', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet34(pretrained=False, **kwargs): """Constructs a ResNet-34 model with original Torchvision weights. """ - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('tv_resnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3]) + return _create_resnet('tv_resnet34', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1007,23 +998,23 @@ def tv_resnet50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with original Torchvision weights. """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('tv_resnet50', pretrained, **model_args) + return _create_resnet('tv_resnet50', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet101(pretrained=False, **kwargs): """Constructs a ResNet-101 model w/ Torchvision pretrained weights. 
""" - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs) - return _create_resnet('tv_resnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3]) + return _create_resnet('tv_resnet101', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet152(pretrained=False, **kwargs): """Constructs a ResNet-152 model w/ Torchvision pretrained weights. """ - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs) - return _create_resnet('tv_resnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3]) + return _create_resnet('tv_resnet152', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1034,8 +1025,8 @@ def wide_resnet50_2(pretrained=False, **kwargs): convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048 channels, and in Wide ResNet-50-2 has 2048-1024-2048. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128, **kwargs) - return _create_resnet('wide_resnet50_2', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128) + return _create_resnet('wide_resnet50_2', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1045,8 +1036,8 @@ def wide_resnet101_2(pretrained=False, **kwargs): which is twice larger in every block. The number of channels in outer 1x1 convolutions is the same. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128, **kwargs) - return _create_resnet('wide_resnet101_2', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128) + return _create_resnet('wide_resnet101_2', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1061,8 +1052,8 @@ def resnet50_gn(pretrained=False, **kwargs): def resnext50_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt50-32x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1071,40 +1062,40 @@ def resnext50d_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnext50d_32x4d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnext50d_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt-101 32x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_32x8d(pretrained=False, **kwargs): """Constructs a ResNeXt-101 32x8d model. 
""" - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_64x4d(pretrained=False, **kwargs): """Constructs a ResNeXt101-64x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4, **kwargs) - return _create_resnet('resnext101_64x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4) + return _create_resnet('resnext101_64x4d', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnext50_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt50-32x4d model with original Torchvision weights. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('tv_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('tv_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1114,8 +1105,8 @@ def ig_resnext101_32x8d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('ig_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('ig_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1125,8 +1116,8 @@ def ig_resnext101_32x16d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('ig_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('ig_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1136,8 +1127,8 @@ def ig_resnext101_32x32d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32, **kwargs) - return _create_resnet('ig_resnext101_32x32d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32) + return _create_resnet('ig_resnext101_32x32d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1147,8 +1138,8 @@ def ig_resnext101_32x48d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48, **kwargs) - return _create_resnet('ig_resnext101_32x48d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48) + 
return _create_resnet('ig_resnext101_32x48d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1157,8 +1148,8 @@ def ssl_resnet18(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('ssl_resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('ssl_resnet18', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1168,7 +1159,7 @@ def ssl_resnet50(pretrained=False, **kwargs): Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('ssl_resnet50', pretrained, **model_args) + return _create_resnet('ssl_resnet50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1177,8 +1168,8 @@ def ssl_resnext50_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('ssl_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('ssl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1187,8 +1178,8 @@ def ssl_resnext101_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('ssl_resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('ssl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1197,8 +1188,8 @@ def ssl_resnext101_32x8d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('ssl_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('ssl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1207,8 +1198,8 @@ def ssl_resnext101_32x16d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('ssl_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('ssl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1218,8 +1209,8 @@ def swsl_resnet18(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image 
Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('swsl_resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('swsl_resnet18', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1230,7 +1221,7 @@ def swsl_resnet50(pretrained=False, **kwargs): Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('swsl_resnet50', pretrained, **model_args) + return _create_resnet('swsl_resnet50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1240,8 +1231,8 @@ def swsl_resnext50_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('swsl_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('swsl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1251,8 +1242,8 @@ def swsl_resnext101_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('swsl_resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('swsl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1262,8 +1253,8 @@ def swsl_resnext101_32x8d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('swsl_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('swsl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1273,8 +1264,8 @@ def swsl_resnext101_32x16d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('swsl_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('swsl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1285,8 +1276,8 @@ def ecaresnet26t(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet26t', pretrained, **model_args) + stem_type='deep_tiered', 
avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet26t', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1295,8 +1286,8 @@ def ecaresnet50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet50d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1306,7 +1297,7 @@ def ecaresnet50d_pruned(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) + block_args=dict(attn_layer='eca')) return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **model_args) @@ -1317,8 +1308,8 @@ def ecaresnet50t(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet50t', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet50t', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1327,8 +1318,8 @@ def ecaresnetlight(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[1, 1, 11, 3], stem_width=32, avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnetlight', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnetlight', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1337,8 +1328,8 @@ def ecaresnet101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet101d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet101d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1348,7 +1339,7 @@ def ecaresnet101d_pruned(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) + block_args=dict(attn_layer='eca')) return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args) @@ -1358,8 +1349,8 @@ def ecaresnet200d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet200d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet200d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1368,8 +1359,8 @@ def ecaresnet269d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet269d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet269d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1380,8 +1371,8 @@ def ecaresnext26t_32x4d(pretrained=False, **kwargs): """ model_args = 
dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnext26t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnext26t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1392,54 +1383,54 @@ def ecaresnext50t_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnext50t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnext50t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet18(pretrained=False, **kwargs): - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet18', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet34(pretrained=False, **kwargs): - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet34', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet50(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet50', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet50t(pretrained=False, **kwargs): model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet50t', pretrained, **model_args) + block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet50t', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet101(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet101', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet152(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet152', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet152d(pretrained=False, **kwargs): model_args = dict( - block=Bottleneck, 
layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet152d', pretrained, **model_args) + block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet152d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1447,9 +1438,9 @@ def seresnet200d(pretrained=False, **kwargs): """Constructs a ResNet-200-D model with SE attn. """ model_args = dict( - block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet200d', pretrained, **model_args) + block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet200d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1457,9 +1448,9 @@ def seresnet269d(pretrained=False, **kwargs): """Constructs a ResNet-269-D model with SE attn. """ model_args = dict( - block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet269d', pretrained, **model_args) + block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet269d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1470,8 +1461,8 @@ def seresnext26d_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext26d_32x4d', pretrained, **model_args) + stem_type='deep', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnext26d_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1482,8 +1473,8 @@ def seresnext26t_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext26t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnext26t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1499,24 +1490,24 @@ def seresnext26tn_32x4d(pretrained=False, **kwargs): def seresnext50_32x4d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext50_32x4d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnext101_32x4d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101_32x4d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnext101_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], 
cardinality=32, base_width=8, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1524,32 +1515,32 @@ def seresnext101d_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101d_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101d_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model def senet154(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 8, 36, 3], cardinality=64, base_width=4, stem_type='deep', - down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('senet154', pretrained, **model_args) + down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se')) + return _create_resnet('senet154', pretrained, **dict(model_args, **kwargs)) @register_model def resnetblur18(pretrained=False, **kwargs): """Constructs a ResNet-18 model with blur anti-aliasing """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d, **kwargs) - return _create_resnet('resnetblur18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d) + return _create_resnet('resnetblur18', pretrained, **dict(model_args, **kwargs)) @register_model def resnetblur50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with blur anti-aliasing """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d, **kwargs) - return _create_resnet('resnetblur50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d) + return _create_resnet('resnetblur50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1558,8 +1549,8 @@ def resnetblur50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetblur50d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetblur50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1568,16 +1559,25 @@ def resnetblur101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=BlurPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetblur101d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetblur101d', pretrained, **dict(model_args, **kwargs)) + + +@register_model +def resnetaa34d(pretrained=False, **kwargs): + """Constructs a ResNet-34-D model w/ avgpool anti-aliasing + """ + model_args = dict( + block=BasicBlock, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetaa34d', pretrained, **dict(model_args, **kwargs)) @register_model def resnetaa50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with avgpool anti-aliasing """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, **kwargs) - return 
_create_resnet('resnetaa50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d) + return _create_resnet('resnetaa50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1586,8 +1586,8 @@ def resnetaa50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetaa50d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetaa50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1596,8 +1596,8 @@ def resnetaa101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetaa101d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetaa101d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1606,8 +1606,8 @@ def seresnetaa50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnetaa50d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnetaa50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1617,8 +1617,8 @@ def seresnextaa101d_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, stem_width=32, stem_type='deep', avg_down=True, aa_layer=nn.AvgPool2d, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnextaa101d_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnextaa101d_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1630,8 +1630,8 @@ def resnetrs50(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs50', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1643,8 +1643,8 @@ def resnetrs101(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs101', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs101', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1656,8 +1656,8 @@ def resnetrs152(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs152', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return 
_create_resnet('resnetrs152', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1669,8 +1669,8 @@ def resnetrs200(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs200', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs200', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1682,8 +1682,8 @@ def resnetrs270(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 29, 53, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs270', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs270', pretrained, **dict(model_args, **kwargs)) @@ -1696,8 +1696,8 @@ def resnetrs350(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 36, 72, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs350', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs350', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1709,5 +1709,5 @@ def resnetrs420(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 44, 87, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs420', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs420', pretrained, **dict(model_args, **kwargs)) From 5078b28f8aec21bad91d0b866eb43bab33bc0bba Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 8 Jan 2023 18:17:17 -0800 Subject: [PATCH 12/34] More kwarg handling tweaks, maxvit_base_rw def added --- timm/models/densenet.py | 47 +++++++++++++++++++++--------- timm/models/dpn.py | 10 +++---- timm/models/maxxvit.py | 36 +++++++++++++++++++++++ timm/models/mobilevit.py | 13 +++++++-- timm/models/resnet.py | 4 +-- timm/models/resnetv2.py | 63 +++++++++++++++++++--------------------- 6 files changed, 117 insertions(+), 56 deletions(-) diff --git a/timm/models/densenet.py b/timm/models/densenet.py index e731f7b0..ccbb491c 100644 --- a/timm/models/densenet.py +++ b/timm/models/densenet.py @@ -12,7 +12,7 @@ import torch.utils.checkpoint as cp from torch.jit.annotations import List from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import BatchNormAct2d, create_norm_act_layer, BlurPool2d, create_classifier +from timm.layers import BatchNormAct2d, get_norm_act_layer, BlurPool2d, create_classifier from ._builder import build_model_with_cfg from ._manipulate import MATCH_PREV_GROUP from ._registry import register_model @@ -115,8 +115,15 @@ class DenseBlock(nn.ModuleDict): _version = 2 def __init__( - self, num_layers, num_input_features, bn_size, growth_rate, norm_layer=BatchNormAct2d, - drop_rate=0., memory_efficient=False): + self, + num_layers, + 
num_input_features, + bn_size, + growth_rate, + norm_layer=BatchNormAct2d, + drop_rate=0., + memory_efficient=False, + ): super(DenseBlock, self).__init__() for i in range(num_layers): layer = DenseLayer( @@ -165,12 +172,25 @@ class DenseNet(nn.Module): """ def __init__( - self, growth_rate=32, block_config=(6, 12, 24, 16), num_classes=1000, in_chans=3, global_pool='avg', - bn_size=4, stem_type='', norm_layer=BatchNormAct2d, aa_layer=None, drop_rate=0, - memory_efficient=False, aa_stem_only=True): + self, + growth_rate=32, + block_config=(6, 12, 24, 16), + num_classes=1000, + in_chans=3, + global_pool='avg', + bn_size=4, + stem_type='', + act_layer='relu', + norm_layer='batchnorm2d', + aa_layer=None, + drop_rate=0, + memory_efficient=False, + aa_stem_only=True, + ): self.num_classes = num_classes self.drop_rate = drop_rate super(DenseNet, self).__init__() + norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer) # Stem deep_stem = 'deep' in stem_type # 3x3 deep stem @@ -226,8 +246,11 @@ class DenseNet(nn.Module): dict(num_chs=num_features, reduction=current_stride, module='features.' + module_name)] current_stride *= 2 trans = DenseTransition( - num_input_features=num_features, num_output_features=num_features // 2, - norm_layer=norm_layer, aa_layer=transition_aa_layer) + num_input_features=num_features, + num_output_features=num_features // 2, + norm_layer=norm_layer, + aa_layer=transition_aa_layer, + ) self.features.add_module(f'transition{i + 1}', trans) num_features = num_features // 2 @@ -322,8 +345,8 @@ def densenetblur121d(pretrained=False, **kwargs): `"Densely Connected Convolutional Networks" ` """ model = _create_densenet( - 'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, stem_type='deep', - aa_layer=BlurPool2d, **kwargs) + 'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, + stem_type='deep', aa_layer=BlurPool2d, **kwargs) return model @@ -382,11 +405,9 @@ def densenet264(pretrained=False, **kwargs): def densenet264d_iabn(pretrained=False, **kwargs): r"""Densenet-264 model with deep stem and Inplace-ABN """ - def norm_act_fn(num_features, **kwargs): - return create_norm_act_layer('iabn', num_features, act_layer='leaky_relu', **kwargs) model = _create_densenet( 'densenet264d_iabn', growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep', - norm_layer=norm_act_fn, pretrained=pretrained, **kwargs) + norm_layer='iabn', act_layer='leaky_relu', pretrained=pretrained, **kwargs) return model diff --git a/timm/models/dpn.py b/timm/models/dpn.py index 6dbabad2..29a7a7e8 100644 --- a/timm/models/dpn.py +++ b/timm/models/dpn.py @@ -178,21 +178,21 @@ class DualPathBlock(nn.Module): class DPN(nn.Module): def __init__( self, - num_classes=1000, - in_chans=3, - output_stride=32, - global_pool='avg', k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), k_r=96, groups=32, + num_classes=1000, + in_chans=3, + output_stride=32, + global_pool='avg', small=False, num_init_features=64, b=False, drop_rate=0., norm_layer='batchnorm2d', act_layer='relu', - fc_act_layer=nn.ELU, + fc_act_layer='elu', ): super(DPN, self).__init__() self.num_classes = num_classes diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py index d68301b1..7f9075b9 100644 --- a/timm/models/maxxvit.py +++ b/timm/models/maxxvit.py @@ -1680,6 +1680,26 @@ model_cfgs = dict( init_values=1e-6, ), ), + maxvit_rmlp_base_rw_224=MaxxVitCfg( + embed_dim=(96, 192, 384, 768), + depths=(2, 6, 14, 2), + block_type=('M',) * 4, + stem_width=(32, 64), + 
head_hidden_size=768, + **_rw_max_cfg( + rel_pos_type='mlp', + ), + ), + maxvit_rmlp_base_rw_384=MaxxVitCfg( + embed_dim=(96, 192, 384, 768), + depths=(2, 6, 14, 2), + block_type=('M',) * 4, + stem_width=(32, 64), + head_hidden_size=768, + **_rw_max_cfg( + rel_pos_type='mlp', + ), + ), maxvit_tiny_pm_256=MaxxVitCfg( embed_dim=(64, 128, 256, 512), @@ -1862,6 +1882,12 @@ default_cfgs = generate_default_cfgs({ 'maxvit_rmlp_small_rw_256': _cfg( url='', input_size=(3, 256, 256), pool_size=(8, 8)), + 'maxvit_rmlp_base_rw_2244': _cfg( + url='', + ), + 'maxvit_rmlp_base_rw_384': _cfg( + url='', + input_size=(3, 384, 384), pool_size=(12, 12)), 'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)), @@ -2091,6 +2117,16 @@ def maxvit_rmlp_small_rw_256(pretrained=False, **kwargs): return _create_maxxvit('maxvit_rmlp_small_rw_256', pretrained=pretrained, **kwargs) +@register_model +def maxvit_rmlp_base_rw_224(pretrained=False, **kwargs): + return _create_maxxvit('maxvit_rmlp_base_rw_224', pretrained=pretrained, **kwargs) + + +@register_model +def maxvit_rmlp_base_rw_384(pretrained=False, **kwargs): + return _create_maxxvit('maxvit_rmlp_base_rw_384', pretrained=pretrained, **kwargs) + + @register_model def maxvit_tiny_pm_256(pretrained=False, **kwargs): return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs) diff --git a/timm/models/mobilevit.py b/timm/models/mobilevit.py index 3d2ae84a..8e8f4428 100644 --- a/timm/models/mobilevit.py +++ b/timm/models/mobilevit.py @@ -266,9 +266,16 @@ class MobileVitBlock(nn.Module): self.transformer = nn.Sequential(*[ TransformerBlock( - transformer_dim, mlp_ratio=mlp_ratio, num_heads=num_heads, qkv_bias=True, - attn_drop=attn_drop, drop=drop, drop_path=drop_path_rate, - act_layer=layers.act, norm_layer=transformer_norm_layer) + transformer_dim, + mlp_ratio=mlp_ratio, + num_heads=num_heads, + qkv_bias=True, + attn_drop=attn_drop, + drop=drop, + drop_path=drop_path_rate, + act_layer=layers.act, + norm_layer=transformer_norm_layer, + ) for _ in range(transformer_depth) ]) self.norm = transformer_norm_layer(transformer_dim) diff --git a/timm/models/resnet.py b/timm/models/resnet.py index b3c041c4..200280b3 100644 --- a/timm/models/resnet.py +++ b/timm/models/resnet.py @@ -1298,7 +1298,7 @@ def ecaresnet50d_pruned(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='eca')) - return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **model_args) + return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs)) @register_model @@ -1340,7 +1340,7 @@ def ecaresnet101d_pruned(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='eca')) - return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args) + return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs)) @register_model diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py index d696b291..41e29e12 100644 --- a/timm/models/resnetv2.py +++ b/timm/models/resnetv2.py @@ -746,86 +746,83 @@ def resnetv2_152x2_bit_teacher_384(pretrained=False, **kwargs): @register_model def resnetv2_50(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50', pretrained=pretrained, - layers=[3, 4, 6, 3], conv_layer=create_conv2d, 
norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_50', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50t(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50t', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='tiered', avg_down=True, **kwargs) + stem_type='tiered', avg_down=True) + return _create_resnetv2('resnetv2_50t', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_101(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101', pretrained=pretrained, - layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_101', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_101d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101d', pretrained=pretrained, + model_args = dict( layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_101d', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_152(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_152', pretrained=pretrained, - layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_152', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_152d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_152d', pretrained=pretrained, + model_args = dict( layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_152d', pretrained=pretrained, **dict(model_args, **kwargs)) # Experimental configs (may change / be removed) @register_model def resnetv2_50d_gn(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_gn', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=GroupNormAct, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_gn', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d_evob(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_evob', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dB0, - stem_type='deep', avg_down=True, zero_init_last=True, **kwargs) + stem_type='deep', avg_down=True, zero_init_last=True) + return _create_resnetv2('resnetv2_50d_evob', pretrained=pretrained, **dict(model_args, 
**kwargs)) @register_model def resnetv2_50d_evos(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_evos', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dS0, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_evos', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d_frn(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_frn', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=FilterResponseNormTlu2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_frn', pretrained=pretrained, **dict(model_args, **kwargs)) From 1825b5e314b702d603e3e7a2d02616a8ffd49ea2 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 8 Jan 2023 22:42:24 -0800 Subject: [PATCH 13/34] maxxvit type --- timm/models/maxxvit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py index 7f9075b9..dd424078 100644 --- a/timm/models/maxxvit.py +++ b/timm/models/maxxvit.py @@ -1882,7 +1882,7 @@ default_cfgs = generate_default_cfgs({ 'maxvit_rmlp_small_rw_256': _cfg( url='', input_size=(3, 256, 256), pool_size=(8, 8)), - 'maxvit_rmlp_base_rw_2244': _cfg( + 'maxvit_rmlp_base_rw_224': _cfg( url='', ), 'maxvit_rmlp_base_rw_384': _cfg( From 2c24cb98f10d0f98b8fa7d47ac00f19e28d8ea54 Mon Sep 17 00:00:00 2001 From: Ikko Eltociear Ashimine Date: Tue, 10 Jan 2023 01:26:57 +0900 Subject: [PATCH 14/34] Fix typo in results/README.md occuring -> occurring --- results/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/results/README.md b/results/README.md index 4fabf64b..81f30061 100644 --- a/results/README.md +++ b/results/README.md @@ -38,7 +38,7 @@ An ImageNet test set of 10,000 images sampled from new images roughly 10 years a ### ImageNet-Adversarial - [`results-imagenet-a.csv`](results-imagenet-a.csv) -A collection of 7500 images covering 200 of the 1000 ImageNet classes. Images are naturally occuring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset, your typical ResNet-50 will score 0% top-1. +A collection of 7500 images covering 200 of the 1000 ImageNet classes. Images are naturally occurring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset, your typical ResNet-50 will score 0% top-1. 
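The recurring refactor running through the model-entrypoint hunks above pulls `**kwargs` out of the `model_args` dict and merges it at the call site instead. A minimal, plain-Python sketch of the `dict(model_args, **kwargs)` merge semantics (hypothetical values, no timm imports required):

```python
# dict(defaults, **overrides) copies `defaults`, then applies `overrides` on
# top, so caller kwargs win over a variant's preset args without mutating them.
defaults = dict(layers=[3, 4, 6, 3], stem_width=32, avg_down=True)
overrides = dict(stem_width=64)           # stand-in for user-supplied **kwargs
merged = dict(defaults, **overrides)

assert merged['stem_width'] == 64         # caller override takes precedence
assert merged['layers'] == [3, 4, 6, 3]   # unset defaults carry through
assert defaults['stem_width'] == 32       # preset dict is left untouched
```

The old `dict(..., avg_down=True, **kwargs)` form also raised `TypeError: dict() got multiple values for keyword argument ...` whenever a caller passed a key the variant already set; merging after construction makes such overrides legal.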
 For clean validation with same 200 classes, see [`results-imagenet-a-clean.csv`](results-imagenet-a-clean.csv)

From 2e83bba1422295f177c2681d5835012633cbae19 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 9 Jan 2023 13:37:40 -0800
Subject: [PATCH 15/34] Revert head norm changes to ConvNeXt as it broke some downstream use, alternate workaround for fcmae weights

---
 timm/models/convnext.py | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/timm/models/convnext.py b/timm/models/convnext.py
index e799a7de..11c061f6 100644
--- a/timm/models/convnext.py
+++ b/timm/models/convnext.py
@@ -301,11 +301,10 @@ class ConvNeXt(nn.Module):
 
         # if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets
         # otherwise pool -> norm -> fc, the default ConvNeXt ordering (pretrained FB weights)
-        self.head_norm_first = head_norm_first
         self.norm_pre = norm_layer(self.num_features) if head_norm_first else nn.Identity()
         self.head = nn.Sequential(OrderedDict([
                 ('global_pool', SelectAdaptivePool2d(pool_type=global_pool)),
-                ('norm', nn.Identity() if head_norm_first or num_classes == 0 else norm_layer(self.num_features)),
+                ('norm', nn.Identity() if head_norm_first else norm_layer(self.num_features)),
                 ('flatten', nn.Flatten(1) if global_pool else nn.Identity()),
                 ('drop', nn.Dropout(self.drop_rate)),
                 ('fc', nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity())]))
@@ -336,14 +335,7 @@ class ConvNeXt(nn.Module):
         if global_pool is not None:
             self.head.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
             self.head.flatten = nn.Flatten(1) if global_pool else nn.Identity()
-        if num_classes == 0:
-            self.head.norm = nn.Identity()
-            self.head.fc = nn.Identity()
-        else:
-            if not self.head_norm_first:
-                norm_layer = type(self.stem[-1])  # obtain type from stem norm
-                self.head.norm = norm_layer(self.num_features)
-            self.head.fc = nn.Linear(self.num_features, num_classes)
+        self.head.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()
 
     def forward_features(self, x):
         x = self.stem(x)
@@ -407,6 +399,11 @@ def checkpoint_filter_fn(state_dict, model):
 
 
 def _create_convnext(variant, pretrained=False, **kwargs):
+    if kwargs.get('pretrained_cfg', '') == 'fcmae':
+        # NOTE fcmae pretrained weights have no classifier or final norm-layer (`head.norm`)
+        # This is a workaround for loading with num_classes=0 w/o removing the norm-layer.
+ kwargs.setdefault('pretrained_strict', False) + model = build_model_with_cfg( ConvNeXt, variant, pretrained, pretrained_filter_fn=checkpoint_filter_fn, From 01aea8c1bfa925608fd302fca63881a4d569ff4a Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 9 Jan 2023 13:38:31 -0800 Subject: [PATCH 16/34] Version 0.8.6dev0 --- timm/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/version.py b/timm/version.py index 85eb542a..b285df69 100644 --- a/timm/version.py +++ b/timm/version.py @@ -1 +1 @@ -__version__ = '0.8.5dev0' +__version__ = '0.8.6dev0' From a2c14c20642c464dcd28ebe5cd66b945d8fa47f4 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Wed, 11 Jan 2023 14:50:39 -0800 Subject: [PATCH 17/34] Add tiny/small in12k pretrained and fine-tuned ConvNeXt models --- README.md | 6 ++++++ timm/models/convnext.py | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/README.md b/README.md index e52996f5..ee07c368 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,12 @@ And a big thanks to all GitHub sponsors who helped with some of my costs before * ❗Updates after Oct 10, 2022 are available in 0.8.x pre-releases (`pip install --pre timm`) or cloning main❗ * Stable releases are 0.6.x and available by normal pip install or clone from [0.6.x](https://github.com/rwightman/pytorch-image-models/tree/0.6.x) branch. +### Jan 11, 2023 +* Update ConvNeXt ImageNet-12k pretrain series w/ two new fine-tuned weights (and pre FT `.in12k` tags) + * `convnext_nano.in12k_ft_in1k` - 82.3 @ 224, 82.9 @ 288 (previously released) + * `convnext_tiny.in12k_ft_in1k` - 84.2 @ 224, 84.5 @ 288 + * `convnext_small.in12k_ft_in1k` - 85.2 @ 224, 85.3 @ 288 + ### Jan 6, 2023 * Finally got around to adding `--model-kwargs` and `--opt-kwargs` to scripts to pass through rare args directly to model classes from cmd line * `train.py /imagenet --model resnet50 --amp --model-kwargs output_stride=16 act_layer=silu` diff --git a/timm/models/convnext.py b/timm/models/convnext.py index 11c061f6..26a3d560 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -478,10 +478,22 @@ default_cfgs = generate_default_cfgs({ url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_tiny_hnf_a2h-ab7e9df2.pth', hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnext_tiny.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnext_small.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_nano.in12k': _cfg( hf_hub_id='timm/', crop_pct=0.95, num_classes=11821), + 'convnext_tiny.in12k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, num_classes=11821), + 'convnext_small.in12k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, num_classes=11821), 'convnext_tiny.fb_in1k': _cfg( url="https://dl.fbaipublicfiles.com/convnext/convnext_tiny_1k_224_ema.pth", From e520553e3e6f67afc495b3d394afc04ae54ba3cd Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 12 Jan 2023 16:55:47 -0800 Subject: [PATCH 18/34] Update batchnorm freezing to handle NormAct variants, Add GroupNorm1Act, update BatchNormAct2d tracing change from PyTorch --- timm/layers/__init__.py | 3 +- timm/layers/norm_act.py | 228 ++++++++++++++++++++++++++++++++++++++-- timm/utils/model.py | 82 +++------------ 3 files changed, 237 insertions(+), 76 deletions(-) diff --git a/timm/layers/__init__.py b/timm/layers/__init__.py index 
625b4826..6b2dabba 100644 --- a/timm/layers/__init__.py +++ b/timm/layers/__init__.py @@ -29,7 +29,8 @@ from .mixed_conv2d import MixedConv2d from .mlp import Mlp, GluMlp, GatedMlp, ConvMlp, GlobalResponseNormMlp from .non_local_attn import NonLocalAttn, BatNonLocalAttn from .norm import GroupNorm, GroupNorm1, LayerNorm, LayerNorm2d -from .norm_act import BatchNormAct2d, GroupNormAct, convert_sync_batchnorm +from .norm_act import BatchNormAct2d, GroupNormAct, GroupNorm1Act, LayerNormAct, LayerNormAct2d,\ + SyncBatchNormAct, convert_sync_batchnorm, FrozenBatchNormAct2d, freeze_batch_norm_2d, unfreeze_batch_norm_2d from .padding import get_padding, get_same_padding, pad_same from .patch_embed import PatchEmbed, resample_patch_embed from .pool2d_same import AvgPool2dSame, create_pool2d diff --git a/timm/layers/norm_act.py b/timm/layers/norm_act.py index ff075fbc..5ca21d18 100644 --- a/timm/layers/norm_act.py +++ b/timm/layers/norm_act.py @@ -17,6 +17,7 @@ from typing import Union, List, Optional, Any import torch from torch import nn as nn from torch.nn import functional as F +from torchvision.ops.misc import FrozenBatchNorm2d from .create_act import get_act_layer from .fast_norm import is_fast_norm, fast_group_norm, fast_layer_norm @@ -77,7 +78,7 @@ class BatchNormAct2d(nn.BatchNorm2d): if self.training and self.track_running_stats: # TODO: if statement only here to tell the jit to skip emitting this when it is None if self.num_batches_tracked is not None: # type: ignore[has-type] - self.num_batches_tracked = self.num_batches_tracked + 1 # type: ignore[has-type] + self.num_batches_tracked.add_(1) # type: ignore[has-type] if self.momentum is None: # use cumulative moving average exponential_average_factor = 1.0 / float(self.num_batches_tracked) else: # use exponential moving average @@ -169,6 +170,159 @@ def convert_sync_batchnorm(module, process_group=None): return module_output +class FrozenBatchNormAct2d(torch.nn.Module): + """ + BatchNormAct2d where the batch statistics and the affine parameters are fixed + + Args: + num_features (int): Number of features ``C`` from an expected input of size ``(N, C, H, W)`` + eps (float): a value added to the denominator for numerical stability. 
Default: 1e-5 + """ + + def __init__( + self, + num_features: int, + eps: float = 1e-5, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): + super().__init__() + self.eps = eps + self.register_buffer("weight", torch.ones(num_features)) + self.register_buffer("bias", torch.zeros(num_features)) + self.register_buffer("running_mean", torch.zeros(num_features)) + self.register_buffer("running_var", torch.ones(num_features)) + + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module + if act_layer is not None and apply_act: + act_args = dict(inplace=True) if inplace else {} + self.act = act_layer(**act_args) + else: + self.act = nn.Identity() + + def _load_from_state_dict( + self, + state_dict: dict, + prefix: str, + local_metadata: dict, + strict: bool, + missing_keys: List[str], + unexpected_keys: List[str], + error_msgs: List[str], + ): + num_batches_tracked_key = prefix + "num_batches_tracked" + if num_batches_tracked_key in state_dict: + del state_dict[num_batches_tracked_key] + + super()._load_from_state_dict( + state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + # move reshapes to the beginning + # to make it fuser-friendly + w = self.weight.reshape(1, -1, 1, 1) + b = self.bias.reshape(1, -1, 1, 1) + rv = self.running_var.reshape(1, -1, 1, 1) + rm = self.running_mean.reshape(1, -1, 1, 1) + scale = w * (rv + self.eps).rsqrt() + bias = b - rm * scale + x = x * scale + bias + x = self.act(self.drop(x)) + return x + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.weight.shape[0]}, eps={self.eps}, act={self.act})" + + +def freeze_batch_norm_2d(module): + """ + Converts all `BatchNorm2d` and `SyncBatchNorm` or `BatchNormAct2d` and `SyncBatchNormAct2d` layers + of provided module into `FrozenBatchNorm2d` or `FrozenBatchNormAct2d` respectively. + + Args: + module (torch.nn.Module): Any PyTorch module. + + Returns: + torch.nn.Module: Resulting module + + Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 + """ + res = module + if isinstance(module, (BatchNormAct2d, SyncBatchNormAct)): + res = FrozenBatchNormAct2d(module.num_features) + res.num_features = module.num_features + res.affine = module.affine + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + res.drop = module.drop + res.act = module.act + elif isinstance(module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + res = FrozenBatchNorm2d(module.num_features) + res.num_features = module.num_features + res.affine = module.affine + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for name, child in module.named_children(): + new_child = freeze_batch_norm_2d(child) + if new_child is not child: + res.add_module(name, new_child) + return res + + +def unfreeze_batch_norm_2d(module): + """ + Converts all `FrozenBatchNorm2d` layers of provided module into `BatchNorm2d`. 
If `module` is itself and instance + of `FrozenBatchNorm2d`, it is converted into `BatchNorm2d` and returned. Otherwise, the module is walked + recursively and submodules are converted in place. + + Args: + module (torch.nn.Module): Any PyTorch module. + + Returns: + torch.nn.Module: Resulting module + + Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 + """ + res = module + if isinstance(module, FrozenBatchNormAct2d): + res = BatchNormAct2d(module.num_features) + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + res.drop = module.drop + res.act = module.act + elif isinstance(module, FrozenBatchNorm2d): + res = torch.nn.BatchNorm2d(module.num_features) + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for name, child in module.named_children(): + new_child = unfreeze_batch_norm_2d(child) + if new_child is not child: + res.add_module(name, new_child) + return res + + def _num_groups(num_channels, num_groups, group_size): if group_size: assert num_channels % group_size == 0 @@ -179,10 +333,54 @@ def _num_groups(num_channels, num_groups, group_size): class GroupNormAct(nn.GroupNorm): # NOTE num_channel and num_groups order flipped for easier layer swaps / binding of fixed args def __init__( - self, num_channels, num_groups=32, eps=1e-5, affine=True, group_size=None, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + self, + num_channels, + num_groups=32, + eps=1e-5, + affine=True, + group_size=None, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): super(GroupNormAct, self).__init__( - _num_groups(num_channels, num_groups, group_size), num_channels, eps=eps, affine=affine) + _num_groups(num_channels, num_groups, group_size), + num_channels, + eps=eps, + affine=affine, + ) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module + if act_layer is not None and apply_act: + act_args = dict(inplace=True) if inplace else {} + self.act = act_layer(**act_args) + else: + self.act = nn.Identity() + self._fast_norm = is_fast_norm() + + def forward(self, x): + if self._fast_norm: + x = fast_group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + else: + x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + x = self.drop(x) + x = self.act(x) + return x + + +class GroupNorm1Act(nn.GroupNorm): + def __init__( + self, + num_channels, + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): + super(GroupNorm1Act, self).__init__(1, num_channels, eps=eps, affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module if act_layer is not None and apply_act: @@ -204,8 +402,15 @@ class GroupNormAct(nn.GroupNorm): class LayerNormAct(nn.LayerNorm): def __init__( - self, normalization_shape: Union[int, List[int], torch.Size], eps=1e-5, affine=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + self, + normalization_shape: 
Union[int, List[int], torch.Size], + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): super(LayerNormAct, self).__init__(normalization_shape, eps=eps, elementwise_affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module @@ -228,8 +433,15 @@ class LayerNormAct(nn.LayerNorm): class LayerNormAct2d(nn.LayerNorm): def __init__( - self, num_channels, eps=1e-5, affine=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + self, + num_channels, + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): super(LayerNormAct2d, self).__init__(num_channels, eps=eps, elementwise_affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module diff --git a/timm/utils/model.py b/timm/utils/model.py index b95c4539..d74ee5b7 100644 --- a/timm/utils/model.py +++ b/timm/utils/model.py @@ -7,6 +7,8 @@ import fnmatch import torch from torchvision.ops.misc import FrozenBatchNorm2d +from timm.layers import BatchNormAct2d, SyncBatchNormAct, FrozenBatchNormAct2d,\ + freeze_batch_norm_2d, unfreeze_batch_norm_2d from .model_ema import ModelEma @@ -100,70 +102,6 @@ def extract_spp_stats( return hook.stats -def freeze_batch_norm_2d(module): - """ - Converts all `BatchNorm2d` and `SyncBatchNorm` layers of provided module into `FrozenBatchNorm2d`. If `module` is - itself an instance of either `BatchNorm2d` or `SyncBatchNorm`, it is converted into `FrozenBatchNorm2d` and - returned. Otherwise, the module is walked recursively and submodules are converted in place. - - Args: - module (torch.nn.Module): Any PyTorch module. - - Returns: - torch.nn.Module: Resulting module - - Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 - """ - res = module - if isinstance(module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): - res = FrozenBatchNorm2d(module.num_features) - res.num_features = module.num_features - res.affine = module.affine - if module.affine: - res.weight.data = module.weight.data.clone().detach() - res.bias.data = module.bias.data.clone().detach() - res.running_mean.data = module.running_mean.data - res.running_var.data = module.running_var.data - res.eps = module.eps - else: - for name, child in module.named_children(): - new_child = freeze_batch_norm_2d(child) - if new_child is not child: - res.add_module(name, new_child) - return res - - -def unfreeze_batch_norm_2d(module): - """ - Converts all `FrozenBatchNorm2d` layers of provided module into `BatchNorm2d`. If `module` is itself and instance - of `FrozenBatchNorm2d`, it is converted into `BatchNorm2d` and returned. Otherwise, the module is walked - recursively and submodules are converted in place. - - Args: - module (torch.nn.Module): Any PyTorch module. 
- - Returns: - torch.nn.Module: Resulting module - - Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 - """ - res = module - if isinstance(module, FrozenBatchNorm2d): - res = torch.nn.BatchNorm2d(module.num_features) - if module.affine: - res.weight.data = module.weight.data.clone().detach() - res.bias.data = module.bias.data.clone().detach() - res.running_mean.data = module.running_mean.data - res.running_var.data = module.running_var.data - res.eps = module.eps - else: - for name, child in module.named_children(): - new_child = unfreeze_batch_norm_2d(child) - if new_child is not child: - res.add_module(name, new_child) - return res - - def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, mode='freeze'): """ Freeze or unfreeze parameters of the specified modules and those of all their hierarchical descendants. This is @@ -179,7 +117,12 @@ def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, """ assert mode in ["freeze", "unfreeze"], '`mode` must be one of "freeze" or "unfreeze"' - if isinstance(root_module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + if isinstance(root_module, ( + torch.nn.modules.batchnorm.BatchNorm2d, + torch.nn.modules.batchnorm.SyncBatchNorm, + BatchNormAct2d, + SyncBatchNormAct, + )): # Raise assertion here because we can't convert it in place raise AssertionError( "You have provided a batch norm layer as the `root module`. Please use " @@ -213,13 +156,18 @@ def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, # It's possible that `m` is a type of BatchNorm in itself, in which case `unfreeze_batch_norm_2d` won't # convert it in place, but will return the converted result. In this case `res` holds the converted # result and we may try to re-assign the named module - if isinstance(m, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + if isinstance(m, ( + torch.nn.modules.batchnorm.BatchNorm2d, + torch.nn.modules.batchnorm.SyncBatchNorm, + BatchNormAct2d, + SyncBatchNormAct, + )): _add_submodule(root_module, n, res) # Unfreeze batch norm else: res = unfreeze_batch_norm_2d(m) # Ditto. 
See note above in mode == 'freeze' branch - if isinstance(m, FrozenBatchNorm2d): + if isinstance(m, (FrozenBatchNorm2d, FrozenBatchNormAct2d)): _add_submodule(root_module, n, res) From 65aea97067994110e8e0b28bb573970c44a67340 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Thu, 12 Jan 2023 21:31:44 -0800 Subject: [PATCH 19/34] Update tests.yml Attempt to work around flaky azure ubuntu mirrors --- .github/workflows/tests.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5690c88c..70352d0a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -40,9 +40,10 @@ jobs: - name: Install torch on ubuntu if: startsWith(matrix.os, 'ubuntu') run: | - pip install --no-cache-dir torch==${{ matrix.torch }}+cpu torchvision==${{ matrix.torchvision }}+cpu -f https://download.pytorch.org/whl/torch_stable.html + sudo sed -i 's/azure\.//' /etc/apt/sources.list sudo apt update sudo apt install -y google-perftools + pip install --no-cache-dir torch==${{ matrix.torch }}+cpu torchvision==${{ matrix.torchvision }}+cpu -f https://download.pytorch.org/whl/torch_stable.html - name: Install requirements run: | pip install -r requirements.txt From 42bd8f7bcbc7741cacc70864a54dc231116f5a05 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 14 Jan 2023 21:16:29 -0800 Subject: [PATCH 20/34] Add convnext_base CLIP image tower weights for fine-tuning / features --- timm/models/convnext.py | 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/timm/models/convnext.py b/timm/models/convnext.py index 26a3d560..b814119c 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -205,6 +205,7 @@ class ConvNeXt(nn.Module): use_grn=False, act_layer='gelu', norm_layer=None, + norm_eps=None, drop_rate=0., drop_path_rate=0., ): @@ -236,10 +237,15 @@ class ConvNeXt(nn.Module): if norm_layer is None: norm_layer = LayerNorm2d norm_layer_cl = norm_layer if conv_mlp else LayerNorm + if norm_eps is not None: + norm_layer = partial(norm_layer, eps=norm_eps) + norm_layer_cl = partial(norm_layer_cl, eps=norm_eps) else: assert conv_mlp,\ 'If a norm_layer is specified, conv MLP must be used so all norm expect rank-4, channels-first input' norm_layer_cl = norm_layer + if norm_eps is not None: + norm_layer_cl = partial(norm_layer_cl, eps=norm_eps) self.num_classes = num_classes self.drop_rate = drop_rate @@ -250,7 +256,7 @@ class ConvNeXt(nn.Module): # NOTE: this stem is a minimal form of ViT PatchEmbed, as used in SwinTransformer w/ patch_size = 4 self.stem = nn.Sequential( nn.Conv2d(in_chans, dims[0], kernel_size=patch_size, stride=patch_size, bias=conv_bias), - norm_layer(dims[0]) + norm_layer(dims[0]), ) stem_stride = patch_size else: @@ -376,7 +382,15 @@ def checkpoint_filter_fn(state_dict, model): return state_dict # non-FB checkpoint if 'model' in state_dict: state_dict = state_dict['model'] + out_dict = {} + if 'visual.trunk.stem.0.weight' in state_dict: + out_dict = {k.replace('visual.trunk.', ''): v for k, v in state_dict.items() if k.startswith('visual.trunk.')} + if 'visual.head.proj.weight' in state_dict: + out_dict['head.fc.weight'] = state_dict['visual.head.proj.weight'] + out_dict['head.fc.bias'] = torch.zeros(state_dict['visual.head.proj.weight'].shape[0]) + return out_dict + import re for k, v in state_dict.items(): k = k.replace('downsample_layers.0.', 'stem.') @@ -395,6 +409,7 @@ def checkpoint_filter_fn(state_dict, model): model_shape = 
model.state_dict()[k].shape v = v.reshape(model_shape) out_dict[k] = v + return out_dict @@ -685,6 +700,28 @@ default_cfgs = generate_default_cfgs({ num_classes=0), 'convnextv2_small.untrained': _cfg(), + + # CLIP based weights, original image tower weights and fine-tunes + 'convnext_base.clip_laion2b': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laion2b_augreg': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg', + hf_hub_filename='open_clip_pytorch_model.bin', + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona_320': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona_augreg_320': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg', + hf_hub_filename='open_clip_pytorch_model.bin', + input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), }) From e9aac412de82310e6905992e802b1ee4dc52b5d1 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 14 Jan 2023 22:53:56 -0800 Subject: [PATCH 21/34] Correct mean/std for CLIP convnexts --- timm/models/convnext.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/timm/models/convnext.py b/timm/models/convnext.py index b814119c..05e29a73 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -43,7 +43,7 @@ from functools import partial import torch import torch.nn as nn -from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD +from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD from timm.layers import trunc_normal_, SelectAdaptivePool2d, DropPath, Mlp, GlobalResponseNormMlp, \ LayerNorm2d, LayerNorm, create_conv2d, get_act_layer, make_divisible, to_ntuple from ._builder import build_model_with_cfg @@ -705,22 +705,27 @@ default_cfgs = generate_default_cfgs({ 'convnext_base.clip_laion2b': _cfg( hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laion2b_augreg': _cfg( hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg', hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona': _cfg( hf_hub_id='laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona_320': _cfg( hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona_augreg_320': _cfg( hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg', hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, 
std=OPENAI_CLIP_STD, input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), }) From 81ca323751505c7e4ee9b7561020fd3c6008d116 Mon Sep 17 00:00:00 2001 From: Fredo Guan Date: Sun, 15 Jan 2023 14:34:56 -0800 Subject: [PATCH 22/34] Davit update formatting and fix grad checkpointing (#7) fixed head to gap->norm->fc as per convnext, along with option for norm->gap->fc failed tests due to clip convnext models, davit tests passed --- .github/workflows/build_documentation.yml | 1 + .github/workflows/build_pr_documentation.yml | 1 + .github/workflows/tests.yml | 3 +- .gitignore | 10 + README.md | 78 +- benchmark.py | 26 +- docs/archived_changes.md | 40 + docs/changes.md | 218 ++- hfdocs/README.md | 14 + hfdocs/source/_toctree.yml | 295 ++-- hfdocs/source/archived_changes.mdx | 418 ----- hfdocs/source/changes.mdx | 187 --- hfdocs/source/hf_hub.mdx | 54 + hfdocs/source/index.mdx | 101 +- hfdocs/source/installation.mdx | 74 + hfdocs/source/model_pages.mdx | 5 - hfdocs/source/quickstart.mdx | 228 +++ hfdocs/source/reference/data.mdx | 9 + hfdocs/source/reference/models.mdx | 5 + hfdocs/source/reference/optimizers.mdx | 27 + hfdocs/source/reference/schedulers.mdx | 19 + hfdocs/source/scripts.mdx | 35 - ...param_examples.mdx => training_script.mdx} | 58 +- inference.py | 14 +- mkdocs.yml | 8 + requirements-docs.txt | 3 +- results/README.md | 2 +- results/results-imagenet-a-clean.csv | 890 +++++----- results/results-imagenet-a.csv | 1454 ++++++++-------- results/results-imagenet-r-clean.csv | 894 +++++----- results/results-imagenet-r.csv | 1452 ++++++++-------- results/results-imagenet-real.csv | 1454 ++++++++-------- results/results-imagenet.csv | 1394 +++++++++------- .../results-imagenetv2-matched-frequency.csv | 1454 ++++++++-------- results/results-sketch.csv | 1458 +++++++++-------- tests/test_models.py | 12 +- timm/data/auto_augment.py | 1 - timm/data/dataset_factory.py | 2 +- timm/data/readers/reader_factory.py | 2 +- timm/data/readers/reader_hfds.py | 20 +- timm/layers/__init__.py | 12 +- timm/layers/attention_pool2d.py | 2 +- timm/layers/grn.py | 39 + timm/layers/helpers.py | 2 +- timm/layers/mlp.py | 97 +- timm/layers/norm_act.py | 228 ++- timm/layers/patch_embed.py | 138 +- timm/layers/pos_embed.py | 235 +-- timm/layers/pos_embed_rel.py | 283 ++++ timm/layers/pos_embed_sincos.py | 219 +++ timm/models/_builder.py | 68 +- timm/models/_hub.py | 76 +- timm/models/_pretrained.py | 13 +- timm/models/_registry.py | 44 +- timm/models/beit.py | 36 +- timm/models/byobnet.py | 185 ++- timm/models/convnext.py | 437 ++++- timm/models/cspnet.py | 35 +- timm/models/davit.py | 67 +- timm/models/densenet.py | 47 +- timm/models/dpn.py | 71 +- timm/models/efficientnet.py | 1251 +++++++------- timm/models/layers/__init__.py | 1 + timm/models/maxxvit.py | 157 +- timm/models/mobilenetv3.py | 274 ++-- timm/models/mobilevit.py | 13 +- timm/models/nfnet.py | 156 +- timm/models/regnet.py | 105 +- timm/models/res2net.py | 49 +- timm/models/resnest.py | 60 +- timm/models/resnet.py | 574 ++++--- timm/models/resnetv2.py | 235 ++- timm/models/sknet.py | 48 +- timm/models/vision_transformer.py | 772 +++++---- timm/models/vision_transformer_hybrid.py | 143 +- timm/models/vision_transformer_relpos.py | 265 +-- timm/models/vovnet.py | 74 +- timm/utils/__init__.py | 2 +- timm/utils/misc.py | 14 + timm/utils/model.py | 82 +- timm/version.py | 2 +- train.py | 258 +-- validate.py | 20 +- 83 files changed, 11091 insertions(+), 8218 deletions(-) create mode 100644 hfdocs/README.md delete mode 100644 
hfdocs/source/archived_changes.mdx delete mode 100644 hfdocs/source/changes.mdx create mode 100644 hfdocs/source/hf_hub.mdx create mode 100644 hfdocs/source/installation.mdx delete mode 100644 hfdocs/source/model_pages.mdx create mode 100644 hfdocs/source/quickstart.mdx create mode 100644 hfdocs/source/reference/data.mdx create mode 100644 hfdocs/source/reference/models.mdx create mode 100644 hfdocs/source/reference/optimizers.mdx create mode 100644 hfdocs/source/reference/schedulers.mdx delete mode 100644 hfdocs/source/scripts.mdx rename hfdocs/source/{training_hparam_examples.mdx => training_script.mdx} (63%) create mode 100644 timm/layers/grn.py create mode 100644 timm/layers/pos_embed_rel.py create mode 100644 timm/layers/pos_embed_sincos.py diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml index b7d1f895..167b7d61 100644 --- a/.github/workflows/build_documentation.yml +++ b/.github/workflows/build_documentation.yml @@ -16,5 +16,6 @@ jobs: package_name: timm repo_owner: rwightman path_to_docs: pytorch-image-models/hfdocs/source + version_tag_suffix: "" secrets: token: ${{ secrets.HUGGINGFACE_PUSH }} \ No newline at end of file diff --git a/.github/workflows/build_pr_documentation.yml b/.github/workflows/build_pr_documentation.yml index 4b1b5c9d..2b44619f 100644 --- a/.github/workflows/build_pr_documentation.yml +++ b/.github/workflows/build_pr_documentation.yml @@ -17,3 +17,4 @@ jobs: package_name: timm repo_owner: rwightman path_to_docs: pytorch-image-models/hfdocs/source + version_tag_suffix: "" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5690c88c..70352d0a 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -40,9 +40,10 @@ jobs: - name: Install torch on ubuntu if: startsWith(matrix.os, 'ubuntu') run: | - pip install --no-cache-dir torch==${{ matrix.torch }}+cpu torchvision==${{ matrix.torchvision }}+cpu -f https://download.pytorch.org/whl/torch_stable.html + sudo sed -i 's/azure\.//' /etc/apt/sources.list sudo apt update sudo apt install -y google-perftools + pip install --no-cache-dir torch==${{ matrix.torch }}+cpu torchvision==${{ matrix.torchvision }}+cpu -f https://download.pytorch.org/whl/torch_stable.html - name: Install requirements run: | pip install -r requirements.txt diff --git a/.gitignore b/.gitignore index e5142b32..9f8f33d9 100644 --- a/.gitignore +++ b/.gitignore @@ -106,6 +106,16 @@ output/ *.tar *.pth *.pt +*.torch *.gz Untitled.ipynb Testing notebook.ipynb + +# Root dir exclusions +/*.csv +/*.yaml +/*.json +/*.jpg +/*.png +/*.zip +/*.tar.* \ No newline at end of file diff --git a/README.md b/README.md index bb6485c0..ee07c368 100644 --- a/README.md +++ b/README.md @@ -21,12 +21,36 @@ And a big thanks to all GitHub sponsors who helped with some of my costs before ## What's New -### 🤗 Survey: Feedback Appreciated 🤗 - -For a few months now, `timm` has been part of the Hugging Face ecosystem. Yearly, we survey users of our tools to see what we could do better, what we need to continue doing, or what we need to stop doing. - -If you have a couple of minutes and want to participate in shaping the future of the ecosystem, please share your thoughts: -[**hf.co/oss-survey**](https://hf.co/oss-survey) 🙏 +* ❗Updates after Oct 10, 2022 are available in 0.8.x pre-releases (`pip install --pre timm`) or cloning main❗ +* Stable releases are 0.6.x and available by normal pip install or clone from [0.6.x](https://github.com/rwightman/pytorch-image-models/tree/0.6.x) branch. 
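PATCH 18 above moves the batch-norm freezing helpers into `timm.layers` and extends them to the `NormAct` variants. A short usage sketch, assuming a 0.8.x pre-release where the names are exported as in the `timm/layers/__init__.py` hunk:

```python
import timm
from timm.layers import freeze_batch_norm_2d, unfreeze_batch_norm_2d

model = timm.create_model('resnet50', pretrained=False)

# Recursively swap BatchNorm2d / BatchNormAct2d (and their Sync variants) for
# frozen counterparts: running stats and affine params become fixed buffers.
model = freeze_batch_norm_2d(model)

# ... fine-tune with batch-norm statistics locked ...

# Swap the FrozenBatchNorm(Act)2d layers back to trainable ones when done.
model = unfreeze_batch_norm_2d(model)
```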
+ +### Jan 11, 2023 +* Update ConvNeXt ImageNet-12k pretrain series w/ two new fine-tuned weights (and pre FT `.in12k` tags) + * `convnext_nano.in12k_ft_in1k` - 82.3 @ 224, 82.9 @ 288 (previously released) + * `convnext_tiny.in12k_ft_in1k` - 84.2 @ 224, 84.5 @ 288 + * `convnext_small.in12k_ft_in1k` - 85.2 @ 224, 85.3 @ 288 + +### Jan 6, 2023 +* Finally got around to adding `--model-kwargs` and `--opt-kwargs` to scripts to pass through rare args directly to model classes from cmd line + * `train.py /imagenet --model resnet50 --amp --model-kwargs output_stride=16 act_layer=silu` + * `train.py /imagenet --model vit_base_patch16_clip_224 --img-size 240 --amp --model-kwargs img_size=240 patch_size=12` +* Cleanup some popular models to better support arg passthrough / merge with model configs, more to go. + +### Jan 5, 2023 +* ConvNeXt-V2 models and weights added to existing `convnext.py` + * Paper: [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](http://arxiv.org/abs/2301.00808) + * Reference impl: https://github.com/facebookresearch/ConvNeXt-V2 (NOTE: weights currently CC-BY-NC) + +### Dec 23, 2022 🎄☃ +* Add FlexiViT models and weights from https://github.com/google-research/big_vision (check out paper at https://arxiv.org/abs/2212.08013) + * NOTE currently resizing is static on model creation, on-the-fly dynamic / train patch size sampling is a WIP +* Many more models updated to multi-weight and downloadable via HF hub now (convnext, efficientnet, mobilenet, vision_transformer*, beit) +* More model pretrained tag and adjustments, some model names changed (working on deprecation translations, consider main branch DEV branch right now, use 0.6.x for stable use) +* More ImageNet-12k (subset of 22k) pretrain models popping up: + * `efficientnet_b5.in12k_ft_in1k` - 85.9 @ 448x448 + * `vit_medium_patch16_gap_384.in12k_ft_in1k` - 85.5 @ 384x384 + * `vit_medium_patch16_gap_256.in12k_ft_in1k` - 84.5 @ 256x256 + * `convnext_nano.in12k_ft_in1k` - 82.9 @ 288x288 ### Dec 8, 2022 * Add 'EVA l' to `vision_transformer.py`, MAE style ViT-L/14 MIM pretrain w/ EVA-CLIP targets, FT on ImageNet-1k (w/ ImageNet-22k intermediate for some) @@ -325,46 +349,6 @@ More models, more fixes * TinyNet models added by [rsomani95](https://github.com/rsomani95) * LCNet added via MobileNetV3 architecture -### Nov 22, 2021 -* A number of updated weights anew new model defs - * `eca_halonext26ts` - 79.5 @ 256 - * `resnet50_gn` (new) - 80.1 @ 224, 81.3 @ 288 - * `resnet50` - 80.7 @ 224, 80.9 @ 288 (trained at 176, not replacing current a1 weights as default since these don't scale as well to higher res, [weights](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h2_176-001a1197.pth)) - * `resnext50_32x4d` - 81.1 @ 224, 82.0 @ 288 - * `sebotnet33ts_256` (new) - 81.2 @ 224 - * `lamhalobotnet50ts_256` - 81.5 @ 256 - * `halonet50ts` - 81.7 @ 256 - * `halo2botnet50ts_256` - 82.0 @ 256 - * `resnet101` - 82.0 @ 224, 82.8 @ 288 - * `resnetv2_101` (new) - 82.1 @ 224, 83.0 @ 288 - * `resnet152` - 82.8 @ 224, 83.5 @ 288 - * `regnetz_d8` (new) - 83.5 @ 256, 84.0 @ 320 - * `regnetz_e8` (new) - 84.5 @ 256, 85.0 @ 320 -* `vit_base_patch8_224` (85.8 top-1) & `in21k` variant weights added thanks [Martins Bruveris](https://github.com/martinsbruveris) -* Groundwork in for FX feature extraction thanks to [Alexander Soare](https://github.com/alexander-soare) - * models updated for tracing compatibility (almost full support with some distlled transformer exceptions) - -### Oct 19, 
2021 -* ResNet strikes back (https://arxiv.org/abs/2110.00476) weights added, plus any extra training components used. Model weights and some more details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-rsb-weights) -* BCE loss and Repeated Augmentation support for RSB paper -* 4 series of ResNet based attention model experiments being added (implemented across byobnet.py/byoanet.py). These include all sorts of attention, from channel attn like SE, ECA to 2D QKV self-attention layers such as Halo, Bottlneck, Lambda. Details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* Working implementations of the following 2D self-attention modules (likely to be differences from paper or eventual official impl): - * Halo (https://arxiv.org/abs/2103.12731) - * Bottleneck Transformer (https://arxiv.org/abs/2101.11605) - * LambdaNetworks (https://arxiv.org/abs/2102.08602) -* A RegNetZ series of models with some attention experiments (being added to). These do not follow the paper (https://arxiv.org/abs/2103.06877) in any way other than block architecture, details of official models are not available. See more here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* ConvMixer (https://openreview.net/forum?id=TVHS5Y4dNvM), CrossVit (https://arxiv.org/abs/2103.14899), and BeiT (https://arxiv.org/abs/2106.08254) architectures + weights added -* freeze/unfreeze helpers by [Alexander Soare](https://github.com/alexander-soare) - -### Aug 18, 2021 -* Optimizer bonanza! - * Add LAMB and LARS optimizers, incl trust ratio clipping options. Tweaked to work properly in PyTorch XLA (tested on TPUs w/ `timm bits` [branch](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/timm/bits)) - * Add MADGRAD from FB research w/ a few tweaks (decoupled decay option, step handling that works with PyTorch XLA) - * Some cleanup on all optimizers and factory. No more `.data`, a bit more consistency, unit tests for all! - * SGDP and AdamP still won't work with PyTorch XLA but others should (have yet to test Adabelief, Adafactor, Adahessian myself). -* EfficientNet-V2 XL TF ported weights added, but they don't validate well in PyTorch (L is better). The pre-processing for the V2 TF training is a bit diff and the fine-tuned 21k -> 1k weights are very sensitive and less robust than the 1k weights. -* Added PyTorch trained EfficientNet-V2 'Tiny' w/ GlobalContext attn weights. Only .1-.2 top-1 better than the SE so more of a curiosity for those interested. - ## Introduction Py**T**orch **Im**age **M**odels (`timm`) is a collection of image models, layers, utilities, optimizers, schedulers, data-loaders / augmentations, and reference training / validation scripts that aim to pull together a wide variety of SOTA models with ability to reproduce ImageNet training results. 
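The `--model-kwargs` passthrough from the Jan 6 notes has a direct Python-side equivalent: extra keyword arguments to `create_model` are forwarded to the model class. A sketch mirroring the README's own CLI examples (per-model argument support varies):

```python
import timm

# CLI: train.py /imagenet --model resnet50 --model-kwargs output_stride=16 act_layer=silu
model = timm.create_model('resnet50', output_stride=16, act_layer='silu')

# CLI: --model vit_base_patch16_clip_224 --model-kwargs img_size=240 patch_size=12
model = timm.create_model('vit_base_patch16_clip_224', img_size=240, patch_size=12)
```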
@@ -385,6 +369,7 @@ A full version of the list below with source links can be found in the [document * CoaT (Co-Scale Conv-Attentional Image Transformers) - https://arxiv.org/abs/2104.06399 * CoAtNet (Convolution and Attention) - https://arxiv.org/abs/2106.04803 * ConvNeXt - https://arxiv.org/abs/2201.03545 +* ConvNeXt-V2 - http://arxiv.org/abs/2301.00808 * ConViT (Soft Convolutional Inductive Biases Vision Transformers)- https://arxiv.org/abs/2103.10697 * CspNet (Cross-Stage Partial Networks) - https://arxiv.org/abs/1911.11929 * DeiT - https://arxiv.org/abs/2012.12877 @@ -407,6 +392,7 @@ A full version of the list below with source links can be found in the [document * Single-Path NAS - https://arxiv.org/abs/1904.02877 * TinyNet - https://arxiv.org/abs/2010.14819 * EVA - https://arxiv.org/abs/2211.07636 +* FlexiViT - https://arxiv.org/abs/2212.08013 * GCViT (Global Context Vision Transformer) - https://arxiv.org/abs/2206.09959 * GhostNet - https://arxiv.org/abs/1911.11907 * gMLP - https://arxiv.org/abs/2105.08050 diff --git a/benchmark.py b/benchmark.py index 58435ff8..2cce3e2c 100755 --- a/benchmark.py +++ b/benchmark.py @@ -22,7 +22,7 @@ from timm.data import resolve_data_config from timm.layers import set_fast_norm from timm.models import create_model, is_model, list_models from timm.optim import create_optimizer_v2 -from timm.utils import setup_default_logging, set_jit_fuser, decay_batch_step, check_batch_size_retry +from timm.utils import setup_default_logging, set_jit_fuser, decay_batch_step, check_batch_size_retry, ParseKwargs has_apex = False try: @@ -108,12 +108,15 @@ parser.add_argument('--grad-checkpointing', action='store_true', default=False, help='Enable gradient checkpointing through model blocks/stages') parser.add_argument('--amp', action='store_true', default=False, help='use PyTorch Native AMP for mixed precision training. Overrides --precision arg.') +parser.add_argument('--amp-dtype', default='float16', type=str, + help='lower precision AMP dtype (default: float16). Overrides --precision arg if args.amp True.') parser.add_argument('--precision', default='float32', type=str, help='Numeric precision. One of (amp, float32, float16, bfloat16, tf32)') parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')") parser.add_argument('--fast-norm', default=False, action='store_true', help='enable experimental fast-norm') +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) # codegen (model compilation) options scripting_group = parser.add_mutually_exclusive_group() @@ -124,7 +127,6 @@ scripting_group.add_argument('--torchcompile', nargs='?', type=str, default=None scripting_group.add_argument('--aot-autograd', default=False, action='store_true', help="Enable AOT Autograd optimization.") - # train optimizer parameters parser.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER', help='Optimizer (default: "sgd"') @@ -168,19 +170,21 @@ def count_params(model: nn.Module): def resolve_precision(precision: str): - assert precision in ('amp', 'float16', 'bfloat16', 'float32') - use_amp = False + assert precision in ('amp', 'amp_bfloat16', 'float16', 'bfloat16', 'float32') + amp_dtype = None # amp disabled model_dtype = torch.float32 data_dtype = torch.float32 if precision == 'amp': - use_amp = True + amp_dtype = torch.float16 + elif precision == 'amp_bfloat16': + amp_dtype = torch.bfloat16 elif precision == 'float16': model_dtype = torch.float16 data_dtype = torch.float16 elif precision == 'bfloat16': model_dtype = torch.bfloat16 data_dtype = torch.bfloat16 - return use_amp, model_dtype, data_dtype + return amp_dtype, model_dtype, data_dtype def profile_deepspeed(model, input_size=(3, 224, 224), batch_size=1, detailed=False): @@ -228,9 +232,12 @@ class BenchmarkRunner: self.model_name = model_name self.detail = detail self.device = device - self.use_amp, self.model_dtype, self.data_dtype = resolve_precision(precision) + self.amp_dtype, self.model_dtype, self.data_dtype = resolve_precision(precision) self.channels_last = kwargs.pop('channels_last', False) - self.amp_autocast = partial(torch.cuda.amp.autocast, dtype=torch.float16) if self.use_amp else suppress + if self.amp_dtype is not None: + self.amp_autocast = partial(torch.cuda.amp.autocast, dtype=self.amp_dtype) + else: + self.amp_autocast = suppress if fuser: set_jit_fuser(fuser) @@ -243,6 +250,7 @@ class BenchmarkRunner: drop_rate=kwargs.pop('drop', 0.), drop_path_rate=kwargs.pop('drop_path', None), drop_block_rate=kwargs.pop('drop_block', None), + **kwargs.pop('model_kwargs', {}), ) self.model.to( device=self.device, @@ -560,7 +568,7 @@ def _try_run( def benchmark(args): if args.amp: _logger.warning("Overriding precision to 'amp' since --amp flag set.") - args.precision = 'amp' + args.precision = 'amp' if args.amp_dtype == 'float16' else '_'.join(['amp', args.amp_dtype]) _logger.info(f'Benchmarking in {args.precision} precision. ' f'{"NHWC" if args.channels_last else "NCHW"} layout. 
' f'torchscript {"enabled" if args.torchscript else "disabled"}')
diff --git a/docs/archived_changes.md b/docs/archived_changes.md
index 9c2b62b6..35f84bc4 100644
--- a/docs/archived_changes.md
+++ b/docs/archived_changes.md
@@ -1,5 +1,45 @@ # Archived Changes
+### Nov 22, 2021
+* A number of updated weights and new model defs
+ * `eca_halonext26ts` - 79.5 @ 256
+ * `resnet50_gn` (new) - 80.1 @ 224, 81.3 @ 288
+ * `resnet50` - 80.7 @ 224, 80.9 @ 288 (trained at 176, not replacing current a1 weights as default since these don't scale as well to higher res, [weights](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h2_176-001a1197.pth))
+ * `resnext50_32x4d` - 81.1 @ 224, 82.0 @ 288
+ * `sebotnet33ts_256` (new) - 81.2 @ 224
+ * `lamhalobotnet50ts_256` - 81.5 @ 256
+ * `halonet50ts` - 81.7 @ 256
+ * `halo2botnet50ts_256` - 82.0 @ 256
+ * `resnet101` - 82.0 @ 224, 82.8 @ 288
+ * `resnetv2_101` (new) - 82.1 @ 224, 83.0 @ 288
+ * `resnet152` - 82.8 @ 224, 83.5 @ 288
+ * `regnetz_d8` (new) - 83.5 @ 256, 84.0 @ 320
+ * `regnetz_e8` (new) - 84.5 @ 256, 85.0 @ 320
+* `vit_base_patch8_224` (85.8 top-1) & `in21k` variant weights added thanks [Martins Bruveris](https://github.com/martinsbruveris)
+* Groundwork in for FX feature extraction thanks to [Alexander Soare](https://github.com/alexander-soare)
+ * models updated for tracing compatibility (almost full support with some distilled transformer exceptions)
+
+### Oct 19, 2021
+* ResNet strikes back (https://arxiv.org/abs/2110.00476) weights added, plus any extra training components used. Model weights and some more details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-rsb-weights)
+* BCE loss and Repeated Augmentation support for RSB paper
+* 4 series of ResNet based attention model experiments being added (implemented across byobnet.py/byoanet.py). These include all sorts of attention, from channel attn like SE, ECA to 2D QKV self-attention layers such as Halo, Bottleneck, Lambda. Details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights)
+* Working implementations of the following 2D self-attention modules (likely to be differences from paper or eventual official impl):
+ * Halo (https://arxiv.org/abs/2103.12731)
+ * Bottleneck Transformer (https://arxiv.org/abs/2101.11605)
+ * LambdaNetworks (https://arxiv.org/abs/2102.08602)
+* A RegNetZ series of models with some attention experiments (being added to). These do not follow the paper (https://arxiv.org/abs/2103.06877) in any way other than block architecture, details of official models are not available. See more here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights)
+* ConvMixer (https://openreview.net/forum?id=TVHS5Y4dNvM), CrossVit (https://arxiv.org/abs/2103.14899), and BeiT (https://arxiv.org/abs/2106.08254) architectures + weights added
+* freeze/unfreeze helpers by [Alexander Soare](https://github.com/alexander-soare)
+
+### Aug 18, 2021
+* Optimizer bonanza!
+ * Add LAMB and LARS optimizers, incl trust ratio clipping options. Tweaked to work properly in PyTorch XLA (tested on TPUs w/ `timm bits` [branch](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/timm/bits))
+ * Add MADGRAD from FB research w/ a few tweaks (decoupled decay option, step handling that works with PyTorch XLA)
+ * Some cleanup on all optimizers and factory. No more `.data`, a bit more consistency, unit tests for all!
+ * SGDP and AdamP still won't work with PyTorch XLA but others should (have yet to test Adabelief, Adafactor, Adahessian myself). +* EfficientNet-V2 XL TF ported weights added, but they don't validate well in PyTorch (L is better). The pre-processing for the V2 TF training is a bit diff and the fine-tuned 21k -> 1k weights are very sensitive and less robust than the 1k weights. +* Added PyTorch trained EfficientNet-V2 'Tiny' w/ GlobalContext attn weights. Only .1-.2 top-1 better than the SE so more of a curiosity for those interested. + ### July 12, 2021 * Add XCiT models from [official facebook impl](https://github.com/facebookresearch/xcit). Contributed by [Alexander Soare](https://github.com/alexander-soare) diff --git a/docs/changes.md b/docs/changes.md index 800dc443..edf88c62 100644 --- a/docs/changes.md +++ b/docs/changes.md @@ -1,4 +1,183 @@ # Recent Changes +### Jan 5, 2023 +* ConvNeXt-V2 models and weights added to existing `convnext.py` + * Paper: [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](http://arxiv.org/abs/2301.00808) + * Reference impl: https://github.com/facebookresearch/ConvNeXt-V2 (NOTE: weights currently CC-BY-NC) + +### Dec 23, 2022 🎄☃ +* Add FlexiViT models and weights from https://github.com/google-research/big_vision (check out paper at https://arxiv.org/abs/2212.08013) + * NOTE currently resizing is static on model creation, on-the-fly dynamic / train patch size sampling is a WIP +* Many more models updated to multi-weight and downloadable via HF hub now (convnext, efficientnet, mobilenet, vision_transformer*, beit) +* More model pretrained tag and adjustments, some model names changed (working on deprecation translations, consider main branch DEV branch right now, use 0.6.x for stable use) +* More ImageNet-12k (subset of 22k) pretrain models popping up: + * `efficientnet_b5.in12k_ft_in1k` - 85.9 @ 448x448 + * `vit_medium_patch16_gap_384.in12k_ft_in1k` - 85.5 @ 384x384 + * `vit_medium_patch16_gap_256.in12k_ft_in1k` - 84.5 @ 256x256 + * `convnext_nano.in12k_ft_in1k` - 82.9 @ 288x288 + +### Dec 8, 2022 +* Add 'EVA l' to `vision_transformer.py`, MAE style ViT-L/14 MIM pretrain w/ EVA-CLIP targets, FT on ImageNet-1k (w/ ImageNet-22k intermediate for some) + * original source: https://github.com/baaivision/EVA + +| model | top1 | param_count | gmac | macts | hub | +|:------------------------------------------|-----:|------------:|------:|------:|:----------------------------------------| +| eva_large_patch14_336.in22k_ft_in22k_in1k | 89.2 | 304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) | +| eva_large_patch14_336.in22k_ft_in1k | 88.7 | 304.5 | 191.1 | 270.2 | [link](https://huggingface.co/BAAI/EVA) | +| eva_large_patch14_196.in22k_ft_in22k_in1k | 88.6 | 304.1 | 61.6 | 63.5 | [link](https://huggingface.co/BAAI/EVA) | +| eva_large_patch14_196.in22k_ft_in1k | 87.9 | 304.1 | 61.6 | 63.5 | [link](https://huggingface.co/BAAI/EVA) | + +### Dec 6, 2022 +* Add 'EVA g', BEiT style ViT-g/14 model weights w/ both MIM pretrain and CLIP pretrain to `beit.py`. 
+ * original source: https://github.com/baaivision/EVA + * paper: https://arxiv.org/abs/2211.07636 + +| model | top1 | param_count | gmac | macts | hub | +|:-----------------------------------------|-------:|--------------:|-------:|--------:|:----------------------------------------| +| eva_giant_patch14_560.m30m_ft_in22k_in1k | 89.8 | 1014.4 | 1906.8 | 2577.2 | [link](https://huggingface.co/BAAI/EVA) | +| eva_giant_patch14_336.m30m_ft_in22k_in1k | 89.6 | 1013 | 620.6 | 550.7 | [link](https://huggingface.co/BAAI/EVA) | +| eva_giant_patch14_336.clip_ft_in1k | 89.4 | 1013 | 620.6 | 550.7 | [link](https://huggingface.co/BAAI/EVA) | +| eva_giant_patch14_224.clip_ft_in1k | 89.1 | 1012.6 | 267.2 | 192.6 | [link](https://huggingface.co/BAAI/EVA) | + +### Dec 5, 2022 + +* Pre-release (`0.8.0dev0`) of multi-weight support (`model_arch.pretrained_tag`). Install with `pip install --pre timm` + * vision_transformer, maxvit, convnext are the first three model impl w/ support + * model names are changing with this (previous _21k, etc. fn will merge), still sorting out deprecation handling + * bugs are likely, but I need feedback so please try it out + * if stability is needed, please use 0.6.x pypi releases or clone from [0.6.x branch](https://github.com/rwightman/pytorch-image-models/tree/0.6.x) +* Support for PyTorch 2.0 compile is added in train/validate/inference/benchmark, use `--torchcompile` argument +* Inference script allows more control over output, select k for top-class index + prob json, csv or parquet output +* Add a full set of fine-tuned CLIP image tower weights from both LAION-2B and original OpenAI CLIP models + +| model | top1 | param_count | gmac | macts | hub | +|:-------------------------------------------------|-------:|--------------:|-------:|--------:|:-------------------------------------------------------------------------------------| +| vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k | 88.6 | 632.5 | 391 | 407.5 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k) | +| vit_large_patch14_clip_336.openai_ft_in12k_in1k | 88.3 | 304.5 | 191.1 | 270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.openai_ft_in12k_in1k) | +| vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k | 88.2 | 632 | 167.4 | 139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k) | +| vit_large_patch14_clip_336.laion2b_ft_in12k_in1k | 88.2 | 304.5 | 191.1 | 270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in12k_in1k) | +| vit_large_patch14_clip_224.openai_ft_in12k_in1k | 88.2 | 304.2 | 81.1 | 88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in12k_in1k) | +| vit_large_patch14_clip_224.laion2b_ft_in12k_in1k | 87.9 | 304.2 | 81.1 | 88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in12k_in1k) | +| vit_large_patch14_clip_224.openai_ft_in1k | 87.9 | 304.2 | 81.1 | 88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.openai_ft_in1k) | +| vit_large_patch14_clip_336.laion2b_ft_in1k | 87.9 | 304.5 | 191.1 | 270.2 | [link](https://huggingface.co/timm/vit_large_patch14_clip_336.laion2b_ft_in1k) | +| vit_huge_patch14_clip_224.laion2b_ft_in1k | 87.6 | 632 | 167.4 | 139.4 | [link](https://huggingface.co/timm/vit_huge_patch14_clip_224.laion2b_ft_in1k) | +| vit_large_patch14_clip_224.laion2b_ft_in1k | 87.3 | 304.2 | 81.1 | 88.8 | [link](https://huggingface.co/timm/vit_large_patch14_clip_224.laion2b_ft_in1k) | +| 
vit_base_patch16_clip_384.laion2b_ft_in12k_in1k | 87.2 | 86.9 | 55.5 | 101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in12k_in1k) | +| vit_base_patch16_clip_384.openai_ft_in12k_in1k | 87 | 86.9 | 55.5 | 101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in12k_in1k) | +| vit_base_patch16_clip_384.laion2b_ft_in1k | 86.6 | 86.9 | 55.5 | 101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.laion2b_ft_in1k) | +| vit_base_patch16_clip_384.openai_ft_in1k | 86.2 | 86.9 | 55.5 | 101.6 | [link](https://huggingface.co/timm/vit_base_patch16_clip_384.openai_ft_in1k) | +| vit_base_patch16_clip_224.laion2b_ft_in12k_in1k | 86.2 | 86.6 | 17.6 | 23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in12k_in1k) | +| vit_base_patch16_clip_224.openai_ft_in12k_in1k | 85.9 | 86.6 | 17.6 | 23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in12k_in1k) | +| vit_base_patch32_clip_448.laion2b_ft_in12k_in1k | 85.8 | 88.3 | 17.9 | 23.9 | [link](https://huggingface.co/timm/vit_base_patch32_clip_448.laion2b_ft_in12k_in1k) | +| vit_base_patch16_clip_224.laion2b_ft_in1k | 85.5 | 86.6 | 17.6 | 23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.laion2b_ft_in1k) | +| vit_base_patch32_clip_384.laion2b_ft_in12k_in1k | 85.4 | 88.3 | 13.1 | 16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.laion2b_ft_in12k_in1k) | +| vit_base_patch16_clip_224.openai_ft_in1k | 85.3 | 86.6 | 17.6 | 23.9 | [link](https://huggingface.co/timm/vit_base_patch16_clip_224.openai_ft_in1k) | +| vit_base_patch32_clip_384.openai_ft_in12k_in1k | 85.2 | 88.3 | 13.1 | 16.5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_384.openai_ft_in12k_in1k) | +| vit_base_patch32_clip_224.laion2b_ft_in12k_in1k | 83.3 | 88.2 | 4.4 | 5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in12k_in1k) | +| vit_base_patch32_clip_224.laion2b_ft_in1k | 82.6 | 88.2 | 4.4 | 5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.laion2b_ft_in1k) | +| vit_base_patch32_clip_224.openai_ft_in1k | 81.9 | 88.2 | 4.4 | 5 | [link](https://huggingface.co/timm/vit_base_patch32_clip_224.openai_ft_in1k) | + +* Port of MaxViT Tensorflow Weights from official impl at https://github.com/google-research/maxvit + * There was larger than expected drops for the upscaled 384/512 in21k fine-tune weights, possible detail missing, but the 21k FT did seem sensitive to small preprocessing + +| model | top1 | param_count | gmac | macts | hub | +|:-----------------------------------|-------:|--------------:|-------:|--------:|:-----------------------------------------------------------------------| +| maxvit_xlarge_tf_512.in21k_ft_in1k | 88.5 | 475.8 | 534.1 | 1413.2 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_512.in21k_ft_in1k) | +| maxvit_xlarge_tf_384.in21k_ft_in1k | 88.3 | 475.3 | 292.8 | 668.8 | [link](https://huggingface.co/timm/maxvit_xlarge_tf_384.in21k_ft_in1k) | +| maxvit_base_tf_512.in21k_ft_in1k | 88.2 | 119.9 | 138 | 704 | [link](https://huggingface.co/timm/maxvit_base_tf_512.in21k_ft_in1k) | +| maxvit_large_tf_512.in21k_ft_in1k | 88 | 212.3 | 244.8 | 942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in21k_ft_in1k) | +| maxvit_large_tf_384.in21k_ft_in1k | 88 | 212 | 132.6 | 445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in21k_ft_in1k) | +| maxvit_base_tf_384.in21k_ft_in1k | 87.9 | 119.6 | 73.8 | 332.9 | 
[link](https://huggingface.co/timm/maxvit_base_tf_384.in21k_ft_in1k) |
+| maxvit_base_tf_512.in1k | 86.6 | 119.9 | 138 | 704 | [link](https://huggingface.co/timm/maxvit_base_tf_512.in1k) |
+| maxvit_large_tf_512.in1k | 86.5 | 212.3 | 244.8 | 942.2 | [link](https://huggingface.co/timm/maxvit_large_tf_512.in1k) |
+| maxvit_base_tf_384.in1k | 86.3 | 119.6 | 73.8 | 332.9 | [link](https://huggingface.co/timm/maxvit_base_tf_384.in1k) |
+| maxvit_large_tf_384.in1k | 86.2 | 212 | 132.6 | 445.8 | [link](https://huggingface.co/timm/maxvit_large_tf_384.in1k) |
+| maxvit_small_tf_512.in1k | 86.1 | 69.1 | 67.3 | 383.8 | [link](https://huggingface.co/timm/maxvit_small_tf_512.in1k) |
+| maxvit_tiny_tf_512.in1k | 85.7 | 31 | 33.5 | 257.6 | [link](https://huggingface.co/timm/maxvit_tiny_tf_512.in1k) |
+| maxvit_small_tf_384.in1k | 85.5 | 69 | 35.9 | 183.6 | [link](https://huggingface.co/timm/maxvit_small_tf_384.in1k) |
+| maxvit_tiny_tf_384.in1k | 85.1 | 31 | 17.5 | 123.4 | [link](https://huggingface.co/timm/maxvit_tiny_tf_384.in1k) |
+| maxvit_large_tf_224.in1k | 84.9 | 211.8 | 43.7 | 127.4 | [link](https://huggingface.co/timm/maxvit_large_tf_224.in1k) |
+| maxvit_base_tf_224.in1k | 84.9 | 119.5 | 24 | 95 | [link](https://huggingface.co/timm/maxvit_base_tf_224.in1k) |
+| maxvit_small_tf_224.in1k | 84.4 | 68.9 | 11.7 | 53.2 | [link](https://huggingface.co/timm/maxvit_small_tf_224.in1k) |
+| maxvit_tiny_tf_224.in1k | 83.4 | 30.9 | 5.6 | 35.8 | [link](https://huggingface.co/timm/maxvit_tiny_tf_224.in1k) |
+
+### Oct 15, 2022
+* Train and validation script enhancements
+* Non-GPU (ie CPU) device support
+* SLURM compatibility for train script
+* HF datasets support (via ReaderHfds)
+* TFDS/WDS dataloading improvements (sample padding/wrap for distributed use fixed wrt sample count estimate)
+* in_chans !=3 support for scripts / loader
+* Adan optimizer
+* Can enable per-step LR scheduling via args
+* Dataset 'parsers' renamed to 'readers', more descriptive of purpose
+* AMP args changed, APEX via `--amp-impl apex`, bfloat16 supported via `--amp-dtype bfloat16`
+* main branch switched to 0.7.x version, 0.6.x forked for stable release of weight only adds
+* master -> main branch rename
+
+### Oct 10, 2022
+* More weights in `maxxvit` series, incl first ConvNeXt block based `coatnext` and `maxxvit` experiments:
+ * `coatnext_nano_rw_224` - 82.0 @ 224 (G) -- (uses ConvNeXt conv block, no BatchNorm)
+ * `maxxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.7 @ 320 (G) (uses ConvNeXt conv block, no BN)
+ * `maxvit_rmlp_small_rw_224` - 84.5 @ 224, 85.1 @ 320 (G)
+ * `maxxvit_rmlp_small_rw_256` - 84.6 @ 256, 84.9 @ 288 (G) -- could be trained better, hparams need tuning (uses ConvNeXt block, no BN)
+ * `coatnet_rmlp_2_rw_224` - 84.6 @ 224, 85 @ 320 (T)
+ * NOTE: official MaxVit weights (in1k) have been released at https://github.com/google-research/maxvit -- some extra work is needed to port and adapt since my impl was created independently of theirs and has a few small differences + the whole TF same padding fun.
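
Given the note above about ported TF weights being sensitive to small preprocessing differences, a minimal usage sketch (assuming a `timm` build where the tagged weights in the table are published) is to let the pretrained cfg drive the eval transform rather than hand-rolling one:

```python
import timm
from timm.data import resolve_data_config, create_transform

# Resolve preprocessing (input size, interpolation, mean/std, crop pct) from
# the weight's pretrained cfg instead of guessing.
model = timm.create_model('maxvit_base_tf_384.in21k_ft_in1k', pretrained=True)
config = resolve_data_config({}, model=model)
transform = create_transform(**config)
print(config)
```
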
+ +### Sept 23, 2022 +* LAION-2B CLIP image towers supported as pretrained backbones for fine-tune or features (no classifier) + * vit_base_patch32_224_clip_laion2b + * vit_large_patch14_224_clip_laion2b + * vit_huge_patch14_224_clip_laion2b + * vit_giant_patch14_224_clip_laion2b + +### Sept 7, 2022 +* Hugging Face [`timm` docs](https://huggingface.co/docs/hub/timm) home now exists, look for more here in the future +* Add BEiT-v2 weights for base and large 224x224 models from https://github.com/microsoft/unilm/tree/master/beit2 +* Add more weights in `maxxvit` series incl a `pico` (7.5M params, 1.9 GMACs), two `tiny` variants: + * `maxvit_rmlp_pico_rw_256` - 80.5 @ 256, 81.3 @ 320 (T) + * `maxvit_tiny_rw_224` - 83.5 @ 224 (G) + * `maxvit_rmlp_tiny_rw_256` - 84.2 @ 256, 84.8 @ 320 (T) + +### Aug 29, 2022 +* MaxVit window size scales with img_size by default. Add new RelPosMlp MaxViT weight that leverages this: + * `maxvit_rmlp_nano_rw_256` - 83.0 @ 256, 83.6 @ 320 (T) + +### Aug 26, 2022 +* CoAtNet (https://arxiv.org/abs/2106.04803) and MaxVit (https://arxiv.org/abs/2204.01697) `timm` original models + * both found in [`maxxvit.py`](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/maxxvit.py) model def, contains numerous experiments outside scope of original papers + * an unfinished Tensorflow version from MaxVit authors can be found https://github.com/google-research/maxvit +* Initial CoAtNet and MaxVit timm pretrained weights (working on more): + * `coatnet_nano_rw_224` - 81.7 @ 224 (T) + * `coatnet_rmlp_nano_rw_224` - 82.0 @ 224, 82.8 @ 320 (T) + * `coatnet_0_rw_224` - 82.4 (T) -- NOTE timm '0' coatnets have 2 more 3rd stage blocks + * `coatnet_bn_0_rw_224` - 82.4 (T) + * `maxvit_nano_rw_256` - 82.9 @ 256 (T) + * `coatnet_rmlp_1_rw_224` - 83.4 @ 224, 84 @ 320 (T) + * `coatnet_1_rw_224` - 83.6 @ 224 (G) + * (T) = TPU trained with `bits_and_tpu` branch training code, (G) = GPU trained +* GCVit (weights adapted from https://github.com/NVlabs/GCVit, code 100% `timm` re-write for license purposes) +* MViT-V2 (multi-scale vit, adapted from https://github.com/facebookresearch/mvit) +* EfficientFormer (adapted from https://github.com/snap-research/EfficientFormer) +* PyramidVisionTransformer-V2 (adapted from https://github.com/whai362/PVT) +* 'Fast Norm' support for LayerNorm and GroupNorm that avoids float32 upcast w/ AMP (uses APEX LN if available for further boost) + + +### Aug 15, 2022 +* ConvNeXt atto weights added + * `convnext_atto` - 75.7 @ 224, 77.0 @ 288 + * `convnext_atto_ols` - 75.9 @ 224, 77.2 @ 288 + +### Aug 5, 2022 +* More custom ConvNeXt smaller model defs with weights + * `convnext_femto` - 77.5 @ 224, 78.7 @ 288 + * `convnext_femto_ols` - 77.9 @ 224, 78.9 @ 288 + * `convnext_pico` - 79.5 @ 224, 80.4 @ 288 + * `convnext_pico_ols` - 79.5 @ 224, 80.5 @ 288 + * `convnext_nano_ols` - 80.9 @ 224, 81.6 @ 288 +* Updated EdgeNeXt to improve ONNX export, add new base variant and weights from original (https://github.com/mmaaz60/EdgeNeXt) + +### July 28, 2022 +* Add freshly minted DeiT-III Medium (width=512, depth=12, num_heads=8) model weights. Thanks [Hugo Touvron](https://github.com/TouvronHugo)! ### July 27, 2022 * All runtime benchmark and validation result csv files are up-to-date! 
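
With so many weights landing under new names in the entries above, a minimal sketch for discovering what a given install actually provides (using the long-standing `list_models` filter API; the wildcard patterns here are illustrative):

```python
import timm

# pretrained=True restricts the listing to models with downloadable weights.
print(timm.list_models('coatnet*', pretrained=True))
print(timm.list_models('maxvit*', pretrained=True))
```
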
@@ -133,42 +312,3 @@ More models, more fixes * TinyNet models added by [rsomani95](https://github.com/rsomani95) * LCNet added via MobileNetV3 architecture -### Nov 22, 2021 -* A number of updated weights anew new model defs - * `eca_halonext26ts` - 79.5 @ 256 - * `resnet50_gn` (new) - 80.1 @ 224, 81.3 @ 288 - * `resnet50` - 80.7 @ 224, 80.9 @ 288 (trained at 176, not replacing current a1 weights as default since these don't scale as well to higher res, [weights](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h2_176-001a1197.pth)) - * `resnext50_32x4d` - 81.1 @ 224, 82.0 @ 288 - * `sebotnet33ts_256` (new) - 81.2 @ 224 - * `lamhalobotnet50ts_256` - 81.5 @ 256 - * `halonet50ts` - 81.7 @ 256 - * `halo2botnet50ts_256` - 82.0 @ 256 - * `resnet101` - 82.0 @ 224, 82.8 @ 288 - * `resnetv2_101` (new) - 82.1 @ 224, 83.0 @ 288 - * `resnet152` - 82.8 @ 224, 83.5 @ 288 - * `regnetz_d8` (new) - 83.5 @ 256, 84.0 @ 320 - * `regnetz_e8` (new) - 84.5 @ 256, 85.0 @ 320 -* `vit_base_patch8_224` (85.8 top-1) & `in21k` variant weights added thanks [Martins Bruveris](https://github.com/martinsbruveris) -* Groundwork in for FX feature extraction thanks to [Alexander Soare](https://github.com/alexander-soare) - * models updated for tracing compatibility (almost full support with some distlled transformer exceptions) - -### Oct 19, 2021 -* ResNet strikes back (https://arxiv.org/abs/2110.00476) weights added, plus any extra training components used. Model weights and some more details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-rsb-weights) -* BCE loss and Repeated Augmentation support for RSB paper -* 4 series of ResNet based attention model experiments being added (implemented across byobnet.py/byoanet.py). These include all sorts of attention, from channel attn like SE, ECA to 2D QKV self-attention layers such as Halo, Bottlneck, Lambda. Details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* Working implementations of the following 2D self-attention modules (likely to be differences from paper or eventual official impl): - * Halo (https://arxiv.org/abs/2103.12731) - * Bottleneck Transformer (https://arxiv.org/abs/2101.11605) - * LambdaNetworks (https://arxiv.org/abs/2102.08602) -* A RegNetZ series of models with some attention experiments (being added to). These do not follow the paper (https://arxiv.org/abs/2103.06877) in any way other than block architecture, details of official models are not available. See more here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* ConvMixer (https://openreview.net/forum?id=TVHS5Y4dNvM), CrossVit (https://arxiv.org/abs/2103.14899), and BeiT (https://arxiv.org/abs/2106.08254) architectures + weights added -* freeze/unfreeze helpers by [Alexander Soare](https://github.com/alexander-soare) - -### Aug 18, 2021 -* Optimizer bonanza! - * Add LAMB and LARS optimizers, incl trust ratio clipping options. Tweaked to work properly in PyTorch XLA (tested on TPUs w/ `timm bits` [branch](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/timm/bits)) - * Add MADGRAD from FB research w/ a few tweaks (decoupled decay option, step handling that works with PyTorch XLA) - * Some cleanup on all optimizers and factory. No more `.data`, a bit more consistency, unit tests for all! - * SGDP and AdamP still won't work with PyTorch XLA but others should (have yet to test Adabelief, Adafactor, Adahessian myself). 
-* EfficientNet-V2 XL TF ported weights added, but they don't validate well in PyTorch (L is better). The pre-processing for the V2 TF training is a bit diff and the fine-tuned 21k -> 1k weights are very sensitive and less robust than the 1k weights. -* Added PyTorch trained EfficientNet-V2 'Tiny' w/ GlobalContext attn weights. Only .1-.2 top-1 better than the SE so more of a curiosity for those interested. diff --git a/hfdocs/README.md b/hfdocs/README.md new file mode 100644 index 00000000..a0178812 --- /dev/null +++ b/hfdocs/README.md @@ -0,0 +1,14 @@ +# Hugging Face Timm Docs + +## Getting Started + +``` +pip install git+https://github.com/huggingface/doc-builder.git@main#egg=hf-doc-builder +pip install watchdog black +``` + +## Preview the Docs Locally + +``` +doc-builder preview timm hfdocs/source +``` diff --git a/hfdocs/source/_toctree.yml b/hfdocs/source/_toctree.yml index 3fa994b6..9af48fdc 100644 --- a/hfdocs/source/_toctree.yml +++ b/hfdocs/source/_toctree.yml @@ -1,149 +1,160 @@ - sections: - local: index - title: Pytorch Image Models (timm) + title: Home + - local: quickstart + title: Quickstart + - local: installation + title: Installation + title: Get started +- sections: + - local: feature_extraction + title: Using Pretrained Models as Feature Extractors + - local: training_script + title: Training With The Official Training Script + - local: hf_hub + title: Share and Load Models from the 🤗 Hugging Face Hub + title: Tutorials +- sections: - local: models title: Model Summaries - local: results title: Results - - local: scripts - title: Scripts - - local: training_hparam_examples - title: Training Examples - - local: feature_extraction - title: Feature Extraction - - local: changes - title: Recent Changes - - local: archived_changes - title: Archived Changes - - local: model_pages - title: Model Pages - isExpanded: false - sections: - - local: models/adversarial-inception-v3 - title: Adversarial Inception v3 - - local: models/advprop - title: AdvProp (EfficientNet) - - local: models/big-transfer - title: Big Transfer (BiT) - - local: models/csp-darknet - title: CSP-DarkNet - - local: models/csp-resnet - title: CSP-ResNet - - local: models/csp-resnext - title: CSP-ResNeXt - - local: models/densenet - title: DenseNet - - local: models/dla - title: Deep Layer Aggregation - - local: models/dpn - title: Dual Path Network (DPN) - - local: models/ecaresnet - title: ECA-ResNet - - local: models/efficientnet - title: EfficientNet - - local: models/efficientnet-pruned - title: EfficientNet (Knapsack Pruned) - - local: models/ensemble-adversarial - title: Ensemble Adversarial Inception ResNet v2 - - local: models/ese-vovnet - title: ESE-VoVNet - - local: models/fbnet - title: FBNet - - local: models/gloun-inception-v3 - title: (Gluon) Inception v3 - - local: models/gloun-resnet - title: (Gluon) ResNet - - local: models/gloun-resnext - title: (Gluon) ResNeXt - - local: models/gloun-senet - title: (Gluon) SENet - - local: models/gloun-seresnext - title: (Gluon) SE-ResNeXt - - local: models/gloun-xception - title: (Gluon) Xception - - local: models/hrnet - title: HRNet - - local: models/ig-resnext - title: Instagram ResNeXt WSL - - local: models/inception-resnet-v2 - title: Inception ResNet v2 - - local: models/inception-v3 - title: Inception v3 - - local: models/inception-v4 - title: Inception v4 - - local: models/legacy-se-resnet - title: (Legacy) SE-ResNet - - local: models/legacy-se-resnext - title: (Legacy) SE-ResNeXt - - local: models/legacy-senet - title: (Legacy) SENet - - local: 
models/mixnet - title: MixNet - - local: models/mnasnet - title: MnasNet - - local: models/mobilenet-v2 - title: MobileNet v2 - - local: models/mobilenet-v3 - title: MobileNet v3 - - local: models/nasnet - title: NASNet - - local: models/noisy-student - title: Noisy Student (EfficientNet) - - local: models/pnasnet - title: PNASNet - - local: models/regnetx - title: RegNetX - - local: models/regnety - title: RegNetY - - local: models/res2net - title: Res2Net - - local: models/res2next - title: Res2NeXt - - local: models/resnest - title: ResNeSt - - local: models/resnet - title: ResNet - - local: models/resnet-d - title: ResNet-D - - local: models/resnext - title: ResNeXt - - local: models/rexnet - title: RexNet - - local: models/se-resnet - title: SE-ResNet - - local: models/selecsls - title: SelecSLS - - local: models/seresnext - title: SE-ResNeXt - - local: models/skresnet - title: SK-ResNet - - local: models/skresnext - title: SK-ResNeXt - - local: models/spnasnet - title: SPNASNet - - local: models/ssl-resnet - title: SSL ResNet - - local: models/swsl-resnet - title: SWSL ResNet - - local: models/swsl-resnext - title: SWSL ResNeXt - - local: models/tf-efficientnet - title: (Tensorflow) EfficientNet - - local: models/tf-efficientnet-condconv - title: (Tensorflow) EfficientNet CondConv - - local: models/tf-efficientnet-lite - title: (Tensorflow) EfficientNet Lite - - local: models/tf-inception-v3 - title: (Tensorflow) Inception v3 - - local: models/tf-mixnet - title: (Tensorflow) MixNet - - local: models/tf-mobilenet-v3 - title: (Tensorflow) MobileNet v3 - - local: models/tresnet - title: TResNet - - local: models/wide-resnet - title: Wide ResNet - - local: models/xception - title: Xception - title: Get started + - local: models/adversarial-inception-v3 + title: Adversarial Inception v3 + - local: models/advprop + title: AdvProp (EfficientNet) + - local: models/big-transfer + title: Big Transfer (BiT) + - local: models/csp-darknet + title: CSP-DarkNet + - local: models/csp-resnet + title: CSP-ResNet + - local: models/csp-resnext + title: CSP-ResNeXt + - local: models/densenet + title: DenseNet + - local: models/dla + title: Deep Layer Aggregation + - local: models/dpn + title: Dual Path Network (DPN) + - local: models/ecaresnet + title: ECA-ResNet + - local: models/efficientnet + title: EfficientNet + - local: models/efficientnet-pruned + title: EfficientNet (Knapsack Pruned) + - local: models/ensemble-adversarial + title: Ensemble Adversarial Inception ResNet v2 + - local: models/ese-vovnet + title: ESE-VoVNet + - local: models/fbnet + title: FBNet + - local: models/gloun-inception-v3 + title: (Gluon) Inception v3 + - local: models/gloun-resnet + title: (Gluon) ResNet + - local: models/gloun-resnext + title: (Gluon) ResNeXt + - local: models/gloun-senet + title: (Gluon) SENet + - local: models/gloun-seresnext + title: (Gluon) SE-ResNeXt + - local: models/gloun-xception + title: (Gluon) Xception + - local: models/hrnet + title: HRNet + - local: models/ig-resnext + title: Instagram ResNeXt WSL + - local: models/inception-resnet-v2 + title: Inception ResNet v2 + - local: models/inception-v3 + title: Inception v3 + - local: models/inception-v4 + title: Inception v4 + - local: models/legacy-se-resnet + title: (Legacy) SE-ResNet + - local: models/legacy-se-resnext + title: (Legacy) SE-ResNeXt + - local: models/legacy-senet + title: (Legacy) SENet + - local: models/mixnet + title: MixNet + - local: models/mnasnet + title: MnasNet + - local: models/mobilenet-v2 + title: MobileNet v2 + - local: 
models/mobilenet-v3 + title: MobileNet v3 + - local: models/nasnet + title: NASNet + - local: models/noisy-student + title: Noisy Student (EfficientNet) + - local: models/pnasnet + title: PNASNet + - local: models/regnetx + title: RegNetX + - local: models/regnety + title: RegNetY + - local: models/res2net + title: Res2Net + - local: models/res2next + title: Res2NeXt + - local: models/resnest + title: ResNeSt + - local: models/resnet + title: ResNet + - local: models/resnet-d + title: ResNet-D + - local: models/resnext + title: ResNeXt + - local: models/rexnet + title: RexNet + - local: models/se-resnet + title: SE-ResNet + - local: models/selecsls + title: SelecSLS + - local: models/seresnext + title: SE-ResNeXt + - local: models/skresnet + title: SK-ResNet + - local: models/skresnext + title: SK-ResNeXt + - local: models/spnasnet + title: SPNASNet + - local: models/ssl-resnet + title: SSL ResNet + - local: models/swsl-resnet + title: SWSL ResNet + - local: models/swsl-resnext + title: SWSL ResNeXt + - local: models/tf-efficientnet + title: (Tensorflow) EfficientNet + - local: models/tf-efficientnet-condconv + title: (Tensorflow) EfficientNet CondConv + - local: models/tf-efficientnet-lite + title: (Tensorflow) EfficientNet Lite + - local: models/tf-inception-v3 + title: (Tensorflow) Inception v3 + - local: models/tf-mixnet + title: (Tensorflow) MixNet + - local: models/tf-mobilenet-v3 + title: (Tensorflow) MobileNet v3 + - local: models/tresnet + title: TResNet + - local: models/wide-resnet + title: Wide ResNet + - local: models/xception + title: Xception + title: Model Pages + isExpanded: false +- sections: + - local: reference/models + title: Models + - local: reference/data + title: Data + - local: reference/optimizers + title: Optimizers + - local: reference/schedulers + title: Learning Rate Schedulers + title: Reference diff --git a/hfdocs/source/archived_changes.mdx b/hfdocs/source/archived_changes.mdx deleted file mode 100644 index 25778562..00000000 --- a/hfdocs/source/archived_changes.mdx +++ /dev/null @@ -1,418 +0,0 @@ -# Archived Changes - -### July 12, 2021 - -* Add XCiT models from [official facebook impl](https://github.com/facebookresearch/xcit). Contributed by [Alexander Soare](https://github.com/alexander-soare) - -### July 5-9, 2021 - -* Add `efficientnetv2_rw_t` weights, a custom 'tiny' 13.6M param variant that is a bit better than (non NoisyStudent) B3 models. Both faster and better accuracy (at same or lower res) - * top-1 82.34 @ 288x288 and 82.54 @ 320x320 -* Add [SAM pretrained](https://arxiv.org/abs/2106.01548) in1k weight for ViT B/16 (`vit_base_patch16_sam_224`) and B/32 (`vit_base_patch32_sam_224`) models. -* Add 'Aggregating Nested Transformer' (NesT) w/ weights converted from official [Flax impl](https://github.com/google-research/nested-transformer). Contributed by [Alexander Soare](https://github.com/alexander-soare). - * `jx_nest_base` - 83.534, `jx_nest_small` - 83.120, `jx_nest_tiny` - 81.426 - -### June 23, 2021 - -* Reproduce gMLP model training, `gmlp_s16_224` trained to 79.6 top-1, matching [paper](https://arxiv.org/abs/2105.08050). Hparams for this and other recent MLP training [here](https://gist.github.com/rwightman/d6c264a9001f9167e06c209f630b2cc6) - -### June 20, 2021 - -* Release Vision Transformer 'AugReg' weights from [How to train your ViT? 
Data, Augmentation, and Regularization in Vision Transformers](https://arxiv.org/abs/2106.10270) - * .npz weight loading support added, can load any of the 50K+ weights from the [AugReg series](https://console.cloud.google.com/storage/browser/vit_models/augreg) - * See [example notebook](https://colab.research.google.com/github/google-research/vision_transformer/blob/master/vit_jax_augreg.ipynb) from [official impl](https://github.com/google-research/vision_transformer/) for navigating the augreg weights - * Replaced all default weights w/ best AugReg variant (if possible). All AugReg 21k classifiers work. - * Highlights: `vit_large_patch16_384` (87.1 top-1), `vit_large_r50_s32_384` (86.2 top-1), `vit_base_patch16_384` (86.0 top-1) - * `vit_deit_*` renamed to just `deit_*` - * Remove my old small model, replace with DeiT compatible small w/ AugReg weights -* Add 1st training of my `gmixer_24_224` MLP /w GLU, 78.1 top-1 w/ 25M params. -* Add weights from official ResMLP release (https://github.com/facebookresearch/deit) -* Add `eca_nfnet_l2` weights from my 'lightweight' series. 84.7 top-1 at 384x384. -* Add distilled BiT 50x1 student and 152x2 Teacher weights from [Knowledge distillation: A good teacher is patient and consistent](https://arxiv.org/abs/2106.05237) -* NFNets and ResNetV2-BiT models work w/ Pytorch XLA now - * weight standardization uses F.batch_norm instead of std_mean (std_mean wasn't lowered) - * eps values adjusted, will be slight differences but should be quite close -* Improve test coverage and classifier interface of non-conv (vision transformer and mlp) models -* Cleanup a few classifier / flatten details for models w/ conv classifiers or early global pool -* Please report any regressions, this PR touched quite a few models. - -### June 8, 2021 - -* Add first ResMLP weights, trained in PyTorch XLA on TPU-VM w/ my XLA branch. 24 block variant, 79.2 top-1. -* Add ResNet51-Q model w/ pretrained weights at 82.36 top-1. - * NFNet inspired block layout with quad layer stem and no maxpool - * Same param count (35.7M) and throughput as ResNetRS-50 but +1.5 top-1 @ 224x224 and +2.5 top-1 at 288x288 - -### May 25, 2021 - -* Add LeViT, Visformer, Convit (PR by Aman Arora), Twins (PR by paper authors) transformer models -* Cleanup input_size/img_size override handling and testing for all vision transformer models -* Add `efficientnetv2_rw_m` model and weights (started training before official code). 84.8 top-1, 53M params. - -### May 14, 2021 - -* Add EfficientNet-V2 official model defs w/ ported weights from official [Tensorflow/Keras](https://github.com/google/automl/tree/master/efficientnetv2) impl. - * 1k trained variants: `tf_efficientnetv2_s/m/l` - * 21k trained variants: `tf_efficientnetv2_s/m/l_in21k` - * 21k pretrained -> 1k fine-tuned: `tf_efficientnetv2_s/m/l_in21ft1k` - * v2 models w/ v1 scaling: `tf_efficientnetv2_b0` through `b3` - * Rename my prev V2 guess `efficientnet_v2s` -> `efficientnetv2_rw_s` - * Some blank `efficientnetv2_*` models in-place for future native PyTorch training - -### May 5, 2021 - -* Add MLP-Mixer models and port pretrained weights from [Google JAX impl](https://github.com/google-research/vision_transformer/tree/linen) -* Add CaiT models and pretrained weights from [FB](https://github.com/facebookresearch/deit) -* Add ResNet-RS models and weights from [TF](https://github.com/tensorflow/tpu/tree/master/models/official/resnet/resnet_rs). Thanks [Aman Arora](https://github.com/amaarora) -* Add CoaT models and weights. 
Thanks [Mohammed Rizin](https://github.com/morizin) -* Add new ImageNet-21k weights & finetuned weights for TResNet, MobileNet-V3, ViT models. Thanks [mrT](https://github.com/mrT23) -* Add GhostNet models and weights. Thanks [Kai Han](https://github.com/iamhankai) -* Update ByoaNet attention modles - * Improve SA module inits - * Hack together experimental stand-alone Swin based attn module and `swinnet` - * Consistent '26t' model defs for experiments. -* Add improved Efficientnet-V2S (prelim model def) weights. 83.8 top-1. -* WandB logging support - -### April 13, 2021 - -* Add Swin Transformer models and weights from https://github.com/microsoft/Swin-Transformer - -### April 12, 2021 - -* Add ECA-NFNet-L1 (slimmed down F1 w/ SiLU, 41M params) trained with this code. 84% top-1 @ 320x320. Trained at 256x256. -* Add EfficientNet-V2S model (unverified model definition) weights. 83.3 top-1 @ 288x288. Only trained single res 224. Working on progressive training. -* Add ByoaNet model definition (Bring-your-own-attention) w/ SelfAttention block and corresponding SA/SA-like modules and model defs - * Lambda Networks - https://arxiv.org/abs/2102.08602 - * Bottleneck Transformers - https://arxiv.org/abs/2101.11605 - * Halo Nets - https://arxiv.org/abs/2103.12731 -* Adabelief optimizer contributed by Juntang Zhuang - -### April 1, 2021 - -* Add snazzy `benchmark.py` script for bulk `timm` model benchmarking of train and/or inference -* Add Pooling-based Vision Transformer (PiT) models (from https://github.com/naver-ai/pit) - * Merged distilled variant into main for torchscript compatibility - * Some `timm` cleanup/style tweaks and weights have hub download support -* Cleanup Vision Transformer (ViT) models - * Merge distilled (DeiT) model into main so that torchscript can work - * Support updated weight init (defaults to old still) that closer matches original JAX impl (possibly better training from scratch) - * Separate hybrid model defs into different file and add several new model defs to fiddle with, support patch_size != 1 for hybrids - * Fix fine-tuning num_class changes (PiT and ViT) and pos_embed resizing (Vit) with distilled variants - * nn.Sequential for block stack (does not break downstream compat) -* TnT (Transformer-in-Transformer) models contributed by author (from https://gitee.com/mindspore/mindspore/tree/master/model_zoo/research/cv/TNT) -* Add RegNetY-160 weights from DeiT teacher model -* Add new NFNet-L0 w/ SE attn (rename `nfnet_l0b`->`nfnet_l0`) weights 82.75 top-1 @ 288x288 -* Some fixes/improvements for TFDS dataset wrapper - -### March 7, 2021 - -* First 0.4.x PyPi release w/ NFNets (& related), ByoB (GPU-Efficient, RepVGG, etc). -* Change feature extraction for pre-activation nets (NFNets, ResNetV2) to return features before activation. - -### Feb 18, 2021 - -* Add pretrained weights and model variants for NFNet-F* models from [DeepMind Haiku impl](https://github.com/deepmind/deepmind-research/tree/master/nfnets). - * Models are prefixed with `dm_`. They require SAME padding conv, skipinit enabled, and activation gains applied in act fn. - * These models are big, expect to run out of GPU memory. With the GELU activiation + other options, they are roughly 1/2 the inference speed of my SiLU PyTorch optimized `s` variants. - * Original model results are based on pre-processing that is not the same as all other models so you'll see different results in the results csv (once updated). 
- * Matching the original pre-processing as closely as possible I get these results: - * `dm_nfnet_f6` - 86.352 - * `dm_nfnet_f5` - 86.100 - * `dm_nfnet_f4` - 85.834 - * `dm_nfnet_f3` - 85.676 - * `dm_nfnet_f2` - 85.178 - * `dm_nfnet_f1` - 84.696 - * `dm_nfnet_f0` - 83.464 - -### Feb 16, 2021 - -* Add Adaptive Gradient Clipping (AGC) as per https://arxiv.org/abs/2102.06171. Integrated w/ PyTorch gradient clipping via mode arg that defaults to prev 'norm' mode. For backward arg compat, clip-grad arg must be specified to enable when using train.py. - * AGC w/ default clipping factor `--clip-grad .01 --clip-mode agc` - * PyTorch global norm of 1.0 (old behaviour, always norm), `--clip-grad 1.0` - * PyTorch value clipping of 10, `--clip-grad 10. --clip-mode value` - * AGC performance is definitely sensitive to the clipping factor. More experimentation needed to determine good values for smaller batch sizes and optimizers besides those in paper. So far I've found .001-.005 is necessary for stable RMSProp training w/ NFNet/NF-ResNet. - -### Feb 12, 2021 - -* Update Normalization-Free nets to include new NFNet-F (https://arxiv.org/abs/2102.06171) model defs - -### Feb 10, 2021 - -* More model archs, incl a flexible ByobNet backbone ('Bring-your-own-blocks') - * GPU-Efficient-Networks (https://github.com/idstcv/GPU-Efficient-Networks), impl in `byobnet.py` - * RepVGG (https://github.com/DingXiaoH/RepVGG), impl in `byobnet.py` - * classic VGG (from torchvision, impl in `vgg`) -* Refinements to normalizer layer arg handling and normalizer+act layer handling in some models -* Default AMP mode changed to native PyTorch AMP instead of APEX. Issues not being fixed with APEX. Native works with `--channels-last` and `--torchscript` model training, APEX does not. -* Fix a few bugs introduced since last pypi release - -### Feb 8, 2021 - -* Add several ResNet weights with ECA attention. 26t & 50t trained @ 256, test @ 320. 269d train @ 256, fine-tune @320, test @ 352. - * `ecaresnet26t` - 79.88 top-1 @ 320x320, 79.08 @ 256x256 - * `ecaresnet50t` - 82.35 top-1 @ 320x320, 81.52 @ 256x256 - * `ecaresnet269d` - 84.93 top-1 @ 352x352, 84.87 @ 320x320 -* Remove separate tiered (`t`) vs tiered_narrow (`tn`) ResNet model defs, all `tn` changed to `t` and `t` models removed (`seresnext26t_32x4d` only model w/ weights that was removed). -* Support model default_cfgs with separate train vs test resolution `test_input_size` and remove extra `_320` suffix ResNet model defs that were just for test. 
- -### Jan 30, 2021 - -* Add initial "Normalization Free" NF-RegNet-B* and NF-ResNet model definitions based on [paper](https://arxiv.org/abs/2101.08692) - -### Jan 25, 2021 - -* Add ResNetV2 Big Transfer (BiT) models w/ ImageNet-1k and 21k weights from https://github.com/google-research/big_transfer -* Add official R50+ViT-B/16 hybrid models + weights from https://github.com/google-research/vision_transformer -* ImageNet-21k ViT weights are added w/ model defs and representation layer (pre logits) support - * NOTE: ImageNet-21k classifier heads were zero'd in original weights, they are only useful for transfer learning -* Add model defs and weights for DeiT Vision Transformer models from https://github.com/facebookresearch/deit -* Refactor dataset classes into ImageDataset/IterableImageDataset + dataset specific parser classes -* Add Tensorflow-Datasets (TFDS) wrapper to allow use of TFDS image classification sets with train script - * Ex: `train.py /data/tfds --dataset tfds/oxford_iiit_pet --val-split test --model resnet50 -b 256 --amp --num-classes 37 --opt adamw --lr 3e-4 --weight-decay .001 --pretrained -j 2` -* Add improved .tar dataset parser that reads images from .tar, folder of .tar files, or .tar within .tar - * Run validation on full ImageNet-21k directly from tar w/ BiT model: `validate.py /data/fall11_whole.tar --model resnetv2_50x1_bitm_in21k --amp` -* Models in this update should be stable w/ possible exception of ViT/BiT, possibility of some regressions with train/val scripts and dataset handling - -### Jan 3, 2021 - -* Add SE-ResNet-152D weights - * 256x256 val, 0.94 crop top-1 - 83.75 - * 320x320 val, 1.0 crop - 84.36 -* Update results files - -### Dec 18, 2020 - -* Add ResNet-101D, ResNet-152D, and ResNet-200D weights trained @ 256x256 - * 256x256 val, 0.94 crop (top-1) - 101D (82.33), 152D (83.08), 200D (83.25) - * 288x288 val, 1.0 crop - 101D (82.64), 152D (83.48), 200D (83.76) - * 320x320 val, 1.0 crop - 101D (83.00), 152D (83.66), 200D (84.01) - -### Dec 7, 2020 - -* Simplify EMA module (ModelEmaV2), compatible with fully torchscripted models -* Misc fixes for SiLU ONNX export, default_cfg missing from Feature extraction models, Linear layer w/ AMP + torchscript -* PyPi release @ 0.3.2 (needed by EfficientDet) - - -### Oct 30, 2020 - -* Test with PyTorch 1.7 and fix a small top-n metric view vs reshape issue. -* Convert newly added 224x224 Vision Transformer weights from official JAX repo. 81.8 top-1 for B/16, 83.1 L/16. -* Support PyTorch 1.7 optimized, native SiLU (aka Swish) activation. Add mapping to 'silu' name, custom swish will eventually be deprecated. -* Fix regression for loading pretrained classifier via direct model entrypoint functions. Didn't impact create_model() factory usage. -* PyPi release @ 0.3.0 version! - -### Oct 26, 2020 - -* Update Vision Transformer models to be compatible with official code release at https://github.com/google-research/vision_transformer -* Add Vision Transformer weights (ImageNet-21k pretrain) for 384x384 base and large models converted from official jax impl - * ViT-B/16 - 84.2 - * ViT-B/32 - 81.7 - * ViT-L/16 - 85.2 - * ViT-L/32 - 81.5 - -### Oct 21, 2020 - -* Weights added for Vision Transformer (ViT) models. 77.86 top-1 for 'small' and 79.35 for 'base'. Thanks to [Christof](https://www.kaggle.com/christofhenkel) for training the base model w/ lots of GPUs. - -### Oct 13, 2020 - -* Initial impl of Vision Transformer models. Both patch and hybrid (CNN backbone) variants. Currently trying to train... 
-* Adafactor and AdaHessian (FP32 only, no AMP) optimizers -* EdgeTPU-M (`efficientnet_em`) model trained in PyTorch, 79.3 top-1 -* Pip release, doc updates pending a few more changes... - -### Sept 18, 2020 - -* New ResNet 'D' weights. 72.7 (top-1) ResNet-18-D, 77.1 ResNet-34-D, 80.5 ResNet-50-D -* Added a few untrained defs for other ResNet models (66D, 101D, 152D, 200/200D) - -### Sept 3, 2020 - -* New weights - * Wide-ResNet50 - 81.5 top-1 (vs 78.5 torchvision) - * SEResNeXt50-32x4d - 81.3 top-1 (vs 79.1 cadene) -* Support for native Torch AMP and channels_last memory format added to train/validate scripts (`--channels-last`, `--native-amp` vs `--apex-amp`) -* Models tested with channels_last on latest NGC 20.08 container. AdaptiveAvgPool in attn layers changed to mean((2,3)) to work around bug with NHWC kernel. - -### Aug 12, 2020 - -* New/updated weights from training experiments - * EfficientNet-B3 - 82.1 top-1 (vs 81.6 for official with AA and 81.9 for AdvProp) - * RegNetY-3.2GF - 82.0 top-1 (78.9 from official ver) - * CSPResNet50 - 79.6 top-1 (76.6 from official ver) -* Add CutMix integrated w/ Mixup. See [pull request](https://github.com/rwightman/pytorch-image-models/pull/218) for some usage examples -* Some fixes for using pretrained weights with `in_chans` != 3 on several models. - -### Aug 5, 2020 - -Universal feature extraction, new models, new weights, new test sets. -* All models support the `features_only=True` argument for `create_model` call to return a network that extracts feature maps from the deepest layer at each stride. -* New models - * CSPResNet, CSPResNeXt, CSPDarkNet, DarkNet - * ReXNet - * (Modified Aligned) Xception41/65/71 (a proper port of TF models) -* New trained weights - * SEResNet50 - 80.3 top-1 - * CSPDarkNet53 - 80.1 top-1 - * CSPResNeXt50 - 80.0 top-1 - * DPN68b - 79.2 top-1 - * EfficientNet-Lite0 (non-TF ver) - 75.5 (submitted by [@hal-314](https://github.com/hal-314)) -* Add 'real' labels for ImageNet and ImageNet-Renditions test set, see [`results/README.md`](results/README.md) -* Test set ranking/top-n diff script by [@KushajveerSingh](https://github.com/KushajveerSingh) -* Train script and loader/transform tweaks to punch through more aug arguments -* README and documentation overhaul. 
See initial (WIP) documentation at https://rwightman.github.io/pytorch-image-models/ -* adamp and sgdp optimizers added by [@hellbell](https://github.com/hellbell) - -### June 11, 2020 - -Bunch of changes: -* DenseNet models updated with memory efficient addition from torchvision (fixed a bug), blur pooling and deep stem additions -* VoVNet V1 and V2 models added, 39 V2 variant (ese_vovnet_39b) trained to 79.3 top-1 -* Activation factory added along with new activations: - * select act at model creation time for more flexibility in using activations compatible with scripting or tracing (ONNX export) - * hard_mish (experimental) added with memory-efficient grad, along with ME hard_swish - * context mgr for setting exportable/scriptable/no_jit states -* Norm + Activation combo layers added with initial trial support in DenseNet and VoVNet along with impl of EvoNorm and InplaceAbn wrapper that fit the interface -* Torchscript works for all but two of the model types as long as using Pytorch 1.5+, tests added for this -* Some import cleanup and classifier reset changes, all models will have classifier reset to nn.Identity on reset_classifer(0) call -* Prep for 0.1.28 pip release - -### May 12, 2020 - -* Add ResNeSt models (code adapted from https://github.com/zhanghang1989/ResNeSt, paper https://arxiv.org/abs/2004.08955)) - -### May 3, 2020 - -* Pruned EfficientNet B1, B2, and B3 (https://arxiv.org/abs/2002.08258) contributed by [Yonathan Aflalo](https://github.com/yoniaflalo) - -### May 1, 2020 - -* Merged a number of execellent contributions in the ResNet model family over the past month - * BlurPool2D and resnetblur models initiated by [Chris Ha](https://github.com/VRandme), I trained resnetblur50 to 79.3. - * TResNet models and SpaceToDepth, AntiAliasDownsampleLayer layers by [mrT23](https://github.com/mrT23) - * ecaresnet (50d, 101d, light) models and two pruned variants using pruning as per (https://arxiv.org/abs/2002.08258) by [Yonathan Aflalo](https://github.com/yoniaflalo) -* 200 pretrained models in total now with updated results csv in results folder - -### April 5, 2020 - -* Add some newly trained MobileNet-V2 models trained with latest h-params, rand augment. They compare quite favourably to EfficientNet-Lite - * 3.5M param MobileNet-V2 100 @ 73% - * 4.5M param MobileNet-V2 110d @ 75% - * 6.1M param MobileNet-V2 140 @ 76.5% - * 5.8M param MobileNet-V2 120d @ 77.3% - -### March 18, 2020 - -* Add EfficientNet-Lite models w/ weights ported from [Tensorflow TPU](https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/lite) -* Add RandAugment trained ResNeXt-50 32x4d weights with 79.8 top-1. Trained by [Andrew Lavin](https://github.com/andravin) (see Training section for hparams) - -### April 5, 2020 - -* Add some newly trained MobileNet-V2 models trained with latest h-params, rand augment. They compare quite favourably to EfficientNet-Lite - * 3.5M param MobileNet-V2 100 @ 73% - * 4.5M param MobileNet-V2 110d @ 75% - * 6.1M param MobileNet-V2 140 @ 76.5% - * 5.8M param MobileNet-V2 120d @ 77.3% - -### March 18, 2020 - -* Add EfficientNet-Lite models w/ weights ported from [Tensorflow TPU](https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/lite) -* Add RandAugment trained ResNeXt-50 32x4d weights with 79.8 top-1. 
- -### Feb 29, 2020 - -* New MobileNet-V3 Large weights trained from scratch with this code to 75.77% top-1 -* IMPORTANT CHANGE - default weight init changed for all MobilenetV3 / EfficientNet / related models - * overall results similar to a bit better training from scratch on a few smaller models tried - * performance early in training seems consistently improved but less difference by end - * set `fix_group_fanout=False` in `_init_weight_goog` fn if you need to reproduce past behaviour -* Experimental LR noise feature added, applies a random perturbation to the LR each epoch within a specified range of training - -### Feb 18, 2020 - -* Big refactor of model layers and addition of several attention mechanisms. Several additions motivated by 'Compounding the Performance Improvements...' (https://arxiv.org/abs/2001.06268): - * Move layer/module impl into `layers` subfolder/module of `models` and organize in a more granular fashion - * ResNet downsample paths now properly support dilation (output stride != 32) for avg_pool ('D' variant) and 3x3 (SENets) networks - * Add Selective Kernel Nets on top of ResNet base, pretrained weights - * skresnet18 - 73% top-1 - * skresnet34 - 76.9% top-1 - * skresnext50_32x4d (equiv to SKNet50) - 80.2% top-1 - * ECA and CECA (circular padding) attention layer contributed by [Chris Ha](https://github.com/VRandme) - * CBAM attention experiment (not the best results so far, may remove) - * Attention factory to allow dynamically selecting one of SE, ECA, CBAM in the `.se` position for all ResNets - * Add DropBlock and DropPath (formerly DropConnect for EfficientNet/MobileNetv3) support to all ResNet variants -* Full dataset results updated that incl NoisyStudent weights and 2 of the 3 SK weights - -### Feb 12, 2020 - -* Add EfficientNet-L2 and B0-B7 NoisyStudent weights ported from [Tensorflow TPU](https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet) - -### Feb 6, 2020 - -* Add RandAugment trained EfficientNet-ES (EdgeTPU-Small) weights with 78.1 top-1. Trained by [Andrew Lavin](https://github.com/andravin) (see Training section for hparams) - -### Feb 1/2, 2020 - -* Port new EfficientNet-B8 (RandAugment) weights, these are different from the B8 AdvProp, different input normalization. -* Update results csv files on all models for ImageNet validation and three other test sets -* Push PyPi package update - -### Jan 31, 2020 - -* Update ResNet50 weights with a new 79.038 result from further JSD / AugMix experiments. Full command line for reproduction in training section below. - -### Jan 11/12, 2020 - -* Master may be a bit unstable wrt training, these changes have been tested but not all combos -* Implementations of AugMix added to existing RA and AA. Including numerous supporting pieces like JSD loss (Jensen-Shannon divergence + CE), and AugMixDataset -* SplitBatchNorm adaptation layer added for implementing Auxiliary BN as per AdvProp paper -* ResNet-50 AugMix trained model w/ 79% top-1 added -* `seresnext26tn_32x4d` - 77.99 top-1, 93.75 top-5 added to tiered experiment, higher img/s than 't' and 'd' - -### Jan 3, 2020 - -* Add RandAugment trained EfficientNet-B0 weight with 77.7 top-1. Trained by [Michael Klachko](https://github.com/michaelklachko) with this code and recent hparams (see Training section) -* Add `avg_checkpoints.py` script for post training weight averaging and update all scripts with header docstrings and shebangs.
- -### Dec 30, 2019 - -* Merge [Dushyant Mehta's](https://github.com/mehtadushy) PR for SelecSLS (Selective Short and Long Range Skip Connections) networks. Good GPU memory consumption and throughput. Original: https://github.com/mehtadushy/SelecSLS-Pytorch - -### Dec 28, 2019 - -* Add new model weights and training hparams (see Training Hparams section) - * `efficientnet_b3` - 81.5 top-1, 95.7 top-5 at default res/crop, 81.9, 95.8 at 320x320 1.0 crop-pct - * trained with RandAugment, ended up with an interesting but less than perfect result (see training section) - * `seresnext26d_32x4d` - 77.6 top-1, 93.6 top-5 - * deep stem (32, 32, 64), avgpool downsample - * stem/downsample from bag-of-tricks paper - * `seresnext26t_32x4d` - 78.0 top-1, 93.7 top-5 - * deep tiered stem (24, 48, 64), avgpool downsample (a modified 'D' variant) - * stem sizing mods from Jeremy Howard and fastai devs discussing ResNet architecture experiments - -### Dec 23, 2019 - -* Add RandAugment trained MixNet-XL weights with 80.48 top-1. -* `--dist-bn` argument added to train.py, will distribute BN stats between nodes after each train epoch, before eval - -### Dec 4, 2019 - -* Added weights from the first training from scratch of an EfficientNet (B2) with my new RandAugment implementation. Much better than my previous B2 and very close to the official AdvProp ones (80.4 top-1, 95.08 top-5). - -### Nov 29, 2019 - -* Brought EfficientNet and MobileNetV3 up to date with my https://github.com/rwightman/gen-efficientnet-pytorch code. Torchscript and ONNX export compat excluded. - * AdvProp weights added - * Official TF MobileNetv3 weights added -* EfficientNet and MobileNetV3 hook based 'feature extraction' classes added. Will serve as basis for using models as backbones in obj detection/segmentation tasks. Lots more to be done here... -* HRNet classification models and weights added from https://github.com/HRNet/HRNet-Image-Classification -* Consistency in global pooling, `reset_classifier`, and `forward_features` across models - * `forward_features` always returns unpooled feature maps now -* Reasonable chance I broke something... let me know - -### Nov 22, 2019 - -* Add ImageNet training RandAugment implementation alongside AutoAugment. PyTorch Transform compatible format, using PIL. Currently training two EfficientNet models from scratch with promising results... will update. -* `drop-connect` cmd line arg finally added to `train.py`, no need to hack model fns. Works for efficientnet/mobilenetv3 based models, ignored otherwise. \ No newline at end of file diff --git a/hfdocs/source/changes.mdx b/hfdocs/source/changes.mdx deleted file mode 100644 index 93dc9fac..00000000 --- a/hfdocs/source/changes.mdx +++ /dev/null @@ -1,187 +0,0 @@ -# Recent Changes - -### July 27, 2022 - -* All runtime benchmark and validation result csv files are up-to-date! -* A few more weights & model defs added: - * `darknetaa53` - 79.8 @ 256, 80.5 @ 288 - * `convnext_nano` - 80.8 @ 224, 81.5 @ 288 - * `cs3sedarknet_l` - 81.2 @ 256, 81.8 @ 288 - * `cs3darknet_x` - 81.8 @ 256, 82.2 @ 288 - * `cs3sedarknet_x` - 82.2 @ 256, 82.7 @ 288 - * `cs3edgenet_x` - 82.2 @ 256, 82.7 @ 288 - * `cs3se_edgenet_x` - 82.8 @ 256, 83.5 @ 320 -* `cs3*` weights above all trained on TPU w/ `bits_and_tpu` branch. Thanks to TRC program!
-* Add output_stride=8 and 16 support to ConvNeXt (dilation) -* Fixed deit3 models not being able to resize pos_emb -* Version 0.6.7 PyPi release (w/ above bug fixes and new weights since 0.6.5) - -### July 8, 2022 - -More models, more fixes - -* Official research models (w/ weights) added: - * EdgeNeXt from (https://github.com/mmaaz60/EdgeNeXt) - * MobileViT-V2 from (https://github.com/apple/ml-cvnets) - * DeiT III (Revenge of the ViT) from (https://github.com/facebookresearch/deit) -* My own models: - * Small `ResNet` defs added by request with 1 block repeats for both basic and bottleneck (resnet10 and resnet14) - * `CspNet` refactored with dataclass config, simplified CrossStage3 (`cs3`) option. These are closer to YOLO-v5+ backbone defs. - * More relative position vit fiddling. Two `srelpos` (shared relative position) models trained, and a medium w/ class token. - * Add an alternate downsample mode to EdgeNeXt and train a `small` model. Better than original small, but not their new USI trained weights. -* My own model weight results (all ImageNet-1k training) - * `resnet10t` - 66.5 @ 176, 68.3 @ 224 - * `resnet14t` - 71.3 @ 176, 72.3 @ 224 - * `resnetaa50` - 80.6 @ 224, 81.6 @ 288 - * `darknet53` - 80.0 @ 256, 80.5 @ 288 - * `cs3darknet_m` - 77.0 @ 256, 77.6 @ 288 - * `cs3darknet_focus_m` - 76.7 @ 256, 77.3 @ 288 - * `cs3darknet_l` - 80.4 @ 256, 80.9 @ 288 - * `cs3darknet_focus_l` - 80.3 @ 256, 80.9 @ 288 - * `vit_srelpos_small_patch16_224` - 81.1 @ 224, 82.1 @ 320 - * `vit_srelpos_medium_patch16_224` - 82.3 @ 224, 83.1 @ 320 - * `vit_relpos_small_patch16_cls_224` - 82.6 @ 224, 83.6 @ 320 - * `edgenext_small_rw` - 79.6 @ 224, 80.4 @ 320 -* `cs3`, `darknet`, and `vit_*relpos` weights above all trained on TPU thanks to TRC program! Rest trained on overheating GPUs. -* Hugging Face Hub support fixes verified, demo notebook TBA -* Pretrained weights / configs can be loaded externally (ie from local disk) w/ support for head adaptation. -* Add support to change image extensions scanned by `timm` datasets/parsers. See (https://github.com/rwightman/pytorch-image-models/pull/1274#issuecomment-1178303103) -* Default ConvNeXt LayerNorm impl to use `F.layer_norm(x.permute(0, 2, 3, 1), ...).permute(0, 3, 1, 2)` via `LayerNorm2d` in all cases. - * a bit slower than previous custom impl on some hardware (ie Ampere w/ CL), but overall fewer regressions across wider HW / PyTorch version ranges. - * previous impl exists as `LayerNormExp2d` in `models/layers/norm.py` -* Numerous bug fixes -* Currently testing for imminent PyPi 0.6.x release -* LeViT pretraining of larger models still a WIP, they don't train well / easily without distillation. Time to add distill support (finally)? -* ImageNet-22k weight training + finetune ongoing, work on multi-weight support (slowly) chugging along (there are a LOT of weights, sigh) ... - -### May 13, 2022 - -* Official Swin-V2 models and weights added from (https://github.com/microsoft/Swin-Transformer). Cleaned up to support torchscript. -* Some refactoring for existing `timm` Swin-V2-CR impl, will likely do a bit more to bring parts closer to official and decide whether to merge some aspects.
-* More Vision Transformer relative position / residual post-norm experiments (all trained on TPU thanks to TRC program) - * `vit_relpos_small_patch16_224` - 81.5 @ 224, 82.5 @ 320 -- rel pos, layer scale, no class token, avg pool - * `vit_relpos_medium_patch16_rpn_224` - 82.3 @ 224, 83.1 @ 320 -- rel pos + res-post-norm, no class token, avg pool - * `vit_relpos_medium_patch16_224` - 82.5 @ 224, 83.3 @ 320 -- rel pos, layer scale, no class token, avg pool - * `vit_relpos_base_patch16_gapcls_224` - 82.8 @ 224, 83.9 @ 320 -- rel pos, layer scale, class token, avg pool (by mistake) -* Bring 512 dim, 8-head 'medium' ViT model variant back to life (after using in a pre DeiT 'small' model for first ViT impl back in 2020) -* Add ViT relative position support for switching btw existing impl and some additions in official Swin-V2 impl for future trials -* Sequencer2D impl (https://arxiv.org/abs/2205.01972), added via PR from author (https://github.com/okojoalg) - -### May 2, 2022 - -* Vision Transformer experiments adding Relative Position (Swin-V2 log-coord) (`vision_transformer_relpos.py`) and Residual Post-Norm branches (from Swin-V2) (`vision_transformer*.py`) - * `vit_relpos_base_patch32_plus_rpn_256` - 79.5 @ 256, 80.6 @ 320 -- rel pos + extended width + res-post-norm, no class token, avg pool - * `vit_relpos_base_patch16_224` - 82.5 @ 224, 83.6 @ 320 -- rel pos, layer scale, no class token, avg pool - * `vit_base_patch16_rpn_224` - 82.3 @ 224 -- rel pos + res-post-norm, no class token, avg pool -* Vision Transformer refactor to remove representation layer that was only used in initial vit and rarely used since with newer pretrain (ie `How to Train Your ViT`) -* `vit_*` models support removal of class token, use of global average pool, use of fc_norm (ala beit, mae). - -### April 22, 2022 - -* `timm` models are now officially supported in [fast.ai](https://www.fast.ai/)! Just in time for the new Practical Deep Learning course. `timmdocs` documentation link updated to [timm.fast.ai](http://timm.fast.ai/). -* Two more model weights added in the TPU trained [series](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights). Some In22k pretrain still in progress. - * `seresnext101d_32x8d` - 83.69 @ 224, 84.35 @ 288 - * `seresnextaa101d_32x8d` (anti-aliased w/ AvgPool2d) - 83.85 @ 224, 84.57 @ 288 - -### March 23, 2022 - -* Add `ParallelBlock` and `LayerScale` option to base vit models to support model configs in [Three things everyone should know about ViT](https://arxiv.org/abs/2203.09795) -* `convnext_tiny_hnf` (head norm first) weights trained with (close to) A2 recipe, 82.2% top-1, could do better with more epochs. - -### March 21, 2022 - -* Merge `norm_norm_norm`. **IMPORTANT** this update for a coming 0.6.x release will likely de-stabilize the master branch for a while. Branch [`0.5.x`](https://github.com/rwightman/pytorch-image-models/tree/0.5.x) or a previous 0.5.x release can be used if stability is required. 
-* Significant weights update (all TPU trained) as described in this [release](https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-tpu-weights) - * `regnety_040` - 82.3 @ 224, 82.96 @ 288 - * `regnety_064` - 83.0 @ 224, 83.65 @ 288 - * `regnety_080` - 83.17 @ 224, 83.86 @ 288 - * `regnetv_040` - 82.44 @ 224, 83.18 @ 288 (timm pre-act) - * `regnetv_064` - 83.1 @ 224, 83.71 @ 288 (timm pre-act) - * `regnetz_040` - 83.67 @ 256, 84.25 @ 320 - * `regnetz_040h` - 83.77 @ 256, 84.5 @ 320 (w/ extra fc in head) - * `resnetv2_50d_gn` - 80.8 @ 224, 81.96 @ 288 (pre-act GroupNorm) - * `resnetv2_50d_evos` - 80.77 @ 224, 82.04 @ 288 (pre-act EvoNormS) - * `regnetz_c16_evos` - 81.9 @ 256, 82.64 @ 320 (EvoNormS) - * `regnetz_d8_evos` - 83.42 @ 256, 84.04 @ 320 (EvoNormS) - * `xception41p` - 82 @ 299 (timm pre-act) - * `xception65` - 83.17 @ 299 - * `xception65p` - 83.14 @ 299 (timm pre-act) - * `resnext101_64x4d` - 82.46 @ 224, 83.16 @ 288 - * `seresnext101_32x8d` - 83.57 @ 224, 84.27 @ 288 - * `resnetrs200` - 83.85 @ 256, 84.44 @ 320 -* HuggingFace hub support fixed w/ initial groundwork for allowing alternative 'config sources' for pretrained model definitions and weights (generic local file / remote url support soon) -* SwinTransformer-V2 implementation added. Submitted by [Christoph Reich](https://github.com/ChristophReich1996). Training experiments and model changes by myself are ongoing so expect compat breaks. -* Swin-S3 (AutoFormerV2) models / weights added from https://github.com/microsoft/Cream/tree/main/AutoFormerV2 -* MobileViT models w/ weights adapted from https://github.com/apple/ml-cvnets -* PoolFormer models w/ weights adapted from https://github.com/sail-sg/poolformer -* VOLO models w/ weights adapted from https://github.com/sail-sg/volo -* Significant work experimenting with non-BatchNorm norm layers such as EvoNorm, FilterResponseNorm, GroupNorm, etc -* Enhance support for alternate norm + act ('NormAct') layers added to a number of models, esp EfficientNet/MobileNetV3, RegNet, and aligned Xception -* Grouped conv support added to EfficientNet family -* Add 'group matching' API to all models to allow grouping model parameters for application of 'layer-wise' LR decay, lr scale added to LR scheduler -* Gradient checkpointing support added to many models -* `forward_head(x, pre_logits=False)` fn added to all models to allow separate calls of `forward_features` + `forward_head` -* All vision transformer and vision MLP models updated to return non-pooled / non-token selected features from `forward_features`, for consistency with CNN models, token selection or pooling now applied in `forward_head` - -### Feb 2, 2022 - -* [Chris Hughes](https://github.com/Chris-hughes10) posted an exhaustive run through of `timm` on his blog yesterday. Well worth a read. [Getting Started with PyTorch Image Models (timm): A Practitioner’s Guide](https://towardsdatascience.com/getting-started-with-pytorch-image-models-timm-a-practitioners-guide-4e77b4bf9055) -* I'm currently prepping to merge the `norm_norm_norm` branch back to master (ver 0.6.x) in next week or so. - * The changes are more extensive than usual and may destabilize and break some model API use (aiming for full backwards compat). So, beware `pip install git+https://github.com/rwightman/pytorch-image-models` installs! - * `0.5.x` releases and a `0.5.x` branch will remain stable with a cherry pick or two until dust clears. Recommend sticking to pypi install for a bit if you want stable.
- -### Jan 14, 2022 - -* Version 0.5.4 release to be pushed to pypi. It's been a while since last pypi update and riskier changes will be merged to main branch soon.... -* Add ConvNeXT models w/ weights from official impl (https://github.com/facebookresearch/ConvNeXt), a few perf tweaks, compatible with timm features -* Tried training a few small (~1.8-3M param) / mobile optimized models, a few are good so far, more on the way... - * `mnasnet_small` - 65.6 top-1 - * `mobilenetv2_050` - 65.9 - * `lcnet_100/075/050` - 72.1 / 68.8 / 63.1 - * `semnasnet_075` - 73 - * `fbnetv3_b/d/g` - 79.1 / 79.7 / 82.0 -* TinyNet models added by [rsomani95](https://github.com/rsomani95) -* LCNet added via MobileNetV3 architecture - -### Nov 22, 2021 - -* A number of updated weights and new model defs - * `eca_halonext26ts` - 79.5 @ 256 - * `resnet50_gn` (new) - 80.1 @ 224, 81.3 @ 288 - * `resnet50` - 80.7 @ 224, 80.9 @ 288 (trained at 176, not replacing current a1 weights as default since these don't scale as well to higher res, [weights](https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/resnet50_a1h2_176-001a1197.pth)) - * `resnext50_32x4d` - 81.1 @ 224, 82.0 @ 288 - * `sebotnet33ts_256` (new) - 81.2 @ 224 - * `lamhalobotnet50ts_256` - 81.5 @ 256 - * `halonet50ts` - 81.7 @ 256 - * `halo2botnet50ts_256` - 82.0 @ 256 - * `resnet101` - 82.0 @ 224, 82.8 @ 288 - * `resnetv2_101` (new) - 82.1 @ 224, 83.0 @ 288 - * `resnet152` - 82.8 @ 224, 83.5 @ 288 - * `regnetz_d8` (new) - 83.5 @ 256, 84.0 @ 320 - * `regnetz_e8` (new) - 84.5 @ 256, 85.0 @ 320 -* `vit_base_patch8_224` (85.8 top-1) & `in21k` variant weights added thanks [Martins Bruveris](https://github.com/martinsbruveris) -* Groundwork in for FX feature extraction thanks to [Alexander Soare](https://github.com/alexander-soare) - * models updated for tracing compatibility (almost full support with some distilled transformer exceptions) - -### Oct 19, 2021 - -* ResNet strikes back (https://arxiv.org/abs/2110.00476) weights added, plus any extra training components used. Model weights and some more details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-rsb-weights) -* BCE loss and Repeated Augmentation support for RSB paper -* 4 series of ResNet based attention model experiments being added (implemented across byobnet.py/byoanet.py). These include all sorts of attention, from channel attn like SE, ECA to 2D QKV self-attention layers such as Halo, Bottleneck, Lambda. Details here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* Working implementations of the following 2D self-attention modules (likely to be differences from paper or eventual official impl): - * Halo (https://arxiv.org/abs/2103.12731) - * Bottleneck Transformer (https://arxiv.org/abs/2101.11605) - * LambdaNetworks (https://arxiv.org/abs/2102.08602) -* A RegNetZ series of models with some attention experiments (being added to). These do not follow the paper (https://arxiv.org/abs/2103.06877) in any way other than block architecture, details of official models are not available. See more here (https://github.com/rwightman/pytorch-image-models/releases/tag/v0.1-attn-weights) -* ConvMixer (https://openreview.net/forum?id=TVHS5Y4dNvM), CrossVit (https://arxiv.org/abs/2103.14899), and BeiT (https://arxiv.org/abs/2106.08254) architectures + weights added -* freeze/unfreeze helpers by [Alexander Soare](https://github.com/alexander-soare) - -### Aug 18, 2021 - -* Optimizer bonanza!
- * Add LAMB and LARS optimizers, incl trust ratio clipping options. Tweaked to work properly in PyTorch XLA (tested on TPUs w/ `timm bits` [branch](https://github.com/rwightman/pytorch-image-models/tree/bits_and_tpu/timm/bits)) - * Add MADGRAD from FB research w/ a few tweaks (decoupled decay option, step handling that works with PyTorch XLA) - * Some cleanup on all optimizers and factory. No more `.data`, a bit more consistency, unit tests for all! - * SGDP and AdamP still won't work with PyTorch XLA but others should (have yet to test Adabelief, Adafactor, Adahessian myself). -* EfficientNet-V2 XL TF ported weights added, but they don't validate well in PyTorch (L is better). The pre-processing for the V2 TF training is a bit diff and the fine-tuned 21k -> 1k weights are very sensitive and less robust than the 1k weights. -* Added PyTorch trained EfficientNet-V2 'Tiny' w/ GlobalContext attn weights. Only .1-.2 top-1 better than the SE so more of a curiosity for those interested. diff --git a/hfdocs/source/hf_hub.mdx b/hfdocs/source/hf_hub.mdx new file mode 100644 index 00000000..f4ac7fa9 --- /dev/null +++ b/hfdocs/source/hf_hub.mdx @@ -0,0 +1,54 @@ +# Sharing and Loading Models From the Hugging Face Hub + +The `timm` library has a built-in integration with the Hugging Face Hub, making it easy to share and load models from the 🤗 Hub. + +In this short guide, we'll see how to: + 1. Share a `timm` model on the Hub + 2. Load that model back from the Hub + +## Authenticating + +First, you'll need to make sure you have the `huggingface_hub` package installed. + +```bash +pip install huggingface_hub +``` + +Then, you'll need to authenticate yourself. You can do this by running the following command: + +```bash +huggingface-cli login +``` + +Or, if you're using a notebook, you can use the `notebook_login` helper: + +```py +>>> from huggingface_hub import notebook_login +>>> notebook_login() +``` + +## Sharing a Model + +```py +>>> import timm +>>> model = timm.create_model('resnet18', pretrained=True, num_classes=4) +``` + +Here is where you would normally train or fine-tune the model. We'll skip that for the sake of this tutorial. + +Let's pretend we've now fine-tuned the model. The next step would be to push it to the Hub! We can do this with the `timm.models.hub.push_to_hf_hub` function. + +```py +>>> model_cfg = dict(labels=['a', 'b', 'c', 'd']) +>>> timm.models.hub.push_to_hf_hub(model, 'resnet18-random', model_config=model_cfg) +``` + +Running the above would push the model to `resnet18-random` under your username on the Hub. You can now share this model with your friends, or use it in your own code! + +## Loading a Model + +Loading a model from the Hub is as simple as calling `timm.create_model` with the model name prefixed by `hf_hub:` and `pretrained=True`. In this case, we'll use [`nateraw/resnet18-random`](https://huggingface.co/nateraw/resnet18-random), which is the model we just pushed to the Hub. + +```py +>>> model_reloaded = timm.create_model('hf_hub:nateraw/resnet18-random', pretrained=True) +``` diff --git a/hfdocs/source/index.mdx b/hfdocs/source/index.mdx index 3733ae1e..cffa5693 100644 --- a/hfdocs/source/index.mdx +++ b/hfdocs/source/index.mdx @@ -1,89 +1,22 @@ -# Getting Started +# timm -## Welcome + -Welcome to the `timm` documentation, a lean set of docs that covers the basics of `timm`. +`timm` is a library containing SOTA computer vision models, layers, utilities, optimizers, schedulers, data-loaders, augmentations, and training/evaluation scripts.
-For a more comprehensive set of docs (currently under development), please visit [timmdocs](http://timm.fast.ai) by [Aman Arora](https://github.com/amaarora). +It comes packaged with >700 pretrained models, and is designed to be flexible and easy to use. -## Install +Read the [quick start guide](quickstart) to get up and running with the `timm` library. You will learn how to load, discover, and use pretrained models included in the library. -The library can be installed with pip: - -``` -pip install timm -``` - -I update the PyPi (pip) packages when I'm confident there are no significant model regressions from previous releases. If you want to pip install the bleeding edge from GitHub, use: -``` -pip install git+https://github.com/rwightman/pytorch-image-models.git -``` - -### Conda Environment - - - -- All development and testing has been done in Conda Python 3 environments on Linux x86-64 systems, specifically 3.7, 3.8, 3.9, 3.10 - -- Little to no care has been taken to be Python 2.x friendly, and it will not be supported. If you run into any challenges running on Windows, or other OS, I'm definitely open to looking into those issues so long as it's in a reproducible (read Conda) environment. - -- PyTorch versions 1.9, 1.10, 1.11 have been tested with the latest versions of this code. - - - -I've tried to keep the dependencies minimal, the setup is as per the PyTorch default install instructions for Conda: - -```bash -conda create -n torch-env -conda activate torch-env -conda install pytorch torchvision cudatoolkit=11.3 -c pytorch -conda install pyyaml -``` - -## Load a Pretrained Model - -Pretrained models can be loaded using `timm.create_model` - -```py ->>> import timm - ->>> m = timm.create_model('mobilenetv3_large_100', pretrained=True) ->>> m.eval() -``` - -## List Models with Pretrained Weights - -```py ->>> import timm ->>> from pprint import pprint ->>> model_names = timm.list_models(pretrained=True) ->>> pprint(model_names) -[ - 'adv_inception_v3', - 'cspdarknet53', - 'cspresnext50', - 'densenet121', - 'densenet161', - 'densenet169', - 'densenet201', - 'densenetblur121d', - 'dla34', - 'dla46_c', -] -``` - -## List Model Architectures by Wildcard - -```py ->>> import timm ->>> from pprint import pprint ->>> model_names = timm.list_models('*resne*t*') ->>> pprint(model_names) -[ - 'cspresnet50', - 'cspresnet50d', - 'cspresnet50w', - 'cspresnext50', - ... -] -``` + diff --git a/hfdocs/source/installation.mdx b/hfdocs/source/installation.mdx new file mode 100644 index 00000000..3ff210f3 --- /dev/null +++ b/hfdocs/source/installation.mdx @@ -0,0 +1,74 @@ +# Installation + +Before you start, you'll need to set up your environment and install the appropriate packages. `timm` is tested on **Python 3+**. + +## Virtual Environment + +You should install `timm` in a [virtual environment](https://docs.python.org/3/library/venv.html) to keep things tidy and avoid dependency conflicts. + +1. Create and navigate to your project directory: + + ```bash + mkdir ~/my-project + cd ~/my-project + ``` + +2. Start a virtual environment inside your directory: + + ```bash + python -m venv .env + ``` + +3. Activate and deactivate the virtual environment with the following commands: + + ```bash + # Activate the virtual environment + source .env/bin/activate + + # Deactivate the virtual environment + source .env/bin/deactivate + ``` + +Once you've created your virtual environment, you can install `timm` in it.
+ +## Using pip + +The most straightforward way to install `timm` is with pip: + +```bash +pip install timm +``` + +Alternatively, you can install `timm` from GitHub directly to get the latest, bleeding-edge version: + +```bash +pip install git+https://github.com/rwightman/pytorch-image-models.git +``` + +Run the following command to check if `timm` has been properly installed: + +```bash +python -c "from timm import list_models; print(list_models(pretrained=True)[:5])" +``` + +This command lists the first five pretrained models available in `timm` (which are sorted alphabetically). You should see the following output: + +```python +['adv_inception_v3', 'bat_resnext26ts', 'beit_base_patch16_224', 'beit_base_patch16_224_in22k', 'beit_base_patch16_384'] +``` + +## From Source + +Building `timm` from source lets you make changes to the code base. To install from source, clone the repository and install with the following commands: + +```bash +git clone https://github.com/rwightman/pytorch-image-models.git +cd pytorch-image-models +pip install -e . +``` + +Again, you can check if `timm` was properly installed with the following command: + +```bash +python -c "from timm import list_models; print(list_models(pretrained=True)[:5])" +``` diff --git a/hfdocs/source/model_pages.mdx b/hfdocs/source/model_pages.mdx deleted file mode 100644 index a78663f1..00000000 --- a/hfdocs/source/model_pages.mdx +++ /dev/null @@ -1,5 +0,0 @@ -# Available Models - -`timm` comes bundled with a number of model architectures and corresponding pretrained models. - -In these pages, you will find the models available in the `timm` library, as well as information on how to use them. \ No newline at end of file diff --git a/hfdocs/source/quickstart.mdx b/hfdocs/source/quickstart.mdx new file mode 100644 index 00000000..20771024 --- /dev/null +++ b/hfdocs/source/quickstart.mdx @@ -0,0 +1,228 @@ +# Quickstart + +This quickstart is intended for developers who are ready to dive into the code and see an example of how to integrate `timm` into their model training workflow. + +First, you'll need to install `timm`. For more information on installation, see [Installation](installation). + +```bash +pip install timm +``` + +## Load a Pretrained Model + +Pretrained models can be loaded using [`create_model`]. + +Here, we load the pretrained `mobilenetv3_large_100` model. + +```py +>>> import timm + +>>> m = timm.create_model('mobilenetv3_large_100', pretrained=True) +>>> m.eval() +``` + + + Note: The returned PyTorch model is set to train mode by default, so you must call .eval() on it if you plan to use it for inference. + + +## List Models with Pretrained Weights + +To list models packaged with `timm`, you can use [`list_models`]. If you specify `pretrained=True`, this function will only return model names that have associated pretrained weights available. + +```py +>>> import timm +>>> from pprint import pprint +>>> model_names = timm.list_models(pretrained=True) +>>> pprint(model_names) +[ + 'adv_inception_v3', + 'cspdarknet53', + 'cspresnext50', + 'densenet121', + 'densenet161', + 'densenet169', + 'densenet201', + 'densenetblur121d', + 'dla34', + 'dla46_c', +] +``` + +You can also list models with a specific pattern in their name. + +```py +>>> import timm +>>> from pprint import pprint +>>> model_names = timm.list_models('*resne*t*') +>>> pprint(model_names) +[ + 'cspresnet50', + 'cspresnet50d', + 'cspresnet50w', + 'cspresnext50', + ...
+] +``` + +## Fine-Tune a Pretrained Model + +You can fine-tune any of the pre-trained models just by changing the classifier (the last layer). + +```py +>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True, num_classes=NUM_FINETUNE_CLASSES) +``` + +To fine-tune on your own dataset, you have to write a PyTorch training loop or adapt `timm`'s [training script](training_script) to use your dataset. + +## Use a Pretrained Model for Feature Extraction + +Without modifying the network, one can call model.forward_features(input) on any model instead of the usual model(input). This will bypass the network's head classifier and global pooling. + +For a more in-depth guide to using `timm` for feature extraction, see [Feature Extraction](feature_extraction). + +```py +>>> import timm +>>> import torch +>>> x = torch.randn(1, 3, 224, 224) +>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True) +>>> features = model.forward_features(x) +>>> print(features.shape) +torch.Size([1, 960, 7, 7]) +``` + +## Image Augmentation + +To transform images into valid inputs for a model, you can use [`timm.data.create_transform`], providing the desired `input_size` that the model expects. + +This will return a generic transform that uses reasonable defaults. + +```py +>>> timm.data.create_transform((3, 224, 224)) +Compose( + Resize(size=256, interpolation=bilinear, max_size=None, antialias=None) + CenterCrop(size=(224, 224)) + ToTensor() + Normalize(mean=tensor([0.4850, 0.4560, 0.4060]), std=tensor([0.2290, 0.2240, 0.2250])) +) +``` + +Pretrained models have specific transforms that were applied to images fed into them while training. If you use the wrong transform on your image, the model won't understand what it's seeing! + +To figure out which transformations were used for a given pretrained model, we can start by taking a look at its `pretrained_cfg`. + +```py +>>> model.pretrained_cfg +{'url': 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth', + 'num_classes': 1000, + 'input_size': (3, 224, 224), + 'pool_size': (7, 7), + 'crop_pct': 0.875, + 'interpolation': 'bicubic', + 'mean': (0.485, 0.456, 0.406), + 'std': (0.229, 0.224, 0.225), + 'first_conv': 'conv_stem', + 'classifier': 'classifier', + 'architecture': 'mobilenetv3_large_100'} +``` + +We can then resolve only the data-related configuration by using [`timm.data.resolve_data_config`]. + +```py +>>> timm.data.resolve_data_config(model.pretrained_cfg) +{'input_size': (3, 224, 224), + 'interpolation': 'bicubic', + 'mean': (0.485, 0.456, 0.406), + 'std': (0.229, 0.224, 0.225), + 'crop_pct': 0.875} +``` + +We can pass this data config to [`timm.data.create_transform`] to initialize the model's associated transform. + +```py +>>> data_cfg = timm.data.resolve_data_config(model.pretrained_cfg) +>>> transform = timm.data.create_transform(**data_cfg) +>>> transform +Compose( + Resize(size=256, interpolation=bicubic, max_size=None, antialias=None) + CenterCrop(size=(224, 224)) + ToTensor() + Normalize(mean=tensor([0.4850, 0.4560, 0.4060]), std=tensor([0.2290, 0.2240, 0.2250])) +) +``` + + + Note: Here, the pretrained model's config happens to be the same as the generic config we made earlier. This is not always the case. So, it's safer to use the data config to create the transform as we did here instead of using the generic transform.
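+
+As an aside, the same resolved data config can also be used to build a training-time transform. A minimal sketch (the `is_training` flag of `create_transform` switches from the deterministic resize and center crop above to train-mode augmentation such as random resized crop and horizontal flip):
+
+```py
+>>> # Assumes `model` is the pretrained model created above.
+>>> data_cfg = timm.data.resolve_data_config(model.pretrained_cfg)
+>>> # is_training=True yields an augmented transform for training rather
+>>> # than the eval transform shown earlier.
+>>> train_transform = timm.data.create_transform(**data_cfg, is_training=True)
+```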
+ +## Using Pretrained Models for Inference + +Here, we will put together the above sections and use a pretrained model for inference. + +First we'll need an image to do inference on. Here we load a picture of a beagle from the web: + +```py +>>> import requests +>>> from PIL import Image +>>> from io import BytesIO +>>> url = 'https://datasets-server.huggingface.co/assets/imagenet-1k/--/default/test/12/image/image.jpg' +>>> image = Image.open(requests.get(url, stream=True).raw) +>>> image +``` + +Here's the image we loaded: + +An Image from a link + +Now, we'll create our model and transforms again. This time, we make sure to set our model in evaluation mode. + +```py +>>> model = timm.create_model('mobilenetv3_large_100', pretrained=True).eval() +>>> transform = timm.data.create_transform( + **timm.data.resolve_data_config(model.pretrained_cfg) +) +``` + +We can prepare this image for the model by passing it to the transform. + +```py +>>> image_tensor = transform(image) +>>> image_tensor.shape +torch.Size([3, 224, 224]) +``` + +Now we can pass that image to the model to get the predictions. We use `unsqueeze(0)` in this case, as the model is expecting a batch dimension. + +```py +>>> output = model(image_tensor.unsqueeze(0)) +>>> output.shape +torch.Size([1, 1000]) +``` + +To get the predicted probabilities, we apply softmax to the output. This leaves us with a tensor of shape `(num_classes,)`. + +```py +>>> probabilities = torch.nn.functional.softmax(output[0], dim=0) +>>> probabilities.shape +torch.Size([1000]) +``` + +Now we'll find the top 5 predicted class indexes and values using `torch.topk`. + +```py +>>> values, indices = torch.topk(probabilities, 5) +>>> indices +tensor([162, 166, 161, 164, 167]) +``` + +If we check the ImageNet labels for the top index, we can see what the model predicted... + +```py +>>> IMAGENET_1k_URL = 'https://storage.googleapis.com/bit_models/ilsvrc2012_wordnet_lemmas.txt' +>>> IMAGENET_1k_LABELS = requests.get(IMAGENET_1k_URL).text.strip().split('\n') +>>> [{'label': IMAGENET_1k_LABELS[idx], 'value': val.item()} for val, idx in zip(values, indices)] +[{'label': 'beagle', 'value': 0.8486220836639404}, + {'label': 'Walker_hound, Walker_foxhound', 'value': 0.03753996267914772}, + {'label': 'basset, basset_hound', 'value': 0.024628572165966034}, + {'label': 'bluetick', 'value': 0.010317106731235981}, + {'label': 'English_foxhound', 'value': 0.006958036217838526}] +``` \ No newline at end of file diff --git a/hfdocs/source/reference/data.mdx b/hfdocs/source/reference/data.mdx new file mode 100644 index 00000000..b5048739 --- /dev/null +++ b/hfdocs/source/reference/data.mdx @@ -0,0 +1,9 @@ +# Data + +[[autodoc]] timm.data.create_dataset + +[[autodoc]] timm.data.create_loader + +[[autodoc]] timm.data.create_transform + +[[autodoc]] timm.data.resolve_data_config \ No newline at end of file diff --git a/hfdocs/source/reference/models.mdx b/hfdocs/source/reference/models.mdx new file mode 100644 index 00000000..31bb3c27 --- /dev/null +++ b/hfdocs/source/reference/models.mdx @@ -0,0 +1,5 @@ +# Models + +[[autodoc]] timm.create_model + +[[autodoc]] timm.list_models diff --git a/hfdocs/source/reference/optimizers.mdx b/hfdocs/source/reference/optimizers.mdx new file mode 100644 index 00000000..637e7f0a --- /dev/null +++ b/hfdocs/source/reference/optimizers.mdx @@ -0,0 +1,27 @@ +# Optimization + +This page contains the API reference documentation for optimizers included in `timm`.
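+
+Before the reference listing, a minimal usage sketch of the optimizer factory; the model choice and hyper-parameter values below are illustrative assumptions, not recommendations:
+
+```py
+>>> import timm
+>>> import timm.optim
+
+>>> model = timm.create_model('resnet18')
+>>> # opt selects the optimizer by name (e.g. 'sgd', 'adamw', 'lamb'),
+>>> # and standard knobs like lr / weight_decay are passed through.
+>>> optimizer = timm.optim.create_optimizer_v2(model, opt='adamw', lr=1e-4, weight_decay=0.05)
+```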
+ +## Optimizers + +### Factory functions + +[[autodoc]] timm.optim.optim_factory.create_optimizer +[[autodoc]] timm.optim.optim_factory.create_optimizer_v2 + +### Optimizer Classes + +[[autodoc]] timm.optim.adabelief.AdaBelief +[[autodoc]] timm.optim.adafactor.Adafactor +[[autodoc]] timm.optim.adahessian.Adahessian +[[autodoc]] timm.optim.adamp.AdamP +[[autodoc]] timm.optim.adamw.AdamW +[[autodoc]] timm.optim.lamb.Lamb +[[autodoc]] timm.optim.lars.Lars +[[autodoc]] timm.optim.lookahead.Lookahead +[[autodoc]] timm.optim.madgrad.MADGRAD +[[autodoc]] timm.optim.nadam.Nadam +[[autodoc]] timm.optim.nvnovograd.NvNovoGrad +[[autodoc]] timm.optim.radam.RAdam +[[autodoc]] timm.optim.rmsprop_tf.RMSpropTF +[[autodoc]] timm.optim.sgdp.SGDP diff --git a/hfdocs/source/reference/schedulers.mdx b/hfdocs/source/reference/schedulers.mdx new file mode 100644 index 00000000..c44577d6 --- /dev/null +++ b/hfdocs/source/reference/schedulers.mdx @@ -0,0 +1,19 @@ +# Learning Rate Schedulers + +This page contains the API reference documentation for learning rate schedulers included in `timm`. + +## Schedulers + +### Factory functions + +[[autodoc]] timm.scheduler.scheduler_factory.create_scheduler +[[autodoc]] timm.scheduler.scheduler_factory.create_scheduler_v2 + +### Scheduler Classes + +[[autodoc]] timm.scheduler.cosine_lr.CosineLRScheduler +[[autodoc]] timm.scheduler.multistep_lr.MultiStepLRScheduler +[[autodoc]] timm.scheduler.plateau_lr.PlateauLRScheduler +[[autodoc]] timm.scheduler.poly_lr.PolyLRScheduler +[[autodoc]] timm.scheduler.step_lr.StepLRScheduler +[[autodoc]] timm.scheduler.tanh_lr.TanhLRScheduler diff --git a/hfdocs/source/scripts.mdx b/hfdocs/source/scripts.mdx deleted file mode 100644 index 46404d81..00000000 --- a/hfdocs/source/scripts.mdx +++ /dev/null @@ -1,35 +0,0 @@ -# Scripts -Train, validation, inference, and checkpoint cleaning scripts are included in the github root folder. Scripts are not currently packaged in the pip release. - -The training and validation scripts evolved from early versions of the [PyTorch Imagenet Examples](https://github.com/pytorch/examples). I have added significant functionality over time, including CUDA specific performance enhancements based on -[NVIDIA's APEX Examples](https://github.com/NVIDIA/apex/tree/master/examples). - -## Training Script - -The variety of training args is large and not all combinations of options (or even individual options) have been fully tested. For the training dataset folder, specify the base folder that contains both a `train` and a `validation` folder. - -To train an SE-ResNet34 on ImageNet, locally distributed, 4 GPUs, one process per GPU w/ cosine schedule, random-erasing prob of 50% and per-pixel random value: - -```bash -./distributed_train.sh 4 /data/imagenet --model seresnet34 --sched cosine --epochs 150 --warmup-epochs 5 --lr 0.4 --reprob 0.5 --remode pixel --batch-size 256 --amp -j 4 -``` - - - It is recommended to use PyTorch 1.9+ w/ PyTorch native AMP and DDP instead of APEX AMP. --amp defaults to native AMP as of timm ver 0.4.3. --apex-amp will force use of APEX components if they are installed. - - -## Validation / Inference Scripts - -Validation and inference scripts are similar in usage. One outputs metrics on a validation set and the other outputs topk class ids in a csv. Specify the folder containing validation images, not the base folder as in the training script.
-To validate with the model's pretrained weights (if they exist): - -```bash -python validate.py /imagenet/validation/ --model seresnext26_32x4d --pretrained -``` - -To run inference from a checkpoint: - -```bash -python inference.py /imagenet/validation/ --model mobilenetv3_large_100 --checkpoint ./output/train/model_best.pth.tar -``` \ No newline at end of file diff --git a/hfdocs/source/training_hparam_examples.mdx b/hfdocs/source/training_script.mdx similarity index 63% rename from hfdocs/source/training_hparam_examples.mdx rename to hfdocs/source/training_script.mdx index e582cfc9..3eb772a3 100644 --- a/hfdocs/source/training_hparam_examples.mdx +++ b/hfdocs/source/training_script.mdx @@ -1,6 +1,44 @@ -# Training Examples +# Scripts -## EfficientNet-B2 with RandAugment - 80.4 top-1, 95.1 top-5 +Train, validation, inference, and checkpoint cleaning scripts are included in the github root folder. Scripts are not currently packaged in the pip release. + +The training and validation scripts evolved from early versions of the [PyTorch Imagenet Examples](https://github.com/pytorch/examples). I have added significant functionality over time, including CUDA specific performance enhancements based on +[NVIDIA's APEX Examples](https://github.com/NVIDIA/apex/tree/master/examples). + +## Training Script + +The variety of training args is large and not all combinations of options (or even individual options) have been fully tested. For the training dataset folder, specify the base folder that contains both a `train` and a `validation` folder. + +To train an SE-ResNet34 on ImageNet, locally distributed, 4 GPUs, one process per GPU w/ cosine schedule, random-erasing prob of 50% and per-pixel random value: + +```bash +./distributed_train.sh 4 /data/imagenet --model seresnet34 --sched cosine --epochs 150 --warmup-epochs 5 --lr 0.4 --reprob 0.5 --remode pixel --batch-size 256 --amp -j 4 +``` + + + It is recommended to use PyTorch 1.9+ w/ PyTorch native AMP and DDP instead of APEX AMP. --amp defaults to native AMP as of timm ver 0.4.3. --apex-amp will force use of APEX components if they are installed. + + + +## Validation / Inference Scripts + +Validation and inference scripts are similar in usage. One outputs metrics on a validation set and the other outputs topk class ids in a csv. Specify the folder containing validation images, not the base folder as in the training script.
+ +To validate with the model's pretrained weights (if they exist): + +```bash +python validate.py /imagenet/validation/ --model seresnext26_32x4d --pretrained +``` + +To run inference from a checkpoint: + +```bash +python inference.py /imagenet/validation/ --model mobilenetv3_large_100 --checkpoint ./output/train/model_best.pth.tar +``` + +## Training Examples + +### EfficientNet-B2 with RandAugment - 80.4 top-1, 95.1 top-5 These params are for dual Titan RTX cards with NVIDIA Apex installed: @@ -8,7 +46,7 @@ These params are for dual Titan RTX cards with NVIDIA Apex installed: ./distributed_train.sh 2 /imagenet/ --model efficientnet_b2 -b 128 --sched step --epochs 450 --decay-epochs 2.4 --decay-rate .97 --opt rmsproptf --opt-eps .001 -j 8 --warmup-lr 1e-6 --weight-decay 1e-5 --drop 0.3 --drop-path 0.2 --model-ema --model-ema-decay 0.9999 --aa rand-m9-mstd0.5 --remode pixel --reprob 0.2 --amp --lr .016 ``` -## MixNet-XL with RandAugment - 80.5 top-1, 94.9 top-5 +### MixNet-XL with RandAugment - 80.5 top-1, 94.9 top-5 This params are for dual Titan RTX cards with NVIDIA Apex installed: @@ -16,45 +54,45 @@ This params are for dual Titan RTX cards with NVIDIA Apex installed: ./distributed_train.sh 2 /imagenet/ --model mixnet_xl -b 128 --sched step --epochs 450 --decay-epochs 2.4 --decay-rate .969 --opt rmsproptf --opt-eps .001 -j 8 --warmup-lr 1e-6 --weight-decay 1e-5 --drop 0.3 --drop-path 0.2 --model-ema --model-ema-decay 0.9999 --aa rand-m9-mstd0.5 --remode pixel --reprob 0.3 --amp --lr .016 --dist-bn reduce ``` -## SE-ResNeXt-26-D and SE-ResNeXt-26-T +### SE-ResNeXt-26-D and SE-ResNeXt-26-T These hparams (or similar) work well for a wide range of ResNet architecture, generally a good idea to increase the epoch # as the model size increases... ie approx 180-200 for ResNe(X)t50, and 220+ for larger. Increase batch size and LR proportionally for better GPUs or with AMP enabled. These params were for 2 1080Ti cards: ```bash ./distributed_train.sh 2 /imagenet/ --model seresnext26t_32x4d --lr 0.1 --warmup-epochs 5 --epochs 160 --weight-decay 1e-4 --sched cosine --reprob 0.4 --remode pixel -b 112 ``` -## EfficientNet-B3 with RandAugment - 81.5 top-1, 95.7 top-5 +### EfficientNet-B3 with RandAugment - 81.5 top-1, 95.7 top-5 The training of this model started with the same command line as EfficientNet-B2 w/ RA above. After almost three weeks of training the process crashed. The results weren't looking amazing so I resumed the training several times with tweaks to a few params (increase RE prob, decrease rand-aug, increase ema-decay). Nothing looked great. I ended up averaging the best checkpoints from all restarts. The result is mediocre at default res/crop but oddly performs much better with a full image test crop of 1.0. -## EfficientNet-B0 with RandAugment - 77.7 top-1, 95.3 top-5 +### EfficientNet-B0 with RandAugment - 77.7 top-1, 95.3 top-5 [Michael Klachko](https://github.com/michaelklachko) achieved these results with the command line for B2 adapted for larger batch size, with the recommended B0 dropout rate of 0.2. 
```bash ./distributed_train.sh 2 /imagenet/ --model efficientnet_b0 -b 384 --sched step --epochs 450 --decay-epochs 2.4 --decay-rate .97 --opt rmsproptf --opt-eps .001 -j 8 --warmup-lr 1e-6 --weight-decay 1e-5 --drop 0.2 --drop-path 0.2 --model-ema --model-ema-decay 0.9999 --aa rand-m9-mstd0.5 --remode pixel --reprob 0.2 --amp --lr .048 ``` -## ResNet50 with JSD loss and RandAugment (clean + 2x RA augs) - 79.04 top-1, 94.39 top-5 +### ResNet50 with JSD loss and RandAugment (clean + 2x RA augs) - 79.04 top-1, 94.39 top-5 Trained on two older 1080Ti cards, this took a while. Only slightly, non statistically better ImageNet validation result than my first good AugMix training of 78.99. However, these weights are more robust on tests with ImageNetV2, ImageNet-Sketch, etc. Unlike my first AugMix runs, I've enabled SplitBatchNorm, disabled random erasing on the clean split, and cranked up random erasing prob on the 2 augmented paths. ```bash ./distributed_train.sh 2 /imagenet -b 64 --model resnet50 --sched cosine --epochs 200 --lr 0.05 --amp --remode pixel --reprob 0.6 --aug-splits 3 --aa rand-m9-mstd0.5-inc1 --resplit --split-bn --jsd --dist-bn reduce ``` -## EfficientNet-ES (EdgeTPU-Small) with RandAugment - 78.066 top-1, 93.926 top-5 +### EfficientNet-ES (EdgeTPU-Small) with RandAugment - 78.066 top-1, 93.926 top-5 Trained by [Andrew Lavin](https://github.com/andravin) with 8 V100 cards. Model EMA was not used, final checkpoint is the average of 8 best checkpoints during training. ```bash ./distributed_train.sh 8 /imagenet --model efficientnet_es -b 128 --sched step --epochs 450 --decay-epochs 2.4 --decay-rate .97 --opt rmsproptf --opt-eps .001 -j 8 --warmup-lr 1e-6 --weight-decay 1e-5 --drop 0.2 --drop-path 0.2 --aa rand-m9-mstd0.5 --remode pixel --reprob 0.2 --amp --lr .064 ``` -## MobileNetV3-Large-100 - 75.766 top-1, 92,542 top-5 +### MobileNetV3-Large-100 - 75.766 top-1, 92,542 top-5 ```bash ./distributed_train.sh 2 /imagenet/ --model mobilenetv3_large_100 -b 512 --sched step --epochs 600 --decay-epochs 2.4 --decay-rate .973 --opt rmsproptf --opt-eps .001 -j 7 --warmup-lr 1e-6 --weight-decay 1e-5 --drop 0.2 --drop-path 0.2 --model-ema --model-ema-decay 0.9999 --aa rand-m9-mstd0.5 --remode pixel --reprob 0.2 --amp --lr .064 --lr-noise 0.42 0.9 ``` -## ResNeXt-50 32x4d w/ RandAugment - 79.762 top-1, 94.60 top-5 +### ResNeXt-50 32x4d w/ RandAugment - 79.762 top-1, 94.60 top-5 These params will also work well for SE-ResNeXt-50 and SK-ResNeXt-50 and likely 101. I used them for the SK-ResNeXt-50 32x4d that I trained with 2 GPU using a slightly higher LR per effective batch size (lr=0.18, b=192 per GPU). The cmd line below are tuned for 8 GPU training. 
diff --git a/inference.py b/inference.py index 1509b323..cfbe62d1 100755 --- a/inference.py +++ b/inference.py @@ -20,7 +20,7 @@ import torch from timm.data import create_dataset, create_loader, resolve_data_config from timm.layers import apply_test_time_pool from timm.models import create_model -from timm.utils import AverageMeter, setup_default_logging, set_jit_fuser +from timm.utils import AverageMeter, setup_default_logging, set_jit_fuser, ParseKwargs try: from apex import amp @@ -72,6 +72,8 @@ parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N', help='mini-batch size (default: 256)') parser.add_argument('--img-size', default=None, type=int, metavar='N', help='Input image dimension, uses model default if empty') +parser.add_argument('--in-chans', type=int, default=None, metavar='N', + help='Image input channels (default: None => 3)') parser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') parser.add_argument('--use-train-size', action='store_true', default=False, @@ -110,6 +112,7 @@ parser.add_argument('--amp-dtype', default='float16', type=str, help='lower precision AMP dtype (default: float16)') parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) scripting_group = parser.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', default=False, action='store_true', @@ -170,12 +173,19 @@ def main(): set_jit_fuser(args.fuser) # create model + in_chans = 3 + if args.in_chans is not None: + in_chans = args.in_chans + elif args.input_size is not None: + in_chans = args.input_size[0] + model = create_model( args.model, num_classes=args.num_classes, - in_chans=3, + in_chans=in_chans, pretrained=args.pretrained, checkpoint_path=args.checkpoint, + **args.model_kwargs, ) if args.num_classes is None: assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' diff --git a/mkdocs.yml b/mkdocs.yml index a72436c6..7adb4d34 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -44,3 +44,11 @@ markdown_extensions: plugins: - search - awesome-pages + - redirects: + redirect_maps: + 'index.md': 'https://huggingface.co/docs/timm/index' + 'models.md': 'https://huggingface.co/docs/timm/models' + 'results.md': 'https://huggingface.co/docs/timm/results' + 'scripts.md': 'https://huggingface.co/docs/timm/training_script' + 'training_hparam_examples.md': 'https://huggingface.co/docs/timm/training_script#training-examples' + 'feature_extraction.md': 'https://huggingface.co/docs/timm/feature_extraction' diff --git a/requirements-docs.txt b/requirements-docs.txt index 716a3bf7..d782d5fb 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -1,4 +1,5 @@ mkdocs mkdocs-material +mkdocs-redirects mdx_truly_sane_lists -mkdocs-awesome-pages-plugin \ No newline at end of file +mkdocs-awesome-pages-plugin diff --git a/results/README.md b/results/README.md index 4fabf64b..81f30061 100644 --- a/results/README.md +++ b/results/README.md @@ -38,7 +38,7 @@ An ImageNet test set of 10,000 images sampled from new images roughly 10 years a ### ImageNet-Adversarial - [`results-imagenet-a.csv`](results-imagenet-a.csv) -A collection of 7500 images covering 200 of the 1000 ImageNet classes. 
Images are naturally occuring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset, your typical ResNet-50 will score 0% top-1. +A collection of 7500 images covering 200 of the 1000 ImageNet classes. Images are naturally occurring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset, your typical ResNet-50 will score 0% top-1. For clean validation with same 200 classes, see [`results-imagenet-a-clean.csv`](results-imagenet-a-clean.csv) diff --git a/results/results-imagenet-a-clean.csv b/results/results-imagenet-a-clean.csv index 68822bd8..cd4afa70 100644 --- a/results/results-imagenet-a-clean.csv +++ b/results/results-imagenet-a-clean.csv @@ -1,669 +1,791 @@ model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation -beit_large_patch16_512,98.560,1.440,99.840,0.160,305.67,512,1.000,bicubic -tf_efficientnet_l2_ns,98.550,1.450,99.820,0.180,480.31,800,0.960,bicubic -beit_large_patch16_384,98.520,1.480,99.820,0.180,305.00,384,1.000,bicubic -tf_efficientnet_l2_ns_475,98.500,1.500,99.830,0.170,480.31,475,0.936,bicubic +eva_giant_patch14_560.m30m_ft_in22k_in1k,98.820,1.180,99.900,0.100,"1,014.45",560,1.000,bicubic +eva_giant_patch14_336.clip_ft_in1k,98.820,1.180,99.820,0.180,"1,013.01",336,1.000,bicubic +eva_giant_patch14_336.m30m_ft_in22k_in1k,98.810,1.190,99.900,0.100,"1,013.01",336,1.000,bicubic +eva_large_patch14_336.in22k_ft_in22k_in1k,98.740,1.260,99.810,0.190,304.53,336,1.000,bicubic +eva_large_patch14_336.in22k_ft_in1k,98.710,1.290,99.870,0.130,304.53,336,1.000,bicubic +maxvit_base_tf_512.in21k_ft_in1k,98.630,1.370,99.800,0.200,119.88,512,1.000,bicubic +maxvit_xlarge_tf_512.in21k_ft_in1k,98.620,1.380,99.800,0.200,475.77,512,1.000,bicubic +maxvit_large_tf_512.in21k_ft_in1k,98.620,1.380,99.790,0.210,212.33,512,1.000,bicubic +beit_large_patch16_512.in22k_ft_in22k_in1k,98.560,1.440,99.840,0.160,305.67,512,1.000,bicubic +tf_efficientnet_l2.ns_jft_in1k,98.550,1.450,99.820,0.180,480.31,800,0.960,bicubic +beitv2_large_patch16_224.in1k_ft_in22k_in1k,98.540,1.460,99.760,0.240,304.43,224,0.950,bicubic +beit_large_patch16_384.in22k_ft_in22k_in1k,98.520,1.480,99.820,0.180,305.00,384,1.000,bicubic +maxvit_base_tf_384.in21k_ft_in1k,98.520,1.480,99.750,0.250,119.65,384,1.000,bicubic +tf_efficientnet_l2.ns_jft_in1k_475,98.500,1.500,99.830,0.170,480.31,475,0.936,bicubic +maxvit_xlarge_tf_384.in21k_ft_in1k,98.500,1.500,99.780,0.220,475.32,384,1.000,bicubic +maxvit_large_tf_384.in21k_ft_in1k,98.490,1.510,99.750,0.250,212.03,384,1.000,bicubic +eva_giant_patch14_224.clip_ft_in1k,98.480,1.520,99.820,0.180,"1,012.56",224,1.000,bicubic deit3_large_patch16_384_in21ft1k,98.460,1.540,99.760,0.240,304.76,384,1.000,bicubic -convnext_xlarge_384_in22ft1k,98.350,1.650,99.800,0.200,350.20,384,1.000,bicubic -vit_large_patch16_384,98.220,1.780,99.800,0.200,304.72,384,1.000,bicubic -convnext_large_384_in22ft1k,98.220,1.780,99.730,0.270,197.77,384,1.000,bicubic -beit_large_patch16_224,98.180,1.820,99.760,0.240,304.43,224,0.900,bicubic +vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,98.430,1.570,99.810,0.190,632.46,336,1.000,bicubic +eva_large_patch14_196.in22k_ft_in22k_in1k,98.430,1.570,99.770,0.230,304.14,196,1.000,bicubic +convnext_xlarge.fb_in22k_ft_in1k_384,98.420,1.580,99.810,0.190,350.20,384,1.000,bicubic +eva_large_patch14_196.in22k_ft_in1k,98.350,1.650,99.820,0.180,304.14,196,1.000,bicubic +vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,98.330,1.670,99.760,0.240,304.53,336,1.000,bicubic 
+vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,98.270,1.730,99.760,0.240,632.05,224,1.000,bicubic +vit_large_patch14_clip_336.openai_ft_in12k_in1k,98.260,1.740,99.770,0.230,304.53,336,1.000,bicubic +convnext_large.fb_in22k_ft_in1k_384,98.230,1.770,99.750,0.250,197.77,384,1.000,bicubic +vit_large_patch16_384.augreg_in21k_ft_in1k,98.220,1.780,99.800,0.200,304.72,384,1.000,bicubic +vit_large_patch14_clip_224.openai_ft_in12k_in1k,98.220,1.780,99.730,0.270,304.20,224,1.000,bicubic +vit_large_patch14_clip_336.laion2b_ft_in1k,98.220,1.780,99.720,0.280,304.53,336,1.000,bicubic +vit_base_patch16_clip_384.openai_ft_in12k_in1k,98.200,1.800,99.660,0.340,86.86,384,0.950,bicubic +beit_large_patch16_224.in22k_ft_in22k_in1k,98.180,1.820,99.760,0.240,304.43,224,0.900,bicubic deit3_large_patch16_224_in21ft1k,98.170,1.830,99.760,0.240,304.37,224,1.000,bicubic deit3_huge_patch14_224_in21ft1k,98.170,1.830,99.730,0.270,632.13,224,1.000,bicubic +vit_large_patch14_clip_224.openai_ft_in1k,98.160,1.840,99.660,0.340,304.20,224,1.000,bicubic swinv2_large_window12to24_192to384_22kft1k,98.150,1.850,99.690,0.310,196.74,384,1.000,bicubic swinv2_base_window12to24_192to384_22kft1k,98.140,1.860,99.780,0.220,87.92,384,1.000,bicubic +convnext_xlarge.fb_in22k_ft_in1k,98.120,1.880,99.780,0.220,350.20,288,1.000,bicubic +convnext_large.fb_in22k_ft_in1k,98.120,1.880,99.750,0.250,197.77,288,1.000,bicubic +vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,98.080,1.920,99.760,0.240,304.20,224,1.000,bicubic +convnext_base.fb_in22k_ft_in1k_384,98.070,1.930,99.650,0.350,88.59,384,1.000,bicubic swin_large_patch4_window12_384,98.040,1.960,99.690,0.310,196.74,384,1.000,bicubic -convnext_base_384_in22ft1k,97.950,2.050,99.650,0.350,88.59,384,1.000,bicubic -tf_efficientnet_b7_ns,97.920,2.080,99.720,0.280,66.35,600,0.949,bicubic -convnext_xlarge_in22ft1k,97.920,2.080,99.680,0.320,350.20,224,0.875,bicubic +vit_huge_patch14_clip_224.laion2b_ft_in1k,98.020,1.980,99.720,0.280,632.05,224,1.000,bicubic +vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,98.010,1.990,99.660,0.340,86.86,384,1.000,bicubic +tf_efficientnet_b7.ns_jft_in1k,97.910,2.090,99.720,0.280,66.35,600,0.949,bicubic +tf_efficientnetv2_xl.in21k_ft_in1k,97.910,2.090,99.570,0.430,208.12,512,1.000,bicubic +vit_large_patch14_clip_224.laion2b_ft_in1k,97.900,2.100,99.650,0.350,304.20,224,1.000,bicubic swin_base_patch4_window12_384,97.890,2.110,99.710,0.290,87.90,384,1.000,bicubic -vit_large_r50_s32_384,97.860,2.140,99.670,0.330,329.09,384,1.000,bicubic +convnext_base.fb_in22k_ft_in1k,97.860,2.140,99.680,0.320,88.59,288,1.000,bicubic +vit_large_r50_s32_384.augreg_in21k_ft_in1k,97.860,2.140,99.670,0.330,329.09,384,1.000,bicubic swinv2_large_window12to16_192to256_22kft1k,97.860,2.140,99.650,0.350,196.74,256,0.900,bicubic -vit_base_patch16_384,97.840,2.160,99.670,0.330,86.86,384,1.000,bicubic -convnext_large_in22ft1k,97.830,2.170,99.690,0.310,197.77,224,0.875,bicubic -deit3_base_patch16_384_in21ft1k,97.830,2.170,99.680,0.320,86.88,384,1.000,bicubic -beit_base_patch16_384,97.820,2.180,99.700,0.300,86.74,384,1.000,bicubic +deit3_base_patch16_384_in21ft1k,97.840,2.160,99.680,0.320,86.88,384,1.000,bicubic +vit_base_patch16_384.augreg_in21k_ft_in1k,97.840,2.160,99.670,0.330,86.86,384,1.000,bicubic +maxvit_large_tf_512.in1k,97.830,2.170,99.560,0.440,212.33,512,1.000,bicubic +beit_base_patch16_384.in22k_ft_in22k_in1k,97.820,2.180,99.700,0.300,86.74,384,1.000,bicubic +tf_efficientnetv2_m.in21k_ft_in1k,97.820,2.180,99.600,0.400,54.14,480,1.000,bicubic 
+tf_efficientnetv2_l.in21k_ft_in1k,97.800,2.200,99.770,0.230,118.52,480,1.000,bicubic volo_d5_512,97.770,2.230,99.670,0.330,296.09,512,1.150,bicubic volo_d5_448,97.760,2.240,99.620,0.380,295.91,448,1.150,bicubic -tf_efficientnetv2_l_in21ft1k,97.700,2.300,99.670,0.330,118.52,480,1.000,bicubic +maxvit_small_tf_512.in1k,97.750,2.250,99.550,0.450,69.13,512,1.000,bicubic +maxvit_base_tf_512.in1k,97.740,2.260,99.610,0.390,119.88,512,1.000,bicubic +vit_base_patch16_clip_384.laion2b_ft_in1k,97.730,2.270,99.630,0.370,86.86,384,1.000,bicubic +vit_base_patch8_224.augreg2_in21k_ft_in1k,97.710,2.290,99.650,0.350,86.58,224,0.900,bicubic +beitv2_base_patch16_224.in1k_ft_in22k_in1k,97.690,2.310,99.680,0.320,86.53,224,0.900,bicubic volo_d4_448,97.670,2.330,99.610,0.390,193.41,448,1.150,bicubic -tf_efficientnetv2_xl_in21ft1k,97.660,2.340,99.490,0.510,208.12,512,1.000,bicubic swinv2_base_window12to16_192to256_22kft1k,97.650,2.350,99.720,0.280,87.92,256,0.900,bicubic swin_large_patch4_window7_224,97.650,2.350,99.580,0.420,196.53,224,0.900,bicubic -vit_large_patch16_224,97.640,2.360,99.590,0.410,304.33,224,0.900,bicubic -tf_efficientnet_b6_ns,97.630,2.370,99.580,0.420,43.04,528,0.942,bicubic +vit_large_patch16_224.augreg_in21k_ft_in1k,97.640,2.360,99.590,0.410,304.33,224,0.900,bicubic +tf_efficientnet_b6.ns_jft_in1k,97.630,2.370,99.580,0.420,43.04,528,0.942,bicubic ig_resnext101_32x48d,97.620,2.380,99.700,0.300,828.41,224,0.875,bilinear +convnext_small.fb_in22k_ft_in1k_384,97.600,2.400,99.600,0.400,50.22,384,1.000,bicubic dm_nfnet_f6,97.600,2.400,99.550,0.450,438.36,576,0.956,bicubic -vit_base_patch8_224,97.580,2.420,99.670,0.330,86.58,224,0.900,bicubic +vit_base_patch8_224.augreg_in21k_ft_in1k,97.580,2.420,99.670,0.330,86.58,224,0.900,bicubic dm_nfnet_f4,97.580,2.420,99.510,0.490,316.07,512,0.951,bicubic +maxvit_base_tf_384.in1k,97.570,2.430,99.590,0.410,119.65,384,1.000,bicubic +maxvit_tiny_tf_512.in1k,97.570,2.430,99.560,0.440,31.05,512,1.000,bicubic +maxvit_large_tf_384.in1k,97.570,2.430,99.530,0.470,212.03,384,1.000,bicubic +vit_base_patch16_clip_384.openai_ft_in1k,97.550,2.450,99.660,0.340,86.86,384,1.000,bicubic volo_d3_448,97.550,2.450,99.550,0.450,86.63,448,1.000,bicubic dm_nfnet_f5,97.540,2.460,99.570,0.430,377.21,544,0.954,bicubic xcit_large_24_p8_384_dist,97.520,2.480,99.540,0.460,188.93,384,1.000,bicubic +vit_base_patch16_clip_224.openai_ft_in12k_in1k,97.520,2.480,99.500,0.500,86.57,224,0.950,bicubic xcit_large_24_p16_384_dist,97.520,2.480,99.480,0.520,189.10,384,1.000,bicubic -tf_efficientnet_b5_ns,97.500,2.500,99.630,0.370,30.39,456,0.934,bicubic +tf_efficientnet_b5.ns_jft_in1k,97.500,2.500,99.630,0.370,30.39,456,0.934,bicubic resnetv2_152x4_bitm,97.490,2.510,99.610,0.390,936.53,480,1.000,bilinear -deit3_base_patch16_224_in21ft1k,97.490,2.510,99.600,0.400,86.59,224,1.000,bicubic +deit3_base_patch16_224_in21ft1k,97.480,2.520,99.600,0.400,86.59,224,1.000,bicubic cait_m48_448,97.480,2.520,99.550,0.450,356.46,448,1.000,bicubic -tf_efficientnetv2_m_in21ft1k,97.480,2.520,99.530,0.470,54.14,480,1.000,bicubic -convnext_base_in22ft1k,97.470,2.530,99.600,0.400,88.59,224,0.875,bicubic -convnext_small_384_in22ft1k,97.460,2.540,99.580,0.420,50.22,384,1.000,bicubic +tf_efficientnetv2_l.in1k,97.470,2.530,99.530,0.470,118.52,480,1.000,bicubic +vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,97.450,2.550,99.540,0.460,86.57,224,0.950,bicubic +vit_medium_patch16_gap_384.in12k_ft_in1k,97.440,2.560,99.640,0.360,39.03,384,0.950,bicubic deit3_large_patch16_384,97.420,2.580,99.620,0.380,304.76,384,1.000,bicubic 
+maxvit_small_tf_384.in1k,97.420,2.580,99.510,0.490,69.02,384,1.000,bicubic +flexivit_large.1200ep_in1k,97.410,2.590,99.600,0.400,304.36,240,0.950,bicubic +efficientnet_b5.in12k_ft_in1k,97.410,2.590,99.540,0.460,30.39,448,1.000,bicubic cait_m36_384,97.400,2.600,99.510,0.490,271.22,384,1.000,bicubic -volo_d5_224,97.390,2.610,99.570,0.430,295.46,224,0.960,bicubic -ig_resnext101_32x32d,97.370,2.630,99.680,0.320,468.53,224,0.875,bilinear +volo_d5_224,97.380,2.620,99.570,0.430,295.46,224,0.960,bicubic +ig_resnext101_32x32d,97.360,2.640,99.680,0.320,468.53,224,0.875,bilinear +convnext_small.fb_in22k_ft_in1k,97.360,2.640,99.530,0.470,50.22,288,1.000,bicubic +vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,97.360,2.640,99.520,0.480,88.30,384,1.000,bicubic dm_nfnet_f3,97.350,2.650,99.560,0.440,254.92,416,0.940,bicubic cait_s36_384,97.330,2.670,99.530,0.470,68.37,384,1.000,bicubic -volo_d2_384,97.310,2.690,99.600,0.400,58.87,384,1.000,bicubic +volo_d2_384,97.320,2.680,99.600,0.400,58.87,384,1.000,bicubic +vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,97.320,2.680,99.480,0.520,88.34,448,1.000,bicubic volo_d4_224,97.300,2.700,99.520,0.480,192.96,224,0.960,bicubic +maxvit_tiny_tf_384.in1k,97.300,2.700,99.500,0.500,30.98,384,1.000,bicubic xcit_medium_24_p8_384_dist,97.290,2.710,99.510,0.490,84.32,384,1.000,bicubic -tf_efficientnetv2_l,97.280,2.720,99.550,0.450,118.52,480,1.000,bicubic -xcit_medium_24_p16_384_dist,97.280,2.720,99.460,0.540,84.40,384,1.000,bicubic +flexivit_large.600ep_in1k,97.280,2.720,99.590,0.410,304.36,240,0.950,bicubic +xcit_medium_24_p16_384_dist,97.270,2.730,99.460,0.540,84.40,384,1.000,bicubic swin_base_patch4_window7_224,97.250,2.750,99.530,0.470,87.77,224,0.900,bicubic -xcit_small_24_p8_384_dist,97.240,2.760,99.610,0.390,47.63,384,1.000,bicubic +flexivit_large.300ep_in1k,97.250,2.750,99.490,0.510,304.36,240,0.950,bicubic +xcit_small_24_p8_384_dist,97.240,2.760,99.600,0.400,47.63,384,1.000,bicubic xcit_small_12_p8_384_dist,97.230,2.770,99.480,0.520,26.21,384,1.000,bicubic +tf_efficientnetv2_m.in1k,97.210,2.790,99.530,0.470,54.14,480,1.000,bicubic swsl_resnext101_32x8d,97.200,2.800,99.570,0.430,88.79,224,0.875,bilinear -tf_efficientnet_b7_ap,97.200,2.800,99.540,0.460,66.35,600,0.949,bicubic +tf_efficientnet_b7.ap_in1k,97.200,2.800,99.540,0.460,66.35,600,0.949,bicubic regnetz_e8,97.200,2.800,99.500,0.500,57.70,320,1.000,bicubic -tf_efficientnet_b8,97.200,2.800,99.500,0.500,87.41,672,0.954,bicubic -vit_base_r50_s16_384,97.180,2.820,99.560,0.440,98.95,384,1.000,bicubic -tf_efficientnetv2_m,97.140,2.860,99.410,0.590,54.14,480,1.000,bicubic +tf_efficientnet_b8.ra_in1k,97.200,2.800,99.500,0.500,87.41,672,0.954,bicubic +vit_base_r50_s16_384.orig_in21k_ft_in1k,97.180,2.820,99.560,0.440,98.95,384,1.000,bicubic +vit_base_patch16_224.augreg2_in21k_ft_in1k,97.150,2.850,99.540,0.460,86.57,224,0.900,bicubic deit3_small_patch16_384_in21ft1k,97.130,2.870,99.500,0.500,22.21,384,1.000,bicubic -xcit_small_24_p16_384_dist,97.120,2.880,99.450,0.550,47.67,384,1.000,bicubic -tf_efficientnet_b8_ap,97.110,2.890,99.660,0.340,87.41,672,0.954,bicubic -beit_base_patch16_224,97.090,2.910,99.610,0.390,86.53,224,0.900,bicubic +vit_base_patch16_clip_224.laion2b_ft_in1k,97.130,2.870,99.460,0.540,86.57,224,1.000,bicubic +xcit_small_24_p16_384_dist,97.120,2.880,99.460,0.540,47.67,384,1.000,bicubic +tf_efficientnet_b8.ap_in1k,97.110,2.890,99.660,0.340,87.41,672,0.954,bicubic +vit_base_patch32_clip_384.openai_ft_in12k_in1k,97.110,2.890,99.500,0.500,88.30,384,0.950,bicubic 
+convnext_large.fb_in1k,97.100,2.900,99.450,0.550,197.77,288,1.000,bicubic +beit_base_patch16_224.in22k_ft_in22k_in1k,97.090,2.910,99.610,0.390,86.53,224,0.900,bicubic eca_nfnet_l2,97.090,2.910,99.510,0.490,56.72,384,1.000,bicubic volo_d3_224,97.090,2.910,99.470,0.530,86.33,224,0.960,bicubic -tf_efficientnet_b6_ap,97.080,2.920,99.620,0.380,43.04,528,0.942,bicubic +tf_efficientnet_b6.ap_in1k,97.080,2.920,99.620,0.380,43.04,528,0.942,bicubic +convnext_tiny.fb_in22k_ft_in1k_384,97.080,2.920,99.510,0.490,28.59,384,1.000,bicubic +vit_base_patch16_clip_224.openai_ft_in1k,97.080,2.920,99.490,0.510,86.57,224,0.900,bicubic ecaresnet269d,97.080,2.920,99.470,0.530,102.09,352,1.000,bicubic cait_s24_384,97.070,2.930,99.430,0.570,47.06,384,1.000,bicubic -xcit_large_24_p8_224_dist,97.070,2.930,99.420,0.580,188.93,224,1.000,bicubic +xcit_large_24_p8_224_dist,97.060,2.940,99.420,0.580,188.93,224,1.000,bicubic dm_nfnet_f2,97.020,2.980,99.440,0.560,193.78,352,0.920,bicubic deit3_base_patch16_384,97.020,2.980,99.390,0.610,86.88,384,1.000,bicubic resnetv2_152x2_bitm,97.010,2.990,99.590,0.410,236.34,448,1.000,bilinear -tf_efficientnet_b7,97.010,2.990,99.520,0.480,66.35,600,0.949,bicubic -volo_d2_224,97.000,3.000,99.390,0.610,58.68,224,0.960,bicubic +tf_efficientnet_b7.ra_in1k,97.010,2.990,99.520,0.480,66.35,600,0.949,bicubic resnetv2_101x3_bitm,96.990,3.010,99.490,0.510,387.93,448,1.000,bilinear -convnext_small_in22ft1k,96.990,3.010,99.410,0.590,50.22,224,0.875,bicubic -efficientnetv2_rw_m,96.980,3.020,99.540,0.460,53.24,416,1.000,bicubic +volo_d2_224,96.990,3.010,99.390,0.610,58.68,224,0.960,bicubic +efficientnetv2_rw_m.agc_in1k,96.980,3.020,99.540,0.460,53.24,416,1.000,bicubic +deit3_medium_patch16_224_in21ft1k,96.970,3.030,99.430,0.570,38.85,224,1.000,bicubic deit_base_distilled_patch16_384,96.960,3.040,99.480,0.520,87.63,384,1.000,bicubic -tf_efficientnet_b4_ns,96.950,3.050,99.580,0.420,19.34,380,0.922,bicubic +maxvit_large_tf_224.in1k,96.960,3.040,99.250,0.750,211.79,224,0.950,bicubic +tf_efficientnet_b4.ns_jft_in1k,96.950,3.050,99.580,0.420,19.34,380,0.922,bicubic +mvitv2_large,96.950,3.050,99.400,0.600,217.99,224,0.900,bicubic seresnextaa101d_32x8d,96.950,3.050,99.390,0.610,93.59,288,1.000,bicubic +maxvit_base_tf_224.in1k,96.950,3.050,99.260,0.740,119.47,224,0.950,bicubic deit3_large_patch16_224,96.940,3.060,99.340,0.660,304.37,224,0.900,bicubic xcit_small_12_p16_384_dist,96.930,3.070,99.400,0.600,26.25,384,1.000,bicubic +volo_d1_384,96.920,3.080,99.520,0.480,26.78,384,1.000,bicubic +dm_nfnet_f1,96.920,3.080,99.410,0.590,132.63,320,0.910,bicubic xcit_medium_24_p8_224_dist,96.920,3.080,99.390,0.610,84.32,224,1.000,bicubic -volo_d1_384,96.910,3.090,99.520,0.480,26.78,384,1.000,bicubic resnetrs420,96.910,3.090,99.460,0.540,191.89,416,1.000,bicubic -dm_nfnet_f1,96.910,3.090,99.410,0.590,132.63,320,0.910,bicubic -deit3_huge_patch14_224,96.890,3.110,99.480,0.520,632.13,224,0.900,bicubic -vit_base_patch16_224,96.880,3.120,99.530,0.470,86.57,224,0.900,bicubic -convnext_tiny_384_in22ft1k,96.880,3.120,99.470,0.530,28.59,384,1.000,bicubic +vit_base_patch16_224.augreg_in21k_ft_in1k,96.880,3.120,99.530,0.470,86.57,224,0.900,bicubic +deit3_huge_patch14_224,96.880,3.120,99.480,0.520,632.13,224,0.900,bicubic xcit_small_24_p8_224_dist,96.870,3.130,99.480,0.520,47.63,224,1.000,bicubic resnetv2_152x2_bit_teacher_384,96.830,3.170,99.450,0.550,236.34,384,1.000,bicubic -ig_resnext101_32x16d,96.810,3.190,99.600,0.400,194.03,224,0.875,bilinear +ig_resnext101_32x16d,96.820,3.180,99.590,0.410,194.03,224,0.875,bilinear 
+convnext_base.fb_in1k,96.820,3.180,99.410,0.590,88.59,288,1.000,bicubic +maxxvit_rmlp_small_rw_256,96.810,3.190,99.380,0.620,66.01,256,0.950,bicubic xcit_large_24_p16_224_dist,96.800,3.200,99.350,0.650,189.10,224,1.000,bicubic -vit_large_r50_s32_224,96.790,3.210,99.350,0.650,328.99,224,0.900,bicubic +vit_large_r50_s32_224.augreg_in21k_ft_in1k,96.790,3.210,99.350,0.650,328.99,224,0.900,bicubic seresnet152d,96.770,3.230,99.450,0.550,66.84,320,1.000,bicubic -seresnext101_32x8d,96.770,3.230,99.350,0.650,93.57,288,1.000,bicubic +mvitv2_base,96.770,3.230,99.270,0.730,51.47,224,0.900,bicubic resnetrs350,96.760,3.240,99.370,0.630,163.96,384,1.000,bicubic -swinv2_base_window16_256,96.760,3.240,99.350,0.650,87.92,256,0.900,bicubic -convnext_large,96.760,3.240,99.300,0.700,197.77,224,0.875,bicubic -tf_efficientnetv2_s_in21ft1k,96.720,3.280,99.420,0.580,21.46,384,1.000,bicubic +flexivit_base.1200ep_in1k,96.760,3.240,99.360,0.640,86.59,240,0.950,bicubic +seresnext101_32x8d,96.760,3.240,99.340,0.660,93.57,288,1.000,bicubic +swinv2_base_window16_256,96.750,3.250,99.350,0.650,87.92,256,0.900,bicubic +tf_efficientnetv2_s.in21k_ft_in1k,96.730,3.270,99.420,0.580,21.46,384,1.000,bicubic +seresnext101d_32x8d,96.730,3.270,99.360,0.640,93.59,288,1.000,bicubic resnet200d,96.720,3.280,99.330,0.670,64.69,320,1.000,bicubic resnetv2_50x3_bitm,96.710,3.290,99.550,0.450,217.32,448,1.000,bilinear regnetz_040h,96.710,3.290,99.500,0.500,28.94,320,1.000,bicubic regnetz_040,96.710,3.290,99.470,0.530,27.12,320,1.000,bicubic -seresnext101d_32x8d,96.710,3.290,99.360,0.640,93.59,288,1.000,bicubic -vit_small_patch16_384,96.700,3.300,99.480,0.520,22.20,384,1.000,bicubic +vit_base_patch16_384.orig_in21k_ft_in1k,96.700,3.300,99.510,0.490,86.86,384,1.000,bicubic +vit_small_patch16_384.augreg_in21k_ft_in1k,96.700,3.300,99.480,0.520,22.20,384,1.000,bicubic +edgenext_base,96.700,3.300,99.430,0.570,18.51,320,1.000,bicubic resnetrs200,96.700,3.300,99.370,0.630,93.21,320,1.000,bicubic eca_nfnet_l1,96.700,3.300,99.290,0.710,41.41,320,1.000,bicubic xcit_small_12_p8_224_dist,96.690,3.310,99.390,0.610,26.21,224,1.000,bicubic +maxvit_small_tf_224.in1k,96.690,3.310,99.370,0.630,68.93,224,0.950,bicubic resnetrs270,96.690,3.310,99.350,0.650,129.86,352,1.000,bicubic -vit_small_r26_s32_384,96.680,3.320,99.580,0.420,36.47,384,1.000,bicubic -tf_efficientnet_b5_ap,96.680,3.320,99.460,0.540,30.39,456,0.934,bicubic -tf_efficientnet_b6,96.670,3.330,99.370,0.630,43.04,528,0.942,bicubic -pit_b_distilled_224,96.670,3.330,99.350,0.650,74.79,224,0.900,bicubic +vit_small_r26_s32_384.augreg_in21k_ft_in1k,96.680,3.320,99.570,0.430,36.47,384,1.000,bicubic +tf_efficientnet_b5.ap_in1k,96.680,3.320,99.460,0.540,30.39,456,0.934,bicubic +pit_b_distilled_224,96.680,3.320,99.350,0.650,74.79,224,0.900,bicubic +tf_efficientnet_b6.aa_in1k,96.670,3.330,99.370,0.630,43.04,528,0.942,bicubic +vit_medium_patch16_gap_256.in12k_ft_in1k,96.660,3.340,99.510,0.490,38.86,256,0.950,bicubic deit3_small_patch16_224_in21ft1k,96.660,3.340,99.330,0.670,22.06,224,1.000,bicubic +flexivit_base.600ep_in1k,96.630,3.370,99.330,0.670,86.59,240,0.950,bicubic resmlp_big_24_224_in22ft1k,96.620,3.380,99.510,0.490,129.14,224,0.875,bicubic regnetz_d8,96.620,3.380,99.450,0.550,23.37,320,1.000,bicubic -regnetz_d8_evos,96.610,3.390,99.440,0.560,23.46,320,0.950,bicubic +flexivit_base.300ep_in1k,96.620,3.380,99.270,0.730,86.59,240,0.950,bicubic +regnetz_d8_evos,96.610,3.390,99.450,0.550,23.46,320,0.950,bicubic resnest200e,96.610,3.390,99.350,0.650,70.20,320,0.909,bicubic 
-swsl_resnext101_32x16d,96.600,3.400,99.530,0.470,194.03,224,0.875,bilinear +swsl_resnext101_32x16d,96.600,3.400,99.520,0.480,194.03,224,0.875,bilinear regnetz_d32,96.600,3.400,99.380,0.620,27.58,320,0.950,bicubic xcit_medium_24_p16_224_dist,96.590,3.410,99.270,0.730,84.40,224,1.000,bicubic +maxvit_rmlp_small_rw_224,96.590,3.410,99.110,0.890,64.90,224,0.900,bicubic resnetrs152,96.580,3.420,99.240,0.760,86.62,320,1.000,bicubic +gcvit_base,96.570,3.430,99.230,0.770,90.32,224,0.875,bicubic +convnext_small.fb_in1k,96.560,3.440,99.340,0.660,50.22,288,1.000,bicubic +cait_xs24_384,96.550,3.450,99.420,0.580,26.67,384,1.000,bicubic xcit_tiny_24_p8_384_dist,96.550,3.450,99.320,0.680,12.11,384,1.000,bicubic -cait_xs24_384,96.540,3.460,99.420,0.580,26.67,384,1.000,bicubic -efficientnetv2_rw_s,96.540,3.460,99.360,0.640,23.94,384,1.000,bicubic +efficientnetv2_rw_s.ra2_in1k,96.540,3.460,99.360,0.640,23.94,384,1.000,bicubic swinv2_base_window8_256,96.540,3.460,99.270,0.730,87.92,256,0.900,bicubic +coatnet_rmlp_2_rw_224,96.540,3.460,99.100,0.900,73.88,224,0.950,bicubic regnety_080,96.530,3.470,99.320,0.680,39.18,288,1.000,bicubic crossvit_18_dagger_408,96.530,3.470,99.260,0.740,44.61,408,1.000,bicubic resnest269e,96.520,3.480,99.350,0.650,110.93,416,0.928,bicubic -vit_base_patch32_384,96.490,3.510,99.410,0.590,88.30,384,1.000,bicubic -convnext_base,96.470,3.530,99.230,0.770,88.59,224,0.875,bicubic -swinv2_small_window16_256,96.460,3.540,99.200,0.800,49.73,256,0.900,bicubic +vit_base_patch32_384.augreg_in21k_ft_in1k,96.490,3.510,99.410,0.590,88.30,384,1.000,bicubic +swinv2_small_window16_256,96.470,3.530,99.200,0.800,49.73,256,0.900,bicubic +vit_base_patch16_224_miil.in21k_ft_in1k,96.460,3.540,99.300,0.700,86.54,224,0.875,bilinear resmlp_big_24_distilled_224,96.450,3.550,99.310,0.690,129.14,224,0.875,bicubic -vit_base_patch16_224_miil,96.450,3.550,99.300,0.700,86.54,224,0.875,bilinear cs3se_edgenet_x,96.440,3.560,99.400,0.600,50.72,320,1.000,bicubic -swsl_resnext101_32x4d,96.430,3.570,99.470,0.530,44.18,224,0.875,bilinear +swsl_resnext101_32x4d,96.420,3.580,99.470,0.530,44.18,224,0.875,bilinear +maxvit_rmlp_tiny_rw_256,96.410,3.590,99.390,0.610,29.15,256,0.950,bicubic regnetv_064,96.410,3.590,99.360,0.640,30.58,288,1.000,bicubic xcit_large_24_p8_224,96.410,3.590,98.980,1.020,188.93,224,1.000,bicubic xcit_small_24_p8_224,96.400,3.600,99.150,0.850,47.63,224,1.000,bicubic -tf_efficientnet_b3_ns,96.390,3.610,99.350,0.650,12.23,300,0.904,bicubic +tf_efficientnet_b3.ns_jft_in1k,96.390,3.610,99.350,0.650,12.23,300,0.904,bicubic crossvit_15_dagger_408,96.390,3.610,99.160,0.840,28.50,408,1.000,bicubic cait_s24_224,96.380,3.620,99.150,0.850,46.92,224,1.000,bicubic resnet152d,96.360,3.640,99.390,0.610,60.21,320,1.000,bicubic regnety_064,96.360,3.640,99.230,0.770,30.58,288,1.000,bicubic +mvitv2_small,96.360,3.640,99.200,0.800,34.87,224,0.900,bicubic +pvt_v2_b5,96.360,3.640,99.170,0.830,81.96,224,0.900,bicubic regnety_160,96.350,3.650,99.330,0.670,83.59,288,1.000,bicubic -tf_efficientnet_b5,96.350,3.650,99.310,0.690,30.39,456,0.934,bicubic +tf_efficientnet_b5.ra_in1k,96.350,3.650,99.310,0.690,30.39,456,0.934,bicubic xception65,96.350,3.650,99.240,0.760,39.92,299,0.940,bicubic -tf_efficientnetv2_s,96.340,3.660,99.200,0.800,21.46,384,1.000,bicubic +tf_efficientnetv2_s.in1k,96.340,3.660,99.200,0.800,21.46,384,1.000,bicubic volo_d1_224,96.330,3.670,99.310,0.690,26.63,224,0.960,bicubic -ig_resnext101_32x8d,96.310,3.690,99.430,0.570,88.79,224,0.875,bilinear -resnet101d,96.300,3.700,99.230,0.770,44.57,320,1.000,bicubic 
+pvt_v2_b4,96.330,3.670,99.180,0.820,62.56,224,0.900,bicubic +ig_resnext101_32x8d,96.320,3.680,99.430,0.570,88.79,224,0.875,bilinear deit3_base_patch16_224,96.300,3.700,99.180,0.820,86.59,224,0.900,bicubic -swinv2_small_window8_256,96.290,3.710,99.210,0.790,49.73,256,0.900,bicubic +gcvit_small,96.300,3.700,99.140,0.860,51.09,224,0.875,bicubic +resnet101d,96.290,3.710,99.230,0.770,44.57,320,1.000,bicubic +swinv2_small_window8_256,96.270,3.730,99.210,0.790,49.73,256,0.900,bicubic twins_svt_large,96.270,3.730,99.170,0.830,99.27,224,0.900,bicubic jx_nest_base,96.250,3.750,99.210,0.790,67.72,224,0.875,bicubic swin_s3_base_224,96.250,3.750,99.140,0.860,71.13,224,0.900,bicubic -swin_s3_small_224,96.230,3.770,99.090,0.910,49.74,224,0.900,bicubic -convnext_tiny_in22ft1k,96.220,3.780,99.340,0.660,28.59,224,0.875,bicubic +maxvit_tiny_rw_224,96.240,3.760,99.120,0.880,29.06,224,0.950,bicubic +tf_efficientnetv2_b3.in21k_ft_in1k,96.220,3.780,99.230,0.770,14.36,300,0.900,bicubic +swin_s3_small_224,96.220,3.780,99.080,0.920,49.74,224,0.900,bicubic xcit_small_24_p16_224_dist,96.210,3.790,99.210,0.790,47.67,224,1.000,bicubic xception65p,96.210,3.790,99.180,0.820,39.82,299,0.940,bicubic deit3_small_patch16_384,96.200,3.800,99.290,0.710,22.21,384,1.000,bicubic -regnetv_040,96.190,3.810,99.330,0.670,20.64,288,1.000,bicubic +regnetv_040,96.180,3.820,99.330,0.670,20.64,288,1.000,bicubic swinv2_cr_small_ns_224,96.180,3.820,99.140,0.860,49.70,224,0.900,bicubic mobilevitv2_175_384_in22ft1k,96.180,3.820,99.130,0.870,14.25,384,1.000,bicubic -convnext_small,96.170,3.830,99.100,0.900,50.22,224,0.875,bicubic -tf_efficientnet_b4_ap,96.160,3.840,99.280,0.720,19.34,380,0.922,bicubic +gcvit_tiny,96.170,3.830,99.240,0.760,28.22,224,0.875,bicubic +tf_efficientnet_b4.ap_in1k,96.160,3.840,99.280,0.720,19.34,380,0.922,bicubic +tresnet_v2_l,96.160,3.840,99.240,0.760,46.17,224,0.875,bilinear twins_svt_base,96.160,3.840,99.060,0.940,56.07,224,0.900,bicubic dm_nfnet_f0,96.150,3.850,99.250,0.750,71.49,256,0.900,bicubic -efficientnet_b4,96.150,3.850,99.190,0.810,19.34,384,1.000,bicubic +efficientnet_b4.ra2_in1k,96.150,3.850,99.200,0.800,19.34,384,1.000,bicubic twins_pcpvt_large,96.150,3.850,99.180,0.820,60.99,224,0.900,bicubic deit_base_patch16_384,96.150,3.850,99.140,0.860,86.86,384,1.000,bicubic +vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,96.140,3.860,99.210,0.790,88.22,224,0.900,bicubic sequencer2d_l,96.140,3.860,99.160,0.840,54.30,224,0.875,bicubic -regnetz_c16_evos,96.130,3.870,99.360,0.640,13.49,320,0.950,bicubic -resnetv2_50x1_bit_distilled,96.120,3.880,99.280,0.720,25.55,224,0.875,bicubic +resnetv2_50x1_bit_distilled,96.130,3.870,99.280,0.720,25.55,224,0.875,bicubic +regnetz_c16_evos,96.120,3.880,99.360,0.640,13.49,320,0.950,bicubic nfnet_l0,96.120,3.880,99.240,0.760,35.07,288,1.000,bicubic +efficientformer_l7,96.110,3.890,99.270,0.730,82.23,224,0.950,bicubic xcit_small_12_p8_224,96.110,3.890,99.160,0.840,26.21,224,1.000,bicubic xcit_medium_24_p8_224,96.110,3.890,98.890,1.110,84.32,224,1.000,bicubic resnetv2_101x1_bitm,96.100,3.900,99.280,0.720,44.54,448,1.000,bilinear -resnetv2_152x2_bit_teacher,96.100,3.900,99.270,0.730,236.34,224,0.875,bicubic +resnetv2_152x2_bit_teacher,96.100,3.900,99.280,0.720,236.34,224,0.875,bicubic +maxvit_tiny_tf_224.in1k,96.100,3.900,99.250,0.750,30.92,224,0.950,bicubic deit_base_distilled_patch16_224,96.090,3.910,99.190,0.810,87.34,224,0.900,bicubic resnext101_64x4d,96.080,3.920,99.240,0.760,83.46,288,1.000,bicubic xcit_tiny_12_p8_384_dist,96.080,3.920,99.140,0.860,6.71,384,1.000,bicubic 
-swinv2_cr_small_224,96.060,3.940,98.870,1.130,49.70,224,0.900,bicubic -cs3edgenet_x,96.050,3.950,99.140,0.860,47.82,288,1.000,bicubic -cs3sedarknet_x,96.040,3.960,99.110,0.890,35.40,288,1.000,bicubic +deit3_medium_patch16_224,96.070,3.930,99.200,0.800,38.85,224,0.900,bicubic +swinv2_cr_small_224,96.070,3.930,98.870,1.130,49.70,224,0.900,bicubic mobilevitv2_200_384_in22ft1k,96.040,3.960,99.080,0.920,18.45,384,1.000,bicubic -xcit_small_12_p16_224_dist,96.020,3.980,99.130,0.870,26.25,224,1.000,bicubic -regnety_040,96.010,3.990,99.180,0.820,20.65,288,1.000,bicubic +maxxvit_rmlp_nano_rw_256,96.030,3.970,99.260,0.740,16.78,256,0.950,bicubic +xcit_small_12_p16_224_dist,96.030,3.970,99.150,0.850,26.25,224,1.000,bicubic +cs3edgenet_x,96.030,3.970,99.140,0.860,47.82,288,1.000,bicubic +cs3sedarknet_x,96.030,3.970,99.110,0.890,35.40,288,1.000,bicubic +coatnet_1_rw_224,96.030,3.970,99.050,0.950,41.72,224,0.950,bicubic +regnety_040,96.020,3.980,99.190,0.810,20.65,288,1.000,bicubic +convnext_tiny_hnf.a2h_in1k,96.020,3.980,99.070,0.930,28.59,288,1.000,bicubic +pvt_v2_b3,95.990,4.010,99.190,0.810,45.24,224,0.900,bicubic sequencer2d_s,95.990,4.010,99.050,0.950,27.65,224,0.875,bicubic +convnext_nano.in12k_ft_in1k,95.980,4.020,99.320,0.680,15.59,288,1.000,bicubic +maxvit_rmlp_nano_rw_256,95.980,4.020,98.970,1.030,15.50,256,0.950,bicubic regnety_032,95.970,4.030,99.190,0.810,19.44,288,1.000,bicubic tresnet_xl_448,95.970,4.030,99.130,0.870,78.44,448,0.875,bilinear jx_nest_small,95.960,4.040,99.030,0.970,38.35,224,0.875,bicubic +xcit_tiny_24_p16_384_dist,95.950,4.050,99.220,0.780,12.12,384,1.000,bicubic eca_nfnet_l0,95.950,4.050,99.210,0.790,24.14,288,1.000,bicubic -swinv2_tiny_window16_256,95.940,4.060,99.140,0.860,28.35,256,0.900,bicubic -xcit_tiny_24_p16_384_dist,95.920,4.080,99.220,0.780,12.12,384,1.000,bicubic +coatnet_rmlp_1_rw_224,95.950,4.050,99.160,0.840,41.69,224,0.950,bicubic +swinv2_tiny_window16_256,95.930,4.070,99.140,0.860,28.35,256,0.900,bicubic +maxvit_nano_rw_256,95.930,4.070,99.000,1.000,15.45,256,0.950,bicubic swin_small_patch4_window7_224,95.910,4.090,99.020,0.980,49.61,224,0.900,bicubic -tf_efficientnet_b4,95.900,4.100,99.170,0.830,19.34,380,0.922,bicubic -resnet152,95.900,4.100,99.080,0.920,60.19,224,0.950,bicubic -resnet51q,95.870,4.130,99.130,0.870,35.70,288,1.000,bilinear -swsl_resnext50_32x4d,95.860,4.140,99.250,0.750,25.03,224,0.875,bilinear +tf_efficientnet_b4.aa_in1k,95.900,4.100,99.170,0.830,19.34,380,0.922,bicubic +resnet152,95.880,4.120,99.070,0.930,60.19,224,0.950,bicubic +swsl_resnext50_32x4d,95.870,4.130,99.250,0.750,25.03,224,0.875,bilinear +mvitv2_tiny,95.870,4.130,99.070,0.930,24.17,224,0.900,bicubic resnest101e,95.860,4.140,99.210,0.790,48.28,256,0.875,bilinear cs3darknet_x,95.860,4.140,99.180,0.820,35.05,288,1.000,bicubic +resnet51q,95.860,4.140,99.120,0.880,35.70,288,1.000,bilinear tresnet_l_448,95.860,4.140,99.120,0.880,55.99,448,0.875,bilinear -cait_xxs36_384,95.840,4.160,99.090,0.910,17.37,384,1.000,bicubic -vit_large_patch32_384,95.830,4.170,99.150,0.850,306.63,384,1.000,bicubic +cait_xxs36_384,95.850,4.150,99.090,0.910,17.37,384,1.000,bicubic +vit_large_patch32_384.orig_in21k_ft_in1k,95.830,4.170,99.150,0.850,306.63,384,1.000,bicubic xcit_tiny_24_p8_224_dist,95.810,4.190,99.210,0.790,12.11,224,1.000,bicubic sequencer2d_m,95.810,4.190,99.110,0.890,38.31,224,0.875,bicubic +ssl_resnext101_32x16d,95.800,4.200,99.180,0.820,194.03,224,0.875,bilinear regnetz_c16,95.800,4.200,99.100,0.900,13.46,320,0.940,bicubic 
-ssl_resnext101_32x16d,95.790,4.210,99.180,0.820,194.03,224,0.875,bilinear +convnext_tiny.fb_in1k,95.790,4.210,99.160,0.840,28.59,288,1.000,bicubic twins_pcpvt_base,95.790,4.210,99.130,0.870,43.83,224,0.900,bicubic -resnet61q,95.780,4.220,98.990,1.010,36.85,288,1.000,bicubic -tf_efficientnet_b2_ns,95.760,4.240,99.120,0.880,9.11,260,0.890,bicubic -vit_relpos_base_patch16_clsgap_224,95.760,4.240,99.040,0.960,86.43,224,0.900,bicubic -gc_efficientnetv2_rw_t,95.740,4.260,99.020,0.980,13.68,288,1.000,bicubic -efficientnet_b3,95.710,4.290,99.040,0.960,12.23,320,1.000,bicubic -tresnet_m,95.710,4.290,99.030,0.970,31.39,224,0.875,bilinear +tf_efficientnet_b2.ns_jft_in1k,95.770,4.230,99.120,0.880,9.11,260,0.890,bicubic +resnet61q,95.770,4.230,98.990,1.010,36.85,288,1.000,bicubic +vit_relpos_base_patch16_clsgap_224.sw_in1k,95.760,4.240,99.040,0.960,86.43,224,0.900,bicubic +gc_efficientnetv2_rw_t.agc_in1k,95.740,4.260,99.020,0.980,13.68,288,1.000,bicubic +tresnet_m,95.720,4.280,99.030,0.970,31.39,224,0.875,bilinear +efficientnet_b3.ra2_in1k,95.710,4.290,99.040,0.960,12.23,320,1.000,bicubic pnasnet5large,95.710,4.290,98.920,1.080,86.06,331,0.911,bicubic mobilevitv2_150_384_in22ft1k,95.700,4.300,99.140,0.860,10.59,384,1.000,bicubic +coatnet_bn_0_rw_224,95.700,4.300,99.050,0.950,27.44,224,0.950,bicubic crossvit_15_dagger_240,95.690,4.310,98.830,1.170,28.21,240,0.875,bicubic +flexivit_small.600ep_in1k,95.680,4.320,99.050,0.950,22.06,240,0.950,bicubic nasnetalarge,95.680,4.320,98.930,1.070,88.75,331,0.911,bicubic xcit_tiny_24_p8_224,95.670,4.330,99.050,0.950,12.11,224,1.000,bicubic -vit_small_r26_s32_224,95.640,4.360,99.190,0.810,36.43,224,0.900,bicubic +resnetv2_101,95.640,4.360,98.990,1.010,44.54,224,0.950,bicubic poolformer_m48,95.640,4.360,98.940,1.060,73.47,224,0.950,bicubic -pit_b_224,95.640,4.360,98.670,1.330,73.76,224,0.900,bicubic -resnetv2_101,95.620,4.380,98.990,1.010,44.54,224,0.950,bicubic +pit_b_224,95.640,4.360,98.660,1.340,73.76,224,0.900,bicubic +vit_small_r26_s32_224.augreg_in21k_ft_in1k,95.630,4.370,99.190,0.810,36.43,224,0.900,bicubic +efficientnetv2_rw_t.ra2_in1k,95.610,4.390,99.070,0.930,13.65,288,1.000,bicubic resnetv2_50d_evos,95.610,4.390,99.030,0.970,25.59,288,0.950,bicubic -efficientnetv2_rw_t,95.600,4.400,99.070,0.930,13.65,288,1.000,bicubic +efficientformer_l3,95.600,4.400,99.160,0.840,31.41,224,0.950,bicubic +gcvit_xtiny,95.580,4.420,99.040,0.960,19.98,224,0.875,bicubic crossvit_18_dagger_240,95.570,4.430,99.060,0.940,44.27,240,0.875,bicubic -vit_relpos_base_patch16_224,95.570,4.430,99.030,0.970,86.43,224,0.900,bicubic -convnext_tiny,95.550,4.450,99.000,1.000,28.59,224,0.875,bicubic +flexivit_small.1200ep_in1k,95.560,4.440,99.120,0.880,22.06,240,0.950,bicubic +vit_relpos_base_patch16_224.sw_in1k,95.560,4.440,99.030,0.970,86.43,224,0.900,bicubic +pvt_v2_b2_li,95.550,4.450,98.990,1.010,22.55,224,0.900,bicubic convit_base,95.550,4.450,98.870,1.130,86.54,224,0.875,bicubic coat_lite_small,95.540,4.460,98.860,1.140,19.84,224,0.900,bicubic ecaresnet101d,95.530,4.470,99.130,0.870,44.57,224,0.875,bicubic levit_384,95.530,4.470,99.050,0.950,39.13,224,0.900,bicubic +vit_base_patch32_clip_224.laion2b_ft_in1k,95.530,4.470,98.860,1.140,88.22,224,0.900,bicubic +crossvit_base_240,95.530,4.470,98.820,1.180,105.03,240,0.875,bicubic xcit_small_24_p16_224,95.530,4.470,98.770,1.230,47.67,224,1.000,bicubic xcit_medium_24_p16_224,95.530,4.470,98.740,1.260,84.40,224,1.000,bicubic -crossvit_base_240,95.520,4.480,98.820,1.180,105.03,240,0.875,bicubic 
+fbnetv3_g.ra2_in1k,95.520,4.480,98.990,1.010,16.62,288,0.950,bilinear +xception41p,95.520,4.480,98.920,1.080,26.91,299,0.940,bicubic ecaresnet50t,95.510,4.490,99.120,0.880,25.57,320,0.950,bicubic -vit_relpos_medium_patch16_rpn_224,95.510,4.490,99.080,0.920,38.73,224,0.900,bicubic -convnext_tiny_hnf,95.510,4.490,99.020,0.980,28.59,224,0.950,bicubic -fbnetv3_g,95.510,4.490,98.990,1.010,16.62,288,0.950,bilinear -xception41p,95.510,4.490,98.910,1.090,26.91,299,0.940,bicubic +vit_relpos_medium_patch16_rpn_224.sw_in1k,95.510,4.490,99.080,0.920,38.73,224,0.900,bicubic swinv2_tiny_window8_256,95.500,4.500,99.120,0.880,28.35,256,0.900,bicubic -ssl_resnext101_32x8d,95.490,4.510,99.120,0.880,88.79,224,0.875,bilinear -vit_relpos_medium_patch16_cls_224,95.480,4.520,98.950,1.050,38.76,224,0.900,bicubic -visformer_small,95.470,4.530,98.900,1.100,40.22,224,0.900,bicubic -vit_relpos_medium_patch16_224,95.460,4.540,98.960,1.040,38.75,224,0.900,bicubic +pvt_v2_b2,95.500,4.500,99.000,1.000,25.36,224,0.900,bicubic +flexivit_small.300ep_in1k,95.500,4.500,98.960,1.040,22.06,240,0.950,bicubic +visformer_small,95.490,4.510,98.900,1.100,40.22,224,0.900,bicubic +vit_relpos_medium_patch16_cls_224.sw_in1k,95.480,4.520,98.950,1.050,38.76,224,0.900,bicubic +ssl_resnext101_32x8d,95.470,4.530,99.110,0.890,88.79,224,0.875,bilinear +vit_relpos_medium_patch16_224.sw_in1k,95.460,4.540,98.960,1.040,38.75,224,0.900,bicubic ssl_resnext101_32x4d,95.440,4.560,99.130,0.870,44.18,224,0.875,bilinear tresnet_xl,95.440,4.560,99.050,0.950,78.44,224,0.875,bilinear deit_base_patch16_224,95.440,4.560,98.840,1.160,86.57,224,0.900,bicubic crossvit_18_240,95.440,4.560,98.790,1.210,43.27,240,0.875,bicubic resnetv2_50d_gn,95.430,4.570,99.040,0.960,25.57,288,0.950,bicubic resnetrs101,95.430,4.570,99.030,0.970,63.62,288,0.940,bicubic -halo2botnet50ts_256,95.420,4.580,99.010,0.990,22.64,256,0.950,bicubic +coatnext_nano_rw_224,95.430,4.570,99.000,1.000,14.70,224,0.900,bicubic +coatnet_rmlp_nano_rw_224,95.430,4.570,98.990,1.010,15.15,224,0.900,bicubic +coatnet_0_rw_224,95.430,4.570,98.720,1.280,27.44,224,0.950,bicubic xcit_small_12_p16_224,95.420,4.580,98.840,1.160,26.25,224,1.000,bicubic -xcit_large_24_p16_224,95.420,4.580,98.620,1.380,189.10,224,1.000,bicubic -swsl_resnet50,95.410,4.590,99.300,0.700,25.56,224,0.875,bilinear -edgenext_small,95.410,4.590,99.100,0.900,5.59,320,1.000,bicubic -vit_base_patch16_rpn_224,95.380,4.620,98.930,1.070,86.54,224,0.900,bicubic +xcit_large_24_p16_224,95.420,4.580,98.610,1.390,189.10,224,1.000,bicubic +swsl_resnet50,95.410,4.590,99.290,0.710,25.56,224,0.875,bilinear +edgenext_small,95.400,4.600,99.100,0.900,5.59,320,1.000,bicubic +halo2botnet50ts_256,95.390,4.610,99.010,0.990,22.64,256,0.950,bicubic +vit_base_patch16_rpn_224.in1k,95.380,4.620,98.930,1.070,86.54,224,0.900,bicubic poolformer_m36,95.380,4.620,98.850,1.150,56.17,224,0.950,bicubic -vit_small_patch16_224,95.370,4.630,99.150,0.850,22.05,224,0.900,bicubic -swinv2_cr_tiny_ns_224,95.370,4.630,98.940,1.060,28.33,224,0.900,bicubic -resnet101,95.360,4.640,98.860,1.140,44.55,224,0.950,bicubic -convnext_nano,95.360,4.640,98.850,1.150,15.59,288,1.000,bicubic -tf_efficientnet_b3_ap,95.320,4.680,98.900,1.100,12.23,300,0.904,bicubic -cs3sedarknet_l,95.310,4.690,99.130,0.870,21.91,288,0.950,bicubic +vit_small_patch16_224.augreg_in21k_ft_in1k,95.370,4.630,99.150,0.850,22.05,224,0.900,bicubic +swinv2_cr_tiny_ns_224,95.370,4.630,98.930,1.070,28.33,224,0.900,bicubic +convnext_nano.d1h_in1k,95.350,4.650,98.860,1.140,15.59,288,1.000,bicubic 
+resnet101,95.350,4.650,98.860,1.140,44.55,224,0.950,bicubic +vit_base_patch16_224.orig_in21k_ft_in1k,95.330,4.670,99.000,1.000,86.57,224,0.900,bicubic +tf_efficientnet_b3.ap_in1k,95.320,4.680,98.900,1.100,12.23,300,0.904,bicubic +cs3sedarknet_l,95.310,4.690,99.120,0.880,21.91,288,0.950,bicubic mixer_b16_224_miil,95.300,4.700,98.880,1.120,59.88,224,0.875,bilinear -tresnet_l,95.290,4.710,99.010,0.990,55.99,224,0.875,bilinear -cait_xxs24_384,95.280,4.720,98.960,1.040,12.03,384,1.000,bicubic +vit_small_patch16_384.augreg_in1k,95.290,4.710,99.000,1.000,22.20,384,1.000,bicubic +tresnet_l,95.280,4.720,99.010,0.990,55.99,224,0.875,bilinear +cait_xxs24_384,95.260,4.740,98.960,1.040,12.03,384,1.000,bicubic +jx_nest_tiny,95.250,4.750,98.980,1.020,17.06,224,0.875,bicubic +coatnet_nano_rw_224,95.250,4.750,98.870,1.130,15.14,224,0.900,bicubic pit_s_distilled_224,95.240,4.760,99.050,0.950,24.04,224,0.900,bicubic -jx_nest_tiny,95.240,4.760,98.980,1.020,17.06,224,0.875,bicubic -vit_srelpos_medium_patch16_224,95.230,4.770,98.990,1.010,38.74,224,0.900,bicubic +vit_srelpos_medium_patch16_224.sw_in1k,95.230,4.770,98.990,1.010,38.74,224,0.900,bicubic mobilevitv2_175_in22ft1k,95.230,4.770,98.790,1.210,14.25,256,0.888,bicubic resnetaa50,95.210,4.790,98.930,1.070,25.56,288,1.000,bicubic twins_pcpvt_small,95.210,4.790,98.880,1.120,24.11,224,0.900,bicubic convit_small,95.200,4.800,98.900,1.100,27.78,224,0.875,bicubic twins_svt_small,95.200,4.800,98.880,1.120,24.06,224,0.900,bicubic -tf_efficientnet_b1_ns,95.180,4.820,99.110,0.890,7.79,240,0.882,bicubic +tf_efficientnet_b1.ns_jft_in1k,95.170,4.830,99.110,0.890,7.79,240,0.882,bicubic cs3darknet_focus_l,95.170,4.830,98.960,1.040,21.15,288,0.950,bicubic -mobilevitv2_200_in22ft1k,95.160,4.840,98.950,1.050,18.45,256,0.888,bicubic -vit_relpos_small_patch16_224,95.160,4.840,98.950,1.050,21.98,224,0.900,bicubic +vit_relpos_small_patch16_224.sw_in1k,95.160,4.840,98.950,1.050,21.98,224,0.900,bicubic swin_s3_tiny_224,95.160,4.840,98.940,1.060,28.33,224,0.900,bicubic -tf_efficientnetv2_b3,95.160,4.840,98.820,1.180,14.36,300,0.904,bicubic +mobilevitv2_200_in22ft1k,95.160,4.840,98.930,1.070,18.45,256,0.888,bicubic +lamhalobotnet50ts_256,95.160,4.840,98.880,1.120,22.57,256,0.950,bicubic +tf_efficientnetv2_b3.in1k,95.160,4.840,98.820,1.180,14.36,300,0.904,bicubic crossvit_15_240,95.150,4.850,98.930,1.070,27.53,240,0.875,bicubic -lamhalobotnet50ts_256,95.150,4.850,98.880,1.120,22.57,256,0.950,bicubic mobilevitv2_150_in22ft1k,95.140,4.860,98.860,1.140,10.59,256,0.888,bicubic -halonet50ts,95.140,4.860,98.770,1.230,22.73,256,0.940,bicubic +swin_tiny_patch4_window7_224,95.140,4.860,98.850,1.150,28.29,224,0.900,bicubic xcit_tiny_12_p16_384_dist,95.130,4.870,99.020,0.980,6.72,384,1.000,bicubic -swin_tiny_patch4_window7_224,95.130,4.870,98.850,1.150,28.29,224,0.900,bicubic +convnext_nano_ols.d1h_in1k,95.130,4.870,98.720,1.280,15.65,288,1.000,bicubic +efficientnet_el.ra_in1k,95.120,4.880,98.990,1.010,10.59,300,0.904,bicubic cs3darknet_l,95.120,4.880,98.980,1.020,21.16,288,0.950,bicubic -efficientnet_el,95.120,4.880,98.980,1.020,10.59,300,0.904,bicubic -xcit_tiny_12_p8_224_dist,95.100,4.900,98.910,1.090,6.71,224,1.000,bicubic +vit_base_patch32_clip_224.openai_ft_in1k,95.120,4.880,98.980,1.020,88.22,224,0.900,bicubic +halonet50ts,95.100,4.900,98.780,1.220,22.73,256,0.940,bicubic poolformer_s36,95.090,4.910,98.910,1.090,30.86,224,0.900,bicubic gernet_l,95.090,4.910,98.900,1.100,31.08,256,0.875,bilinear ecaresnet101d_pruned,95.080,4.920,98.980,1.020,24.88,224,0.875,bicubic 
wide_resnet50_2,95.080,4.920,98.970,1.030,68.88,224,0.875,bicubic -convmixer_1536_20,95.070,4.930,99.030,0.970,51.63,224,0.960,bicubic +xcit_tiny_12_p8_224_dist,95.080,4.920,98.910,1.090,6.71,224,1.000,bicubic +regnetz_b16,95.070,4.930,99.050,0.950,9.72,288,0.940,bicubic legacy_senet154,95.070,4.930,98.830,1.170,115.09,224,0.875,bilinear -regnetz_b16,95.060,4.940,99.050,0.950,9.72,288,0.940,bicubic -vit_small_patch32_384,95.050,4.950,98.990,1.010,22.92,384,1.000,bicubic +convmixer_1536_20,95.060,4.940,99.030,0.970,51.63,224,0.960,bicubic +vit_small_patch32_384.augreg_in21k_ft_in1k,95.050,4.950,98.990,1.010,22.92,384,1.000,bicubic +vit_srelpos_small_patch16_224.sw_in1k,95.040,4.960,98.960,1.040,21.97,224,0.900,bicubic gluon_resnet152_v1s,95.040,4.960,98.930,1.070,60.32,224,0.875,bicubic +seresnext50_32x4d,95.040,4.960,98.880,1.120,27.56,224,0.875,bicubic tnt_s_patch16_224,95.040,4.960,98.830,1.170,23.76,224,0.900,bicubic -vit_srelpos_small_patch16_224,95.030,4.970,98.960,1.040,21.97,224,0.900,bicubic -seresnext50_32x4d,95.030,4.970,98.880,1.120,27.56,224,0.875,bicubic resnetv2_50x1_bitm,95.010,4.990,99.060,0.940,25.55,448,1.000,bilinear -tf_efficientnet_b3,95.010,4.990,98.910,1.090,12.23,300,0.904,bicubic +tf_efficientnet_b3.aa_in1k,95.010,4.990,98.910,1.090,12.23,300,0.904,bicubic levit_256,95.010,4.990,98.890,1.110,18.89,224,0.900,bicubic -vit_base_patch32_224,95.000,5.000,99.030,0.970,88.22,224,0.900,bicubic +vit_base_patch32_224.augreg_in21k_ft_in1k,95.000,5.000,99.030,0.970,88.22,224,0.900,bicubic deit3_small_patch16_224,95.000,5.000,98.460,1.540,22.06,224,0.900,bicubic tresnet_m_448,94.990,5.010,98.980,1.020,31.39,448,0.875,bilinear coat_mini,94.970,5.030,98.780,1.220,10.34,224,0.900,bicubic resnest50d_4s2x40d,94.960,5.040,99.070,0.930,30.42,224,0.875,bicubic -rexnet_200,94.950,5.050,99.010,0.990,16.37,224,0.875,bicubic -gluon_seresnext101_64x4d,94.920,5.080,98.830,1.170,88.23,224,0.875,bicubic +rexnet_200,94.940,5.060,99.010,0.990,16.37,224,0.875,bicubic +vit_base_patch16_384.augreg_in1k,94.940,5.060,98.890,1.110,86.86,384,1.000,bicubic +gluon_seresnext101_64x4d,94.930,5.070,98.830,1.170,88.23,224,0.875,bicubic gluon_seresnext101_32x4d,94.920,5.080,98.810,1.190,48.96,224,0.875,bicubic gluon_senet154,94.920,5.080,98.760,1.240,115.09,224,0.875,bicubic -mobilevitv2_175,94.890,5.110,98.860,1.140,14.25,256,0.888,bicubic -tf_efficientnet_lite4,94.880,5.120,99.020,0.980,13.01,380,0.920,bilinear -resmlp_36_distilled_224,94.880,5.120,98.840,1.160,44.69,224,0.875,bicubic -ssl_resnext50_32x4d,94.870,5.130,98.890,1.110,25.03,224,0.875,bilinear +mobilevitv2_175,94.900,5.100,98.870,1.130,14.25,256,0.888,bicubic +tf_efficientnet_lite4.in1k,94.890,5.110,99.020,0.980,13.01,380,0.920,bilinear +resmlp_36_distilled_224,94.890,5.110,98.850,1.150,44.69,224,0.875,bicubic +ssl_resnext50_32x4d,94.870,5.130,98.880,1.120,25.03,224,0.875,bilinear seresnet33ts,94.860,5.140,98.790,1.210,19.78,256,0.900,bicubic -resnest50d,94.850,5.150,98.880,1.120,27.48,224,0.875,bilinear +convnext_tiny.fb_in22k_ft_in1k,94.860,5.140,98.530,1.470,28.59,288,1.000,bicubic gcresnet50t,94.850,5.150,98.790,1.210,25.90,256,0.900,bicubic +cspresnext50,94.840,5.160,98.770,1.230,20.57,256,0.887,bilinear crossvit_small_240,94.830,5.170,99.020,0.980,26.86,240,0.875,bicubic -cspresnext50,94.830,5.170,98.770,1.230,20.57,256,0.887,bilinear +resnest50d,94.830,5.170,98.880,1.120,27.48,224,0.875,bilinear mobilevitv2_200,94.830,5.170,98.710,1.290,18.45,256,0.888,bicubic -sehalonet33ts,94.780,5.220,98.570,1.430,13.69,256,0.940,bicubic 
-lambda_resnet50ts,94.780,5.220,98.460,1.540,21.54,256,0.950,bicubic ecaresnetlight,94.770,5.230,98.800,1.200,30.16,224,0.875,bicubic +sehalonet33ts,94.770,5.230,98.570,1.430,13.69,256,0.940,bicubic +lambda_resnet50ts,94.770,5.230,98.470,1.530,21.54,256,0.950,bicubic resnest50d_1s4x24d,94.750,5.250,98.980,1.020,25.68,224,0.875,bicubic gluon_resnet152_v1d,94.740,5.260,98.740,1.260,60.21,224,0.875,bicubic +convnext_pico.d1_in1k,94.740,5.260,98.700,1.300,9.05,288,0.950,bicubic gluon_resnet101_v1s,94.720,5.280,98.820,1.180,44.67,224,0.875,bicubic deit_small_distilled_patch16_224,94.710,5.290,99.030,0.970,22.44,224,0.900,bicubic haloregnetz_b,94.700,5.300,98.660,1.340,11.68,224,0.940,bicubic -xcit_tiny_12_p8_224,94.690,5.310,98.830,1.170,6.71,224,1.000,bicubic +xcit_tiny_12_p8_224,94.680,5.320,98.830,1.170,6.71,224,1.000,bicubic +gluon_resnext101_64x4d,94.670,5.330,98.650,1.350,83.46,224,0.875,bicubic cspdarknet53,94.660,5.340,98.800,1.200,27.64,256,0.887,bilinear edgenext_small_rw,94.660,5.340,98.790,1.210,7.83,320,1.000,bicubic -gluon_resnext101_64x4d,94.660,5.340,98.650,1.350,83.46,224,0.875,bicubic resmlp_big_24_224,94.660,5.340,98.480,1.520,129.14,224,0.875,bicubic darknet53,94.630,5.370,98.890,1.110,41.61,288,1.000,bicubic -efficientnet_b3_pruned,94.630,5.370,98.760,1.240,9.86,300,0.904,bicubic -ecaresnet50d,94.620,5.380,98.890,1.110,25.58,224,0.875,bicubic +ecaresnet50d,94.630,5.370,98.890,1.110,25.58,224,0.875,bicubic +maxvit_rmlp_pico_rw_256,94.630,5.370,98.820,1.180,7.52,256,0.950,bicubic +efficientnet_b3_pruned.in1k,94.630,5.370,98.760,1.240,9.86,300,0.904,bicubic gernet_m,94.620,5.380,98.860,1.140,21.14,224,0.875,bilinear -efficientnet_b2,94.610,5.390,98.710,1.290,9.11,288,1.000,bicubic -pit_s_224,94.590,5.410,98.700,1.300,23.46,224,0.900,bicubic -sebotnet33ts_256,94.580,5.420,98.500,1.500,13.70,256,0.940,bicubic +convnext_pico_ols.d1_in1k,94.620,5.380,98.770,1.230,9.06,288,1.000,bicubic +efficientnet_b2.ra_in1k,94.610,5.390,98.710,1.290,9.11,288,1.000,bicubic +pit_s_224,94.590,5.410,98.710,1.290,23.46,224,0.900,bicubic +sebotnet33ts_256,94.590,5.410,98.500,1.500,13.70,256,0.940,bicubic repvgg_b3,94.570,5.430,98.780,1.220,123.09,224,0.875,bilinear -poolformer_s24,94.550,5.450,98.880,1.120,21.39,224,0.900,bicubic -nf_resnet50,94.550,5.450,98.790,1.210,25.56,288,0.940,bicubic +poolformer_s24,94.560,5.440,98.880,1.120,21.39,224,0.900,bicubic +nf_resnet50,94.560,5.440,98.790,1.210,25.56,288,0.940,bicubic +resnext50_32x4d,94.560,5.440,98.610,1.390,25.03,224,0.950,bicubic seresnet50,94.550,5.450,98.750,1.250,28.09,224,0.875,bicubic mobilevitv2_150,94.550,5.450,98.710,1.290,10.59,256,0.888,bicubic -regnety_320,94.540,5.460,98.860,1.140,145.05,224,0.875,bicubic -gluon_resnext101_32x4d,94.540,5.460,98.630,1.370,44.18,224,0.875,bicubic -resnext50_32x4d,94.540,5.460,98.610,1.390,25.03,224,0.950,bicubic -inception_resnet_v2,94.530,5.470,98.780,1.220,55.84,299,0.897,bicubic +regnety_320,94.540,5.460,98.850,1.150,145.05,224,0.875,bicubic +inception_resnet_v2,94.540,5.460,98.790,1.210,55.84,299,0.897,bicubic xcit_tiny_24_p16_224_dist,94.530,5.470,98.780,1.220,12.12,224,1.000,bicubic +gluon_resnext101_32x4d,94.530,5.470,98.630,1.370,44.18,224,0.875,bicubic repvgg_b3g4,94.520,5.480,98.970,1.030,83.83,224,0.875,bilinear convmixer_768_32,94.500,5.500,98.850,1.150,21.11,224,0.960,bicubic +efficientformer_l1,94.490,5.510,98.830,1.170,12.29,224,0.950,bicubic gcresnext50ts,94.490,5.510,98.670,1.330,15.67,256,0.900,bicubic -tf_efficientnet_b2_ap,94.490,5.510,98.620,1.380,9.11,260,0.890,bicubic 
+tf_efficientnet_b2.ap_in1k,94.490,5.510,98.620,1.380,9.11,260,0.890,bicubic regnety_120,94.480,5.520,98.810,1.190,51.82,224,0.875,bicubic rexnet_150,94.480,5.520,98.790,1.210,9.73,224,0.875,bicubic +darknetaa53,94.470,5.530,98.770,1.230,36.02,288,1.000,bilinear gcresnet33ts,94.470,5.530,98.770,1.230,19.88,256,0.900,bicubic -darknetaa53,94.470,5.530,98.760,1.240,36.02,288,1.000,bilinear resmlp_24_distilled_224,94.460,5.540,98.770,1.230,30.02,224,0.875,bicubic regnetx_320,94.460,5.540,98.740,1.260,107.81,224,0.875,bicubic -ssl_resnet50,94.440,5.560,98.920,1.080,25.56,224,0.875,bilinear -resnetv2_50,94.430,5.570,98.730,1.270,25.55,224,0.950,bicubic -tf_efficientnetv2_b2,94.420,5.580,98.570,1.430,10.10,260,0.890,bicubic -efficientnet_el_pruned,94.400,5.600,98.740,1.260,10.59,300,0.904,bicubic -tf_efficientnet_el,94.400,5.600,98.710,1.290,10.59,300,0.904,bicubic -deit_small_patch16_224,94.390,5.610,98.690,1.310,22.05,224,0.900,bicubic +ssl_resnet50,94.450,5.550,98.920,1.080,25.56,224,0.875,bilinear +resnetv2_50,94.440,5.560,98.740,1.260,25.55,224,0.950,bicubic +tf_efficientnetv2_b2.in1k,94.420,5.580,98.570,1.430,10.10,260,0.890,bicubic +tf_efficientnet_el.in1k,94.410,5.590,98.710,1.290,10.59,300,0.904,bicubic +gcvit_xxtiny,94.400,5.600,98.900,1.100,12.00,224,0.875,bicubic +efficientnet_el_pruned.in1k,94.400,5.600,98.740,1.260,10.59,300,0.904,bicubic +deit_small_patch16_224,94.400,5.600,98.690,1.310,22.05,224,0.900,bicubic inception_v4,94.380,5.620,98.580,1.420,42.68,299,0.875,bicubic legacy_seresnext101_32x4d,94.370,5.630,98.650,1.350,48.96,224,0.875,bilinear -tf_efficientnet_b2,94.360,5.640,98.610,1.390,9.11,260,0.890,bicubic +tf_efficientnet_b2.aa_in1k,94.360,5.640,98.610,1.390,9.11,260,0.890,bicubic resnet50_gn,94.350,5.650,98.710,1.290,25.56,224,0.940,bicubic -resnet50,94.340,5.660,98.440,1.560,25.56,224,0.950,bicubic -gluon_seresnext50_32x4d,94.330,5.670,98.610,1.390,27.56,224,0.875,bicubic -ecaresnet26t,94.320,5.680,98.720,1.280,16.01,320,0.950,bicubic -dpn107,94.310,5.690,98.470,1.530,86.92,224,0.875,bicubic -resnetrs50,94.300,5.700,98.640,1.360,35.69,224,0.910,bicubic +gluon_seresnext50_32x4d,94.340,5.660,98.610,1.390,27.56,224,0.875,bicubic +resnet50,94.320,5.680,98.440,1.560,25.56,224,0.950,bicubic +ecaresnet26t,94.310,5.690,98.720,1.280,16.01,320,0.950,bicubic +resnetrs50,94.310,5.690,98.640,1.360,35.69,224,0.910,bicubic +dpn107,94.310,5.690,98.480,1.520,86.92,224,0.875,bicubic xception71,94.280,5.720,98.640,1.360,42.34,299,0.903,bicubic cait_xxs36_224,94.260,5.740,98.720,1.280,17.30,224,1.000,bicubic resnet50d,94.260,5.740,98.720,1.280,25.58,224,0.875,bicubic gluon_xception65,94.260,5.740,98.570,1.430,39.92,299,0.903,bicubic -skresnext50_32x4d,94.250,5.750,98.460,1.540,27.48,224,0.875,bicubic +skresnext50_32x4d,94.260,5.740,98.460,1.540,27.48,224,0.875,bicubic regnetx_120,94.240,5.760,98.650,1.350,46.11,224,0.875,bicubic dpn92,94.230,5.770,98.730,1.270,37.67,224,0.875,bicubic -gluon_resnet101_v1d,94.230,5.770,98.550,1.450,44.57,224,0.875,bicubic ecaresnet50d_pruned,94.220,5.780,98.730,1.270,19.94,224,0.875,bicubic -tf_efficientnet_lite3,94.210,5.790,98.640,1.360,8.20,300,0.904,bilinear -resmlp_36_224,94.200,5.800,98.660,1.340,44.69,224,0.875,bicubic +gluon_resnet101_v1d,94.220,5.780,98.550,1.450,44.57,224,0.875,bicubic +tf_efficientnet_lite3.in1k,94.200,5.800,98.640,1.360,8.20,300,0.904,bilinear eca_resnet33ts,94.190,5.810,98.760,1.240,19.68,256,0.900,bicubic -resnext50d_32x4d,94.190,5.810,98.560,1.440,25.05,224,0.875,bicubic 
-mixnet_xl,94.190,5.810,98.340,1.660,11.90,224,0.875,bicubic -levit_192,94.180,5.820,98.540,1.460,10.95,224,0.900,bicubic +resmlp_36_224,94.190,5.810,98.660,1.340,44.69,224,0.875,bicubic +mixnet_xl.ra_in1k,94.190,5.810,98.340,1.660,11.90,224,0.875,bicubic +resnext50d_32x4d,94.180,5.820,98.570,1.430,25.05,224,0.875,bicubic +levit_192,94.170,5.830,98.540,1.460,10.95,224,0.900,bicubic gluon_resnet152_v1c,94.160,5.840,98.640,1.360,60.21,224,0.875,bicubic ens_adv_inception_resnet_v2,94.160,5.840,98.600,1.400,55.84,299,0.897,bicubic gmlp_s16_224,94.160,5.840,98.500,1.500,19.42,224,0.875,bicubic -efficientnet_b2_pruned,94.150,5.850,98.530,1.470,8.31,260,0.890,bicubic -vit_base_patch16_224_sam,94.140,5.860,98.670,1.330,86.57,224,0.900,bicubic -regnetx_160,94.130,5.870,98.740,1.260,54.28,224,0.875,bicubic -dpn98,94.120,5.880,98.580,1.420,61.57,224,0.875,bicubic -nf_regnet_b1,94.110,5.890,98.630,1.370,10.22,288,0.900,bicubic +vit_base_patch16_224.sam,94.140,5.860,98.670,1.330,86.57,224,0.900,bicubic +efficientnet_b2_pruned.in1k,94.140,5.860,98.530,1.470,8.31,260,0.890,bicubic +dpn98,94.130,5.870,98.570,1.430,61.57,224,0.875,bicubic +regnetx_160,94.120,5.880,98.740,1.260,54.28,224,0.875,bicubic +nf_regnet_b1,94.120,5.880,98.630,1.370,10.22,288,0.900,bicubic ese_vovnet39b,94.090,5.910,98.660,1.340,24.57,224,0.875,bicubic -xcit_tiny_24_p16_224,94.070,5.930,98.530,1.470,12.12,224,1.000,bicubic -gluon_resnet152_v1b,94.070,5.930,98.460,1.540,60.19,224,0.875,bicubic -coat_lite_mini,94.050,5.950,98.560,1.440,11.01,224,0.900,bicubic +xcit_tiny_24_p16_224,94.080,5.920,98.520,1.480,12.12,224,1.000,bicubic +gluon_resnet152_v1b,94.080,5.920,98.450,1.550,60.19,224,0.875,bicubic +coat_lite_mini,94.060,5.940,98.560,1.440,11.01,224,0.900,bicubic eca_halonext26ts,94.040,5.960,98.490,1.510,10.76,256,0.940,bicubic -hrnet_w64,94.020,5.980,98.620,1.380,128.06,224,0.875,bilinear resmlp_24_224,94.020,5.980,98.330,1.670,30.02,224,0.875,bicubic -halonet26t,94.010,5.990,98.500,1.500,12.48,256,0.950,bicubic -dpn131,93.990,6.010,98.720,1.280,79.25,224,0.875,bicubic -fbnetv3_b,93.970,6.030,98.630,1.370,8.60,256,0.950,bilinear -mobilevitv2_125,93.970,6.030,98.560,1.440,7.48,256,0.888,bicubic -dla102x2,93.970,6.030,98.500,1.500,41.28,224,0.875,bilinear -tf_efficientnetv2_b1,93.940,6.060,98.620,1.380,8.14,240,0.882,bicubic -resnetblur50,93.940,6.060,98.580,1.420,25.56,224,0.875,bicubic -fbnetv3_d,93.930,6.070,98.740,1.260,10.31,256,0.950,bilinear +dpn131,94.010,5.990,98.720,1.280,79.25,224,0.875,bicubic +hrnet_w64,94.010,5.990,98.610,1.390,128.06,224,0.875,bilinear +halonet26t,93.980,6.020,98.500,1.500,12.48,256,0.950,bicubic +fbnetv3_b.ra2_in1k,93.960,6.040,98.630,1.370,8.60,256,0.950,bilinear +resnetblur50,93.960,6.040,98.590,1.410,25.56,224,0.875,bicubic +mobilevitv2_125,93.960,6.040,98.560,1.440,7.48,256,0.888,bicubic +dla102x2,93.950,6.050,98.490,1.510,41.28,224,0.875,bilinear +tf_efficientnetv2_b1.in1k,93.940,6.060,98.620,1.380,8.14,240,0.882,bicubic +fbnetv3_d.ra2_in1k,93.930,6.070,98.740,1.260,10.31,256,0.950,bilinear +convnext_femto_ols.d1_in1k,93.920,6.080,98.610,1.390,5.23,288,0.950,bicubic hrnet_w48,93.920,6.080,98.610,1.390,77.47,224,0.875,bilinear -tf_efficientnet_cc_b1_8e,93.910,6.090,98.260,1.740,39.72,240,0.882,bicubic +convnext_femto.d1_in1k,93.920,6.080,98.520,1.480,5.22,288,0.950,bicubic rexnet_130,93.900,6.100,98.400,1.600,7.56,224,0.875,bicubic +tf_efficientnet_cc_b1_8e.in1k,93.900,6.100,98.260,1.740,39.72,240,0.882,bicubic regnetx_064,93.890,6.110,98.630,1.370,26.21,224,0.875,bicubic 
+vit_small_patch16_224.augreg_in1k,93.890,6.110,98.440,1.560,22.05,224,0.900,bicubic regnetx_080,93.870,6.130,98.520,1.480,39.57,224,0.875,bicubic -efficientnet_em,93.840,6.160,98.810,1.190,6.90,240,0.882,bicubic -repvgg_b2g4,93.840,6.160,98.590,1.410,61.76,224,0.875,bilinear -lambda_resnet26t,93.830,6.170,98.650,1.350,10.96,256,0.940,bicubic -pit_xs_distilled_224,93.820,6.180,98.670,1.330,11.00,224,0.900,bicubic -resnext101_32x8d,93.820,6.180,98.580,1.420,88.79,224,0.875,bilinear +repvgg_b2g4,93.860,6.140,98.590,1.410,61.76,224,0.875,bilinear +efficientnet_em.ra2_in1k,93.840,6.160,98.810,1.190,6.90,240,0.882,bicubic +lambda_resnet26t,93.840,6.160,98.640,1.360,10.96,256,0.940,bicubic +resnext101_32x8d,93.830,6.170,98.580,1.420,88.79,224,0.875,bilinear +pvt_v2_b1,93.820,6.180,98.660,1.340,14.01,224,0.900,bicubic +pit_xs_distilled_224,93.810,6.190,98.670,1.330,11.00,224,0.900,bicubic gluon_resnext50_32x4d,93.810,6.190,98.410,1.590,25.03,224,0.875,bicubic -eca_botnext26ts_256,93.780,6.220,98.500,1.500,10.59,256,0.950,bicubic +eca_botnext26ts_256,93.790,6.210,98.500,1.500,10.59,256,0.950,bicubic gluon_resnet50_v1d,93.770,6.230,98.390,1.610,25.58,224,0.875,bicubic gluon_resnet101_v1b,93.750,6.250,98.380,1.620,44.55,224,0.875,bicubic res2net101_26w_4s,93.750,6.250,98.310,1.690,45.21,224,0.875,bilinear -cspresnet50,93.730,6.270,98.640,1.360,21.62,256,0.887,bilinear +cspresnet50,93.740,6.260,98.640,1.360,21.62,256,0.887,bilinear legacy_seresnext50_32x4d,93.730,6.270,98.580,1.420,27.56,224,0.875,bilinear -vit_relpos_base_patch32_plus_rpn_256,93.730,6.270,98.070,1.930,119.42,256,0.900,bicubic -lambda_resnet26rpt_256,93.720,6.280,98.520,1.480,10.99,256,0.940,bicubic -wide_resnet101_2,93.710,6.290,98.540,1.460,126.89,224,0.875,bilinear -dpn68b,93.690,6.310,98.520,1.480,12.61,224,0.875,bicubic -tf_efficientnet_b1_ap,93.680,6.320,98.360,1.640,7.79,240,0.882,bicubic +wide_resnet101_2,93.720,6.280,98.540,1.460,126.89,224,0.875,bilinear +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,93.720,6.280,98.070,1.930,119.42,256,0.900,bicubic +lambda_resnet26rpt_256,93.710,6.290,98.510,1.490,10.99,256,0.940,bicubic +dpn68b,93.690,6.310,98.510,1.490,12.61,224,0.875,bicubic +tf_efficientnet_b1.ap_in1k,93.690,6.310,98.360,1.640,7.79,240,0.882,bicubic gluon_resnet101_v1c,93.670,6.330,98.420,1.580,44.57,224,0.875,bicubic -vit_tiny_patch16_384,93.650,6.350,98.600,1.400,5.79,384,1.000,bicubic -tf_efficientnet_b0_ns,93.630,6.370,98.640,1.360,5.29,224,0.875,bicubic +vit_tiny_patch16_384.augreg_in21k_ft_in1k,93.650,6.350,98.600,1.400,5.79,384,1.000,bicubic +vit_base_patch32_384.augreg_in1k,93.640,6.360,98.400,1.600,88.30,384,1.000,bicubic +tf_efficientnet_b0.ns_jft_in1k,93.630,6.370,98.640,1.360,5.29,224,0.875,bicubic +vit_base_patch16_224.augreg_in1k,93.630,6.370,98.240,1.760,86.57,224,0.900,bicubic gluon_resnet50_v1s,93.620,6.380,98.460,1.540,25.68,224,0.875,bicubic -resnet33ts,93.600,6.400,98.530,1.470,19.68,256,0.900,bicubic +resnet33ts,93.600,6.400,98.540,1.460,19.68,256,0.900,bicubic cait_xxs24_224,93.600,6.400,98.440,1.560,11.96,224,1.000,bicubic -coat_tiny,93.590,6.410,98.420,1.580,5.50,224,0.900,bicubic -regnetx_040,93.560,6.440,98.550,1.450,22.12,224,0.875,bicubic +coat_tiny,93.590,6.410,98.430,1.570,5.50,224,0.900,bicubic +regnetx_040,93.560,6.440,98.540,1.460,22.12,224,0.875,bicubic hrnet_w44,93.550,6.450,98.700,1.300,67.06,224,0.875,bilinear -hrnet_w32,93.530,6.470,98.460,1.540,41.23,224,0.875,bilinear -xcit_nano_12_p8_384_dist,93.520,6.480,98.540,1.460,3.05,384,1.000,bicubic 
-dla102x,93.520,6.480,98.500,1.500,26.31,224,0.875,bilinear -botnet26t_256,93.510,6.490,98.300,1.700,12.49,256,0.950,bicubic -tf_efficientnet_b1,93.500,6.500,98.360,1.640,7.79,240,0.882,bicubic -repvgg_b2,93.490,6.510,98.730,1.270,89.02,224,0.875,bilinear +res2net50_26w_8s,93.540,6.460,98.260,1.740,48.40,224,0.875,bilinear +hrnet_w32,93.530,6.470,98.450,1.550,41.23,224,0.875,bilinear +dla102x,93.520,6.480,98.510,1.490,26.31,224,0.875,bilinear +botnet26t_256,93.520,6.480,98.300,1.700,12.49,256,0.950,bicubic +repvgg_b2,93.500,6.500,98.730,1.270,89.02,224,0.875,bilinear +xcit_nano_12_p8_384_dist,93.500,6.500,98.530,1.470,3.05,384,1.000,bicubic +tf_efficientnet_b1.aa_in1k,93.500,6.500,98.360,1.640,7.79,240,0.882,bicubic hrnet_w40,93.490,6.510,98.580,1.420,57.56,224,0.875,bilinear -xception,93.470,6.530,98.530,1.470,22.86,299,0.897,bicubic +gluon_inception_v3,93.460,6.540,98.570,1.430,23.83,299,0.875,bicubic +xception,93.460,6.540,98.530,1.470,22.86,299,0.897,bicubic resnet32ts,93.460,6.540,98.490,1.510,17.96,256,0.900,bicubic -gluon_inception_v3,93.450,6.550,98.570,1.430,23.83,299,0.875,bicubic -mixnet_l,93.450,6.550,98.220,1.780,7.33,224,0.875,bicubic +mixnet_l.ft_in1k,93.450,6.550,98.220,1.780,7.33,224,0.875,bicubic xception41,93.430,6.570,98.430,1.570,26.97,299,0.903,bicubic -res2net50_26w_8s,93.420,6.580,98.170,1.830,48.40,224,0.875,bilinear res2net50_26w_6s,93.410,6.590,98.280,1.720,37.05,224,0.875,bilinear -xcit_tiny_12_p16_224_dist,93.400,6.600,98.480,1.520,6.72,224,1.000,bicubic -legacy_seresnet152,93.390,6.610,98.340,1.660,66.82,224,0.875,bilinear -cs3darknet_m,93.360,6.640,98.600,1.400,9.31,288,0.950,bicubic -dla169,93.340,6.660,98.590,1.410,53.39,224,0.875,bilinear +legacy_seresnet152,93.400,6.600,98.350,1.650,66.82,224,0.875,bilinear +xcit_tiny_12_p16_224_dist,93.390,6.610,98.500,1.500,6.72,224,1.000,bicubic +cs3darknet_m,93.350,6.650,98.600,1.400,9.31,288,0.950,bicubic +dla169,93.340,6.660,98.600,1.400,53.39,224,0.875,bilinear +levit_128,93.340,6.660,98.380,1.620,9.21,224,0.900,bicubic resnest26d,93.330,6.670,98.630,1.370,17.07,224,0.875,bilinear -levit_128,93.330,6.670,98.380,1.620,9.21,224,0.900,bicubic +repvgg_b1,93.330,6.670,98.510,1.490,57.42,224,0.875,bilinear bat_resnext26ts,93.330,6.670,98.350,1.650,10.73,256,0.900,bicubic -repvgg_b1,93.320,6.680,98.510,1.490,57.42,224,0.875,bilinear tf_inception_v3,93.320,6.680,98.030,1.970,23.83,299,0.875,bicubic -tf_mixnet_l,93.320,6.680,98.030,1.970,7.33,224,0.875,bicubic -tv_resnet152,93.310,6.690,98.390,1.610,60.19,224,0.875,bilinear -mobilevitv2_100,93.300,6.700,98.280,1.720,4.90,256,0.888,bicubic -legacy_seresnet101,93.290,6.710,98.510,1.490,49.33,224,0.875,bilinear -selecsls60b,93.290,6.710,98.280,1.720,32.77,224,0.875,bicubic -efficientnet_b1,93.240,6.760,98.300,1.700,7.79,256,1.000,bicubic -coat_lite_tiny,93.230,6.770,98.260,1.740,5.72,224,0.900,bicubic +tf_mixnet_l.in1k,93.310,6.690,98.030,1.970,7.33,224,0.875,bicubic +tv_resnet152,93.300,6.700,98.390,1.610,60.19,224,0.875,bilinear +selecsls60b,93.300,6.700,98.280,1.720,32.77,224,0.875,bicubic +legacy_seresnet101,93.280,6.720,98.510,1.490,49.33,224,0.875,bilinear +mobilevitv2_100,93.270,6.730,98.280,1.720,4.90,256,0.888,bicubic +efficientnet_b1.ft_in1k,93.250,6.750,98.290,1.710,7.79,256,1.000,bicubic +coat_lite_tiny,93.240,6.760,98.260,1.740,5.72,224,0.900,bicubic hrnet_w30,93.200,6.800,98.410,1.590,37.71,224,0.875,bilinear -mobilevit_s,93.180,6.820,98.440,1.560,5.58,256,0.900,bicubic -dla60_res2next,93.170,6.830,98.400,1.600,17.03,224,0.875,bilinear 
-dla60_res2net,93.160,6.840,98.400,1.600,20.85,224,0.875,bilinear -efficientnet_es,93.140,6.860,98.420,1.580,5.44,224,0.875,bicubic +mobilevit_s,93.180,6.820,98.430,1.570,5.58,256,0.900,bicubic +dla60_res2net,93.180,6.820,98.420,1.580,20.85,224,0.875,bilinear +dla60_res2next,93.180,6.820,98.410,1.590,17.03,224,0.875,bilinear +efficientnet_es.ra_in1k,93.140,6.860,98.420,1.580,5.44,224,0.875,bicubic dla60x,93.120,6.880,98.510,1.490,17.35,224,0.875,bilinear regnetx_032,93.120,6.880,98.390,1.610,15.30,224,0.875,bicubic -pit_xs_224,93.120,6.880,98.330,1.670,10.62,224,0.900,bicubic -tf_efficientnetv2_b0,93.110,6.890,98.390,1.610,7.14,224,0.875,bicubic -dla102,93.060,6.940,98.550,1.450,33.27,224,0.875,bilinear +tf_efficientnetv2_b0.in1k,93.110,6.890,98.390,1.610,7.14,224,0.875,bicubic +pit_xs_224,93.110,6.890,98.310,1.690,10.62,224,0.900,bicubic +convnext_atto_ols.a2_in1k,93.080,6.920,98.470,1.530,3.70,288,0.950,bicubic +dla102,93.060,6.940,98.540,1.460,33.27,224,0.875,bilinear gluon_resnet50_v1c,93.030,6.970,98.390,1.610,25.58,224,0.875,bicubic -regnety_016,93.030,6.970,98.350,1.650,11.20,224,0.875,bicubic +regnety_016,93.030,6.970,98.360,1.640,11.20,224,0.875,bicubic +selecsls60,93.030,6.970,98.300,1.700,30.67,224,0.875,bicubic rexnet_100,93.030,6.970,98.190,1.810,4.80,224,0.875,bicubic -selecsls60,93.020,6.980,98.300,1.700,30.67,224,0.875,bicubic repvgg_b1g4,92.980,7.020,98.430,1.570,39.97,224,0.875,bilinear -cs3darknet_focus_m,92.970,7.030,98.390,1.610,9.30,288,0.950,bicubic -legacy_seresnet50,92.970,7.030,98.190,1.810,28.09,224,0.875,bilinear -hardcorenas_f,92.960,7.040,98.160,1.840,8.20,224,0.875,bilinear -tf_efficientnet_em,92.930,7.070,98.200,1.800,6.90,240,0.882,bicubic -crossvit_9_dagger_240,92.890,7.110,98.250,1.750,8.78,240,0.875,bicubic +legacy_seresnet50,92.960,7.040,98.190,1.810,28.09,224,0.875,bilinear +cs3darknet_focus_m,92.950,7.050,98.390,1.610,9.30,288,0.950,bicubic +hardcorenas_f,92.950,7.050,98.160,1.840,8.20,224,0.875,bilinear +tf_efficientnet_em.in1k,92.930,7.070,98.190,1.810,6.90,240,0.882,bicubic +crossvit_9_dagger_240,92.900,7.100,98.240,1.760,8.78,240,0.875,bicubic adv_inception_v3,92.880,7.120,98.140,1.860,23.83,299,0.875,bicubic -res2next50,92.860,7.140,98.190,1.810,24.67,224,0.875,bilinear -resmlp_12_distilled_224,92.840,7.160,98.140,1.860,15.35,224,0.875,bicubic -tf_efficientnet_cc_b0_8e,92.830,7.170,98.180,1.820,24.01,224,0.875,bicubic +res2next50,92.840,7.160,98.180,1.820,24.67,224,0.875,bilinear +tf_efficientnet_cc_b0_8e.in1k,92.830,7.170,98.180,1.820,24.01,224,0.875,bicubic +resmlp_12_distilled_224,92.830,7.170,98.140,1.860,15.35,224,0.875,bicubic gmixer_24_224,92.830,7.170,97.880,2.120,24.72,224,0.875,bicubic seresnext26t_32x4d,92.820,7.180,98.370,1.630,16.81,224,0.875,bicubic -tv_resnet101,92.820,7.180,98.250,1.750,44.55,224,0.875,bilinear -gcresnext26ts,92.780,7.220,98.260,1.740,10.48,256,0.900,bicubic -efficientnet_b1_pruned,92.770,7.230,98.040,1.960,6.33,240,0.882,bicubic -tv_resnext50_32x4d,92.750,7.250,98.280,1.720,25.03,224,0.875,bilinear -resnet26t,92.750,7.250,98.230,1.770,16.01,256,0.940,bicubic -densenet201,92.740,7.260,98.230,1.770,20.01,224,0.875,bicubic +tv_resnet101,92.810,7.190,98.250,1.750,44.55,224,0.875,bilinear +convnext_atto.d2_in1k,92.790,7.210,98.060,1.940,3.70,288,0.950,bicubic +gcresnext26ts,92.770,7.230,98.260,1.740,10.48,256,0.900,bicubic +efficientnet_b1_pruned.in1k,92.770,7.230,98.040,1.960,6.33,240,0.882,bicubic +densenet201,92.750,7.250,98.230,1.770,20.01,224,0.875,bicubic 
+resnet26t,92.750,7.250,98.210,1.790,16.01,256,0.940,bicubic +tv_resnext50_32x4d,92.740,7.260,98.270,1.730,25.03,224,0.875,bilinear res2net50_14w_8s,92.740,7.260,98.180,1.820,25.06,224,0.875,bilinear inception_v3,92.720,7.280,97.970,2.030,23.83,299,0.875,bicubic +seresnext26d_32x4d,92.700,7.300,98.150,1.850,16.81,224,0.875,bicubic seresnext26ts,92.690,7.310,98.290,1.710,10.39,256,0.900,bicubic -seresnext26d_32x4d,92.690,7.310,98.150,1.850,16.81,224,0.875,bicubic -efficientnet_b0,92.690,7.310,98.070,1.930,5.29,224,0.875,bicubic +efficientnet_b0.ra_in1k,92.690,7.310,98.070,1.930,5.29,224,0.875,bicubic resnet34d,92.680,7.320,98.310,1.690,21.82,224,0.875,bicubic -tf_efficientnet_lite2,92.650,7.350,98.230,1.770,6.09,260,0.890,bicubic +tf_efficientnet_lite2.in1k,92.650,7.350,98.230,1.770,6.09,260,0.890,bicubic legacy_seresnext26_32x4d,92.640,7.360,98.130,1.870,16.79,224,0.875,bicubic -poolformer_s12,92.630,7.370,98.200,1.800,11.92,224,0.900,bicubic -tf_efficientnet_lite1,92.620,7.380,98.080,1.920,5.42,240,0.882,bicubic +poolformer_s12,92.620,7.380,98.200,1.800,11.92,224,0.900,bicubic +tf_efficientnet_lite1.in1k,92.620,7.380,98.080,1.920,5.42,240,0.882,bicubic eca_resnext26ts,92.610,7.390,98.260,1.740,10.30,256,0.900,bicubic -tf_efficientnet_cc_b0_4e,92.590,7.410,98.080,1.920,13.31,224,0.875,bicubic -hardcorenas_e,92.570,7.430,98.100,1.900,8.07,224,0.875,bilinear +tf_efficientnet_cc_b0_4e.in1k,92.590,7.410,98.080,1.920,13.31,224,0.875,bicubic +hardcorenas_e,92.570,7.430,98.110,1.890,8.07,224,0.875,bilinear +res2net50_48w_2s,92.550,7.450,98.080,1.920,25.29,224,0.875,bilinear gluon_resnet50_v1b,92.540,7.460,98.170,1.830,25.56,224,0.875,bicubic -res2net50_48w_2s,92.540,7.460,98.080,1.920,25.29,224,0.875,bilinear densenet161,92.500,7.500,98.290,1.710,28.68,224,0.875,bicubic xcit_tiny_12_p16_224,92.500,7.500,98.240,1.760,6.72,224,1.000,bicubic -res2net50_26w_4s,92.490,7.510,98.060,1.940,25.70,224,0.875,bilinear -tinynet_a,92.440,7.560,98.080,1.920,6.19,192,0.875,bicubic +res2net50_26w_4s,92.500,7.500,98.060,1.940,25.70,224,0.875,bilinear +tinynet_a.in1k,92.440,7.560,98.080,1.920,6.19,192,0.875,bicubic convmixer_1024_20_ks9_p14,92.430,7.570,98.270,1.730,24.38,224,0.960,bicubic -mixnet_m,92.430,7.570,97.860,2.140,5.01,224,0.875,bicubic -mobilenetv2_120d,92.400,7.600,98.050,1.950,5.83,224,0.875,bicubic +mixnet_m.ft_in1k,92.430,7.570,97.870,2.130,5.01,224,0.875,bicubic +hardcorenas_d,92.400,7.600,98.070,1.930,7.50,224,0.875,bilinear +mobilenetv2_120d.ra_in1k,92.400,7.600,98.050,1.950,5.83,224,0.875,bicubic skresnet34,92.390,7.610,98.150,1.850,22.28,224,0.875,bicubic -hardcorenas_d,92.390,7.610,98.080,1.920,7.50,224,0.875,bilinear -hrnet_w18,92.320,7.680,98.250,1.750,21.30,224,0.875,bilinear -tf_mixnet_m,92.320,7.680,97.890,2.110,5.01,224,0.875,bicubic -selecsls42b,92.280,7.720,98.140,1.860,32.46,224,0.875,bicubic -ese_vovnet19b_dw,92.280,7.720,98.090,1.910,6.54,224,0.875,bicubic -mobilenetv3_large_100_miil,92.270,7.730,97.640,2.360,5.48,224,0.875,bilinear -tf_efficientnet_b0,92.250,7.750,97.990,2.010,5.29,224,0.875,bicubic +tf_mixnet_m.in1k,92.330,7.670,97.890,2.110,5.01,224,0.875,bicubic +hrnet_w18,92.320,7.680,98.240,1.760,21.30,224,0.875,bilinear +ese_vovnet19b_dw,92.290,7.710,98.090,1.910,6.54,224,0.875,bicubic +selecsls42b,92.280,7.720,98.150,1.850,32.46,224,0.875,bicubic +mobilenetv3_large_100.miil_in21k_ft_in1k,92.260,7.740,97.640,2.360,5.48,224,0.875,bilinear +tf_efficientnet_b0.aa_in1k,92.250,7.750,98.000,2.000,5.29,224,0.875,bicubic +dla60,92.230,7.770,98.110,1.890,22.04,224,0.875,bilinear 
resmlp_12_224,92.210,7.790,98.160,1.840,15.35,224,0.875,bicubic -dla60,92.210,7.790,98.100,1.900,22.04,224,0.875,bilinear -tf_efficientnet_b0_ap,92.200,7.800,98.020,1.980,5.29,224,0.875,bicubic -regnetx_016,92.160,7.840,98.200,1.800,9.19,224,0.875,bicubic -gernet_s,92.140,7.860,98.200,1.800,8.17,224,0.875,bilinear -xcit_nano_12_p8_224_dist,92.100,7.900,98.150,1.850,3.05,224,1.000,bicubic -resnet26d,92.070,7.930,97.970,2.030,16.01,224,0.875,bicubic -vit_tiny_r_s16_p8_384,92.040,7.960,98.290,1.710,6.36,384,1.000,bicubic -vit_small_patch32_224,92.030,7.970,98.230,1.770,22.88,224,0.900,bicubic -dpn68,92.030,7.970,98.050,1.950,12.61,224,0.875,bicubic -hardcorenas_c,92.030,7.970,97.840,2.160,5.52,224,0.875,bilinear -tf_efficientnet_es,91.980,8.020,97.860,2.140,5.44,224,0.875,bicubic +tf_efficientnet_b0.ap_in1k,92.200,7.800,98.020,1.980,5.29,224,0.875,bicubic +regnetx_016,92.170,7.830,98.210,1.790,9.19,224,0.875,bicubic +gernet_s,92.140,7.860,98.190,1.810,8.17,224,0.875,bilinear +xcit_nano_12_p8_224_dist,92.090,7.910,98.160,1.840,3.05,224,1.000,bicubic +resnet26d,92.070,7.930,97.960,2.040,16.01,224,0.875,bicubic +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,92.040,7.960,98.290,1.710,6.36,384,1.000,bicubic +vit_small_patch32_224.augreg_in21k_ft_in1k,92.040,7.960,98.230,1.770,22.88,224,0.900,bicubic +hardcorenas_c,92.020,7.980,97.840,2.160,5.52,224,0.875,bilinear +dpn68,92.010,7.990,98.050,1.950,12.61,224,0.875,bicubic +tf_efficientnet_es.in1k,91.980,8.020,97.860,2.140,5.44,224,0.875,bicubic +levit_128s,91.970,8.030,98.060,1.940,7.78,224,0.900,bicubic repvgg_a2,91.940,8.060,98.150,1.850,28.21,224,0.875,bilinear -levit_128s,91.930,8.070,98.070,1.930,7.78,224,0.900,bicubic -densenet169,91.920,8.080,98.100,1.900,14.15,224,0.875,bicubic +densenet169,91.930,8.070,98.100,1.900,14.15,224,0.875,bicubic densenetblur121d,91.910,8.090,98.070,1.930,8.00,224,0.875,bicubic -tv_resnet50,91.900,8.100,98.040,1.960,25.56,224,0.875,bilinear +tv_resnet50,91.880,8.120,98.040,1.960,25.56,224,0.875,bilinear resnext26ts,91.870,8.130,97.920,2.080,10.30,256,0.900,bicubic mixer_b16_224,91.870,8.130,97.250,2.750,59.88,224,0.875,bicubic -xcit_nano_12_p16_384_dist,91.830,8.170,98.020,1.980,3.05,384,1.000,bicubic -mobilenetv2_140,91.830,8.170,97.850,2.150,6.11,224,0.875,bicubic -mixnet_s,91.820,8.180,97.690,2.310,4.13,224,0.875,bicubic -vit_tiny_patch16_224,91.770,8.230,98.040,1.960,5.72,224,0.900,bicubic +mobilenetv2_140.ra_in1k,91.830,8.170,97.860,2.140,6.11,224,0.875,bicubic +mixnet_s.ft_in1k,91.830,8.170,97.690,2.310,4.13,224,0.875,bicubic +xcit_nano_12_p16_384_dist,91.820,8.180,98.020,1.980,3.05,384,1.000,bicubic +hardcorenas_b,91.770,8.230,97.780,2.220,5.18,224,0.875,bilinear +vit_tiny_patch16_224.augreg_in21k_ft_in1k,91.760,8.240,98.040,1.960,5.72,224,0.900,bicubic mobilevitv2_075,91.760,8.240,97.860,2.140,2.87,256,0.888,bicubic -hardcorenas_b,91.760,8.240,97.780,2.220,5.18,224,0.875,bilinear -regnety_008,91.720,8.280,98.180,1.820,6.26,224,0.875,bicubic +regnety_008,91.750,8.250,98.180,1.820,6.26,224,0.875,bicubic resnest14d,91.720,8.280,97.870,2.130,10.61,224,0.875,bilinear -densenet121,91.580,8.420,98.030,1.970,7.98,224,0.875,bicubic -tf_mixnet_s,91.510,8.490,97.610,2.390,4.13,224,0.875,bicubic -repvgg_b0,91.400,8.600,97.990,2.010,15.82,224,0.875,bilinear +edgenext_x_small,91.720,8.280,97.610,2.390,2.34,288,1.000,bicubic +densenet121,91.570,8.430,98.030,1.970,7.98,224,0.875,bicubic +tf_mixnet_s.in1k,91.510,8.490,97.620,2.380,4.13,224,0.875,bicubic +repvgg_b0,91.430,8.570,97.990,2.010,15.82,224,0.875,bilinear 
regnety_006,91.370,8.630,97.710,2.290,6.06,224,0.875,bicubic hardcorenas_a,91.350,8.650,97.860,2.140,5.26,224,0.875,bilinear -mobilenetv3_large_100,91.330,8.670,97.720,2.280,5.48,224,0.875,bicubic -semnasnet_100,91.280,8.720,97.560,2.440,3.89,224,0.875,bicubic -tf_mobilenetv3_large_100,91.220,8.780,97.660,2.340,5.48,224,0.875,bilinear -mobilenetv3_rw,91.210,8.790,97.660,2.340,5.48,224,0.875,bicubic +mobilenetv3_large_100.ra_in1k,91.320,8.680,97.710,2.290,5.48,224,0.875,bicubic +semnasnet_100.rmsp_in1k,91.280,8.720,97.560,2.440,3.89,224,0.875,bicubic +tf_mobilenetv3_large_100.in1k,91.240,8.760,97.660,2.340,5.48,224,0.875,bilinear +mobilenetv3_rw.rmsp_in1k,91.210,8.790,97.660,2.340,5.48,224,0.875,bicubic hrnet_w18_small_v2,91.190,8.810,97.900,2.100,15.60,224,0.875,bilinear -efficientnet_es_pruned,91.180,8.820,97.750,2.250,5.44,224,0.875,bicubic +vit_base_patch32_224.augreg_in1k,91.190,8.810,97.380,2.620,88.22,224,0.900,bicubic +efficientnet_es_pruned.in1k,91.180,8.820,97.750,2.250,5.44,224,0.875,bicubic +efficientnet_lite0.ra_in1k,91.140,8.860,97.630,2.370,4.65,224,0.875,bicubic resnet34,91.130,8.870,97.620,2.380,21.80,224,0.875,bilinear -resnet26,91.120,8.880,97.750,2.250,16.00,224,0.875,bicubic -efficientnet_lite0,91.110,8.890,97.630,2.370,4.65,224,0.875,bicubic -edgenext_x_small,91.090,8.910,97.550,2.450,2.34,256,0.900,bicubic +resnet26,91.110,8.890,97.740,2.260,16.00,224,0.875,bicubic regnetx_008,91.050,8.950,97.710,2.290,7.26,224,0.875,bicubic -tf_efficientnet_lite0,91.040,8.960,97.590,2.410,4.65,224,0.875,bicubic -xcit_nano_12_p8_224,91.020,8.980,97.790,2.210,3.05,224,1.000,bicubic -gluon_resnet34_v1b,90.960,9.040,97.640,2.360,21.80,224,0.875,bicubic -mobilenetv2_110d,90.960,9.040,97.560,2.440,4.52,224,0.875,bicubic -tinynet_b,90.920,9.080,97.670,2.330,3.73,188,0.875,bicubic -pit_ti_distilled_224,90.900,9.100,97.720,2.280,5.10,224,0.900,bicubic -legacy_seresnet34,90.900,9.100,97.580,2.420,21.96,224,0.875,bilinear +tf_efficientnet_lite0.in1k,91.040,8.960,97.590,2.410,4.65,224,0.875,bicubic +xcit_nano_12_p8_224,91.020,8.980,97.800,2.200,3.05,224,1.000,bicubic +gluon_resnet34_v1b,90.960,9.040,97.630,2.370,21.80,224,0.875,bicubic +mobilenetv2_110d.ra_in1k,90.950,9.050,97.550,2.450,4.52,224,0.875,bicubic +tinynet_b.in1k,90.920,9.080,97.670,2.330,3.73,188,0.875,bicubic +pit_ti_distilled_224,90.900,9.100,97.700,2.300,5.10,224,0.900,bicubic tv_densenet121,90.890,9.110,97.710,2.290,7.98,224,0.875,bicubic -mobilevit_xs,90.820,9.180,97.920,2.080,2.32,256,0.900,bicubic -dla34,90.780,9.220,97.660,2.340,15.74,224,0.875,bilinear -deit_tiny_distilled_patch16_224,90.710,9.290,97.570,2.430,5.91,224,0.900,bicubic -fbnetc_100,90.710,9.290,97.210,2.790,5.57,224,0.875,bilinear +legacy_seresnet34,90.890,9.110,97.580,2.420,21.96,224,0.875,bilinear +mobilevit_xs,90.830,9.170,97.920,2.080,2.32,256,0.900,bicubic +dla34,90.760,9.240,97.660,2.340,15.74,224,0.875,bilinear +deit_tiny_distilled_patch16_224,90.700,9.300,97.570,2.430,5.91,224,0.900,bicubic +fbnetc_100.rmsp_in1k,90.700,9.300,97.210,2.790,5.57,224,0.875,bilinear swsl_resnet18,90.690,9.310,97.700,2.300,11.69,224,0.875,bilinear -convit_tiny,90.640,9.360,97.740,2.260,5.71,224,0.875,bicubic -crossvit_9_240,90.630,9.370,97.740,2.260,8.55,240,0.875,bicubic -regnety_004,90.510,9.490,97.540,2.460,4.34,224,0.875,bicubic -mnasnet_100,90.510,9.490,97.470,2.530,4.38,224,0.875,bicubic -regnetx_006,90.360,9.640,97.430,2.570,6.20,224,0.875,bicubic -spnasnet_100,90.340,9.660,97.190,2.810,4.42,224,0.875,bilinear 
-crossvit_tiny_240,90.240,9.760,97.590,2.410,7.01,240,0.875,bicubic -ssl_resnet18,90.210,9.790,97.550,2.450,11.69,224,0.875,bilinear +crossvit_9_240,90.640,9.360,97.740,2.260,8.55,240,0.875,bicubic +convit_tiny,90.630,9.370,97.740,2.260,5.71,224,0.875,bicubic +mnasnet_100.rmsp_in1k,90.510,9.490,97.470,2.530,4.38,224,0.875,bicubic +regnety_004,90.500,9.500,97.540,2.460,4.34,224,0.875,bicubic +regnetx_006,90.350,9.650,97.430,2.570,6.20,224,0.875,bicubic +spnasnet_100.rmsp_in1k,90.350,9.650,97.190,2.810,4.42,224,0.875,bilinear +crossvit_tiny_240,90.250,9.750,97.590,2.410,7.01,240,0.875,bicubic +ssl_resnet18,90.220,9.780,97.550,2.450,11.69,224,0.875,bilinear vgg16_bn,90.090,9.910,97.370,2.630,138.37,224,0.875,bilinear vgg19_bn,90.080,9.920,97.580,2.420,143.68,224,0.875,bilinear -semnasnet_075,90.060,9.940,97.430,2.570,2.91,224,0.875,bicubic -ghostnet_100,90.030,9.970,97.370,2.630,5.18,224,0.875,bilinear -pit_ti_224,89.950,10.050,97.440,2.560,4.85,224,0.900,bicubic -tv_resnet34,89.930,10.070,97.340,2.660,21.80,224,0.875,bilinear -vit_base_patch32_224_sam,89.750,10.250,97.000,3.000,88.22,224,0.900,bicubic -xcit_nano_12_p16_224_dist,89.690,10.310,97.100,2.900,3.05,224,1.000,bicubic -tf_mobilenetv3_large_075,89.680,10.320,97.210,2.790,3.99,224,0.875,bilinear -deit_tiny_patch16_224,89.660,10.340,97.450,2.550,5.72,224,0.900,bicubic -skresnet18,89.660,10.340,97.240,2.760,11.96,224,0.875,bicubic -mobilenetv2_100,89.610,10.390,97.150,2.850,3.50,224,0.875,bicubic -resnet18d,89.280,10.720,97.140,2.860,11.71,224,0.875,bicubic -vit_tiny_r_s16_p8_224,89.180,10.820,97.230,2.770,6.34,224,0.900,bicubic +semnasnet_075.rmsp_in1k,90.070,9.930,97.430,2.570,2.91,224,0.875,bicubic +ghostnet_100,90.020,9.980,97.370,2.630,5.18,224,0.875,bilinear +pit_ti_224,89.940,10.060,97.450,2.550,4.85,224,0.900,bicubic +tv_resnet34,89.940,10.060,97.340,2.660,21.80,224,0.875,bilinear +vit_base_patch32_224.sam,89.750,10.250,97.000,3.000,88.22,224,0.900,bicubic +tf_mobilenetv3_large_075.in1k,89.680,10.320,97.210,2.790,3.99,224,0.875,bilinear +xcit_nano_12_p16_224_dist,89.680,10.320,97.090,2.910,3.05,224,1.000,bicubic +deit_tiny_patch16_224,89.670,10.330,97.450,2.550,5.72,224,0.900,bicubic +skresnet18,89.660,10.340,97.230,2.770,11.96,224,0.875,bicubic +mobilenetv2_100.ra_in1k,89.600,10.400,97.140,2.860,3.50,224,0.875,bicubic +resnet18d,89.280,10.720,97.150,2.850,11.71,224,0.875,bicubic +vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,89.170,10.830,97.230,2.770,6.34,224,0.900,bicubic +hrnet_w18_small,89.050,10.950,97.110,2.890,13.19,224,0.875,bilinear +resnet14t,89.050,10.950,96.620,3.380,10.08,224,0.950,bilinear vgg19,89.040,10.960,96.870,3.130,143.67,224,0.875,bilinear -resnet14t,89.040,10.960,96.600,3.400,10.08,224,0.950,bilinear -hrnet_w18_small,89.030,10.970,97.110,2.890,13.19,224,0.875,bilinear -tf_mobilenetv3_large_minimal_100,88.970,11.030,96.850,3.150,3.92,224,0.875,bilinear +tf_mobilenetv3_large_minimal_100.in1k,88.970,11.030,96.860,3.140,3.92,224,0.875,bilinear regnetx_004,88.900,11.100,97.120,2.880,5.16,224,0.875,bicubic legacy_seresnet18,88.880,11.120,96.980,3.020,11.78,224,0.875,bicubic -lcnet_100,88.790,11.210,96.730,3.270,2.95,224,0.875,bicubic +edgenext_xx_small,88.880,11.120,96.690,3.310,1.33,288,1.000,bicubic +pvt_v2_b0,88.790,11.210,96.860,3.140,3.67,224,0.900,bicubic +lcnet_100.ra2_in1k,88.790,11.210,96.730,3.270,2.95,224,0.875,bicubic vgg13_bn,88.760,11.240,96.970,3.030,133.05,224,0.875,bilinear xcit_nano_12_p16_224,88.610,11.390,96.790,3.210,3.05,224,1.000,bicubic 
vgg16,88.550,11.450,96.790,3.210,138.36,224,0.875,bilinear gluon_resnet18_v1b,88.400,11.600,96.680,3.320,11.69,224,0.875,bicubic -edgenext_xx_small,88.350,11.650,96.520,3.480,1.33,256,0.900,bicubic mobilevitv2_050,88.230,11.770,96.990,3.010,1.37,256,0.888,bicubic -tinynet_c,87.780,12.220,96.370,3.630,2.46,184,0.875,bicubic +tinynet_c.in1k,87.770,12.230,96.370,3.630,2.46,184,0.875,bicubic vgg11_bn,87.500,12.500,96.820,3.180,132.87,224,0.875,bilinear resnet18,87.390,12.610,96.290,3.710,11.69,224,0.875,bilinear regnety_002,87.380,12.620,96.590,3.410,3.16,224,0.875,bicubic -mobilevit_xxs,87.190,12.810,96.100,3.900,1.27,256,0.900,bicubic -mixer_l16_224,87.140,12.860,93.520,6.480,208.20,224,0.875,bicubic +mobilevit_xxs,87.170,12.830,96.100,3.900,1.27,256,0.900,bicubic +mixer_l16_224,87.150,12.850,93.520,6.480,208.20,224,0.875,bicubic vgg13,87.050,12.950,96.320,3.680,133.05,224,0.875,bilinear vgg11,86.550,13.450,96.280,3.720,132.86,224,0.875,bilinear -dla60x_c,86.270,13.730,96.170,3.830,1.32,224,0.875,bilinear -resnet10t,86.210,13.790,95.660,4.340,5.44,224,0.950,bilinear -regnetx_002,86.200,13.800,95.980,4.020,2.68,224,0.875,bicubic -lcnet_075,85.990,14.010,95.690,4.310,2.36,224,0.875,bicubic -mobilenetv3_small_100,85.220,14.780,95.630,4.370,2.54,224,0.875,bicubic -tf_mobilenetv3_small_100,85.190,14.810,95.770,4.230,2.54,224,0.875,bilinear -tinynet_d,84.760,15.240,95.180,4.820,2.34,152,0.875,bicubic -mnasnet_small,84.440,15.560,95.180,4.820,2.03,224,0.875,bicubic -dla46x_c,84.250,15.750,95.260,4.740,1.07,224,0.875,bilinear -mobilenetv2_050,83.890,16.110,94.720,5.280,1.97,224,0.875,bicubic -dla46_c,83.640,16.360,94.920,5.080,1.30,224,0.875,bilinear -tf_mobilenetv3_small_075,83.520,16.480,94.800,5.200,2.04,224,0.875,bilinear -mobilenetv3_small_075,83.040,16.960,94.100,5.900,2.04,224,0.875,bicubic -lcnet_050,81.780,18.220,93.720,6.280,1.88,224,0.875,bicubic -tf_mobilenetv3_small_minimal_100,81.400,18.600,93.680,6.320,2.04,224,0.875,bilinear -tinynet_e,78.900,21.100,92.560,7.440,2.04,106,0.875,bicubic -mobilenetv3_small_050,76.990,23.010,91.300,8.700,1.59,224,0.875,bicubic +dla60x_c,86.290,13.710,96.160,3.840,1.32,224,0.875,bilinear +resnet10t,86.200,13.800,95.650,4.350,5.44,224,0.950,bilinear +regnetx_002,86.190,13.810,95.980,4.020,2.68,224,0.875,bicubic +lcnet_075.ra2_in1k,85.990,14.010,95.680,4.320,2.36,224,0.875,bicubic +mobilenetv3_small_100.lamb_in1k,85.220,14.780,95.620,4.380,2.54,224,0.875,bicubic +tf_mobilenetv3_small_100.in1k,85.190,14.810,95.770,4.230,2.54,224,0.875,bilinear +tinynet_d.in1k,84.750,15.250,95.180,4.820,2.34,152,0.875,bicubic +mnasnet_small.lamb_in1k,84.440,15.560,95.180,4.820,2.03,224,0.875,bicubic +dla46x_c,84.250,15.750,95.270,4.730,1.07,224,0.875,bilinear +mobilenetv2_050.lamb_in1k,83.890,16.110,94.720,5.280,1.97,224,0.875,bicubic +dla46_c,83.650,16.350,94.920,5.080,1.30,224,0.875,bilinear +tf_mobilenetv3_small_075.in1k,83.520,16.480,94.790,5.210,2.04,224,0.875,bilinear +mobilenetv3_small_075.lamb_in1k,83.040,16.960,94.100,5.900,2.04,224,0.875,bicubic +lcnet_050.ra2_in1k,81.780,18.220,93.710,6.290,1.88,224,0.875,bicubic +tf_mobilenetv3_small_minimal_100.in1k,81.380,18.620,93.670,6.330,2.04,224,0.875,bilinear +tinynet_e.in1k,78.900,21.100,92.560,7.440,2.04,106,0.875,bicubic +mobilenetv3_small_050.lamb_in1k,76.990,23.010,91.300,8.700,1.59,224,0.875,bicubic diff --git a/results/results-imagenet-a.csv b/results/results-imagenet-a.csv index 4e306fba..0ed6790c 100644 --- a/results/results-imagenet-a.csv +++ b/results/results-imagenet-a.csv @@ -1,669 +1,791 @@ 
model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation,top1_diff,top5_diff,rank_diff -tf_efficientnet_l2_ns,84.760,15.240,96.147,3.853,480.31,800,0.960,bicubic,-13.790,-3.673,+1 -tf_efficientnet_l2_ns_475,83.400,16.600,95.453,4.547,480.31,475,0.936,bicubic,-15.100,-4.377,+2 -beit_large_patch16_512,81.653,18.347,94.880,5.120,305.67,512,1.000,bicubic,-16.907,-4.960,-2 -deit3_large_patch16_384_in21ft1k,79.213,20.787,93.627,6.373,304.76,384,1.000,bicubic,-19.247,-6.133,+1 -beit_large_patch16_384,79.120,20.880,94.280,5.720,305.00,384,1.000,bicubic,-19.400,-5.540,-2 -swinv2_large_window12to24_192to384_22kft1k,73.867,26.133,91.747,8.253,196.74,384,1.000,bicubic,-24.283,-7.943,+6 -deit3_base_patch16_384_in21ft1k,71.280,28.720,89.947,10.053,86.88,384,1.000,bicubic,-26.550,-9.733,+16 -swinv2_base_window12to24_192to384_22kft1k,71.267,28.733,91.280,8.720,87.92,384,1.000,bicubic,-26.873,-8.500,+5 -vit_large_patch16_384,71.227,28.773,89.853,10.147,304.72,384,1.000,bicubic,-26.993,-9.947,-2 -convnext_xlarge_384_in22ft1k,70.787,29.213,90.400,9.600,350.20,384,1.000,bicubic,-27.563,-9.400,-4 -deit3_huge_patch14_224_in21ft1k,70.227,29.773,90.720,9.280,632.13,224,1.000,bicubic,-27.943,-9.010,0 -volo_d5_512,69.653,30.347,90.413,9.587,296.09,512,1.150,bicubic,-28.117,-9.257,+13 -swin_large_patch4_window12_384,69.613,30.387,89.573,10.427,196.74,384,1.000,bicubic,-28.427,-10.117,+1 -deit3_large_patch16_224_in21ft1k,68.707,31.293,90.013,9.987,304.37,224,1.000,bicubic,-29.463,-9.747,-4 -beit_large_patch16_224,68.507,31.493,89.573,10.427,304.43,224,0.900,bicubic,-29.673,-10.187,-6 -volo_d5_448,68.107,31.893,89.707,10.293,295.91,448,1.150,bicubic,-29.653,-9.913,+10 -convnext_large_384_in22ft1k,67.947,32.053,89.200,10.800,197.77,384,1.000,bicubic,-30.273,-10.530,-9 -swinv2_large_window12to16_192to256_22kft1k,67.280,32.720,88.013,11.987,196.74,256,0.900,bicubic,-30.580,-11.637,+2 -tf_efficientnet_b7_ns,67.080,32.920,88.640,11.360,66.35,600,0.949,bicubic,-30.840,-11.080,-3 -tf_efficientnetv2_xl_in21ft1k,67.000,33.000,86.867,13.133,208.12,512,1.000,bicubic,-30.660,-12.623,+9 -volo_d4_448,66.680,33.320,88.987,11.013,193.41,448,1.150,bicubic,-30.990,-10.623,+7 -tf_efficientnetv2_l_in21ft1k,66.320,33.680,87.840,12.160,118.52,480,1.000,bicubic,-31.380,-11.830,+5 -beit_base_patch16_384,65.880,34.120,88.507,11.493,86.74,384,1.000,bicubic,-31.940,-11.193,+1 -volo_d3_448,65.427,34.573,87.560,12.440,86.63,448,1.000,bicubic,-32.123,-11.990,+14 -convnext_base_384_in22ft1k,65.000,35.000,87.867,12.133,88.59,384,1.000,bicubic,-32.950,-11.783,-10 -swin_base_patch4_window12_384,64.467,35.533,87.493,12.507,87.90,384,1.000,bicubic,-33.423,-12.217,-8 -vit_base_patch16_384,63.693,36.307,86.707,13.293,86.86,384,1.000,bicubic,-34.147,-12.963,-6 -swinv2_base_window12to16_192to256_22kft1k,63.227,36.773,87.493,12.507,87.92,256,0.900,bicubic,-34.423,-12.227,+2 -convnext_xlarge_in22ft1k,62.627,37.373,86.000,14.000,350.20,224,0.875,bicubic,-35.293,-13.680,-12 -cait_m48_448,62.347,37.653,86.453,13.547,356.46,448,1.000,bicubic,-35.133,-13.097,+15 -tf_efficientnet_b6_ns,62.267,37.733,85.173,14.827,43.04,528,0.942,bicubic,-35.363,-14.407,+2 -vit_large_r50_s32_384,61.493,38.507,83.960,16.040,329.09,384,1.000,bicubic,-36.367,-15.710,-13 -tf_efficientnetv2_m_in21ft1k,61.387,38.613,85.413,14.587,54.14,480,1.000,bicubic,-36.093,-14.117,+13 -ig_resnext101_32x48d,61.013,38.987,83.333,16.667,828.41,224,0.875,bilinear,-36.607,-16.367,0 -swin_large_patch4_window7_224,60.907,39.093,85.867,14.133,196.53,224,0.900,bicubic,-36.743,-13.713,-4 
-resnetv2_152x4_bitm,60.787,39.213,83.560,16.440,936.53,480,1.000,bilinear,-36.703,-16.050,+7 -deit3_large_patch16_384,60.507,39.493,85.707,14.293,304.76,384,1.000,bicubic,-36.913,-13.913,+12 -tf_efficientnet_b5_ns,60.293,39.707,84.480,15.520,30.39,456,0.934,bicubic,-37.207,-15.150,+4 -xcit_large_24_p8_384_dist,59.880,40.120,85.480,14.520,188.93,384,1.000,bicubic,-37.640,-14.060,+1 -convnext_large_in22ft1k,59.773,40.227,84.040,15.960,197.77,224,0.875,bicubic,-38.057,-15.650,-18 -dm_nfnet_f6,59.173,40.827,82.333,17.667,438.36,576,0.956,bicubic,-38.427,-17.217,-6 -vit_base_patch8_224,58.920,41.080,82.733,17.267,86.58,224,0.900,bicubic,-38.660,-16.937,-6 -volo_d2_384,58.600,41.400,84.253,15.747,58.87,384,1.000,bicubic,-38.710,-15.347,+12 -dm_nfnet_f5,58.560,41.440,82.773,17.227,377.21,544,0.954,bicubic,-38.980,-16.797,-5 -dm_nfnet_f4,58.133,41.867,81.973,18.027,316.07,512,0.951,bicubic,-39.447,-17.537,-8 -ig_resnext101_32x32d,58.040,41.960,80.613,19.387,468.53,224,0.875,bilinear,-39.330,-19.067,+6 -cait_m36_384,57.813,42.187,84.827,15.173,271.22,384,1.000,bicubic,-39.587,-14.683,+3 -deit3_base_patch16_224_in21ft1k,57.253,42.747,83.520,16.480,86.59,224,1.000,bicubic,-40.237,-16.080,-4 -volo_d5_224,57.120,42.880,82.720,17.280,295.46,224,0.960,bicubic,-40.270,-16.850,+2 -deit3_small_patch16_384_in21ft1k,57.067,42.933,83.080,16.920,22.21,384,1.000,bicubic,-40.063,-16.420,+19 -xcit_medium_24_p8_384_dist,56.693,43.307,83.400,16.600,84.32,384,1.000,bicubic,-40.597,-16.110,+6 -convnext_small_384_in22ft1k,56.187,43.813,83.760,16.240,50.22,384,1.000,bicubic,-41.273,-15.820,-4 -dm_nfnet_f3,55.827,44.173,80.947,19.053,254.92,416,0.940,bicubic,-41.523,-18.613,0 -vit_large_patch16_224,55.627,44.373,80.080,19.920,304.33,224,0.900,bicubic,-42.013,-19.510,-22 -convnext_base_in22ft1k,54.627,45.373,82.173,17.827,88.59,224,0.875,bicubic,-42.843,-17.427,-8 -vit_base_r50_s16_384,54.627,45.373,81.213,18.787,98.95,384,1.000,bicubic,-42.553,-18.347,+11 -cait_s36_384,54.387,45.613,81.360,18.640,68.37,384,1.000,bicubic,-42.943,-18.170,-3 -volo_d1_384,54.333,45.667,80.973,19.027,26.78,384,1.000,bicubic,-42.577,-18.547,+35 -deit3_huge_patch14_224,54.320,45.680,82.093,17.907,632.13,224,0.900,bicubic,-42.570,-17.387,+37 -xcit_small_24_p8_384_dist,54.267,45.733,81.533,18.467,47.63,384,1.000,bicubic,-42.973,-18.077,+1 -resnetv2_101x3_bitm,54.027,45.973,81.040,18.960,387.93,448,1.000,bilinear,-42.963,-18.450,+23 -resnetv2_152x2_bitm,54.013,45.987,82.013,17.987,236.34,448,1.000,bilinear,-42.997,-17.577,+19 -deit3_base_patch16_384,53.427,46.573,80.573,19.427,86.88,384,1.000,bicubic,-43.593,-18.817,+17 -tf_efficientnetv2_l,53.173,46.827,79.147,20.853,118.52,480,1.000,bicubic,-44.107,-20.403,-6 -ig_resnext101_32x16d,53.093,46.907,76.933,23.067,194.03,224,0.875,bilinear,-43.717,-22.667,+36 -volo_d4_224,52.933,47.067,80.440,19.560,192.96,224,0.960,bicubic,-44.367,-19.080,-10 -xcit_large_24_p16_384_dist,52.840,47.160,81.827,18.173,189.10,384,1.000,bicubic,-44.680,-17.653,-26 -swin_base_patch4_window7_224,51.427,48.573,79.973,20.027,87.77,224,0.900,bicubic,-45.823,-19.557,-8 -tf_efficientnet_b4_ns,51.253,48.747,79.173,20.827,19.34,380,0.922,bicubic,-45.697,-20.407,+19 -swsl_resnext101_32x8d,51.227,48.773,78.240,21.760,88.79,224,0.875,bilinear,-45.973,-21.330,-7 -resnetv2_152x2_bit_teacher_384,51.173,48.827,78.480,21.520,236.34,384,1.000,bicubic,-45.657,-20.970,+29 -beit_base_patch16_224,50.707,49.293,79.693,20.307,86.53,224,0.900,bicubic,-46.383,-19.917,0 
-xcit_small_12_p8_384_dist,50.587,49.413,79.600,20.400,26.21,384,1.000,bicubic,-46.643,-19.880,-11 -volo_d3_224,50.253,49.747,78.173,21.827,86.33,224,0.960,bicubic,-46.837,-21.297,0 -cait_s24_384,49.733,50.267,78.733,21.267,47.06,384,1.000,bicubic,-47.337,-20.697,+2 -xcit_medium_24_p16_384_dist,49.333,50.667,79.853,20.147,84.40,384,1.000,bicubic,-47.947,-19.607,-17 -deit_base_distilled_patch16_384,49.320,50.680,79.253,20.747,87.63,384,1.000,bicubic,-47.640,-20.227,+10 -tf_efficientnet_b8,48.960,51.040,77.240,22.760,87.41,672,0.954,bicubic,-48.240,-22.260,-12 -dm_nfnet_f2,48.920,51.080,77.147,22.853,193.78,352,0.920,bicubic,-48.100,-22.293,0 -deit3_large_patch16_224,48.627,51.373,78.133,21.867,304.37,224,0.900,bicubic,-48.313,-21.207,+10 -tf_efficientnetv2_s_in21ft1k,48.507,51.493,77.893,22.107,21.46,384,1.000,bicubic,-48.213,-21.527,+28 -resnest269e,48.200,51.800,74.333,25.667,110.93,416,0.928,bicubic,-48.320,-25.017,+57 -xcit_large_24_p8_224_dist,48.120,51.880,79.107,20.893,188.93,224,1.000,bicubic,-48.950,-20.313,-5 -regnetz_e8,47.827,52.173,76.200,23.800,57.70,320,1.000,bicubic,-49.373,-23.300,-19 -resnetv2_50x3_bitm,47.280,52.720,77.333,22.667,217.32,448,1.000,bilinear,-49.430,-22.217,+26 -xcit_large_24_p8_224,47.160,52.840,74.400,25.600,188.93,224,1.000,bicubic,-49.250,-24.580,+62 -xcit_small_24_p16_384_dist,46.947,53.053,77.160,22.840,47.67,384,1.000,bicubic,-50.173,-22.290,-17 -tf_efficientnet_b8_ap,46.893,53.107,76.507,23.493,87.41,672,0.954,bicubic,-50.217,-23.153,-17 -efficientnetv2_rw_m,46.280,53.720,75.680,24.320,53.24,416,1.000,bicubic,-50.700,-23.860,-3 -swinv2_base_window16_256,46.267,53.733,75.187,24.813,87.92,256,0.900,bicubic,-50.493,-24.163,+17 -swsl_resnext101_32x16d,46.133,53.867,72.253,27.747,194.03,224,0.875,bilinear,-50.467,-27.277,+38 -volo_d2_224,46.080,53.920,75.253,24.747,58.68,224,0.960,bicubic,-50.920,-24.137,-9 -vit_small_patch16_384,45.920,54.080,76.707,23.293,22.20,384,1.000,bicubic,-50.780,-22.773,+22 -ecaresnet269d,45.880,54.120,75.133,24.867,102.09,352,1.000,bicubic,-51.200,-24.337,-18 -vit_small_r26_s32_384,45.733,54.267,76.053,23.947,36.47,384,1.000,bicubic,-50.947,-23.527,+25 -tf_efficientnetv2_m,45.533,54.467,74.533,25.467,54.14,480,1.000,bicubic,-51.607,-24.877,-28 -tf_efficientnet_b7_ap,45.373,54.627,74.213,25.787,66.35,600,0.949,bicubic,-51.827,-25.327,-33 -dm_nfnet_f1,45.320,54.680,74.107,25.893,132.63,320,0.910,bicubic,-51.590,-25.303,-3 -ig_resnext101_32x8d,45.293,54.707,70.853,29.147,88.79,224,0.875,bilinear,-51.017,-28.577,+61 -xcit_medium_24_p8_224_dist,45.213,54.787,76.720,23.280,84.32,224,1.000,bicubic,-51.707,-22.670,-8 -eca_nfnet_l2,44.960,55.040,75.893,24.107,56.72,384,1.000,bicubic,-52.130,-23.617,-28 -convnext_tiny_384_in22ft1k,44.840,55.160,76.680,23.320,28.59,384,1.000,bicubic,-52.040,-22.790,-4 -convnext_small_in22ft1k,44.813,55.187,77.373,22.627,50.22,224,0.875,bicubic,-52.177,-22.037,-18 -crossvit_18_dagger_408,44.293,55.707,73.827,26.173,44.61,408,1.000,bicubic,-52.237,-25.433,+34 -resnest200e,44.133,55.867,73.467,26.533,70.20,320,0.909,bicubic,-52.477,-25.883,+23 -cait_xs24_384,43.947,56.053,75.160,24.840,26.67,384,1.000,bicubic,-52.593,-24.260,+28 -seresnextaa101d_32x8d,43.947,56.053,73.400,26.600,93.59,288,1.000,bicubic,-53.003,-25.990,-18 -resnetrs200,43.747,56.253,72.813,27.187,93.21,320,1.000,bicubic,-52.953,-26.557,+8 -tresnet_xl_448,43.467,56.533,72.453,27.547,78.44,448,0.875,bilinear,-52.503,-26.677,+92 -xcit_small_12_p16_384_dist,43.267,56.733,73.880,26.120,26.25,384,1.000,bicubic,-53.663,-25.520,-19 
-vit_base_patch16_224,43.253,56.747,72.893,27.107,86.57,224,0.900,bicubic,-53.627,-26.637,-14 -resnetrs420,43.147,56.853,70.467,29.533,191.89,416,1.000,bicubic,-53.763,-28.993,-18 -xcit_medium_24_p8_224,43.093,56.907,70.347,29.653,84.32,224,1.000,bicubic,-53.017,-28.543,+74 -tf_efficientnet_b7,42.960,57.040,73.133,26.867,66.35,600,0.949,bicubic,-54.050,-26.387,-32 -xcit_tiny_24_p8_384_dist,42.467,57.533,72.867,27.133,12.11,384,1.000,bicubic,-54.083,-26.453,+18 -swinv2_small_window16_256,42.293,57.707,72.920,27.080,49.73,256,0.900,bicubic,-54.167,-26.280,+26 -crossvit_15_dagger_408,41.907,58.093,72.067,27.933,28.50,408,1.000,bicubic,-54.483,-27.093,+34 -xcit_small_24_p8_224_dist,41.893,58.107,73.680,26.320,47.63,224,1.000,bicubic,-54.977,-25.800,-19 -xcit_small_24_p8_224,41.773,58.227,71.013,28.987,47.63,224,1.000,bicubic,-54.627,-28.137,+30 -vit_large_r50_s32_224,41.653,58.347,70.253,29.747,328.99,224,0.900,bicubic,-55.137,-29.097,-17 -swsl_resnext101_32x4d,41.560,58.440,71.747,28.253,44.18,224,0.875,bilinear,-54.870,-27.723,+25 -swinv2_base_window8_256,41.507,58.493,72.440,27.560,87.92,256,0.900,bicubic,-55.033,-26.830,+14 -convnext_large,41.373,58.627,73.293,26.707,197.77,224,0.875,bicubic,-55.387,-26.007,-15 -deit3_small_patch16_224_in21ft1k,41.240,58.760,71.933,28.067,22.06,224,1.000,bicubic,-55.420,-27.397,0 -seresnext101d_32x8d,41.133,58.867,70.880,29.120,93.59,288,1.000,bicubic,-55.577,-28.480,-11 -tf_efficientnet_b6_ap,40.813,59.187,71.627,28.373,43.04,528,0.942,bicubic,-56.267,-27.993,-51 -resmlp_big_24_224_in22ft1k,40.373,59.627,74.787,25.213,129.14,224,0.875,bicubic,-56.247,-24.723,-2 -deit3_small_patch16_384,40.307,59.693,70.333,29.667,22.21,384,1.000,bicubic,-55.893,-28.957,+43 -tresnet_l_448,40.213,59.787,69.907,30.093,55.99,448,0.875,bilinear,-55.647,-29.213,+84 -deit_base_patch16_384,40.173,59.827,70.760,29.240,86.86,384,1.000,bicubic,-55.977,-28.380,+51 -regnetz_d8_evos,40.093,59.907,72.187,27.813,23.46,320,0.950,bicubic,-56.517,-27.253,-4 -regnetz_040h,40.000,60.000,71.333,28.667,28.94,320,1.000,bicubic,-56.710,-28.167,-20 -resnetrs350,39.947,60.053,68.933,31.067,163.96,384,1.000,bicubic,-56.813,-30.437,-27 -regnetz_d8,39.933,60.067,71.640,28.360,23.37,320,1.000,bicubic,-56.687,-27.810,-8 -swin_s3_base_224,39.787,60.213,70.493,29.507,71.13,224,0.900,bicubic,-56.463,-28.647,+31 -seresnext101_32x8d,39.547,60.453,69.467,30.533,93.57,288,1.000,bicubic,-57.223,-29.883,-31 -deit3_base_patch16_224,39.200,60.800,71.027,28.973,86.59,224,0.900,bicubic,-57.100,-28.153,+25 -volo_d1_224,38.947,61.053,70.267,29.733,26.63,224,0.960,bicubic,-57.383,-29.043,+21 -resnetv2_101x1_bitm,38.933,61.067,71.040,28.960,44.54,448,1.000,bilinear,-57.167,-28.240,+49 -vit_large_patch32_384,38.933,61.067,68.947,31.053,306.63,384,1.000,bicubic,-56.897,-30.203,+75 -regnetz_040,38.733,61.267,70.413,29.587,27.12,320,1.000,bicubic,-57.977,-29.057,-28 -xcit_small_12_p8_224_dist,38.213,61.787,71.280,28.720,26.21,224,1.000,bicubic,-58.477,-28.110,-24 -resnet200d,38.147,61.853,68.627,31.373,64.69,320,1.000,bicubic,-58.573,-30.703,-33 -swinv2_small_window8_256,37.787,62.213,69.867,30.133,49.73,256,0.900,bicubic,-58.503,-29.343,+19 -xcit_large_24_p16_224_dist,37.680,62.320,71.587,28.413,189.10,224,1.000,bicubic,-59.120,-27.763,-43 -seresnet152d,37.653,62.347,69.480,30.520,66.84,320,1.000,bicubic,-59.117,-29.970,-42 -eca_nfnet_l1,37.533,62.467,70.960,29.040,41.41,320,1.000,bicubic,-59.167,-28.330,-30 -xcit_small_12_p8_224,37.533,62.467,68.213,31.787,26.21,224,1.000,bicubic,-58.577,-30.947,+38 
-twins_svt_large,37.213,62.787,69.227,30.773,99.27,224,0.900,bicubic,-59.057,-29.943,+15 -regnetz_d32,37.133,62.867,70.480,29.520,27.58,320,0.950,bicubic,-59.467,-28.900,-20 -vit_base_patch32_384,37.107,62.893,69.787,30.213,88.30,384,1.000,bicubic,-59.383,-29.623,-11 -regnety_064,37.000,63.000,68.187,31.813,30.58,288,1.000,bicubic,-59.360,-31.043,+2 -swin_s3_small_224,36.867,63.133,68.213,31.787,49.74,224,0.900,bicubic,-59.363,-30.877,+14 -efficientnetv2_rw_s,36.813,63.187,68.320,31.680,23.94,384,1.000,bicubic,-59.727,-31.040,-19 -regnety_160,36.787,63.213,69.107,30.893,83.59,288,1.000,bicubic,-59.563,-30.223,0 -convnext_base,36.747,63.253,70.413,29.587,88.59,224,0.875,bicubic,-59.723,-28.817,-15 -resnext101_64x4d,36.720,63.280,66.653,33.347,83.46,288,1.000,bicubic,-59.360,-32.587,+34 -convnext_tiny_in22ft1k,36.267,63.733,69.560,30.440,28.59,224,0.875,bicubic,-59.953,-29.780,+10 -cait_xxs36_384,36.253,63.747,67.800,32.200,17.37,384,1.000,bicubic,-59.587,-31.290,+55 -jx_nest_base,36.067,63.933,66.760,33.240,67.72,224,0.875,bicubic,-60.183,-32.450,+5 -pit_b_distilled_224,35.627,64.373,69.120,30.880,74.79,224,0.900,bicubic,-61.043,-30.230,-38 -sequencer2d_l,35.560,64.440,67.333,32.667,54.30,224,0.875,bicubic,-60.580,-31.827,+20 -regnety_080,35.560,64.440,67.240,32.760,39.18,288,1.000,bicubic,-60.970,-32.080,-26 -tf_efficientnet_b3_ns,35.507,64.493,67.747,32.253,12.23,300,0.904,bicubic,-60.883,-31.603,-14 -cs3se_edgenet_x,35.427,64.573,67.280,32.720,50.72,320,1.000,bicubic,-61.013,-32.120,-20 -tf_efficientnet_b6,35.227,64.773,67.720,32.280,43.04,528,0.942,bicubic,-61.443,-31.650,-44 -resnetrs270,35.000,65.000,65.480,34.520,129.86,352,1.000,bicubic,-61.690,-33.870,-48 -tf_efficientnet_b5_ap,34.800,65.200,67.467,32.533,30.39,456,0.934,bicubic,-61.880,-31.993,-47 -xcit_tiny_12_p8_384_dist,34.653,65.347,66.280,33.720,6.71,384,1.000,bicubic,-61.427,-32.860,+23 -vit_base_patch16_224_miil,34.520,65.480,65.000,35.000,86.54,224,0.875,bilinear,-61.930,-34.300,-26 -xcit_medium_24_p16_224_dist,34.320,65.680,67.893,32.107,84.40,224,1.000,bicubic,-62.270,-31.377,-40 -resnet152d,34.307,65.693,65.907,34.093,60.21,320,1.000,bicubic,-62.053,-33.483,-19 -tresnet_m_448,34.107,65.893,64.507,35.493,31.39,448,0.875,bilinear,-60.883,-34.473,+147 -resmlp_big_24_distilled_224,34.067,65.933,69.600,30.400,129.14,224,0.875,bicubic,-62.383,-29.710,-31 -regnetv_064,33.987,66.013,67.867,32.133,30.58,288,1.000,bicubic,-62.423,-31.493,-28 -xcit_tiny_24_p16_384_dist,33.827,66.173,65.387,34.613,12.12,384,1.000,bicubic,-62.093,-33.833,+29 -twins_pcpvt_large,33.413,66.587,67.933,32.067,60.99,224,0.900,bicubic,-62.737,-31.247,+3 -twins_svt_base,33.173,66.827,65.773,34.227,56.07,224,0.900,bicubic,-62.987,-33.287,-1 -pit_b_224,33.160,66.840,62.347,37.653,73.76,224,0.900,bicubic,-62.480,-36.323,+55 -resnetv2_152x2_bit_teacher,33.053,66.947,64.253,35.747,236.34,224,0.875,bicubic,-63.047,-35.017,+9 -swsl_resnext50_32x4d,33.027,66.973,65.080,34.920,25.03,224,0.875,bilinear,-62.833,-34.170,+29 -mobilevitv2_200_384_in22ft1k,32.960,67.040,65.480,34.520,18.45,384,1.000,bicubic,-63.080,-33.600,+14 -swinv2_cr_small_ns_224,32.933,67.067,65.960,34.040,49.70,224,0.900,bicubic,-63.247,-33.180,-10 -xception65,32.760,67.240,62.973,37.027,39.92,299,0.940,bicubic,-63.590,-36.267,-27 -xcit_large_24_p16_224,32.760,67.240,62.120,37.880,189.10,224,1.000,bicubic,-62.660,-36.500,+81 -ssl_resnext101_32x16d,32.653,67.347,64.040,35.960,194.03,224,0.875,bilinear,-63.137,-35.140,+33 
-swin_small_patch4_window7_224,32.587,67.413,65.453,34.547,49.61,224,0.900,bicubic,-63.323,-33.567,+19 -mobilevitv2_175_384_in22ft1k,32.453,67.547,64.720,35.280,14.25,384,1.000,bicubic,-63.727,-34.410,-14 -jx_nest_small,32.280,67.720,63.733,36.267,38.35,224,0.875,bicubic,-63.680,-35.297,+13 -tf_efficientnet_b5,31.853,68.147,65.307,34.693,30.39,456,0.934,bicubic,-64.497,-34.003,-34 -swinv2_tiny_window16_256,31.720,68.280,65.587,34.413,28.35,256,0.900,bicubic,-64.220,-33.553,+13 -swinv2_cr_small_224,31.680,68.320,62.507,37.493,49.70,224,0.900,bicubic,-64.380,-36.363,+1 -regnetz_c16_evos,31.493,68.507,66.280,33.720,13.49,320,0.950,bicubic,-64.637,-33.080,-10 -resnest101e,31.400,68.600,64.347,35.653,48.28,256,0.875,bilinear,-64.460,-34.863,+17 -crossvit_base_240,31.347,68.653,61.293,38.707,105.03,240,0.875,bicubic,-64.173,-37.527,+52 -regnetv_040,31.333,68.667,64.667,35.333,20.64,288,1.000,bicubic,-64.857,-34.663,-24 -convnext_small,31.320,68.680,66.040,33.960,50.22,224,0.875,bicubic,-64.850,-33.060,-22 -cait_s24_224,31.200,68.800,64.560,35.440,46.92,224,1.000,bicubic,-65.180,-34.590,-46 -efficientnet_b4,30.840,69.160,64.600,35.400,19.34,384,1.000,bicubic,-65.310,-34.590,-20 -regnety_040,30.613,69.387,63.827,36.173,20.65,288,1.000,bicubic,-65.397,-35.353,-2 -sequencer2d_m,30.600,69.400,62.933,37.067,38.31,224,0.875,bicubic,-65.210,-36.177,+16 -crossvit_18_240,30.600,69.400,61.947,38.053,43.27,240,0.875,bicubic,-64.840,-36.843,+59 -dm_nfnet_f0,30.547,69.453,62.867,37.133,71.49,256,0.900,bicubic,-65.603,-36.383,-25 -xcit_small_24_p16_224_dist,30.520,69.480,64.760,35.240,47.67,224,1.000,bicubic,-65.690,-34.450,-35 -crossvit_18_dagger_240,30.507,69.493,61.840,38.160,44.27,240,0.875,bicubic,-65.063,-37.220,+33 -xcit_medium_24_p16_224,30.187,69.813,59.333,40.667,84.40,224,1.000,bicubic,-65.343,-39.407,+40 -cait_xxs24_384,30.040,69.960,63.920,36.080,12.03,384,1.000,bicubic,-65.240,-35.040,+72 -twins_pcpvt_base,29.973,70.027,64.600,35.400,43.83,224,0.900,bicubic,-65.817,-34.530,+12 -swsl_resnet50,29.840,70.160,63.827,36.173,25.56,224,0.875,bilinear,-65.570,-35.473,+58 -mobilevitv2_150_384_in22ft1k,29.840,70.160,62.213,37.787,10.59,384,1.000,bicubic,-65.860,-36.927,+18 -vit_relpos_base_patch16_clsgap_224,29.720,70.280,62.867,37.133,86.43,224,0.900,bicubic,-66.040,-36.173,+12 -deit_base_distilled_patch16_224,29.600,70.400,64.440,35.560,87.34,224,0.900,bicubic,-66.490,-34.750,-22 -cs3sedarknet_x,29.573,70.427,61.493,38.507,35.40,288,1.000,bicubic,-66.467,-37.617,-18 -convit_base,29.507,70.493,61.760,38.240,86.54,224,0.875,bicubic,-66.043,-37.110,+27 -vit_relpos_medium_patch16_cls_224,29.320,70.680,60.653,39.347,38.76,224,0.900,bicubic,-66.160,-38.297,+40 -ssl_resnext101_32x8d,29.120,70.880,61.013,38.987,88.79,224,0.875,bilinear,-66.370,-38.107,+38 -tf_efficientnetv2_s,29.053,70.947,61.227,38.773,21.46,384,1.000,bicubic,-67.287,-37.973,-59 -resnet101d,28.987,71.013,62.040,37.960,44.57,320,1.000,bicubic,-67.313,-37.190,-57 -xception65p,28.987,71.013,59.920,40.080,39.82,299,0.940,bicubic,-67.223,-39.260,-49 -resnetrs152,28.920,71.080,60.507,39.493,86.62,320,1.000,bicubic,-67.660,-38.733,-88 -regnetz_c16,28.907,71.093,63.347,36.653,13.46,320,0.940,bicubic,-66.893,-35.753,-3 -vit_relpos_medium_patch16_224,28.840,71.160,62.013,37.987,38.75,224,0.900,bicubic,-66.620,-36.947,+35 -xcit_tiny_24_p8_224_dist,28.733,71.267,61.373,38.627,12.11,224,1.000,bicubic,-67.077,-37.837,-7 -xcit_tiny_24_p8_224,28.707,71.293,60.440,39.560,12.11,224,1.000,bicubic,-66.963,-38.610,+7 
-crossvit_15_dagger_240,28.533,71.467,60.333,39.667,28.21,240,0.875,bicubic,-67.157,-38.497,+4 -xcit_small_24_p16_224,28.347,71.653,58.707,41.293,47.67,224,1.000,bicubic,-67.183,-40.063,+19 -cs3edgenet_x,28.333,71.667,60.813,39.187,47.82,288,1.000,bicubic,-67.717,-38.327,-33 -coat_lite_small,27.547,72.453,58.560,41.440,19.84,224,0.900,bicubic,-67.993,-40.300,+14 -deit_base_patch16_224,27.440,72.560,58.893,41.107,86.57,224,0.900,bicubic,-68.000,-39.947,+31 -vit_relpos_base_patch16_224,27.347,72.653,61.147,38.853,86.43,224,0.900,bicubic,-68.223,-37.883,+9 -resnetv2_50x1_bitm,27.307,72.693,62.853,37.147,25.55,448,1.000,bilinear,-67.703,-36.207,+84 -xcit_small_12_p16_224_dist,27.120,72.880,59.800,40.200,26.25,224,1.000,bicubic,-68.900,-39.330,-35 -vit_small_patch16_224,27.013,72.987,59.187,40.813,22.05,224,0.900,bicubic,-68.357,-39.963,+38 -sequencer2d_s,26.813,73.187,60.613,39.387,27.65,224,0.875,bicubic,-69.177,-38.437,-35 -mobilevitv2_200_in22ft1k,26.680,73.320,59.373,40.627,18.45,256,0.888,bicubic,-68.480,-39.577,+55 -swin_s3_tiny_224,26.520,73.480,60.320,39.680,28.33,224,0.900,bicubic,-68.640,-38.620,+56 -swinv2_tiny_window8_256,26.413,73.587,60.560,39.440,28.35,256,0.900,bicubic,-69.087,-38.560,+16 -tf_efficientnet_b4,26.320,73.680,60.107,39.893,19.34,380,0.922,bicubic,-69.580,-39.063,-31 -tf_efficientnet_b4_ap,26.240,73.760,60.213,39.787,19.34,380,0.922,bicubic,-69.920,-39.067,-63 -nfnet_l0,26.213,73.787,61.720,38.280,35.07,288,1.000,bicubic,-69.907,-37.520,-55 -deit3_small_patch16_224,26.213,73.787,54.413,45.587,22.06,224,0.900,bicubic,-68.787,-44.047,+78 -regnety_032,26.200,73.800,60.973,39.027,19.44,288,1.000,bicubic,-69.770,-38.217,-42 -fbnetv3_g,26.120,73.880,61.067,38.933,16.62,288,0.950,bilinear,-69.390,-37.923,+8 -ecaresnet50t,26.120,73.880,59.987,40.013,25.57,320,0.950,bicubic,-69.390,-39.133,+4 -ecaresnet101d,26.040,73.960,59.000,41.000,44.57,224,0.875,bicubic,-69.490,-40.130,-2 -mobilevitv2_175_in22ft1k,26.040,73.960,58.453,41.547,14.25,256,0.888,bicubic,-69.190,-40.337,+37 -visformer_small,25.840,74.160,58.907,41.093,40.22,224,0.900,bicubic,-69.630,-39.993,+9 -halo2botnet50ts_256,25.587,74.413,56.853,43.147,22.64,256,0.950,bicubic,-69.833,-42.157,+16 -coat_mini,25.493,74.507,57.707,42.293,10.34,224,0.900,bicubic,-69.477,-41.073,+72 -vit_relpos_medium_patch16_rpn_224,25.453,74.547,58.627,41.373,38.73,224,0.900,bicubic,-70.057,-40.453,-1 -crossvit_15_240,25.453,74.547,57.547,42.453,27.53,240,0.875,bicubic,-69.697,-41.383,+43 -vit_srelpos_medium_patch16_224,25.387,74.613,58.480,41.520,38.74,224,0.900,bicubic,-69.843,-40.510,+30 -xcit_small_12_p16_224,25.173,74.827,56.080,43.920,26.25,224,1.000,bicubic,-70.247,-42.760,+12 -resnetv2_50x1_bit_distilled,25.133,74.867,59.653,40.347,25.55,224,0.875,bicubic,-70.987,-39.627,-70 -convit_small,25.107,74.893,57.280,42.720,27.78,224,0.875,bicubic,-70.093,-41.620,+31 -vit_base_patch16_rpn_224,25.080,74.920,58.653,41.347,86.54,224,0.900,bicubic,-70.300,-40.277,+13 -gc_efficientnetv2_rw_t,25.053,74.947,57.720,42.280,13.68,288,1.000,bicubic,-70.687,-41.300,-33 -eca_nfnet_l0,24.813,75.187,60.093,39.907,24.14,288,1.000,bicubic,-71.137,-39.117,-55 -xception41p,24.800,75.200,55.173,44.827,26.91,299,0.940,bicubic,-70.710,-43.737,-7 -tnt_s_patch16_224,24.720,75.280,58.187,41.813,23.76,224,0.900,bicubic,-70.320,-40.643,+52 -resnetv2_50d_evos,24.467,75.533,56.387,43.613,25.59,288,0.950,bicubic,-71.143,-42.643,-25 -xcit_tiny_12_p16_384_dist,24.440,75.560,57.067,42.933,6.72,384,1.000,bicubic,-70.690,-41.953,+36 
-cs3darknet_x,24.360,75.640,57.813,42.187,35.05,288,1.000,bicubic,-71.500,-41.367,-51 -efficientnetv2_rw_t,24.280,75.720,57.360,42.640,13.65,288,1.000,bicubic,-71.320,-41.710,-27 -convnext_tiny,24.267,75.733,59.333,40.667,28.59,224,0.875,bicubic,-71.283,-39.667,-25 -ssl_resnext101_32x4d,24.173,75.827,57.413,42.587,44.18,224,0.875,bilinear,-71.267,-41.717,-8 -swinv2_cr_tiny_ns_224,24.120,75.880,58.227,41.773,28.33,224,0.900,bicubic,-71.250,-40.713,+5 -twins_svt_small,24.107,75.893,57.133,42.867,24.06,224,0.900,bicubic,-71.093,-41.747,+19 -vit_small_r26_s32_224,24.080,75.920,56.173,43.827,36.43,224,0.900,bicubic,-71.560,-43.017,-37 -mobilevitv2_150_in22ft1k,24.053,75.947,55.987,44.013,10.59,256,0.888,bicubic,-71.087,-42.873,+26 -vit_relpos_small_patch16_224,24.027,75.973,58.200,41.800,21.98,224,0.900,bicubic,-71.133,-40.750,+20 -poolformer_m48,24.027,75.973,57.280,42.720,73.47,224,0.950,bicubic,-71.613,-41.660,-39 -tf_efficientnet_b2_ns,24.013,75.987,57.280,42.720,9.11,260,0.890,bicubic,-71.747,-41.840,-51 -cs3sedarknet_l,23.960,76.040,58.707,41.293,21.91,288,0.950,bicubic,-71.350,-40.423,+2 -resnetv2_50d_gn,23.920,76.080,56.307,43.693,25.57,288,0.950,bicubic,-71.510,-42.733,-13 -vit_small_patch32_384,23.760,76.240,57.293,42.707,22.92,384,1.000,bicubic,-71.290,-41.697,+34 -convnext_nano,23.640,76.360,55.800,44.200,15.59,288,1.000,bicubic,-71.720,-43.050,-3 -lamhalobotnet50ts_256,23.573,76.427,55.333,44.667,22.57,256,0.950,bicubic,-71.577,-43.547,+17 -resnet152,23.560,76.440,53.680,46.320,60.19,224,0.950,bicubic,-72.340,-45.400,-71 -nasnetalarge,23.467,76.533,55.013,44.987,88.75,331,0.911,bicubic,-72.213,-43.917,-50 -crossvit_small_240,23.440,76.560,56.813,43.187,26.86,240,0.875,bicubic,-71.390,-42.207,+53 -levit_384,23.427,76.573,56.373,43.627,39.13,224,0.900,bicubic,-72.103,-42.677,-38 -pnasnet5large,23.320,76.680,53.640,46.360,86.06,331,0.911,bicubic,-72.390,-45.280,-56 -convnext_tiny_hnf,23.227,76.773,55.200,44.800,28.59,224,0.950,bicubic,-72.283,-43.820,-34 -efficientnet_b3,23.213,76.787,55.960,44.040,12.23,320,1.000,bicubic,-72.497,-43.080,-60 -jx_nest_tiny,23.173,76.827,56.213,43.787,17.06,224,0.875,bicubic,-72.067,-42.767,-5 -resnet61q,22.987,77.013,55.747,44.253,36.85,288,1.000,bicubic,-72.793,-43.243,-66 -halonet50ts,22.920,77.080,54.000,46.000,22.73,256,0.940,bicubic,-72.220,-44.770,+9 -vit_srelpos_small_patch16_224,22.907,77.093,55.733,44.267,21.97,224,0.900,bicubic,-72.123,-43.227,+24 -resmlp_big_24_224,22.853,77.147,54.293,45.707,129.14,224,0.875,bicubic,-71.807,-44.187,+59 -twins_pcpvt_small,22.707,77.293,56.853,43.147,24.11,224,0.900,bicubic,-72.503,-42.027,-6 -poolformer_m36,22.507,77.493,55.293,44.707,56.17,224,0.950,bicubic,-72.873,-43.557,-22 -vit_base_patch32_224,22.400,77.600,53.987,46.013,88.22,224,0.900,bicubic,-72.600,-45.043,+25 -pit_s_distilled_224,22.360,77.640,57.093,42.907,24.04,224,0.900,bicubic,-72.880,-41.957,-14 -xcit_tiny_12_p8_224_dist,22.067,77.933,54.280,45.720,6.71,224,1.000,bicubic,-73.033,-44.630,+7 -tresnet_m,21.667,78.333,53.840,46.160,31.39,224,0.875,bilinear,-74.043,-45.190,-70 -convmixer_1536_20,21.213,78.787,55.520,44.480,51.63,224,0.960,bicubic,-73.857,-43.510,+10 -swin_tiny_patch4_window7_224,21.147,78.853,55.973,44.027,28.29,224,0.900,bicubic,-73.983,-42.877,+1 -pit_s_224,21.093,78.907,53.587,46.413,23.46,224,0.900,bicubic,-73.497,-45.113,+56 -xcit_tiny_12_p8_224,21.027,78.973,52.467,47.533,6.71,224,1.000,bicubic,-73.663,-46.363,+45 -resnet51q,20.960,79.040,55.693,44.307,35.70,288,1.000,bilinear,-74.910,-43.437,-92 
-regnetz_b16,20.933,79.067,53.853,46.147,9.72,288,0.940,bicubic,-74.127,-45.197,+7 -resnetrs101,20.867,79.133,52.813,47.187,63.62,288,0.940,bicubic,-74.563,-46.217,-40 -sebotnet33ts_256,20.733,79.267,48.787,51.213,13.70,256,0.940,bicubic,-73.847,-49.713,+52 -deit_small_distilled_patch16_224,20.707,79.293,55.147,44.853,22.44,224,0.900,bicubic,-74.003,-43.883,+38 -resnest50d_4s2x40d,20.373,79.627,52.827,47.173,30.42,224,0.875,bicubic,-74.587,-46.243,+16 -resnetaa50,20.093,79.907,52.000,48.000,25.56,288,1.000,bicubic,-75.117,-46.930,-23 -ssl_resnext50_32x4d,20.013,79.987,53.627,46.373,25.03,224,0.875,bilinear,-74.857,-45.263,+22 -haloregnetz_b,19.987,80.013,50.013,49.987,11.68,224,0.940,bicubic,-74.713,-48.647,+35 -resnetv2_101,19.960,80.040,49.227,50.773,44.54,224,0.950,bicubic,-75.660,-49.763,-75 -xcit_nano_12_p8_384_dist,19.800,80.200,50.573,49.427,3.05,384,1.000,bicubic,-73.720,-47.967,+151 -tresnet_xl,19.640,80.360,53.133,46.867,78.44,224,0.875,bilinear,-75.800,-45.917,-53 -gluon_senet154,19.333,80.667,47.573,52.427,115.09,224,0.875,bicubic,-75.587,-51.187,+13 -resnet101,19.320,80.680,49.587,50.413,44.55,224,0.950,bicubic,-76.040,-49.273,-41 -rexnet_200,19.227,80.773,52.720,47.280,16.37,224,0.875,bicubic,-75.723,-46.290,+8 -levit_256,19.187,80.813,50.093,49.907,18.89,224,0.900,bicubic,-75.823,-48.797,+1 -repvgg_b3,19.133,80.867,50.280,49.720,123.09,224,0.875,bilinear,-75.437,-48.500,+40 -lambda_resnet50ts,19.133,80.867,49.307,50.693,21.54,256,0.950,bicubic,-75.647,-49.153,+20 -mixer_b16_224_miil,19.040,80.960,51.227,48.773,59.88,224,0.875,bilinear,-76.260,-47.653,-42 -legacy_senet154,19.027,80.973,47.960,52.040,115.09,224,0.875,bilinear,-76.043,-50.870,-12 -gluon_seresnext101_64x4d,18.933,81.067,49.160,50.840,88.23,224,0.875,bicubic,-75.987,-49.670,+3 -deit_small_patch16_224,18.920,81.080,51.400,48.600,22.05,224,0.900,bicubic,-75.470,-47.290,+60 -mobilevitv2_200,18.920,81.080,50.560,49.440,18.45,256,0.888,bicubic,-75.910,-48.150,+13 -edgenext_small,18.667,81.333,53.600,46.400,5.59,320,1.000,bicubic,-76.743,-45.500,-56 -tf_efficientnet_b1_ns,18.667,81.333,51.693,48.307,7.79,240,0.882,bicubic,-76.513,-47.417,-37 -poolformer_s36,18.400,81.600,51.867,48.133,30.86,224,0.900,bicubic,-76.690,-47.043,-23 -seresnext50_32x4d,18.360,81.640,50.960,49.040,27.56,224,0.875,bicubic,-76.670,-47.920,-13 -cs3darknet_l,18.307,81.693,51.867,48.133,21.16,288,0.950,bicubic,-76.813,-47.113,-28 -ecaresnet50d,18.267,81.733,51.840,48.160,25.58,224,0.875,bicubic,-76.353,-47.050,+23 -cait_xxs36_224,18.267,81.733,49.427,50.573,17.30,224,1.000,bicubic,-75.993,-49.293,+63 -sehalonet33ts,18.227,81.773,47.787,52.213,13.69,256,0.940,bicubic,-76.553,-50.783,+6 -tf_efficientnet_lite4,18.133,81.867,50.720,49.280,13.01,380,0.920,bilinear,-76.747,-48.300,-4 -vit_tiny_patch16_384,18.013,81.987,50.333,49.667,5.79,384,1.000,bicubic,-75.637,-48.267,+120 -mobilevitv2_175,17.773,82.227,49.760,50.240,14.25,256,0.888,bicubic,-77.117,-49.100,-7 -resnest50d_1s4x24d,17.693,82.307,49.800,50.200,25.68,224,0.875,bicubic,-77.057,-49.180,+5 -resnest50d,17.360,82.640,50.733,49.267,27.48,224,0.875,bilinear,-77.490,-48.147,-4 -gluon_seresnext101_32x4d,17.360,82.640,46.373,53.627,48.96,224,0.875,bicubic,-77.560,-52.437,-12 -efficientnet_el,17.320,82.680,50.000,50.000,10.59,300,0.904,bicubic,-77.800,-48.980,-37 -inception_v4,17.280,82.720,45.933,54.067,42.68,299,0.875,bicubic,-77.100,-52.647,+44 -tf_efficientnet_b3_ap,17.200,82.800,49.667,50.333,12.23,300,0.904,bicubic,-78.120,-49.233,-65 
-xcit_tiny_24_p16_224_dist,17.187,82.813,47.480,52.520,12.12,224,1.000,bicubic,-77.343,-51.300,+25 -tf_efficientnet_b3,17.000,83.000,49.267,50.733,12.23,300,0.904,bicubic,-78.010,-49.643,-26 -xception71,17.000,83.000,45.533,54.467,42.34,299,0.903,bicubic,-77.280,-53.107,+49 -cs3darknet_focus_l,16.973,83.027,50.480,49.520,21.15,288,0.950,bicubic,-78.197,-48.480,-55 -resmlp_36_distilled_224,16.880,83.120,51.480,48.520,44.69,224,0.875,bicubic,-78.000,-47.360,-16 -gluon_resnext101_64x4d,16.880,83.120,44.173,55.827,83.46,224,0.875,bicubic,-77.780,-54.477,+2 -tf_efficientnetv2_b3,16.667,83.333,48.680,51.320,14.36,300,0.904,bicubic,-78.493,-50.140,-54 -gluon_resnet152_v1d,16.600,83.400,44.293,55.707,60.21,224,0.875,bicubic,-78.140,-54.447,-7 -tresnet_l,16.573,83.427,49.947,50.053,55.99,224,0.875,bilinear,-78.717,-49.063,-71 -inception_resnet_v2,16.573,83.427,44.933,55.067,55.84,299,0.897,bicubic,-77.957,-53.847,+15 -gluon_resnet152_v1s,16.560,83.440,44.507,55.493,60.32,224,0.875,bicubic,-78.480,-54.423,-40 -gmlp_s16_224,16.547,83.453,45.120,54.880,19.42,224,0.875,bicubic,-77.613,-53.380,+57 -mobilevitv2_150,16.480,83.520,48.453,51.547,10.59,256,0.888,bicubic,-78.070,-50.257,+8 -resmlp_24_distilled_224,16.440,83.560,50.373,49.627,30.02,224,0.875,bicubic,-78.020,-48.397,+21 -gluon_xception65,16.440,83.560,46.040,53.960,39.92,299,0.903,bicubic,-77.820,-52.530,+40 -gcresnet50t,16.373,83.627,48.227,51.773,25.90,256,0.900,bicubic,-78.477,-50.563,-23 -gernet_l,16.347,83.653,47.213,52.787,31.08,256,0.875,bilinear,-78.743,-51.687,-53 -wide_resnet50_2,16.307,83.693,48.400,51.600,68.88,224,0.875,bicubic,-78.773,-50.570,-52 -xcit_tiny_24_p16_224,16.280,83.720,45.973,54.027,12.12,224,1.000,bicubic,-77.790,-52.557,+57 -gcresnext50ts,16.240,83.760,46.533,53.467,15.67,256,0.900,bicubic,-78.250,-52.137,+9 -repvgg_b3g4,16.227,83.773,47.640,52.360,83.83,224,0.875,bilinear,-78.293,-51.330,+6 -ens_adv_inception_resnet_v2,16.213,83.787,43.613,56.387,55.84,299,0.897,bicubic,-77.947,-54.987,+46 -edgenext_small_rw,15.960,84.040,49.667,50.333,7.83,320,1.000,bicubic,-78.700,-49.123,-16 -ssl_resnet50,15.920,84.080,49.400,50.600,25.56,224,0.875,bilinear,-78.520,-49.520,+13 -regnety_320,15.653,84.347,44.827,55.173,145.05,224,0.875,bicubic,-78.887,-54.033,-3 -ecaresnet101d_pruned,15.600,84.400,48.053,51.947,24.88,224,0.875,bicubic,-79.480,-50.927,-61 -convmixer_768_32,15.533,84.467,47.960,52.040,21.11,224,0.960,bicubic,-78.967,-50.890,+1 -ecaresnet26t,15.467,84.533,47.907,52.093,16.01,320,0.950,bicubic,-78.853,-50.813,+21 -coat_tiny,15.387,84.613,45.640,54.360,5.50,224,0.900,bicubic,-78.203,-52.780,+89 -skresnext50_32x4d,15.347,84.653,44.507,55.493,27.48,224,0.875,bicubic,-78.903,-53.953,+26 -vit_relpos_base_patch32_plus_rpn_256,15.240,84.760,42.613,57.387,119.42,256,0.900,bicubic,-78.490,-55.457,+76 -cait_xxs24_224,15.160,84.840,44.947,55.053,11.96,224,1.000,bicubic,-78.440,-53.493,+85 -ecaresnetlight,15.147,84.853,45.800,54.200,30.16,224,0.875,bicubic,-79.623,-53.000,-34 -levit_192,14.893,85.107,44.920,55.080,10.95,224,0.900,bicubic,-79.287,-53.620,+32 -rexnet_150,14.707,85.293,46.920,53.080,9.73,224,0.875,bicubic,-79.773,-51.870,-3 -darknet53,14.680,85.320,47.120,52.880,41.61,288,1.000,bicubic,-79.950,-51.770,-26 -darknetaa53,14.573,85.427,45.453,54.547,36.02,288,1.000,bilinear,-79.897,-53.307,-3 -resnext50_32x4d,14.533,85.467,44.187,55.813,25.03,224,0.950,bicubic,-80.007,-54.423,-14 -coat_lite_mini,14.493,85.507,44.547,55.453,11.01,224,0.900,bicubic,-79.557,-54.013,+39 
-efficientnet_el_pruned,14.480,85.520,46.080,53.920,10.59,300,0.904,bicubic,-79.920,-52.660,0 -efficientnet_b2,14.440,85.560,46.067,53.933,9.11,288,1.000,bicubic,-80.170,-52.643,-27 -seresnet33ts,14.427,85.573,46.133,53.867,19.78,256,0.900,bicubic,-80.433,-52.657,-51 -poolformer_s24,14.267,85.733,47.227,52.773,21.39,224,0.900,bicubic,-80.283,-51.653,-25 -legacy_seresnext101_32x4d,14.160,85.840,43.013,56.987,48.96,224,0.875,bilinear,-80.210,-55.637,0 -seresnet50,14.147,85.853,45.507,54.493,28.09,224,0.875,bicubic,-80.403,-53.243,-25 -fbnetv3_d,14.107,85.893,46.480,53.520,10.31,256,0.950,bilinear,-79.823,-52.260,+43 -eca_resnet33ts,14.080,85.920,47.360,52.640,19.68,256,0.900,bicubic,-80.110,-51.400,+16 -gernet_m,14.053,85.947,46.013,53.987,21.14,224,0.875,bilinear,-80.567,-52.847,-35 -mobilevitv2_125,14.000,86.000,44.987,55.013,7.48,256,0.888,bicubic,-79.970,-53.573,+36 -gluon_resnext101_32x4d,13.867,86.133,41.653,58.347,44.18,224,0.875,bicubic,-80.673,-56.977,-27 -gcresnet33ts,13.760,86.240,45.053,54.947,19.88,256,0.900,bicubic,-80.710,-53.717,-18 -gluon_seresnext50_32x4d,13.613,86.387,43.720,56.280,27.56,224,0.875,bicubic,-80.717,-54.890,-4 -resmlp_36_224,13.520,86.480,46.693,53.307,44.69,224,0.875,bicubic,-80.680,-51.967,+9 -resnet50_gn,13.467,86.533,42.747,57.253,25.56,224,0.940,bicubic,-80.883,-55.963,-8 -repvgg_b2g4,13.427,86.573,43.827,56.173,61.76,224,0.875,bilinear,-80.413,-54.763,+41 -eca_botnext26ts_256,13.373,86.627,42.173,57.827,10.59,256,0.950,bicubic,-80.407,-56.327,+45 -ese_vovnet39b,13.333,86.667,43.813,56.187,24.57,224,0.875,bicubic,-80.757,-54.847,+18 -regnetx_320,13.320,86.680,40.720,59.280,107.81,224,0.875,bicubic,-81.140,-58.020,-22 -pit_xs_distilled_224,13.267,86.733,44.560,55.440,11.00,224,0.900,bicubic,-80.553,-54.110,+39 -efficientnet_b3_pruned,13.173,86.827,45.227,54.773,9.86,300,0.904,bicubic,-81.457,-53.533,-49 -gluon_resnet101_v1d,13.173,86.827,41.480,58.520,44.57,224,0.875,bicubic,-81.057,-57.070,-2 -mixnet_xl,13.120,86.880,43.240,56.760,11.90,224,0.875,bicubic,-81.070,-55.100,+3 -cspresnext50,13.053,86.947,45.000,55.000,20.57,256,0.887,bilinear,-81.777,-53.770,-68 -nf_regnet_b1,12.947,87.053,44.387,55.613,10.22,288,0.900,bicubic,-81.163,-54.243,+10 -eca_halonext26ts,12.933,87.067,42.800,57.200,10.76,256,0.940,bicubic,-81.107,-55.690,+14 -mobilevit_s,12.880,87.120,40.787,59.213,5.58,256,0.900,bicubic,-80.300,-57.653,+86 -pit_xs_224,12.813,87.187,42.827,57.173,10.62,224,0.900,bicubic,-80.307,-55.503,+91 -gluon_inception_v3,12.640,87.360,40.493,59.507,23.83,299,0.875,bicubic,-80.810,-58.077,+62 -crossvit_9_dagger_240,12.573,87.427,41.787,58.213,8.78,240,0.875,bicubic,-80.317,-56.463,+101 -coat_lite_tiny,12.547,87.453,41.133,58.867,5.72,224,0.900,bicubic,-80.683,-57.127,+80 -resmlp_24_224,12.507,87.493,43.427,56.573,30.02,224,0.875,bicubic,-81.513,-54.903,+10 -regnety_120,12.400,87.600,42.213,57.787,51.82,224,0.875,bicubic,-82.080,-56.597,-41 -efficientnet_em,12.360,87.640,43.853,56.147,6.90,240,0.882,bicubic,-81.480,-54.957,+22 -cspdarknet53,12.027,87.973,43.280,56.720,27.64,256,0.887,bilinear,-82.633,-55.520,-68 -hrnet_w64,12.000,88.000,40.813,59.187,128.06,224,0.875,bilinear,-82.020,-57.807,+5 -xcit_tiny_12_p16_224_dist,11.973,88.027,40.107,59.893,6.72,224,1.000,bicubic,-81.427,-58.373,+59 -gluon_resnet101_v1s,11.880,88.120,40.973,59.027,44.67,224,0.875,bicubic,-82.840,-57.847,-75 -gmixer_24_224,11.867,88.133,37.787,62.213,24.72,224,0.875,bicubic,-80.963,-60.093,+97 -nf_resnet50,11.760,88.240,45.933,54.067,25.56,288,0.940,bicubic,-82.790,-52.857,-60 
-fbnetv3_b,11.733,88.267,44.387,55.613,8.60,256,0.950,bilinear,-82.237,-54.243,+4 -resnet50d,11.720,88.280,42.467,57.533,25.58,224,0.875,bicubic,-82.540,-56.253,-27 -dpn92,11.613,88.387,40.293,59.707,37.67,224,0.875,bicubic,-82.617,-58.437,-24 -dla102x2,11.600,88.400,41.267,58.733,41.28,224,0.875,bilinear,-82.370,-57.233,+3 -xception41,11.600,88.400,39.147,60.853,26.97,299,0.903,bicubic,-81.830,-59.283,+48 -botnet26t_256,11.587,88.413,40.133,59.867,12.49,256,0.950,bicubic,-81.923,-58.167,+39 -vit_small_patch32_224,11.480,88.520,39.547,60.453,22.88,224,0.900,bicubic,-80.550,-58.683,+135 -levit_128,11.387,88.613,40.240,59.760,9.21,224,0.900,bicubic,-81.943,-58.140,+53 -tf_efficientnet_el,11.373,88.627,42.040,57.960,10.59,300,0.904,bicubic,-83.027,-56.670,-47 -lambda_resnet26t,11.373,88.627,40.173,59.827,10.96,256,0.940,bicubic,-82.457,-58.477,+8 -efficientnet_b2_pruned,11.347,88.653,42.013,57.987,8.31,260,0.890,bicubic,-82.803,-56.517,-20 -xcit_nano_12_p16_384_dist,11.227,88.773,39.853,60.147,3.05,384,1.000,bicubic,-80.603,-58.167,+141 -halonet26t,11.120,88.880,38.813,61.187,12.48,256,0.950,bicubic,-82.890,-59.687,-10 -hrnet_w48,11.093,88.907,40.293,59.707,77.47,224,0.875,bilinear,-82.827,-58.317,-3 -vit_tiny_r_s16_p8_384,11.093,88.907,39.973,60.027,6.36,384,1.000,bicubic,-80.947,-58.317,+126 -gluon_resnet152_v1c,11.093,88.907,37.120,62.880,60.21,224,0.875,bicubic,-83.067,-61.520,-28 -dpn107,11.053,88.947,38.640,61.360,86.92,224,0.875,bicubic,-83.257,-59.830,-46 -ecaresnet50d_pruned,11.013,88.987,41.960,58.040,19.94,224,0.875,bicubic,-83.207,-56.770,-37 -mobilevitv2_100,11.013,88.987,40.653,59.347,4.90,256,0.888,bicubic,-82.287,-57.627,+48 -tf_efficientnetv2_b2,11.000,89.000,39.760,60.240,10.10,260,0.890,bicubic,-83.420,-58.810,-60 -adv_inception_v3,11.000,89.000,36.720,63.280,23.83,299,0.875,bicubic,-81.880,-61.420,+71 -tf_efficientnet_b0_ns,10.973,89.027,40.067,59.933,5.29,224,0.875,bicubic,-82.657,-58.573,+13 -xcit_tiny_12_p16_224,10.973,89.027,37.027,62.973,6.72,224,1.000,bicubic,-81.527,-61.213,+97 -resnetv2_50,10.960,89.040,39.333,60.667,25.55,224,0.950,bicubic,-83.470,-59.397,-65 -tf_inception_v3,10.813,89.187,36.840,63.160,23.83,299,0.875,bicubic,-82.507,-61.190,+39 -xcit_nano_12_p8_224_dist,10.800,89.200,38.120,61.880,3.05,224,1.000,bicubic,-81.300,-60.030,+113 -dpn131,10.720,89.280,37.187,62.813,79.25,224,0.875,bicubic,-83.270,-61.533,-23 -tf_efficientnet_b2_ap,10.533,89.467,40.107,59.893,9.11,260,0.890,bicubic,-83.957,-58.513,-77 -resnext50d_32x4d,10.413,89.587,39.733,60.267,25.05,224,0.875,bicubic,-83.777,-58.827,-44 -rexnet_130,10.400,89.600,41.547,58.453,7.56,224,0.875,bicubic,-83.500,-56.853,-17 -hrnet_w44,10.307,89.693,39.480,60.520,67.06,224,0.875,bilinear,-83.243,-59.220,+10 -xcit_nano_12_p8_224,10.293,89.707,37.000,63.000,3.05,224,1.000,bicubic,-80.727,-60.790,+146 -lambda_resnet26rpt_256,10.253,89.747,38.107,61.893,10.99,256,0.940,bicubic,-83.467,-60.413,-4 -resnext101_32x8d,10.173,89.827,37.800,62.200,88.79,224,0.875,bilinear,-83.647,-60.780,-14 -regnetx_160,10.147,89.853,38.000,62.000,54.28,224,0.875,bicubic,-83.983,-60.740,-42 -dpn98,10.133,89.867,36.587,63.413,61.57,224,0.875,bicubic,-83.987,-61.993,-42 -resnet50,10.120,89.880,37.907,62.093,25.56,224,0.950,bicubic,-84.220,-60.533,-69 -legacy_seresnext50_32x4d,10.093,89.907,39.200,60.800,27.56,224,0.875,bilinear,-83.637,-59.380,-11 -resnetrs50,10.067,89.933,37.507,62.493,35.69,224,0.910,bicubic,-84.233,-61.133,-67 -inception_v3,10.027,89.973,35.227,64.773,23.83,299,0.875,bicubic,-82.693,-62.743,+65 
-xception,9.987,90.013,38.040,61.960,22.86,299,0.897,bicubic,-83.483,-60.490,+8
-efficientnet_b1,9.987,90.013,37.560,62.440,7.79,256,1.000,bicubic,-83.253,-60.740,+28
-dpn68b,9.787,90.213,38.027,61.973,12.61,224,0.875,bicubic,-83.903,-60.493,-12
-gluon_resnet152_v1b,9.747,90.253,36.080,63.920,60.19,224,0.875,bicubic,-84.323,-62.380,-46
-tf_efficientnet_lite3,9.653,90.347,38.987,61.013,8.20,300,0.904,bilinear,-84.557,-59.653,-63
-tf_efficientnet_b2,9.653,90.347,38.893,61.107,9.11,260,0.890,bicubic,-84.707,-59.717,-80
-tf_efficientnet_cc_b1_8e,9.600,90.400,36.787,63.213,39.72,240,0.882,bicubic,-84.310,-61.473,-35
-res2net101_26w_4s,9.520,90.480,35.027,64.973,45.21,224,0.875,bilinear,-84.230,-63.283,-23
-legacy_seresnet152,9.333,90.667,37.427,62.573,66.82,224,0.875,bilinear,-84.057,-60.913,+8
-cspresnet50,9.293,90.707,39.613,60.387,21.62,256,0.887,bilinear,-84.437,-59.027,-24
-resnet33ts,9.240,90.760,38.667,61.333,19.68,256,0.900,bicubic,-84.360,-59.863,-14
-hrnet_w40,9.227,90.773,36.880,63.120,57.56,224,0.875,bilinear,-84.263,-61.700,-4
-regnetx_120,9.187,90.813,37.187,62.813,46.11,224,0.875,bicubic,-85.053,-61.463,-75
-seresnext26d_32x4d,9.147,90.853,36.813,63.187,16.81,224,0.875,bicubic,-83.543,-61.337,+53
-crossvit_tiny_240,9.107,90.893,34.600,65.400,7.01,240,0.875,bicubic,-81.133,-62.990,+141
-resnest26d,9.067,90.933,37.840,62.160,17.07,224,0.875,bilinear,-84.263,-60.790,+4
-vit_tiny_patch16_224,9.053,90.947,34.627,65.373,5.72,224,0.900,bicubic,-82.717,-63.413,+99
-vit_base_patch16_224_sam,8.987,91.013,36.173,63.827,86.57,224,0.900,bicubic,-85.153,-62.497,-66
-gluon_resnext50_32x4d,8.973,91.027,36.307,63.693,25.03,224,0.875,bicubic,-84.837,-62.103,-38
-rexnet_100,8.907,91.093,36.373,63.627,4.80,224,0.875,bicubic,-84.123,-61.817,+24
-seresnext26t_32x4d,8.893,91.107,36.893,63.107,16.81,224,0.875,bicubic,-83.927,-61.477,+36
-bat_resnext26ts,8.867,91.133,36.413,63.587,10.73,256,0.900,bicubic,-84.463,-61.937,0
-mixnet_l,8.853,91.147,36.213,63.787,7.33,224,0.875,bicubic,-84.597,-62.007,-11
-mobilenetv3_large_100_miil,8.853,91.147,33.080,66.920,5.48,224,0.875,bilinear,-83.417,-64.560,+68
-convit_tiny,8.813,91.187,34.360,65.640,5.71,224,0.875,bicubic,-81.827,-63.380,+125
-resnet32ts,8.760,91.240,37.227,62.773,17.96,256,0.900,bicubic,-84.700,-61.263,-16
-gcresnext26ts,8.707,91.293,35.733,64.267,10.48,256,0.900,bicubic,-84.073,-62.527,+32
-levit_128s,8.707,91.293,33.160,66.840,7.78,224,0.900,bicubic,-83.223,-64.910,+79
-dla169,8.653,91.347,35.947,64.053,53.39,224,0.875,bilinear,-84.687,-62.643,-10
-hrnet_w30,8.600,91.400,37.027,62.973,37.71,224,0.875,bilinear,-84.600,-61.383,+2
-mixer_b16_224,8.600,91.400,29.440,70.560,59.88,224,0.875,bicubic,-83.270,-67.810,+81
-legacy_seresnet101,8.533,91.467,36.013,63.987,49.33,224,0.875,bilinear,-84.757,-62.497,-4
-tf_efficientnet_b1_ap,8.453,91.547,35.227,64.773,7.79,240,0.882,bicubic,-85.227,-63.133,-41
-repvgg_b2,8.427,91.573,36.453,63.547,89.02,224,0.875,bilinear,-85.063,-62.277,-27
-resmlp_12_distilled_224,8.307,91.693,36.840,63.160,15.35,224,0.875,bicubic,-84.533,-61.300,+19
-resnetblur50,8.253,91.747,37.373,62.627,25.56,224,0.875,bicubic,-85.687,-61.207,-67
-crossvit_9_240,8.253,91.747,34.107,65.893,8.55,240,0.875,bicubic,-82.377,-63.633,+114
-dla102x,8.213,91.787,37.040,62.960,26.31,224,0.875,bilinear,-85.307,-61.460,-34
-eca_resnext26ts,8.080,91.920,35.960,64.040,10.30,256,0.900,bicubic,-84.530,-62.300,+35
-hrnet_w32,8.027,91.973,37.520,62.480,41.23,224,0.875,bilinear,-85.503,-60.940,-38
-cs3darknet_m,7.987,92.013,36.507,63.493,9.31,288,0.950,bicubic,-85.373,-62.093,-23
-gluon_resnet101_v1c,7.987,92.013,33.360,66.640,44.57,224,0.875,bicubic,-85.683,-65.060,-49
-gluon_resnet50_v1d,7.933,92.067,35.000,65.000,25.58,224,0.875,bicubic,-85.837,-63.390,-60
-dla60_res2next,7.827,92.173,34.987,65.013,17.03,224,0.875,bilinear,-85.343,-63.413,-10
-res2net50_26w_8s,7.827,92.173,33.720,66.280,48.40,224,0.875,bilinear,-85.593,-64.450,-31
-mobilevitv2_075,7.827,92.173,33.693,66.307,2.87,256,0.888,bicubic,-83.933,-64.167,+71
-mobilevit_xs,7.747,92.253,32.533,67.467,2.32,256,0.900,bicubic,-83.073,-65.387,+98
-densenetblur121d,7.733,92.267,34.773,65.227,8.00,224,0.875,bicubic,-84.177,-63.297,+61
-tf_efficientnetv2_b1,7.707,92.293,34.653,65.347,8.14,240,0.882,bicubic,-86.233,-63.967,-81
-deit_tiny_distilled_patch16_224,7.693,92.307,33.547,66.453,5.91,224,0.900,bicubic,-83.017,-64.023,+97
-dla60_res2net,7.600,92.400,34.600,65.400,20.85,224,0.875,bilinear,-85.560,-63.800,-16
-efficientnet_b1_pruned,7.427,92.573,34.507,65.493,6.33,240,0.882,bicubic,-85.343,-63.533,+8
-wide_resnet101_2,7.360,92.640,34.160,65.840,126.89,224,0.875,bilinear,-86.350,-64.380,-63
-regnetx_064,7.347,92.653,34.373,65.627,26.21,224,0.875,bicubic,-86.543,-64.257,-80
-deit_tiny_patch16_224,7.320,92.680,30.707,69.293,5.72,224,0.900,bicubic,-82.340,-66.743,+112
-hardcorenas_e,7.253,92.747,33.293,66.707,8.07,224,0.875,bilinear,-85.317,-64.807,+20
-gluon_resnet101_v1b,7.240,92.760,32.773,67.227,44.55,224,0.875,bicubic,-86.510,-65.607,-73
-efficientnet_b0,7.227,92.773,34.013,65.987,5.29,224,0.875,bicubic,-85.463,-64.057,+10
-gluon_resnet50_v1s,7.213,92.787,33.493,66.507,25.68,224,0.875,bicubic,-86.407,-64.967,-63
-tf_efficientnet_b1,7.147,92.853,33.053,66.947,7.79,240,0.882,bicubic,-86.353,-65.307,-54
-tf_mixnet_l,7.147,92.853,31.627,68.373,7.33,224,0.875,bicubic,-86.173,-66.403,-36
-tf_efficientnet_cc_b0_8e,7.120,92.880,31.827,68.173,24.01,224,0.875,bicubic,-85.710,-66.353,-7
-convmixer_1024_20_ks9_p14,7.080,92.920,33.067,66.933,24.38,224,0.960,bicubic,-85.350,-65.203,+20
-seresnext26ts,7.053,92.947,34.920,65.080,10.39,256,0.900,bicubic,-85.637,-63.370,+2
-resmlp_12_224,7.013,92.987,33.947,66.053,15.35,224,0.875,bicubic,-85.197,-64.213,+29
-cs3darknet_focus_m,6.947,93.053,34.640,65.360,9.30,288,0.950,bicubic,-86.023,-63.750,-19
-hardcorenas_f,6.853,93.147,34.067,65.933,8.20,224,0.875,bilinear,-86.107,-64.093,-18
-selecsls60b,6.733,93.267,33.253,66.747,32.77,224,0.875,bicubic,-86.557,-65.027,-39
-res2net50_26w_6s,6.720,93.280,31.640,68.360,37.05,224,0.875,bilinear,-86.690,-66.640,-54
-ese_vovnet19b_dw,6.707,93.293,33.400,66.600,6.54,224,0.875,bicubic,-85.573,-64.690,+21
-efficientnet_es,6.680,93.320,33.827,66.173,5.44,224,0.875,bicubic,-86.460,-64.593,-35
-tinynet_a,6.640,93.360,32.227,67.773,6.19,192,0.875,bicubic,-85.800,-65.853,+10
-mixnet_m,6.640,93.360,32.053,67.947,5.01,224,0.875,bicubic,-85.790,-65.807,+11
-pit_ti_distilled_224,6.627,93.373,30.747,69.253,5.10,224,0.900,bicubic,-84.273,-66.973,+68
-legacy_seresnext26_32x4d,6.613,93.387,33.280,66.720,16.79,224,0.875,bicubic,-86.027,-64.850,-4
-poolformer_s12,6.560,93.440,34.453,65.547,11.92,224,0.900,bicubic,-86.070,-63.747,-4
-repvgg_b1,6.467,93.533,33.813,66.187,57.42,224,0.875,bilinear,-86.853,-64.697,-54
-skresnet34,6.467,93.533,31.587,68.413,22.28,224,0.875,bicubic,-85.923,-66.563,+8
-dla60x,6.427,93.573,34.120,65.880,17.35,224,0.875,bilinear,-86.693,-64.390,-42
-hardcorenas_d,6.413,93.587,32.200,67.800,7.50,224,0.875,bilinear,-85.977,-65.880,+7
-resnet34d,6.400,93.600,31.507,68.493,21.82,224,0.875,bicubic,-86.280,-66.803,-12
-edgenext_x_small,6.373,93.627,29.720,70.280,2.34,256,0.900,bicubic,-84.717,-67.830,+53
-regnetx_080,6.307,93.693,32.333,67.667,39.57,224,0.875,bicubic,-87.563,-66.187,-108
-swsl_resnet18,6.253,93.747,31.600,68.400,11.69,224,0.875,bilinear,-84.437,-66.100,+65
-legacy_seresnet50,6.187,93.813,32.653,67.347,28.09,224,0.875,bilinear,-86.783,-65.537,-37
-resnet26t,6.133,93.867,32.227,67.773,16.01,256,0.940,bicubic,-86.617,-66.003,-24
-pit_ti_224,6.107,93.893,30.240,69.760,4.85,224,0.900,bicubic,-83.843,-67.200,+75
-tv_resnet152,6.027,93.973,32.067,67.933,60.19,224,0.875,bilinear,-87.283,-66.323,-62
-tf_efficientnet_cc_b0_4e,5.987,94.013,29.587,70.413,13.31,224,0.875,bicubic,-86.603,-68.493,-14
-regnetx_040,5.973,94.027,31.587,68.413,22.12,224,0.875,bicubic,-87.587,-66.963,-90
-tf_efficientnetv2_b0,5.893,94.107,30.773,69.227,7.14,224,0.875,bicubic,-87.217,-67.617,-51
-mixer_l16_224,5.867,94.133,18.533,81.467,208.20,224,0.875,bicubic,-81.273,-74.987,+98
-dla102,5.853,94.147,32.720,67.280,33.27,224,0.875,bilinear,-87.207,-65.830,-52
-selecsls60,5.667,94.333,32.493,67.507,30.67,224,0.875,bicubic,-87.353,-65.807,-49
-regnety_016,5.653,94.347,30.440,69.560,11.20,224,0.875,bicubic,-87.377,-67.910,-52
-res2next50,5.640,94.360,30.867,69.133,24.67,224,0.875,bilinear,-87.220,-67.323,-43
-hardcorenas_c,5.640,94.360,30.453,69.547,5.52,224,0.875,bilinear,-86.390,-67.387,+7
-hrnet_w18,5.493,94.507,30.987,69.013,21.30,224,0.875,bilinear,-86.827,-67.263,-10
-resnest14d,5.467,94.533,28.547,71.453,10.61,224,0.875,bilinear,-86.253,-69.323,+21
-tf_efficientnet_lite2,5.360,94.640,30.920,69.080,6.09,260,0.890,bicubic,-87.290,-67.310,-30
-tf_efficientnet_em,5.347,94.653,31.107,68.893,6.90,240,0.882,bicubic,-87.583,-67.093,-51
-tf_efficientnet_b0_ap,5.307,94.693,28.827,71.173,5.29,224,0.875,bicubic,-86.893,-69.193,-6
-gernet_s,5.293,94.707,30.107,69.893,8.17,224,0.875,bilinear,-86.847,-68.093,-5
-densenet121,5.293,94.707,29.893,70.107,7.98,224,0.875,bicubic,-86.287,-68.137,+17
-repvgg_b1g4,5.280,94.720,30.800,69.200,39.97,224,0.875,bilinear,-87.700,-67.630,-59
-xcit_nano_12_p16_224_dist,5.240,94.760,26.560,73.440,3.05,224,1.000,bicubic,-84.450,-70.540,+59
-res2net50_26w_4s,5.160,94.840,29.373,70.627,25.70,224,0.875,bilinear,-87.330,-68.687,-26
-vit_tiny_r_s16_p8_224,5.080,94.920,27.067,72.933,6.34,224,0.900,bicubic,-84.100,-70.163,+63
-mobilenetv3_large_100,5.067,94.933,28.187,71.813,5.48,224,0.875,bicubic,-86.263,-69.533,+17
-tf_mixnet_m,5.067,94.933,28.147,71.853,5.01,224,0.875,bicubic,-87.253,-69.743,-21
-tf_efficientnet_b0,5.053,94.947,28.720,71.280,5.29,224,0.875,bicubic,-87.197,-69.270,-18
-res2net50_14w_8s,5.040,94.960,28.773,71.227,25.06,224,0.875,bilinear,-87.700,-69.407,-48
-hardcorenas_b,4.947,95.053,28.067,71.933,5.18,224,0.875,bilinear,-86.813,-69.713,+5
-mixnet_s,4.920,95.080,28.547,71.453,4.13,224,0.875,bicubic,-86.900,-69.143,+1
-mobilenetv3_rw,4.907,95.093,29.853,70.147,5.48,224,0.875,bicubic,-86.303,-67.807,+14
-gluon_resnet50_v1c,4.893,95.107,28.147,71.853,25.58,224,0.875,bicubic,-88.137,-70.243,-74
-hardcorenas_a,4.880,95.120,28.093,71.907,5.26,224,0.875,bilinear,-86.470,-69.767,+8
-regnetx_032,4.853,95.147,30.253,69.747,15.30,224,0.875,bicubic,-88.267,-68.137,-80
-xcit_nano_12_p16_224,4.853,95.147,25.453,74.547,3.05,224,1.000,bicubic,-83.757,-71.337,+61
-tv_resnext50_32x4d,4.840,95.160,30.280,69.720,25.03,224,0.875,bilinear,-87.910,-68.000,-59
-densenet161,4.720,95.280,29.560,70.440,28.68,224,0.875,bicubic,-87.780,-68.730,-42
-tv_resnet101,4.720,95.280,29.373,70.627,44.55,224,0.875,bilinear,-88.100,-68.877,-64
-resnext26ts,4.680,95.320,29.013,70.987,10.30,256,0.900,bicubic,-87.190,-68.907,-12
-selecsls42b,4.667,95.333,28.613,71.387,32.46,224,0.875,bicubic,-87.613,-69.527,-34
-tf_efficientnet_lite1,4.613,95.387,28.413,71.587,5.42,240,0.882,bicubic,-88.007,-69.667,-52
-mobilenetv2_120d,4.533,95.467,29.293,70.707,5.83,224,0.875,bicubic,-87.867,-68.757,-41
-vit_base_patch32_224_sam,4.333,95.667,24.387,75.613,88.22,224,0.900,bicubic,-85.417,-72.613,+37
-tinynet_b,4.187,95.813,26.720,73.280,3.73,188,0.875,bicubic,-86.733,-70.950,+13
-efficientnet_es_pruned,4.187,95.813,26.547,73.453,5.44,224,0.875,bicubic,-86.993,-71.203,+2
-fbnetc_100,4.133,95.867,25.933,74.067,5.57,224,0.875,bilinear,-86.577,-71.277,+18
-densenet201,4.120,95.880,27.533,72.467,20.01,224,0.875,bicubic,-88.620,-70.697,-68
-gluon_resnet50_v1b,4.120,95.880,26.920,73.080,25.56,224,0.875,bicubic,-88.420,-71.250,-55
-resnet26d,4.040,95.960,28.507,71.493,16.01,224,0.875,bicubic,-88.030,-69.463,-33
-semnasnet_100,3.960,96.040,26.933,73.067,3.89,224,0.875,bicubic,-87.320,-70.627,-7
-repvgg_a2,3.933,96.067,27.280,72.720,28.21,224,0.875,bilinear,-88.007,-70.870,-29
-mobilevitv2_050,3.933,96.067,23.867,76.133,1.37,256,0.888,bicubic,-84.297,-73.123,+48
-tf_mixnet_s,3.893,96.107,25.280,74.720,4.13,224,0.875,bicubic,-87.617,-72.330,-15
-semnasnet_075,3.867,96.133,27.000,73.000,2.91,224,0.875,bicubic,-86.193,-70.430,+22
-dpn68,3.867,96.133,26.067,73.933,12.61,224,0.875,bicubic,-88.163,-71.983,-36
-mobilevit_xxs,3.827,96.173,21.707,78.293,1.27,256,0.900,bicubic,-83.363,-74.393,+49
-regnety_008,3.813,96.187,27.160,72.840,6.26,224,0.875,bicubic,-87.907,-71.020,-22
-tf_efficientnet_es,3.813,96.187,26.120,73.880,5.44,224,0.875,bicubic,-88.167,-71.740,-37
-edgenext_xx_small,3.787,96.213,23.693,76.307,1.33,256,0.900,bicubic,-84.563,-72.827,+40
-dla60,3.773,96.227,27.973,72.027,22.04,224,0.875,bilinear,-88.437,-70.127,-49
-ssl_resnet18,3.747,96.253,25.440,74.560,11.69,224,0.875,bilinear,-86.463,-72.110,+12
-mobilenetv2_140,3.720,96.280,26.747,73.253,6.11,224,0.875,bicubic,-88.110,-71.103,-32
-densenet169,3.693,96.307,25.600,74.400,14.15,224,0.875,bicubic,-88.227,-72.500,-39
-regnetx_016,3.613,96.387,26.253,73.747,9.19,224,0.875,bicubic,-88.547,-71.947,-51
-res2net50_48w_2s,3.587,96.413,26.613,73.387,25.29,224,0.875,bilinear,-88.953,-71.467,-71
-tf_mobilenetv3_large_100,3.560,96.440,25.093,74.907,5.48,224,0.875,bilinear,-87.660,-72.567,-22
-spnasnet_100,3.560,96.440,24.293,75.707,4.42,224,0.875,bilinear,-86.780,-72.897,+4
-regnety_006,3.467,96.533,24.893,75.107,6.06,224,0.875,bicubic,-87.903,-72.817,-28
-legacy_seresnet34,3.333,96.667,23.813,76.187,21.96,224,0.875,bilinear,-87.567,-73.767,-10
-efficientnet_lite0,3.253,96.747,25.880,74.120,4.65,224,0.875,bicubic,-87.857,-71.750,-20
-dla34,3.253,96.747,23.613,76.387,15.74,224,0.875,bilinear,-87.527,-74.047,-9
-ghostnet_100,3.227,96.773,24.867,75.133,5.18,224,0.875,bilinear,-86.803,-72.503,+5
-regnety_004,3.200,96.800,22.667,77.333,4.34,224,0.875,bicubic,-87.310,-74.873,-5
-mobilenetv2_110d,3.187,96.813,24.573,75.427,4.52,224,0.875,bicubic,-87.773,-72.987,-18
-mnasnet_100,3.107,96.893,24.200,75.800,4.38,224,0.875,bicubic,-87.403,-73.270,-6
-tinynet_c,3.107,96.893,21.533,78.467,2.46,184,0.875,bicubic,-84.673,-74.837,+25
-tf_efficientnet_lite0,3.093,96.907,22.920,77.080,4.65,224,0.875,bicubic,-87.947,-74.670,-24
-skresnet18,3.000,97.000,22.773,77.227,11.96,224,0.875,bicubic,-86.660,-74.467,+6
-vgg19_bn,2.947,97.053,23.480,76.520,143.68,224,0.875,bilinear,-87.133,-74.100,-4
-resnet34,2.920,97.080,23.693,76.307,21.80,224,0.875,bilinear,-88.210,-73.927,-32
-tf_mobilenetv3_large_075,2.867,97.133,21.560,78.440,3.99,224,0.875,bilinear,-86.813,-75.650,+1
-tinynet_d,2.867,97.133,17.787,82.213,2.34,152,0.875,bicubic,-81.893,-77.393,+33
-resnet14t,2.760,97.240,19.280,80.720,10.08,224,0.950,bilinear,-86.280,-77.320,+6
-hrnet_w18_small_v2,2.707,97.293,23.707,76.293,15.60,224,0.875,bilinear,-88.483,-74.193,-38
-regnetx_008,2.667,97.333,22.467,77.533,7.26,224,0.875,bicubic,-88.383,-75.243,-33
-gluon_resnet34_v1b,2.667,97.333,21.667,78.333,21.80,224,0.875,bicubic,-88.293,-75.973,-31
-vgg16_bn,2.653,97.347,23.787,76.213,138.37,224,0.875,bilinear,-87.437,-73.583,-13
-vgg16,2.627,97.373,20.427,79.573,138.36,224,0.875,bilinear,-85.923,-76.363,+9
-lcnet_100,2.613,97.387,20.867,79.133,2.95,224,0.875,bicubic,-86.177,-75.863,+5
-resnet18d,2.600,97.400,21.600,78.400,11.71,224,0.875,bicubic,-86.680,-75.540,-4
-tv_densenet121,2.560,97.440,22.667,77.333,7.98,224,0.875,bicubic,-88.330,-75.043,-31
-repvgg_b0,2.547,97.453,24.000,76.000,15.82,224,0.875,bilinear,-88.853,-73.990,-53
-regnetx_006,2.507,97.493,20.627,79.373,6.20,224,0.875,bicubic,-87.853,-76.803,-23
-legacy_seresnet18,2.493,97.507,20.080,79.920,11.78,224,0.875,bicubic,-86.387,-76.900,-1
-resnet26,2.480,97.520,23.053,76.947,16.00,224,0.875,bicubic,-88.640,-74.697,-46
-lcnet_075,2.307,97.693,17.160,82.840,2.36,224,0.875,bicubic,-83.683,-78.530,+16
-mobilenetv3_small_075,2.293,97.707,15.907,84.093,2.04,224,0.875,bicubic,-80.747,-78.193,+24
-regnety_002,2.160,97.840,18.893,81.107,3.16,224,0.875,bicubic,-85.220,-77.697,+6
-mobilenetv2_100,2.147,97.853,19.907,80.093,3.50,224,0.875,bicubic,-87.463,-77.243,-14
-vgg19,2.107,97.893,20.747,79.253,143.67,224,0.875,bilinear,-86.933,-76.123,-12
-vgg13_bn,2.093,97.907,20.307,79.693,133.05,224,0.875,bilinear,-86.667,-76.663,-6
-tf_mobilenetv3_small_100,2.013,97.987,15.853,84.147,2.54,224,0.875,bilinear,-83.177,-79.917,+12
-mobilenetv3_small_100,2.000,98.000,17.080,82.920,2.54,224,0.875,bicubic,-83.220,-78.550,+10
-tf_mobilenetv3_small_075,2.000,98.000,14.813,85.187,2.04,224,0.875,bilinear,-81.520,-79.987,+16
-regnetx_004,1.947,98.053,19.160,80.840,5.16,224,0.875,bicubic,-86.953,-77.960,-13
-tv_resnet34,1.867,98.133,20.000,80.000,21.80,224,0.875,bilinear,-88.063,-77.340,-27
+eva_giant_patch14_560.m30m_ft_in22k_in1k,87.533,12.467,96.893,3.107,"1,014.45",560,1.000,bicubic,-11.287,-2.927,0
+eva_giant_patch14_336.clip_ft_in1k,85.280,14.720,95.733,4.267,"1,013.01",336,1.000,bicubic,-13.540,-4.167,0
+eva_giant_patch14_336.m30m_ft_in22k_in1k,85.160,14.840,96.360,3.640,"1,013.01",336,1.000,bicubic,-13.650,-3.540,0
+tf_efficientnet_l2.ns_jft_in1k,84.760,15.240,96.147,3.853,480.31,800,0.960,bicubic,-13.790,-3.673,+6
+eva_large_patch14_336.in22k_ft_in22k_in1k,83.853,16.147,95.347,4.653,304.53,336,1.000,bicubic,-14.887,-4.463,-1
+maxvit_xlarge_tf_512.in21k_ft_in1k,83.400,16.600,95.533,4.467,475.77,512,1.000,bicubic,-15.220,-4.257,+1
+tf_efficientnet_l2.ns_jft_in1k_475,83.373,16.627,95.453,4.547,480.31,475,0.936,bicubic,-15.127,-4.327,+7
+eva_large_patch14_336.in22k_ft_in1k,82.747,17.253,95.520,4.480,304.53,336,1.000,bicubic,-15.963,-4.350,-3
+maxvit_large_tf_512.in21k_ft_in1k,81.747,18.253,95.040,4.960,212.33,512,1.000,bicubic,-16.873,-4.760,-1
+beit_large_patch16_512.in22k_ft_in22k_in1k,81.613,18.387,94.880,5.120,305.67,512,1.000,bicubic,-16.947,-4.960,-1
+maxvit_base_tf_512.in21k_ft_in1k,81.333,18.667,94.467,5.533,119.88,512,1.000,bicubic,-17.297,-5.333,-5
+maxvit_xlarge_tf_384.in21k_ft_in1k,81.067,18.933,94.640,5.360,475.32,384,1.000,bicubic,-17.433,-5.190,+3
+eva_giant_patch14_224.clip_ft_in1k,80.813,19.187,94.320,5.680,"1,012.56",224,1.000,bicubic,-17.667,-5.500,+4
+deit3_large_patch16_384_in21ft1k,79.213,20.787,93.627,6.373,304.76,384,1.000,bicubic,-19.247,-6.133,+4
+beit_large_patch16_384.in22k_ft_in22k_in1k,79.120,20.880,94.280,5.720,305.00,384,1.000,bicubic,-19.400,-5.540,-3
+maxvit_large_tf_384.in21k_ft_in1k,78.027,21.973,93.280,6.720,212.03,384,1.000,bicubic,-20.463,-6.470,0
+vit_large_patch14_clip_336.openai_ft_in12k_in1k,77.293,22.707,93.613,6.387,304.53,336,1.000,bicubic,-20.967,-6.157,+8
+maxvit_base_tf_384.in21k_ft_in1k,76.813,23.187,92.587,7.413,119.65,384,1.000,bicubic,-21.707,-7.163,-5
+beitv2_large_patch16_224.in1k_ft_in22k_in1k,76.733,23.267,93.160,6.840,304.43,224,0.950,bicubic,-21.807,-6.600,-8
+eva_large_patch14_196.in22k_ft_in22k_in1k,75.547,24.453,91.747,8.253,304.14,196,1.000,bicubic,-22.883,-8.063,0
+vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,74.227,25.773,92.267,7.733,632.46,336,1.000,bicubic,-24.203,-7.503,-2
+swinv2_large_window12to24_192to384_22kft1k,73.867,26.133,91.747,8.253,196.74,384,1.000,bicubic,-24.283,-7.943,+13
+eva_large_patch14_196.in22k_ft_in1k,73.187,26.813,91.440,8.560,304.14,196,1.000,bicubic,-25.163,-8.380,-1
+vit_large_patch14_clip_224.openai_ft_in12k_in1k,72.333,27.667,90.827,9.173,304.20,224,1.000,bicubic,-25.887,-8.893,+4
+vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,71.853,28.147,90.213,9.787,304.53,336,1.000,bicubic,-26.477,-9.547,-2
+vit_large_patch14_clip_224.openai_ft_in1k,71.760,28.240,91.480,8.520,304.20,224,1.000,bicubic,-26.400,-8.180,+8
+deit3_base_patch16_384_in21ft1k,71.293,28.707,89.960,10.040,86.88,384,1.000,bicubic,-26.547,-9.720,+24
+swinv2_base_window12to24_192to384_22kft1k,71.267,28.733,91.293,8.707,87.92,384,1.000,bicubic,-26.873,-8.487,+8
+vit_large_patch16_384.augreg_in21k_ft_in1k,71.227,28.773,89.840,10.160,304.72,384,1.000,bicubic,-26.993,-9.890,-2
+vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,70.733,29.267,90.387,9.613,632.05,224,1.000,bicubic,-27.537,-9.373,-6
+deit3_huge_patch14_224_in21ft1k,70.240,29.760,90.720,9.280,632.13,224,1.000,bicubic,-27.930,-9.040,+2
+volo_d5_512,69.653,30.347,90.427,9.573,296.09,512,1.150,bicubic,-28.117,-9.243,+25
+swin_large_patch4_window12_384,69.627,30.373,89.560,10.440,196.74,384,1.000,bicubic,-28.413,-10.130,+8
+convnext_xlarge.fb_in22k_ft_in1k_384,69.320,30.680,89.293,10.707,350.20,384,1.000,bicubic,-29.100,-10.517,-13
+deit3_large_patch16_224_in21ft1k,68.693,31.307,90.000,10.000,304.37,224,1.000,bicubic,-29.477,-9.730,-3
+beit_large_patch16_224.in22k_ft_in22k_in1k,68.507,31.493,89.560,10.440,304.43,224,0.900,bicubic,-29.673,-10.200,-5
+volo_d5_448,68.107,31.893,89.707,10.293,295.91,448,1.150,bicubic,-29.653,-9.913,+21
+maxvit_base_tf_512.in1k,67.920,32.080,88.507,11.493,119.88,512,1.000,bicubic,-29.820,-11.103,+22
+maxvit_large_tf_512.in1k,67.867,32.133,87.640,12.360,212.33,512,1.000,bicubic,-29.963,-11.920,+14
+tf_efficientnetv2_xl.in21k_ft_in1k,67.787,32.213,87.373,12.627,208.12,512,1.000,bicubic,-30.123,-12.197,+5
+swinv2_large_window12to16_192to256_22kft1k,67.293,32.707,88.027,11.973,196.74,256,0.900,bicubic,-30.567,-11.643,+9
+vit_large_patch14_clip_336.laion2b_ft_in1k,67.040,32.960,89.453,10.547,304.53,336,1.000,bicubic,-31.180,-10.347,-14
+tf_efficientnet_b7.ns_jft_in1k,67.040,32.960,88.667,11.333,66.35,600,0.949,bicubic,-30.870,-11.053,+2
+vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,67.027,32.973,87.987,12.013,304.20,224,1.000,bicubic,-31.053,-11.773,-5
+convnext_xlarge.fb_in22k_ft_in1k,66.947,33.053,88.947,11.053,350.20,288,1.000,bicubic,-31.173,-10.803,-8
+volo_d4_448,66.640,33.360,88.987,11.013,193.41,448,1.150,bicubic,-31.030,-10.623,+18
+beit_base_patch16_384.in22k_ft_in22k_in1k,65.880,34.120,88.507,11.493,86.74,384,1.000,bicubic,-31.940,-11.193,+7
+vit_huge_patch14_clip_224.laion2b_ft_in1k,65.560,34.440,87.707,12.293,632.05,224,1.000,bicubic,-32.460,-12.013,-6
+convnext_large.fb_in22k_ft_in1k_384,65.547,34.453,87.467,12.533,197.77,384,1.000,bicubic,-32.683,-12.283,-23
+volo_d3_448,65.440,34.560,87.573,12.427,86.63,448,1.000,bicubic,-32.110,-11.977,+28
+convnext_large.fb_in22k_ft_in1k,65.027,34.973,87.933,12.067,197.77,288,1.000,bicubic,-33.093,-11.847,-13
+tf_efficientnetv2_l.in21k_ft_in1k,64.973,35.027,87.813,12.187,118.52,480,1.000,bicubic,-32.827,-11.957,+4
+swin_base_patch4_window12_384,64.480,35.520,87.493,12.507,87.90,384,1.000,bicubic,-33.410,-12.217,-6
+vit_base_patch16_384.augreg_in21k_ft_in1k,63.693,36.307,86.707,13.293,86.86,384,1.000,bicubic,-34.147,-12.963,-2
+maxvit_large_tf_384.in1k,63.467,36.533,85.107,14.893,212.03,384,1.000,bicubic,-34.103,-14.453,+21
+swinv2_base_window12to16_192to256_22kft1k,63.227,36.773,87.493,12.507,87.92,256,0.900,bicubic,-34.423,-12.087,+9
+maxvit_small_tf_512.in1k,62.867,37.133,86.293,13.707,69.13,512,1.000,bicubic,-34.883,-13.257,+2
+maxvit_base_tf_384.in1k,62.600,37.400,85.200,14.800,119.65,384,1.000,bicubic,-34.970,-14.390,+16
+cait_m48_448,62.373,37.627,86.453,13.547,356.46,448,1.000,bicubic,-35.107,-13.147,+27
+convnext_base.fb_in22k_ft_in1k_384,62.347,37.653,86.213,13.787,88.59,384,1.000,bicubic,-35.723,-13.437,-20
+tf_efficientnet_b6.ns_jft_in1k,62.267,37.733,85.173,14.827,43.04,528,0.942,bicubic,-35.363,-14.407,+7
+vit_base_patch8_224.augreg2_in21k_ft_in1k,62.093,37.907,85.867,14.133,86.58,224,0.900,bicubic,-35.617,-13.783,0
+beitv2_base_patch16_224.in1k_ft_in22k_in1k,61.693,38.307,85.507,14.493,86.53,224,0.900,bicubic,-35.997,-14.173,0
+vit_large_r50_s32_384.augreg_in21k_ft_in1k,61.507,38.493,83.960,16.040,329.09,384,1.000,bicubic,-36.353,-15.690,-15
+ig_resnext101_32x48d,61.013,38.987,83.347,16.653,828.41,224,0.875,bilinear,-36.607,-16.353,+4
+convnext_base.fb_in22k_ft_in1k,60.960,39.040,86.133,13.867,88.59,288,1.000,bicubic,-36.900,-13.547,-18
+swin_large_patch4_window7_224,60.893,39.107,85.840,14.160,196.53,224,0.900,bicubic,-36.757,-13.880,-1
+resnetv2_152x4_bitm,60.787,39.213,83.573,16.427,936.53,480,1.000,bilinear,-36.703,-16.037,+16
+deit3_large_patch16_384,60.507,39.493,85.693,14.307,304.76,384,1.000,bicubic,-36.913,-13.927,+21
+tf_efficientnet_b5.ns_jft_in1k,60.320,39.680,84.493,15.507,30.39,456,0.934,bicubic,-37.180,-15.137,+13
+vit_base_patch16_clip_384.openai_ft_in12k_in1k,60.253,39.747,84.613,15.387,86.86,384,0.950,bicubic,-37.947,-15.047,-41
+vit_large_patch14_clip_224.laion2b_ft_in1k,59.893,40.107,85.733,14.267,304.20,224,1.000,bicubic,-38.007,-13.917,-26
+xcit_large_24_p8_384_dist,59.893,40.107,85.493,14.507,188.93,384,1.000,bicubic,-37.627,-14.007,+7
+tf_efficientnetv2_m.in21k_ft_in1k,59.387,40.613,84.573,15.427,54.14,480,1.000,bicubic,-38.433,-15.027,-19
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,59.160,40.840,83.267,16.733,86.86,384,1.000,bicubic,-38.850,-16.393,-33
+dm_nfnet_f6,59.160,40.840,82.333,17.667,438.36,576,0.956,bicubic,-38.440,-17.217,-4
+vit_base_patch8_224.augreg_in21k_ft_in1k,58.933,41.067,82.733,17.267,86.58,224,0.900,bicubic,-38.647,-16.777,-5
+maxvit_tiny_tf_512.in1k,58.840,41.160,84.547,15.453,31.05,512,1.000,bicubic,-38.730,-14.983,-3
+volo_d2_384,58.613,41.387,84.267,15.733,58.87,384,1.000,bicubic,-38.707,-15.213,+22
+dm_nfnet_f5,58.573,41.427,82.773,17.227,377.21,544,0.954,bicubic,-38.967,-16.797,-1
+dm_nfnet_f4,58.120,41.880,81.987,18.013,316.07,512,0.951,bicubic,-39.460,-17.683,-8
+ig_resnext101_32x32d,58.093,41.907,80.653,19.347,468.53,224,0.875,bilinear,-39.267,-18.877,+14
+cait_m36_384,57.840,42.160,84.813,15.187,271.22,384,1.000,bicubic,-39.560,-14.697,+11
+deit3_base_patch16_224_in21ft1k,57.253,42.747,83.520,16.480,86.59,224,1.000,bicubic,-40.227,-16.030,+1
+volo_d5_224,57.147,42.853,82.720,17.280,295.46,224,0.960,bicubic,-40.233,-16.850,+10
+deit3_small_patch16_384_in21ft1k,57.080,42.920,83.053,16.947,22.21,384,1.000,bicubic,-40.050,-16.447,+33
+xcit_medium_24_p8_384_dist,56.680,43.320,83.413,16.587,84.32,384,1.000,bicubic,-40.610,-16.097,+18
+maxvit_small_tf_384.in1k,56.587,43.413,82.307,17.693,69.02,384,1.000,bicubic,-40.833,-17.203,+3
+dm_nfnet_f3,55.827,44.173,80.947,19.053,254.92,416,0.940,bicubic,-41.523,-18.613,+10
+vit_large_patch16_224.augreg_in21k_ft_in1k,55.627,44.373,80.093,19.907,304.33,224,0.900,bicubic,-42.013,-19.497,-23
+vit_base_patch16_clip_384.openai_ft_in1k,54.960,45.040,82.600,17.400,86.86,384,1.000,bicubic,-42.590,-17.060,-14
+vit_base_r50_s16_384.orig_in21k_ft_in1k,54.627,45.373,81.213,18.787,98.95,384,1.000,bicubic,-42.553,-18.347,+25
+cait_s36_384,54.413,45.587,81.360,18.640,68.37,384,1.000,bicubic,-42.917,-18.170,+7
+deit3_huge_patch14_224,54.320,45.680,82.093,17.907,632.13,224,0.900,bicubic,-42.560,-17.437,+61
+volo_d1_384,54.307,45.693,80.973,19.027,26.78,384,1.000,bicubic,-42.613,-18.437,+55
+xcit_small_24_p8_384_dist,54.280,45.720,81.533,18.467,47.63,384,1.000,bicubic,-42.960,-18.067,+14
+vit_base_patch16_clip_384.laion2b_ft_in1k,54.227,45.773,80.893,19.107,86.86,384,1.000,bicubic,-43.503,-18.737,-36
+vit_medium_patch16_gap_384.in12k_ft_in1k,54.147,45.853,81.680,18.320,39.03,384,0.950,bicubic,-43.293,-17.960,-9
+resnetv2_101x3_bitm,54.027,45.973,81.027,18.973,387.93,448,1.000,bilinear,-42.963,-18.463,+39
+resnetv2_152x2_bitm,54.013,45.987,82.000,18.000,236.34,448,1.000,bilinear,-42.997,-17.590,+36
+deit3_base_patch16_384,53.440,46.560,80.560,19.440,86.88,384,1.000,bicubic,-43.580,-18.880,+34
+ig_resnext101_32x16d,53.067,46.933,76.907,23.093,194.03,224,0.875,bilinear,-43.753,-22.503,+56
+volo_d4_224,52.920,47.080,80.467,19.533,192.96,224,0.960,bicubic,-44.380,-19.033,0
+xcit_large_24_p16_384_dist,52.827,47.173,81.827,18.173,189.10,384,1.000,bicubic,-44.693,-17.653,-22
+convnext_small.fb_in22k_ft_in1k_384,52.467,47.533,80.827,19.173,50.22,384,1.000,bicubic,-45.133,-18.773,-35
+vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,52.307,47.693,79.773,20.227,88.34,448,1.000,bicubic,-45.013,-19.827,-4
+maxvit_tiny_tf_384.in1k,52.133,47.867,79.800,20.200,30.98,384,1.000,bicubic,-45.167,-19.720,-3
+swin_base_patch4_window7_224,51.453,48.547,79.973,20.027,87.77,224,0.900,bicubic,-45.797,-19.517,0
+efficientnet_b5.in12k_ft_in1k,51.253,48.747,78.853,21.147,30.39,448,1.000,bicubic,-46.157,-20.747,-16
+tf_efficientnet_b4.ns_jft_in1k,51.213,48.787,79.187,20.813,19.34,380,0.922,bicubic,-45.737,-20.073,+34
+flexivit_large.1200ep_in1k,51.200,48.800,80.693,19.307,304.36,240,0.950,bicubic,-46.210,-18.847,-19
+resnetv2_152x2_bit_teacher_384,51.187,48.813,78.493,21.507,236.34,384,1.000,bicubic,-45.643,-20.957,+45
+swsl_resnext101_32x8d,51.187,48.813,78.240,21.760,88.79,224,0.875,bilinear,-46.013,-21.260,0
+convnext_small.fb_in22k_ft_in1k,51.120,48.880,80.867,19.133,50.22,288,1.000,bicubic,-46.240,-18.813,-17
+mvitv2_large,50.907,49.093,78.467,21.533,217.99,224,0.900,bicubic,-46.043,-20.933,+30
+beit_base_patch16_224.in22k_ft_in22k_in1k,50.707,49.293,79.693,20.307,86.53,224,0.900,bicubic,-46.383,-19.917,+9
+tf_efficientnetv2_l.in1k,50.680,49.320,77.613,22.387,118.52,480,1.000,bicubic,-46.790,-21.917,-30
+vit_base_patch16_384.orig_in21k_ft_in1k,50.613,49.387,78.200,21.800,86.86,384,1.000,bicubic,-46.087,-21.090,+57
+xcit_small_12_p8_384_dist,50.573,49.427,79.573,20.427,26.21,384,1.000,bicubic,-46.657,-19.907,-8
+flexivit_large.600ep_in1k,50.240,49.760,80.027,19.973,304.36,240,0.950,bicubic,-47.040,-19.563,-14
+volo_d3_224,50.240,49.760,78.173,21.827,86.33,224,0.960,bicubic,-46.850,-21.297,+6
+vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,50.093,49.907,78.080,21.920,86.57,224,0.950,bicubic,-47.357,-21.460,-34
+vit_base_patch16_224.augreg2_in21k_ft_in1k,49.840,50.160,78.987,21.013,86.57,224,0.900,bicubic,-47.310,-20.553,-5
+cait_s24_384,49.733,50.267,78.733,21.267,47.06,384,1.000,bicubic,-47.337,-20.697,+8
+vit_base_patch16_clip_224.openai_ft_in12k_in1k,49.733,50.267,77.040,22.960,86.57,224,0.950,bicubic,-47.787,-22.500,-44
+xcit_medium_24_p16_384_dist,49.333,50.667,79.813,20.187,84.40,384,1.000,bicubic,-47.937,-19.647,-20
+deit_base_distilled_patch16_384,49.333,50.667,79.253,20.747,87.63,384,1.000,bicubic,-47.627,-20.227,+16
+tf_efficientnet_b8.ra_in1k,48.947,51.053,77.240,22.760,87.41,672,0.954,bicubic,-48.253,-22.260,-12
+dm_nfnet_f2,48.920,51.080,77.160,22.840,193.78,352,0.920,bicubic,-48.100,-22.230,+5
+deit3_large_patch16_224,48.627,51.373,78.160,21.840,304.37,224,0.900,bicubic,-48.313,-21.180,+18
+flexivit_large.300ep_in1k,48.587,51.413,78.667,21.333,304.36,240,0.950,bicubic,-48.663,-20.863,-22
+tf_efficientnetv2_s.in21k_ft_in1k,48.507,51.493,77.880,22.120,21.46,384,1.000,bicubic,-48.223,-21.480,+37
+deit3_medium_patch16_224_in21ft1k,48.213,51.787,77.067,22.933,38.85,224,1.000,bicubic,-48.757,-22.363,+8
+resnest269e,48.187,51.813,74.333,25.667,110.93,416,0.928,bicubic,-48.333,-25.017,+75
+xcit_large_24_p8_224_dist,48.107,51.893,79.080,20.920,188.93,224,1.000,bicubic,-48.953,-20.340,-2
+vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,47.933,52.067,76.867,23.133,88.30,384,1.000,bicubic,-49.427,-22.653,-38
+regnetz_e8,47.813,52.187,76.200,23.800,57.70,320,1.000,bicubic,-49.387,-23.340,-22
+resnetv2_50x3_bitm,47.293,52.707,77.333,22.667,217.32,448,1.000,bilinear,-49.417,-22.137,+34
+vit_base_patch16_clip_224.openai_ft_in1k,47.227,52.773,77.627,22.373,86.57,224,0.900,bicubic,-49.853,-21.993,-9
+xcit_large_24_p8_224,47.173,52.827,74.400,25.600,188.93,224,1.000,bicubic,-49.237,-24.580,+78
+xcit_small_24_p16_384_dist,46.960,53.040,77.147,22.853,47.67,384,1.000,bicubic,-50.160,-22.313,-20
+tf_efficientnet_b8.ap_in1k,46.893,53.107,76.507,23.493,87.41,672,0.954,bicubic,-50.217,-23.153,-20
+convnext_large.fb_in1k,46.840,53.160,76.613,23.387,197.77,288,1.000,bicubic,-50.260,-22.837,-19
+efficientnetv2_rw_m.agc_in1k,46.280,53.720,75.707,24.293,53.24,416,1.000,bicubic,-50.700,-23.833,-4
+swinv2_base_window16_256,46.240,53.760,75.173,24.827,87.92,256,0.900,bicubic,-50.510,-24.177,+23
+swsl_resnext101_32x16d,46.200,53.800,72.200,27.800,194.03,224,0.875,bilinear,-50.400,-27.180,+49
+volo_d2_224,46.080,53.920,75.253,24.747,58.68,224,0.960,bicubic,-50.910,-24.137,-8
+vit_small_patch16_384.augreg_in21k_ft_in1k,45.933,54.067,76.720,23.280,22.20,384,1.000,bicubic,-50.767,-22.710,+28
+ecaresnet269d,45.893,54.107,75.133,24.867,102.09,352,1.000,bicubic,-51.187,-24.357,-18
+vit_small_r26_s32_384.augreg_in21k_ft_in1k,45.720,54.280,76.067,23.933,36.47,384,1.000,bicubic,-50.960,-23.283,+33
+tf_efficientnet_b7.ap_in1k,45.373,54.627,74.213,25.787,66.35,600,0.949,bicubic,-51.827,-25.357,-37
+dm_nfnet_f1,45.333,54.667,74.107,25.893,132.63,320,0.910,bicubic,-51.587,-25.413,-1
+ig_resnext101_32x8d,45.320,54.680,70.867,29.133,88.79,224,0.875,bilinear,-51.000,-28.563,+80
+xcit_medium_24_p8_224_dist,45.213,54.787,76.720,23.280,84.32,224,1.000,bicubic,-51.707,-22.670,-2
+eca_nfnet_l2,44.960,55.040,75.893,24.107,56.72,384,1.000,bicubic,-52.130,-23.617,-29
+maxxvit_rmlp_small_rw_256,44.547,55.453,75.053,24.947,66.01,256,0.950,bicubic,-52.263,-24.327,+4
+crossvit_18_dagger_408,44.293,55.707,73.840,26.160,44.61,408,1.000,bicubic,-52.237,-25.480,+51
+resnest200e,44.147,55.853,73.467,26.533,70.20,320,0.909,bicubic,-52.463,-25.883,+36
+cait_xs24_384,43.947,56.053,75.187,24.813,26.67,384,1.000,bicubic,-52.603,-24.233,+43
+seresnextaa101d_32x8d,43.933,56.067,73.387,26.613,93.59,288,1.000,bicubic,-53.017,-26.003,-14
+mvitv2_base,43.747,56.253,74.520,25.480,51.47,224,0.900,bicubic,-53.023,-24.930,+3
+resnetrs200,43.733,56.267,72.827,27.173,93.21,320,1.000,bicubic,-52.967,-26.683,+16
+tresnet_xl_448,43.480,56.520,72.453,27.547,78.44,448,0.875,bilinear,-52.490,-26.677,+124
+xcit_small_12_p16_384_dist,43.240,56.760,73.880,26.120,26.25,384,1.000,bicubic,-53.690,-25.520,-16
+vit_base_patch16_224.augreg_in21k_ft_in1k,43.240,56.760,72.920,27.080,86.57,224,0.900,bicubic,-53.640,-26.560,-10
+resnetrs420,43.147,56.853,70.453,29.547,191.89,416,1.000,bicubic,-53.763,-29.007,-13
+xcit_medium_24_p8_224,43.093,56.907,70.347,29.653,84.32,224,1.000,bicubic,-53.017,-28.813,+98
+vit_base_patch32_clip_384.openai_ft_in12k_in1k,43.040,56.960,73.227,26.773,88.30,384,0.950,bicubic,-54.070,-26.273,-45
+coatnet_rmlp_2_rw_224,43.000,57.000,71.680,28.320,73.88,224,0.950,bicubic,-53.540,-27.590,+37
+tf_efficientnet_b7.ra_in1k,42.960,57.040,73.133,26.867,66.35,600,0.949,bicubic,-54.050,-26.387,-33
+tf_efficientnetv2_m.in1k,42.867,57.133,72.627,27.373,54.14,480,1.000,bicubic,-54.343,-26.903,-59
+vit_medium_patch16_gap_256.in12k_ft_in1k,42.693,57.307,74.320,25.680,38.86,256,0.950,bicubic,-53.967,-25.010,+15
+gcvit_base,42.507,57.493,73.813,26.187,90.32,224,0.875,bicubic,-54.063,-25.417,+27
+xcit_tiny_24_p8_384_dist,42.453,57.547,72.880,27.120,12.11,384,1.000,bicubic,-54.097,-26.440,+29
+swinv2_small_window16_256,42.320,57.680,72.907,27.093,49.73,256,0.900,bicubic,-54.150,-26.293,+36
+maxvit_rmlp_small_rw_224,42.240,57.760,72.333,27.667,64.90,224,0.900,bicubic,-54.350,-26.937,+22
+convnext_base.fb_in1k,42.000,58.000,73.973,26.027,88.59,288,1.000,bicubic,-54.820,-25.617,-18
+maxvit_base_tf_224.in1k,41.947,58.053,70.107,29.893,119.47,224,0.950,bicubic,-55.003,-29.473,-31
+crossvit_15_dagger_408,41.907,58.093,72.067,27.933,28.50,408,1.000,bicubic,-54.483,-27.283,+42
+xcit_small_24_p8_224_dist,41.893,58.107,73.720,26.280,47.63,224,1.000,bicubic,-54.977,-25.760,-24
+maxvit_large_tf_224.in1k,41.840,58.160,68.613,31.387,211.79,224,0.950,bicubic,-55.120,-30.637,-38
+xcit_small_24_p8_224,41.760,58.240,71.013,28.987,47.63,224,1.000,bicubic,-54.640,-28.137,+37
+vit_large_r50_s32_224.augreg_in21k_ft_in1k,41.640,58.360,70.227,29.773,328.99,224,0.900,bicubic,-55.150,-29.123,-21
+vit_base_patch16_clip_224.laion2b_ft_in1k,41.573,58.427,73.627,26.373,86.57,224,1.000,bicubic,-55.557,-25.833,-64
+swsl_resnext101_32x4d,41.560,58.440,71.760,28.240,44.18,224,0.875,bilinear,-54.860,-27.710,+30
+swinv2_base_window8_256,41.507,58.493,72.440,27.560,87.92,256,0.900,bicubic,-55.033,-26.920,+19
+deit3_small_patch16_224_in21ft1k,41.227,58.773,71.920,28.080,22.06,224,1.000,bicubic,-55.433,-27.590,+1
+seresnext101d_32x8d,41.173,58.827,70.853,29.147,93.59,288,1.000,bicubic,-55.557,-28.567,-18
+maxvit_rmlp_tiny_rw_256,41.160,58.840,71.213,28.787,29.15,256,0.950,bicubic,-55.250,-28.177,+27
+convnext_tiny.fb_in22k_ft_in1k_384,41.013,58.987,72.507,27.493,28.59,384,1.000,bicubic,-56.067,-26.963,-61
+tf_efficientnet_b6.ap_in1k,40.800,59.200,71.627,28.373,43.04,528,0.942,bicubic,-56.280,-27.883,-63
+flexivit_base.1200ep_in1k,40.613,59.387,72.320,27.680,86.59,240,0.950,bicubic,-56.147,-27.050,-26
+resmlp_big_24_224_in22ft1k,40.373,59.627,74.760,25.240,129.14,224,0.875,bicubic,-56.247,-24.510,-3
+deit3_small_patch16_384,40.320,59.680,70.293,29.707,22.21,384,1.000,bicubic,-55.880,-28.997,+52
+tresnet_l_448,40.200,59.800,69.893,30.107,55.99,448,0.875,bilinear,-55.660,-29.227,+107
+deit_base_patch16_384,40.173,59.827,70.760,29.240,86.86,384,1.000,bicubic,-55.977,-28.420,+61
+regnetz_d8_evos,40.080,59.920,72.200,27.800,23.46,320,0.950,bicubic,-56.530,-27.250,-4
+regnetz_040h,40.013,59.987,71.320,28.680,28.94,320,1.000,bicubic,-56.697,-28.180,-25
+resnetrs350,39.960,60.040,68.907,31.093,163.96,384,1.000,bicubic,-56.800,-30.453,-34
+regnetz_d8,39.933,60.067,71.640,28.360,23.37,320,1.000,bicubic,-56.687,-27.810,-9
+flexivit_base.600ep_in1k,39.907,60.093,71.867,28.133,86.59,240,0.950,bicubic,-56.723,-27.463,-12
+swin_s3_base_224,39.787,60.213,70.467,29.533,71.13,224,0.900,bicubic,-56.463,-28.673,+38
+seresnext101_32x8d,39.587,60.413,69.440,30.560,93.57,288,1.000,bicubic,-57.173,-29.900,-36
+flexivit_base.300ep_in1k,39.547,60.453,70.987,29.013,86.59,240,0.950,bicubic,-57.073,-28.523,-12
+gcvit_small,39.427,60.573,70.520,29.480,51.09,224,0.875,bicubic,-56.873,-28.620,+30
+deit3_base_patch16_224,39.200,60.800,71.000,29.000,86.59,224,0.900,bicubic,-57.100,-28.180,+28
+volo_d1_224,38.960,61.040,70.240,29.760,26.63,224,0.960,bicubic,-57.370,-28.940,+24
+vit_large_patch32_384.orig_in21k_ft_in1k,38.933,61.067,68.920,31.080,306.63,384,1.000,bicubic,-56.897,-30.230,+96
+resnetv2_101x1_bitm,38.920,61.080,71.040,28.960,44.54,448,1.000,bilinear,-57.180,-28.210,+57
+regnetz_040,38.760,61.240,70.400,29.600,27.12,320,1.000,bicubic,-57.950,-29.150,-36
+mvitv2_small,38.747,61.253,70.413,29.587,34.87,224,0.900,bicubic,-57.613,-28.817,+14
+xcit_small_12_p8_224_dist,38.200,61.800,71.293,28.707,26.21,224,1.000,bicubic,-58.490,-28.077,-32
+resnet200d,38.147,61.853,68.613,31.387,64.69,320,1.000,bicubic,-58.573,-30.717,-42
+swinv2_small_window8_256,37.773,62.227,69.853,30.147,49.73,256,0.900,bicubic,-58.497,-29.357,+23
+xcit_large_24_p16_224_dist,37.693,62.307,71.587,28.413,189.10,224,1.000,bicubic,-59.107,-27.763,-54
+seresnet152d,37.640,62.360,69.480,30.520,66.84,320,1.000,bicubic,-59.130,-29.790,-53
+eca_nfnet_l1,37.533,62.467,70.947,29.053,41.41,320,1.000,bicubic,-59.167,-28.533,-38
+maxvit_small_tf_224.in1k,37.520,62.480,68.040,31.960,68.93,224,0.950,bicubic,-59.170,-31.310,-37
+xcit_small_12_p8_224,37.507,62.493,68.173,31.827,26.21,224,1.000,bicubic,-58.603,-30.717,+45
+twins_svt_large,37.200,62.800,69.227,30.773,99.27,224,0.900,bicubic,-59.070,-29.943,+18
+regnetz_d32,37.160,62.840,70.467,29.533,27.58,320,0.950,bicubic,-59.440,-29.053,-25
+vit_base_patch32_384.augreg_in21k_ft_in1k,37.080,62.920,69.760,30.240,88.30,384,1.000,bicubic,-59.410,-29.650,-12
+regnety_064,36.973,63.027,68.160,31.840,30.58,288,1.000,bicubic,-59.387,-31.010,+1
+swin_s3_small_224,36.880,63.120,68.240,31.760,49.74,224,0.900,bicubic,-59.340,-30.990,+19
+efficientnetv2_rw_s.ra2_in1k,36.787,63.213,68.320,31.680,23.94,384,1.000,bicubic,-59.753,-30.780,-21
+resnext101_64x4d,36.787,63.213,66.680,33.320,83.46,288,1.000,bicubic,-59.293,-32.560,+44
+regnety_160,36.747,63.253,69.107,30.893,83.59,288,1.000,bicubic,-59.603,-30.223,0
+convnext_small.fb_in1k,36.667,63.333,71.093,28.907,50.22,288,1.000,bicubic,-59.893,-28.247,-27
+pvt_v2_b4,36.640,63.360,68.667,31.333,62.56,224,0.900,bicubic,-59.690,-30.643,+3
+pvt_v2_b5,36.280,63.720,68.453,31.547,81.96,224,0.900,bicubic,-60.080,-30.937,-4
+cait_xxs36_384,36.227,63.773,67.800,32.200,17.37,384,1.000,bicubic,-59.623,-31.290,+72
+jx_nest_base,36.080,63.920,66.760,33.240,67.72,224,0.875,bicubic,-60.170,-32.450,+7
+coatnet_1_rw_224,36.040,63.960,67.080,32.920,41.72,224,0.950,bicubic,-59.990,-32.070,+46
+maxvit_tiny_rw_224,35.920,64.080,65.547,34.453,29.06,224,0.950,bicubic,-60.320,-33.573,+7
+cs3se_edgenet_x,35.653,64.347,67.787,32.213,50.72,320,1.000,bicubic,-60.787,-31.613,-21
+pit_b_distilled_224,35.627,64.373,69.120,30.880,74.79,224,0.900,bicubic,-61.053,-30.450,-51
+regnety_080,35.560,64.440,67.240,32.760,39.18,288,1.000,bicubic,-60.970,-32.020,-30
+sequencer2d_l,35.547,64.453,67.347,32.653,54.30,224,0.875,bicubic,-60.593,-31.863,+21
+tf_efficientnet_b3.ns_jft_in1k,35.520,64.480,67.773,32.227,12.23,300,0.904,bicubic,-60.870,-31.387,-19
+tf_efficientnet_b6.aa_in1k,35.213,64.787,67.720,32.280,43.04,528,0.942,bicubic,-61.457,-31.650,-54
+resnetrs270,35.013,64.987,65.480,34.520,129.86,352,1.000,bicubic,-61.677,-33.910,-59
+gcvit_tiny,34.907,65.093,66.853,33.147,28.22,224,0.875,bicubic,-61.263,-32.387,+8
+tf_efficientnet_b5.ap_in1k,34.787,65.213,67.493,32.507,30.39,456,0.934,bicubic,-61.893,-31.967,-59
+xcit_tiny_12_p8_384_dist,34.653,65.347,66.280,33.720,6.71,384,1.000,bicubic,-61.427,-32.860,+27
+vit_base_patch16_224_miil.in21k_ft_in1k,34.507,65.493,65.000,35.000,86.54,224,0.875,bilinear,-61.953,-34.300,-33
+xcit_medium_24_p16_224_dist,34.373,65.627,67.920,32.080,84.40,224,1.000,bicubic,-62.217,-31.190,-49
+resnet152d,34.320,65.680,65.907,34.093,60.21,320,1.000,bicubic,-62.040,-33.293,-24
+deit3_medium_patch16_224,34.187,65.813,66.027,33.973,38.85,224,0.900,bicubic,-61.883,-33.173,+24
+tresnet_m_448,34.107,65.893,64.493,35.507,31.39,448,0.875,bilinear,-60.883,-34.487,+176
+resmlp_big_24_distilled_224,34.067,65.933,69.600,30.400,129.14,224,0.875,bicubic,-62.383,-29.710,-37
+regnetv_064,33.973,66.027,67.867,32.133,30.58,288,1.000,bicubic,-62.437,-31.493,-34
+xcit_tiny_24_p16_384_dist,33.827,66.173,65.400,34.600,12.12,384,1.000,bicubic,-62.123,-33.760,+37
+pvt_v2_b3,33.653,66.347,67.653,32.347,45.24,224,0.900,bicubic,-62.337,-31.537,+29
+coatnet_rmlp_1_rw_224,33.533,66.467,65.627,34.373,41.69,224,0.950,bicubic,-62.417,-33.593,+37
+twins_pcpvt_large,33.387,66.613,67.933,32.067,60.99,224,0.900,bicubic,-62.763,-31.267,+1
+twins_svt_base,33.173,66.827,65.773,34.227,56.07,224,0.900,bicubic,-62.987,-33.287,-4
+pit_b_224,33.173,66.827,62.320,37.680,73.76,224,0.900,bicubic,-62.467,-36.670,+70
+resnetv2_152x2_bit_teacher,33.053,66.947,64.267,35.733,236.34,224,0.875,bicubic,-63.047,-35.013,+9
+swsl_resnext50_32x4d,33.013,66.987,65.067,34.933,25.03,224,0.875,bilinear,-62.857,-34.003,+38
+mobilevitv2_200_384_in22ft1k,32.987,67.013,65.507,34.493,18.45,384,1.000,bicubic,-63.053,-33.573,+14
+swinv2_cr_small_ns_224,32.933,67.067,65.960,34.040,49.70,224,0.900,bicubic,-63.247,-33.370,-13
+xception65,32.747,67.253,62.960,37.040,39.92,299,0.940,bicubic,-63.603,-36.280,-33
+xcit_large_24_p16_224,32.733,67.267,62.107,37.893,189.10,224,1.000,bicubic,-62.687,-36.733,+102
+swin_small_patch4_window7_224,32.600,67.400,65.440,34.560,49.61,224,0.900,bicubic,-63.310,-33.580,+29
+ssl_resnext101_32x16d,32.600,67.400,64.000,36.000,194.03,224,0.875,bilinear,-63.200,-35.100,+43
+mobilevitv2_175_384_in22ft1k,32.467,67.533,64.680,35.320,14.25,384,1.000,bicubic,-63.713,-34.460,-17
+tf_efficientnetv2_b3.in21k_ft_in1k,32.373,67.627,66.107,33.893,14.36,300,0.900,bicubic,-63.847,-32.973,-25
+jx_nest_small,32.280,67.720,63.760,36.240,38.35,224,0.875,bicubic,-63.680,-35.270,+20
+convnext_tiny_hnf.a2h_in1k,32.227,67.773,62.853,37.147,28.59,288,1.000,bicubic,-63.793,-36.337,+12
+vit_base_patch16_224.orig_in21k_ft_in1k,32.053,67.947,61.573,38.427,86.57,224,0.900,bicubic,-63.277,-37.427,+105
+maxvit_nano_rw_256,31.853,68.147,64.187,35.813,15.45,256,0.950,bicubic,-64.077,-34.953,+22
+tf_efficientnet_b5.ra_in1k,31.840,68.160,65.293,34.707,30.39,456,0.934,bicubic,-64.510,-34.017,-44
+swinv2_tiny_window16_256,31.707,68.293,65.613,34.387,28.35,256,0.900,bicubic,-64.223,-33.387,+19
+swinv2_cr_small_224,31.693,68.307,62.520,37.480,49.70,224,0.900,bicubic,-64.377,-36.350,-1
+regnetz_c16_evos,31.493,68.507,66.280,33.720,13.49,320,0.950,bicubic,-64.627,-32.960,-14
+maxvit_rmlp_nano_rw_256,31.440,68.560,63.373,36.627,15.50,256,0.950,bicubic,-64.540,-35.597,+9
+resnest101e,31.413,68.587,64.360,35.640,48.28,256,0.875,bilinear,-64.447,-34.820,+22
+maxxvit_rmlp_nano_rw_256,31.360,68.640,64.440,35.560,16.78,256,0.950,bicubic,-64.670,-34.610,-4
+crossvit_base_240,31.360,68.640,61.293,38.707,105.03,240,0.875,bicubic,-64.170,-37.567,+63
+regnetv_040,31.333,68.667,64.693,35.307,20.64,288,1.000,bicubic,-64.847,-34.437,-33
+cait_s24_224,31.200,68.800,64.560,35.440,46.92,224,1.000,bicubic,-65.180,-34.590,-59
+convnext_nano.in12k_ft_in1k,31.107,68.893,67.333,32.667,15.59,288,1.000,bicubic,-64.873,-31.987,+2
+efficientnet_b4.ra2_in1k,30.867,69.133,64.600,35.400,19.34,384,1.000,bicubic,-65.283,-34.650,-28
+regnety_040,30.613,69.387,63.840,36.160,20.65,288,1.000,bicubic,-65.407,-35.230,-4
+sequencer2d_m,30.600,69.400,62.920,37.080,38.31,224,0.875,bicubic,-65.210,-36.290,+21
+maxvit_tiny_tf_224.in1k,30.587,69.413,62.760,37.240,30.92,224,0.950,bicubic,-65.513,-36.520,-19
+crossvit_18_240,30.587,69.413,61.960,38.040,43.27,240,0.875,bicubic,-64.853,-37.090,+72
+vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,30.573,69.427,62.080,37.920,88.22,224,0.900,bicubic,-65.567,-37.080,-30
+dm_nfnet_f0,30.547,69.453,62.867,37.133,71.49,256,0.900,bicubic,-65.603,-36.273,-35
+crossvit_18_dagger_240,30.480,69.520,61.813,38.187,44.27,240,0.875,bicubic,-65.090,-37.247,+42
+xcit_small_24_p16_224_dist,30.440,69.560,64.720,35.280,47.67,224,1.000,bicubic,-65.770,-34.460,-47
+xcit_medium_24_p16_224,30.187,69.813,59.347,40.653,84.40,224,1.000,bicubic,-65.343,-39.423,+51
+mvitv2_tiny,30.160,69.840,64.333,35.667,24.17,224,0.900,bicubic,-65.710,-34.917,+5
+cait_xxs24_384,30.027,69.973,63.933,36.067,12.03,384,1.000,bicubic,-65.233,-35.027,+87
+twins_pcpvt_base,29.960,70.040,64.587,35.413,43.83,224,0.900,bicubic,-65.830,-34.543,+15
+convnext_tiny.fb_in1k,29.933,70.067,65.107,34.893,28.59,288,1.000,bicubic,-65.857,-34.053,+13
+swsl_resnet50,29.867,70.133,63.853,36.147,25.56,224,0.875,bilinear,-65.543,-35.437,+69
+cs3sedarknet_x,29.827,70.173,61.987,38.013,35.40,288,1.000,bicubic,-66.203,-37.273,-20
+mobilevitv2_150_384_in22ft1k,29.787,70.213,62.187,37.813,10.59,384,1.000,bicubic,-65.913,-36.863,+19
+vit_relpos_base_patch16_clsgap_224.sw_in1k,29.720,70.280,62.893,37.107,86.43,224,0.900,bicubic,-66.040,-36.147,+13
+deit_base_distilled_patch16_224,29.600,70.400,64.453,35.547,87.34,224,0.900,bicubic,-66.490,-34.737,-32
+convit_base,29.520,70.480,61.787,38.213,86.54,224,0.875,bicubic,-66.030,-37.203,+34
+vit_relpos_medium_patch16_cls_224.sw_in1k,29.347,70.653,60.653,39.347,38.76,224,0.900,bicubic,-66.133,-38.297,+49
+tf_efficientnetv2_s.in1k,29.040,70.960,61.213,38.787,21.46,384,1.000,bicubic,-67.300,-37.987,-75
+ssl_resnext101_32x8d,29.040,70.960,60.973,39.027,88.79,224,0.875,bilinear,-66.430,-38.137,+49
+edgenext_base,29.027,70.973,64.920,35.080,18.51,320,1.000,bicubic,-67.673,-34.450,-129
+convnext_tiny.fb_in22k_ft_in1k,29.000,71.000,55.640,44.360,28.59,288,1.000,bicubic,-65.860,-43.150,+131
+resnet101d,28.987,71.013,62.053,37.947,44.57,320,1.000,bicubic,-67.303,-37.177,-72
+xception65p,28.987,71.013,59.933,40.067,39.82,299,0.940,bicubic,-67.223,-39.277,-64
+regnetz_c16,28.920,71.080,63.333,36.667,13.46,320,0.940,bicubic,-66.880,-35.847,-2
+resnetrs152,28.920,71.080,60.520,39.480,86.62,320,1.000,bicubic,-67.660,-38.720,-112
+vit_relpos_medium_patch16_224.sw_in1k,28.853,71.147,61.987,38.013,38.75,224,0.900,bicubic,-66.607,-36.973,+42
+xcit_tiny_24_p8_224_dist,28.720,71.280,61.413,38.587,12.11,224,1.000,bicubic,-67.090,-37.697,-8
+xcit_tiny_24_p8_224,28.707,71.293,60.453,39.547,12.11,224,1.000,bicubic,-66.963,-38.597,+9
+cs3edgenet_x,28.533,71.467,61.147,38.853,47.82,288,1.000,bicubic,-67.497,-37.963,-39
+crossvit_15_dagger_240,28.533,71.467,60.293,39.707,28.21,240,0.875,bicubic,-67.157,-38.537,+5
+pvt_v2_b2_li,28.493,71.507,62.040,37.960,22.55,224,0.900,bicubic,-67.057,-36.830,+18
+xcit_small_24_p16_224,28.333,71.667,58.813,41.187,47.67,224,1.000,bicubic,-67.197,-39.927,+24
+efficientformer_l7,28.027,71.973,62.987,37.013,82.23,224,0.950,bicubic,-68.083,-36.283,-56
+flexivit_small.1200ep_in1k,27.840,72.160,58.667,41.333,22.06,240,0.950,bicubic,-67.720,-40.453,+13
+pvt_v2_b2,27.640,72.360,60.707,39.293,25.36,224,0.900,bicubic,-67.860,-38.293,+28
+vit_base_patch16_384.augreg_in1k,27.560,72.440,57.280,42.720,86.86,384,1.000,bicubic,-67.380,-41.610,+107
+coat_lite_small,27.547,72.453,58.547,41.453,19.84,224,0.900,bicubic,-67.993,-40.313,+14
+deit_base_patch16_224,27.440,72.560,58.893,41.107,86.57,224,0.900,bicubic,-68.000,-40.237,+33
+vit_relpos_base_patch16_224.sw_in1k,27.333,72.667,61.133,38.867,86.43,224,0.900,bicubic,-68.227,-37.897,+9
+resnetv2_50x1_bitm,27.293,72.707,62.853,37.147,25.55,448,1.000,bilinear,-67.717,-36.037,+94
+coatnet_bn_0_rw_224,27.200,72.800,61.280,38.720,27.44,224,0.950,bicubic,-68.500,-37.860,-8
+xcit_small_12_p16_224_dist,27.120,72.880,59.800,40.200,26.25,224,1.000,bicubic,-68.910,-39.340,-52
+vit_small_patch16_224.augreg_in21k_ft_in1k,27.053,72.947,59.213,40.787,22.05,224,0.900,bicubic,-68.317,-39.717,+42
+coatnet_0_rw_224,27.027,72.973,59.387,40.613,27.44,224,0.950,bicubic,-68.403,-39.653,+33
+flexivit_small.600ep_in1k,26.933,73.067,57.280,42.720,22.06,240,0.950,bicubic,-68.747,-41.770,-10
+sequencer2d_s,26.827,73.173,60.587,39.413,27.65,224,0.875,bicubic,-69.163,-38.463,-49
+tresnet_v2_l,26.760,73.240,59.800,40.200,46.17,224,0.875,bilinear,-69.400,-39.440,-81
+mobilevitv2_200_in22ft1k,26.667,73.333,59.400,40.600,18.45,256,0.888,bicubic,-68.493,-39.540,+61
+gcvit_xtiny,26.627,73.373,60.867,39.133,19.98,224,0.875,bicubic,-68.953,-38.173,-4
+swin_s3_tiny_224,26.507,73.493,60.320,39.680,28.33,224,0.900,bicubic,-68.653,-38.610,+58
+coatnet_rmlp_nano_rw_224,26.440,73.560,60.520,39.480,15.15,224,0.900,bicubic,-68.990,-38.510,+25
+swinv2_tiny_window8_256,26.413,73.587,60.573,39.427,28.35,256,0.900,bicubic,-69.087,-38.387,+10
+tf_efficientnet_b4.aa_in1k,26.293,73.707,60.107,39.893,19.34,380,0.922,bicubic,-69.607,-39.063,-44
+tf_efficientnet_b4.ap_in1k,26.240,73.760,60.227,39.773,19.34,380,0.922,bicubic,-69.920,-39.053,-89
+nfnet_l0,26.213,73.787,61.720,38.280,35.07,288,1.000,bicubic,-69.907,-37.640,-79
+regnety_032,26.213,73.787,60.987,39.013,19.44,288,1.000,bicubic,-69.757,-38.203,-56
+deit3_small_patch16_224,26.187,73.813,54.440,45.560,22.06,224,0.900,bicubic,-68.813,-44.590,+81
+coatnext_nano_rw_224,26.173,73.827,59.560,40.440,14.70,224,0.900,bicubic,-69.257,-39.440,+17
+ecaresnet50t,26.133,73.867,60.027,39.973,25.57,320,0.950,bicubic,-69.377,-39.093,+1
+fbnetv3_g.ra2_in1k,26.107,73.893,61.053,38.947,16.62,288,0.950,bilinear,-69.413,-37.937,-2
+ecaresnet101d,26.027,73.973,58.987,41.013,44.57,224,0.875,bicubic,-69.503,-39.833,-9
+mobilevitv2_175_in22ft1k,26.013,73.987,58.480,41.520,14.25,256,0.888,bicubic,-69.217,-40.510,+38
+flexivit_small.300ep_in1k,25.933,74.067,57.053,42.947,22.06,240,0.950,bicubic,-69.567,-42.067,+1
+visformer_small,25.840,74.160,58.907,41.093,40.22,224,0.900,bicubic,-69.650,-39.993,+1
+vit_small_patch16_384.augreg_in1k,25.813,74.187,57.613,42.387,22.20,384,1.000,bicubic,-69.477,-41.387,+28
+halo2botnet50ts_256,25.587,74.413,56.893,43.107,22.64,256,0.950,bicubic,-69.803,-42.117,+16
+coat_mini,25.520,74.480,57.693,42.307,10.34,224,0.900,bicubic,-69.450,-41.087,+73
+vit_relpos_medium_patch16_rpn_224.sw_in1k,25.453,74.547,58.640,41.360,38.73,224,0.900,bicubic,-70.057,-40.440,-8
+crossvit_15_240,25.453,74.547,57.560,42.440,27.53,240,0.875,bicubic,-69.697,-41.370,+44
+vit_srelpos_medium_patch16_224.sw_in1k,25.400,74.600,58.453,41.547,38.74,224,0.900,bicubic,-69.830,-40.337,+29
+xcit_small_12_p16_224,25.133,74.867,56.040,43.960,26.25,224,1.000,bicubic,-70.287,-42.570,+7
+resnetv2_50x1_bit_distilled,25.107,74.893,59.613,40.387,25.55,224,0.875,bicubic,-71.023,-39.667,-98
+convit_small,25.093,74.907,57.280,42.720,27.78,224,0.875,bicubic,-70.107,-41.620,+30
+vit_base_patch16_rpn_224.in1k,25.080,74.920,58.653,41.347,86.54,224,0.900,bicubic,-70.300,-40.197,+9
+gc_efficientnetv2_rw_t.agc_in1k,25.053,74.947,57.707,42.293,13.68,288,1.000,bicubic,-70.687,-41.313,-47
+eca_nfnet_l0,24.827,75.173,60.093,39.907,24.14,288,1.000,bicubic,-71.123,-39.117,-72
+xception41p,24.773,75.227,55.227,44.773,26.91,299,0.940,bicubic,-70.747,-43.693,-18
+tnt_s_patch16_224,24.733,75.267,58.187,41.813,23.76,224,0.900,bicubic,-70.307,-40.773,+55
+convnext_nano_ols.d1h_in1k,24.520,75.480,57.053,42.947,15.65,288,1.000,bicubic,-70.610,-41.967,+37
+resnetv2_50d_evos,24.480,75.520,56.387,43.613,25.59,288,0.950,bicubic,-71.130,-42.643,-37
+xcit_tiny_12_p16_384_dist,24.453,75.547,57.080,42.920,6.72,384,1.000,bicubic,-70.677,-41.640,+34
+cs3darknet_x,24.373,75.627,57.813,42.187,35.05,288,1.000,bicubic,-71.487,-41.397,-68
+efficientnetv2_rw_t.ra2_in1k,24.333,75.667,57.400,42.600,13.65,288,1.000,bicubic,-71.277,-41.670,-41
+ssl_resnext101_32x4d,24.173,75.827,57.413,42.587,44.18,224,0.875,bilinear,-71.267,-41.377,-15
+swinv2_cr_tiny_ns_224,24.133,75.867,58.213,41.787,28.33,224,0.900,bicubic,-71.237,-40.937,+1
+twins_svt_small,24.133,75.867,57.147,42.853,24.06,224,0.900,bicubic,-71.067,-41.733,+18
+coatnet_nano_rw_224,24.120,75.880,57.173,42.827,15.14,224,0.900,bicubic,-71.130,-41.807,+10
+vit_small_r26_s32_224.augreg_in21k_ft_in1k,24.080,75.920,56.213,43.787,36.43,224,0.900,bicubic,-71.550,-42.977,-47
+vit_relpos_small_patch16_224.sw_in1k,24.040,75.960,58.173,41.827,21.98,224,0.900,bicubic,-71.120,-40.707,+16
+poolformer_m48,24.040,75.960,57.293,42.707,73.47,224,0.950,bicubic,-71.600,-41.647,-51
+mobilevitv2_150_in22ft1k,24.040,75.960,55.933,44.067,10.59,256,0.888,bicubic,-71.100,-42.927,+24
+tf_efficientnet_b2.ns_jft_in1k,24.013,75.987,57.293,42.707,9.11,260,0.890,bicubic,-71.757,-41.697,-67
+convnext_nano.d1h_in1k,24.000,76.000,56.187,43.813,15.59,288,1.000,bicubic,-71.350,-42.673,-6
+cs3sedarknet_l,23.973,76.027,58.720,41.280,21.91,288,0.950,bicubic,-71.337,-40.400,-3
+resnetv2_50d_gn,23.920,76.080,56.320,43.680,25.57,288,0.950,bicubic,-71.510,-42.400,-22
+vit_small_patch32_384.augreg_in21k_ft_in1k,23.773,76.227,57.307,42.693,22.92,384,1.000,bicubic,-71.277,-41.683,+33
+lamhalobotnet50ts_256,23.600,76.400,55.267,44.733,22.57,256,0.950,bicubic,-71.560,-43.553,+13
+resnet152,23.560,76.440,53.653,46.347,60.19,224,0.950,bicubic,-72.320,-45.417,-88
+nasnetalarge,23.493,76.507,55.027,44.973,88.75,331,0.911,bicubic,-72.187,-43.903,-63
+crossvit_small_240,23.453,76.547,56.827,43.173,26.86,240,0.875,bicubic,-71.377,-42.193,+55
+levit_384,23.440,76.560,56.387,43.613,39.13,224,0.900,bicubic,-72.090,-42.743,-48
+pnasnet5large,23.333,76.667,53.640,46.360,86.06,331,0.911,bicubic,-72.377,-45.280,-71
+efficientnet_b3.ra2_in1k,23.213,76.787,55.960,44.040,12.23,320,1.000,bicubic,-72.497,-43.080,-73
+jx_nest_tiny,23.200,76.800,56.227,43.773,17.06,224,0.875,bicubic,-72.050,-42.643,-8
+efficientformer_l3,23.160,76.840,57.120,42.880,31.41,224,0.950,bicubic,-72.440,-42.040,-61
+resnet61q,22.987,77.013,55.760,44.240,36.85,288,1.000,bicubic,-72.783,-43.360,-80
+vit_srelpos_small_patch16_224.sw_in1k,22.920,77.080,55.720,44.280,21.97,224,0.900,bicubic,-72.120,-43.210,+23
+vit_base_patch32_clip_224.laion2b_ft_in1k,22.867,77.133,54.973,45.027,88.22,224,0.900,bicubic,-72.663,-44.077,-54
+resmlp_big_24_224,22.853,77.147,54.307,45.693,129.14,224,0.875,bicubic,-71.807,-44.173,+62
+halonet50ts,22.813,77.187,54.013,45.987,22.73,256,0.940,bicubic,-72.287,-44.767,+10
+twins_pcpvt_small,22.720,77.280,56.853,43.147,24.11,224,0.900,bicubic,-72.490,-42.027,-9
+vit_base_patch32_clip_224.openai_ft_in1k,22.573,77.427,55.293,44.707,88.22,224,0.900,bicubic,-72.547,-43.687,+7
+poolformer_m36,22.520,77.480,55.253,44.747,56.17,224,0.950,bicubic,-72.860,-43.677,-29
+vit_base_patch32_224.augreg_in21k_ft_in1k,22.400,77.600,53.933,46.067,88.22,224,0.900,bicubic,-72.600,-44.527,+23
+pit_s_distilled_224,22.360,77.640,57.120,42.880,24.04,224,0.900,bicubic,-72.880,-41.930,-17
+xcit_tiny_12_p8_224_dist,22.080,77.920,54.307,45.693,6.71,224,1.000,bicubic,-73.000,-44.603,+9
+tresnet_m,21.680,78.320,53.840,46.160,31.39,224,0.875,bilinear,-74.040,-45.190,-88
+maxvit_rmlp_pico_rw_256,21.253,78.747,51.880,48.120,7.52,256,0.950,bicubic,-73.377,-46.880,+56
+convmixer_1536_20,21.200,78.800,55.560,44.440,51.63,224,0.960,bicubic,-73.860,-43.470,+9
+swin_tiny_patch4_window7_224,21.173,78.827,55.973,44.027,28.29,224,0.900,bicubic,-73.967,-42.877,-6
+pit_s_224,21.080,78.920,53.573,46.427,23.46,224,0.900,bicubic,-73.510,-45.137,+58
+xcit_tiny_12_p8_224,21.027,78.973,52.453,47.547,6.71,224,1.000,bicubic,-73.653,-46.377,+45
+resnet51q,20.960,79.040,55.720,44.280,35.70,288,1.000,bilinear,-74.900,-43.400,-109
+regnetz_b16,20.960,79.040,53.853,46.147,9.72,288,0.940,bicubic,-74.110,-44.977,+3
+resnetrs101,20.893,79.107,52.813,47.187,63.62,288,0.940,bicubic,-74.537,-46.177,-51
+deit_small_distilled_patch16_224,20.707,79.293,55.133,44.867,22.44,224,0.900,bicubic,-74.003,-43.897,+39
+sebotnet33ts_256,20.707,79.293,48.800,51.200,13.70,256,0.940,bicubic,-73.883,-49.700,+53
+resnest50d_4s2x40d,20.387,79.613,52.800,47.200,30.42,224,0.875,bicubic,-74.573,-46.270,+13
+resnetaa50,20.093,79.907,51.920,48.080,25.56,288,1.000,bicubic,-75.117,-47.010,-28
+ssl_resnext50_32x4d,20.000,80.000,53.613,46.387,25.03,224,0.875,bilinear,-74.870,-45.267,+20
+haloregnetz_b,19.987,80.013,49.987,50.013,11.68,224,0.940,bicubic,-74.713,-48.673,+35
+resnetv2_101,19.920,80.080,49.280,50.720,44.54,224,0.950,bicubic,-75.720,-49.380,-94
+xcit_nano_12_p8_384_dist,19.787,80.213,50.587,49.413,3.05,384,1.000,bicubic,-73.713,-47.773,+165
+tresnet_xl,19.640,80.360,53.133,46.867,78.44,224,0.875,bilinear,-75.800,-45.707,-64
+resnet101,19.307,80.693,49.507,50.493,44.55,224,0.950,bicubic,-76.043,-49.353,-48
+gluon_senet154,19.307,80.693,47.533,52.467,115.09,224,0.875,bicubic,-75.613,-51.277,+11
+rexnet_200,19.227,80.773,52.720,47.280,16.37,224,0.875,bicubic,-75.713,-46.290,+5
+levit_256,19.200,80.800,50.067,49.933,18.89,224,0.900,bicubic,-75.810,-48.843,-2
+repvgg_b3,19.107,80.893,50.253,49.747,123.09,224,0.875,bilinear,-75.463,-48.527,+42
+lambda_resnet50ts,19.093,80.907,49.267,50.733,21.54,256,0.950,bicubic,-75.677,-49.303,+20
+mixer_b16_224_miil,19.053,80.947,51.227,48.773,59.88,224,0.875,bilinear,-76.247,-47.653,-50
+legacy_senet154,19.053,80.947,47.947,52.053,115.09,224,0.875,bilinear,-76.017,-51.103,-14
+mobilevitv2_200,18.920,81.080,50.533,49.467,18.45,256,0.888,bicubic,-75.910,-48.347,+14
+deit_small_patch16_224,18.907,81.093,51.413,48.587,22.05,224,0.900,bicubic,-75.493,-47.487,+64
+gluon_seresnext101_64x4d,18.907,81.093,49.187,50.813,88.23,224,0.875,bicubic,-76.023,-49.643,-1
+gcvit_xxtiny,18.720,81.280,53.347,46.653,12.00,224,0.875,bicubic,-75.680,-45.343,+60
+tf_efficientnet_b1.ns_jft_in1k,18.693,81.307,51.667,48.333,7.79,240,0.882,bicubic,-76.477,-47.293,-42
+edgenext_small,18.653,81.347,53.613,46.387,5.59,320,1.000,bicubic,-76.747,-45.487,-67
+poolformer_s36,18.400,81.600,51.867,48.133,30.86,224,0.900,bicubic,-76.690,-47.033,-28
+seresnext50_32x4d,18.360,81.640,50.973,49.027,27.56,224,0.875,bicubic,-76.680,-47.857,-18
+cs3darknet_l,18.307,81.693,51.880,48.120,21.16,288,0.950,bicubic,-76.813,-47.110,-33
+cait_xxs36_224,18.253,81.747,49.427,50.573,17.30,224,1.000,bicubic,-76.007,-49.293,+67
+ecaresnet50d,18.227,81.773,51.893,48.107,25.58,224,0.875,bicubic,-76.403,-46.997,+20
+sehalonet33ts,18.200,81.800,47.787,52.213,13.69,256,0.940,bicubic,-76.570,-50.683,+5
+tf_efficientnet_lite4.in1k,18.133,81.867,50.707,49.293,13.01,380,0.920,bilinear,-76.757,-48.143,-7
+vit_tiny_patch16_384.augreg_in21k_ft_in1k,18.027,81.973,50.307,49.693,5.79,384,1.000,bicubic,-75.623,-48.293,+127
+mobilevitv2_175,17.773,82.227,49.760,50.240,14.25,256,0.888,bicubic,-77.127,-49.110,-10
+resnest50d_1s4x24d,17.693,82.307,49.800,50.200,25.68,224,0.875,bicubic,-77.057,-49.180,+3
+resnest50d,17.373,82.627,50.707,49.293,27.48,224,0.875,bilinear,-77.457,-48.003,-4
+gluon_seresnext101_32x4d,17.373,82.627,46.373,53.627,48.96,224,0.875,bicubic,-77.547,-52.387,-14
+efficientnet_el.ra_in1k,17.347,82.653,49.987,50.013,10.59,300,0.904,bicubic,-77.773,-48.993,-44
+convnext_pico.d1_in1k,17.333,82.667,50.213,49.787,9.05,288,0.950,bicubic,-77.407,-48.527,+1
+inception_v4,17.267,82.733,45.920,54.080,42.68,299,0.875,bicubic,-77.113,-52.660,+46
+tf_efficientnet_b3.ap_in1k,17.187,82.813,49.680,50.320,12.23,300,0.904,bicubic,-78.133,-49.220,-74
+xcit_tiny_24_p16_224_dist,17.173,82.827,47.453,52.547,12.12,224,1.000,bicubic,-77.357,-51.177,+24
+tf_efficientnet_b3.aa_in1k,17.000,83.000,49.267,50.733,12.23,300,0.904,bicubic,-78.010,-49.793,-31
+xception71,17.000,83.000,45.520,54.480,42.34,299,0.903,bicubic,-77.280,-53.120,+51
+cs3darknet_focus_l,16.960,83.040,50.453,49.547,21.15,288,0.950,bicubic,-78.210,-48.657,-62
+resmlp_36_distilled_224,16.880,83.120,51.467,48.533,44.69,224,0.875,bicubic,-78.010,-47.553,-20
+gluon_resnext101_64x4d,16.853,83.147,44.213,55.787,83.46,224,0.875,bicubic,-77.817,-54.437,-2
+tf_efficientnetv2_b3.in1k,16.667,83.333,48.680,51.320,14.36,300,0.904,bicubic,-78.493,-50.270,-60
+tresnet_l,16.600,83.400,49.920,50.080,55.99,224,0.875,bilinear,-78.680,-49.090,-78
+inception_resnet_v2,16.573,83.427,44.960,55.040,55.84,299,0.897,bicubic,-77.967,-53.890,+13
+gluon_resnet152_v1s,16.573,83.427,44.533,55.467,60.32,224,0.875,bicubic,-78.467,-54.347,-43
+gluon_resnet152_v1d,16.573,83.427,44.280,55.720,60.21,224,0.875,bicubic,-78.167,-54.420,-11
+convnext_pico_ols.d1_in1k,16.520,83.480,49.733,50.267,9.06,288,1.000,bicubic,-78.100,-49.127,+1
+gmlp_s16_224,16.520,83.480,45.120,54.880,19.42,224,0.875,bicubic,-77.640,-53.380,+58
+resmlp_24_distilled_224,16.467,83.533,50.387,49.613,30.02,224,0.875,bicubic,-77.993,-48.353,+22
+mobilevitv2_150,16.453,83.547,48.453,51.547,10.59,256,0.888,bicubic,-78.097,-50.297,+7
+gluon_xception65,16.440,83.560,46.027,53.973,39.92,299,0.903,bicubic,-77.820,-52.693,+41
+gernet_l,16.373,83.627,47.213,52.787,31.08,256,0.875,bilinear,-78.717,-51.697,-59
+gcresnet50t,16.360,83.640,48.240,51.760,25.90,256,0.900,bicubic,-78.490,-50.550,-29
+xcit_tiny_24_p16_224,16.307,83.693,45.960,54.040,12.12,224,1.000,bicubic,-77.773,-52.490,+59
+wide_resnet50_2,16.280,83.720,48.347,51.653,68.88,224,0.875,bicubic,-78.800,-50.623,-60
+gcresnext50ts,16.240,83.760,46.533,53.467,15.67,256,0.900,bicubic,-78.250,-52.137,+8
+ens_adv_inception_resnet_v2,16.240,83.760,43.640,56.360,55.84,299,0.897,bicubic,-77.920,-55.000,+49
+repvgg_b3g4,16.213,83.787,47.653,52.347,83.83,224,0.875,bilinear,-78.307,-51.317,+4
+ssl_resnet50,15.960,84.040,49.467,50.533,25.56,224,0.875,bilinear,-78.490,-49.453,+14
+edgenext_small_rw,15.933,84.067,49.653,50.347,7.83,320,1.000,bicubic,-78.727,-49.137,-19
+regnety_320,15.627,84.373,44.827,55.173,145.05,224,0.875,bicubic,-78.913,-53.963,-3
+vit_base_patch32_384.augreg_in1k,15.613,84.387,44.107,55.893,88.30,384,1.000,bicubic,-78.027,-54.293,+93
+ecaresnet101d_pruned,15.600,84.400,48.027,51.973,24.88,224,0.875,bicubic,-79.480,-50.953,-69
+convmixer_768_32,15.533,84.467,47.933,52.067,21.11,224,0.960,bicubic,-78.967,-50.917,-1
+ecaresnet26t,15.467,84.533,47.920,52.080,16.01,320,0.950,bicubic,-78.843,-50.560,+21
+coat_tiny,15.413,84.587,45.600,54.400,5.50,224,0.900,bicubic,-78.177,-52.830,+95
+skresnext50_32x4d,15.373,84.627,44.493,55.507,27.48,224,0.875,bicubic,-78.887,-53.967,+26
+vit_relpos_base_patch32_plus_rpn_256.sw_in1k,15.267,84.733,42.627,57.373,119.42,256,0.900,bicubic,-78.453,-55.913,+81
+ecaresnetlight,15.160,84.840,45.827,54.173,30.16,224,0.875,bicubic,-79.610,-52.973,-41
+cait_xxs24_224,15.160,84.840,44.960,55.040,11.96,224,1.000,bicubic,-78.440,-53.580,+91
+vit_base_patch16_224.augreg_in1k,14.987,85.013,41.987,58.013,86.57,224,0.900,bicubic,-78.643,-56.253,+86
+levit_192,14.893,85.107,44.920,55.080,10.95,224,0.900,bicubic,-79.277,-53.620,+31
+rexnet_150,14.720,85.280,46.907,53.093,9.73,224,0.875,bicubic,-79.760,-51.883,-5
+darknet53,14.693,85.307,47.120,52.880,41.61,288,1.000,bicubic,-79.937,-51.770,-31
+resnext50_32x4d,14.613,85.387,44.187,55.813,25.03,224,0.950,bicubic,-79.947,-54.423,-20
+darknetaa53,14.573,85.427,45.413,54.587,36.02,288,1.000,bilinear,-79.897,-53.357,-7
+coat_lite_mini,14.507,85.493,44.507,55.493,11.01,224,0.900,bicubic,-79.553,-54.053,+38
+efficientnet_el_pruned.in1k,14.480,85.520,46.120,53.880,10.59,300,0.904,bicubic,-79.920,-52.620,0
+seresnet33ts,14.440,85.560,46.107,53.893,19.78,256,0.900,bicubic,-80.420,-52.423,-58
+efficientnet_b2.ra_in1k,14.440,85.560,46.080,53.920,9.11,288,1.000,bicubic,-80.170,-52.630,-30
+poolformer_s24,14.267,85.733,47.240,52.760,21.39,224,0.900,bicubic,-80.293,-51.550,-28
+seresnet50,14.147,85.853,45.467,54.533,28.09,224,0.875,bicubic,-80.403,-53.243,-27
+legacy_seresnext101_32x4d,14.147,85.853,42.973,57.027,48.96,224,0.875,bilinear,-80.223,-55.677,-1
+fbnetv3_d.ra2_in1k,14.133,85.867,46.453,53.547,10.31,256,0.950,bilinear,-79.797,-52.287,+42
+eca_resnet33ts,14.093,85.907,47.347,52.653,19.68,256,0.900,bicubic,-80.097,-51.413,+14
+gernet_m,14.013,85.987,46.067,53.933,21.14,224,0.875,bilinear,-80.607,-52.703,-39
+pvt_v2_b1,14.000,86.000,47.720,52.280,14.01,224,0.900,bicubic,-79.820,-50.940,+52
+mobilevitv2_125,13.987,86.013,44.933,55.067,7.48,256,0.888,bicubic,-79.973,-53.657,+35
+gluon_resnext101_32x4d,13.867,86.133,41.653,58.347,44.18,224,0.875,bicubic,-80.663,-57.127,-28
+gcresnet33ts,13.760,86.240,45.053,54.947,19.88,256,0.900,bicubic,-80.710,-53.717,-20
+gluon_seresnext50_32x4d,13.600,86.400,43.760,56.240,27.56,224,0.875,bicubic,-80.740,-54.850,-7
+resmlp_36_224,13.507,86.493,46.693,53.307,44.69,224,0.875,bicubic,-80.683,-51.647,+8
+resnet50_gn,13.453,86.547,42.747,57.253,25.56,224,0.940,bicubic,-80.897,-55.963,-10
+repvgg_b2g4,13.440,86.560,43.787,56.213,61.76,224,0.875,bilinear,-80.420,-54.803,+41
+vit_small_patch16_224.augreg_in1k,13.400,86.600,41.400,58.600,22.05,224,0.900,bicubic,-80.490,-57.040,+38
+eca_botnext26ts_256,13.373,86.627,42.160,57.840,10.59,256,0.950,bicubic,-80.417,-56.340,+46
+ese_vovnet39b,13.320,86.680,43.813,56.187,24.57,224,0.875,bicubic,-80.770,-54.847,+15
+regnetx_320,13.307,86.693,40.720,59.280,107.81,224,0.875,bicubic,-81.153,-58.050,-26
+pit_xs_distilled_224,13.240,86.760,44.573,55.427,11.00,224,0.900,bicubic,-80.570,-53.837,+41
+efficientnet_b3_pruned.in1k,13.173,86.827,45.213,54.787,9.86,300,0.904,bicubic,-81.457,-53.607,-54
+gluon_resnet101_v1d,13.160,86.840,41.493,58.507,44.57,224,0.875,bicubic,-81.060,-57.057,-4
+mixnet_xl.ra_in1k,13.120,86.880,43.253,56.747,11.90,224,0.875,bicubic,-81.070,-55.407,-1
+cspresnext50,13.080,86.920,44.920,55.080,20.57,256,0.887,bilinear,-81.760,-53.850,-78
+efficientformer_l1,13.027,86.973,45.600,54.400,12.29,224,0.950,bicubic,-81.463,-53.230,-40
+eca_halonext26ts,12.960,87.040,42.773,57.227,10.76,256,0.940,bicubic,-81.080,-55.717,+11
+nf_regnet_b1,12.947,87.053,44.400,55.600,10.22,288,0.900,bicubic,-81.173,-54.340,+5
+mobilevit_s,12.880,87.120,40.773,59.227,5.58,256,0.900,bicubic,-80.300,-57.647,+88
+pit_xs_224,12.813,87.187,42.840,57.160,10.62,224,0.900,bicubic,-80.297,-55.550,+94
+gluon_inception_v3,12.640,87.360,40.493,59.507,23.83,299,0.875,bicubic,-80.820,-58.077,+63
+crossvit_9_dagger_240,12.573,87.427,41.773,58.227,8.78,240,0.875,bicubic,-80.327,-56.467,+104
+coat_lite_tiny,12.520,87.480,41.160,58.840,5.72,224,0.900,bicubic,-80.720,-57.100,+82
+convnext_femto_ols.d1_in1k,12.493,87.507,44.000,56.000,5.23,288,0.950,bicubic,-81.427,-54.520,+15
+resmlp_24_224,12.493,87.507,43.427,56.573,30.02,224,0.875,bicubic,-81.527,-54.903,+4
+regnety_120,12.427,87.573,42.200,57.800,51.82,224,0.875,bicubic,-82.053,-56.610,-47
+efficientnet_em.ra2_in1k,12.360,87.640,43.880,56.120,6.90,240,0.882,bicubic,-81.480,-54.930,+21
+hrnet_w64,12.027,87.973,40.787,59.213,128.06,224,0.875,bilinear,-81.983,-57.823,+3
+cspdarknet53,12.013,87.987,43.253,56.747,27.64,256,0.887,bilinear,-82.647,-55.547,-77
+xcit_tiny_12_p16_224_dist,11.947,88.053,40.133,59.867,6.72,224,1.000,bicubic,-81.443,-58.367,+61
+gluon_resnet101_v1s,11.880,88.120,40.973,59.027,44.67,224,0.875,bicubic,-82.840,-57.847,-84
+gmixer_24_224,11.853,88.147,37.773,62.227,24.72,224,0.875,bicubic,-80.977,-60.407,+99
+nf_resnet50,11.760,88.240,45.933,54.067,25.56,288,0.940,bicubic,-82.800,-52.947,-67
+fbnetv3_b.ra2_in1k,11.747,88.253,44.387,55.613,8.60,256,0.950,bilinear,-82.213,-54.243,-1
+resnet50d,11.693,88.307,42.453,57.547,25.58,224,0.875,bicubic,-82.567,-56.117,-32
+dpn92,11.627,88.373,40.267,59.733,37.67,224,0.875,bicubic,-82.603,-58.463,-29
+xception41,11.600,88.400,39.133,60.867,26.97,299,0.903,bicubic,-81.830,-59.297,+51
+botnet26t_256,11.587,88.413,40.147,59.853,12.49,256,0.950,bicubic,-81.933,-58.363,+41
+dla102x2,11.573,88.427,41.293,58.707,41.28,224,0.875,bilinear,-82.377,-57.197,-3
+vit_small_patch32_224.augreg_in21k_ft_in1k,11.480,88.520,39.573,60.427,22.88,224,0.900,bicubic,-80.560,-58.717,+138
+levit_128,11.427,88.573,40.267,59.733,9.21,224,0.900,bicubic,-81.913,-58.113,+53
+lambda_resnet26t,11.373,88.627,40.240,59.760,10.96,256,0.940,bicubic,-82.467,-58.400,+7
+efficientnet_b2_pruned.in1k,11.360,88.640,42.027,57.973,8.31,260,0.890,bicubic,-82.780,-56.643,-23
+tf_efficientnet_el.in1k,11.333,88.667,42.040,57.960,10.59,300,0.904,bicubic,-83.077,-56.670,-56
+xcit_nano_12_p16_384_dist,11.253,88.747,39.867,60.133,3.05,384,1.000,bicubic,-80.567,-58.153,+146
+convnext_femto.d1_in1k,11.240,88.760,42.827,57.173,5.22,288,0.950,bicubic,-82.680,-55.783,-5
+halonet26t,11.133,88.867,38.800,61.200,12.48,256,0.950,bicubic,-82.847,-59.700,-15
+vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,11.093,88.907,39.987,60.013,6.36,384,1.000,bicubic,-80.947,-58.243,+128
+gluon_resnet152_v1c,11.093,88.907,37.120,62.880,60.21,224,0.875,bicubic,-83.067,-61.480,-32
+hrnet_w48,11.080,88.920,40.320,59.680,77.47,224,0.875,bilinear,-82.840,-58.290,-11
+dpn107,11.080,88.920,38.693,61.307,86.92,224,0.875,bicubic,-83.230,-59.947,-50
+ecaresnet50d_pruned,11.027,88.973,41.947,58.053,19.94,224,0.875,bicubic,-83.193,-56.783,-44
+mobilevitv2_100,11.027,88.973,40.600,59.400,4.90,256,0.888,bicubic,-82.243,-57.680,+50
+tf_efficientnetv2_b2.in1k,11.027,88.973,39.760,60.240,10.10,260,0.890,bicubic,-83.393,-58.810,-67
+adv_inception_v3,11.013,88.987,36.720,63.280,23.83,299,0.875,bicubic,-81.867,-61.420,+72
+resnetv2_50,10.960,89.040,39.373,60.627,25.55,224,0.950,bicubic,-83.480,-59.367,-70
+xcit_tiny_12_p16_224,10.960,89.040,37.040,62.960,6.72,224,1.000,bicubic,-81.540,-61.020,+99
+tf_efficientnet_b0.ns_jft_in1k,10.933,89.067,40.067,59.933,5.29,224,0.875,bicubic,-82.697,-58.573,+10
+tf_inception_v3,10.840,89.160,36.880,63.120,23.83,299,0.875,bicubic,-82.480,-61.150,+39
+xcit_nano_12_p8_224_dist,10.787,89.213,38.080,61.920,3.05,224,1.000,bicubic,-81.303,-60.080,+114
+dpn131,10.787,89.213,37.200,62.800,79.25,224,0.875,bicubic,-83.223,-61.520,-30
+tf_efficientnet_b2.ap_in1k,10.533,89.467,40.107,59.893,9.11,260,0.890,bicubic,-83.957,-58.513,-84
+resnext50d_32x4d,10.413,89.587,39.733,60.267,25.05,224,0.875,bicubic,-83.767,-58.837,-49
+rexnet_130,10.400,89.600,41.547,58.453,7.56,224,0.875,bicubic,-83.500,-56.853,-22
+xcit_nano_12_p8_224,10.333,89.667,36.987,63.013,3.05,224,1.000,bicubic,-80.687,-60.813,+150
+hrnet_w44,10.320,89.680,39.507,60.493,67.06,224,0.875,bilinear,-83.230,-59.193,+9
+lambda_resnet26rpt_256,10.253,89.747,38.093,61.907,10.99,256,0.940,bicubic,-83.457,-60.417,-5
+resnext101_32x8d,10.187,89.813,37.827,62.173,88.79,224,0.875,bilinear,-83.643,-60.753,-18
+regnetx_160,10.147,89.853,38.000,62.000,54.28,224,0.875,bicubic,-83.973,-60.630,-47
+resnet50,10.133,89.867,37.867,62.133,25.56,224,0.950,bicubic,-84.187,-60.573,-74
+dpn98,10.133,89.867,36.587,63.413,61.57,224,0.875,bicubic,-83.997,-61.983,-49
+legacy_seresnext50_32x4d,10.107,89.893,39.200,60.800,27.56,224,0.875,bilinear,-83.623,-59.380,-13
+resnetrs50,10.093,89.907,37.507,62.493,35.69,224,0.910,bicubic,-84.217,-61.213,-74
+inception_v3,10.027,89.973,35.227,64.773,23.83,299,0.875,bicubic,-82.693,-62.743,+67
+efficientnet_b1.ft_in1k,10.013,89.987,37.547,62.453,7.79,256,1.000,bicubic,-83.237,-60.743,+29
+xception,9.987,90.013,38.027,61.973,22.86,299,0.897,bicubic,-83.473,-60.463,+9
+dpn68b,9.787,90.213,38.053,61.947,12.61,224,0.875,bicubic,-83.903,-60.457,-14
+gluon_resnet152_v1b,9.747,90.253,36.067,63.933,60.19,224,0.875,bicubic,-84.333,-62.453,-52
+tf_efficientnet_lite3.in1k,9.667,90.333,39.000,61.000,8.20,300,0.904,bilinear,-84.533,-59.640,-69
+tf_efficientnet_b2.aa_in1k,9.653,90.347,38.880,61.120,9.11,260,0.890,bicubic,-84.707,-59.730,-86
+tf_efficientnet_cc_b1_8e.in1k,9.573,90.427,36.773,63.227,39.72,240,0.882,bicubic,-84.327,-61.487,-38
+res2net101_26w_4s,9.520,90.480,35.027,64.973,45.21,224,0.875,bilinear,-84.230,-63.283,-25
+legacy_seresnet152,9.347,90.653,37.413,62.587,66.82,224,0.875,bilinear,-84.053,-60.937,+7
+cspresnet50,9.253,90.747,39.640,60.360,21.62,256,0.887,bilinear,-84.487,-59.000,-26
+resnet33ts,9.240,90.760,38.667,61.333,19.68,256,0.900,bicubic,-84.360,-59.773,-14
+hrnet_w40,9.227,90.773,36.893,63.107,57.56,224,0.875,bilinear,-84.263,-61.687,-3
+regnetx_120,9.187,90.813,37.200,62.800,46.11,224,0.875,bicubic,-85.053,-61.450,-81
+seresnext26d_32x4d,9.147,90.853,36.840,63.160,16.81,224,0.875,bicubic,-83.553,-61.310,+54
+resnest26d,9.080,90.920,37.853,62.147,17.07,224,0.875,bilinear,-84.250,-60.497,+5
+crossvit_tiny_240,9.080,90.920,34.600,65.400,7.01,240,0.875,bicubic,-81.170,-62.990,+144
+vit_tiny_patch16_224.augreg_in21k_ft_in1k,9.067,90.933,34.573,65.427,5.72,224,0.900,bicubic,-82.693,-63.287,+102
+vit_base_patch16_224.sam,8.987,91.013,36.173,63.827,86.57,224,0.900,bicubic,-85.153,-62.357,-73
+gluon_resnext50_32x4d,8.947,91.053,36.333,63.667,25.03,224,0.875,bicubic,-84.863,-62.337,-40
+seresnext26t_32x4d,8.893,91.107,36.907,63.093,16.81,224,0.875,bicubic,-83.927,-61.463,+37
+rexnet_100,8.893,91.107,36.373,63.627,4.80,224,0.875,bicubic,-84.137,-61.927,+26
+bat_resnext26ts,8.867,91.133,36.427,63.573,10.73,256,0.900,bicubic,-84.463,-62.203,+1
+mixnet_l.ft_in1k,8.853,91.147,36.187,63.813,7.33,224,0.875,bicubic,-84.597,-62.033,-10
+convit_tiny,8.840,91.160,34.360,65.640,5.71,224,0.875,bicubic,-81.790,-63.380,+130
+mobilenetv3_large_100.miil_in21k_ft_in1k,8.840,91.160,32.973,67.027,5.48,224,0.875,bilinear,-83.420,-64.667,+69
+resnet32ts,8.760,91.240,37.200,62.800,17.96,256,0.900,bicubic,-84.700,-61.330,-14
+gcresnext26ts,8.680,91.320,35.733,64.267,10.48,256,0.900,bicubic,-84.090,-62.307,+34
+levit_128s,8.653,91.347,33.107,66.893,7.78,224,0.900,bicubic,-83.317,-64.953,+80
+dla169,8.640,91.360,36.040,63.960,53.39,224,0.875,bilinear,-84.700,-62.560,-10
+hrnet_w30,8.613,91.387,37.040,62.960,37.71,224,0.875,bilinear,-84.587,-61.370,+2
+mixer_b16_224,8.600,91.400,29.413,70.587,59.88,224,0.875,bicubic,-83.270,-68.507,+83
+convnext_atto_ols.a2_in1k,8.560,91.440,35.067,64.933,3.70,288,0.950,bicubic,-84.520,-63.403,+9
+legacy_seresnet101,8.533,91.467,36.013,63.987,49.33,224,0.875,bilinear,-84.747,-62.497,-5
+tf_efficientnet_b1.ap_in1k,8.453,91.547,35.253,64.747,7.79,240,0.882,bicubic,-85.237,-63.107,-44
+repvgg_b2,8.427,91.573,36.467,63.533,89.02,224,0.875,bilinear,-85.073,-62.263,-29
+resmlp_12_distilled_224,8.307,91.693,36.853,63.147,15.35,224,0.875,bicubic,-84.523,-61.287,+20
+crossvit_9_240,8.280,91.720,34.107,65.893,8.55,240,0.875,bicubic,-82.360,-63.633,+116
+resnetblur50,8.240,91.760,37.400,62.600,25.56,224,0.875,bicubic,-85.720,-61.160,-78
+dla102x,8.200,91.800,37.013,62.987,26.31,224,0.875,bilinear,-85.320,-61.287,-35
+eca_resnext26ts,8.080,91.920,35.960,64.040,10.30,256,0.900,bicubic,-84.530,-62.300,+36
+hrnet_w32,8.040,91.960,37.507,62.493,41.23,224,0.875,bilinear,-85.490,-60.943,-38
+cs3darknet_m,8.000,92.000,36.520,63.480,9.31,288,0.950,bicubic,-85.350,-62.080,-24
+res2net50_26w_8s,8.000,92.000,33.853,66.147,48.40,224,0.875,bilinear,-85.540,-64.407,-41
+vit_base_patch32_224.augreg_in1k,8.000,92.000,30.467,69.533,88.22,224,0.900,bicubic,-83.190,-66.913,+90
+gluon_resnet101_v1c,7.987,92.013,33.360,66.640,44.57,224,0.875,bicubic,-85.683,-65.060,-54
+gluon_resnet50_v1d,7.920,92.080,35.000,65.000,25.58,224,0.875,bicubic,-85.850,-63.390,-65
+mobilevitv2_075,7.800,92.200,33.640,66.360,2.87,256,0.888,bicubic,-83.960,-64.400,+73
+dla60_res2next,7.787,92.213,34.987,65.013,17.03,224,0.875,bilinear,-85.393,-63.443,-13
+mobilevit_xs,7.733,92.267,32.507,67.493,2.32,256,0.900,bicubic,-83.097,-65.413,+99
+densenetblur121d,7.720,92.280,34.733,65.267,8.00,224,0.875,bicubic,-84.190,-63.337,+61
+deit_tiny_distilled_patch16_224,7.707,92.293,33.560,66.440,5.91,224,0.900,bicubic,-82.993,-64.010,+99
+tf_efficientnetv2_b1.in1k,7.693,92.307,34.653,65.347,8.14,240,0.882,bicubic,-86.247,-63.967,-89
+convnext_atto.d2_in1k,7.600,92.400,35.040,64.960,3.70,288,0.950,bicubic,-85.190,-63.020,+7
+dla60_res2net,7.560,92.440,34.627,65.373,20.85,224,0.875,bilinear,-85.620,-63.783,-20
+efficientnet_b1_pruned.in1k,7.440,92.560,34.533,65.467,6.33,240,0.882,bicubic,-85.330,-63.727,+7
+wide_resnet101_2,7.360,92.640,34.147,65.853,126.89,224,0.875,bilinear,-86.360,-63.923,-70
+regnetx_064,7.333,92.667,34.373,65.627,26.21,224,0.875,bicubic,-86.557,-64.257,-87
+deit_tiny_patch16_224,7.307,92.693,30.707,69.293,5.72,224,0.900,bicubic,-82.363,-66.743,+112
+edgenext_x_small,7.280,92.720,30.947,69.053,2.34,288,1.000,bicubic,-84.440,-66.923,+64
+hardcorenas_e,7.240,92.760,33.293,66.707,8.07,224,0.875,bilinear,-85.330,-64.817,+18
+gluon_resnet101_v1b,7.227,92.773,32.773,67.227,44.55,224,0.875,bicubic,-86.523,-65.607,-79
+efficientnet_b0.ra_in1k,7.213,92.787,34.013,65.987,5.29,224,0.875,bicubic,-85.477,-64.277,+8
+gluon_resnet50_v1s,7.213,92.787,33.507,66.493,25.68,224,0.875,bicubic,-86.407,-64.953,-67
+tf_mixnet_l.in1k,7.147,92.853,31.613,68.387,7.33,224,0.875,bicubic,-86.163,-66.417,-39
+tf_efficientnet_b1.aa_in1k,7.133,92.867,33.040,66.960,7.79,240,0.882,bicubic,-86.367,-65.490,-57
+tf_efficientnet_cc_b0_8e.in1k,7.120,92.880,31.787,68.213,24.01,224,0.875,bicubic,-85.710,-66.093,-11
+convmixer_1024_20_ks9_p14,7.093,92.907,33.053,66.947,24.38,224,0.960,bicubic,-85.337,-65.217,+18
+seresnext26ts,7.040,92.960,34.933,65.067,10.39,256,0.900,bicubic,-85.650,-63.137,+1
+resmlp_12_224,7.013,92.987,33.947,66.053,15.35,224,0.875,bicubic,-85.197,-64.213,+28
+cs3darknet_focus_m,6.947,93.053,34.600,65.400,9.30,288,0.950,bicubic,-86.003,-63.790,-21
+hardcorenas_f,6.827,93.173,34.093,65.907,8.20,224,0.875,bilinear,-86.123,-64.067,-21
+ese_vovnet19b_dw,6.733,93.267,33.413,66.587,6.54,224,0.875,bicubic,-85.557,-64.677,+20
+selecsls60b,6.733,93.267,33.267,66.733,32.77,224,0.875,bicubic,-86.567,-65.123,-46
+efficientnet_es.ra_in1k,6.707,93.293,33.840,66.160,5.44,224,0.875,bicubic,-86.433,-64.580,-38
+res2net50_26w_6s,6.693,93.307,31.653,68.347,37.05,224,0.875,bilinear,-86.717,-66.627,-60
+legacy_seresnext26_32x4d,6.627,93.373,33.253,66.747,16.79,224,0.875,bicubic,-86.013,-64.877,-3
+tinynet_a.in1k,6.627,93.373,32.213,67.787,6.19,192,0.875,bicubic,-85.813,-65.867,+5
+mixnet_m.ft_in1k,6.627,93.373,32.053,67.947,5.01,224,0.875,bicubic,-85.803,-65.817,+9
+pit_ti_distilled_224,6.627,93.373,30.760,69.240,5.10,224,0.900,bicubic,-84.273,-66.940,+67
+poolformer_s12,6.560,93.440,34.467,65.533,11.92,224,0.900,bicubic,-86.060,-63.733,-6
+skresnet34,6.480,93.520,31.547,68.453,22.28,224,0.875,bicubic,-85.910,-66.603,+8
+repvgg_b1,6.467,93.533,33.827,66.173,57.42,224,0.875,bilinear,-86.863,-64.683,-60
+hardcorenas_d,6.440,93.560,32.213,67.787,7.50,224,0.875,bilinear,-85.960,-65.857,+4
+dla60x,6.427,93.573,34.080,65.920,17.35,224,0.875,bilinear,-86.693,-64.430,-47
+resnet34d,6.400,93.600,31.493,68.507,21.82,224,0.875,bicubic,-86.280,-66.817,-14
+regnetx_080,6.307,93.693,32.320,67.680,39.57,224,0.875,bicubic,-87.563,-66.200,-114
+swsl_resnet18,6.240,93.760,31.600,68.400,11.69,224,0.875,bilinear,-84.450,-66.100,+65
+legacy_seresnet50,6.187,93.813,32.653,67.347,28.09,224,0.875,bilinear,-86.773,-65.537,-40
+resnet26t,6.120,93.880,32.280,67.720,16.01,256,0.940,bicubic,-86.630,-65.930,-26
+pit_ti_224,6.120,93.880,30.227,69.773,4.85,224,0.900,bicubic,-83.820,-67.223,+76
+tv_resnet152,6.040,93.960,32.053,67.947,60.19,224,0.875,bilinear,-87.260,-66.227,-65
+regnetx_040,5.973,94.027,31.547,68.453,22.12,224,0.875,bicubic,-87.587,-66.993,-92
+tf_efficientnet_cc_b0_4e.in1k,5.973,94.027,29.600,70.400,13.31,224,0.875,bicubic,-86.617,-68.480,-16
+tf_efficientnetv2_b0.in1k,5.893,94.107,30.773,69.227,7.14,224,0.875,bicubic,-87.217,-67.537,-55
+dla102,5.880,94.120,32.707,67.293,33.27,224,0.875,bilinear,-87.180,-65.833,-53
+mixer_l16_224,5.867,94.133,18.533,81.467,208.20,224,0.875,bicubic,-81.283,-74.987,+98
+regnety_016,5.680,94.320,30.413,69.587,11.20,224,0.875,bicubic,-87.350,-67.947,-53
+selecsls60,5.653,94.347,32.507,67.493,30.67,224,0.875,bicubic,-87.377,-65.683,-53
+hardcorenas_c,5.640,94.360,30.400,69.600,5.52,224,0.875,bilinear,-86.380,-67.440,+6
+res2next50,5.627,94.373,30.867,69.133,24.67,224,0.875,bilinear,-87.213,-67.313,-46
+hrnet_w18,5.493,94.507,30.960,69.040,21.30,224,0.875,bilinear,-86.827,-67.280,-10
+resnest14d,5.480,94.520,28.547,71.453,10.61,224,0.875,bilinear,-86.240,-69.063,+20
+tf_efficientnet_lite2.in1k,5.360,94.640,30.907,69.093,6.09,260,0.890,bicubic,-87.290,-67.323,-31
+tf_efficientnet_em.in1k,5.347,94.653,31.107,68.893,6.90,240,0.882,bicubic,-87.583,-67.083,-53
+gernet_s,5.307,94.693,30.133,69.867,8.17,224,0.875,bilinear,-86.833,-68.057,-5
+tf_efficientnet_b0.ap_in1k,5.307,94.693,28.813,71.187,5.29,224,0.875,bicubic,-86.893,-69.207,-8
+repvgg_b1g4,5.293,94.707,30.813,69.187,39.97,224,0.875,bilinear,-87.687,-67.617,-61
+densenet121,5.293,94.707,29.907,70.093,7.98,224,0.875,bicubic,-86.277,-68.123,+17
+xcit_nano_12_p16_224_dist,5.240,94.760,26.560,73.440,3.05,224,1.000,bicubic,-84.440,-70.530,+60
+res2net50_26w_4s,5.160,94.840,29.360,70.640,25.70,224,0.875,bilinear,-87.340,-68.880,-27
+tf_mixnet_m.in1k,5.080,94.920,28.147,71.853,5.01,224,0.875,bicubic,-87.250,-69.743,-21
+vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,5.080,94.920,27.080,72.920,6.34,224,0.900,bicubic,-84.090,-70.150,+62
+tf_efficientnet_b0.aa_in1k,5.067,94.933,28.800,71.200,5.29,224,0.875,bicubic,-87.183,-69.200,-19
+mobilenetv3_large_100.ra_in1k,5.067,94.933,28.187,71.813,5.48,224,0.875,bicubic,-86.253,-69.523,+16
+res2net50_14w_8s,5.040,94.960,28.773,71.227,25.06,224,0.875,bilinear,-87.700,-69.497,-49
+hardcorenas_b,4.947,95.053,28.120,71.880,5.18,224,0.875,bilinear,-86.823,-69.660,+2
+mobilenetv3_rw.rmsp_in1k,4.907,95.093,29.853,70.147,5.48,224,0.875,bicubic,-86.303,-67.807,+14
+mixnet_s.ft_in1k,4.907,95.093,28.573,71.427,4.13,224,0.875,bicubic,-86.923,-69.287,-1
+gluon_resnet50_v1c,4.893,95.107,28.147,71.853,25.58,224,0.875,bicubic,-88.137,-70.243,-76
+hardcorenas_a,4.867,95.133,28.093,71.907,5.26,224,0.875,bilinear,-86.483,-69.767,+8
+regnetx_032,4.853,95.147,30.280,69.720,15.30,224,0.875,bicubic,-88.267,-68.110,-83
+xcit_nano_12_p16_224,4.853,95.147,25.467,74.533,3.05,224,1.000,bicubic,-83.757,-71.323,+63
+tv_resnext50_32x4d,4.840,95.160,30.307,69.693,25.03,224,0.875,bilinear,-87.900,-67.873,-58
+tv_resnet101,4.707,95.293,29.333,70.667,44.55,224,0.875,bilinear,-88.103,-68.917,-65
+densenet161,4.693,95.307,29.547,70.453,28.68,224,0.875,bicubic,-87.807,-68.743,-44
+resnext26ts,4.693,95.307,29.027,70.973,10.30,256,0.900,bicubic,-87.177,-68.223,-13
+selecsls42b,4.667,95.333,28.587,71.413,32.46,224,0.875,bicubic,-87.613,-69.563,-34
+tf_efficientnet_lite1.in1k,4.613,95.387,28.387,71.613,5.42,240,0.882,bicubic,-88.007,-69.693,-53
+mobilenetv2_120d.ra_in1k,4.533,95.467,29.280,70.720,5.83,224,0.875,bicubic,-87.867,-68.770,-41
+pvt_v2_b0,4.347,95.653,25.907,74.093,3.67,224,0.900,bicubic,-84.443,-70.823,+52
+vit_base_patch32_224.sam,4.333,95.667,24.387,75.613,88.22,224,0.900,bicubic,-85.417,-72.613,+36
+edgenext_xx_small,4.240,95.760,24.093,75.907,1.33,288,1.000,bicubic,-84.640,-72.887,+49
+tinynet_b.in1k,4.187,95.813,26.720,73.280,3.73,188,0.875,bicubic,-86.733,-70.950,+10
+efficientnet_es_pruned.in1k,4.187,95.813,26.520,73.480,5.44,224,0.875,bicubic,-86.993,-71.230,+2
+fbnetc_100.rmsp_in1k,4.133,95.867,25.933,74.067,5.57,224,0.875,bilinear,-86.567,-71.277,+16
+densenet201,4.120,95.880,27.547,72.453,20.01,224,0.875,bicubic,-88.630,-70.683,-73
+gluon_resnet50_v1b,4.120,95.880,26.933,73.067,25.56,224,0.875,bicubic,-88.420,-71.237,-57
+resnet26d,4.040,95.960,28.520,71.480,16.01,224,0.875,bicubic,-88.030,-69.440,-36
+semnasnet_100.rmsp_in1k,3.960,96.040,26.947,73.053,3.89,224,0.875,bicubic,-87.320,-70.613,-9
+repvgg_a2,3.947,96.053,27.267,72.733,28.21,224,0.875,bilinear,-87.993,-70.883,-31
+mobilevitv2_050,3.933,96.067,23.880,76.120,1.37,256,0.888,bicubic,-84.297,-73.110,+47
+tf_mixnet_s.in1k,3.880,96.120,25.253,74.747,4.13,224,0.875,bicubic,-87.630,-72.367,-17
+semnasnet_075.rmsp_in1k,3.867,96.133,27.027,72.973,2.91,224,0.875,bicubic,-86.203,-70.403,+19
+dpn68,3.867,96.133,26.080,73.920,12.61,224,0.875,bicubic,-88.143,-71.970,-37
+tf_efficientnet_es.in1k,3.827,96.173,26.107,73.893,5.44,224,0.875,bicubic,-88.153,-71.753,-39
+mobilevit_xxs,3.827,96.173,21.747,78.253,1.27,256,0.900,bicubic,-83.343,-74.353,+48
+regnety_008,3.813,96.187,27.133,72.867,6.26,224,0.875,bicubic,-87.937,-71.047,-26
+dla60,3.773,96.227,27.933,72.067,22.04,224,0.875,bilinear,-88.457,-70.177,-52
+ssl_resnet18,3.747,96.253,25.427,74.573,11.69,224,0.875,bilinear,-86.473,-72.123,+11
+mobilenetv2_140.ra_in1k,3.720,96.280,26.747,73.253,6.11,224,0.875,bicubic,-88.110,-70.943,-35
+densenet169,3.707,96.293,25.613,74.387,14.15,224,0.875,bicubic,-88.223,-72.487,-41
+regnetx_016,3.627,96.373,26.293,73.707,9.19,224,0.875,bicubic,-88.543,-71.917,-53
+res2net50_48w_2s,3.587,96.413,26.613,73.387,25.29,224,0.875,bilinear,-88.963,-71.467,-74
+tf_mobilenetv3_large_100.in1k,3.547,96.453,25.053,74.947,5.48,224,0.875,bilinear,-87.693,-72.607,-24
+spnasnet_100.rmsp_in1k,3.547,96.453,24.293,75.707,4.42,224,0.875,bilinear,-86.803,-72.897,+4
+regnety_006,3.467,96.533,24.893,75.107,6.06,224,0.875,bicubic,-87.903,-72.817,-29
+legacy_seresnet34,3.333,96.667,23.800,76.200,21.96,224,0.875,bilinear,-87.557,-73.910,-10
+efficientnet_lite0.ra_in1k,3.253,96.747,25.867,74.133,4.65,224,0.875,bicubic,-87.887,-71.763,-22
+ghostnet_100,3.227,96.773,24.853,75.147,5.18,224,0.875,bilinear,-86.793,-72.517,+4
+dla34,3.227,96.773,23.573,76.427,15.74,224,0.875,bilinear,-87.533,-74.087,-10
+regnety_004,3.200,96.800,22.653,77.347,4.34,224,0.875,bicubic,-87.300,-74.887,-5
+mobilenetv2_110d.ra_in1k,3.173,96.827,24.587,75.413,4.52,224,0.875,bicubic,-87.777,-72.963,-19
+mnasnet_100.rmsp_in1k,3.120,96.880,24.227,75.773,4.38,224,0.875,bicubic,-87.390,-73.243,-8
+tinynet_c.in1k,3.093,96.907,21.533,78.467,2.46,184,0.875,bicubic,-84.677,-74.837,+25
+tf_efficientnet_lite0.in1k,3.080,96.920,22.907,77.093,4.65,224,0.875,bicubic,-87.960,-74.683,-25
+skresnet18,3.013,96.987,22.800,77.200,11.96,224,0.875,bicubic,-86.647,-74.430,+5
+vgg19_bn,2.947,97.053,23.480,76.520,143.68,224,0.875,bilinear,-87.133,-74.100,-5
+resnet34,2.920,97.080,23.680,76.320,21.80,224,0.875,bilinear,-88.210,-73.940,-31
+tf_mobilenetv3_large_075.in1k,2.867,97.133,21.573,78.427,3.99,224,0.875,bilinear,-86.813,-75.637,-1
+tinynet_d.in1k,2.867,97.133,17.813,82.187,2.34,152,0.875,bicubic,-81.883,-77.367,+33
+resnet14t,2.787,97.213,19.267,80.733,10.08,224,0.950,bilinear,-86.263,-77.353,+5
+hrnet_w18_small_v2,2.720,97.280,23.693,76.307,15.60,224,0.875,bilinear,-88.470,-74.207,-39
+gluon_resnet34_v1b,2.667,97.333,21.680,78.320,21.80,224,0.875,bicubic,-88.293,-75.950,-31
+vgg16_bn,2.653,97.347,23.773,76.227,138.37,224,0.875,bilinear,-87.437,-73.597,-14
+regnetx_008,2.653,97.347,22.453,77.547,7.26,224,0.875,bicubic,-88.397,-75.257,-35
+vgg16,2.640,97.360,20.427,79.573,138.36,224,0.875,bilinear,-85.910,-76.363,+10
+lcnet_100.ra2_in1k,2.613,97.387,20.893,79.107,2.95,224,0.875,bicubic,-86.177,-75.967,+6
+resnet18d,2.600,97.400,21.613,78.387,11.71,224,0.875,bicubic,-86.680,-75.537,-5
+tv_densenet121,2.560,97.440,22.667,77.333,7.98,224,0.875,bicubic,-88.330,-74.913,-33
+repvgg_b0,2.547,97.453,24.013,75.987,15.82,224,0.875,bilinear,-88.883,-73.977,-54
+regnetx_006,2.507,97.493,20.653,79.347,6.20,224,0.875,bicubic,-87.843,-76.777,-24
+legacy_seresnet18,2.493,97.507,20.080,79.920,11.78,224,0.875,bicubic,-86.387,-76.610,-2
+resnet26,2.480,97.520,22.987,77.013,16.00,224,0.875,bicubic,-88.630,-74.753,-45
+lcnet_075.ra2_in1k,2.320,97.680,17.120,82.880,2.36,224,0.875,bicubic,-83.670,-78.560,+16
+mobilenetv3_small_075.lamb_in1k,2.307,97.693,15.920,84.080,2.04,224,0.875,bicubic,-80.733,-78.180,+24
+mobilenetv2_100.ra_in1k,2.147,97.853,19.907,80.093,3.50,224,0.875,bicubic,-87.453,-77.233,-14
+regnety_002,2.147,97.853,18.880,81.120,3.16,224,0.875,bicubic,-85.233,-77.710,+5
+vgg19,2.107,97.893,20.733,79.267,143.67,224,0.875,bilinear,-86.933,-76.137,-11
+vgg13_bn,2.093,97.907,20.307,79.693,133.05,224,0.875,bilinear,-86.667,-76.663,-5
+tf_mobilenetv3_small_100.in1k,2.013,97.987,15.867,84.133,2.54,224,0.875,bilinear,-83.177,-79.903,+12
+mobilenetv3_small_100.lamb_in1k,2.000,98.000,17.080,82.920,2.54,224,0.875,bicubic,-83.220,-78.540,+10
+tf_mobilenetv3_small_075.in1k,2.000,98.000,14.813,85.187,2.04,224,0.875,bilinear,-81.520,-79.977,+16
+regnetx_004,1.960,98.040,19.173,80.827,5.16,224,0.875,bicubic,-86.940,-77.947,-14
+tv_resnet34,1.867,98.133,20.000,80.000,21.80,224,0.875,bilinear,-88.073,-77.340,-28
 vgg13,1.867,98.133,17.960,82.040,133.05,224,0.875,bilinear,-85.183,-78.360,0
-tinynet_e,1.853,98.147,14.013,85.987,2.04,106,0.875,bicubic,-77.047,-78.547,+16
-mobilenetv3_small_050,1.840,98.160,12.507,87.493,1.59,224,0.875,bicubic,-75.150,-78.793,+16
-lcnet_050,1.813,98.187,13.893,86.107,1.88,224,0.875,bicubic,-79.967,-79.827,+12
-dla46x_c,1.760,98.240,16.493,83.507,1.07,224,0.875,bilinear,-82.490,-78.767,+6
-mnasnet_small,1.760,98.240,15.093,84.907,2.03,224,0.875,bicubic,-82.680,-80.087,+4
-resnet10t,1.733,98.267,15.813,84.187,5.44,224,0.950,bilinear,-84.477,-79.847,-3
-vgg11_bn,1.720,98.280,18.080,81.920,132.87,224,0.875,bilinear,-85.780,-78.740,-12
-dla60x_c,1.627,98.373,18.053,81.947,1.32,224,0.875,bilinear,-84.643,-78.117,-6
-tf_mobilenetv3_large_minimal_100,1.613,98.387,17.120,82.880,3.92,224,0.875,bilinear,-87.357,-79.730,-25
-mobilenetv2_050,1.613,98.387,14.187,85.813,1.97,224,0.875,bicubic,-82.277,-80.533,+1
+tinynet_e.in1k,1.853,98.147,14.027,85.973,2.04,106,0.875,bicubic,-77.047,-78.533,+16
+mobilenetv3_small_050.lamb_in1k,1.840,98.160,12.507,87.493,1.59,224,0.875,bicubic,-75.150,-78.793,+16
+lcnet_050.ra2_in1k,1.813,98.187,13.880,86.120,1.88,224,0.875,bicubic,-79.967,-79.830,+12
+dla46x_c,1.760,98.240,16.480,83.520,1.07,224,0.875,bilinear,-82.490,-78.790,+6
+mnasnet_small.lamb_in1k,1.760,98.240,15.093,84.907,2.03,224,0.875,bicubic,-82.680,-80.087,+4
+resnet10t,1.733,98.267,15.813,84.187,5.44,224,0.950,bilinear,-84.467,-79.837,-3
+vgg11_bn,1.720,98.280,18.093,81.907,132.87,224,0.875,bilinear,-85.780,-78.727,-12
+tf_mobilenetv3_large_minimal_100.in1k,1.627,98.373,17.120,82.880,3.92,224,0.875,bilinear,-87.343,-79.740,-25
+dla60x_c,1.613,98.387,18.040,81.960,1.32,224,0.875,bilinear,-84.677,-78.120,-7
+mobilenetv2_050.lamb_in1k,1.613,98.387,14.200,85.800,1.97,224,0.875,bicubic,-82.277,-80.520,+1
 vgg11,1.560,98.440,16.227,83.773,132.86,224,0.875,bilinear,-84.990,-80.053,-10
-gluon_resnet18_v1b,1.547,98.453,16.613,83.387,11.69,224,0.875,bicubic,-86.853,-80.067,-21
-hrnet_w18_small,1.533,98.467,18.133,81.867,13.19,224,0.875,bilinear,-87.497,-78.977,-30
-dla46_c,1.520,98.480,15.253,84.747,1.30,224,0.875,bilinear,-82.120,-79.667,-2
-regnetx_002,1.373,98.627,15.027,84.973,2.68,224,0.875,bicubic,-84.827,-80.953,-11
-resnet18,1.160,98.840,16.227,83.773,11.69,224,0.875,bilinear,-86.230,-80.063,-20
-tf_mobilenetv3_small_minimal_100,1.013,98.987,11.453,88.547,2.04,224,0.875,bilinear,-80.387,-82.227,-1
-tv_resnet50,0.000,100.000,14.453,85.547,25.56,224,0.875,bilinear,-91.900,-83.587,-99
+gluon_resnet18_v1b,1.547,98.453,16.613,83.387,11.69,224,0.875,bicubic,-86.853,-80.067,-20
+hrnet_w18_small,1.533,98.467,18.120,81.880,13.19,224,0.875,bilinear,-87.517,-78.990,-33
+dla46_c,1.520,98.480,15.267,84.733,1.30,224,0.875,bilinear,-82.130,-79.653,-2
+regnetx_002,1.373,98.627,15.027,84.973,2.68,224,0.875,bicubic,-84.817,-80.953,-11
+resnet18,1.160,98.840,16.213,83.787,11.69,224,0.875,bilinear,-86.230,-80.077,-20
+tf_mobilenetv3_small_minimal_100.in1k,1.013,98.987,11.493,88.507,2.04,224,0.875,bilinear,-80.367,-82.177,-1
+tv_resnet50,0.000,100.000,14.453,85.547,25.56,224,0.875,bilinear,-91.880,-83.587,-101
diff --git a/results/results-imagenet-r-clean.csv b/results/results-imagenet-r-clean.csv
index 0e42fcda..d26ae416 100644
--- a/results/results-imagenet-r-clean.csv
+++ b/results/results-imagenet-r-clean.csv
@@ -1,109 +1,166 @@
 model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation
-beit_large_patch16_384,97.810,2.190,99.790,0.210,305.00,384,1.000,bicubic
-tf_efficientnet_l2_ns,97.780,2.220,99.890,0.110,480.31,800,0.960,bicubic
-beit_large_patch16_512,97.780,2.220,99.820,0.180,305.67,512,1.000,bicubic
-tf_efficientnet_l2_ns_475,97.750,2.250,99.820,0.180,480.31,475,0.936,bicubic
+eva_giant_patch14_336.m30m_ft_in22k_in1k,98.000,2.000,99.900,0.100,"1,013.01",336,1.000,bicubic
+eva_giant_patch14_560.m30m_ft_in22k_in1k,97.990,2.010,99.860,0.140,"1,014.45",560,1.000,bicubic
+eva_large_patch14_336.in22k_ft_in22k_in1k,97.860,2.140,99.880,0.120,304.53,336,1.000,bicubic
+eva_giant_patch14_336.clip_ft_in1k,97.860,2.140,99.790,0.210,"1,013.01",336,1.000,bicubic
+eva_large_patch14_336.in22k_ft_in1k,97.810,2.190,99.840,0.160,304.53,336,1.000,bicubic
+beit_large_patch16_384.in22k_ft_in22k_in1k,97.810,2.190,99.790,0.210,305.00,384,1.000,bicubic
+tf_efficientnet_l2.ns_jft_in1k,97.780,2.220,99.890,0.110,480.31,800,0.960,bicubic
+beit_large_patch16_512.in22k_ft_in22k_in1k,97.780,2.220,99.820,0.180,305.67,512,1.000,bicubic
+maxvit_base_tf_512.in21k_ft_in1k,97.760,2.240,99.860,0.140,119.88,512,1.000,bicubic
+maxvit_xlarge_tf_512.in21k_ft_in1k,97.760,2.240,99.820,0.180,475.77,512,1.000,bicubic
+tf_efficientnet_l2.ns_jft_in1k_475,97.750,2.250,99.820,0.180,480.31,475,0.936,bicubic
+beitv2_large_patch16_224.in1k_ft_in22k_in1k,97.750,2.250,99.790,0.210,304.43,224,0.950,bicubic
+maxvit_xlarge_tf_384.in21k_ft_in1k,97.740,2.260,99.850,0.150,475.32,384,1.000,bicubic
+eva_giant_patch14_224.clip_ft_in1k,97.680,2.320,99.750,0.250,"1,012.56",224,1.000,bicubic
+maxvit_large_tf_384.in21k_ft_in1k,97.670,2.330,99.820,0.180,212.03,384,1.000,bicubic
+maxvit_large_tf_512.in21k_ft_in1k,97.670,2.330,99.730,0.270,212.33,512,1.000,bicubic
+eva_large_patch14_196.in22k_ft_in22k_in1k,97.610,2.390,99.810,0.190,304.14,196,1.000,bicubic
+vit_large_patch14_clip_224.openai_ft_in12k_in1k,97.610,2.390,99.730,0.270,304.20,224,1.000,bicubic
+vit_large_patch14_clip_336.openai_ft_in12k_in1k,97.610,2.390,99.730,0.270,304.53,336,1.000,bicubic
+vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,97.600,2.400,99.780,0.220,632.46,336,1.000,bicubic
+convnext_xlarge.fb_in22k_ft_in1k_384,97.590,2.410,99.770,0.230,350.20,384,1.000,bicubic
+maxvit_base_tf_384.in21k_ft_in1k,97.560,2.440,99.760,0.240,119.65,384,1.000,bicubic
 deit3_large_patch16_384_in21ft1k,97.560,2.440,99.710,0.290,304.76,384,1.000,bicubic
-convnext_xlarge_384_in22ft1k,97.550,2.450,99.800,0.200,350.20,384,1.000,bicubic
-beit_large_patch16_224,97.480,2.520,99.690,0.310,304.43,224,0.900,bicubic
-convnext_large_384_in22ft1k,97.440,2.560,99.780,0.220,197.77,384,1.000,bicubic
-vit_large_patch16_384,97.420,2.580,99.780,0.220,304.72,384,1.000,bicubic
-beit_base_patch16_384,97.330,2.670,99.720,0.280,86.74,384,1.000,bicubic
+eva_large_patch14_196.in22k_ft_in1k,97.520,2.480,99.790,0.210,304.14,196,1.000,bicubic
+beit_large_patch16_224.in22k_ft_in22k_in1k,97.480,2.520,99.690,0.310,304.43,224,0.900,bicubic
+vit_large_patch14_clip_224.openai_ft_in1k,97.460,2.540,99.680,0.320,304.20,224,1.000,bicubic
+convnext_xlarge.fb_in22k_ft_in1k,97.450,2.550,99.820,0.180,350.20,288,1.000,bicubic
+vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,97.450,2.550,99.780,0.220,304.53,336,1.000,bicubic
+vit_large_patch16_384.augreg_in21k_ft_in1k,97.420,2.580,99.780,0.220,304.72,384,1.000,bicubic
+vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,97.390,2.610,99.740,0.260,304.20,224,1.000,bicubic
+vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,97.360,2.640,99.800,0.200,632.05,224,1.000,bicubic
+beit_base_patch16_384.in22k_ft_in22k_in1k,97.330,2.670,99.720,0.280,86.74,384,1.000,bicubic
+tf_efficientnetv2_xl.in21k_ft_in1k,97.330,2.670,99.600,0.400,208.12,512,1.000,bicubic
+tf_efficientnetv2_l.in21k_ft_in1k,97.320,2.680,99.640,0.360,118.52,480,1.000,bicubic
+convnext_large.fb_in22k_ft_in1k_384,97.310,2.690,99.760,0.240,197.77,384,1.000,bicubic
 deit3_large_patch16_224_in21ft1k,97.310,2.690,99.680,0.320,304.37,224,1.000,bicubic
-convnext_base_384_in22ft1k,97.290,2.710,99.780,0.220,88.59,384,1.000,bicubic
+swinv2_large_window12to24_192to384_22kft1k,97.290,2.710,99.780,0.220,196.74,384,1.000,bicubic
 volo_d5_512,97.290,2.710,99.760,0.240,296.09,512,1.150,bicubic
-swinv2_large_window12to24_192to384_22kft1k,97.280,2.720,99.780,0.220,196.74,384,1.000,bicubic
 swinv2_base_window12to24_192to384_22kft1k,97.260,2.740,99.790,0.210,87.92,384,1.000,bicubic
-convnext_large_in22ft1k,97.260,2.740,99.650,0.350,197.77,224,0.875,bicubic
 deit3_huge_patch14_224_in21ft1k,97.250,2.750,99.720,0.280,632.13,224,1.000,bicubic
+convnext_base.fb_in22k_ft_in1k_384,97.250,2.750,99.710,0.290,88.59,384,1.000,bicubic
 volo_d5_448,97.240,2.760,99.740,0.260,295.91,448,1.150,bicubic
-convnext_xlarge_in22ft1k,97.240,2.760,99.730,0.270,350.20,224,0.875,bicubic
 swinv2_large_window12to16_192to256_22kft1k,97.240,2.760,99.710,0.290,196.74,256,0.900,bicubic
 deit3_base_patch16_384_in21ft1k,97.240,2.760,99.670,0.330,86.88,384,1.000,bicubic
-tf_efficientnet_b7_ns,97.190,2.810,99.700,0.300,66.35,600,0.949,bicubic
-swin_large_patch4_window12_384,97.180,2.820,99.680,0.320,196.74,384,1.000,bicubic
-tf_efficientnetv2_xl_in21ft1k,97.150,2.850,99.620,0.380,208.12,512,1.000,bicubic
+vit_large_patch14_clip_336.laion2b_ft_in1k,97.230,2.770,99.720,0.280,304.53,336,1.000,bicubic
+convnext_base.fb_in22k_ft_in1k,97.220,2.780,99.760,0.240,88.59,288,1.000,bicubic
+convnext_large.fb_in22k_ft_in1k,97.220,2.780,99.730,0.270,197.77,288,1.000,bicubic
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,97.220,2.780,99.700,0.300,86.86,384,1.000,bicubic
+tf_efficientnet_b7.ns_jft_in1k,97.200,2.800,99.700,0.300,66.35,600,0.949,bicubic
+maxvit_base_tf_512.in1k,97.180,2.820,99.640,0.360,119.88,512,1.000,bicubic
+maxvit_small_tf_512.in1k,97.180,2.820,99.620,0.380,69.13,512,1.000,bicubic
+swin_large_patch4_window12_384,97.170,2.830,99.680,0.320,196.74,384,1.000,bicubic
+vit_base_patch16_clip_384.openai_ft_in12k_in1k,97.140,2.860,99.640,0.360,86.86,384,0.950,bicubic
 swin_base_patch4_window12_384,97.120,2.880,99.780,0.220,87.90,384,1.000,bicubic
-tf_efficientnetv2_l_in21ft1k,97.110,2.890,99.710,0.290,118.52,480,1.000,bicubic
-convnext_small_384_in22ft1k,97.090,2.910,99.690,0.310,50.22,384,1.000,bicubic
-vit_base_patch8_224,97.080,2.920,99.620,0.380,86.58,224,0.900,bicubic
+maxvit_base_tf_384.in1k,97.120,2.880,99.570,0.430,119.65,384,1.000,bicubic
+vit_huge_patch14_clip_224.laion2b_ft_in1k,97.100,2.900,99.700,0.300,632.05,224,1.000,bicubic
+convnext_small.fb_in22k_ft_in1k_384,97.100,2.900,99.640,0.360,50.22,384,1.000,bicubic
+vit_base_patch8_224.augreg_in21k_ft_in1k,97.080,2.920,99.620,0.380,86.58,224,0.900,bicubic
 volo_d4_448,97.070,2.930,99.750,0.250,193.41,448,1.150,bicubic
 swinv2_base_window12to16_192to256_22kft1k,97.060,2.940,99.660,0.340,87.92,256,0.900,bicubic
-tf_efficientnet_b6_ns,97.020,2.980,99.710,0.290,43.04,528,0.942,bicubic
-vit_base_patch16_384,97.020,2.980,99.710,0.290,86.86,384,1.000,bicubic
+maxvit_large_tf_512.in1k,97.050,2.950,99.590,0.410,212.33,512,1.000,bicubic
+tf_efficientnet_b6.ns_jft_in1k,97.020,2.980,99.710,0.290,43.04,528,0.942,bicubic
+vit_base_patch16_384.augreg_in21k_ft_in1k,97.020,2.980,99.710,0.290,86.86,384,1.000,bicubic
 volo_d3_448,97.020,2.980,99.680,0.320,86.63,448,1.000,bicubic
+vit_large_patch14_clip_224.laion2b_ft_in1k,97.020,2.980,99.670,0.330,304.20,224,1.000,bicubic
+tf_efficientnetv2_m.in21k_ft_in1k,97.000,3.000,99.630,0.370,54.14,480,1.000,bicubic
 ig_resnext101_32x48d,96.970,3.030,99.670,0.330,828.41,224,0.875,bilinear
-tf_efficientnetv2_m_in21ft1k,96.970,3.030,99.610,0.390,54.14,480,1.000,bicubic
-vit_large_r50_s32_384,96.950,3.050,99.710,0.290,329.09,384,1.000,bicubic
+maxvit_tiny_tf_512.in1k,96.970,3.030,99.670,0.330,31.05,512,1.000,bicubic
+vit_large_r50_s32_384.augreg_in21k_ft_in1k,96.950,3.050,99.710,0.290,329.09,384,1.000,bicubic
 swin_large_patch4_window7_224,96.950,3.050,99.660,0.340,196.53,224,0.900,bicubic
-xcit_large_24_p16_384_dist,96.940,3.060,99.510,0.490,189.10,384,1.000,bicubic
+vit_base_patch8_224.augreg2_in21k_ft_in1k,96.940,3.060,99.640,0.360,86.58,224,0.900,bicubic
+maxvit_large_tf_384.in1k,96.940,3.060,99.580,0.420,212.03,384,1.000,bicubic
+xcit_large_24_p16_384_dist,96.940,3.060,99.520,0.480,189.10,384,1.000,bicubic
 dm_nfnet_f6,96.920,3.080,99.720,0.280,438.36,576,0.956,bicubic
+beitv2_base_patch16_224.in1k_ft_in22k_in1k,96.910,3.090,99.730,0.270,86.53,224,0.900,bicubic
+vit_base_patch16_clip_384.laion2b_ft_in1k,96.910,3.090,99.670,0.330,86.86,384,1.000,bicubic
 volo_d5_224,96.880,3.120,99.670,0.330,295.46,224,0.960,bicubic
-resnetv2_152x4_bitm,96.880,3.120,99.660,0.340,936.53,480,1.000,bilinear
 cait_m48_448,96.880,3.120,99.620,0.380,356.46,448,1.000,bicubic
-tf_efficientnet_b5_ns,96.870,3.130,99.640,0.360,30.39,456,0.934,bicubic
+resnetv2_152x4_bitm,96.870,3.130,99.660,0.340,936.53,480,1.000,bilinear
+tf_efficientnet_b5.ns_jft_in1k,96.870,3.130,99.640,0.360,30.39,456,0.934,bicubic
 deit3_base_patch16_224_in21ft1k,96.870,3.130,99.620,0.380,86.59,224,1.000,bicubic
 deit3_large_patch16_384,96.850,3.150,99.620,0.380,304.76,384,1.000,bicubic
-convnext_base_in22ft1k,96.840,3.160,99.650,0.350,88.59,224,0.875,bicubic
 cait_m36_384,96.830,3.170,99.660,0.340,271.22,384,1.000,bicubic
+vit_base_patch16_clip_384.openai_ft_in1k,96.820,3.180,99.660,0.340,86.86,384,1.000,bicubic
+xcit_small_24_p8_384_dist,96.820,3.180,99.630,0.370,47.63,384,1.000,bicubic
 dm_nfnet_f5,96.810,3.190,99.670,0.330,377.21,544,0.954,bicubic
-xcit_small_24_p8_384_dist,96.810,3.190,99.630,0.370,47.63,384,1.000,bicubic
+convnext_small.fb_in22k_ft_in1k,96.810,3.190,99.510,0.490,50.22,288,1.000,bicubic
 volo_d4_224,96.780,3.220,99.670,0.330,192.96,224,0.960,bicubic
 dm_nfnet_f4,96.780,3.220,99.620,0.380,316.07,512,0.951,bicubic
+flexivit_large.1200ep_in1k,96.780,3.220,99.610,0.390,304.36,240,0.950,bicubic
 xcit_medium_24_p8_384_dist,96.780,3.220,99.610,0.390,84.32,384,1.000,bicubic
 ig_resnext101_32x32d,96.780,3.220,99.530,0.470,468.53,224,0.875,bilinear
+efficientnet_b5.in12k_ft_in1k,96.770,3.230,99.600,0.400,30.39,448,1.000,bicubic
 xcit_large_24_p8_384_dist,96.760,3.240,99.560,0.440,188.93,384,1.000,bicubic
+maxvit_small_tf_384.in1k,96.740,3.260,99.600,0.400,69.02,384,1.000,bicubic
+flexivit_large.600ep_in1k,96.740,3.260,99.550,0.450,304.36,240,0.950,bicubic
+tf_efficientnetv2_l.in1k,96.740,3.260,99.550,0.450,118.52,480,1.000,bicubic
 dm_nfnet_f3,96.730,3.270,99.630,0.370,254.92,416,0.940,bicubic
-vit_large_patch16_224,96.710,3.290,99.650,0.350,304.33,224,0.900,bicubic
-tf_efficientnet_b4_ns,96.710,3.290,99.640,0.360,19.34,380,0.922,bicubic
+vit_large_patch16_224.augreg_in21k_ft_in1k,96.710,3.290,99.650,0.350,304.33,224,0.900,bicubic
+tf_efficientnet_b4.ns_jft_in1k,96.710,3.290,99.640,0.360,19.34,380,0.922,bicubic
 volo_d2_384,96.710,3.290,99.600,0.400,58.87,384,1.000,bicubic
 xcit_medium_24_p16_384_dist,96.700,3.300,99.600,0.400,84.40,384,1.000,bicubic
-tf_efficientnet_b8,96.700,3.300,99.530,0.470,87.41,672,0.954,bicubic
+tf_efficientnet_b8.ra_in1k,96.700,3.300,99.530,0.470,87.41,672,0.954,bicubic
+flexivit_large.300ep_in1k,96.690,3.310,99.580,0.420,304.36,240,0.950,bicubic
 swin_base_patch4_window7_224,96.680,3.320,99.660,0.340,87.77,224,0.900,bicubic
 deit3_small_patch16_384_in21ft1k,96.670,3.330,99.640,0.360,22.21,384,1.000,bicubic
-beit_base_patch16_224,96.660,3.340,99.660,0.340,86.53,224,0.900,bicubic
-tf_efficientnetv2_l,96.650,3.350,99.560,0.440,118.52,480,1.000,bicubic
+beit_base_patch16_224.in22k_ft_in22k_in1k,96.660,3.340,99.660,0.340,86.53,224,0.900,bicubic
 xcit_large_24_p8_224_dist,96.640,3.360,99.460,0.540,188.93,224,1.000,bicubic
 cait_s36_384,96.630,3.370,99.600,0.400,68.37,384,1.000,bicubic
+vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,96.610,3.390,99.480,0.520,88.30,384,1.000,bicubic
 regnetz_e8,96.600,3.400,99.610,0.390,57.70,320,1.000,bicubic
-deit3_huge_patch14_224,96.580,3.420,99.520,0.480,632.13,224,0.900,bicubic
-tf_efficientnet_b7,96.580,3.420,99.520,0.480,66.35,600,0.949,bicubic
+maxvit_tiny_tf_384.in1k,96.600,3.400,99.560,0.440,30.98,384,1.000,bicubic
+vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,96.600,3.400,99.560,0.440,86.57,224,0.950,bicubic
+tf_efficientnet_b7.ra_in1k,96.580,3.420,99.510,0.490,66.35,600,0.949,bicubic
 cait_s24_384,96.570,3.430,99.550,0.450,47.06,384,1.000,bicubic
+deit3_huge_patch14_224,96.570,3.430,99.520,0.480,632.13,224,0.900,bicubic
+vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,96.570,3.430,99.520,0.480,88.34,448,1.000,bicubic
 xcit_small_24_p8_224_dist,96.550,3.450,99.570,0.430,47.63,224,1.000,bicubic
-tf_efficientnet_b8_ap,96.550,3.450,99.540,0.460,87.41,672,0.954,bicubic
-tf_efficientnetv2_m,96.540,3.460,99.570,0.430,54.14,480,1.000,bicubic
+tf_efficientnet_b8.ap_in1k,96.550,3.450,99.540,0.460,87.41,672,0.954,bicubic
 resnetv2_152x2_bitm,96.520,3.480,99.590,0.410,236.34,448,1.000,bilinear
 xcit_medium_24_p8_224_dist,96.520,3.480,99.510,0.490,84.32,224,1.000,bicubic
+vit_medium_patch16_gap_384.in12k_ft_in1k,96.510,3.490,99.620,0.380,39.03,384,0.950,bicubic
 deit_base_distilled_patch16_384,96.510,3.490,99.590,0.410,87.63,384,1.000,bicubic
+vit_base_patch16_224.augreg2_in21k_ft_in1k,96.510,3.490,99.560,0.440,86.57,224,0.900,bicubic
+vit_base_patch16_clip_224.openai_ft_in12k_in1k,96.510,3.490,99.550,0.450,86.57,224,0.950,bicubic
+tf_efficientnetv2_m.in1k,96.480,3.520,99.610,0.390,54.14,480,1.000,bicubic
 xcit_small_12_p8_384_dist,96.480,3.520,99.490,0.510,26.21,384,1.000,bicubic
-tf_efficientnetv2_s_in21ft1k,96.470,3.530,99.570,0.430,21.46,384,1.000,bicubic
+tf_efficientnetv2_s.in21k_ft_in1k,96.470,3.530,99.570,0.430,21.46,384,1.000,bicubic
 volo_d1_384,96.470,3.530,99.550,0.450,26.78,384,1.000,bicubic
 ecaresnet269d,96.460,3.540,99.610,0.390,102.09,352,1.000,bicubic
 dm_nfnet_f2,96.460,3.540,99.540,0.460,193.78,352,0.920,bicubic
-convnext_small_in22ft1k,96.460,3.540,99.470,0.530,50.22,224,0.875,bicubic
-vit_base_r50_s16_384,96.450,3.550,99.660,0.340,98.95,384,1.000,bicubic
+vit_base_r50_s16_384.orig_in21k_ft_in1k,96.450,3.550,99.660,0.340,98.95,384,1.000,bicubic
 eca_nfnet_l2,96.450,3.550,99.620,0.380,56.72,384,1.000,bicubic
-volo_d3_224,96.440,3.560,99.620,0.380,86.33,224,0.960,bicubic
+volo_d3_224,96.450,3.550,99.620,0.380,86.33,224,0.960,bicubic
 ig_resnext101_32x16d,96.440,3.560,99.540,0.460,194.03,224,0.875,bilinear
 seresnextaa101d_32x8d,96.420,3.580,99.520,0.480,93.59,288,1.000,bicubic
 volo_d2_224,96.420,3.580,99.500,0.500,58.68,224,0.960,bicubic
-resnetrs420,96.410,3.590,99.540,0.460,191.89,416,1.000,bicubic
-dm_nfnet_f1,96.380,3.620,99.470,0.530,132.63,320,0.910,bicubic
-tf_efficientnet_b6_ap,96.370,3.630,99.550,0.450,43.04,528,0.942,bicubic
+vit_base_patch32_clip_384.openai_ft_in12k_in1k,96.420,3.580,99.460,0.540,88.30,384,0.950,bicubic
+mvitv2_large,96.410,3.590,99.450,0.550,217.99,224,0.900,bicubic
+resnetrs420,96.400,3.600,99.540,0.460,191.89,416,1.000,bicubic
+convnext_large.fb_in1k,96.400,3.600,99.530,0.470,197.77,288,1.000,bicubic
+dm_nfnet_f1,96.390,3.610,99.470,0.530,132.63,320,0.910,bicubic
+tf_efficientnet_b6.ap_in1k,96.370,3.630,99.550,0.450,43.04,528,0.942,bicubic
 seresnext101d_32x8d,96.360,3.640,99.470,0.530,93.59,288,1.000,bicubic
-tf_efficientnet_b7_ap,96.350,3.650,99.590,0.410,66.35,600,0.949,bicubic
+tf_efficientnet_b7.ap_in1k,96.350,3.650,99.590,0.410,66.35,600,0.949,bicubic
 resmlp_big_24_224_in22ft1k,96.350,3.650,99.520,0.480,129.14,224,0.875,bicubic
+maxvit_base_tf_224.in1k,96.350,3.650,99.370,0.630,119.47,224,0.950,bicubic
 xcit_small_24_p16_384_dist,96.340,3.660,99.580,0.420,47.67,384,1.000,bicubic
 resnetrs200,96.340,3.660,99.550,0.450,93.21,320,1.000,bicubic
+xcit_small_12_p16_384_dist,96.340,3.660,99.490,0.510,26.25,384,1.000,bicubic
 regnetz_040h,96.330,3.670,99.520,0.480,28.94,320,1.000,bicubic
-xcit_small_12_p16_384_dist,96.330,3.670,99.490,0.510,26.25,384,1.000,bicubic
+vit_base_patch16_clip_224.laion2b_ft_in1k,96.320,3.680,99.540,0.460,86.57,224,1.000,bicubic
 xcit_large_24_p16_224_dist,96.320,3.680,99.500,0.500,189.10,224,1.000,bicubic
+maxvit_large_tf_224.in1k,96.320,3.680,99.410,0.590,211.79,224,0.950,bicubic
+vit_base_patch16_clip_224.openai_ft_in1k,96.310,3.690,99.550,0.450,86.57,224,0.900,bicubic
 seresnet152d,96.310,3.690,99.510,0.490,66.84,320,1.000,bicubic
-vit_base_patch16_224,96.300,3.700,99.560,0.440,86.57,224,0.900,bicubic
-tf_efficientnet_b6,96.290,3.710,99.520,0.480,43.04,528,0.942,bicubic
-swsl_resnext101_32x16d,96.280,3.720,99.500,0.500,194.03,224,0.875,bilinear
+convnext_base.fb_in1k,96.310,3.690,99.500,0.500,88.59,288,1.000,bicubic
+vit_base_patch16_224.augreg_in21k_ft_in1k,96.300,3.700,99.560,0.440,86.57,224,0.900,bicubic
+tf_efficientnet_b6.aa_in1k,96.290,3.710,99.520,0.480,43.04,528,0.942,bicubic
 resnetv2_50x3_bitm,96.270,3.730,99.630,0.370,217.32,448,1.000,bilinear
-efficientnetv2_rw_m,96.270,3.730,99.560,0.440,53.24,416,1.000,bicubic
+efficientnetv2_rw_m.agc_in1k,96.270,3.730,99.560,0.440,53.24,416,1.000,bicubic
+swsl_resnext101_32x16d,96.270,3.730,99.500,0.500,194.03,224,0.875,bilinear
 xcit_medium_24_p16_224_dist,96.260,3.740,99.410,0.590,84.40,224,1.000,bicubic
 resnetv2_101x3_bitm,96.250,3.750,99.590,0.410,387.93,448,1.000,bilinear
 swsl_resnext101_32x8d,96.240,3.760,99.590,0.410,88.79,224,0.875,bilinear
@@ -111,559 +168,624 @@ resnetrs350,96.240,3.760,99.470,0.530,163.96,384,1.000,bicubic
 xcit_tiny_24_p8_384_dist,96.240,3.760,99.440,0.560,12.11,384,1.000,bicubic
 deit3_base_patch16_384,96.230,3.770,99.400,0.600,86.88,384,1.000,bicubic
 regnetz_d8_evos,96.220,3.780,99.490,0.510,23.46,320,0.950,bicubic
+maxxvit_rmlp_small_rw_256,96.210,3.790,99.480,0.520,66.01,256,0.950,bicubic
+maxvit_small_tf_224.in1k,96.210,3.790,99.460,0.540,68.93,224,0.950,bicubic
+coatnet_rmlp_2_rw_224,96.200,3.800,99.280,0.720,73.88,224,0.950,bicubic
+vit_base_patch16_384.orig_in21k_ft_in1k,96.190,3.810,99.530,0.470,86.86,384,1.000,bicubic
 resnetv2_152x2_bit_teacher_384,96.190,3.810,99.500,0.500,236.34,384,1.000,bicubic
 deit3_large_patch16_224,96.190,3.810,99.300,0.700,304.37,224,0.900,bicubic
-vit_large_r50_s32_224,96.180,3.820,99.540,0.460,328.99,224,0.900,bicubic
+vit_large_r50_s32_224.augreg_in21k_ft_in1k,96.180,3.820,99.530,0.470,328.99,224,0.900,bicubic
 regnetz_040,96.180,3.820,99.510,0.490,27.12,320,1.000,bicubic
-convnext_tiny_384_in22ft1k,96.170,3.830,99.480,0.520,28.59,384,1.000,bicubic
-swinv2_base_window16_256,96.170,3.830,99.400,0.600,87.92,256,0.900,bicubic
+swinv2_base_window16_256,96.180,3.820,99.400,0.600,87.92,256,0.900,bicubic
+convnext_tiny.fb_in22k_ft_in1k_384,96.170,3.830,99.500,0.500,28.59,384,1.000,bicubic
+deit3_medium_patch16_224_in21ft1k,96.140,3.860,99.490,0.510,38.85,224,1.000,bicubic
 crossvit_18_dagger_408,96.130,3.870,99.470,0.530,44.61,408,1.000,bicubic
-seresnext101_32x8d,96.130,3.870,99.360,0.640,93.57,288,1.000,bicubic
 resnest269e,96.120,3.880,99.520,0.480,110.93,416,0.928,bicubic
+seresnext101_32x8d,96.120,3.880,99.360,0.640,93.57,288,1.000,bicubic
 resnet200d,96.110,3.890,99.460,0.540,64.69,320,1.000,bicubic
-tf_efficientnet_b3_ns,96.100,3.900,99.480,0.520,12.23,300,0.904,bicubic
-tf_efficientnet_b5_ap,96.080,3.920,99.540,0.460,30.39,456,0.934,bicubic
+flexivit_base.1200ep_in1k,96.110,3.890,99.400,0.600,86.59,240,0.950,bicubic
+tf_efficientnet_b3.ns_jft_in1k,96.100,3.900,99.480,0.520,12.23,300,0.904,bicubic
+tf_efficientnet_b5.ap_in1k,96.080,3.920,99.540,0.460,30.39,456,0.934,bicubic
 xcit_large_24_p8_224,96.080,3.920,99.150,0.850,188.93,224,1.000,bicubic
 resnest200e,96.070,3.930,99.480,0.520,70.20,320,0.909,bicubic
 swinv2_base_window8_256,96.070,3.930,99.420,0.580,87.92,256,0.900,bicubic
 pit_b_distilled_224,96.070,3.930,99.380,0.620,74.79,224,0.900,bicubic
 swinv2_small_window16_256,96.070,3.930,99.340,0.660,49.73,256,0.900,bicubic
-vit_small_r26_s32_384,96.060,3.940,99.550,0.450,36.47,384,1.000,bicubic
-resnetrs270,96.060,3.940,99.480,0.520,129.86,352,1.000,bicubic
-swsl_resnext101_32x4d,96.040,3.960,99.530,0.470,44.18,224,0.875,bilinear
+vit_small_r26_s32_384.augreg_in21k_ft_in1k,96.060,3.940,99.560,0.440,36.47,384,1.000,bicubic
+resnetrs270,96.060,3.940,99.490,0.510,129.86,352,1.000,bicubic
+gcvit_base,96.060,3.940,99.380,0.620,90.32,224,0.875,bicubic
+swsl_resnext101_32x4d,96.050,3.950,99.530,0.470,44.18,224,0.875,bilinear
+maxvit_rmlp_tiny_rw_256,96.040,3.960,99.410,0.590,29.15,256,0.950,bicubic
 swin_s3_base_224,96.040,3.960,99.350,0.650,71.13,224,0.900,bicubic
 volo_d1_224,96.030,3.970,99.390,0.610,26.63,224,0.960,bicubic
-vit_base_patch16_224_miil,96.030,3.970,99.350,0.650,86.54,224,0.875,bilinear
-convnext_large,96.020,3.980,99.470,0.530,197.77,224,0.875,bicubic
+vit_base_patch16_224_miil.in21k_ft_in1k,96.030,3.970,99.350,0.650,86.54,224,0.875,bilinear
 regnetz_d8,96.010,3.990,99.520,0.480,23.37,320,1.000,bicubic
 cs3se_edgenet_x,96.010,3.990,99.440,0.560,50.72,320,1.000,bicubic
 cait_xs24_384,96.010,3.990,99.430,0.570,26.67,384,1.000,bicubic
-vit_small_patch16_384,95.980,4.020,99.590,0.410,22.20,384,1.000,bicubic
-tf_efficientnet_b5,95.980,4.020,99.450,0.550,30.39,456,0.934,bicubic
+mvitv2_base,96.010,3.990,99.330,0.670,51.47,224,0.900,bicubic
+vit_small_patch16_384.augreg_in21k_ft_in1k,95.980,4.020,99.590,0.410,22.20,384,1.000,bicubic
+vit_medium_patch16_gap_256.in12k_ft_in1k,95.980,4.020,99.500,0.500,38.86,256,0.950,bicubic
+tf_efficientnet_b5.ra_in1k,95.980,4.020,99.450,0.550,30.39,456,0.934,bicubic
+convnext_small.fb_in1k,95.980,4.020,99.430,0.570,50.22,288,1.000,bicubic
+flexivit_base.300ep_in1k,95.970,4.030,99.370,0.630,86.59,240,0.950,bicubic
+flexivit_base.600ep_in1k,95.960,4.040,99.420,0.580,86.59,240,0.950,bicubic
 xcit_small_12_p8_224_dist,95.960,4.040,99.420,0.580,26.21,224,1.000,bicubic
 resnetrs152,95.960,4.040,99.380,0.620,86.62,320,1.000,bicubic
+maxvit_rmlp_small_rw_224,95.960,4.040,99.350,0.650,64.90,224,0.900,bicubic
+pvt_v2_b5,95.950,4.050,99.390,0.610,81.96,224,0.900,bicubic
 eca_nfnet_l1,95.940,4.060,99.490,0.510,41.41,320,1.000,bicubic
-convnext_base,95.940,4.060,99.380,0.620,88.59,224,0.875,bicubic
-ig_resnext101_32x8d,95.940,4.060,99.380,0.620,88.79,224,0.875,bilinear
-xcit_small_24_p8_224,95.910,4.090,99.180,0.820,47.63,224,1.000,bicubic
-vit_base_patch32_384,95.900,4.100,99.440,0.560,88.30,384,1.000,bicubic
+ig_resnext101_32x8d,95.930,4.070,99.380,0.620,88.79,224,0.875,bilinear
+gcvit_small,95.930,4.070,99.280,0.720,51.09,224,0.875,bicubic
+vit_base_patch32_384.augreg_in21k_ft_in1k,95.900,4.100,99.440,0.560,88.30,384,1.000,bicubic
+pvt_v2_b4,95.900,4.100,99.350,0.650,62.56,224,0.900,bicubic
+xcit_small_24_p8_224,95.900,4.100,99.180,0.820,47.63,224,1.000,bicubic
+mvitv2_small,95.890,4.110,99.360,0.640,34.87,224,0.900,bicubic
 regnety_160,95.880,4.120,99.560,0.440,83.59,288,1.000,bicubic
-sequencer2d_l,95.870,4.130,99.470,0.530,54.30,224,0.875,bicubic
+sequencer2d_l,95.880,4.120,99.470,0.530,54.30,224,0.875,bicubic
 resmlp_big_24_distilled_224,95.870,4.130,99.440,0.560,129.14,224,0.875,bicubic
-regnetz_d32,95.870,4.130,99.430,0.570,27.58,320,0.950,bicubic
 resnet152d,95.870,4.130,99.430,0.570,60.21,320,1.000,bicubic
 xcit_medium_24_p8_224,95.870,4.130,99.080,0.920,84.32,224,1.000,bicubic
-regnety_080,95.850,4.150,99.440,0.560,39.18,288,1.000,bicubic
+regnety_080,95.860,4.140,99.440,0.560,39.18,288,1.000,bicubic
+regnetz_d32,95.860,4.140,99.430,0.570,27.58,320,0.950,bicubic
 swin_s3_small_224,95.840,4.160,99.200,0.800,49.74,224,0.900,bicubic
-deit3_small_patch16_224_in21ft1k,95.820,4.180,99.400,0.600,22.06,224,1.000,bicubic
+deit3_small_patch16_224_in21ft1k,95.820,4.180,99.410,0.590,22.06,224,1.000,bicubic
 crossvit_15_dagger_408,95.820,4.180,99.310,0.690,28.50,408,1.000,bicubic
-xcit_small_24_p16_224_dist,95.790,4.210,99.350,0.650,47.67,224,1.000,bicubic
+tresnet_v2_l,95.820,4.180,99.290,0.710,46.17,224,0.875,bilinear
+maxvit_tiny_tf_224.in1k,95.810,4.190,99.260,0.740,30.92,224,0.950,bicubic
+xcit_small_24_p16_224_dist,95.800,4.200,99.340,0.660,47.67,224,1.000,bicubic
+edgenext_base,95.790,4.210,99.570,0.430,18.51,320,1.000,bicubic
 regnety_064,95.790,4.210,99.290,0.710,30.58,288,1.000,bicubic
 deit3_base_patch16_224,95.780,4.220,99.270,0.730,86.59,224,0.900,bicubic
 regnetv_064,95.770,4.230,99.420,0.580,30.58,288,1.000,bicubic
 resnet101d,95.750,4.250,99.440,0.560,44.57,320,1.000,bicubic
 resnetv2_152x2_bit_teacher,95.750,4.250,99.430,0.570,236.34,224,0.875,bicubic
 deit_base_distilled_patch16_224,95.750,4.250,99.280,0.720,87.34,224,0.900,bicubic
+xcit_small_12_p16_224_dist,95.740,4.260,99.310,0.690,26.25,224,1.000,bicubic
+maxvit_tiny_rw_224,95.740,4.260,99.160,0.840,29.06,224,0.950,bicubic
 regnetv_040,95.730,4.270,99.380,0.620,20.64,288,1.000,bicubic
-convnext_tiny_in22ft1k,95.730,4.270,99.360,0.640,28.59,224,0.875,bicubic
 swinv2_small_window8_256,95.730,4.270,99.360,0.640,49.73,256,0.900,bicubic
-xcit_small_12_p16_224_dist,95.730,4.270,99.300,0.700,26.25,224,1.000,bicubic
 twins_pcpvt_large,95.720,4.280,99.490,0.510,60.99,224,0.900,bicubic
 twins_svt_large,95.720,4.280,99.370,0.630,99.27,224,0.900,bicubic
 swin_small_patch4_window7_224,95.720,4.280,99.290,0.710,49.61,224,0.900,bicubic
-tf_efficientnetv2_s,95.710,4.290,99.400,0.600,21.46,384,1.000,bicubic
-efficientnetv2_rw_s,95.700,4.300,99.380,0.620,23.94,384,1.000,bicubic
+tf_efficientnetv2_s.in1k,95.710,4.290,99.400,0.600,21.46,384,1.000,bicubic
+efficientnetv2_rw_s.ra2_in1k,95.710,4.290,99.380,0.620,23.94,384,1.000,bicubic
 dm_nfnet_f0,95.690,4.310,99.330,0.670,71.49,256,0.900,bicubic
 swinv2_cr_small_ns_224,95.690,4.310,99.310,0.690,49.70,224,0.900,bicubic
 xception65,95.690,4.310,99.310,0.690,39.92,299,0.940,bicubic
+gcvit_tiny,95.680,4.320,99.340,0.660,28.22,224,0.875,bicubic
 xception65p,95.660,4.340,99.270,0.730,39.82,299,0.940,bicubic
+cait_s24_224,95.650,4.350,99.390,0.610,46.92,224,1.000,bicubic
 deit_base_patch16_384,95.650,4.350,99.240,0.760,86.86,384,1.000,bicubic
-cait_s24_224,95.640,4.360,99.390,0.610,46.92,224,1.000,bicubic
-regnetz_c16_evos,95.630,4.370,99.420,0.580,13.49,320,0.950,bicubic
-swsl_resnext50_32x4d,95.610,4.390,99.440,0.560,25.03,224,0.875,bilinear
-deit3_small_patch16_384,95.610,4.390,99.390,0.610,22.21,384,1.000,bicubic
-convnext_small,95.610,4.390,99.260,0.740,50.22,224,0.875,bicubic
-sequencer2d_m,95.600,4.400,99.270,0.730,38.31,224,0.875,bicubic
-tf_efficientnet_b4,95.590,4.410,99.330,0.670,19.34,380,0.922,bicubic
+swsl_resnext50_32x4d,95.620,4.380,99.440,0.560,25.03,224,0.875,bilinear
+regnetz_c16_evos,95.620,4.380,99.420,0.580,13.49,320,0.950,bicubic
+coatnet_1_rw_224,95.620,4.380,99.220,0.780,41.72,224,0.950,bicubic
+efficientformer_l7,95.600,4.400,99.440,0.560,82.23,224,0.950,bicubic
+deit3_small_patch16_384,95.600,4.400,99.390,0.610,22.21,384,1.000,bicubic
+tf_efficientnet_b4.aa_in1k,95.590,4.410,99.330,0.670,19.34,380,0.922,bicubic
+sequencer2d_m,95.590,4.410,99.280,0.720,38.31,224,0.875,bicubic
+tf_efficientnetv2_b3.in21k_ft_in1k,95.590,4.410,99.280,0.720,14.36,300,0.900,bicubic
+resnest101e,95.570,4.430,99.270,0.730,48.28,256,0.875,bilinear
twins_svt_base,95.570,4.430,99.230,0.770,56.07,224,0.900,bicubic -resnest101e,95.560,4.440,99.270,0.730,48.28,256,0.875,bilinear -resnet152,95.550,4.450,99.260,0.740,60.19,224,0.950,bicubic +resnet152,95.550,4.450,99.270,0.730,60.19,224,0.950,bicubic jx_nest_base,95.540,4.460,99.300,0.700,67.72,224,0.875,bicubic resnext101_64x4d,95.540,4.460,99.290,0.710,83.46,288,1.000,bicubic -efficientnet_b4,95.530,4.470,99.400,0.600,19.34,384,1.000,bicubic -jx_nest_small,95.530,4.470,99.220,0.780,38.35,224,0.875,bicubic -tf_efficientnet_b2_ns,95.520,4.480,99.340,0.660,9.11,260,0.890,bicubic +jx_nest_small,95.530,4.470,99.210,0.790,38.35,224,0.875,bicubic +efficientnet_b4.ra2_in1k,95.520,4.480,99.390,0.610,19.34,384,1.000,bicubic +tf_efficientnet_b2.ns_jft_in1k,95.520,4.480,99.340,0.660,9.11,260,0.890,bicubic tresnet_xl_448,95.510,4.490,99.340,0.660,78.44,448,0.875,bilinear -tf_efficientnet_b4_ap,95.490,4.510,99.390,0.610,19.34,380,0.922,bicubic +tf_efficientnet_b4.ap_in1k,95.490,4.510,99.390,0.610,19.34,380,0.922,bicubic xcit_tiny_24_p16_384_dist,95.490,4.510,99.360,0.640,12.12,384,1.000,bicubic -regnety_032,95.480,4.520,99.320,0.680,19.44,288,1.000,bicubic -regnety_040,95.470,4.530,99.420,0.580,20.65,288,1.000,bicubic -cs3edgenet_x,95.470,4.530,99.280,0.720,47.82,288,1.000,bicubic +coatnet_rmlp_1_rw_224,95.490,4.510,99.240,0.760,41.69,224,0.950,bicubic +maxvit_nano_rw_256,95.490,4.510,99.130,0.870,15.45,256,0.950,bicubic +regnety_040,95.480,4.520,99.420,0.580,20.65,288,1.000,bicubic +regnety_032,95.470,4.530,99.320,0.680,19.44,288,1.000,bicubic +pvt_v2_b3,95.470,4.530,99.310,0.690,45.24,224,0.900,bicubic sequencer2d_s,95.470,4.530,99.270,0.730,27.65,224,0.875,bicubic twins_pcpvt_base,95.460,4.540,99.390,0.610,43.83,224,0.900,bicubic -xcit_tiny_24_p8_224_dist,95.460,4.540,99.360,0.640,12.11,224,1.000,bicubic eca_nfnet_l0,95.450,4.550,99.390,0.610,24.14,288,1.000,bicubic -cs3sedarknet_x,95.420,4.580,99.320,0.680,35.40,288,1.000,bicubic +xcit_tiny_24_p8_224_dist,95.450,4.550,99.360,0.640,12.11,224,1.000,bicubic +cs3edgenet_x,95.450,4.550,99.280,0.720,47.82,288,1.000,bicubic +maxvit_rmlp_nano_rw_256,95.440,4.560,99.060,0.940,15.50,256,0.950,bicubic xcit_small_12_p8_224,95.420,4.580,99.200,0.800,26.21,224,1.000,bicubic ssl_resnext101_32x16d,95.410,4.590,99.410,0.590,194.03,224,0.875,bilinear -resnetv2_50x1_bit_distilled,95.400,4.600,99.430,0.570,25.55,224,0.875,bicubic -tresnet_l_448,95.400,4.600,99.300,0.700,55.99,448,0.875,bilinear -swinv2_cr_small_224,95.400,4.600,99.050,0.950,49.70,224,0.900,bicubic +tresnet_l_448,95.410,4.590,99.300,0.700,55.99,448,0.875,bilinear +mvitv2_tiny,95.410,4.590,99.160,0.840,24.17,224,0.900,bicubic +swinv2_cr_small_224,95.410,4.590,99.060,0.940,49.70,224,0.900,bicubic +cs3sedarknet_x,95.400,4.600,99.320,0.680,35.40,288,1.000,bicubic +regnetz_c16,95.400,4.600,99.310,0.690,13.46,320,0.940,bicubic +resnetv2_50x1_bit_distilled,95.390,4.610,99.430,0.570,25.55,224,0.875,bicubic nfnet_l0,95.390,4.610,99.420,0.580,35.07,288,1.000,bicubic -regnetz_c16,95.390,4.610,99.310,0.690,13.46,320,0.940,bicubic mobilevitv2_200_384_in22ft1k,95.390,4.610,99.280,0.720,18.45,384,1.000,bicubic +deit3_medium_patch16_224,95.390,4.610,99.210,0.790,38.85,224,0.900,bicubic tresnet_m,95.380,4.620,99.150,0.850,31.39,224,0.875,bilinear +convnext_nano.in12k_ft_in1k,95.360,4.640,99.450,0.550,15.59,288,1.000,bicubic swinv2_tiny_window16_256,95.360,4.640,99.300,0.700,28.35,256,0.900,bicubic pnasnet5large,95.360,4.640,99.130,0.870,86.06,331,0.911,bicubic 
+maxxvit_rmlp_nano_rw_256,95.350,4.650,99.320,0.680,16.78,256,0.950,bicubic xcit_tiny_12_p8_384_dist,95.340,4.660,99.340,0.660,6.71,384,1.000,bicubic -mobilevitv2_150_384_in22ft1k,95.340,4.660,99.130,0.870,10.59,384,1.000,bicubic -ssl_resnext101_32x8d,95.330,4.670,99.310,0.690,88.79,224,0.875,bilinear +ssl_resnext101_32x8d,95.340,4.660,99.320,0.680,88.79,224,0.875,bilinear +mobilevitv2_150_384_in22ft1k,95.330,4.670,99.130,0.870,10.59,384,1.000,bicubic resnetv2_101x1_bitm,95.320,4.680,99.370,0.630,44.54,448,1.000,bilinear -vit_relpos_medium_patch16_cls_224,95.300,4.700,99.090,0.910,38.76,224,0.900,bicubic -gc_efficientnetv2_rw_t,95.290,4.710,99.220,0.780,13.68,288,1.000,bicubic -cs3darknet_x,95.270,4.730,99.280,0.720,35.05,288,1.000,bicubic +vit_relpos_medium_patch16_cls_224.sw_in1k,95.300,4.700,99.090,0.910,38.76,224,0.900,bicubic +cs3darknet_x,95.280,4.720,99.280,0.720,35.05,288,1.000,bicubic +gc_efficientnetv2_rw_t.agc_in1k,95.280,4.720,99.220,0.780,13.68,288,1.000,bicubic +flexivit_small.600ep_in1k,95.270,4.730,99.160,0.840,22.06,240,0.950,bicubic +convnext_tiny_hnf.a2h_in1k,95.270,4.730,98.980,1.020,28.59,288,1.000,bicubic +mobilevitv2_175_384_in22ft1k,95.260,4.740,99.380,0.620,14.25,384,1.000,bicubic resnetrs101,95.250,4.750,99.210,0.790,63.62,288,0.940,bicubic -vit_relpos_base_patch16_clsgap_224,95.250,4.750,99.200,0.800,86.43,224,0.900,bicubic -mobilevitv2_175_384_in22ft1k,95.240,4.760,99.380,0.620,14.25,384,1.000,bicubic -vit_large_patch32_384,95.240,4.760,99.320,0.680,306.63,384,1.000,bicubic +vit_relpos_base_patch16_clsgap_224.sw_in1k,95.250,4.750,99.200,0.800,86.43,224,0.900,bicubic +vit_large_patch32_384.orig_in21k_ft_in1k,95.240,4.760,99.320,0.680,306.63,384,1.000,bicubic +vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,95.240,4.760,99.240,0.760,88.22,224,0.900,bicubic cait_xxs36_384,95.220,4.780,99.320,0.680,17.37,384,1.000,bicubic +efficientformer_l3,95.210,4.790,99.310,0.690,31.41,224,0.950,bicubic +vit_base_patch16_224.orig_in21k_ft_in1k,95.210,4.790,99.230,0.770,86.57,224,0.900,bicubic +vit_relpos_medium_patch16_224.sw_in1k,95.210,4.790,99.220,0.780,38.75,224,0.900,bicubic levit_384,95.210,4.790,99.160,0.840,39.13,224,0.900,bicubic swsl_resnet50,95.200,4.800,99.390,0.610,25.56,224,0.875,bilinear +convnext_tiny.fb_in1k,95.200,4.800,99.330,0.670,28.59,288,1.000,bicubic resnet51q,95.200,4.800,99.280,0.720,35.70,288,1.000,bilinear -vit_relpos_medium_patch16_224,95.200,4.800,99.220,0.780,38.75,224,0.900,bicubic +pvt_v2_b2_li,95.200,4.800,99.260,0.740,22.55,224,0.900,bicubic +flexivit_small.1200ep_in1k,95.200,4.800,99.170,0.830,22.06,240,0.950,bicubic crossvit_18_dagger_240,95.180,4.820,99.120,0.880,44.27,240,0.875,bicubic +ssl_resnext101_32x4d,95.160,4.840,99.300,0.700,44.18,224,0.875,bilinear ecaresnet101d,95.160,4.840,99.230,0.770,44.57,224,0.875,bicubic -ssl_resnext101_32x4d,95.150,4.850,99.300,0.700,44.18,224,0.875,bilinear +vit_relpos_base_patch16_224.sw_in1k,95.150,4.850,99.300,0.700,86.43,224,0.900,bicubic +flexivit_small.300ep_in1k,95.150,4.850,99.160,0.840,22.06,240,0.950,bicubic nasnetalarge,95.150,4.850,99.130,0.870,88.75,331,0.911,bicubic -efficientnet_b3,95.140,4.860,99.210,0.790,12.23,320,1.000,bicubic -vit_relpos_base_patch16_224,95.130,4.870,99.300,0.700,86.43,224,0.900,bicubic -fbnetv3_g,95.130,4.870,99.200,0.800,16.62,288,0.950,bilinear +efficientnet_b3.ra2_in1k,95.140,4.860,99.210,0.790,12.23,320,1.000,bicubic +resnetv2_50d_evos,95.130,4.870,99.230,0.770,25.59,288,0.950,bicubic 
+vit_small_r26_s32_224.augreg_in21k_ft_in1k,95.130,4.870,99.220,0.780,36.43,224,0.900,bicubic +fbnetv3_g.ra2_in1k,95.130,4.870,99.200,0.800,16.62,288,0.950,bilinear poolformer_m48,95.130,4.870,99.120,0.880,73.47,224,0.950,bicubic -xcit_medium_24_p16_224,95.130,4.870,98.930,1.070,84.40,224,1.000,bicubic -resnetv2_50d_evos,95.120,4.880,99.230,0.770,25.59,288,0.950,bicubic -vit_small_r26_s32_224,95.120,4.880,99.220,0.780,36.43,224,0.900,bicubic -cs3sedarknet_l,95.120,4.880,99.210,0.790,21.91,288,0.950,bicubic -tf_efficientnetv2_b3,95.120,4.880,99.200,0.800,14.36,300,0.904,bicubic +xcit_medium_24_p16_224,95.130,4.870,98.920,1.080,84.40,224,1.000,bicubic +tf_efficientnetv2_b3.in1k,95.120,4.880,99.200,0.800,14.36,300,0.904,bicubic resnet61q,95.120,4.880,99.080,0.920,36.85,288,1.000,bicubic +cs3sedarknet_l,95.110,4.890,99.210,0.790,21.91,288,0.950,bicubic convit_base,95.100,4.900,99.140,0.860,86.54,224,0.875,bicubic resnetv2_50d_gn,95.100,4.900,99.060,0.940,25.57,288,0.950,bicubic -xcit_small_24_p16_224,95.080,4.920,99.070,0.930,47.67,224,1.000,bicubic -coat_lite_small,95.080,4.920,99.030,0.970,19.84,224,0.900,bicubic +coatnet_rmlp_nano_rw_224,95.090,4.910,99.170,0.830,15.15,224,0.900,bicubic +xcit_small_24_p16_224,95.080,4.920,99.060,0.940,47.67,224,1.000,bicubic +coat_lite_small,95.080,4.920,99.020,0.980,19.84,224,0.900,bicubic ecaresnet50t,95.070,4.930,99.290,0.710,25.57,320,0.950,bicubic -efficientnetv2_rw_t,95.070,4.930,99.220,0.780,13.65,288,1.000,bicubic -vit_relpos_medium_patch16_rpn_224,95.070,4.930,99.190,0.810,38.73,224,0.900,bicubic +efficientnetv2_rw_t.ra2_in1k,95.070,4.930,99.220,0.780,13.65,288,1.000,bicubic +vit_relpos_medium_patch16_rpn_224.sw_in1k,95.070,4.930,99.200,0.800,38.73,224,0.900,bicubic +xception41p,95.070,4.930,99.150,0.850,26.91,299,0.940,bicubic crossvit_18_240,95.070,4.930,99.120,0.880,43.27,240,0.875,bicubic crossvit_base_240,95.070,4.930,98.980,1.020,105.03,240,0.875,bicubic tresnet_xl,95.060,4.940,99.260,0.740,78.44,224,0.875,bilinear -xception41p,95.060,4.940,99.150,0.850,26.91,299,0.940,bicubic +coatnet_nano_rw_224,95.050,4.950,99.150,0.850,15.14,224,0.900,bicubic mobilevitv2_200_in22ft1k,95.050,4.950,99.080,0.920,18.45,256,0.888,bicubic swinv2_tiny_window8_256,95.030,4.970,99.170,0.830,28.35,256,0.900,bicubic +halo2botnet50ts_256,95.030,4.970,99.030,0.970,22.64,256,0.950,bicubic +gcvit_xtiny,95.010,4.990,99.180,0.820,19.98,224,0.875,bicubic +pvt_v2_b2,95.010,4.990,99.140,0.860,25.36,224,0.900,bicubic poolformer_m36,95.010,4.990,99.100,0.900,56.17,224,0.950,bicubic -halo2botnet50ts_256,95.010,4.990,99.040,0.960,22.64,256,0.950,bicubic deit_base_patch16_224,95.010,4.990,98.980,1.020,86.57,224,0.900,bicubic +coatnet_bn_0_rw_224,94.980,5.020,99.230,0.770,27.44,224,0.950,bicubic crossvit_15_dagger_240,94.980,5.020,99.160,0.840,28.21,240,0.875,bicubic -resnet101,94.980,5.020,99.080,0.920,44.55,224,0.950,bicubic -visformer_small,94.970,5.030,99.210,0.790,40.22,224,0.900,bicubic convmixer_1536_20,94.970,5.030,99.170,0.830,51.63,224,0.960,bicubic -tf_efficientnet_b3_ap,94.970,5.030,99.110,0.890,12.23,300,0.904,bicubic -convnext_tiny,94.960,5.040,99.200,0.800,28.59,224,0.875,bicubic +tf_efficientnet_b3.ap_in1k,94.970,5.030,99.110,0.890,12.23,300,0.904,bicubic +visformer_small,94.960,5.040,99.210,0.790,40.22,224,0.900,bicubic jx_nest_tiny,94.950,5.050,99.100,0.900,17.06,224,0.875,bicubic -xcit_large_24_p16_224,94.950,5.050,98.830,1.170,189.10,224,1.000,bicubic +resnet101,94.950,5.050,99.070,0.930,44.55,224,0.950,bicubic 
+xcit_large_24_p16_224,94.940,5.060,98.830,1.170,189.10,224,1.000,bicubic gernet_l,94.930,5.070,99.200,0.800,31.08,256,0.875,bilinear -cait_xxs24_384,94.930,5.070,99.140,0.860,12.03,384,1.000,bicubic -resnetv2_101,94.930,5.070,99.120,0.880,44.54,224,0.950,bicubic +cait_xxs24_384,94.920,5.080,99.140,0.860,12.03,384,1.000,bicubic +resnetv2_101,94.920,5.080,99.120,0.880,44.54,224,0.950,bicubic convit_small,94.920,5.080,99.110,0.890,27.78,224,0.875,bicubic -tf_efficientnet_b3,94.910,5.090,99.110,0.890,12.23,300,0.904,bicubic -vit_srelpos_medium_patch16_224,94.900,5.100,99.200,0.800,38.74,224,0.900,bicubic +tf_efficientnet_b3.aa_in1k,94.910,5.090,99.110,0.890,12.23,300,0.904,bicubic +coatnet_0_rw_224,94.910,5.090,99.020,0.980,27.44,224,0.950,bicubic +vit_srelpos_medium_patch16_224.sw_in1k,94.900,5.100,99.200,0.800,38.74,224,0.900,bicubic swin_s3_tiny_224,94.900,5.100,99.160,0.840,28.33,224,0.900,bicubic -tresnet_l,94.900,5.100,99.030,0.970,55.99,224,0.875,bilinear xcit_tiny_24_p8_224,94.890,5.110,99.190,0.810,12.11,224,1.000,bicubic -mixer_b16_224_miil,94.890,5.110,99.080,0.920,59.88,224,0.875,bilinear -vit_small_patch16_224,94.880,5.120,99.270,0.730,22.05,224,0.900,bicubic -resnetaa50,94.880,5.120,99.130,0.870,25.56,288,1.000,bicubic -tf_efficientnet_lite4,94.870,5.130,99.090,0.910,13.01,380,0.920,bilinear -tf_efficientnet_b1_ns,94.860,5.140,99.250,0.750,7.79,240,0.882,bicubic -convnext_nano,94.860,5.140,99.150,0.850,15.59,288,1.000,bicubic +tresnet_l,94.890,5.110,99.030,0.970,55.99,224,0.875,bilinear +vit_small_patch16_224.augreg_in21k_ft_in1k,94.880,5.120,99.270,0.730,22.05,224,0.900,bicubic +mixer_b16_224_miil,94.880,5.120,99.080,0.920,59.88,224,0.875,bilinear +convnext_nano.d1h_in1k,94.870,5.130,99.140,0.860,15.59,288,1.000,bicubic +resnetaa50,94.870,5.130,99.120,0.880,25.56,288,1.000,bicubic +tf_efficientnet_lite4.in1k,94.870,5.130,99.090,0.910,13.01,380,0.920,bilinear +tf_efficientnet_b1.ns_jft_in1k,94.860,5.140,99.250,0.750,7.79,240,0.882,bicubic +coatnext_nano_rw_224,94.850,5.150,99.200,0.800,14.70,224,0.900,bicubic edgenext_small,94.830,5.170,99.410,0.590,5.59,320,1.000,bicubic -vit_base_patch16_rpn_224,94.820,5.180,99.090,0.910,86.54,224,0.900,bicubic -xcit_small_12_p16_224,94.820,5.180,99.060,0.940,26.25,224,1.000,bicubic -seresnext50_32x4d,94.810,5.190,99.130,0.870,27.56,224,0.875,bicubic +vit_base_patch16_rpn_224.in1k,94.830,5.170,99.090,0.910,86.54,224,0.900,bicubic +xcit_small_12_p16_224,94.830,5.170,99.060,0.940,26.25,224,1.000,bicubic +seresnext50_32x4d,94.820,5.180,99.130,0.870,27.56,224,0.875,bicubic cs3darknet_focus_l,94.790,5.210,99.150,0.850,21.15,288,0.950,bicubic +mobilevitv2_175_in22ft1k,94.790,5.210,99.090,0.910,14.25,256,0.888,bicubic pit_b_224,94.790,5.210,98.820,1.180,73.76,224,0.900,bicubic -mobilevitv2_175_in22ft1k,94.780,5.220,99.100,0.900,14.25,256,0.888,bicubic -convnext_tiny_hnf,94.770,5.230,99.160,0.840,28.59,224,0.950,bicubic +lamhalobotnet50ts_256,94.780,5.220,98.980,1.020,22.57,256,0.950,bicubic twins_svt_small,94.770,5.230,99.080,0.920,24.06,224,0.900,bicubic -lamhalobotnet50ts_256,94.770,5.230,98.980,1.020,22.57,256,0.950,bicubic coat_mini,94.770,5.230,98.950,1.050,10.34,224,0.900,bicubic swinv2_cr_tiny_ns_224,94.760,5.240,99.110,0.890,28.33,224,0.900,bicubic -resnetv2_50x1_bitm,94.750,5.250,99.180,0.820,25.55,448,1.000,bilinear -pit_s_distilled_224,94.740,5.260,99.180,0.820,24.04,224,0.900,bicubic +vit_base_patch32_clip_224.laion2b_ft_in1k,94.740,5.260,99.070,0.930,88.22,224,0.900,bicubic 
+pit_s_distilled_224,94.730,5.270,99.190,0.810,24.04,224,0.900,bicubic +resnetv2_50x1_bitm,94.730,5.270,99.180,0.820,25.55,448,1.000,bilinear legacy_senet154,94.730,5.270,99.100,0.900,115.09,224,0.875,bilinear xcit_tiny_12_p8_224_dist,94.720,5.280,99.180,0.820,6.71,224,1.000,bicubic crossvit_15_240,94.720,5.280,99.080,0.920,27.53,240,0.875,bicubic gluon_resnet152_v1s,94.720,5.280,99.060,0.940,60.32,224,0.875,bicubic -resnest50d_4s2x40d,94.710,5.290,99.140,0.860,30.42,224,0.875,bicubic +resnest50d_4s2x40d,94.710,5.290,99.130,0.870,30.42,224,0.875,bicubic gluon_senet154,94.710,5.290,98.970,1.030,115.09,224,0.875,bicubic -halonet50ts,94.710,5.290,98.830,1.170,22.73,256,0.940,bicubic ssl_resnext50_32x4d,94.700,5.300,99.240,0.760,25.03,224,0.875,bilinear -vit_relpos_small_patch16_224,94.690,5.310,99.100,0.900,21.98,224,0.900,bicubic -mobilevitv2_150_in22ft1k,94.690,5.310,98.920,1.080,10.59,256,0.888,bicubic +mobilevitv2_150_in22ft1k,94.700,5.300,98.920,1.080,10.59,256,0.888,bicubic +halonet50ts,94.700,5.300,98.830,1.170,22.73,256,0.940,bicubic +vit_relpos_small_patch16_224.sw_in1k,94.690,5.310,99.100,0.900,21.98,224,0.900,bicubic deit3_small_patch16_224,94.690,5.310,98.750,1.250,22.06,224,0.900,bicubic cs3darknet_l,94.680,5.320,99.220,0.780,21.16,288,0.950,bicubic regnetz_b16,94.680,5.320,99.160,0.840,9.72,288,0.940,bicubic -efficientnet_el,94.670,5.330,99.130,0.870,10.59,300,0.904,bicubic +efficientnet_el.ra_in1k,94.670,5.330,99.130,0.870,10.59,300,0.904,bicubic +wide_resnet50_2,94.670,5.330,99.050,0.950,68.88,224,0.875,bicubic tresnet_m_448,94.660,5.340,99.150,0.850,31.39,448,0.875,bilinear rexnet_200,94.660,5.340,99.090,0.910,16.37,224,0.875,bicubic -wide_resnet50_2,94.660,5.340,99.050,0.950,68.88,224,0.875,bicubic -gluon_seresnext101_64x4d,94.660,5.340,98.980,1.020,88.23,224,0.875,bicubic +gluon_seresnext101_64x4d,94.650,5.350,98.980,1.020,88.23,224,0.875,bicubic +poolformer_s36,94.630,5.370,99.050,0.950,30.86,224,0.900,bicubic +vit_small_patch16_384.augreg_in1k,94.620,5.380,99.140,0.860,22.20,384,1.000,bicubic swin_tiny_patch4_window7_224,94.620,5.380,99.120,0.880,28.29,224,0.900,bicubic -poolformer_s36,94.620,5.380,99.050,0.950,30.86,224,0.900,bicubic resnest50d,94.620,5.380,99.030,0.970,27.48,224,0.875,bilinear gcresnet50t,94.620,5.380,98.980,1.020,25.90,256,0.900,bicubic twins_pcpvt_small,94.600,5.400,99.150,0.850,24.11,224,0.900,bicubic -vit_small_patch32_384,94.600,5.400,99.140,0.860,22.92,384,1.000,bicubic -deit_small_distilled_patch16_224,94.600,5.400,99.100,0.900,22.44,224,0.900,bicubic -crossvit_small_240,94.580,5.420,99.120,0.880,26.86,240,0.875,bicubic -efficientnet_b3_pruned,94.580,5.420,99.070,0.930,9.86,300,0.904,bicubic -pit_s_224,94.580,5.420,98.930,1.070,23.46,224,0.900,bicubic -resnext50_32x4d,94.580,5.420,98.800,1.200,25.03,224,0.950,bicubic -tnt_s_patch16_224,94.570,5.430,99.180,0.820,23.76,224,0.900,bicubic -lambda_resnet50ts,94.570,5.430,98.650,1.350,21.54,256,0.950,bicubic -repvgg_b3,94.560,5.440,98.910,1.090,123.09,224,0.875,bilinear -resmlp_36_distilled_224,94.550,5.450,99.160,0.840,44.69,224,0.875,bicubic -vit_srelpos_small_patch16_224,94.550,5.450,99.140,0.860,21.97,224,0.900,bicubic +vit_small_patch32_384.augreg_in21k_ft_in1k,94.590,5.410,99.140,0.860,22.92,384,1.000,bicubic +deit_small_distilled_patch16_224,94.590,5.410,99.100,0.900,22.44,224,0.900,bicubic +pit_s_224,94.590,5.410,98.930,1.070,23.46,224,0.900,bicubic +tnt_s_patch16_224,94.580,5.420,99.180,0.820,23.76,224,0.900,bicubic +crossvit_small_240,94.580,5.420,99.110,0.890,26.86,240,0.875,bicubic 
+efficientnet_b3_pruned.in1k,94.580,5.420,99.070,0.930,9.86,300,0.904,bicubic +convnext_nano_ols.d1h_in1k,94.580,5.420,99.050,0.950,15.65,288,1.000,bicubic +resmlp_36_distilled_224,94.570,5.430,99.160,0.840,44.69,224,0.875,bicubic +resnext50_32x4d,94.570,5.430,98.800,1.200,25.03,224,0.950,bicubic +vit_srelpos_small_patch16_224.sw_in1k,94.550,5.450,99.140,0.860,21.97,224,0.900,bicubic gernet_m,94.550,5.450,98.930,1.070,21.14,224,0.875,bilinear -sehalonet33ts,94.540,5.460,98.760,1.240,13.69,256,0.940,bicubic +repvgg_b3,94.550,5.450,98.910,1.090,123.09,224,0.875,bilinear +lambda_resnet50ts,94.550,5.450,98.660,1.340,21.54,256,0.950,bicubic xcit_tiny_12_p16_384_dist,94.530,5.470,99.170,0.830,6.72,384,1.000,bicubic regnety_320,94.520,5.480,99.170,0.830,145.05,224,0.875,bicubic -haloregnetz_b,94.520,5.480,98.960,1.040,11.68,224,0.940,bicubic +sehalonet33ts,94.520,5.480,98.760,1.240,13.69,256,0.940,bicubic mobilevitv2_200,94.510,5.490,98.970,1.030,18.45,256,0.888,bicubic -repvgg_b3g4,94.500,5.500,99.020,0.980,83.83,224,0.875,bilinear -ecaresnet101d_pruned,94.460,5.540,99.090,0.910,24.88,224,0.875,bicubic +haloregnetz_b,94.510,5.490,98.960,1.040,11.68,224,0.940,bicubic +repvgg_b3g4,94.490,5.510,99.020,0.980,83.83,224,0.875,bilinear +ecaresnet101d_pruned,94.450,5.550,99.100,0.900,24.88,224,0.875,bicubic gluon_seresnext101_32x4d,94.450,5.550,99.090,0.910,48.96,224,0.875,bicubic +vit_base_patch32_clip_224.openai_ft_in1k,94.440,5.560,99.180,0.820,88.22,224,0.900,bicubic +vit_base_patch16_384.augreg_in1k,94.440,5.560,99.020,0.980,86.86,384,1.000,bicubic gluon_resnet152_v1d,94.440,5.560,99.010,0.990,60.21,224,0.875,bicubic -convmixer_768_32,94.430,5.570,99.110,0.890,21.11,224,0.960,bicubic -levit_256,94.410,5.590,99.060,0.940,18.89,224,0.900,bicubic +convmixer_768_32,94.420,5.580,99.110,0.890,21.11,224,0.960,bicubic gcresnext50ts,94.410,5.590,98.990,1.010,15.67,256,0.900,bicubic -nf_resnet50,94.390,5.610,99.070,0.930,25.56,288,0.940,bicubic +nf_resnet50,94.400,5.600,99.070,0.930,25.56,288,0.940,bicubic +levit_256,94.400,5.600,99.060,0.940,18.89,224,0.900,bicubic resnest50d_1s4x24d,94.390,5.610,99.070,0.930,25.68,224,0.875,bicubic -vit_base_patch32_224,94.390,5.610,99.060,0.940,88.22,224,0.900,bicubic +vit_base_patch32_224.augreg_in21k_ft_in1k,94.390,5.610,99.060,0.940,88.22,224,0.900,bicubic inception_v4,94.380,5.620,98.820,1.180,42.68,299,0.875,bicubic -efficientnet_b2,94.370,5.630,99.050,0.950,9.11,288,1.000,bicubic -tf_efficientnet_el,94.360,5.640,99.100,0.900,10.59,300,0.904,bicubic +darknet53,94.370,5.630,99.050,0.950,41.61,288,1.000,bicubic +efficientnet_b2.ra_in1k,94.370,5.630,99.050,0.950,9.11,288,1.000,bicubic +tf_efficientnet_el.in1k,94.360,5.640,99.100,0.900,10.59,300,0.904,bicubic xcit_tiny_12_p8_224,94.360,5.640,99.070,0.930,6.71,224,1.000,bicubic -darknet53,94.360,5.640,99.050,0.950,41.61,288,1.000,bicubic edgenext_small_rw,94.360,5.640,99.040,0.960,7.83,320,1.000,bicubic gluon_resnext101_64x4d,94.350,5.650,98.880,1.120,83.46,224,0.875,bicubic -resmlp_24_distilled_224,94.340,5.660,99.090,0.910,30.02,224,0.875,bicubic +inception_resnet_v2,94.340,5.660,98.800,1.200,55.84,299,0.897,bicubic +resmlp_24_distilled_224,94.330,5.670,99.090,0.910,30.02,224,0.875,bicubic poolformer_s24,94.330,5.670,99.060,0.940,21.39,224,0.900,bicubic -inception_resnet_v2,94.330,5.670,98.800,1.200,55.84,299,0.897,bicubic -ssl_resnet50,94.320,5.680,99.150,0.850,25.56,224,0.875,bilinear -sebotnet33ts_256,94.310,5.690,98.600,1.400,13.70,256,0.940,bicubic -rexnet_150,94.280,5.720,99.080,0.920,9.73,224,0.875,bicubic 
-tf_efficientnet_b2_ap,94.270,5.730,98.950,1.050,9.11,260,0.890,bicubic -resnetv2_50,94.270,5.730,98.930,1.070,25.55,224,0.950,bicubic +sebotnet33ts_256,94.330,5.670,98.580,1.420,13.70,256,0.940,bicubic +ssl_resnet50,94.310,5.690,99.150,0.850,25.56,224,0.875,bilinear +resnetv2_50,94.290,5.710,98.930,1.070,25.55,224,0.950,bicubic +regnetx_120,94.270,5.730,99.190,0.810,46.11,224,0.875,bicubic +rexnet_150,94.270,5.730,99.080,0.920,9.73,224,0.875,bicubic +tf_efficientnet_b2.ap_in1k,94.270,5.730,98.950,1.050,9.11,260,0.890,bicubic seresnet33ts,94.270,5.730,98.780,1.220,19.78,256,0.900,bicubic -regnetx_120,94.260,5.740,99.190,0.810,46.11,224,0.875,bicubic resmlp_big_24_224,94.260,5.740,98.820,1.180,129.14,224,0.875,bicubic -cspresnext50,94.240,5.760,99.050,0.950,20.57,256,0.887,bilinear +cspresnext50,94.250,5.750,99.050,0.950,20.57,256,0.887,bilinear +xcit_tiny_24_p16_224_dist,94.230,5.770,98.960,1.040,12.12,224,1.000,bicubic mobilevitv2_175,94.230,5.770,98.930,1.070,14.25,256,0.888,bicubic -mixnet_xl,94.230,5.770,98.820,1.180,11.90,224,0.875,bicubic -regnetx_320,94.220,5.780,99.050,0.950,107.81,224,0.875,bicubic -xcit_tiny_24_p16_224_dist,94.220,5.780,98.960,1.040,12.12,224,1.000,bicubic -tf_efficientnet_b2,94.210,5.790,99.040,0.960,9.11,260,0.890,bicubic +mixnet_xl.ra_in1k,94.230,5.770,98.820,1.180,11.90,224,0.875,bicubic +maxvit_rmlp_pico_rw_256,94.220,5.780,99.000,1.000,7.52,256,0.950,bicubic +regnetx_320,94.210,5.790,99.050,0.950,107.81,224,0.875,bicubic +tf_efficientnet_b2.aa_in1k,94.210,5.790,99.030,0.970,9.11,260,0.890,bicubic darknetaa53,94.210,5.790,98.950,1.050,36.02,288,1.000,bilinear -ecaresnet50d,94.200,5.800,99.020,0.980,25.58,224,0.875,bicubic -gluon_resnet101_v1d,94.180,5.820,98.940,1.060,44.57,224,0.875,bicubic -dpn92,94.180,5.820,98.930,1.070,37.67,224,0.875,bicubic +ecaresnet50d,94.190,5.810,99.020,0.980,25.58,224,0.875,bicubic +dpn92,94.190,5.810,98.930,1.070,37.67,224,0.875,bicubic resnet50_gn,94.180,5.820,98.920,1.080,25.56,224,0.940,bicubic gluon_resnet101_v1s,94.170,5.830,99.010,0.990,44.67,224,0.875,bicubic +gluon_resnet101_v1d,94.170,5.830,98.940,1.060,44.57,224,0.875,bicubic gluon_seresnext50_32x4d,94.170,5.830,98.910,1.090,27.56,224,0.875,bicubic ecaresnetlight,94.140,5.860,98.950,1.050,30.16,224,0.875,bicubic -legacy_seresnext101_32x4d,94.120,5.880,98.970,1.030,48.96,224,0.875,bilinear +legacy_seresnext101_32x4d,94.130,5.870,98.970,1.030,48.96,224,0.875,bilinear +tf_efficientnet_lite3.in1k,94.130,5.870,98.960,1.040,8.20,300,0.904,bilinear +ens_adv_inception_resnet_v2,94.130,5.870,98.790,1.210,55.84,299,0.897,bicubic gluon_resnext101_32x4d,94.120,5.880,98.930,1.070,44.18,224,0.875,bicubic -ens_adv_inception_resnet_v2,94.120,5.880,98.790,1.210,55.84,299,0.897,bicubic -tf_efficientnet_lite3,94.110,5.890,98.960,1.040,8.20,300,0.904,bilinear -efficientnet_el_pruned,94.090,5.910,99.010,0.990,10.59,300,0.904,bicubic +efficientnet_el_pruned.in1k,94.090,5.910,99.010,0.990,10.59,300,0.904,bicubic cspdarknet53,94.090,5.910,98.980,1.020,27.64,256,0.887,bilinear -seresnet50,94.080,5.920,98.950,1.050,28.09,224,0.875,bicubic +seresnet50,94.080,5.920,98.970,1.030,28.09,224,0.875,bicubic +tf_efficientnetv2_b2.in1k,94.070,5.930,98.930,1.070,10.10,260,0.890,bicubic resnet50d,94.070,5.930,98.920,1.080,25.58,224,0.875,bicubic -mobilevitv2_150,94.070,5.930,98.900,1.100,10.59,256,0.888,bicubic -tf_efficientnetv2_b2,94.060,5.940,98.930,1.070,10.10,260,0.890,bicubic -hrnet_w48,94.030,5.970,99.030,0.970,77.47,224,0.875,bilinear 
-gluon_resnet152_v1b,94.030,5.970,98.750,1.250,60.19,224,0.875,bicubic +gcvit_xxtiny,94.050,5.950,99.070,0.930,12.00,224,0.875,bicubic +mobilevitv2_150,94.050,5.950,98.900,1.100,10.59,256,0.888,bicubic +hrnet_w48,94.030,5.970,99.040,0.960,77.47,224,0.875,bilinear +convnext_pico.d1_in1k,94.030,5.970,99.000,1.000,9.05,288,0.950,bicubic +convnext_pico_ols.d1_in1k,94.030,5.970,98.940,1.060,9.06,288,1.000,bicubic +gluon_resnet152_v1b,94.030,5.970,98.740,1.260,60.19,224,0.875,bicubic resnetrs50,94.020,5.980,98.850,1.150,35.69,224,0.910,bicubic regnety_120,94.010,5.990,99.030,0.970,51.82,224,0.875,bicubic gluon_xception65,94.010,5.990,99.020,0.980,39.92,299,0.903,bicubic dla102x2,94.000,6.000,99.030,0.970,41.28,224,0.875,bilinear -deit_small_patch16_224,93.990,6.010,98.960,1.040,22.05,224,0.900,bicubic -ecaresnet26t,93.960,6.040,98.920,1.080,16.01,320,0.950,bicubic -dpn107,93.960,6.040,98.830,1.170,86.92,224,0.875,bicubic -skresnext50_32x4d,93.950,6.050,98.830,1.170,27.48,224,0.875,bicubic -dpn98,93.930,6.070,98.920,1.080,61.57,224,0.875,bicubic -cait_xxs36_224,93.930,6.070,98.890,1.110,17.30,224,1.000,bicubic -resnet50,93.930,6.070,98.470,1.530,25.56,224,0.950,bicubic -regnetx_160,93.890,6.110,99.090,0.910,54.28,224,0.875,bicubic -vit_base_patch16_224_sam,93.890,6.110,98.890,1.110,86.57,224,0.900,bicubic -gluon_resnet152_v1c,93.890,6.110,98.800,1.200,60.21,224,0.875,bicubic -xception71,93.880,6.120,98.950,1.050,42.34,299,0.903,bicubic -nf_regnet_b1,93.880,6.120,98.750,1.250,10.22,288,0.900,bicubic +deit_small_patch16_224,94.000,6.000,98.960,1.040,22.05,224,0.900,bicubic +dpn107,93.960,6.040,98.840,1.160,86.92,224,0.875,bicubic +skresnext50_32x4d,93.950,6.050,98.820,1.180,27.48,224,0.875,bicubic +efficientformer_l1,93.940,6.060,99.030,0.970,12.29,224,0.950,bicubic +dpn98,93.940,6.060,98.920,1.080,61.57,224,0.875,bicubic +ecaresnet26t,93.940,6.060,98.920,1.080,16.01,320,0.950,bicubic +cait_xxs36_224,93.940,6.060,98.890,1.110,17.30,224,1.000,bicubic +resnet50,93.920,6.080,98.470,1.530,25.56,224,0.950,bicubic +xception71,93.890,6.110,98.950,1.050,42.34,299,0.903,bicubic +vit_base_patch16_224.sam,93.890,6.110,98.890,1.110,86.57,224,0.900,bicubic +regnetx_160,93.880,6.120,99.090,0.910,54.28,224,0.875,bicubic +gluon_resnet152_v1c,93.880,6.120,98.800,1.200,60.21,224,0.875,bicubic +nf_regnet_b1,93.880,6.120,98.740,1.260,10.22,288,0.900,bicubic eca_resnet33ts,93.860,6.140,98.890,1.110,19.68,256,0.900,bicubic -cspresnet50,93.860,6.140,98.860,1.140,21.62,256,0.887,bilinear -fbnetv3_d,93.850,6.150,98.910,1.090,10.31,256,0.950,bilinear +cspresnet50,93.860,6.140,98.870,1.130,21.62,256,0.887,bilinear ese_vovnet39b,93.850,6.150,98.900,1.100,24.57,224,0.875,bicubic -xcit_tiny_24_p16_224,93.840,6.160,98.760,1.240,12.12,224,1.000,bicubic -hrnet_w64,93.830,6.170,98.920,1.080,128.06,224,0.875,bilinear -gcresnet33ts,93.830,6.170,98.910,1.090,19.88,256,0.900,bicubic +xcit_tiny_24_p16_224,93.850,6.150,98.760,1.240,12.12,224,1.000,bicubic +fbnetv3_d.ra2_in1k,93.840,6.160,98.910,1.090,10.31,256,0.950,bilinear +hrnet_w64,93.830,6.170,98.930,1.070,128.06,224,0.875,bilinear ecaresnet50d_pruned,93.820,6.180,99.000,1.000,19.94,224,0.875,bicubic repvgg_b2g4,93.820,6.180,98.930,1.070,61.76,224,0.875,bilinear -resnext50d_32x4d,93.820,6.180,98.740,1.260,25.05,224,0.875,bicubic -efficientnet_b2_pruned,93.800,6.200,98.910,1.090,8.31,260,0.890,bicubic -regnetx_080,93.790,6.210,98.900,1.100,39.57,224,0.875,bicubic -dla169,93.790,6.210,98.830,1.170,53.39,224,0.875,bilinear +gcresnet33ts,93.820,6.180,98.910,1.090,19.88,256,0.900,bicubic 
+resnext50d_32x4d,93.810,6.190,98.740,1.260,25.05,224,0.875,bicubic +efficientnet_b2_pruned.in1k,93.800,6.200,98.910,1.090,8.31,260,0.890,bicubic +dla169,93.800,6.200,98.840,1.160,53.39,224,0.875,bilinear +regnetx_080,93.790,6.210,98.910,1.090,39.57,224,0.875,bicubic resnext101_32x8d,93.770,6.230,98.950,1.050,88.79,224,0.875,bilinear +dpn131,93.760,6.240,98.800,1.200,79.25,224,0.875,bicubic gluon_resnet101_v1b,93.760,6.240,98.700,1.300,44.55,224,0.875,bicubic -tf_efficientnet_b0_ns,93.750,6.250,98.970,1.030,5.29,224,0.875,bicubic -dpn131,93.750,6.250,98.830,1.170,79.25,224,0.875,bicubic -efficientnet_em,93.740,6.260,98.930,1.070,6.90,240,0.882,bicubic -wide_resnet101_2,93.720,6.280,98.810,1.190,126.89,224,0.875,bilinear -levit_192,93.720,6.280,98.790,1.210,10.95,224,0.900,bicubic -tf_efficientnetv2_b1,93.710,6.290,98.820,1.180,8.14,240,0.882,bicubic +tf_efficientnet_b0.ns_jft_in1k,93.740,6.260,98.980,1.020,5.29,224,0.875,bicubic +efficientnet_em.ra2_in1k,93.740,6.260,98.930,1.070,6.90,240,0.882,bicubic +wide_resnet101_2,93.730,6.270,98.810,1.190,126.89,224,0.875,bilinear +tf_efficientnetv2_b1.in1k,93.710,6.290,98.820,1.180,8.14,240,0.882,bicubic resnetblur50,93.710,6.290,98.810,1.190,25.56,224,0.875,bicubic hrnet_w40,93.710,6.290,98.800,1.200,57.56,224,0.875,bilinear -tf_efficientnet_b1,93.710,6.290,98.800,1.200,7.79,240,0.882,bicubic -gluon_resnet101_v1c,93.680,6.320,98.760,1.240,44.57,224,0.875,bicubic -regnetx_040,93.670,6.330,98.940,1.060,22.12,224,0.875,bicubic -rexnet_130,93.670,6.330,98.700,1.300,7.56,224,0.875,bicubic +tf_efficientnet_b1.aa_in1k,93.710,6.290,98.800,1.200,7.79,240,0.882,bicubic +levit_192,93.710,6.290,98.790,1.210,10.95,224,0.900,bicubic +gluon_resnet101_v1c,93.690,6.310,98.760,1.240,44.57,224,0.875,bicubic +regnetx_040,93.680,6.320,98.940,1.060,22.12,224,0.875,bicubic +rexnet_130,93.670,6.330,98.710,1.290,7.56,224,0.875,bicubic resmlp_36_224,93.650,6.350,98.950,1.050,44.69,224,0.875,bicubic +fbnetv3_b.ra2_in1k,93.650,6.350,98.910,1.090,8.60,256,0.950,bilinear gluon_resnext50_32x4d,93.650,6.350,98.690,1.310,25.03,224,0.875,bicubic -xception,93.640,6.360,98.760,1.240,22.86,299,0.897,bicubic -fbnetv3_b,93.630,6.370,98.910,1.090,8.60,256,0.950,bilinear -tf_efficientnet_b1_ap,93.630,6.370,98.800,1.200,7.79,240,0.882,bicubic -resnet33ts,93.630,6.370,98.760,1.240,19.68,256,0.900,bicubic -regnetx_064,93.620,6.380,99.050,0.950,26.21,224,0.875,bicubic +xception,93.640,6.360,98.770,1.230,22.86,299,0.897,bicubic +regnetx_064,93.630,6.370,99.050,0.950,26.21,224,0.875,bicubic +tf_efficientnet_b1.ap_in1k,93.630,6.370,98.800,1.200,7.79,240,0.882,bicubic +resnet33ts,93.630,6.370,98.750,1.250,19.68,256,0.900,bicubic +hrnet_w44,93.620,6.380,98.960,1.040,67.06,224,0.875,bilinear dpn68b,93.620,6.380,98.700,1.300,12.61,224,0.875,bicubic -hrnet_w44,93.610,6.390,98.960,1.040,67.06,224,0.875,bilinear halonet26t,93.610,6.390,98.640,1.360,12.48,256,0.950,bicubic -res2net50_26w_6s,93.600,6.400,98.750,1.250,37.05,224,0.875,bilinear repvgg_b2,93.590,6.410,99.070,0.930,89.02,224,0.875,bilinear gluon_resnet50_v1s,93.590,6.410,98.840,1.160,25.68,224,0.875,bicubic -tf_efficientnet_cc_b1_8e,93.570,6.430,98.690,1.310,39.72,240,0.882,bicubic -resnet32ts,93.560,6.440,98.750,1.250,17.96,256,0.900,bicubic -eca_halonext26ts,93.560,6.440,98.680,1.320,10.76,256,0.940,bicubic -dla60_res2next,93.550,6.450,98.780,1.220,17.03,224,0.875,bilinear +res2net50_26w_6s,93.590,6.410,98.750,1.250,37.05,224,0.875,bilinear +dla60_res2next,93.570,6.430,98.800,1.200,17.03,224,0.875,bilinear 
+resnet32ts,93.570,6.430,98.750,1.250,17.96,256,0.900,bicubic +tf_efficientnet_cc_b1_8e.in1k,93.570,6.430,98.690,1.310,39.72,240,0.882,bicubic +eca_halonext26ts,93.550,6.450,98.680,1.320,10.76,256,0.940,bicubic +convnext_tiny.fb_in22k_ft_in1k,93.550,6.450,98.580,1.420,28.59,288,1.000,bicubic gluon_inception_v3,93.540,6.460,98.830,1.170,23.83,299,0.875,bicubic dla102x,93.530,6.470,98.850,1.150,26.31,224,0.875,bilinear gluon_resnet50_v1d,93.530,6.470,98.710,1.290,25.58,224,0.875,bicubic -res2net101_26w_4s,93.530,6.470,98.600,1.400,45.21,224,0.875,bilinear -gmlp_s16_224,93.510,6.490,98.780,1.220,19.42,224,0.875,bicubic +res2net101_26w_4s,93.520,6.480,98.600,1.400,45.21,224,0.875,bilinear coat_tiny,93.510,6.490,98.690,1.310,5.50,224,0.900,bicubic selecsls60b,93.500,6.500,98.840,1.160,32.77,224,0.875,bicubic +gmlp_s16_224,93.500,6.500,98.780,1.220,19.42,224,0.875,bicubic +pvt_v2_b1,93.490,6.510,98.860,1.140,14.01,224,0.900,bicubic cait_xxs24_224,93.490,6.510,98.770,1.230,11.96,224,1.000,bicubic xception41,93.480,6.520,98.750,1.250,26.97,299,0.903,bicubic mobilevitv2_125,93.460,6.540,98.860,1.140,7.48,256,0.888,bicubic -coat_lite_mini,93.460,6.540,98.780,1.220,11.01,224,0.900,bicubic -vit_tiny_patch16_384,93.440,6.560,98.830,1.170,5.79,384,1.000,bicubic +coat_lite_mini,93.450,6.550,98.780,1.220,11.01,224,0.900,bicubic +res2net50_26w_8s,93.450,6.550,98.700,1.300,48.40,224,0.875,bilinear +botnet26t_256,93.450,6.550,98.650,1.350,12.49,256,0.950,bicubic +legacy_seresnet152,93.440,6.560,98.850,1.150,66.82,224,0.875,bilinear +convnext_femto.d1_in1k,93.440,6.560,98.810,1.190,5.22,288,0.950,bicubic resmlp_24_224,93.440,6.560,98.810,1.190,30.02,224,0.875,bicubic -res2net50_26w_8s,93.440,6.560,98.690,1.310,48.40,224,0.875,bilinear lambda_resnet26rpt_256,93.430,6.570,98.880,1.120,10.99,256,0.940,bicubic -legacy_seresnet152,93.430,6.570,98.850,1.150,66.82,224,0.875,bilinear -botnet26t_256,93.430,6.570,98.650,1.350,12.49,256,0.950,bicubic -legacy_seresnext50_32x4d,93.420,6.580,98.800,1.200,27.56,224,0.875,bilinear +legacy_seresnext50_32x4d,93.430,6.570,98.800,1.200,27.56,224,0.875,bilinear +vit_small_patch16_224.augreg_in1k,93.430,6.570,98.780,1.220,22.05,224,0.900,bicubic +vit_tiny_patch16_384.augreg_in21k_ft_in1k,93.420,6.580,98.830,1.170,5.79,384,1.000,bicubic repvgg_b1,93.410,6.590,98.790,1.210,57.42,224,0.875,bilinear -lambda_resnet26t,93.400,6.600,98.730,1.270,10.96,256,0.940,bicubic -hrnet_w30,93.380,6.620,98.830,1.170,37.71,224,0.875,bilinear -dla60_res2net,93.370,6.630,98.840,1.160,20.85,224,0.875,bilinear -eca_botnext26ts_256,93.370,6.630,98.700,1.300,10.59,256,0.950,bicubic +lambda_resnet26t,93.400,6.600,98.740,1.260,10.96,256,0.940,bicubic +convnext_femto_ols.d1_in1k,93.390,6.610,98.910,1.090,5.23,288,0.950,bicubic +dla60_res2net,93.380,6.620,98.860,1.140,20.85,224,0.875,bilinear +hrnet_w30,93.370,6.630,98.830,1.170,37.71,224,0.875,bilinear +eca_botnext26ts_256,93.360,6.640,98.700,1.300,10.59,256,0.950,bicubic xcit_tiny_12_p16_224_dist,93.350,6.650,98.740,1.260,6.72,224,1.000,bicubic -xcit_nano_12_p8_384_dist,93.270,6.730,98.850,1.150,3.05,384,1.000,bicubic -legacy_seresnet101,93.270,6.730,98.740,1.260,49.33,224,0.875,bilinear -mixnet_l,93.270,6.730,98.700,1.300,7.33,224,0.875,bicubic -dla102,93.260,6.740,98.770,1.230,33.27,224,0.875,bilinear -cs3darknet_m,93.260,6.740,98.720,1.280,9.31,288,0.950,bicubic -tv_resnet152,93.250,6.750,98.750,1.250,60.19,224,0.875,bilinear +vit_base_patch16_224.augreg_in1k,93.350,6.650,98.670,1.330,86.57,224,0.900,bicubic 
+cs3darknet_m,93.280,6.720,98.720,1.280,9.31,288,0.950,bicubic +dla102,93.260,6.740,98.780,1.220,33.27,224,0.875,bilinear +legacy_seresnet101,93.260,6.740,98.740,1.260,49.33,224,0.875,bilinear +mixnet_l.ft_in1k,93.260,6.740,98.700,1.300,7.33,224,0.875,bicubic +xcit_nano_12_p8_384_dist,93.250,6.750,98.850,1.150,3.05,384,1.000,bicubic regnetx_032,93.250,6.750,98.730,1.270,15.30,224,0.875,bicubic resnest26d,93.240,6.760,98.850,1.150,17.07,224,0.875,bilinear -pit_xs_distilled_224,93.240,6.760,98.830,1.170,11.00,224,0.900,bicubic +pit_xs_distilled_224,93.240,6.760,98.820,1.180,11.00,224,0.900,bicubic +tv_resnet152,93.240,6.760,98.750,1.250,60.19,224,0.875,bilinear tf_inception_v3,93.200,6.800,98.480,1.520,23.83,299,0.875,bicubic dla60x,93.190,6.810,98.710,1.290,17.35,224,0.875,bilinear res2net50_26w_4s,93.180,6.820,98.670,1.330,25.70,224,0.875,bilinear -tf_efficientnet_em,93.170,6.830,98.670,1.330,6.90,240,0.882,bicubic +tf_efficientnet_em.in1k,93.170,6.830,98.670,1.330,6.90,240,0.882,bicubic mobilevit_s,93.160,6.840,98.770,1.230,5.58,256,0.900,bicubic -res2next50,93.160,6.840,98.650,1.350,24.67,224,0.875,bilinear -vit_relpos_base_patch32_plus_rpn_256,93.160,6.840,98.320,1.680,119.42,256,0.900,bicubic -mobilevitv2_100,93.130,6.870,98.760,1.240,4.90,256,0.888,bicubic +vit_base_patch32_384.augreg_in1k,93.160,6.840,98.610,1.390,88.30,384,1.000,bicubic +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,93.160,6.840,98.310,1.690,119.42,256,0.900,bicubic +res2next50,93.150,6.850,98.660,1.340,24.67,224,0.875,bilinear +mobilevitv2_100,93.140,6.860,98.760,1.240,4.90,256,0.888,bicubic cs3darknet_focus_m,93.110,6.890,98.740,1.260,9.30,288,0.950,bicubic -bat_resnext26ts,93.100,6.900,98.730,1.270,10.73,256,0.900,bicubic -tf_efficientnetv2_b0,93.060,6.940,98.690,1.310,7.14,224,0.875,bicubic -levit_128,93.050,6.950,98.700,1.300,9.21,224,0.900,bicubic -res2net50_14w_8s,93.040,6.960,98.700,1.300,25.06,224,0.875,bilinear -tf_mixnet_l,93.040,6.960,98.540,1.460,7.33,224,0.875,bicubic +bat_resnext26ts,93.100,6.900,98.720,1.280,10.73,256,0.900,bicubic +tf_efficientnetv2_b0.in1k,93.060,6.940,98.700,1.300,7.14,224,0.875,bicubic +levit_128,93.050,6.950,98.690,1.310,9.21,224,0.900,bicubic +tf_mixnet_l.in1k,93.040,6.960,98.540,1.460,7.33,224,0.875,bicubic repvgg_b1g4,93.030,6.970,98.820,1.180,39.97,224,0.875,bilinear -efficientnet_b1,93.020,6.980,98.710,1.290,7.79,256,1.000,bicubic +efficientnet_b1.ft_in1k,93.030,6.970,98.710,1.290,7.79,256,1.000,bicubic +res2net50_14w_8s,93.030,6.970,98.700,1.300,25.06,224,0.875,bilinear selecsls60,93.010,6.990,98.830,1.170,30.67,224,0.875,bicubic adv_inception_v3,93.010,6.990,98.490,1.510,23.83,299,0.875,bicubic regnety_016,93.000,7.000,98.680,1.320,11.20,224,0.875,bicubic +convnext_atto_ols.a2_in1k,92.980,7.020,98.680,1.320,3.70,288,0.950,bicubic hardcorenas_f,92.980,7.020,98.620,1.380,8.20,224,0.875,bilinear -efficientnet_b1_pruned,92.970,7.030,98.520,1.480,6.33,240,0.882,bicubic +efficientnet_b1_pruned.in1k,92.980,7.020,98.530,1.470,6.33,240,0.882,bicubic hrnet_w32,92.950,7.050,98.840,1.160,41.23,224,0.875,bilinear -hardcorenas_e,92.940,7.060,98.580,1.420,8.07,224,0.875,bilinear -efficientnet_es,92.920,7.080,98.690,1.310,5.44,224,0.875,bicubic +hardcorenas_e,92.950,7.050,98.570,1.430,8.07,224,0.875,bilinear pit_xs_224,92.910,7.090,98.780,1.220,10.62,224,0.900,bicubic -tv_resnext50_32x4d,92.910,7.090,98.720,1.280,25.03,224,0.875,bilinear -gluon_resnet50_v1c,92.910,7.090,98.700,1.300,25.58,224,0.875,bicubic +gluon_resnet50_v1c,92.910,7.090,98.710,1.290,25.58,224,0.875,bicubic 
+efficientnet_es.ra_in1k,92.910,7.090,98.690,1.310,5.44,224,0.875,bicubic densenet161,92.900,7.100,98.810,1.190,28.68,224,0.875,bicubic +tv_resnext50_32x4d,92.900,7.100,98.720,1.280,25.03,224,0.875,bilinear inception_v3,92.900,7.100,98.330,1.670,23.83,299,0.875,bicubic tv_resnet101,92.880,7.120,98.660,1.340,44.55,224,0.875,bilinear -resmlp_12_distilled_224,92.870,7.130,98.620,1.380,15.35,224,0.875,bicubic -tf_efficientnet_cc_b0_8e,92.870,7.130,98.450,1.550,24.01,224,0.875,bicubic -coat_lite_tiny,92.860,7.140,98.640,1.360,5.72,224,0.900,bicubic +resmlp_12_distilled_224,92.870,7.130,98.630,1.370,15.35,224,0.875,bicubic +tf_efficientnet_cc_b0_8e.in1k,92.870,7.130,98.460,1.540,24.01,224,0.875,bicubic +coat_lite_tiny,92.850,7.150,98.640,1.360,5.72,224,0.900,bicubic rexnet_100,92.850,7.150,98.620,1.380,4.80,224,0.875,bicubic -tf_efficientnet_cc_b0_4e,92.830,7.170,98.440,1.560,13.31,224,0.875,bicubic -seresnext26ts,92.820,7.180,98.600,1.400,10.39,256,0.900,bicubic +tf_efficientnet_cc_b0_4e.in1k,92.840,7.160,98.440,1.560,13.31,224,0.875,bicubic +seresnext26ts,92.830,7.170,98.600,1.400,10.39,256,0.900,bicubic seresnext26t_32x4d,92.820,7.180,98.560,1.440,16.81,224,0.875,bicubic -tinynet_a,92.810,7.190,98.560,1.440,6.19,192,0.875,bicubic -res2net50_48w_2s,92.790,7.210,98.480,1.520,25.29,224,0.875,bilinear +tinynet_a.in1k,92.800,7.200,98.560,1.440,6.19,192,0.875,bicubic +res2net50_48w_2s,92.790,7.210,98.470,1.530,25.29,224,0.875,bilinear hrnet_w18,92.760,7.240,98.660,1.340,21.30,224,0.875,bilinear +convnext_atto.d2_in1k,92.760,7.240,98.610,1.390,3.70,288,0.950,bicubic crossvit_9_dagger_240,92.760,7.240,98.510,1.490,8.78,240,0.875,bicubic -densenet201,92.700,7.300,98.650,1.350,20.01,224,0.875,bicubic +densenet201,92.690,7.310,98.650,1.350,20.01,224,0.875,bicubic +resnet26t,92.680,7.320,98.580,1.420,16.01,256,0.940,bicubic repvgg_a2,92.680,7.320,98.520,1.480,28.21,224,0.875,bilinear gmixer_24_224,92.680,7.320,98.280,1.720,24.72,224,0.875,bicubic legacy_seresnet50,92.670,7.330,98.650,1.350,28.09,224,0.875,bilinear -resnet26t,92.670,7.330,98.580,1.420,16.01,256,0.940,bicubic -dla60,92.660,7.340,98.630,1.370,22.04,224,0.875,bilinear -resnet34d,92.650,7.350,98.420,1.580,21.82,224,0.875,bicubic -tf_efficientnet_b0_ap,92.620,7.380,98.370,1.630,5.29,224,0.875,bicubic -mobilenetv2_120d,92.610,7.390,98.500,1.500,5.83,224,0.875,bicubic -tf_efficientnet_lite2,92.600,7.400,98.550,1.450,6.09,260,0.890,bicubic +dla60,92.670,7.330,98.630,1.370,22.04,224,0.875,bilinear +resnet34d,92.640,7.360,98.420,1.580,21.82,224,0.875,bicubic +mobilenetv2_120d.ra_in1k,92.610,7.390,98.510,1.490,5.83,224,0.875,bicubic +tf_efficientnet_b0.ap_in1k,92.610,7.390,98.370,1.630,5.29,224,0.875,bicubic hardcorenas_d,92.600,7.400,98.430,1.570,7.50,224,0.875,bilinear -legacy_seresnext26_32x4d,92.590,7.410,98.410,1.590,16.79,224,0.875,bicubic +tf_efficientnet_lite2.in1k,92.590,7.410,98.550,1.450,6.09,260,0.890,bicubic skresnet34,92.570,7.430,98.520,1.480,22.28,224,0.875,bicubic +legacy_seresnext26_32x4d,92.570,7.430,98.420,1.580,16.79,224,0.875,bicubic gluon_resnet50_v1b,92.560,7.440,98.550,1.450,25.56,224,0.875,bicubic -regnetx_016,92.530,7.470,98.550,1.450,9.19,224,0.875,bicubic -efficientnet_b0,92.480,7.520,98.680,1.320,5.29,224,0.875,bicubic +regnetx_016,92.540,7.460,98.550,1.450,9.19,224,0.875,bicubic +efficientnet_b0.ra_in1k,92.480,7.520,98.680,1.320,5.29,224,0.875,bicubic selecsls42b,92.480,7.520,98.440,1.560,32.46,224,0.875,bicubic poolformer_s12,92.470,7.530,98.350,1.650,11.92,224,0.900,bicubic 
xcit_tiny_12_p16_224,92.460,7.540,98.630,1.370,6.72,224,1.000,bicubic gcresnext26ts,92.460,7.540,98.490,1.510,10.48,256,0.900,bicubic +seresnext26d_32x4d,92.440,7.560,98.540,1.460,16.81,224,0.875,bicubic gernet_s,92.440,7.560,98.500,1.500,8.17,224,0.875,bilinear -seresnext26d_32x4d,92.430,7.570,98.540,1.460,16.81,224,0.875,bicubic -xcit_nano_12_p8_224_dist,92.420,7.580,98.520,1.480,3.05,224,1.000,bicubic -eca_resnext26ts,92.410,7.590,98.620,1.380,10.30,256,0.900,bicubic -densenetblur121d,92.410,7.590,98.420,1.580,8.00,224,0.875,bicubic -tf_efficientnet_b0,92.400,7.600,98.470,1.530,5.29,224,0.875,bicubic -hardcorenas_c,92.360,7.640,98.350,1.650,5.52,224,0.875,bilinear -convmixer_1024_20_ks9_p14,92.350,7.650,98.420,1.580,24.38,224,0.960,bicubic -tf_efficientnet_lite1,92.310,7.690,98.490,1.510,5.42,240,0.882,bicubic -densenet169,92.290,7.710,98.590,1.410,14.15,224,0.875,bicubic -mixnet_m,92.270,7.730,98.350,1.650,5.01,224,0.875,bicubic -mobilenetv3_large_100_miil,92.270,7.730,98.240,1.760,5.48,224,0.875,bilinear -resnet26d,92.260,7.740,98.450,1.550,16.01,224,0.875,bicubic -dpn68,92.250,7.750,98.610,1.390,12.61,224,0.875,bicubic -resnext26ts,92.220,7.780,98.250,1.750,10.30,256,0.900,bicubic -tf_mixnet_m,92.210,7.790,98.420,1.580,5.01,224,0.875,bicubic -vit_small_patch32_224,92.160,7.840,98.510,1.490,22.88,224,0.900,bicubic -xcit_nano_12_p16_384_dist,92.130,7.870,98.520,1.480,3.05,384,1.000,bicubic -tv_resnet50,92.130,7.870,98.420,1.580,25.56,224,0.875,bilinear +xcit_nano_12_p8_224_dist,92.430,7.570,98.530,1.470,3.05,224,1.000,bicubic +eca_resnext26ts,92.420,7.580,98.620,1.380,10.30,256,0.900,bicubic +tf_efficientnet_b0.aa_in1k,92.400,7.600,98.470,1.530,5.29,224,0.875,bicubic +densenetblur121d,92.400,7.600,98.410,1.590,8.00,224,0.875,bicubic +convmixer_1024_20_ks9_p14,92.340,7.660,98.430,1.570,24.38,224,0.960,bicubic +hardcorenas_c,92.330,7.670,98.340,1.660,5.52,224,0.875,bilinear +tf_efficientnet_lite1.in1k,92.310,7.690,98.490,1.510,5.42,240,0.882,bicubic +densenet169,92.300,7.700,98.590,1.410,14.15,224,0.875,bicubic +mixnet_m.ft_in1k,92.270,7.730,98.350,1.650,5.01,224,0.875,bicubic +mobilenetv3_large_100.miil_in21k_ft_in1k,92.250,7.750,98.250,1.750,5.48,224,0.875,bilinear +dpn68,92.240,7.760,98.610,1.390,12.61,224,0.875,bicubic +resnet26d,92.230,7.770,98.450,1.550,16.01,224,0.875,bicubic +resnext26ts,92.210,7.790,98.250,1.750,10.30,256,0.900,bicubic +tf_mixnet_m.in1k,92.200,7.800,98.420,1.580,5.01,224,0.875,bicubic +vit_small_patch32_224.augreg_in21k_ft_in1k,92.150,7.850,98.510,1.490,22.88,224,0.900,bicubic +tv_resnet50,92.140,7.860,98.420,1.580,25.56,224,0.875,bilinear resmlp_12_224,92.120,7.880,98.570,1.430,15.35,224,0.875,bicubic -tf_efficientnet_es,92.120,7.880,98.430,1.570,5.44,224,0.875,bicubic -mobilenetv2_140,92.040,7.960,98.250,1.750,6.11,224,0.875,bicubic -ese_vovnet19b_dw,92.000,8.000,98.510,1.490,6.54,224,0.875,bicubic -mobilevitv2_075,91.970,8.030,98.300,1.700,2.87,256,0.888,bicubic +xcit_nano_12_p16_384_dist,92.110,7.890,98.520,1.480,3.05,384,1.000,bicubic +tf_efficientnet_es.in1k,92.100,7.900,98.440,1.560,5.44,224,0.875,bicubic +mobilenetv2_140.ra_in1k,92.030,7.970,98.250,1.750,6.11,224,0.875,bicubic +ese_vovnet19b_dw,92.010,7.990,98.510,1.490,6.54,224,0.875,bicubic +mobilevitv2_075,91.980,8.020,98.300,1.700,2.87,256,0.888,bicubic +hardcorenas_b,91.940,8.060,98.400,1.600,5.18,224,0.875,bilinear densenet121,91.940,8.060,98.280,1.720,7.98,224,0.875,bicubic -hardcorenas_b,91.930,8.070,98.400,1.600,5.18,224,0.875,bilinear 
-vit_tiny_patch16_224,91.910,8.090,98.340,1.660,5.72,224,0.900,bicubic +vit_tiny_patch16_224.augreg_in21k_ft_in1k,91.930,8.070,98.340,1.660,5.72,224,0.900,bicubic regnety_008,91.900,8.100,98.420,1.580,6.26,224,0.875,bicubic -mixnet_s,91.770,8.230,98.300,1.700,4.13,224,0.875,bicubic -vit_tiny_r_s16_p8_384,91.730,8.270,98.430,1.570,6.36,384,1.000,bicubic -efficientnet_es_pruned,91.710,8.290,98.410,1.590,5.44,224,0.875,bicubic -tf_mixnet_s,91.690,8.310,98.240,1.760,4.13,224,0.875,bicubic +mixnet_s.ft_in1k,91.780,8.220,98.300,1.700,4.13,224,0.875,bicubic +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,91.730,8.270,98.430,1.570,6.36,384,1.000,bicubic +efficientnet_es_pruned.in1k,91.700,8.300,98.420,1.580,5.44,224,0.875,bicubic repvgg_b0,91.680,8.320,98.450,1.550,15.82,224,0.875,bilinear -semnasnet_100,91.660,8.340,98.270,1.730,3.89,224,0.875,bicubic +tf_mixnet_s.in1k,91.680,8.320,98.240,1.760,4.13,224,0.875,bicubic +semnasnet_100.rmsp_in1k,91.660,8.340,98.270,1.730,3.89,224,0.875,bicubic hardcorenas_a,91.620,8.380,98.170,1.830,5.26,224,0.875,bilinear -regnety_006,91.550,8.450,98.430,1.570,6.06,224,0.875,bicubic -mobilenetv3_rw,91.550,8.450,98.280,1.720,5.48,224,0.875,bicubic +regnety_006,91.570,8.430,98.430,1.570,6.06,224,0.875,bicubic +edgenext_x_small,91.570,8.430,98.180,1.820,2.34,288,1.000,bicubic +mobilenetv3_rw.rmsp_in1k,91.550,8.450,98.270,1.730,5.48,224,0.875,bicubic levit_128s,91.500,8.500,98.400,1.600,7.78,224,0.900,bicubic -legacy_seresnet34,91.490,8.510,98.200,1.800,21.96,224,0.875,bilinear -mobilenetv3_large_100,91.480,8.520,98.320,1.680,5.48,224,0.875,bicubic -resnet26,91.440,8.560,98.260,1.740,16.00,224,0.875,bicubic -tf_mobilenetv3_large_100,91.420,8.580,98.260,1.740,5.48,224,0.875,bilinear +mobilenetv3_large_100.ra_in1k,91.480,8.520,98.320,1.680,5.48,224,0.875,bicubic +legacy_seresnet34,91.480,8.520,98.200,1.800,21.96,224,0.875,bilinear +resnet26,91.440,8.560,98.280,1.720,16.00,224,0.875,bicubic +tf_mobilenetv3_large_100.in1k,91.420,8.580,98.260,1.740,5.48,224,0.875,bilinear tv_densenet121,91.400,8.600,98.250,1.750,7.98,224,0.875,bicubic -edgenext_x_small,91.400,8.600,98.160,1.840,2.34,256,0.900,bicubic -mobilenetv2_110d,91.330,8.670,98.190,1.810,4.52,224,0.875,bicubic -tf_efficientnet_lite0,91.300,8.700,98.090,1.910,4.65,224,0.875,bicubic -efficientnet_lite0,91.260,8.740,98.250,1.750,4.65,224,0.875,bicubic -fbnetc_100,91.250,8.750,97.850,2.150,5.57,224,0.875,bilinear -dla34,91.230,8.770,98.170,1.830,15.74,224,0.875,bilinear -mnasnet_100,91.210,8.790,98.050,1.950,4.38,224,0.875,bicubic +mobilenetv2_110d.ra_in1k,91.350,8.650,98.190,1.810,4.52,224,0.875,bicubic +tf_efficientnet_lite0.in1k,91.300,8.700,98.090,1.910,4.65,224,0.875,bicubic +fbnetc_100.rmsp_in1k,91.270,8.730,97.830,2.170,5.57,224,0.875,bilinear +efficientnet_lite0.ra_in1k,91.260,8.740,98.250,1.750,4.65,224,0.875,bicubic +dla34,91.240,8.760,98.180,1.820,15.74,224,0.875,bilinear resnet34,91.200,8.800,98.240,1.760,21.80,224,0.875,bilinear -mobilevit_xs,91.200,8.800,98.220,1.780,2.32,256,0.900,bicubic -hrnet_w18_small_v2,91.170,8.830,98.330,1.670,15.60,224,0.875,bilinear -regnetx_008,91.160,8.840,98.380,1.620,7.26,224,0.875,bicubic -mixer_b16_224,91.150,8.850,97.400,2.600,59.88,224,0.875,bicubic -tinynet_b,91.140,8.860,98.060,1.940,3.73,188,0.875,bicubic +mnasnet_100.rmsp_in1k,91.200,8.800,98.050,1.950,4.38,224,0.875,bicubic +mobilevit_xs,91.190,8.810,98.220,1.780,2.32,256,0.900,bicubic +regnetx_008,91.180,8.820,98.380,1.620,7.26,224,0.875,bicubic +hrnet_w18_small_v2,91.170,8.830,98.340,1.660,15.60,224,0.875,bilinear 
+mixer_b16_224,91.140,8.860,97.400,2.600,59.88,224,0.875,bicubic resnest14d,91.130,8.870,98.330,1.670,10.61,224,0.875,bilinear -xcit_nano_12_p8_224,91.130,8.870,98.230,1.770,3.05,224,1.000,bicubic +xcit_nano_12_p8_224,91.120,8.880,98.240,1.760,3.05,224,1.000,bicubic +tinynet_b.in1k,91.120,8.880,98.070,1.930,3.73,188,0.875,bicubic +deit_tiny_distilled_patch16_224,91.100,8.900,98.270,1.730,5.91,224,0.900,bicubic gluon_resnet34_v1b,91.100,8.900,98.180,1.820,21.80,224,0.875,bicubic -deit_tiny_distilled_patch16_224,91.080,8.920,98.270,1.730,5.91,224,0.900,bicubic -swsl_resnet18,91.070,8.930,98.210,1.790,11.69,224,0.875,bilinear +swsl_resnet18,91.090,8.910,98.210,1.790,11.69,224,0.875,bilinear crossvit_9_240,91.050,8.950,98.310,1.690,8.55,240,0.875,bicubic vgg19_bn,90.990,9.010,98.110,1.890,143.68,224,0.875,bilinear pit_ti_distilled_224,90.900,9.100,98.220,1.780,5.10,224,0.900,bicubic -regnetx_006,90.770,9.230,98.100,1.900,6.20,224,0.875,bicubic -regnety_004,90.770,9.230,98.080,1.920,4.34,224,0.875,bicubic -ssl_resnet18,90.700,9.300,98.030,1.970,11.69,224,0.875,bilinear -spnasnet_100,90.600,9.400,97.960,2.040,4.42,224,0.875,bilinear -convit_tiny,90.550,9.450,98.220,1.780,5.71,224,0.875,bicubic +regnety_004,90.780,9.220,98.080,1.920,4.34,224,0.875,bicubic +regnetx_006,90.760,9.240,98.100,1.900,6.20,224,0.875,bicubic +ssl_resnet18,90.700,9.300,98.020,1.980,11.69,224,0.875,bilinear +spnasnet_100.rmsp_in1k,90.610,9.390,97.950,2.050,4.42,224,0.875,bilinear +vit_base_patch32_224.augreg_in1k,90.590,9.410,97.720,2.280,88.22,224,0.900,bicubic vgg16_bn,90.540,9.460,97.990,2.010,138.37,224,0.875,bilinear crossvit_tiny_240,90.540,9.460,97.940,2.060,7.01,240,0.875,bicubic -pit_ti_224,90.440,9.560,98.010,1.990,4.85,224,0.900,bicubic +convit_tiny,90.530,9.470,98.210,1.790,5.71,224,0.875,bicubic ghostnet_100,90.440,9.560,97.830,2.170,5.18,224,0.875,bilinear -tf_mobilenetv3_large_075,90.330,9.670,97.880,2.120,3.99,224,0.875,bilinear +pit_ti_224,90.420,9.580,98.010,1.990,4.85,224,0.900,bicubic +tf_mobilenetv3_large_075.in1k,90.320,9.680,97.870,2.130,3.99,224,0.875,bilinear tv_resnet34,90.290,9.710,97.980,2.020,21.80,224,0.875,bilinear -semnasnet_075,90.210,9.790,97.970,2.030,2.91,224,0.875,bicubic -skresnet18,90.170,9.830,97.780,2.220,11.96,224,0.875,bicubic -xcit_nano_12_p16_224_dist,90.170,9.830,97.750,2.250,3.05,224,1.000,bicubic -resnet18d,89.980,10.020,97.830,2.170,11.71,224,0.875,bicubic -hrnet_w18_small,89.870,10.130,97.890,2.110,13.19,224,0.875,bilinear -vit_base_patch32_224_sam,89.860,10.140,97.600,2.400,88.22,224,0.900,bicubic -mobilenetv2_100,89.820,10.180,97.830,2.170,3.50,224,0.875,bicubic +semnasnet_075.rmsp_in1k,90.200,9.800,97.970,2.030,2.91,224,0.875,bicubic +skresnet18,90.160,9.840,97.780,2.220,11.96,224,0.875,bicubic +xcit_nano_12_p16_224_dist,90.150,9.850,97.760,2.240,3.05,224,1.000,bicubic +resnet18d,89.990,10.010,97.830,2.170,11.71,224,0.875,bicubic +hrnet_w18_small,89.880,10.120,97.900,2.100,13.19,224,0.875,bilinear +vit_base_patch32_224.sam,89.860,10.140,97.600,2.400,88.22,224,0.900,bicubic +mobilenetv2_100.ra_in1k,89.830,10.170,97.830,2.170,3.50,224,0.875,bicubic +edgenext_xx_small,89.780,10.220,97.520,2.480,1.33,288,1.000,bicubic vgg19,89.680,10.320,97.550,2.450,143.67,224,0.875,bilinear deit_tiny_patch16_224,89.620,10.380,97.960,2.040,5.72,224,0.900,bicubic -regnetx_004,89.470,10.530,97.770,2.230,5.16,224,0.875,bicubic +regnetx_004,89.460,10.540,97.770,2.230,5.16,224,0.875,bicubic vgg16,89.360,10.640,97.520,2.480,138.36,224,0.875,bilinear 
-vit_tiny_r_s16_p8_224,89.350,10.650,97.700,2.300,6.34,224,0.900,bicubic -legacy_seresnet18,89.260,10.740,97.690,2.310,11.78,224,0.875,bicubic -edgenext_xx_small,89.230,10.770,97.260,2.740,1.33,256,0.900,bicubic -vgg13_bn,89.210,10.790,97.520,2.480,133.05,224,0.875,bilinear -tf_mobilenetv3_large_minimal_100,89.180,10.820,97.320,2.680,3.92,224,0.875,bilinear +vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,89.340,10.660,97.700,2.300,6.34,224,0.900,bicubic +legacy_seresnet18,89.270,10.730,97.680,2.320,11.78,224,0.875,bicubic +vgg13_bn,89.200,10.800,97.530,2.470,133.05,224,0.875,bilinear +tf_mobilenetv3_large_minimal_100.in1k,89.180,10.820,97.320,2.680,3.92,224,0.875,bilinear resnet14t,89.110,10.890,97.370,2.630,10.08,224,0.950,bilinear -mobilevitv2_050,89.050,10.950,97.590,2.410,1.37,256,0.888,bicubic -lcnet_100,88.970,11.030,97.380,2.620,2.95,224,0.875,bicubic -xcit_nano_12_p16_224,88.960,11.040,97.400,2.600,3.05,224,1.000,bicubic +mobilevitv2_050,89.030,10.970,97.590,2.410,1.37,256,0.888,bicubic +pvt_v2_b0,88.980,11.020,97.690,2.310,3.67,224,0.900,bicubic +xcit_nano_12_p16_224,88.960,11.040,97.390,2.610,3.05,224,1.000,bicubic +lcnet_100.ra2_in1k,88.960,11.040,97.360,2.640,2.95,224,0.875,bicubic gluon_resnet18_v1b,88.660,11.340,97.100,2.900,11.69,224,0.875,bicubic -tinynet_c,88.420,11.580,97.270,2.730,2.46,184,0.875,bicubic +tinynet_c.in1k,88.420,11.580,97.270,2.730,2.46,184,0.875,bicubic vgg11_bn,88.390,11.610,97.270,2.730,132.87,224,0.875,bilinear -regnety_002,88.190,11.810,97.440,2.560,3.16,224,0.875,bicubic +regnety_002,88.200,11.800,97.430,2.570,3.16,224,0.875,bicubic resnet18,88.150,11.850,97.120,2.880,11.69,224,0.875,bilinear mobilevit_xxs,87.950,12.050,97.180,2.820,1.27,256,0.900,bicubic vgg13,87.570,12.430,97.120,2.880,133.05,224,0.875,bilinear regnetx_002,87.380,12.620,96.990,3.010,2.68,224,0.875,bicubic vgg11,87.340,12.660,97.110,2.890,132.86,224,0.875,bilinear -dla60x_c,87.130,12.870,97.140,2.860,1.32,224,0.875,bilinear -mixer_l16_224,86.970,13.030,94.050,5.950,208.20,224,0.875,bicubic -lcnet_075,86.940,13.060,96.530,3.470,2.36,224,0.875,bicubic -resnet10t,86.730,13.270,96.670,3.330,5.44,224,0.950,bilinear -mobilenetv3_small_100,86.180,13.820,96.460,3.540,2.54,224,0.875,bicubic -tf_mobilenetv3_small_100,85.970,14.030,96.400,3.600,2.54,224,0.875,bilinear -mnasnet_small,85.510,14.490,95.980,4.020,2.03,224,0.875,bicubic -dla46x_c,85.460,14.540,96.450,3.550,1.07,224,0.875,bilinear -tinynet_d,85.420,14.580,96.020,3.980,2.34,152,0.875,bicubic -mobilenetv2_050,85.010,14.990,95.620,4.380,1.97,224,0.875,bicubic -dla46_c,84.660,15.340,96.210,3.790,1.30,224,0.875,bilinear -tf_mobilenetv3_small_075,84.520,15.480,95.890,4.110,2.04,224,0.875,bilinear -mobilenetv3_small_075,84.120,15.880,95.500,4.500,2.04,224,0.875,bicubic -lcnet_050,83.010,16.990,95.010,4.990,1.88,224,0.875,bicubic -tf_mobilenetv3_small_minimal_100,82.690,17.310,95.000,5.000,2.04,224,0.875,bilinear -tinynet_e,79.800,20.200,93.980,6.020,2.04,106,0.875,bicubic -mobilenetv3_small_050,78.100,21.900,93.010,6.990,1.59,224,0.875,bicubic +dla60x_c,87.110,12.890,97.140,2.860,1.32,224,0.875,bilinear +mixer_l16_224,86.970,13.030,94.060,5.940,208.20,224,0.875,bicubic +lcnet_075.ra2_in1k,86.940,13.060,96.530,3.470,2.36,224,0.875,bicubic +resnet10t,86.690,13.310,96.670,3.330,5.44,224,0.950,bilinear +mobilenetv3_small_100.lamb_in1k,86.170,13.830,96.470,3.530,2.54,224,0.875,bicubic +tf_mobilenetv3_small_100.in1k,85.960,14.040,96.400,3.600,2.54,224,0.875,bilinear +mnasnet_small.lamb_in1k,85.510,14.490,95.980,4.020,2.03,224,0.875,bicubic 
+dla46x_c,85.480,14.520,96.440,3.560,1.07,224,0.875,bilinear +tinynet_d.in1k,85.430,14.570,96.010,3.990,2.34,152,0.875,bicubic +mobilenetv2_050.lamb_in1k,84.990,15.010,95.620,4.380,1.97,224,0.875,bicubic +dla46_c,84.660,15.340,96.200,3.800,1.30,224,0.875,bilinear +tf_mobilenetv3_small_075.in1k,84.530,15.470,95.890,4.110,2.04,224,0.875,bilinear +mobilenetv3_small_075.lamb_in1k,84.120,15.880,95.500,4.500,2.04,224,0.875,bicubic +lcnet_050.ra2_in1k,83.000,17.000,95.010,4.990,1.88,224,0.875,bicubic +tf_mobilenetv3_small_minimal_100.in1k,82.670,17.330,95.000,5.000,2.04,224,0.875,bilinear +tinynet_e.in1k,79.810,20.190,93.980,6.020,2.04,106,0.875,bicubic +mobilenetv3_small_050.lamb_in1k,78.100,21.900,93.010,6.990,1.59,224,0.875,bicubic diff --git a/results/results-imagenet-r.csv b/results/results-imagenet-r.csv index 507e75d3..9e2fee5b 100644 --- a/results/results-imagenet-r.csv +++ b/results/results-imagenet-r.csv @@ -1,669 +1,791 @@ model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation,top1_diff,top5_diff,rank_diff -ig_resnext101_32x48d,79.650,20.350,89.393,10.607,828.41,224,0.875,bilinear,-17.320,-10.277,+33 -ig_resnext101_32x32d,79.467,20.533,89.180,10.820,468.53,224,0.875,bilinear,-17.313,-10.350,+51 -ig_resnext101_32x16d,78.817,21.183,88.477,11.523,194.03,224,0.875,bilinear,-17.623,-11.063,+83 -tf_efficientnet_l2_ns_475,76.470,23.530,88.653,11.347,480.31,475,0.936,bicubic,-21.280,-11.167,0 -swsl_resnext101_32x16d,76.307,23.693,87.740,12.260,194.03,224,0.875,bilinear,-19.973,-11.760,+98 -ig_resnext101_32x8d,75.800,24.200,86.213,13.787,88.79,224,0.875,bilinear,-20.140,-13.167,+140 -swsl_resnext101_32x8d,75.583,24.417,86.940,13.060,88.79,224,0.875,bilinear,-20.657,-12.650,+101 -tf_efficientnet_l2_ns,74.657,25.343,87.547,12.453,480.31,800,0.960,bicubic,-23.123,-12.343,-6 -beit_large_patch16_384,73.280,26.720,85.023,14.977,305.00,384,1.000,bicubic,-24.530,-14.767,-8 -beit_large_patch16_512,73.157,26.843,85.080,14.920,305.67,512,1.000,bicubic,-24.623,-14.740,-7 -swsl_resnext101_32x4d,72.657,27.343,85.157,14.843,44.18,224,0.875,bilinear,-23.383,-14.373,+121 -beit_large_patch16_224,71.043,28.957,83.420,16.580,304.43,224,0.900,bicubic,-26.437,-16.270,-5 -deit3_huge_patch14_224_in21ft1k,70.813,29.187,82.193,17.807,632.13,224,1.000,bicubic,-26.437,-17.527,+4 -deit3_large_patch16_384_in21ft1k,70.563,29.437,82.437,17.563,304.76,384,1.000,bicubic,-26.997,-17.273,-9 -deit3_large_patch16_224_in21ft1k,69.720,30.280,81.197,18.803,304.37,224,1.000,bicubic,-27.590,-18.483,-4 -swsl_resnext50_32x4d,68.970,31.030,82.807,17.193,25.03,224,0.875,bilinear,-26.640,-16.633,+166 -swsl_resnet50,68.293,31.707,83.300,16.700,25.56,224,0.875,bilinear,-26.907,-16.090,+213 -swinv2_large_window12to24_192to384_22kft1k,67.673,32.327,80.097,19.903,196.74,384,1.000,bicubic,-29.607,-19.683,-4 -tf_efficientnet_b7_ns,67.537,32.463,81.380,18.620,66.35,600,0.949,bicubic,-29.653,-18.320,+3 -vit_large_patch16_384,67.060,32.940,78.703,21.297,304.72,384,1.000,bicubic,-30.360,-21.077,-11 -convnext_xlarge_384_in22ft1k,66.967,33.033,79.703,20.297,350.20,384,1.000,bicubic,-30.583,-20.097,-15 -swin_large_patch4_window12_384,66.290,33.710,79.783,20.217,196.74,384,1.000,bicubic,-30.890,-19.897,+1 -convnext_large_384_in22ft1k,65.980,34.020,79.203,20.797,197.77,384,1.000,bicubic,-31.460,-20.577,-15 -swinv2_base_window12to24_192to384_22kft1k,65.740,34.260,79.310,20.690,87.92,384,1.000,bicubic,-31.520,-20.480,-9 -swinv2_large_window12to16_192to256_22kft1k,65.633,34.367,78.460,21.540,196.74,256,0.900,bicubic,-31.607,-21.250,-5 
-tf_efficientnet_b6_ns,65.590,34.410,79.560,20.440,43.04,528,0.942,bicubic,-31.430,-20.150,+5 -convnext_xlarge_in22ft1k,65.423,34.577,78.243,21.757,350.20,224,0.875,bicubic,-31.817,-21.487,-8 -vit_large_patch16_224,64.353,35.647,76.187,23.813,304.33,224,0.900,bicubic,-32.357,-23.463,+28 -convnext_large_in22ft1k,64.177,35.823,77.580,22.420,197.77,224,0.875,bicubic,-33.083,-22.070,-13 -vit_large_r50_s32_384,64.103,35.897,75.850,24.150,329.09,384,1.000,bicubic,-32.847,-23.860,+6 -convnext_base_384_in22ft1k,64.093,35.907,77.733,22.267,88.59,384,1.000,bicubic,-33.197,-22.047,-19 -swin_large_patch4_window7_224,63.867,36.133,78.177,21.823,196.53,224,0.900,bicubic,-33.083,-21.483,+5 -beit_base_patch16_384,63.617,36.383,78.113,21.887,86.74,384,1.000,bicubic,-33.713,-21.607,-23 -swin_base_patch4_window12_384,63.470,36.530,78.080,21.920,87.90,384,1.000,bicubic,-33.650,-21.700,-9 -swinv2_base_window12to16_192to256_22kft1k,63.200,36.800,77.120,22.880,87.92,256,0.900,bicubic,-33.860,-22.540,-5 -tf_efficientnet_b5_ns,63.043,36.957,77.773,22.227,30.39,456,0.934,bicubic,-33.827,-21.867,+7 -deit3_base_patch16_384_in21ft1k,62.637,37.363,75.550,24.450,86.88,384,1.000,bicubic,-34.603,-24.120,-16 -vit_base_patch8_224,62.197,37.803,75.617,24.383,86.58,224,0.900,bicubic,-34.883,-24.003,-10 -convnext_base_in22ft1k,62.010,37.990,76.037,23.963,88.59,224,0.875,bicubic,-34.830,-23.613,+7 -deit3_base_patch16_224_in21ft1k,61.787,38.213,74.723,25.277,86.59,224,1.000,bicubic,-35.083,-24.897,+4 -tf_efficientnet_b4_ns,61.233,38.767,76.160,23.840,19.34,380,0.922,bicubic,-35.477,-23.480,+16 -tf_efficientnetv2_l_in21ft1k,60.953,39.047,75.843,24.157,118.52,480,1.000,bicubic,-36.157,-23.867,-16 -tf_efficientnetv2_xl_in21ft1k,60.680,39.320,74.397,25.603,208.12,512,1.000,bicubic,-36.470,-25.223,-19 -beit_base_patch16_224,60.317,39.683,75.597,24.403,86.53,224,0.900,bicubic,-36.343,-24.063,+19 -vit_base_patch16_384,60.187,39.813,73.837,26.163,86.86,384,1.000,bicubic,-36.833,-25.873,-13 -swin_base_patch4_window7_224,59.537,40.463,74.240,25.760,87.77,224,0.900,bicubic,-37.143,-25.420,+15 -convnext_small_384_in22ft1k,59.110,40.890,73.903,26.097,50.22,384,1.000,bicubic,-37.980,-25.787,-20 -volo_d5_512,58.920,41.080,73.200,26.800,296.09,512,1.150,bicubic,-38.370,-26.560,-35 -volo_d5_448,58.793,41.207,73.057,26.943,295.91,448,1.150,bicubic,-38.447,-26.683,-31 -tf_efficientnetv2_m_in21ft1k,58.643,41.357,73.980,26.020,54.14,480,1.000,bicubic,-38.327,-25.630,-15 -vit_large_r50_s32_224,58.633,41.367,71.720,28.280,328.99,224,0.900,bicubic,-37.547,-27.820,+64 -deit3_large_patch16_384,58.357,41.643,72.970,27.030,304.76,384,1.000,bicubic,-38.493,-26.650,-7 -deit3_huge_patch14_224,58.110,41.890,72.130,27.870,632.13,224,0.900,bicubic,-38.470,-27.390,+15 -tf_efficientnet_b8_ap,57.830,42.170,72.953,27.047,87.41,672,0.954,bicubic,-38.720,-26.587,+18 -convnext_small_in22ft1k,57.533,42.467,72.677,27.323,50.22,224,0.875,bicubic,-38.927,-26.793,+27 -cait_m48_448,57.477,42.523,71.867,28.133,356.46,448,1.000,bicubic,-39.403,-27.753,-14 -cait_m36_384,57.467,42.533,72.320,27.680,271.22,384,1.000,bicubic,-39.363,-27.340,-10 -tf_efficientnet_b3_ns,57.413,42.587,72.387,27.613,12.23,300,0.904,bicubic,-38.687,-27.093,+65 -volo_d4_448,57.293,42.707,71.533,28.467,193.41,448,1.150,bicubic,-39.777,-28.217,-30 -vit_base_patch16_224,56.840,43.160,70.637,29.363,86.57,224,0.900,bicubic,-39.460,-28.923,+41 -volo_d5_224,56.490,43.510,70.647,29.353,295.46,224,0.960,bicubic,-40.390,-29.023,-21 
-deit3_large_patch16_224,56.463,43.537,70.463,29.537,304.37,224,0.900,bicubic,-39.727,-28.837,+52 -xcit_large_24_p8_384_dist,56.350,43.650,71.320,28.680,188.93,384,1.000,bicubic,-40.410,-28.240,-9 -xcit_large_24_p8_224_dist,56.027,43.973,70.663,29.337,188.93,224,1.000,bicubic,-40.613,-28.797,+1 -xcit_large_24_p16_384_dist,54.910,45.090,69.863,30.137,189.10,384,1.000,bicubic,-42.030,-29.647,-27 -volo_d4_224,54.743,45.257,68.860,31.140,192.96,224,0.960,bicubic,-42.037,-30.810,-16 -deit3_small_patch16_384_in21ft1k,54.467,45.533,68.310,31.690,22.21,384,1.000,bicubic,-42.203,-31.330,-5 -vit_base_r50_s16_384,54.400,45.600,69.563,30.437,98.95,384,1.000,bicubic,-42.050,-30.097,+15 -resnetv2_152x4_bitm,54.323,45.677,70.173,29.827,936.53,480,1.000,bilinear,-42.557,-29.487,-28 -xcit_large_24_p16_224_dist,54.260,45.740,68.980,31.020,189.10,224,1.000,bicubic,-42.060,-30.520,+29 -vit_small_r26_s32_384,54.203,45.797,68.747,31.253,36.47,384,1.000,bicubic,-41.857,-30.803,+59 -volo_d3_448,53.993,46.007,68.023,31.977,86.63,448,1.000,bicubic,-43.027,-31.657,-39 -tf_efficientnet_b5_ap,53.867,46.133,69.163,30.837,30.39,456,0.934,bicubic,-42.213,-30.377,+51 -xcit_medium_24_p8_224_dist,53.663,46.337,68.407,31.593,84.32,224,1.000,bicubic,-42.857,-31.103,+1 -tf_efficientnet_b2_ns,53.597,46.403,70.277,29.723,9.11,260,0.890,bicubic,-41.923,-29.063,+119 -tf_efficientnet_b6_ap,53.567,46.433,68.550,31.450,43.04,528,0.942,bicubic,-42.803,-31.000,+15 -cait_s36_384,53.550,46.450,68.020,31.980,68.37,384,1.000,bicubic,-43.080,-31.580,-11 -convnext_large,53.533,46.467,68.183,31.817,197.77,224,0.875,bicubic,-42.487,-31.287,+58 -deit3_base_patch16_384,53.510,46.490,67.630,32.370,86.88,384,1.000,bicubic,-42.720,-31.770,+32 -deit3_base_patch16_224,53.457,46.543,67.593,32.407,86.59,224,0.900,bicubic,-42.323,-31.677,+81 -tf_efficientnet_b8,53.410,46.590,69.090,30.910,87.41,672,0.954,bicubic,-43.290,-30.440,-21 -xcit_medium_24_p8_384_dist,53.407,46.593,68.137,31.863,84.32,384,1.000,bicubic,-43.373,-31.473,-30 -vit_base_patch32_384,53.300,46.700,68.043,31.957,88.30,384,1.000,bicubic,-42.600,-31.397,+65 -tf_efficientnet_b7_ap,53.260,46.740,68.867,31.133,66.35,600,0.949,bicubic,-43.090,-30.723,+9 -xcit_medium_24_p16_384_dist,53.213,46.787,68.057,31.943,84.40,384,1.000,bicubic,-43.487,-31.543,-26 -tf_efficientnetv2_s_in21ft1k,53.143,46.857,69.007,30.993,21.46,384,1.000,bicubic,-43.327,-30.563,-8 -tf_efficientnet_b4_ap,53.093,46.907,68.213,31.787,19.34,380,0.922,bicubic,-42.397,-31.177,+109 -regnetz_e8,53.017,46.983,67.140,32.860,57.70,320,1.000,bicubic,-43.583,-32.470,-21 -dm_nfnet_f5,52.870,47.130,67.427,32.573,377.21,544,0.954,bicubic,-43.940,-32.243,-41 -volo_d3_224,52.703,47.297,66.320,33.680,86.33,224,0.960,bicubic,-43.737,-33.300,-5 -deit3_small_patch16_224_in21ft1k,52.690,47.310,66.877,33.123,22.06,224,1.000,bicubic,-43.130,-32.523,+66 -dm_nfnet_f6,52.447,47.553,67.113,32.887,438.36,576,0.956,bicubic,-44.473,-32.607,-53 -tf_efficientnet_b7,52.393,47.607,68.230,31.770,66.35,600,0.949,bicubic,-44.187,-31.290,-24 -tf_efficientnetv2_l,52.387,47.613,67.240,32.760,118.52,480,1.000,bicubic,-44.263,-32.320,-30 -xcit_small_24_p8_384_dist,52.360,47.640,66.833,33.167,47.63,384,1.000,bicubic,-44.450,-32.797,-46 -swsl_resnet18,52.337,47.663,70.477,29.523,11.69,224,0.875,bilinear,-38.733,-27.733,+512 -efficientnetv2_rw_m,52.323,47.677,67.210,32.790,53.24,416,1.000,bicubic,-43.947,-32.350,+8 -deit_base_distilled_patch16_384,52.253,47.747,67.737,32.263,87.63,384,1.000,bicubic,-44.257,-31.853,-22 
-xcit_medium_24_p16_224_dist,52.197,47.803,66.893,33.107,84.40,224,1.000,bicubic,-44.063,-32.517,+7 -xcit_small_24_p8_224_dist,52.197,47.803,66.767,33.233,47.63,224,1.000,bicubic,-44.353,-32.803,-29 -dm_nfnet_f3,52.130,47.870,66.740,33.260,254.92,416,0.940,bicubic,-44.600,-32.890,-46 -resnetv2_152x2_bit_teacher_384,51.943,48.057,68.663,31.337,236.34,384,1.000,bicubic,-44.247,-30.837,+11 -resmlp_big_24_224_in22ft1k,51.893,48.107,68.470,31.530,129.14,224,0.875,bicubic,-44.457,-31.050,-9 -xcit_small_24_p16_384_dist,51.883,48.117,66.367,33.633,47.67,384,1.000,bicubic,-44.457,-33.213,-9 -cait_s24_384,51.783,48.217,66.320,33.680,47.06,384,1.000,bicubic,-44.787,-33.230,-35 -resnetv2_152x2_bitm,51.757,48.243,69.247,30.753,236.34,448,1.000,bilinear,-44.763,-30.343,-32 -ecaresnet269d,51.663,48.337,66.043,33.957,102.09,352,1.000,bicubic,-44.797,-33.567,-27 -vit_base_patch16_224_miil,51.550,48.450,65.207,34.793,86.54,224,0.875,bilinear,-44.480,-34.143,+27 -convnext_tiny_384_in22ft1k,51.453,48.547,66.427,33.573,28.59,384,1.000,bicubic,-44.717,-33.053,+8 -convnext_base,51.247,48.753,66.190,33.810,88.59,224,0.875,bicubic,-44.693,-33.190,+35 -pit_b_distilled_224,51.157,48.843,66.773,33.227,74.79,224,0.900,bicubic,-44.913,-32.607,+17 -xcit_small_12_p8_384_dist,51.093,48.907,65.833,34.167,26.21,384,1.000,bicubic,-45.387,-33.657,-35 -convnext_tiny_in22ft1k,51.083,48.917,66.620,33.380,28.59,224,0.875,bicubic,-44.647,-32.740,+54 -dm_nfnet_f4,50.907,49.093,65.563,34.437,316.07,512,0.951,bicubic,-45.873,-34.057,-63 -tf_efficientnet_b1_ns,50.900,49.100,67.927,32.073,7.79,240,0.882,bicubic,-43.960,-31.323,+169 -volo_d2_384,50.883,49.117,65.637,34.363,58.87,384,1.000,bicubic,-45.827,-33.963,-58 -xcit_small_24_p16_224_dist,50.730,49.270,65.033,34.967,47.67,224,1.000,bicubic,-45.060,-34.317,+42 -tf_efficientnetv2_m,50.560,49.440,66.000,34.000,54.14,480,1.000,bicubic,-45.980,-33.570,-45 -xcit_small_12_p16_384_dist,50.527,49.473,65.297,34.703,26.25,384,1.000,bicubic,-45.803,-34.193,-21 -efficientnet_b4,50.503,49.497,65.707,34.293,19.34,384,1.000,bicubic,-45.027,-33.693,+72 -volo_d1_384,50.473,49.527,64.927,35.073,26.78,384,1.000,bicubic,-45.997,-34.623,-42 -xcit_small_12_p8_224_dist,50.440,49.560,65.433,34.567,26.21,224,1.000,bicubic,-45.520,-33.987,+20 -resnetv2_101x3_bitm,50.403,49.597,67.787,32.213,387.93,448,1.000,bilinear,-45.847,-31.803,-16 -regnetz_040h,50.330,49.670,65.630,34.370,28.94,320,1.000,bicubic,-46.000,-33.890,-27 -ssl_resnext101_32x16d,50.250,49.750,66.017,33.983,194.03,224,0.875,bilinear,-45.160,-33.393,+82 -cait_s24_224,50.240,49.760,65.023,34.977,46.92,224,1.000,bicubic,-45.400,-34.367,+54 -eca_nfnet_l2,50.233,49.767,65.453,34.547,56.72,384,1.000,bicubic,-46.217,-34.167,-43 -vit_small_patch16_384,50.167,49.833,65.807,34.193,22.20,384,1.000,bicubic,-45.813,-33.783,+12 -resnest269e,50.153,49.847,64.663,35.337,110.93,416,0.928,bicubic,-45.967,-34.857,-8 -deit_base_distilled_patch16_224,50.060,49.940,66.223,33.777,87.34,224,0.900,bicubic,-45.690,-33.057,+35 -tf_efficientnet_b3_ap,50.043,49.957,65.213,34.787,12.23,300,0.904,bicubic,-44.927,-33.897,+136 -resnest200e,49.877,50.123,64.740,35.260,70.20,320,0.909,bicubic,-46.193,-34.740,-6 -volo_d2_224,49.813,50.187,64.587,35.413,58.68,224,0.960,bicubic,-46.607,-34.913,-45 -seresnextaa101d_32x8d,49.767,50.233,64.420,35.580,93.59,288,1.000,bicubic,-46.653,-35.100,-47 -xception65,49.757,50.243,63.523,36.477,39.92,299,0.940,bicubic,-45.933,-35.787,+42 -swinv2_base_window16_256,49.667,50.333,63.810,36.190,87.92,256,0.900,bicubic,-46.503,-35.590,-18 
-convnext_small,49.573,50.427,64.830,35.170,50.22,224,0.875,bicubic,-46.037,-34.430,+47 -cait_xs24_384,49.537,50.463,64.900,35.100,26.67,384,1.000,bicubic,-46.473,-34.530,+1 -tf_efficientnet_b5,49.510,50.490,65.650,34.350,30.39,456,0.934,bicubic,-46.470,-33.800,+2 -resnetv2_152x2_bit_teacher,49.487,50.513,65.620,34.380,236.34,224,0.875,bicubic,-46.263,-33.810,+24 -resnet200d,49.473,50.527,64.327,35.673,64.69,320,1.000,bicubic,-46.637,-35.133,-19 -xcit_small_12_p16_224_dist,49.420,50.580,63.840,36.160,26.25,224,1.000,bicubic,-46.310,-35.460,+27 -resnest101e,49.363,50.637,65.597,34.403,48.28,256,0.875,bilinear,-46.197,-33.673,+45 -regnetz_040,49.283,50.717,64.063,35.937,27.12,320,1.000,bicubic,-46.897,-35.447,-28 -resnet152d,49.250,50.750,64.417,35.583,60.21,320,1.000,bicubic,-46.620,-35.013,+8 -vit_base_patch32_224,49.250,50.750,64.343,35.657,88.22,224,0.900,bicubic,-45.140,-34.717,+203 -seresnet152d,49.250,50.750,64.177,35.823,66.84,320,1.000,bicubic,-47.060,-35.333,-47 -xcit_large_24_p8_224,49.237,50.763,62.840,37.160,188.93,224,1.000,bicubic,-46.843,-36.310,-23 -ssl_resnext101_32x8d,49.097,50.903,65.483,34.517,88.79,224,0.875,bilinear,-46.233,-33.827,+70 -resmlp_big_24_distilled_224,49.097,50.903,65.477,34.523,129.14,224,0.875,bicubic,-46.773,-33.963,+1 -volo_d1_224,48.970,51.030,63.190,36.810,26.63,224,0.960,bicubic,-47.060,-36.200,-17 -repvgg_b3,48.920,51.080,64.880,35.120,123.09,224,0.875,bilinear,-45.640,-34.030,+179 -resnetrs420,48.857,51.143,63.427,36.573,191.89,416,1.000,bicubic,-47.553,-36.113,-64 -deit3_small_patch16_384,48.670,51.330,62.823,37.177,22.21,384,1.000,bicubic,-46.940,-36.567,+29 -seresnext101d_32x8d,48.597,51.403,62.960,37.040,93.59,288,1.000,bicubic,-47.763,-36.510,-63 -efficientnetv2_rw_s,48.593,51.407,63.837,36.163,23.94,384,1.000,bicubic,-47.107,-35.543,+18 -regnetz_d32,48.590,51.410,65.190,34.810,27.58,320,0.950,bicubic,-47.280,-34.240,-5 -swinv2_small_window16_256,48.577,51.423,62.763,37.237,49.73,256,0.900,bicubic,-47.493,-36.577,-29 -efficientnet_b3,48.567,51.433,64.250,35.750,12.23,320,1.000,bicubic,-46.573,-34.960,+78 -ecaresnet101d,48.537,51.463,64.097,35.903,44.57,224,0.875,bicubic,-46.623,-35.133,+74 -vit_small_r26_s32_224,48.367,51.633,63.800,36.200,36.43,224,0.900,bicubic,-46.753,-35.420,+82 -dm_nfnet_f2,48.367,51.633,63.230,36.770,193.78,352,0.920,bicubic,-48.093,-36.310,-81 -swinv2_base_window8_256,48.340,51.660,63.597,36.403,87.92,256,0.900,bicubic,-47.730,-35.823,-36 -repvgg_b3g4,48.303,51.697,64.793,35.207,83.83,224,0.875,bilinear,-46.197,-34.227,+176 -vit_large_patch32_384,48.247,51.753,61.823,38.177,306.63,384,1.000,bicubic,-46.993,-37.497,+62 -convit_base,48.220,51.780,63.007,36.993,86.54,224,0.875,bicubic,-46.880,-36.133,+81 -swin_s3_base_224,48.140,51.860,62.263,37.737,71.13,224,0.900,bicubic,-47.900,-37.087,-34 -sequencer2d_l,48.107,51.893,62.343,37.657,54.30,224,0.875,bicubic,-47.763,-37.127,-18 -resnetrs350,48.057,51.943,62.650,37.350,163.96,384,1.000,bicubic,-48.183,-36.820,-60 -regnetz_d8,48.013,51.987,64.410,35.590,23.37,320,1.000,bicubic,-47.997,-35.110,-33 -twins_svt_large,47.947,52.053,62.910,37.090,99.27,224,0.900,bicubic,-47.773,-36.460,0 -vit_relpos_base_patch16_224,47.937,52.063,62.847,37.153,86.43,224,0.900,bicubic,-47.193,-36.453,+66 -mixer_b16_224_miil,47.800,52.200,63.397,36.603,59.88,224,0.875,bilinear,-47.090,-35.683,+107 -repvgg_b2g4,47.793,52.207,64.377,35.623,61.76,224,0.875,bilinear,-46.027,-34.553,+244 -vit_relpos_base_patch16_clsgap_224,47.763,52.237,62.410,37.590,86.43,224,0.900,bicubic,-47.487,-36.790,+50 
-vit_relpos_medium_patch16_cls_224,47.660,52.340,61.803,38.197,38.76,224,0.900,bicubic,-47.640,-37.287,+45 -seresnext101_32x8d,47.653,52.347,61.447,38.553,93.57,288,1.000,bicubic,-48.477,-37.913,-57 -eca_nfnet_l1,47.643,52.357,62.763,37.237,41.41,320,1.000,bicubic,-48.297,-36.727,-34 -resnetv2_50x3_bitm,47.593,52.407,65.603,34.397,217.32,448,1.000,bilinear,-48.677,-34.027,-75 -pit_s_distilled_224,47.547,52.453,63.497,36.503,24.04,224,0.900,bicubic,-47.193,-35.683,+119 -resnest50d_4s2x40d,47.490,52.510,63.817,36.183,30.42,224,0.875,bicubic,-47.220,-35.323,+123 -efficientnet_b3_pruned,47.443,52.557,62.787,37.213,9.86,300,0.904,bicubic,-47.137,-36.283,+144 -crossvit_18_dagger_408,47.380,52.620,60.943,39.057,44.61,408,1.000,bicubic,-48.750,-38.527,-64 -xcit_small_24_p8_224,47.297,52.703,60.983,39.017,47.63,224,1.000,bicubic,-48.613,-38.197,-37 -tresnet_m,47.217,52.783,62.000,38.000,31.39,224,0.875,bilinear,-48.163,-37.150,+29 -tf_efficientnet_b6,47.207,52.793,63.110,36.890,43.04,528,0.942,bicubic,-49.083,-36.410,-84 -convnext_tiny,47.180,52.820,63.217,36.783,28.59,224,0.875,bicubic,-47.780,-35.983,+81 -ssl_resnext101_32x4d,47.167,52.833,63.367,36.633,44.18,224,0.875,bilinear,-47.983,-35.933,+47 -resnetrs270,47.107,52.893,62.013,37.987,129.86,352,1.000,bicubic,-48.953,-37.467,-58 -regnetz_d8_evos,47.080,52.920,63.390,36.610,23.46,320,0.950,bicubic,-49.140,-36.100,-78 -tf_efficientnet_b4,47.080,52.920,62.857,37.143,19.34,380,0.922,bicubic,-48.510,-36.473,-5 -vit_base_patch16_rpn_224,47.063,52.937,62.403,37.597,86.54,224,0.900,bicubic,-47.757,-36.687,+95 -swinv2_small_window8_256,47.030,52.970,62.297,37.703,49.73,256,0.900,bicubic,-48.700,-37.063,-25 -xcit_small_12_p8_224,46.983,53.017,60.537,39.463,26.21,224,1.000,bicubic,-48.437,-38.663,+12 -xcit_large_24_p16_224,46.960,53.040,60.670,39.330,189.10,224,1.000,bicubic,-47.990,-38.160,+75 -convnext_tiny_hnf,46.937,53.063,61.200,38.800,28.59,224,0.950,bicubic,-47.833,-37.960,+97 -xception65p,46.933,53.067,61.083,38.917,39.82,299,0.940,bicubic,-48.727,-38.187,-19 -resnet101d,46.893,53.107,62.323,37.677,44.57,320,1.000,bicubic,-48.857,-37.117,-35 -resnet152,46.800,53.200,60.410,39.590,60.19,224,0.950,bicubic,-48.750,-38.850,-10 -gluon_seresnext101_64x4d,46.677,53.323,61.297,38.703,88.23,224,0.875,bicubic,-47.983,-37.683,+117 -twins_pcpvt_large,46.627,53.373,62.233,37.767,60.99,224,0.900,bicubic,-49.093,-37.257,-31 -dm_nfnet_f1,46.547,53.453,61.403,38.597,132.63,320,0.910,bicubic,-49.833,-38.067,-112 -regnetv_064,46.480,53.520,62.253,37.747,30.58,288,1.000,bicubic,-49.290,-37.167,-41 -xcit_medium_24_p8_224,46.473,53.527,59.647,40.353,84.32,224,1.000,bicubic,-49.397,-39.433,-50 -crossvit_15_dagger_408,46.457,53.543,60.487,39.513,28.50,408,1.000,bicubic,-49.363,-38.823,-47 -resnetrs200,46.430,53.570,61.060,38.940,93.21,320,1.000,bicubic,-49.910,-38.490,-110 -swin_s3_small_224,46.393,53.607,60.897,39.103,49.74,224,0.900,bicubic,-49.447,-38.303,-51 -fbnetv3_g,46.347,53.653,62.403,37.597,16.62,288,0.950,bilinear,-48.783,-36.797,+31 -sequencer2d_m,46.297,53.703,60.903,39.097,38.31,224,0.875,bicubic,-49.303,-38.367,-24 -tresnet_xl,46.280,53.720,61.950,38.050,78.44,224,0.875,bilinear,-48.780,-37.310,+46 -xcit_tiny_24_p8_384_dist,46.263,53.737,60.713,39.287,12.11,384,1.000,bicubic,-49.977,-38.727,-101 -xcit_tiny_24_p8_224_dist,46.257,53.743,60.607,39.393,12.11,224,1.000,bicubic,-49.203,-38.753,-9 -gernet_m,46.170,53.830,62.700,37.300,21.14,224,0.875,bilinear,-48.380,-36.230,+121 
-deit_small_distilled_patch16_224,46.163,53.837,62.403,37.597,22.44,224,0.900,bicubic,-48.437,-36.697,+110 -regnety_160,46.163,53.837,61.843,38.157,83.59,288,1.000,bicubic,-49.717,-37.717,-66 -crossvit_base_240,46.133,53.867,60.223,39.777,105.03,240,0.875,bicubic,-48.937,-38.757,+39 -swinv2_cr_small_ns_224,46.123,53.877,60.787,39.213,49.70,224,0.900,bicubic,-49.567,-38.523,-41 -resnest50d_1s4x24d,46.093,53.907,62.377,37.623,25.68,224,0.875,bicubic,-48.297,-36.693,+130 -tf_efficientnet_b0_ns,46.053,53.947,63.270,36.730,5.29,224,0.875,bicubic,-47.697,-35.700,+206 -jx_nest_base,46.040,53.960,60.093,39.907,67.72,224,0.875,bicubic,-49.500,-39.207,-30 -resnet51q,46.027,53.973,60.903,39.097,35.70,288,1.000,bilinear,-49.173,-38.377,+10 -vit_small_patch16_224,46.000,54.000,61.820,38.180,22.05,224,0.900,bicubic,-48.880,-37.450,+59 -vit_relpos_medium_patch16_224,45.960,54.040,61.030,38.970,38.75,224,0.900,bicubic,-49.240,-38.190,+9 -regnety_080,45.953,54.047,60.880,39.120,39.18,288,1.000,bicubic,-49.897,-38.560,-69 -resnest50d,45.943,54.057,62.630,37.370,27.48,224,0.875,bilinear,-48.677,-36.400,+95 -deit3_small_patch16_224,45.923,54.077,58.893,41.107,22.06,224,0.900,bicubic,-48.767,-39.857,+84 -crossvit_18_240,45.903,54.097,60.383,39.617,43.27,240,0.875,bicubic,-49.167,-38.737,+27 -twins_pcpvt_base,45.893,54.107,61.343,38.657,43.83,224,0.900,bicubic,-49.567,-38.047,-26 -regnety_032,45.883,54.117,61.533,38.467,19.44,288,1.000,bicubic,-49.597,-37.787,-31 -levit_384,45.873,54.127,61.690,38.310,39.13,224,0.900,bicubic,-49.337,-37.470,-1 -twins_svt_base,45.873,54.127,60.967,39.033,56.07,224,0.900,bicubic,-49.697,-38.263,-44 -crossvit_18_dagger_240,45.850,54.150,59.923,40.077,44.27,240,0.875,bicubic,-49.330,-39.197,+1 -vit_relpos_medium_patch16_rpn_224,45.753,54.247,60.957,39.043,38.73,224,0.900,bicubic,-49.317,-38.233,+20 -vit_srelpos_medium_patch16_224,45.730,54.270,61.070,38.930,38.74,224,0.900,bicubic,-49.170,-38.130,+42 -crossvit_15_dagger_240,45.697,54.303,60.090,39.910,28.21,240,0.875,bicubic,-49.283,-39.070,+28 -regnetz_c16,45.690,54.310,62.517,37.483,13.46,320,0.940,bicubic,-49.700,-36.793,-24 -convmixer_1536_20,45.660,54.340,61.770,38.230,51.63,224,0.960,bicubic,-49.310,-37.400,+29 -gc_efficientnetv2_rw_t,45.657,54.343,60.200,39.800,13.68,288,1.000,bicubic,-49.633,-39.020,-16 -efficientnetv2_rw_t,45.607,54.393,60.187,39.813,13.65,288,1.000,bicubic,-49.463,-39.033,+13 -gluon_seresnext101_32x4d,45.597,54.403,61.140,38.860,48.96,224,0.875,bicubic,-48.853,-37.950,+102 -xcit_tiny_24_p16_384_dist,45.587,54.413,60.510,39.490,12.12,384,1.000,bicubic,-49.903,-38.850,-44 -xcit_medium_24_p16_224,45.527,54.473,59.000,41.000,84.40,224,1.000,bicubic,-49.603,-39.930,-1 -xcit_small_24_p16_224,45.517,54.483,58.887,41.113,47.67,224,1.000,bicubic,-49.563,-40.183,+6 -dm_nfnet_f0,45.480,54.520,60.990,39.010,71.49,256,0.900,bicubic,-50.210,-38.340,-69 -resnext101_64x4d,45.453,54.547,59.040,40.960,83.46,288,1.000,bicubic,-50.087,-40.250,-54 -gluon_resnet152_v1d,45.437,54.563,60.083,39.917,60.21,224,0.875,bicubic,-49.003,-38.927,+97 -nfnet_l0,45.423,54.577,62.073,37.927,35.07,288,1.000,bicubic,-49.967,-37.347,-36 -ssl_resnext50_32x4d,45.403,54.597,62.033,37.967,25.03,224,0.875,bilinear,-49.297,-37.207,+59 -resnetv2_50x1_bit_distilled,45.397,54.603,62.310,37.690,25.55,224,0.875,bicubic,-50.003,-37.120,-41 -xcit_small_12_p16_224,45.397,54.603,59.417,40.583,26.25,224,1.000,bicubic,-49.423,-39.643,+38 -jx_nest_small,45.353,54.647,59.010,40.990,38.35,224,0.875,bicubic,-50.177,-40.210,-58 
-cs3se_edgenet_x,45.327,54.673,60.383,39.617,50.72,320,1.000,bicubic,-50.683,-39.057,-114 -resnet61q,45.283,54.717,59.400,40.600,36.85,288,1.000,bicubic,-49.837,-39.680,-7 -cs3edgenet_x,45.280,54.720,60.287,39.713,47.82,288,1.000,bicubic,-50.190,-38.993,-54 -tresnet_xl_448,45.223,54.777,61.440,38.560,78.44,448,0.875,bilinear,-50.287,-37.900,-60 -nasnetalarge,45.207,54.793,57.880,42.120,88.75,331,0.911,bicubic,-49.943,-41.250,-20 -convit_small,45.197,54.803,60.497,39.503,27.78,224,0.875,bicubic,-49.723,-38.613,+17 -swin_small_patch4_window7_224,45.157,54.843,60.333,39.667,49.61,224,0.900,bicubic,-50.563,-38.957,-86 -tf_efficientnet_b3,45.100,54.900,60.643,39.357,12.23,300,0.904,bicubic,-49.810,-38.467,+16 -resnet101,45.087,54.913,59.577,40.423,44.55,224,0.950,bicubic,-49.893,-39.503,+4 -sequencer2d_s,45.083,54.917,60.067,39.933,27.65,224,0.875,bicubic,-50.387,-39.203,-60 -rexnet_200,45.057,54.943,62.313,37.687,16.37,224,0.875,bicubic,-49.603,-36.777,+53 -resnetrs152,44.957,55.043,59.707,40.293,86.62,320,1.000,bicubic,-51.003,-39.673,-120 -resnetv2_101,44.933,55.067,58.837,41.163,44.54,224,0.950,bicubic,-49.997,-40.283,+9 -ecaresnetlight,44.893,55.107,60.777,39.223,30.16,224,0.875,bicubic,-49.247,-38.173,+116 -deit_base_patch16_224,44.873,55.127,59.190,40.810,86.57,224,0.900,bicubic,-50.137,-39.790,-4 -cait_xxs36_384,44.777,55.223,59.367,40.633,17.37,384,1.000,bicubic,-50.443,-39.953,-39 -deit_base_patch16_384,44.770,55.230,59.627,40.373,86.86,384,1.000,bicubic,-50.880,-39.613,-89 -resmlp_36_distilled_224,44.757,55.243,61.073,38.927,44.69,224,0.875,bicubic,-49.793,-38.087,+63 -gernet_l,44.730,55.270,58.947,41.053,31.08,256,0.875,bilinear,-50.200,-40.253,+1 -xcit_tiny_24_p16_224_dist,44.720,55.280,59.420,40.580,12.12,224,1.000,bicubic,-49.500,-39.540,+101 -resmlp_24_distilled_224,44.710,55.290,61.463,38.537,30.02,224,0.875,bicubic,-49.630,-37.627,+85 -tf_efficientnet_b2_ap,44.707,55.293,60.680,39.320,9.11,260,0.890,bicubic,-49.563,-38.270,+90 -swinv2_tiny_window16_256,44.573,55.427,59.577,40.423,28.35,256,0.900,bicubic,-50.787,-39.723,-59 -vit_relpos_small_patch16_224,44.550,55.450,60.203,39.797,21.98,224,0.900,bicubic,-50.140,-38.897,+33 -gmlp_s16_224,44.483,55.517,58.627,41.373,19.42,224,0.875,bicubic,-49.027,-40.153,+182 -ens_adv_inception_resnet_v2,44.390,55.610,58.110,41.890,55.84,299,0.897,bicubic,-49.730,-40.680,+107 -tresnet_l,44.360,55.640,59.947,40.053,55.99,224,0.875,bilinear,-50.540,-39.083,0 -gluon_resnext101_32x4d,44.287,55.713,59.090,40.910,44.18,224,0.875,bicubic,-49.833,-39.840,+104 -poolformer_m48,44.270,55.730,59.300,40.700,73.47,224,0.950,bicubic,-50.860,-39.820,-40 -wide_resnet50_2,44.180,55.820,59.697,40.303,68.88,224,0.875,bicubic,-50.480,-39.353,+35 -regnetz_c16_evos,44.160,55.840,61.057,38.943,13.49,320,0.950,bicubic,-51.470,-38.363,-101 -vit_srelpos_small_patch16_224,44.137,55.863,59.710,40.290,21.97,224,0.900,bicubic,-50.413,-39.430,+50 -crossvit_15_240,44.123,55.877,59.130,40.870,27.53,240,0.875,bicubic,-50.597,-39.950,+18 -seresnext50_32x4d,44.120,55.880,59.480,40.520,27.56,224,0.875,bicubic,-50.690,-39.650,+4 -resnetv2_101x1_bitm,44.113,55.887,61.980,38.020,44.54,448,1.000,bilinear,-51.207,-37.390,-66 -gluon_resnet152_v1s,44.070,55.930,58.700,41.300,60.32,224,0.875,bicubic,-50.650,-40.360,+16 -pit_b_224,44.067,55.933,58.017,41.983,73.76,224,0.900,bicubic,-50.723,-40.803,+3 -ssl_resnet50,44.020,55.980,61.910,38.090,25.56,224,0.875,bilinear,-50.300,-37.240,+71 -poolformer_m36,44.020,55.980,59.067,40.933,56.17,224,0.950,bicubic,-50.990,-40.033,-30 
-inception_resnet_v2,44.007,55.993,57.907,42.093,55.84,299,0.897,bicubic,-50.323,-40.893,+68 -pnasnet5large,43.953,56.047,56.723,43.277,86.06,331,0.911,bicubic,-51.407,-42.407,-76 -pit_s_224,43.893,56.107,58.637,41.363,23.46,224,0.900,bicubic,-50.687,-40.293,+34 -gluon_resnext101_64x4d,43.880,56.120,58.703,41.297,83.46,224,0.875,bicubic,-50.470,-40.177,+62 -coat_lite_small,43.813,56.187,57.143,42.857,19.84,224,0.900,bicubic,-51.267,-41.887,-45 -regnetv_040,43.793,56.207,58.460,41.540,20.64,288,1.000,bicubic,-51.937,-40.920,-130 -tnt_s_patch16_224,43.777,56.223,59.197,40.803,23.76,224,0.900,bicubic,-50.793,-39.983,+32 -mobilevitv2_200_in22ft1k,43.770,56.230,59.500,40.500,18.45,256,0.888,bicubic,-51.280,-39.580,-40 -swinv2_cr_small_224,43.770,56.230,57.690,42.310,49.70,224,0.900,bicubic,-51.630,-41.360,-89 -cspresnext50,43.763,56.237,60.143,39.857,20.57,256,0.887,bilinear,-50.477,-38.907,+68 -cait_xxs36_224,43.760,56.240,58.730,41.270,17.30,224,1.000,bicubic,-50.170,-40.160,+102 -ecaresnet50d,43.743,56.257,60.373,39.627,25.58,224,0.875,bicubic,-50.457,-38.647,+73 -ecaresnet101d_pruned,43.740,56.260,59.607,40.393,24.88,224,0.875,bicubic,-50.720,-39.483,+38 -swin_s3_tiny_224,43.717,56.283,59.510,40.490,28.33,224,0.900,bicubic,-51.183,-39.650,-27 -tf_efficientnetv2_s,43.707,56.293,58.597,41.403,21.46,384,1.000,bicubic,-52.003,-40.803,-132 -rexnet_150,43.687,56.313,60.890,39.110,9.73,224,0.875,bicubic,-50.593,-38.190,+56 -pit_xs_distilled_224,43.660,56.340,60.707,39.293,11.00,224,0.900,bicubic,-49.580,-38.123,+179 -xcit_tiny_12_p8_224_dist,43.640,56.360,58.457,41.543,6.71,224,1.000,bicubic,-51.080,-40.723,-7 -edgenext_small,43.617,56.383,59.883,40.117,5.59,320,1.000,bicubic,-51.213,-39.527,-23 -crossvit_small_240,43.473,56.527,58.940,41.060,26.86,240,0.875,bicubic,-51.107,-40.180,+15 -cs3sedarknet_x,43.460,56.540,58.843,41.157,35.40,288,1.000,bicubic,-51.960,-40.477,-106 -gluon_resnet101_v1d,43.430,56.570,58.610,41.390,44.57,224,0.875,bicubic,-50.750,-40.330,+64 -ecaresnet50t,43.413,56.587,59.300,40.700,25.57,320,0.950,bicubic,-51.657,-39.990,-62 -gluon_resnet101_v1s,43.363,56.637,58.510,41.490,44.67,224,0.875,bicubic,-50.807,-40.500,+65 -cspdarknet53,43.353,56.647,59.430,40.570,27.64,256,0.887,bilinear,-50.737,-39.550,+72 -xcit_tiny_24_p8_224,43.303,56.697,57.273,42.727,12.11,224,1.000,bicubic,-51.587,-41.917,-37 -xcit_tiny_12_p8_384_dist,43.300,56.700,58.177,41.823,6.71,384,1.000,bicubic,-52.040,-41.163,-100 -dpn68b,43.277,56.723,58.673,41.327,12.61,224,0.875,bicubic,-50.343,-40.027,+126 -convmixer_768_32,43.267,56.733,59.367,40.633,21.11,224,0.960,bicubic,-51.163,-39.743,+25 -visformer_small,43.257,56.743,57.980,42.020,40.22,224,0.900,bicubic,-51.713,-41.230,-55 -eca_nfnet_l0,43.233,56.767,59.907,40.093,24.14,288,1.000,bicubic,-52.217,-39.483,-117 -regnety_064,43.223,56.777,57.230,42.770,30.58,288,1.000,bicubic,-52.567,-42.060,-162 -vit_relpos_base_patch32_plus_rpn_256,43.167,56.833,58.430,41.570,119.42,256,0.900,bicubic,-49.993,-39.890,+170 -vit_small_patch32_384,43.143,56.857,59.293,40.707,22.92,384,1.000,bicubic,-51.457,-39.847,-1 -resnest26d,43.140,56.860,60.637,39.363,17.07,224,0.875,bilinear,-50.100,-38.213,+160 -twins_pcpvt_small,43.087,56.913,58.877,41.123,24.11,224,0.900,bicubic,-51.513,-40.273,-4 -resmlp_36_224,43.050,56.950,59.313,40.687,44.69,224,0.875,bicubic,-50.600,-39.637,+110 -cspresnet50,43.047,56.953,59.167,40.833,21.62,256,0.887,bilinear,-50.813,-39.693,+83 -dpn131,43.040,56.960,57.420,42.580,79.25,224,0.875,bicubic,-50.710,-41.410,+97 
-tf_efficientnet_lite4,42.980,57.020,57.640,42.360,13.01,380,0.920,bilinear,-51.890,-41.450,-47 -twins_svt_small,42.930,57.070,58.467,41.533,24.06,224,0.900,bicubic,-51.840,-40.613,-37 -mobilevitv2_200_384_in22ft1k,42.917,57.083,58.987,41.013,18.45,384,1.000,bicubic,-52.473,-40.293,-119 -gluon_resnet152_v1b,42.893,57.107,57.740,42.260,60.19,224,0.875,bicubic,-51.137,-41.010,+60 -fbnetv3_d,42.890,57.110,59.690,40.310,10.31,256,0.950,bilinear,-50.960,-39.220,+78 -dpn107,42.860,57.140,57.363,42.637,86.92,224,0.875,bicubic,-51.100,-41.467,+65 -levit_256,42.813,57.187,57.903,42.097,18.89,224,0.900,bicubic,-51.597,-41.157,+9 -gluon_resnet152_v1c,42.810,57.190,57.737,42.263,60.21,224,0.875,bicubic,-51.080,-41.063,+70 -tf_efficientnet_b1_ap,42.800,57.200,58.817,41.183,7.79,240,0.882,bicubic,-50.830,-39.983,+103 -gcresnet50t,42.790,57.210,59.190,40.810,25.90,256,0.900,bicubic,-51.830,-39.790,-18 -gluon_xception65,42.790,57.210,58.820,41.180,39.92,299,0.903,bicubic,-51.220,-40.200,+56 -tresnet_l_448,42.750,57.250,58.943,41.057,55.99,448,0.875,bilinear,-52.650,-40.357,-132 -cs3darknet_x,42.717,57.283,58.197,41.803,35.05,288,1.000,bicubic,-52.553,-41.083,-119 -resnet50d,42.697,57.303,58.687,41.313,25.58,224,0.875,bicubic,-51.373,-40.233,+46 -gluon_seresnext50_32x4d,42.683,57.317,58.700,41.300,27.56,224,0.875,bicubic,-51.487,-40.210,+36 -convnext_nano,42.590,57.410,57.497,42.503,15.59,288,1.000,bicubic,-52.270,-41.653,-60 -xcit_tiny_12_p16_384_dist,42.587,57.413,58.087,41.913,6.72,384,1.000,bicubic,-51.943,-41.083,-10 -resnext101_32x8d,42.570,57.430,58.293,41.707,88.79,224,0.875,bilinear,-51.200,-40.657,+76 -regnety_040,42.567,57.433,57.037,42.963,20.65,288,1.000,bicubic,-52.903,-42.383,-149 -seresnet50,42.513,57.487,58.677,41.323,28.09,224,0.875,bicubic,-51.567,-40.273,+39 -nf_resnet50,42.507,57.493,59.520,40.480,25.56,288,0.940,bicubic,-51.883,-39.550,-3 -mobilevitv2_175_in22ft1k,42.500,57.500,58.133,41.867,14.25,256,0.888,bicubic,-52.280,-40.967,-59 -resnetrs101,42.443,57.557,57.290,42.710,63.62,288,0.940,bicubic,-52.807,-41.920,-128 -poolformer_s36,42.333,57.667,58.737,41.263,30.86,224,0.900,bicubic,-52.287,-40.313,-34 -jx_nest_tiny,42.330,57.670,57.043,42.957,17.06,224,0.875,bicubic,-52.620,-42.057,-85 -tf_efficientnetv2_b3,42.310,57.690,57.943,42.057,14.36,300,0.904,bicubic,-52.810,-41.257,-110 -convmixer_1024_20_ks9_p14,42.277,57.723,59.713,40.287,24.38,224,0.960,bicubic,-50.073,-38.707,+199 -dpn98,42.273,57.727,56.883,43.117,61.57,224,0.875,bicubic,-51.657,-42.037,+45 -xcit_tiny_24_p16_224,42.273,57.727,56.830,43.170,12.12,224,1.000,bicubic,-51.567,-41.930,+56 -deit_small_patch16_224,42.267,57.733,58.013,41.987,22.05,224,0.900,bicubic,-51.723,-40.947,+39 -tf_efficientnet_cc_b1_8e,42.220,57.780,58.430,41.570,39.72,240,0.882,bicubic,-51.350,-40.260,+90 -legacy_senet154,42.213,57.787,56.593,43.407,115.09,224,0.875,bilinear,-52.517,-42.507,-61 -cait_xxs24_384,42.183,57.817,57.460,42.540,12.03,384,1.000,bicubic,-52.747,-41.680,-90 -xception41p,42.163,57.837,56.890,43.110,26.91,299,0.940,bicubic,-52.897,-42.260,-106 -tf_efficientnet_b2,42.117,57.883,58.197,41.803,9.11,260,0.890,bicubic,-52.093,-40.843,+9 -gluon_resnext50_32x4d,42.043,57.957,57.670,42.330,25.03,224,0.875,bicubic,-51.607,-41.020,+73 -resnext50_32x4d,41.963,58.037,56.757,43.243,25.03,224,0.950,bicubic,-52.617,-42.043,-38 -ecaresnet50d_pruned,41.950,58.050,58.217,41.783,19.94,224,0.875,bicubic,-51.870,-40.783,+50 -efficientnet_b2,41.933,58.067,58.287,41.713,9.11,288,1.000,bicubic,-52.437,-40.763,-17 
-mobilevitv2_150_in22ft1k,41.920,58.080,57.923,42.077,10.59,256,0.888,bicubic,-52.770,-40.997,-60 -xcit_tiny_12_p16_224_dist,41.920,58.080,57.227,42.773,6.72,224,1.000,bicubic,-51.430,-41.513,+107 -mobilevitv2_150_384_in22ft1k,41.777,58.223,57.820,42.180,10.59,384,1.000,bicubic,-53.563,-41.310,-153 -mobilevitv2_175_384_in22ft1k,41.670,58.330,58.010,41.990,14.25,384,1.000,bicubic,-53.570,-41.370,-146 -edgenext_small_rw,41.663,58.337,58.520,41.480,7.83,320,1.000,bicubic,-52.697,-40.520,-18 -dla102x2,41.643,58.357,57.940,42.060,41.28,224,0.875,bilinear,-52.357,-41.090,+23 -hrnet_w64,41.640,58.360,57.123,42.877,128.06,224,0.875,bilinear,-52.190,-41.797,+40 -gluon_senet154,41.617,58.383,56.377,43.623,115.09,224,0.875,bicubic,-53.093,-42.593,-71 -poolformer_s24,41.607,58.393,58.440,41.560,21.39,224,0.900,bicubic,-52.723,-40.620,-19 -inception_v4,41.580,58.420,55.390,44.610,42.68,299,0.875,bicubic,-52.800,-43.430,-28 -swinv2_cr_tiny_ns_224,41.543,58.457,57.190,42.810,28.33,224,0.900,bicubic,-53.217,-41.920,-82 -haloregnetz_b,41.540,58.460,57.080,42.920,11.68,224,0.940,bicubic,-52.980,-41.880,-42 -cs3sedarknet_l,41.533,58.467,57.347,42.653,21.91,288,0.950,bicubic,-53.587,-41.863,-137 -efficientnet_em,41.490,58.510,58.880,41.120,6.90,240,0.882,bicubic,-52.250,-40.050,+45 -tf_efficientnet_cc_b0_8e,41.490,58.510,57.380,42.620,24.01,224,0.875,bicubic,-51.380,-41.070,+135 -efficientnet_el,41.483,58.517,58.313,41.687,10.59,300,0.904,bicubic,-53.187,-40.817,-71 -halo2botnet50ts_256,41.467,58.533,56.207,43.793,22.64,256,0.950,bicubic,-53.543,-42.833,-124 -swin_tiny_patch4_window7_224,41.460,58.540,57.307,42.693,28.29,224,0.900,bicubic,-53.160,-41.813,-68 -resnetv2_50,41.387,58.613,56.747,43.253,25.55,224,0.950,bicubic,-52.883,-42.183,-23 -swinv2_tiny_window8_256,41.383,58.617,57.117,42.883,28.35,256,0.900,bicubic,-53.647,-42.053,-129 -cait_xxs24_224,41.380,58.620,57.523,42.477,11.96,224,1.000,bicubic,-52.110,-41.247,+72 -tv_resnet152,41.333,58.667,57.523,42.477,60.19,224,0.875,bilinear,-51.917,-41.227,+93 -gcresnext50ts,41.283,58.717,57.147,42.853,15.67,256,0.900,bicubic,-53.127,-41.843,-45 -cs3darknet_l,41.280,58.720,57.347,42.653,21.16,288,0.950,bicubic,-53.400,-41.873,-81 -dpn92,41.277,58.723,56.340,43.660,37.67,224,0.875,bicubic,-52.903,-42.590,-16 -xception71,41.273,58.727,55.877,44.123,42.34,299,0.903,bicubic,-52.607,-43.073,+14 -adv_inception_v3,41.260,58.740,56.317,43.683,23.83,299,0.875,bicubic,-51.750,-42.173,+109 -gernet_s,41.250,58.750,58.827,41.173,8.17,224,0.875,bilinear,-51.190,-39.673,+152 -resnetv2_50d_evos,41.133,58.867,56.050,43.950,25.59,288,0.950,bicubic,-53.987,-43.180,-155 -resnetblur50,41.077,58.923,57.080,42.920,25.56,224,0.875,bicubic,-52.633,-41.730,+33 -nf_regnet_b1,41.027,58.973,58.113,41.887,10.22,288,0.900,bicubic,-52.853,-40.637,+10 -gluon_resnet50_v1d,40.970,59.030,57.137,42.863,25.58,224,0.875,bicubic,-52.560,-41.573,+56 -fbnetv3_b,40.953,59.047,58.653,41.347,8.60,256,0.950,bilinear,-52.677,-40.257,+39 -gluon_inception_v3,40.907,59.093,55.620,44.380,23.83,299,0.875,bicubic,-52.633,-43.210,+52 -cs3darknet_focus_l,40.893,59.107,56.630,43.370,21.15,288,0.950,bicubic,-53.897,-42.520,-113 -ese_vovnet39b,40.867,59.133,56.950,43.050,24.57,224,0.875,bicubic,-52.983,-41.950,+9 -levit_192,40.837,59.163,56.690,43.310,10.95,224,0.900,bicubic,-52.883,-42.100,+24 -regnety_320,40.803,59.197,56.113,43.887,145.05,224,0.875,bicubic,-53.717,-43.057,-69 -resnet34d,40.800,59.200,56.523,43.477,21.82,224,0.875,bicubic,-51.850,-41.897,+127 
-resnetv2_50d_gn,40.783,59.217,56.207,43.793,25.57,288,0.950,bicubic,-54.317,-42.853,-160 -xception,40.773,59.227,56.383,43.617,22.86,299,0.897,bicubic,-52.867,-42.377,+30 -lamhalobotnet50ts_256,40.747,59.253,56.093,43.907,22.57,256,0.950,bicubic,-54.023,-42.887,-115 -resnet50_gn,40.737,59.263,55.750,44.250,25.56,224,0.940,bicubic,-53.443,-43.170,-33 -skresnext50_32x4d,40.700,59.300,56.030,43.970,27.48,224,0.875,bicubic,-53.250,-42.800,-11 -gluon_resnet101_v1b,40.683,59.317,56.123,43.877,44.55,224,0.875,bicubic,-53.077,-42.577,+11 -hrnet_w40,40.663,59.337,56.757,43.243,57.56,224,0.875,bilinear,-53.047,-42.043,+18 -resmlp_24_224,40.643,59.357,56.570,43.430,30.02,224,0.875,bicubic,-52.797,-42.240,+51 -repvgg_b1,40.593,59.407,57.830,42.170,57.42,224,0.875,bilinear,-52.817,-40.960,+56 -halonet50ts,40.577,59.423,55.193,44.807,22.73,256,0.940,bicubic,-54.133,-43.637,-111 -tf_efficientnet_lite3,40.563,59.437,56.473,43.527,8.20,300,0.904,bilinear,-53.547,-42.487,-33 -xcit_tiny_12_p8_224,40.533,59.467,55.623,44.377,6.71,224,1.000,bicubic,-53.827,-43.447,-66 -mobilevitv2_175,40.530,59.470,56.277,43.723,14.25,256,0.888,bicubic,-53.700,-42.653,-51 -tresnet_m_448,40.527,59.473,56.703,43.297,31.39,448,0.875,bilinear,-54.133,-42.447,-107 -dla169,40.523,59.477,57.257,42.743,53.39,224,0.875,bilinear,-53.267,-41.573,0 -pit_xs_224,40.487,59.513,56.533,43.467,10.62,224,0.900,bicubic,-52.423,-42.247,+88 -resnetaa50,40.473,59.527,56.027,43.973,25.56,288,1.000,bicubic,-54.407,-43.103,-142 -repvgg_b2,40.463,59.537,57.773,42.227,89.02,224,0.875,bilinear,-53.127,-41.297,+23 -regnetx_320,40.447,59.553,55.667,44.333,107.81,224,0.875,bicubic,-53.773,-43.383,-55 -coat_mini,40.420,59.580,55.157,44.843,10.34,224,0.900,bicubic,-54.350,-43.793,-131 -skresnet34,40.393,59.607,56.740,43.260,22.28,224,0.875,bicubic,-52.177,-41.780,+112 -efficientnet_el_pruned,40.390,59.610,56.887,43.113,10.59,300,0.904,bicubic,-53.700,-42.123,-43 -resnet50,40.383,59.617,54.663,45.337,25.56,224,0.950,bicubic,-53.547,-43.807,-26 -efficientnet_b2_pruned,40.380,59.620,56.533,43.467,8.31,260,0.890,bicubic,-53.420,-42.377,-11 -wide_resnet101_2,40.360,59.640,55.787,44.213,126.89,224,0.875,bilinear,-53.360,-43.023,-4 -coat_lite_mini,40.353,59.647,55.723,44.277,11.01,224,0.900,bicubic,-53.107,-43.057,+31 -legacy_seresnext101_32x4d,40.353,59.647,54.823,45.177,48.96,224,0.875,bilinear,-53.767,-44.147,-52 -sebotnet33ts_256,40.340,59.660,53.217,46.783,13.70,256,0.940,bicubic,-53.970,-45.383,-74 -tf_efficientnet_b0_ap,40.333,59.667,56.793,43.207,5.29,224,0.875,bicubic,-52.287,-41.577,+99 -regnetx_160,40.273,59.727,56.060,43.940,54.28,224,0.875,bicubic,-53.617,-43.030,-32 -densenet201,40.270,59.730,56.713,43.287,20.01,224,0.875,bicubic,-52.430,-41.937,+90 -resnext50d_32x4d,40.157,59.843,55.490,44.510,25.05,224,0.875,bicubic,-53.663,-43.250,-20 -eca_resnet33ts,40.137,59.863,57.003,42.997,19.68,256,0.900,bicubic,-53.723,-41.887,-30 -mobilevitv2_200,40.133,59.867,55.510,44.490,18.45,256,0.888,bicubic,-54.377,-43.460,-102 -darknetaa53,40.120,59.880,55.787,44.213,36.02,288,1.000,bilinear,-54.090,-43.163,-68 -hrnet_w48,40.097,59.903,56.647,43.353,77.47,224,0.875,bilinear,-53.933,-42.383,-51 -vit_base_patch16_224_sam,40.093,59.907,55.433,44.567,86.57,224,0.900,bicubic,-53.797,-43.457,-38 -legacy_seresnet152,40.037,59.963,55.820,44.180,66.82,224,0.875,bilinear,-53.393,-43.030,+24 -hrnet_w30,40.030,59.970,57.100,42.900,37.71,224,0.875,bilinear,-53.350,-41.730,+28 -regnetz_b16,40.000,60.000,55.623,44.377,9.72,288,0.940,bicubic,-54.680,-43.537,-135 
-regnetx_080,39.997,60.003,55.963,44.037,39.57,224,0.875,bicubic,-53.793,-42.937,-27 -tf_efficientnet_b1,39.980,60.020,56.133,43.867,7.79,240,0.882,bicubic,-53.730,-42.667,-16 -gluon_resnet101_v1c,39.950,60.050,55.310,44.690,44.57,224,0.875,bicubic,-53.730,-43.450,-16 -resmlp_12_distilled_224,39.833,60.167,57.440,42.560,15.35,224,0.875,bicubic,-53.037,-41.180,+66 -seresnet33ts,39.823,60.177,56.523,43.477,19.78,256,0.900,bicubic,-54.447,-42.257,-87 -res2net50_26w_8s,39.807,60.193,54.910,45.090,48.40,224,0.875,bilinear,-53.633,-43.780,+14 -tf_efficientnetv2_b0,39.787,60.213,56.290,43.710,7.14,224,0.875,bicubic,-53.273,-42.400,+43 -darknet53,39.733,60.267,55.283,44.717,41.61,288,1.000,bicubic,-54.627,-43.767,-101 -lambda_resnet50ts,39.733,60.267,54.340,45.660,21.54,256,0.950,bicubic,-54.837,-44.310,-126 -res2net101_26w_4s,39.713,60.287,54.550,45.450,45.21,224,0.875,bilinear,-53.817,-44.050,0 -regnetx_120,39.690,60.310,55.650,44.350,46.11,224,0.875,bicubic,-54.570,-43.540,-92 -vit_small_patch32_224,39.687,60.313,55.260,44.740,22.88,224,0.900,bicubic,-52.473,-43.250,+105 -hrnet_w44,39.680,60.320,55.333,44.667,67.06,224,0.875,bilinear,-53.930,-43.627,-15 -densenet161,39.623,60.377,56.130,43.870,28.68,224,0.875,bicubic,-53.277,-42.680,+53 -resmlp_big_24_224,39.623,60.377,54.820,45.180,129.14,224,0.875,bicubic,-54.637,-44.000,-95 -mixnet_xl,39.613,60.387,55.883,44.117,11.90,224,0.875,bicubic,-54.617,-42.937,-93 -xception41,39.607,60.393,55.047,44.953,26.97,299,0.903,bicubic,-53.873,-43.703,-2 -tf_efficientnetv2_b1,39.573,60.427,55.353,44.647,8.14,240,0.882,bicubic,-54.137,-43.467,-35 -gcresnet33ts,39.557,60.443,55.823,44.177,19.88,256,0.900,bicubic,-54.273,-43.087,-50 -dla102x,39.543,60.457,56.310,43.690,26.31,224,0.875,bilinear,-53.987,-42.540,-12 -xcit_tiny_12_p16_224,39.543,60.457,55.023,44.977,6.72,224,1.000,bicubic,-52.917,-43.607,+78 -sehalonet33ts,39.533,60.467,54.013,45.987,13.69,256,0.940,bicubic,-55.007,-44.747,-134 -rexnet_130,39.490,60.510,56.643,43.357,7.56,224,0.875,bicubic,-54.180,-42.057,-34 -hrnet_w32,39.463,60.537,56.137,43.863,41.23,224,0.875,bilinear,-53.487,-42.703,+37 -resnetv2_50x1_bitm,39.433,60.567,57.857,42.143,25.55,448,1.000,bilinear,-55.317,-41.323,-174 -levit_128,39.423,60.577,55.350,44.650,9.21,224,0.900,bicubic,-53.627,-43.350,+25 -densenetblur121d,39.380,60.620,56.630,43.370,8.00,224,0.875,bicubic,-53.030,-41.790,+78 -regnety_120,39.353,60.647,55.277,44.723,51.82,224,0.875,bicubic,-54.657,-43.753,-80 -mobilevitv2_150,39.340,60.660,55.203,44.797,10.59,256,0.888,bicubic,-54.730,-43.697,-86 -tf_efficientnet_el,39.307,60.693,55.380,44.620,10.59,300,0.904,bicubic,-55.053,-43.720,-125 -tv_resnet101,39.293,60.707,55.793,44.207,44.55,224,0.875,bilinear,-53.587,-42.867,+38 -tf_inception_v3,39.250,60.750,54.303,45.697,23.83,299,0.875,bicubic,-53.950,-44.177,+8 -gluon_resnet50_v1s,39.237,60.763,55.010,44.990,25.68,224,0.875,bicubic,-54.353,-43.830,-31 -densenet169,39.173,60.827,55.847,44.153,14.15,224,0.875,bicubic,-53.117,-42.743,+76 -tf_efficientnetv2_b2,39.173,60.827,54.567,45.433,10.10,260,0.890,bicubic,-54.887,-44.363,-91 -legacy_seresnet101,39.033,60.967,55.007,44.993,49.33,224,0.875,bilinear,-54.237,-43.733,-4 -efficientnet_b1_pruned,39.003,60.997,55.633,44.367,6.33,240,0.882,bicubic,-53.967,-42.887,+23 -repvgg_b1g4,38.987,61.013,56.347,43.653,39.97,224,0.875,bilinear,-54.043,-42.473,+16 -crossvit_9_dagger_240,38.973,61.027,54.860,45.140,8.78,240,0.875,bicubic,-53.787,-43.650,+41 -inception_v3,38.957,61.043,53.840,46.160,23.83,299,0.875,bicubic,-53.943,-44.490,+28 
-resnet33ts,38.930,61.070,55.580,44.420,19.68,256,0.900,bicubic,-54.700,-43.180,-46 -dpn68,38.917,61.083,54.930,45.070,12.61,224,0.875,bicubic,-53.333,-43.680,+72 -legacy_seresnext50_32x4d,38.883,61.117,54.597,45.403,27.56,224,0.875,bilinear,-54.537,-44.203,-19 -dla102,38.833,61.167,55.330,44.670,33.27,224,0.875,bilinear,-54.427,-43.440,-10 -densenet121,38.787,61.213,56.273,43.727,7.98,224,0.875,bicubic,-53.153,-42.007,+80 -resnet32ts,38.773,61.227,55.813,44.187,17.96,256,0.900,bicubic,-54.787,-42.937,-42 -regnetx_040,38.707,61.293,55.343,44.657,22.12,224,0.875,bicubic,-54.963,-43.597,-59 -res2net50_14w_8s,38.697,61.303,54.073,45.927,25.06,224,0.875,bilinear,-54.343,-44.627,+4 -regnetx_032,38.683,61.317,55.160,44.840,15.30,224,0.875,bicubic,-54.567,-43.570,-12 -res2net50_26w_6s,38.683,61.317,53.757,46.243,37.05,224,0.875,bilinear,-54.917,-44.993,-50 -selecsls60,38.617,61.383,55.633,44.367,30.67,224,0.875,bicubic,-54.393,-43.197,+5 -dla60x,38.617,61.383,55.387,44.613,17.35,224,0.875,bilinear,-54.573,-43.323,-11 -dla60_res2net,38.607,61.393,54.547,45.453,20.85,224,0.875,bilinear,-54.763,-44.293,-25 -tf_efficientnet_b0,38.577,61.423,55.963,44.037,5.29,224,0.875,bicubic,-53.823,-42.507,+52 -selecsls60b,38.563,61.437,55.287,44.713,32.77,224,0.875,bicubic,-54.937,-43.553,-42 -repvgg_a2,38.557,61.443,55.760,44.240,28.21,224,0.875,bilinear,-54.123,-42.760,+26 -hardcorenas_f,38.503,61.497,55.650,44.350,8.20,224,0.875,bilinear,-54.477,-42.970,+2 -resmlp_12_224,38.443,61.557,56.320,43.680,15.35,224,0.875,bicubic,-53.677,-42.250,+62 -dla60_res2next,38.433,61.567,54.947,45.053,17.03,224,0.875,bilinear,-55.117,-43.833,-53 -regnetx_064,38.420,61.580,54.990,45.010,26.21,224,0.875,bicubic,-55.200,-44.060,-64 -gluon_resnet50_v1b,38.413,61.587,54.817,45.183,25.56,224,0.875,bicubic,-54.147,-43.733,+33 -tf_efficientnet_cc_b0_4e,38.400,61.600,55.157,44.843,13.31,224,0.875,bicubic,-54.430,-43.283,+12 -hrnet_w18,38.273,61.727,55.653,44.347,21.30,224,0.875,bilinear,-54.487,-43.007,+16 -tinynet_a,38.223,61.777,55.177,44.823,6.19,192,0.875,bicubic,-54.587,-43.383,+13 -poolformer_s12,38.163,61.837,56.190,43.810,11.92,224,0.900,bicubic,-54.307,-42.160,+33 -mixnet_l,38.160,61.840,54.753,45.247,7.33,224,0.875,bicubic,-55.110,-43.947,-33 -hardcorenas_e,38.150,61.850,55.167,44.833,8.07,224,0.875,bilinear,-54.790,-43.413,-5 -efficientnet_b1,38.090,61.910,54.020,45.980,7.79,256,1.000,bicubic,-54.930,-44.690,-13 -coat_lite_tiny,38.070,61.930,53.460,46.540,5.72,224,0.900,bicubic,-54.790,-45.180,+3 -gmixer_24_224,38.063,61.937,52.077,47.923,24.72,224,0.875,bicubic,-54.617,-46.203,+13 -resnetrs50,37.970,62.030,53.313,46.687,35.69,224,0.910,bicubic,-56.050,-45.537,-124 -mobilevitv2_125,37.883,62.117,54.060,45.940,7.48,256,0.888,bicubic,-55.577,-44.800,-56 -hardcorenas_c,37.873,62.127,55.713,44.287,5.52,224,0.875,bilinear,-54.487,-42.637,+34 -gluon_resnet50_v1c,37.850,62.150,54.117,45.883,25.58,224,0.875,bicubic,-55.060,-44.583,-8 -res2net50_26w_4s,37.830,62.170,53.070,46.930,25.70,224,0.875,bilinear,-55.350,-45.600,-33 -efficientnet_es,37.787,62.213,54.980,45.020,5.44,224,0.875,bicubic,-55.133,-43.710,-13 -resnest14d,37.773,62.227,56.450,43.550,10.61,224,0.875,bilinear,-53.357,-41.880,+80 -tv_resnext50_32x4d,37.740,62.260,54.120,45.880,25.03,224,0.875,bilinear,-55.170,-44.600,-13 -resnet26t,37.690,62.310,55.260,44.740,16.01,256,0.940,bicubic,-54.980,-43.320,+6 -ecaresnet26t,37.647,62.353,54.347,45.653,16.01,320,0.950,bicubic,-56.313,-44.573,-128 
-hardcorenas_d,37.533,62.467,54.713,45.287,7.50,224,0.875,bilinear,-55.067,-43.717,+10 -res2next50,37.483,62.517,52.863,47.137,24.67,224,0.875,bilinear,-55.677,-45.787,-37 -resnet34,37.453,62.547,54.303,45.697,21.80,224,0.875,bilinear,-53.747,-43.937,+68 -pit_ti_distilled_224,37.323,62.677,55.133,44.867,5.10,224,0.900,bicubic,-53.577,-43.087,+80 -lambda_resnet26t,37.297,62.703,53.580,46.420,10.96,256,0.940,bicubic,-56.103,-45.150,-59 -hardcorenas_b,37.240,62.760,55.050,44.950,5.18,224,0.875,bilinear,-54.690,-43.350,+40 -mobilenetv3_large_100_miil,37.220,62.780,53.547,46.453,5.48,224,0.875,bilinear,-55.050,-44.693,+25 -eca_halonext26ts,37.183,62.817,53.120,46.880,10.76,256,0.940,bicubic,-56.377,-45.560,-83 -cs3darknet_focus_m,37.140,62.860,53.917,46.083,9.30,288,0.950,bicubic,-55.970,-44.823,-41 -res2net50_48w_2s,37.127,62.873,53.347,46.653,25.29,224,0.875,bilinear,-55.663,-45.133,-12 -lambda_resnet26rpt_256,37.077,62.923,53.840,46.160,10.99,256,0.940,bicubic,-56.353,-45.040,-70 -dla60,37.073,62.927,54.193,45.807,22.04,224,0.875,bilinear,-55.587,-44.437,-6 -rexnet_100,37.063,62.937,54.037,45.963,4.80,224,0.875,bicubic,-55.787,-44.583,-20 -bat_resnext26ts,37.063,62.937,53.753,46.247,10.73,256,0.900,bicubic,-56.037,-44.977,-45 -regnety_016,37.010,62.990,54.080,45.920,11.20,224,0.875,bicubic,-55.990,-44.600,-37 -tf_mixnet_l,36.973,63.027,52.587,47.413,7.33,224,0.875,bicubic,-56.067,-45.953,-43 -botnet26t_256,36.957,63.043,53.083,46.917,12.49,256,0.950,bicubic,-56.473,-45.567,-74 -legacy_seresnet50,36.867,63.133,53.473,46.527,28.09,224,0.875,bilinear,-55.803,-45.177,-14 -halonet26t,36.850,63.150,52.277,47.723,12.48,256,0.950,bicubic,-56.760,-46.363,-100 -tv_densenet121,36.813,63.187,54.030,45.970,7.98,224,0.875,bicubic,-54.587,-44.220,+43 -tf_efficientnet_lite2,36.803,63.197,53.320,46.680,6.09,260,0.890,bicubic,-55.797,-45.230,-11 -mobilenetv2_120d,36.793,63.207,54.050,45.950,5.83,224,0.875,bicubic,-55.817,-44.450,-13 -tf_efficientnet_lite1,36.727,63.273,53.580,46.420,5.42,240,0.882,bicubic,-55.583,-44.910,+6 -eca_botnext26ts_256,36.687,63.313,52.483,47.517,10.59,256,0.950,bicubic,-56.683,-46.217,-75 -regnetx_016,36.677,63.323,53.293,46.707,9.19,224,0.875,bicubic,-55.853,-45.257,-10 -hardcorenas_a,36.673,63.327,54.917,45.083,5.26,224,0.875,bilinear,-54.947,-43.253,+29 -levit_128s,36.633,63.367,53.123,46.877,7.78,224,0.900,bicubic,-54.867,-45.277,+31 -efficientnet_b0,36.597,63.403,53.487,46.513,5.29,224,0.875,bicubic,-55.883,-45.193,-12 -vit_base_patch32_224_sam,36.550,63.450,53.043,46.957,88.22,224,0.900,bicubic,-53.310,-44.557,+72 -xcit_nano_12_p8_224_dist,36.533,63.467,52.883,47.117,3.05,224,1.000,bicubic,-55.887,-45.637,-7 -cs3darknet_m,36.457,63.543,53.230,46.770,9.31,288,0.950,bicubic,-56.803,-45.490,-76 -mobilevitv2_100,36.393,63.607,53.067,46.933,4.90,256,0.888,bicubic,-56.737,-45.693,-65 -tf_efficientnet_em,36.387,63.613,52.833,47.167,6.90,240,0.882,bicubic,-56.783,-45.837,-70 -skresnet18,36.323,63.677,54.187,45.813,11.96,224,0.875,bicubic,-53.847,-43.593,+63 -repvgg_b0,36.283,63.717,54.067,45.933,15.82,224,0.875,bilinear,-55.397,-44.383,+18 -tv_resnet50,36.167,63.833,52.807,47.193,25.56,224,0.875,bilinear,-55.963,-45.613,+3 -xcit_nano_12_p16_384_dist,36.160,63.840,53.247,46.753,3.05,384,1.000,bicubic,-55.970,-45.273,+1 -legacy_seresnet34,36.140,63.860,52.550,47.450,21.96,224,0.875,bilinear,-55.350,-45.650,+21 -coat_tiny,36.120,63.880,51.060,48.940,5.50,224,0.900,bicubic,-57.390,-47.630,-107 -tv_resnet34,36.077,63.923,53.533,46.467,21.80,224,0.875,bilinear,-54.213,-44.447,+55 
-deit_tiny_distilled_patch16_224,36.023,63.977,54.237,45.763,5.91,224,0.900,bicubic,-55.057,-44.033,+39 -mobilenetv2_140,36.010,63.990,53.957,46.043,6.11,224,0.875,bicubic,-56.030,-44.293,0 -tf_efficientnet_lite0,35.917,64.083,53.473,46.527,4.65,224,0.875,bicubic,-55.383,-44.617,+23 -seresnext26ts,35.830,64.170,53.933,46.067,10.39,256,0.900,bicubic,-56.990,-44.667,-49 -selecsls42b,35.807,64.193,52.493,47.507,32.46,224,0.875,bicubic,-56.673,-45.947,-28 -xcit_nano_12_p8_384_dist,35.770,64.230,52.297,47.703,3.05,384,1.000,bicubic,-57.500,-46.553,-95 -gluon_resnet34_v1b,35.760,64.240,52.183,47.817,21.80,224,0.875,bicubic,-55.340,-45.997,+32 -dla34,35.640,64.360,52.787,47.213,15.74,224,0.875,bilinear,-55.590,-45.383,+21 -efficientnet_lite0,35.637,64.363,53.637,46.363,4.65,224,0.875,bicubic,-55.623,-44.613,+18 -mixnet_m,35.637,64.363,52.423,47.577,5.01,224,0.875,bicubic,-56.633,-45.927,-19 -ssl_resnet18,35.593,64.407,53.740,46.260,11.69,224,0.875,bilinear,-55.107,-44.290,+36 -mobilenetv3_rw,35.537,64.463,53.707,46.293,5.48,224,0.875,bicubic,-56.013,-44.573,+5 -efficientnet_es_pruned,35.390,64.610,52.847,47.153,5.44,224,0.875,bicubic,-56.320,-45.563,-2 -mobilenetv2_110d,35.307,64.693,52.847,47.153,4.52,224,0.875,bicubic,-56.023,-45.343,+11 -tf_mixnet_m,35.180,64.820,50.983,49.017,5.01,224,0.875,bicubic,-57.030,-47.437,-19 -hrnet_w18_small_v2,35.170,64.830,52.430,47.570,15.60,224,0.875,bilinear,-56.000,-45.900,+17 -resnet18d,35.133,64.867,52.890,47.110,11.71,224,0.875,bicubic,-54.847,-44.940,+42 -xcit_nano_12_p16_224_dist,35.120,64.880,52.543,47.457,3.05,224,1.000,bicubic,-55.050,-45.207,+40 -eca_resnext26ts,35.050,64.950,52.310,47.690,10.30,256,0.900,bicubic,-57.360,-46.310,-35 -convit_tiny,35.047,64.953,51.770,48.230,5.71,224,0.875,bicubic,-55.503,-46.450,+29 -resnext26ts,35.043,64.957,53.420,46.580,10.30,256,0.900,bicubic,-57.177,-44.830,-26 -gcresnext26ts,34.930,65.070,51.673,48.327,10.48,256,0.900,bicubic,-57.530,-46.817,-42 -tinynet_b,34.863,65.137,52.010,47.990,3.73,188,0.875,bicubic,-56.277,-46.050,+13 -ese_vovnet19b_dw,34.833,65.167,52.033,47.967,6.54,224,0.875,bicubic,-57.167,-46.477,-21 -regnety_008,34.810,65.190,51.750,48.250,6.26,224,0.875,bicubic,-57.090,-46.670,-17 -pit_ti_224,34.677,65.323,52.160,47.840,4.85,224,0.900,bicubic,-55.763,-45.850,+26 -mobilenetv3_large_100,34.600,65.400,52.863,47.137,5.48,224,0.875,bicubic,-56.880,-45.457,-7 -crossvit_9_240,34.597,65.403,51.763,48.237,8.55,240,0.875,bicubic,-56.453,-46.547,+14 -seresnext26t_32x4d,34.543,65.457,51.380,48.620,16.81,224,0.875,bicubic,-58.277,-47.180,-73 -seresnext26d_32x4d,34.533,65.467,51.553,48.447,16.81,224,0.875,bicubic,-57.897,-46.987,-48 -mixer_b16_224,34.423,65.577,48.080,51.920,59.88,224,0.875,bicubic,-56.727,-49.320,+4 -resnet26d,34.283,65.717,51.687,48.313,16.01,224,0.875,bicubic,-57.977,-46.763,-39 -tf_efficientnet_es,34.263,65.737,51.347,48.653,5.44,224,0.875,bicubic,-57.857,-47.083,-32 -fbnetc_100,34.253,65.747,51.187,48.813,5.57,224,0.875,bilinear,-56.997,-46.663,-6 -regnety_006,34.147,65.853,51.270,48.730,6.06,224,0.875,bicubic,-57.403,-47.160,-19 -tf_mobilenetv3_large_100,33.940,66.060,51.483,48.517,5.48,224,0.875,bilinear,-57.480,-46.777,-14 -semnasnet_075,33.780,66.220,52.427,47.573,2.91,224,0.875,bicubic,-56.430,-45.543,+19 -mnasnet_100,33.780,66.220,51.177,48.823,4.38,224,0.875,bicubic,-57.430,-46.873,-8 -regnetx_008,33.773,66.227,50.540,49.460,7.26,224,0.875,bicubic,-57.387,-47.840,-5 -lcnet_100,33.750,66.250,52.090,47.910,2.95,224,0.875,bicubic,-55.220,-45.290,+34 
-vit_tiny_r_s16_p8_384,33.647,66.353,50.683,49.317,6.36,384,1.000,bicubic,-58.083,-47.747,-31 -mobilevit_s,33.637,66.363,49.280,50.720,5.58,256,0.900,bicubic,-59.523,-49.490,-118 -xcit_nano_12_p8_224,33.580,66.420,50.213,49.787,3.05,224,1.000,bicubic,-57.550,-48.017,-5 -vit_tiny_patch16_384,33.543,66.457,51.077,48.923,5.79,384,1.000,bicubic,-59.897,-47.753,-146 -semnasnet_100,33.523,66.477,50.787,49.213,3.89,224,0.875,bicubic,-58.137,-47.483,-31 -resnet26,33.493,66.507,50.930,49.070,16.00,224,0.875,bicubic,-57.947,-47.330,-25 -spnasnet_100,33.477,66.523,51.270,48.730,4.42,224,0.875,bilinear,-57.123,-46.690,+1 -mixnet_s,33.477,66.523,51.010,48.990,4.13,224,0.875,bicubic,-58.293,-47.290,-39 -crossvit_tiny_240,33.353,66.647,49.893,50.107,7.01,240,0.875,bicubic,-57.187,-48.047,+2 -mobilevitv2_075,33.350,66.650,50.077,49.923,2.87,256,0.888,bicubic,-58.620,-48.223,-46 -vgg19_bn,33.230,66.770,50.803,49.197,143.68,224,0.875,bilinear,-57.760,-47.307,-8 -ghostnet_100,33.207,66.793,51.160,48.840,5.18,224,0.875,bilinear,-57.233,-46.670,+1 -regnetx_006,33.147,66.853,50.253,49.747,6.20,224,0.875,bicubic,-57.623,-47.847,-8 -resnet18,33.070,66.930,51.180,48.820,11.69,224,0.875,bilinear,-55.080,-45.940,+26 -xcit_nano_12_p16_224,32.953,67.047,49.993,50.007,3.05,224,1.000,bicubic,-56.007,-47.407,+20 -legacy_seresnext26_32x4d,32.763,67.237,49.250,50.750,16.79,224,0.875,bicubic,-59.827,-49.160,-84 -edgenext_x_small,32.720,67.280,48.640,51.360,2.34,256,0.900,bicubic,-58.680,-49.520,-33 -hrnet_w18_small,32.667,67.333,50.597,49.403,13.19,224,0.875,bilinear,-57.203,-47.293,+2 -deit_tiny_patch16_224,32.663,67.337,50.270,49.730,5.72,224,0.900,bicubic,-56.957,-47.690,+5 -legacy_seresnet18,32.593,67.407,50.323,49.677,11.78,224,0.875,bicubic,-56.667,-47.367,+8 -mobilenetv2_100,32.523,67.477,50.820,49.180,3.50,224,0.875,bicubic,-57.297,-47.010,+1 -regnetx_004,32.510,67.490,49.337,50.663,5.16,224,0.875,bicubic,-56.960,-48.433,+3 +eva_giant_patch14_336.clip_ft_in1k,90.553,9.447,97.230,2.770,"1,013.01",336,1.000,bicubic,-7.307,-2.650,+3 +eva_giant_patch14_224.clip_ft_in1k,90.227,9.773,97.173,2.827,"1,012.56",224,1.000,bicubic,-7.453,-2.577,+12 +eva_giant_patch14_336.m30m_ft_in22k_in1k,88.583,11.417,95.930,4.070,"1,013.01",336,1.000,bicubic,-9.417,-3.970,-2 +eva_giant_patch14_560.m30m_ft_in22k_in1k,88.410,11.590,95.613,4.387,"1,014.45",560,1.000,bicubic,-9.580,-4.247,-2 +vit_large_patch14_clip_336.openai_ft_in12k_in1k,83.910,16.090,93.880,6.120,304.53,336,1.000,bicubic,-13.700,-5.850,+14 +vit_large_patch14_clip_336.laion2b_ft_in1k,83.587,16.413,93.507,6.493,304.53,336,1.000,bicubic,-13.643,-6.213,+39 +eva_large_patch14_336.in22k_ft_in1k,83.523,16.477,93.100,6.900,304.53,336,1.000,bicubic,-14.287,-6.690,-2 +vit_huge_patch14_clip_224.laion2b_ft_in1k,83.280,16.720,93.103,6.897,632.05,224,1.000,bicubic,-13.820,-6.537,+48 +vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,83.063,16.937,92.850,7.150,632.46,336,1.000,bicubic,-14.537,-6.930,+11 +vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,82.810,17.190,92.620,7.380,632.05,224,1.000,bicubic,-14.550,-7.180,+21 +vit_large_patch14_clip_224.openai_ft_in1k,82.317,17.683,92.913,7.087,304.20,224,1.000,bicubic,-15.143,-6.767,+15 +vit_large_patch14_clip_224.laion2b_ft_in1k,81.703,18.297,92.270,7.730,304.20,224,1.000,bicubic,-15.317,-7.410,+53 +eva_large_patch14_196.in22k_ft_in1k,81.290,18.710,91.550,8.450,304.14,196,1.000,bicubic,-16.230,-8.240,+11 +eva_large_patch14_336.in22k_ft_in22k_in1k,80.077,19.923,89.363,10.637,304.53,336,1.000,bicubic,-17.783,-10.427,-11 
+ig_resnext101_32x48d,79.650,20.350,89.393,10.607,828.41,224,0.875,bilinear,-17.320,-10.277,+52 +ig_resnext101_32x32d,79.457,20.543,89.183,10.817,468.53,224,0.875,bilinear,-17.323,-10.427,+76 +ig_resnext101_32x16d,78.837,21.163,88.480,11.520,194.03,224,0.875,bilinear,-17.603,-11.060,+118 +vit_large_patch14_clip_224.openai_ft_in12k_in1k,78.687,21.313,88.920,11.080,304.20,224,1.000,bicubic,-18.923,-10.810,0 +eva_large_patch14_196.in22k_ft_in22k_in1k,78.503,21.497,88.320,11.680,304.14,196,1.000,bicubic,-19.107,-11.490,-2 +vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,78.427,21.573,88.503,11.497,304.53,336,1.000,bicubic,-19.023,-11.277,+8 +vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,78.263,21.737,88.673,11.327,304.20,224,1.000,bicubic,-19.127,-11.067,+9 +tf_efficientnet_l2.ns_jft_in1k_475,76.480,23.520,88.653,11.347,480.31,475,0.936,bicubic,-21.270,-11.137,-11 +beitv2_large_patch16_224.in1k_ft_in22k_in1k,76.370,23.630,87.093,12.907,304.43,224,0.950,bicubic,-21.380,-12.727,-11 +swsl_resnext101_32x16d,76.303,23.697,87.733,12.267,194.03,224,0.875,bilinear,-19.967,-11.767,+138 +ig_resnext101_32x8d,75.813,24.187,86.200,13.800,88.79,224,0.875,bilinear,-20.117,-13.080,+191 +swsl_resnext101_32x8d,75.590,24.410,86.937,13.063,88.79,224,0.875,bilinear,-20.650,-12.533,+139 +tf_efficientnet_l2.ns_jft_in1k,74.650,25.350,87.543,12.457,480.31,800,0.960,bicubic,-23.130,-12.277,-20 +beit_large_patch16_384.in22k_ft_in22k_in1k,73.277,26.723,85.017,14.983,305.00,384,1.000,bicubic,-24.533,-14.823,-22 +beit_large_patch16_512.in22k_ft_in22k_in1k,73.163,26.837,85.080,14.920,305.67,512,1.000,bicubic,-24.617,-14.810,-21 +swsl_resnext101_32x4d,72.660,27.340,85.157,14.843,44.18,224,0.875,bilinear,-23.390,-14.373,+166 +maxvit_xlarge_tf_512.in21k_ft_in1k,71.893,28.107,82.920,17.080,475.77,512,1.000,bicubic,-25.867,-16.900,-21 +maxvit_xlarge_tf_384.in21k_ft_in1k,71.697,28.303,82.727,17.273,475.32,384,1.000,bicubic,-26.043,-17.123,-19 +beit_large_patch16_224.in22k_ft_in22k_in1k,71.043,28.957,83.420,16.580,304.43,224,0.900,bicubic,-26.437,-16.270,-8 +deit3_huge_patch14_224_in21ft1k,70.810,29.190,82.197,17.803,632.13,224,1.000,bicubic,-26.440,-17.513,+6 +vit_base_patch16_clip_384.laion2b_ft_in1k,70.793,29.207,83.810,16.190,86.86,384,1.000,bicubic,-26.117,-15.860,+41 +deit3_large_patch16_384_in21ft1k,70.570,29.430,82.437,17.563,304.76,384,1.000,bicubic,-26.990,-17.323,-13 +maxvit_base_tf_512.in21k_ft_in1k,70.397,29.603,81.597,18.403,119.88,512,1.000,bicubic,-27.363,-18.263,-28 +maxvit_large_tf_512.in21k_ft_in1k,70.390,29.610,81.647,18.353,212.33,512,1.000,bicubic,-27.280,-18.083,-22 +maxvit_large_tf_384.in21k_ft_in1k,70.030,29.970,81.033,18.967,212.03,384,1.000,bicubic,-27.640,-18.787,-24 +deit3_large_patch16_224_in21ft1k,69.717,30.283,81.190,18.810,304.37,224,1.000,bicubic,-27.593,-18.490,-4 +maxvit_base_tf_384.in21k_ft_in1k,69.557,30.443,80.733,19.267,119.65,384,1.000,bicubic,-28.003,-18.977,-19 +swsl_resnext50_32x4d,68.977,31.023,82.810,17.190,25.03,224,0.875,bilinear,-26.643,-16.410,+216 +vit_base_patch16_clip_224.laion2b_ft_in1k,68.747,31.253,82.500,17.500,86.57,224,1.000,bicubic,-27.573,-16.910,+109 +swsl_resnet50,68.297,31.703,83.313,16.687,25.56,224,0.875,bilinear,-26.903,-16.017,+279 +convnext_xlarge.fb_in22k_ft_in1k_384,68.160,31.840,80.467,19.533,350.20,384,1.000,bicubic,-29.430,-19.303,-24 +swinv2_large_window12to24_192to384_22kft1k,67.670,32.330,80.100,19.900,196.74,384,1.000,bicubic,-29.620,-19.680,-9 +tf_efficientnet_b7.ns_jft_in1k,67.510,32.490,81.383,18.617,66.35,600,0.949,bicubic,-29.690,-18.317,+2 
+vit_base_patch16_clip_384.openai_ft_in1k,67.357,32.643,81.690,18.310,86.86,384,1.000,bicubic,-29.463,-17.970,+36
+vit_large_patch16_384.augreg_in21k_ft_in1k,67.053,32.947,78.707,21.293,304.72,384,1.000,bicubic,-30.367,-21.073,-20
+convnext_large.fb_in22k_ft_in1k_384,66.667,33.333,79.807,20.193,197.77,384,1.000,bicubic,-30.643,-19.953,-15
+swin_large_patch4_window12_384,66.283,33.717,79.783,20.217,196.74,384,1.000,bicubic,-30.887,-19.897,+1
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,66.160,33.840,78.890,21.110,86.86,384,1.000,bicubic,-31.060,-20.810,-4
+vit_base_patch16_clip_224.openai_ft_in1k,66.023,33.977,80.990,19.010,86.57,224,0.900,bicubic,-30.287,-18.510,+102
+beitv2_base_patch16_224.in1k_ft_in22k_in1k,65.757,34.243,78.890,21.110,86.53,224,0.900,bicubic,-31.153,-20.840,+21
+swinv2_base_window12to24_192to384_22kft1k,65.743,34.257,79.313,20.687,87.92,384,1.000,bicubic,-31.517,-20.477,-16
+swinv2_large_window12to16_192to256_22kft1k,65.627,34.373,78.460,21.540,196.74,256,0.900,bicubic,-31.613,-21.250,-13
+tf_efficientnet_b6.ns_jft_in1k,65.587,34.413,79.553,20.447,43.04,528,0.942,bicubic,-31.433,-20.157,+5
+convnext_xlarge.fb_in22k_ft_in1k,65.390,34.610,78.340,21.660,350.20,288,1.000,bicubic,-32.060,-21.480,-31
+vit_base_patch16_clip_384.openai_ft_in12k_in1k,65.357,34.643,78.943,21.057,86.86,384,0.950,bicubic,-31.783,-20.697,-6
+convnext_base.fb_in22k_ft_in1k_384,64.883,35.117,78.393,21.607,88.59,384,1.000,bicubic,-32.367,-21.327,-19
+vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,64.767,35.233,77.787,22.213,86.57,224,0.950,bicubic,-31.833,-21.773,+52
+vit_large_patch16_224.augreg_in21k_ft_in1k,64.347,35.653,76.190,23.810,304.33,224,0.900,bicubic,-32.363,-23.450,+37
+convnext_large.fb_in22k_ft_in1k,64.270,35.730,77.787,22.213,197.77,288,1.000,bicubic,-32.950,-21.943,-16
+vit_large_r50_s32_384.augreg_in21k_ft_in1k,64.100,35.900,75.850,24.150,329.09,384,1.000,bicubic,-32.850,-23.810,+5
+swin_large_patch4_window7_224,63.870,36.130,78.180,21.820,196.53,224,0.900,bicubic,-33.080,-21.530,+5
+beit_base_patch16_384.in22k_ft_in22k_in1k,63.617,36.383,78.107,21.893,86.74,384,1.000,bicubic,-33.713,-21.613,-34
+swin_base_patch4_window12_384,63.470,36.530,78.063,21.937,87.90,384,1.000,bicubic,-33.650,-21.507,-13
+swinv2_base_window12to16_192to256_22kft1k,63.180,36.820,77.117,22.883,87.92,256,0.900,bicubic,-33.880,-22.543,-8
+tf_efficientnet_b5.ns_jft_in1k,63.047,36.953,77.777,22.223,30.39,456,0.934,bicubic,-33.823,-21.883,+11
+vit_base_patch16_clip_224.openai_ft_in12k_in1k,62.947,37.053,76.610,23.390,86.57,224,0.950,bicubic,-33.563,-23.010,+55
+deit3_base_patch16_384_in21ft1k,62.640,37.360,75.553,24.447,86.88,384,1.000,bicubic,-34.600,-24.187,-27
+convnext_base.fb_in22k_ft_in1k,62.520,37.480,76.563,23.437,88.59,288,1.000,bicubic,-34.700,-23.197,-26
+vit_base_patch8_224.augreg2_in21k_ft_in1k,62.407,37.593,76.607,23.393,86.58,224,0.900,bicubic,-34.533,-22.973,-2
+tf_efficientnetv2_l.in21k_ft_in1k,62.367,37.633,76.743,23.257,118.52,480,1.000,bicubic,-34.953,-22.897,-40
+vit_base_patch8_224.augreg_in21k_ft_in1k,62.190,37.810,75.610,24.390,86.58,224,0.900,bicubic,-34.890,-24.010,-17
+tf_efficientnetv2_xl.in21k_ft_in1k,62.090,37.910,75.650,24.350,208.12,512,1.000,bicubic,-35.240,-23.950,-43
+deit3_base_patch16_224_in21ft1k,61.780,38.220,74.713,25.287,86.59,224,1.000,bicubic,-35.090,-24.927,+4
+tf_efficientnet_b4.ns_jft_in1k,61.230,38.770,76.173,23.827,19.34,380,0.922,bicubic,-35.480,-23.477,+22
+maxvit_base_tf_512.in1k,61.113,38.887,74.057,25.943,119.88,512,1.000,bicubic,-36.067,-25.583,-29
+vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,60.373,39.627,73.810,26.190,88.30,384,1.000,bicubic,-36.237,-25.670,+30
+beit_base_patch16_224.in22k_ft_in22k_in1k,60.317,39.683,75.597,24.403,86.53,224,0.900,bicubic,-36.343,-24.063,+26
+tf_efficientnetv2_m.in21k_ft_in1k,60.280,39.720,75.070,24.930,54.14,480,1.000,bicubic,-36.720,-24.560,-16
+vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,60.240,39.760,73.540,26.460,88.34,448,1.000,bicubic,-36.330,-25.980,+34
+vit_base_patch16_384.augreg_in21k_ft_in1k,60.180,39.820,73.843,26.157,86.86,384,1.000,bicubic,-36.840,-25.867,-21
+convnext_small.fb_in22k_ft_in1k_384,59.947,40.053,74.470,25.530,50.22,384,1.000,bicubic,-37.153,-25.230,-28
+maxvit_large_tf_512.in1k,59.877,40.123,72.847,27.153,212.33,512,1.000,bicubic,-37.173,-26.743,-25
+swin_base_patch4_window7_224,59.537,40.463,74.247,25.753,87.77,224,0.900,bicubic,-37.143,-25.413,+18
+vit_base_patch32_clip_224.laion2b_ft_in1k,59.170,40.830,73.897,26.103,88.22,224,0.900,bicubic,-35.570,-25.173,+311
+maxvit_base_tf_384.in1k,59.110,40.890,71.700,28.300,119.65,384,1.000,bicubic,-38.010,-28.080,-34
+vit_base_patch16_224.augreg2_in21k_ft_in1k,59.047,40.953,73.640,26.360,86.57,224,0.900,bicubic,-37.463,-25.910,+34
+volo_d5_512,58.917,41.083,73.200,26.800,296.09,512,1.150,bicubic,-38.373,-26.560,-53
+volo_d5_448,58.793,41.207,73.057,26.943,295.91,448,1.150,bicubic,-38.447,-26.613,-50
+vit_large_r50_s32_224.augreg_in21k_ft_in1k,58.633,41.367,71.720,28.280,328.99,224,0.900,bicubic,-37.547,-27.790,+83
+vit_base_patch32_clip_384.openai_ft_in12k_in1k,58.603,41.397,73.140,26.860,88.30,384,0.950,bicubic,-37.817,-26.360,+44
+maxvit_large_tf_384.in1k,58.453,41.547,71.167,28.833,212.03,384,1.000,bicubic,-38.487,-28.473,-23
+deit3_large_patch16_384,58.360,41.640,72.970,27.030,304.76,384,1.000,bicubic,-38.490,-26.650,-14
+deit3_huge_patch14_224,58.107,41.893,72.130,27.870,632.13,224,0.900,bicubic,-38.463,-27.390,+19
+tf_efficientnet_b8.ap_in1k,57.830,42.170,72.957,27.043,87.41,672,0.954,bicubic,-38.720,-26.613,+21
+convnext_small.fb_in22k_ft_in1k,57.743,42.257,72.790,27.210,50.22,288,1.000,bicubic,-39.067,-26.880,-12
+mvitv2_large,57.487,42.513,70.773,29.227,217.99,224,0.900,bicubic,-38.923,-28.677,+39
+cait_m48_448,57.470,42.530,71.860,28.140,356.46,448,1.000,bicubic,-39.410,-27.810,-23
+cait_m36_384,57.467,42.533,72.313,27.687,271.22,384,1.000,bicubic,-39.363,-27.347,-19
+tf_efficientnet_b3.ns_jft_in1k,57.417,42.583,72.387,27.613,12.23,300,0.904,bicubic,-38.683,-27.093,+83
+volo_d4_448,57.283,42.717,71.537,28.463,193.41,448,1.150,bicubic,-39.787,-28.213,-45
+maxvit_small_tf_512.in1k,57.093,42.907,70.967,29.033,69.13,512,1.000,bicubic,-40.087,-28.653,-54
+vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,57.057,42.943,71.290,28.710,88.22,224,0.900,bicubic,-38.183,-28.030,+211
+vit_base_patch16_224.augreg_in21k_ft_in1k,56.823,43.177,70.633,29.367,86.57,224,0.900,bicubic,-39.477,-28.927,+51
+deit3_medium_patch16_224_in21ft1k,56.650,43.350,69.737,30.263,38.85,224,1.000,bicubic,-39.490,-29.753,+72
+volo_d5_224,56.480,43.520,70.643,29.357,295.46,224,0.960,bicubic,-40.400,-28.977,-32
+deit3_large_patch16_224,56.463,43.537,70.457,29.543,304.37,224,0.900,bicubic,-39.727,-29.073,+65
+xcit_large_24_p8_384_dist,56.353,43.647,71.323,28.677,188.93,384,1.000,bicubic,-40.407,-28.237,-17
+flexivit_large.1200ep_in1k,56.293,43.707,71.560,28.440,304.36,240,0.950,bicubic,-40.487,-27.970,-22
+flexivit_large.600ep_in1k,56.047,43.953,71.173,28.827,304.36,240,0.950,bicubic,-40.693,-28.427,-17
+xcit_large_24_p8_224_dist,56.027,43.973,70.663,29.337,188.93,224,1.000,bicubic,-40.613,-28.797,-6
+vit_base_patch32_clip_224.openai_ft_in1k,55.907,44.093,72.173,27.827,88.22,224,0.900,bicubic,-38.533,-26.837,+332
+vit_medium_patch16_gap_384.in12k_ft_in1k,55.787,44.213,70.993,29.007,39.03,384,0.950,bicubic,-40.723,-28.597,+6
+flexivit_large.300ep_in1k,55.700,44.300,70.713,29.287,304.36,240,0.950,bicubic,-40.990,-28.867,-13
+xcit_large_24_p16_384_dist,54.907,45.093,69.867,30.133,189.10,384,1.000,bicubic,-42.033,-29.653,-45
+volo_d4_224,54.747,45.253,68.867,31.133,192.96,224,0.960,bicubic,-42.033,-30.753,-31
+maxvit_tiny_tf_512.in1k,54.723,45.277,68.933,31.067,31.05,512,1.000,bicubic,-42.247,-30.737,-52
+deit3_small_patch16_384_in21ft1k,54.470,45.530,68.313,31.687,22.21,384,1.000,bicubic,-42.200,-31.327,-15
+efficientnet_b5.in12k_ft_in1k,54.447,45.553,69.857,30.143,30.39,448,1.000,bicubic,-42.323,-29.743,-29
+vit_base_r50_s16_384.orig_in21k_ft_in1k,54.403,45.597,69.560,30.440,98.95,384,1.000,bicubic,-42.047,-30.060,+9
+maxvit_small_tf_384.in1k,54.343,45.657,68.193,31.807,69.02,384,1.000,bicubic,-42.397,-31.357,-29
+resnetv2_152x4_bitm,54.320,45.680,70.167,29.833,936.53,480,1.000,bilinear,-42.550,-29.453,-46
+xcit_large_24_p16_224_dist,54.260,45.740,68.970,31.030,189.10,224,1.000,bicubic,-42.060,-30.570,+27
+vit_small_r26_s32_384.augreg_in21k_ft_in1k,54.197,45.803,68.757,31.243,36.47,384,1.000,bicubic,-41.863,-30.623,+66
+volo_d3_448,53.990,46.010,68.020,31.980,86.63,448,1.000,bicubic,-43.030,-31.650,-64
+tf_efficientnet_b5.ap_in1k,53.870,46.130,69.160,30.840,30.39,456,0.934,bicubic,-42.210,-30.380,+58
+xcit_medium_24_p8_224_dist,53.660,46.340,68.410,31.590,84.32,224,1.000,bicubic,-42.860,-31.100,-9
+tf_efficientnet_b2.ns_jft_in1k,53.600,46.400,70.270,29.730,9.11,260,0.890,bicubic,-41.920,-29.070,+142
+tf_efficientnet_b6.ap_in1k,53.560,46.440,68.550,31.450,43.04,528,0.942,bicubic,-42.810,-31.000,+11
+cait_s36_384,53.550,46.450,68.000,32.000,68.37,384,1.000,bicubic,-43.080,-31.600,-24
+vit_medium_patch16_gap_256.in12k_ft_in1k,53.537,46.463,69.067,30.933,38.86,256,0.950,bicubic,-42.443,-30.383,+72
+deit3_base_patch16_384,53.513,46.487,67.637,32.363,86.88,384,1.000,bicubic,-42.717,-31.763,+33
+deit3_base_patch16_224,53.453,46.547,67.590,32.410,86.59,224,0.900,bicubic,-42.327,-31.680,+101
+tf_efficientnet_b8.ra_in1k,53.410,46.590,69.090,30.910,87.41,672,0.954,bicubic,-43.290,-30.510,-34
+xcit_medium_24_p8_384_dist,53.407,46.593,68.143,31.857,84.32,384,1.000,bicubic,-43.373,-31.527,-47
+vit_base_patch32_384.augreg_in21k_ft_in1k,53.307,46.693,68.047,31.953,88.30,384,1.000,bicubic,-42.593,-31.303,+79
+tf_efficientnet_b7.ap_in1k,53.260,46.740,68.873,31.127,66.35,600,0.949,bicubic,-43.090,-30.497,+5
+convnext_large.fb_in1k,53.227,46.773,67.877,32.123,197.77,288,1.000,bicubic,-43.173,-31.663,0
+xcit_medium_24_p16_384_dist,53.213,46.787,68.050,31.950,84.40,384,1.000,bicubic,-43.487,-31.480,-40
+maxvit_base_tf_224.in1k,53.210,46.790,66.147,33.853,119.47,224,0.950,bicubic,-43.140,-33.443,+4
+tf_efficientnetv2_l.in1k,53.163,46.837,67.833,32.167,118.52,480,1.000,bicubic,-43.577,-31.717,-47
+tf_efficientnetv2_s.in21k_ft_in1k,53.150,46.850,69.000,31.000,21.46,384,1.000,bicubic,-43.320,-30.570,-17
+tf_efficientnet_b4.ap_in1k,53.090,46.910,68.210,31.790,19.34,380,0.922,bicubic,-42.400,-31.030,+129
+regnetz_e8,53.017,46.983,67.140,32.860,57.70,320,1.000,bicubic,-43.583,-32.420,-36
+maxvit_large_tf_224.in1k,52.983,47.017,65.343,34.657,211.79,224,0.950,bicubic,-43.337,-34.157,+6
+dm_nfnet_f5,52.870,47.130,67.430,32.570,377.21,544,0.954,bicubic,-43.940,-32.080,-63
+volo_d3_224,52.703,47.297,66.317,33.683,86.33,224,0.960,bicubic,-43.747,-33.303,-16
+deit3_small_patch16_224_in21ft1k,52.690,47.310,66.877,33.123,22.06,224,1.000,bicubic,-43.130,-32.433,+79
+maxvit_tiny_tf_384.in1k,52.463,47.537,66.780,33.220,30.98,384,1.000,bicubic,-44.137,-32.830,-40
+dm_nfnet_f6,52.447,47.553,67.120,32.880,438.36,576,0.956,bicubic,-44.473,-32.600,-79
+tf_efficientnet_b7.ra_in1k,52.393,47.607,68.233,31.767,66.35,600,0.949,bicubic,-44.187,-31.277,-40
+xcit_small_24_p8_384_dist,52.360,47.640,66.840,33.160,47.63,384,1.000,bicubic,-44.460,-32.790,-70
+swsl_resnet18,52.327,47.673,70.480,29.520,11.69,224,0.875,bilinear,-38.763,-27.730,+572
+efficientnetv2_rw_m.agc_in1k,52.323,47.677,67.210,32.790,53.24,416,1.000,bicubic,-43.947,-32.420,+4
+deit_base_distilled_patch16_384,52.257,47.743,67.733,32.267,87.63,384,1.000,bicubic,-44.253,-31.827,-35
+xcit_medium_24_p16_224_dist,52.210,47.790,66.900,33.100,84.40,224,1.000,bicubic,-44.050,-32.510,+4
+xcit_small_24_p8_224_dist,52.197,47.803,66.767,33.233,47.63,224,1.000,bicubic,-44.353,-32.773,-42
+convnext_tiny.fb_in22k_ft_in1k_384,52.163,47.837,66.917,33.083,28.59,384,1.000,bicubic,-44.007,-32.583,+18
+dm_nfnet_f3,52.130,47.870,66.743,33.257,254.92,416,0.940,bicubic,-44.600,-32.887,-64
+resnetv2_152x2_bit_teacher_384,51.937,48.063,68.670,31.330,236.34,384,1.000,bicubic,-44.253,-30.830,+11
+resmlp_big_24_224_in22ft1k,51.903,48.097,68.463,31.537,129.14,224,0.875,bicubic,-44.447,-31.057,-18
+xcit_small_24_p16_384_dist,51.883,48.117,66.353,33.647,47.67,384,1.000,bicubic,-44.457,-33.197,-17
+cait_s24_384,51.783,48.217,66.313,33.687,47.06,384,1.000,bicubic,-44.787,-33.237,-51
+resnetv2_152x2_bitm,51.757,48.243,69.250,30.750,236.34,448,1.000,bilinear,-44.763,-30.340,-47
+ecaresnet269d,51.670,48.330,66.047,33.953,102.09,352,1.000,bicubic,-44.790,-33.493,-38
+mvitv2_base,51.567,48.433,65.623,34.377,51.47,224,0.900,bicubic,-44.443,-33.897,+35
+vit_base_patch16_224_miil.in21k_ft_in1k,51.557,48.443,65.207,34.793,86.54,224,0.875,bilinear,-44.473,-34.183,+30
+tf_efficientnetv2_m.in1k,51.437,48.563,66.630,33.370,54.14,480,1.000,bicubic,-45.043,-32.980,-45
+maxvit_rmlp_small_rw_224,51.423,48.577,65.180,34.820,64.90,224,0.900,bicubic,-44.537,-34.240,+41
+maxvit_small_tf_224.in1k,51.180,48.820,65.277,34.723,68.93,224,0.950,bicubic,-45.030,-34.203,-2
+pit_b_distilled_224,51.153,48.847,66.770,33.230,74.79,224,0.900,bicubic,-44.917,-32.650,+17
+xcit_small_12_p8_384_dist,51.100,48.900,65.833,34.167,26.21,384,1.000,bicubic,-45.380,-33.657,-48
+convnext_base.fb_in1k,51.073,48.927,65.883,34.117,88.59,288,1.000,bicubic,-45.237,-33.667,-19
+dm_nfnet_f4,50.900,49.100,65.557,34.443,316.07,512,0.951,bicubic,-45.880,-34.053,-88
+tf_efficientnet_b1.ns_jft_in1k,50.883,49.117,67.910,32.090,7.79,240,0.882,bicubic,-43.977,-31.340,+208
+vit_base_patch16_384.orig_in21k_ft_in1k,50.883,49.117,65.270,34.730,86.86,384,1.000,bicubic,-45.307,-34.030,-6
+volo_d2_384,50.873,49.127,65.637,34.363,58.87,384,1.000,bicubic,-45.837,-33.963,-79
+xcit_small_24_p16_224_dist,50.733,49.267,65.010,34.990,47.67,224,1.000,bicubic,-45.067,-34.330,+53
+flexivit_base.1200ep_in1k,50.693,49.307,65.117,34.883,86.59,240,0.950,bicubic,-45.417,-34.343,+3
+coatnet_rmlp_2_rw_224,50.563,49.437,63.370,36.630,73.88,224,0.950,bicubic,-45.637,-35.910,-11
+xcit_small_12_p16_384_dist,50.520,49.480,65.313,34.687,26.25,384,1.000,bicubic,-45.820,-34.267,-34
+efficientnet_b4.ra2_in1k,50.510,49.490,65.703,34.297,19.34,384,1.000,bicubic,-45.010,-33.687,+87
+volo_d1_384,50.477,49.523,64.917,35.083,26.78,384,1.000,bicubic,-45.993,-34.633,-57
+xcit_small_12_p8_224_dist,50.440,49.560,65.433,34.567,26.21,224,1.000,bicubic,-45.520,-33.917,+24
+resnetv2_101x3_bitm,50.407,49.593,67.790,32.210,387.93,448,1.000,bilinear,-45.843,-31.800,-24
+flexivit_base.600ep_in1k,50.357,49.643,64.613,35.387,86.59,240,0.950,bicubic,-45.603,-34.807,+21
+regnetz_040h,50.330,49.670,65.633,34.367,28.94,320,1.000,bicubic,-46.000,-33.887,-39
+ssl_resnext101_32x16d,50.257,49.743,66.033,33.967,194.03,224,0.875,bilinear,-45.153,-33.127,+98
+mvitv2_small,50.250,49.750,64.910,35.090,34.87,224,0.900,bicubic,-45.640,-34.450,+29
+cait_s24_224,50.243,49.757,65.027,34.973,46.92,224,1.000,bicubic,-45.407,-34.363,+63
+eca_nfnet_l2,50.237,49.763,65.450,34.550,56.72,384,1.000,bicubic,-46.213,-34.210,-61
+tresnet_v2_l,50.167,49.833,65.093,34.907,46.17,224,0.875,bilinear,-45.653,-34.197,+35
+pvt_v2_b5,50.167,49.833,65.037,34.963,81.96,224,0.900,bicubic,-45.783,-34.353,+18
+deit3_medium_patch16_224,50.167,49.833,64.697,35.303,38.85,224,0.900,bicubic,-45.223,-34.733,+103
+vit_small_patch16_384.augreg_in21k_ft_in1k,50.160,49.840,65.807,34.193,22.20,384,1.000,bicubic,-45.820,-33.623,+7
+resnest269e,50.153,49.847,64.670,35.330,110.93,416,0.928,bicubic,-45.967,-34.850,-17
+deit_base_distilled_patch16_224,50.063,49.937,66.227,33.773,87.34,224,0.900,bicubic,-45.687,-33.203,+41
+pvt_v2_b4,50.063,49.937,65.150,34.850,62.56,224,0.900,bicubic,-45.837,-34.290,+18
+tf_efficientnet_b3.ap_in1k,50.057,49.943,65.210,34.790,12.23,300,0.904,bicubic,-44.913,-33.900,+164
+flexivit_base.300ep_in1k,50.013,49.987,64.113,35.887,86.59,240,0.950,bicubic,-45.957,-35.257,+6
+resnest200e,49.873,50.127,64.743,35.257,70.20,320,0.909,bicubic,-46.197,-34.637,-15
+efficientformer_l7,49.837,50.163,66.020,33.980,82.23,224,0.950,bicubic,-45.763,-33.370,+56
+volo_d2_224,49.813,50.187,64.593,35.407,58.68,224,0.960,bicubic,-46.607,-34.867,-69
+seresnextaa101d_32x8d,49.760,50.240,64.410,35.590,93.59,288,1.000,bicubic,-46.660,-35.110,-71
+xception65,49.760,50.240,63.520,36.480,39.92,299,0.940,bicubic,-45.930,-35.790,+45
+swinv2_base_window16_256,49.680,50.320,63.813,36.187,87.92,256,0.900,bicubic,-46.500,-35.717,-31
+pvt_v2_b3,49.580,50.420,64.787,35.213,45.24,224,0.900,bicubic,-45.890,-34.533,+71
+cait_xs24_384,49.527,50.473,64.900,35.100,26.67,384,1.000,bicubic,-46.483,-34.430,-8
+maxvit_rmlp_tiny_rw_256,49.523,50.477,63.817,36.183,29.15,256,0.950,bicubic,-46.517,-35.593,-15
+tf_efficientnet_b5.ra_in1k,49.510,50.490,65.657,34.343,30.39,456,0.934,bicubic,-46.470,-33.843,-6
+resnetv2_152x2_bit_teacher,49.480,50.520,65.617,34.383,236.34,224,0.875,bicubic,-46.270,-33.823,+26
+resnet200d,49.470,50.530,64.330,35.670,64.69,320,1.000,bicubic,-46.640,-35.070,-31
+xcit_small_12_p16_224_dist,49.417,50.583,63.850,36.150,26.25,224,1.000,bicubic,-46.323,-35.310,+26
+resnest101e,49.367,50.633,65.587,34.413,48.28,256,0.875,bilinear,-46.203,-33.683,+49
+regnetz_040,49.280,50.720,64.067,35.933,27.12,320,1.000,bicubic,-46.900,-35.333,-41
+resnet152d,49.253,50.747,64.413,35.587,60.21,320,1.000,bicubic,-46.617,-35.017,+6
+vit_base_patch32_224.augreg_in21k_ft_in1k,49.253,50.747,64.340,35.660,88.22,224,0.900,bicubic,-45.137,-34.720,+235
+seresnet152d,49.247,50.753,64.170,35.830,66.84,320,1.000,bicubic,-47.063,-35.340,-65
+xcit_large_24_p8_224,49.247,50.753,62.850,37.150,188.93,224,1.000,bicubic,-46.833,-36.300,-34
+maxxvit_rmlp_small_rw_256,49.147,50.853,63.343,36.657,66.01,256,0.950,bicubic,-47.063,-36.117,-53
+gcvit_base,49.143,50.857,63.963,36.037,90.32,224,0.875,bicubic,-46.917,-35.597,-29
+resmlp_big_24_distilled_224,49.097,50.903,65.470,34.530,129.14,224,0.875,bicubic,-46.773,-33.970,-1
+convnext_small.fb_in1k,49.077,50.923,64.820,35.180,50.22,288,1.000,bicubic,-46.903,-34.770,-18
+ssl_resnext101_32x8d,49.067,50.933,65.480,34.520,88.79,224,0.875,bilinear,-46.273,-33.860,+78
+volo_d1_224,48.967,51.033,63.187,36.813,26.63,224,0.960,bicubic,-47.063,-36.163,-29
+repvgg_b3,48.917,51.083,64.887,35.113,123.09,224,0.875,bilinear,-45.633,-34.023,+208
+resnetrs420,48.857,51.143,63.427,36.573,191.89,416,1.000,bicubic,-47.543,-36.103,-90
+maxvit_tiny_tf_224.in1k,48.810,51.190,62.947,37.053,30.92,224,0.950,bicubic,-47.000,-36.313,+2
+deit3_small_patch16_384,48.680,51.320,62.833,37.167,22.21,384,1.000,bicubic,-46.920,-36.607,+30
+seresnext101d_32x8d,48.610,51.390,62.963,37.037,93.59,288,1.000,bicubic,-47.750,-36.507,-89
+efficientnetv2_rw_s.ra2_in1k,48.603,51.397,63.840,36.160,23.94,384,1.000,bicubic,-47.107,-35.560,+16
+regnetz_d32,48.590,51.410,65.187,34.813,27.58,320,0.950,bicubic,-47.270,-34.243,-7
+swinv2_small_window16_256,48.583,51.417,62.767,37.233,49.73,256,0.900,bicubic,-47.487,-36.573,-44
+efficientnet_b3.ra2_in1k,48.563,51.437,64.250,35.750,12.23,320,1.000,bicubic,-46.577,-34.960,+97
+ecaresnet101d,48.527,51.473,64.100,35.900,44.57,224,0.875,bicubic,-46.633,-35.200,+92
+edgenext_base,48.433,51.567,64.317,35.683,18.51,320,1.000,bicubic,-47.357,-35.253,-4
+dm_nfnet_f2,48.373,51.627,63.233,36.767,193.78,352,0.920,bicubic,-48.087,-36.377,-109
+vit_small_r26_s32_224.augreg_in21k_ft_in1k,48.363,51.637,63.797,36.203,36.43,224,0.900,bicubic,-46.767,-35.323,+95
+swinv2_base_window8_256,48.340,51.660,63.610,36.390,87.92,256,0.900,bicubic,-47.730,-35.870,-52
+repvgg_b3g4,48.310,51.690,64.800,35.200,83.83,224,0.875,bilinear,-46.180,-34.220,+201
+vit_large_patch32_384.orig_in21k_ft_in1k,48.250,51.750,61.830,38.170,306.63,384,1.000,bicubic,-46.990,-37.410,+72
+convit_base,48.217,51.783,63.000,37.000,86.54,224,0.875,bicubic,-46.883,-36.140,+98
+swin_s3_base_224,48.147,51.853,62.250,37.750,71.13,224,0.900,bicubic,-47.893,-37.100,-48
+sequencer2d_l,48.100,51.900,62.350,37.650,54.30,224,0.875,bicubic,-47.780,-37.120,-24
+resnetrs350,48.050,51.950,62.653,37.347,163.96,384,1.000,bicubic,-48.190,-36.937,-82
+tf_efficientnetv2_b3.in21k_ft_in1k,48.037,51.963,64.730,35.270,14.36,300,0.900,bicubic,-47.553,-34.550,+16
+gcvit_small,48.033,51.967,62.700,37.300,51.09,224,0.875,bicubic,-47.897,-36.680,-33
+regnetz_d8,48.010,51.990,64.417,35.583,23.37,320,1.000,bicubic,-48.000,-35.013,-50
+twins_svt_large,47.947,52.053,62.907,37.093,99.27,224,0.900,bicubic,-47.773,-36.583,-5
+vit_relpos_base_patch16_224.sw_in1k,47.927,52.073,62.853,37.147,86.43,224,0.900,bicubic,-47.223,-36.307,+78
+mixer_b16_224_miil,47.790,52.210,63.400,36.600,59.88,224,0.875,bilinear,-47.090,-35.870,+128
+repvgg_b2g4,47.787,52.213,64.390,35.610,61.76,224,0.875,bilinear,-46.033,-34.520,+273
+vit_relpos_base_patch16_clsgap_224.sw_in1k,47.767,52.233,62.403,37.597,86.43,224,0.900,bicubic,-47.483,-36.797,+59
+mvitv2_tiny,47.693,52.307,62.810,37.190,24.17,224,0.900,bicubic,-47.717,-36.250,+34
+vit_relpos_medium_patch16_cls_224.sw_in1k,47.660,52.340,61.800,38.200,38.76,224,0.900,bicubic,-47.640,-37.290,+50
+eca_nfnet_l1,47.650,52.350,62.763,37.237,41.41,320,1.000,bicubic,-48.290,-36.727,-44
+seresnext101_32x8d,47.643,52.357,61.443,38.557,93.57,288,1.000,bicubic,-48.477,-37.917,-77
+resnetv2_50x3_bitm,47.593,52.407,65.603,34.397,217.32,448,1.000,bilinear,-48.677,-33.957,-101
+pit_s_distilled_224,47.543,52.457,63.493,36.507,24.04,224,0.900,bicubic,-47.187,-35.607,+138
+resnest50d_4s2x40d,47.483,52.517,63.807,36.193,30.42,224,0.875,bicubic,-47.227,-35.163,+143
+efficientnet_b3_pruned.in1k,47.447,52.553,62.793,37.207,9.86,300,0.904,bicubic,-47.133,-36.277,+167
+crossvit_18_dagger_408,47.380,52.620,60.943,39.057,44.61,408,1.000,bicubic,-48.750,-38.527,-84
+coatnet_rmlp_1_rw_224,47.370,52.630,61.430,38.570,41.69,224,0.950,bicubic,-48.120,-37.960,+11
+vit_base_patch16_224.orig_in21k_ft_in1k,47.340,52.660,61.607,38.393,86.57,224,0.900,bicubic,-47.870,-37.553,+53
+xcit_small_24_p8_224,47.287,52.713,60.990,39.010,47.63,224,1.000,bicubic,-48.613,-38.190,-48
+efficientformer_l3,47.230,52.770,63.400,36.600,31.41,224,0.950,bicubic,-47.980,-35.910,+50
+tresnet_m,47.230,52.770,61.993,38.007,31.39,224,0.875,bilinear,-48.150,-37.157,+29
+tf_efficientnet_b6.aa_in1k,47.213,52.787,63.110,36.890,43.04,528,0.942,bicubic,-49.077,-36.410,-112
+ssl_resnext101_32x4d,47.177,52.823,63.367,36.633,44.18,224,0.875,bilinear,-47.983,-35.863,+57
+resnetrs270,47.107,52.893,62.010,37.990,129.86,352,1.000,bicubic,-48.953,-37.480,-79
+tf_efficientnet_b4.aa_in1k,47.083,52.917,62.867,37.133,19.34,380,0.922,bicubic,-48.507,-36.413,-11
+regnetz_d8_evos,47.080,52.920,63.397,36.603,23.46,320,0.950,bicubic,-49.140,-36.093,-106
+vit_base_patch16_rpn_224.in1k,47.057,52.943,62.400,37.600,86.54,224,0.900,bicubic,-47.773,-36.690,+113
+swinv2_small_window8_256,47.027,52.973,62.307,37.693,49.73,256,0.900,bicubic,-48.703,-37.053,-32
+xcit_small_12_p8_224,46.983,53.017,60.533,39.467,26.21,224,1.000,bicubic,-48.437,-38.667,+10
+xcit_large_24_p16_224,46.957,53.043,60.670,39.330,189.10,224,1.000,bicubic,-47.983,-38.160,+91
+xception65p,46.937,53.063,61.083,38.917,39.82,299,0.940,bicubic,-48.723,-38.187,-25
+resnet101d,46.893,53.107,62.317,37.683,44.57,320,1.000,bicubic,-48.857,-36.963,-42
+maxvit_tiny_rw_224,46.887,53.113,60.897,39.103,29.06,224,0.950,bicubic,-48.853,-38.413,-39
+pvt_v2_b2_li,46.833,53.167,62.507,37.493,22.55,224,0.900,bicubic,-48.367,-36.773,+43
+resnet152,46.817,53.183,60.427,39.573,60.19,224,0.950,bicubic,-48.733,-38.843,-16
+gluon_seresnext101_64x4d,46.677,53.323,61.303,38.697,88.23,224,0.875,bicubic,-47.973,-37.677,+134
+twins_pcpvt_large,46.637,53.363,62.240,37.760,60.99,224,0.900,bicubic,-49.083,-37.050,-40
+convnext_tiny.fb_in1k,46.573,53.427,63.183,36.817,28.59,288,1.000,bicubic,-48.627,-35.987,+37
+dm_nfnet_f1,46.547,53.453,61.407,38.593,132.63,320,0.910,bicubic,-49.843,-38.063,-146
+regnetv_064,46.480,53.520,62.253,37.747,30.58,288,1.000,bicubic,-49.290,-37.167,-51
+crossvit_15_dagger_408,46.463,53.537,60.480,39.520,28.50,408,1.000,bicubic,-49.357,-38.930,-59
+xcit_medium_24_p8_224,46.463,53.537,59.647,40.353,84.32,224,1.000,bicubic,-49.407,-39.433,-65
+resnetrs200,46.423,53.577,61.060,38.940,93.21,320,1.000,bicubic,-49.917,-38.430,-143
+coatnet_1_rw_224,46.403,53.597,60.080,39.920,41.72,224,0.950,bicubic,-49.217,-39.360,-33
+swin_s3_small_224,46.400,53.600,60.900,39.100,49.74,224,0.900,bicubic,-49.440,-38.300,-65
+gcvit_tiny,46.373,53.627,61.603,38.397,28.22,224,0.875,bicubic,-49.307,-37.737,-41
+fbnetv3_g.ra2_in1k,46.337,53.663,62.417,37.583,16.62,288,0.950,bilinear,-48.793,-36.813,+41
+sequencer2d_m,46.300,53.700,60.897,39.103,38.31,224,0.875,bicubic,-49.290,-38.433,-33
+tresnet_xl,46.283,53.717,61.943,38.057,78.44,224,0.875,bilinear,-48.777,-37.317,+56
+xcit_tiny_24_p8_384_dist,46.267,53.733,60.713,39.287,12.11,384,1.000,bicubic,-49.973,-38.727,-132
+xcit_tiny_24_p8_224_dist,46.260,53.740,60.600,39.400,12.11,224,1.000,bicubic,-49.190,-38.790,-15
+deit_small_distilled_patch16_224,46.160,53.840,62.417,37.583,22.44,224,0.900,bicubic,-48.430,-36.513,+126
+regnety_160,46.153,53.847,61.837,38.163,83.59,288,1.000,bicubic,-49.727,-37.723,-80
+gernet_m,46.150,53.850,62.700,37.300,21.14,224,0.875,bilinear,-48.400,-35.960,+133
+crossvit_base_240,46.133,53.867,60.217,39.783,105.03,240,0.875,bicubic,-48.937,-38.933,+49
+swinv2_cr_small_ns_224,46.117,53.883,60.780,39.220,49.70,224,0.900,bicubic,-49.573,-38.530,-53
+resnest50d_1s4x24d,46.083,53.917,62.377,37.623,25.68,224,0.875,bicubic,-48.307,-36.693,+148
+tf_efficientnet_b0.ns_jft_in1k,46.047,53.953,63.253,36.747,5.29,224,0.875,bicubic,-47.693,-35.677,+230
+jx_nest_base,46.040,53.960,60.103,39.897,67.72,224,0.875,bicubic,-49.500,-39.197,-39
+resnet51q,46.027,53.973,60.910,39.090,35.70,288,1.000,bilinear,-49.173,-38.350,+16
+vit_small_patch16_224.augreg_in21k_ft_in1k,45.990,54.010,61.820,38.180,22.05,224,0.900,bicubic,-48.890,-37.260,+71
+regnety_080,45.960,54.040,60.850,39.150,39.18,288,1.000,bicubic,-49.900,-38.590,-84
+vit_relpos_medium_patch16_224.sw_in1k,45.947,54.053,61.030,38.970,38.75,224,0.900,bicubic,-49.263,-38.200,+9
+resnest50d,45.937,54.063,62.623,37.377,27.48,224,0.875,bilinear,-48.683,-36.497,+110
+deit3_small_patch16_224,45.927,54.073,58.903,41.097,22.06,224,0.900,bicubic,-48.763,-40.197,+98
+convnext_nano.in12k_ft_in1k,45.903,54.097,62.680,37.320,15.59,288,1.000,bicubic,-49.457,-36.770,-15
+crossvit_18_240,45.903,54.097,60.373,39.627,43.27,240,0.875,bicubic,-49.167,-38.827,+36
+regnety_032,45.893,54.107,61.537,38.463,19.44,288,1.000,bicubic,-49.577,-37.773,-37
+twins_pcpvt_base,45.893,54.107,61.337,38.663,43.83,224,0.900,bicubic,-49.567,-38.053,-35
+levit_384,45.877,54.123,61.693,38.307,39.13,224,0.900,bicubic,-49.333,-37.527,+3
+twins_svt_base,45.877,54.123,60.967,39.033,56.07,224,0.900,bicubic,-49.693,-38.263,-53
+crossvit_18_dagger_240,45.853,54.147,59.927,40.073,44.27,240,0.875,bicubic,-49.327,-39.193,+7
+convnext_tiny_hnf.a2h_in1k,45.850,54.150,60.183,39.817,28.59,288,1.000,bicubic,-49.420,-38.977,-10
+vit_relpos_medium_patch16_rpn_224.sw_in1k,45.737,54.263,60.963,39.037,38.73,224,0.900,bicubic,-49.333,-38.327,+27
+vit_srelpos_medium_patch16_224.sw_in1k,45.720,54.280,61.067,38.933,38.74,224,0.900,bicubic,-49.180,-38.093,+53
+crossvit_15_dagger_240,45.700,54.300,60.097,39.903,28.21,240,0.875,bicubic,-49.280,-39.063,+39
+regnetz_c16,45.687,54.313,62.520,37.480,13.46,320,0.940,bicubic,-49.713,-36.790,-32
+convmixer_1536_20,45.663,54.337,61.770,38.230,51.63,224,0.960,bicubic,-49.307,-37.400,+38
+gc_efficientnetv2_rw_t.agc_in1k,45.653,54.347,60.203,39.797,13.68,288,1.000,bicubic,-49.627,-39.017,-18
+flexivit_small.1200ep_in1k,45.610,54.390,59.883,40.117,22.06,240,0.950,bicubic,-49.590,-39.507,-2
+efficientnetv2_rw_t.ra2_in1k,45.607,54.393,60.183,39.817,13.65,288,1.000,bicubic,-49.463,-38.797,+19
+gluon_seresnext101_32x4d,45.590,54.410,61.143,38.857,48.96,224,0.875,bicubic,-48.860,-37.947,+115
+xcit_tiny_24_p16_384_dist,45.583,54.417,60.510,39.490,12.12,384,1.000,bicubic,-49.907,-38.620,-56
+xcit_small_24_p16_224,45.547,54.453,58.920,41.080,47.67,224,1.000,bicubic,-49.533,-40.100,+13
+xcit_medium_24_p16_224,45.540,54.460,59.000,41.000,84.40,224,1.000,bicubic,-49.590,-39.920,+5
+dm_nfnet_f0,45.483,54.517,60.983,39.017,71.49,256,0.900,bicubic,-50.207,-38.347,-84
+resnext101_64x4d,45.470,54.530,59.047,40.953,83.46,288,1.000,bicubic,-50.070,-40.243,-66
+gluon_resnet152_v1d,45.430,54.570,60.077,39.923,60.21,224,0.875,bicubic,-49.010,-39.103,+112
+nfnet_l0,45.420,54.580,62.080,37.920,35.07,288,1.000,bicubic,-49.970,-37.200,-42
+ssl_resnext50_32x4d,45.407,54.593,62.047,37.953,25.03,224,0.875,bilinear,-49.293,-36.783,+69
+resnetv2_50x1_bit_distilled,45.393,54.607,62.303,37.697,25.55,224,0.875,bicubic,-49.997,-36.907,-46
+cs3se_edgenet_x,45.393,54.607,60.427,39.573,50.72,320,1.000,bicubic,-50.617,-39.013,-138
+xcit_small_12_p16_224,45.387,54.613,59.417,40.583,26.25,224,1.000,bicubic,-49.443,-39.643,+48
+jx_nest_small,45.360,54.640,59.007,40.993,38.35,224,0.875,bicubic,-50.170,-40.203,-72
+pvt_v2_b2,45.297,54.703,60.620,39.380,25.36,224,0.900,bicubic,-49.713,-38.560,+16
+resnet61q,45.283,54.717,59.400,40.600,36.85,288,1.000,bicubic,-49.837,-39.800,-4
+cs3edgenet_x,45.253,54.747,60.257,39.743,47.82,288,1.000,bicubic,-50.197,-39.103,-60
+tresnet_xl_448,45.223,54.777,61.437,38.563,78.44,448,0.875,bilinear,-50.287,-37.903,-73
+nasnetalarge,45.210,54.790,57.883,42.117,88.75,331,0.911,bicubic,-49.940,-41.417,-15
+convit_small,45.203,54.797,60.510,39.490,27.78,224,0.875,bicubic,-49.717,-38.610,+25
+flexivit_small.600ep_in1k,45.197,54.803,59.413,40.587,22.06,240,0.950,bicubic,-50.073,-39.567,-39
+swin_small_patch4_window7_224,45.163,54.837,60.330,39.670,49.61,224,0.900,bicubic,-50.557,-39.040,-103
+resnet101,45.127,54.873,59.573,40.427,44.55,224,0.950,bicubic,-49.823,-39.497,+17
+tf_efficientnet_b3.aa_in1k,45.107,54.893,60.650,39.350,12.23,300,0.904,bicubic,-49.803,-38.370,+22
+sequencer2d_s,45.093,54.907,60.050,39.950,27.65,224,0.875,bicubic,-50.377,-39.220,-72
+rexnet_200,45.047,54.953,62.317,37.683,16.37,224,0.875,bicubic,-49.613,-36.833,+63
+maxxvit_rmlp_nano_rw_256,45.023,54.977,59.660,40.340,16.78,256,0.950,bicubic,-50.327,-39.660,-53
+resnetrs152,44.943,55.057,59.713,40.287,86.62,320,1.000,bicubic,-51.017,-39.667,-145
+resnetv2_101,44.937,55.063,58.850,41.150,44.54,224,0.950,bicubic,-49.983,-40.260,+15
+ecaresnetlight,44.890,55.110,60.770,39.230,30.16,224,0.875,bicubic,-49.250,-38.180,+129
+deit_base_patch16_224,44.870,55.130,59.177,40.823,86.57,224,0.900,bicubic,-50.140,-39.963,+2
+flexivit_small.300ep_in1k,44.867,55.133,59.343,40.657,22.06,240,0.950,bicubic,-50.283,-39.787,-29
+coatnet_bn_0_rw_224,44.813,55.187,60.903,39.097,27.44,224,0.950,bicubic,-50.167,-38.327,+1
+deit_base_patch16_384,44.777,55.223,59.617,40.383,86.86,384,1.000,bicubic,-50.873,-39.623,-106
+cait_xxs36_384,44.773,55.227,59.380,40.620,17.37,384,1.000,bicubic,-50.447,-39.940,-46
+resmlp_36_distilled_224,44.757,55.243,61.073,38.927,44.69,224,0.875,bicubic,-49.813,-38.087,+68
+gernet_l,44.740,55.260,58.943,41.057,31.08,256,0.875,bilinear,-50.190,-40.257,+5
+xcit_tiny_24_p16_224_dist,44.710,55.290,59.417,40.583,12.12,224,1.000,bicubic,-49.520,-39.403,+108
+resmlp_24_distilled_224,44.707,55.293,61.467,38.533,30.02,224,0.875,bicubic,-49.623,-37.593,+96
+tf_efficientnet_b2.ap_in1k,44.700,55.300,60.680,39.320,9.11,260,0.890,bicubic,-49.570,-38.100,+102
+swinv2_tiny_window16_256,44.573,55.427,59.570,40.430,28.35,256,0.900,bicubic,-50.787,-39.560,-69
+vit_relpos_small_patch16_224.sw_in1k,44.553,55.447,60.203,39.797,21.98,224,0.900,bicubic,-50.137,-38.547,+40
+gmlp_s16_224,44.473,55.527,58.630,41.370,19.42,224,0.875,bicubic,-49.027,-40.210,+200
+ens_adv_inception_resnet_v2,44.393,55.607,58.117,41.883,55.84,299,0.897,bicubic,-49.737,-40.843,+118
+tresnet_l,44.363,55.637,59.953,40.047,55.99,224,0.875,bilinear,-50.527,-39.237,+6
+gluon_resnext101_32x4d,44.290,55.710,59.090,40.910,44.18,224,0.875,bicubic,-49.830,-39.840,+117
+poolformer_m48,44.267,55.733,59.300,40.700,73.47,224,0.950,bicubic,-50.863,-39.920,-38
+gcvit_xtiny,44.237,55.763,59.963,40.037,19.98,224,0.875,bicubic,-50.773,-39.017,-18
+maxvit_rmlp_nano_rw_256,44.180,55.820,58.240,41.760,15.50,256,0.950,bicubic,-51.260,-40.820,-91
+wide_resnet50_2,44.177,55.823,59.727,40.273,68.88,224,0.875,bicubic,-50.493,-39.323,+37
+regnetz_c16_evos,44.160,55.840,61.060,38.940,13.49,320,0.950,bicubic,-51.460,-38.360,-121
+vit_srelpos_small_patch16_224.sw_in1k,44.137,55.863,59.710,40.290,21.97,224,0.900,bicubic,-50.413,-39.220,+54
+resnetv2_101x1_bitm,44.127,55.873,61.983,38.017,44.54,448,1.000,bilinear,-51.193,-37.387,-75
+seresnext50_32x4d,44.127,55.873,59.490,40.510,27.56,224,0.875,bicubic,-50.693,-39.640,+8
+crossvit_15_240,44.117,55.883,59.130,40.870,27.53,240,0.875,bicubic,-50.603,-39.930,+20
+gluon_resnet152_v1s,44.073,55.927,58.703,41.297,60.32,224,0.875,bicubic,-50.647,-40.477,+20
+pit_b_224,44.070,55.930,58.017,41.983,73.76,224,0.900,bicubic,-50.720,-40.803,+8
+poolformer_m36,44.027,55.973,59.060,40.940,56.17,224,0.950,bicubic,-50.983,-40.040,-26
+ssl_resnet50,44.010,55.990,61.887,38.113,25.56,224,0.875,bilinear,-50.300,-37.263,+79
+inception_resnet_v2,44.003,55.997,57.907,42.093,55.84,299,0.897,bicubic,-50.337,-40.893,+74
+pnasnet5large,43.950,56.050,56.730,43.270,86.06,331,0.911,bicubic,-51.410,-42.570,-88
+coatnext_nano_rw_224,43.907,56.093,58.653,41.347,14.70,224,0.900,bicubic,-50.943,-40.547,-4
+pit_s_224,43.890,56.110,58.627,41.373,23.46,224,0.900,bicubic,-50.700,-40.513,+36
+gluon_resnext101_64x4d,43.877,56.123,58.710,41.290,83.46,224,0.875,bicubic,-50.473,-40.170,+69
+coat_lite_small,43.823,56.177,57.147,42.853,19.84,224,0.900,bicubic,-51.257,-41.913,-47
+regnetv_040,43.787,56.213,58.457,41.543,20.64,288,1.000,bicubic,-51.943,-40.923,-151
+tnt_s_patch16_224,43.773,56.227,59.197,40.803,23.76,224,0.900,bicubic,-50.807,-39.853,+32
+swinv2_cr_small_224,43.773,56.227,57.703,42.297,49.70,224,0.900,bicubic,-51.637,-41.597,-104
+mobilevitv2_200_in22ft1k,43.770,56.230,59.493,40.507,18.45,256,0.888,bicubic,-51.280,-39.587,-42
+cait_xxs36_224,43.760,56.240,58.720,41.280,17.30,224,1.000,bicubic,-50.180,-40.310,+115
+cspresnext50,43.757,56.243,60.130,39.870,20.57,256,0.887,bilinear,-50.493,-38.920,+74
+ecaresnet50d,43.750,56.250,60.387,39.613,25.58,224,0.875,bicubic,-50.440,-38.543,+81
+ecaresnet101d_pruned,43.737,56.263,59.607,40.393,24.88,224,0.875,bicubic,-50.713,-39.493,+43
+swin_s3_tiny_224,43.710,56.290,59.497,40.503,28.33,224,0.900,bicubic,-51.190,-39.703,-25
+tf_efficientnetv2_s.in1k,43.710,56.290,58.597,41.403,21.46,384,1.000,bicubic,-52.000,-40.783,-155
+rexnet_150,43.690,56.310,60.897,39.103,9.73,224,0.875,bicubic,-50.580,-38.183,+65
+pit_xs_distilled_224,43.663,56.337,60.703,39.297,11.00,224,0.900,bicubic,-49.577,-38.147,+196
+xcit_tiny_12_p8_224_dist,43.640,56.360,58.457,41.543,6.71,224,1.000,bicubic,-51.080,-40.623,-4
+edgenext_small,43.620,56.380,59.887,40.113,5.59,320,1.000,bicubic,-51.210,-39.523,-20
+efficientformer_l1,43.587,56.413,59.927,40.073,12.29,224,0.950,bicubic,-50.353,-38.963,+102
+maxvit_nano_rw_256,43.537,56.463,57.600,42.400,15.45,256,0.950,bicubic,-51.953,-41.760,-132
+cs3sedarknet_x,43.520,56.480,58.793,41.207,35.40,288,1.000,bicubic,-51.880,-40.527,-118
+crossvit_small_240,43.470,56.530,58.933,41.067,26.86,240,0.875,bicubic,-51.110,-40.177,+18
+coatnet_rmlp_nano_rw_224,43.450,56.550,58.607,41.393,15.15,224,0.900,bicubic,-51.640,-40.563,-68
+gluon_resnet101_v1d,43.440,56.560,58.613,41.387,44.57,224,0.875,bicubic,-50.730,-40.397,+72
+ecaresnet50t,43.407,56.593,59.300,40.700,25.57,320,0.950,bicubic,-51.663,-39.820,-67
+gluon_resnet101_v1s,43.363,56.637,58.503,41.497,44.67,224,0.875,bicubic,-50.807,-40.437,+69
+cspdarknet53,43.357,56.643,59.430,40.570,27.64,256,0.887,bilinear,-50.733,-39.580,+77
+xcit_tiny_24_p8_224,43.313,56.687,57.280,42.720,12.11,224,1.000,bicubic,-51.577,-41.750,-39
+xcit_tiny_12_p8_384_dist,43.310,56.690,58.180,41.820,6.71,384,1.000,bicubic,-52.030,-41.140,-115
+dpn68b,43.287,56.713,58.673,41.327,12.61,224,0.875,bicubic,-50.333,-40.287,+136
+convmixer_768_32,43.267,56.733,59.367,40.633,21.11,224,0.960,bicubic,-51.153,-39.743,+29
+visformer_small,43.253,56.747,57.993,42.007,40.22,224,0.900,bicubic,-51.707,-41.217,-55
+eca_nfnet_l0,43.233,56.767,59.913,40.087,24.14,288,1.000,bicubic,-52.217,-39.367,-139
+regnety_064,43.227,56.773,57.237,42.763,30.58,288,1.000,bicubic,-52.563,-42.053,-188
+vit_relpos_base_patch32_plus_rpn_256.sw_in1k,43.163,56.837,58.427,41.573,119.42,256,0.900,bicubic,-49.997,-39.883,+185
+vit_small_patch32_384.augreg_in21k_ft_in1k,43.143,56.857,59.293,40.707,22.92,384,1.000,bicubic,-51.447,-39.807,0
+resnest26d,43.140,56.860,60.623,39.377,17.07,224,0.875,bilinear,-50.100,-38.197,+174
+twins_pcpvt_small,43.090,56.910,58.873,41.127,24.11,224,0.900,bicubic,-51.510,-40.277,-3
+resmlp_36_224,43.050,56.950,59.310,40.690,44.69,224,0.875,bicubic,-50.600,-39.600,+119
+coatnet_nano_rw_224,43.047,56.953,57.930,42.070,15.14,224,0.900,bicubic,-52.003,-41.220,-75
+dpn131,43.047,56.953,57.440,42.560,79.25,224,0.875,bicubic,-50.713,-41.360,+104
+cspresnet50,43.030,56.970,59.153,40.847,21.62,256,0.887,bilinear,-50.830,-39.737,+90
+tf_efficientnet_lite4.in1k,42.967,57.033,57.620,42.380,13.01,380,0.920,bilinear,-51.903,-41.470,-48
+twins_svt_small,42.923,57.077,58.453,41.547,24.06,224,0.900,bicubic,-51.847,-40.497,-38
+mobilevitv2_200_384_in22ft1k,42.920,57.080,58.980,41.020,18.45,384,1.000,bicubic,-52.470,-40.440,-138
+gluon_resnet152_v1b,42.903,57.097,57.750,42.250,60.19,224,0.875,bicubic,-51.127,-41.290,+67
+fbnetv3_d.ra2_in1k,42.873,57.127,59.693,40.307,10.31,256,0.950,bilinear,-50.967,-39.217,+88
+dpn107,42.857,57.143,57.367,42.633,86.92,224,0.875,bicubic,-51.103,-41.473,+71
+levit_256,42.823,57.177,57.897,42.103,18.89,224,0.900,bicubic,-51.577,-41.173,+14
+tf_efficientnet_b1.ap_in1k,42.803,57.197,58.813,41.187,7.79,240,0.882,bicubic,-50.827,-39.937,+113
+gcresnet50t,42.800,57.200,59.190,40.810,25.90,256,0.900,bicubic,-51.820,-39.950,-17
+gluon_resnet152_v1c,42.800,57.200,57.737,42.263,60.21,224,0.875,bicubic,-51.080,-41.003,+77
+gluon_xception65,42.793,57.207,58.820,41.180,39.92,299,0.903,bicubic,-51.217,-40.210,+63
+tresnet_l_448,42.753,57.247,58.947,41.053,55.99,448,0.875,bilinear,-52.657,-40.463,-154
+coatnet_0_rw_224,42.747,57.253,56.250,43.750,27.44,224,0.950,bicubic,-52.163,-42.860,-69
+cs3darknet_x,42.723,57.277,58.193,41.807,35.05,288,1.000,bicubic,-52.557,-41.087,-137
+resnet50d,42.707,57.293,58.697,41.303,25.58,224,0.875,bicubic,-51.363,-40.233,+50
+gluon_seresnext50_32x4d,42.683,57.317,58.710,41.290,27.56,224,0.875,bicubic,-51.487,-40.200,+39
+convnext_nano.d1h_in1k,42.673,57.327,57.560,42.440,15.59,288,1.000,bicubic,-52.197,-41.580,-66
+xcit_tiny_12_p16_384_dist,42.583,57.417,58.090,41.910,6.72,384,1.000,bicubic,-51.947,-41.080,-11
+regnety_040,42.570,57.430,57.027,42.973,20.65,288,1.000,bicubic,-52.910,-42.393,-172
+resnext101_32x8d,42.557,57.443,58.317,41.683,88.79,224,0.875,bilinear,-51.213,-40.633,+82
+nf_resnet50,42.510,57.490,59.520,40.480,25.56,288,0.940,bicubic,-51.890,-39.540,-1
+seresnet50,42.510,57.490,58.667,41.333,28.09,224,0.875,bicubic,-51.570,-40.303,+41
+mobilevitv2_175_in22ft1k,42.490,57.510,58.157,41.843,14.25,256,0.888,bicubic,-52.300,-40.933,-62
+resnetrs101,42.437,57.563,57.300,42.700,63.62,288,0.940,bicubic,-52.813,-41.910,-142
+poolformer_s36,42.323,57.677,58.743,41.257,30.86,224,0.900,bicubic,-52.307,-40.307,-38
+jx_nest_tiny,42.323,57.677,57.053,42.947,17.06,224,0.875,bicubic,-52.627,-42.047,-89
+tf_efficientnetv2_b3.in1k,42.313,57.687,57.940,42.060,14.36,300,0.904,bicubic,-52.807,-41.140,-119
+convmixer_1024_20_ks9_p14,42.280,57.720,59.713,40.287,24.38,224,0.960,bicubic,-50.060,-38.717,+214
+dpn98,42.280,57.720,56.880,43.120,61.57,224,0.875,bicubic,-51.660,-42.040,+51
+deit_small_patch16_224,42.263,57.737,58.020,41.980,22.05,224,0.900,bicubic,-51.737,-41.010,+46
+xcit_tiny_24_p16_224,42.260,57.740,56.830,43.170,12.12,224,1.000,bicubic,-51.590,-41.930,+61
+tf_efficientnet_cc_b1_8e.in1k,42.233,57.767,58.420,41.580,39.72,240,0.882,bicubic,-51.337,-40.270,+99
+legacy_senet154,42.207,57.793,56.597,43.403,115.09,224,0.875,bilinear,-52.523,-42.583,-63
+cait_xxs24_384,42.187,57.813,57.460,42.540,12.03,384,1.000,bicubic,-52.733,-41.680,-94
+xception41p,42.170,57.830,56.900,43.100,26.91,299,0.940,bicubic,-52.900,-42.320,-116
+tf_efficientnet_b2.aa_in1k,42.120,57.880,58.197,41.803,9.11,260,0.890,bicubic,-52.090,-40.853,+12
+convnext_nano_ols.d1h_in1k,42.047,57.953,56.880,43.120,15.65,288,1.000,bicubic,-52.533,-42.300,-37
+gluon_resnext50_32x4d,42.043,57.957,57.667,42.333,25.03,224,0.875,bicubic,-51.607,-41.283,+80
+resnext50_32x4d,41.987,58.013,56.767,43.233,25.03,224,0.950,bicubic,-52.583,-42.033,-37
+pvt_v2_b1,41.970,58.030,59.570,40.430,14.01,224,0.900,bicubic,-51.520,-39.200,+101
+ecaresnet50d_pruned,41.953,58.047,58.217,41.783,19.94,224,0.875,bicubic,-51.867,-40.783,+54
+mobilevitv2_150_in22ft1k,41.937,58.063,57.937,42.063,10.59,256,0.888,bicubic,-52.763,-40.983,-65
+efficientnet_b2.ra_in1k,41.933,58.067,58.300,41.700,9.11,288,1.000,bicubic,-52.437,-40.750,-17
+xcit_tiny_12_p16_224_dist,41.923,58.077,57.230,42.770,6.72,224,1.000,bicubic,-51.427,-41.440,+117
+gcvit_xxtiny,41.823,58.177,58.457,41.543,12.00,224,0.875,bicubic,-52.227,-40.613,+21
+mobilevitv2_150_384_in22ft1k,41.773,58.227,57.807,42.193,10.59,384,1.000,bicubic,-53.557,-41.323,-172
+edgenext_small_rw,41.687,58.313,58.513,41.487,7.83,320,1.000,bicubic,-52.673,-40.557,-18
+mobilevitv2_175_384_in22ft1k,41.683,58.317,57.997,42.003,14.25,384,1.000,bicubic,-53.577,-41.383,-167
+dla102x2,41.647,58.353,57.967,42.033,41.28,224,0.875,bilinear,-52.353,-40.993,+26
+hrnet_w64,41.637,58.363,57.130,42.870,128.06,224,0.875,bilinear,-52.193,-41.800,+44
+gluon_senet154,41.627,58.373,56.373,43.627,115.09,224,0.875,bicubic,-53.083,-42.757,-76
+poolformer_s24,41.600,58.400,58.437,41.563,21.39,224,0.900,bicubic,-52.730,-40.653,-19
+inception_v4,41.577,58.423,55.383,44.617,42.68,299,0.875,bicubic,-52.803,-43.437,-29
+haloregnetz_b,41.547,58.453,57.087,42.913,11.68,224,0.940,bicubic,-52.963,-41.883,-43
+swinv2_cr_tiny_ns_224,41.543,58.457,57.183,42.817,28.33,224,0.900,bicubic,-53.217,-41.927,-89
+cs3sedarknet_l,41.540,58.460,57.340,42.660,21.91,288,0.950,bicubic,-53.570,-41.870,-146
+convnext_tiny.fb_in22k_ft_in1k,41.533,58.467,55.470,44.530,28.59,288,1.000,bicubic,-52.017,-43.210,+76
+efficientnet_el.ra_in1k,41.497,58.503,58.303,41.697,10.59,300,0.904,bicubic,-53.173,-40.827,-75
+efficientnet_em.ra2_in1k,41.493,58.507,58.877,41.123,6.90,240,0.882,bicubic,-52.247,-40.103,+47
+tf_efficientnet_cc_b0_8e.in1k,41.487,58.513,57.377,42.623,24.01,224,0.875,bicubic,-51.383,-41.083,+145
+swin_tiny_patch4_window7_224,41.457,58.543,57.303,42.697,28.29,224,0.900,bicubic,-53.163,-41.727,-72
+halo2botnet50ts_256,41.457,58.543,56.210,43.790,22.64,256,0.950,bicubic,-53.573,-42.960,-135
+resnetv2_50,41.390,58.610,56.763,43.237,25.55,224,0.950,bicubic,-52.900,-42.167,-27
+swinv2_tiny_window8_256,41.387,58.613,57.117,42.883,28.35,256,0.900,bicubic,-53.643,-41.913,-139
+cait_xxs24_224,41.383,58.617,57.527,42.473,11.96,224,1.000,bicubic,-52.107,-41.333,+77
+tv_resnet152,41.327,58.673,57.520,42.480,60.19,224,0.875,bilinear,-51.913,-41.230,+105
+cs3darknet_l,41.290,58.710,57.353,42.647,21.16,288,0.950,bicubic,-53.390,-41.867,-86
+gcresnext50ts,41.270,58.730,57.140,42.860,15.67,256,0.900,bicubic,-53.140,-41.850,-49
+xception71,41.270,58.730,55.873,44.127,42.34,299,0.903,bicubic,-52.620,-43.017,+15
+dpn92,41.267,58.733,56.333,43.667,37.67,224,0.875,bicubic,-52.923,-42.687,-19
+adv_inception_v3,41.263,58.737,56.317,43.683,23.83,299,0.875,bicubic,-51.747,-42.513,+119
+gernet_s,41.247,58.753,58.830,41.170,8.17,224,0.875,bilinear,-51.193,-39.710,+165
+resnetv2_50d_evos,41.133,58.867,56.050,43.950,25.59,288,0.950,bicubic,-53.997,-43.150,-170
+resnetblur50,41.053,58.947,57.077,42.923,25.56,224,0.875,bicubic,-52.657,-41.713,+35
+nf_regnet_b1,41.013,58.987,58.120,41.880,10.22,288,0.900,bicubic,-52.867,-40.970,+13
+gluon_resnet50_v1d,40.970,59.030,57.137,42.863,25.58,224,0.875,bicubic,-52.560,-41.573,+60
+fbnetv3_b.ra2_in1k,40.947,59.053,58.653,41.347,8.60,256,0.950,bilinear,-52.703,-40.037,+40
+gluon_inception_v3,40.903,59.097,55.613,44.387,23.83,299,0.875,bicubic,-52.637,-43.217,+56
+cs3darknet_focus_l,40.890,59.110,56.637,43.363,21.15,288,0.950,bicubic,-53.900,-42.513,-119
+ese_vovnet39b,40.867,59.133,56.950,43.050,24.57,224,0.875,bicubic,-52.983,-41.950,+11
+levit_192,40.847,59.153,56.687,43.313,10.95,224,0.900,bicubic,-52.863,-42.133,+31
+regnety_320,40.813,59.187,56.117,43.883,145.05,224,0.875,bicubic,-53.707,-43.053,-74
+resnet34d,40.810,59.190,56.530,43.470,21.82,224,0.875,bicubic,-51.830,-41.890,+139
+resnetv2_50d_gn,40.777,59.223,56.210,43.790,25.57,288,0.950,bicubic,-54.323,-42.850,-172
+maxvit_rmlp_pico_rw_256,40.773,59.227,55.210,44.790,7.52,256,0.950,bicubic,-53.447,-43.790,-39
+xception,40.763,59.237,56.387,43.613,22.86,299,0.897,bicubic,-52.877,-42.383,+33
+lamhalobotnet50ts_256,40.747,59.253,56.090,43.910,22.57,256,0.950,bicubic,-54.033,-42.890,-124
+resnet50_gn,40.737,59.263,55.743,44.257,25.56,224,0.940,bicubic,-53.443,-43.177,-36
+skresnext50_32x4d,40.700,59.300,56.023,43.977,27.48,224,0.875,bicubic,-53.250,-42.797,-11
+vit_base_patch32_384.augreg_in1k,40.700,59.300,55.187,44.813,88.30,384,1.000,bicubic,-52.460,-43.423,+87
+gluon_resnet101_v1b,40.683,59.317,56.117,43.883,44.55,224,0.875,bicubic,-53.077,-42.583,+13
+hrnet_w40,40.660,59.340,56.753,43.247,57.56,224,0.875,bilinear,-53.050,-42.057,+18
+resmlp_24_224,40.653,59.347,56.573,43.427,30.02,224,0.875,bicubic,-52.787,-42.237,+57
+repvgg_b1,40.593,59.407,57.837,42.163,57.42,224,0.875,bilinear,-52.817,-40.953,+61
+halonet50ts,40.580,59.420,55.177,44.823,22.73,256,0.940,bicubic,-54.120,-44.063,-117
+tf_efficientnet_lite3.in1k,40.563,59.437,56.477,43.523,8.20,300,0.904,bilinear,-53.567,-42.493,-38
+tresnet_m_448,40.530,59.470,56.700,43.300,31.39,448,0.875,bilinear,-54.130,-42.390,-113
+mobilevitv2_175,40.530,59.470,56.280,43.720,14.25,256,0.888,bicubic,-53.700,-42.650,-53
+xcit_tiny_12_p8_224,40.530,59.470,55.630,44.370,6.71,224,1.000,bicubic,-53.830,-43.470,-71
+pit_xs_224,40.497,59.503,56.530,43.470,10.62,224,0.900,bicubic,-52.413,-42.160,+97
+dla169,40.493,59.507,57.263,42.737,53.39,224,0.875,bilinear,-53.307,-41.647,-1
+repvgg_b2,40.467,59.533,57.780,42.220,89.02,224,0.875,bilinear,-53.123,-41.060,+24
+resnetaa50,40.467,59.533,56.007,43.993,25.56,288,1.000,bicubic,-54.403,-43.113,-151
+vit_base_patch16_384.augreg_in1k,40.460,59.540,53.240,46.760,86.86,384,1.000,bicubic,-53.980,-45.780,-88
+regnetx_320,40.443,59.557,55.660,44.340,107.81,224,0.875,bicubic,-53.767,-43.290,-58
+coat_mini,40.420,59.580,55.167,44.833,10.34,224,0.900,bicubic,-54.350,-43.913,-141
+skresnet34,40.397,59.603,56.737,43.263,22.28,224,0.875,bicubic,-52.173,-41.683,+120
+efficientnet_el_pruned.in1k,40.390,59.610,56.903,43.097,10.59,300,0.904,bicubic,-53.700,-42.077,-47
+resnet50,40.387,59.613,54.673,45.327,25.56,224,0.950,bicubic,-53.533,-43.797,-26
+efficientnet_b2_pruned.in1k,40.383,59.617,56.537,43.463,8.31,260,0.890,bicubic,-53.417,-42.303,-11
+wide_resnet101_2,40.360,59.640,55.780,44.220,126.89,224,0.875,bilinear,-53.370,-43.030,-6
+coat_lite_mini,40.360,59.640,55.717,44.283,11.01,224,0.900,bicubic,-53.090,-42.933,+34
+legacy_seresnext101_32x4d,40.360,59.640,54.817,45.183,48.96,224,0.875,bilinear,-53.770,-43.973,-55
+sebotnet33ts_256,40.340,59.660,53.180,46.820,13.70,256,0.940,bicubic,-53.990,-45.400,-80
+tf_efficientnet_b0.ap_in1k,40.337,59.663,56.787,43.213,5.29,224,0.875,bicubic,-52.273,-41.583,+109
+regnetx_160,40.270,59.730,56.050,43.950,54.28,224,0.875,bicubic,-53.610,-42.750,-30
+densenet201,40.267,59.733,56.710,43.290,20.01,224,0.875,bicubic,-52.423,-41.940,+99
+resnext50d_32x4d,40.170,59.830,55.487,44.513,25.05,224,0.875,bicubic,-53.640,-43.253,-20
+eca_resnet33ts,40.137,59.863,57.003,42.997,19.68,256,0.900,bicubic,-53.723,-41.867,-30
+mobilevitv2_200,40.130,59.870,55.510,44.490,18.45,256,0.888,bicubic,-54.380,-43.450,-110
+darknetaa53,40.117,59.883,55.783,44.217,36.02,288,1.000,bilinear,-54.093,-43.247,-72
+vit_base_patch16_224.sam,40.097,59.903,55.430,44.570,86.57,224,0.900,bicubic,-53.793,-43.520,-37
+hrnet_w48,40.093,59.907,56.640,43.360,77.47,224,0.875,bilinear,-53.937,-42.360,-55
+legacy_seresnet152,40.043,59.957,55.820,44.180,66.82,224,0.875,bilinear,-53.397,-42.990,+24
+hrnet_w30,40.030,59.970,57.093,42.907,37.71,224,0.875,bilinear,-53.340,-41.737,+34
+regnetx_080,40.000,60.000,55.977,44.023,39.57,224,0.875,bicubic,-53.790,-42.933,-25
+regnetz_b16,40.000,60.000,55.627,44.373,9.72,288,0.940,bicubic,-54.680,-43.533,-145
+tf_efficientnet_b1.aa_in1k,39.977,60.023,56.137,43.863,7.79,240,0.882,bicubic,-53.733,-42.663,-17
+gluon_resnet101_v1c,39.953,60.047,55.300,44.700,44.57,224,0.875,bicubic,-53.737,-43.460,-16
+convnext_pico_ols.d1_in1k,39.870,60.130,55.620,44.380,9.06,288,1.000,bicubic,-54.160,-43.120,-60
+resmlp_12_distilled_224,39.843,60.157,57.440,42.560,15.35,224,0.875,bicubic,-53.027,-41.190,+73
+seresnet33ts,39.827,60.173,56.527,43.473,19.78,256,0.900,bicubic,-54.443,-42.423,-92
+tf_efficientnetv2_b0.in1k,39.787,60.213,56.283,43.717,7.14,224,0.875,bicubic,-53.273,-42.417,+50
+lambda_resnet50ts,39.733,60.267,54.337,45.663,21.54,256,0.950,bicubic,-54.817,-44.803,-128
+darknet53,39.723,60.277,55.293,44.707,41.61,288,1.000,bicubic,-54.647,-43.757,-110
+res2net101_26w_4s,39.717,60.283,54.550,45.450,45.21,224,0.875,bilinear,-53.803,-44.050,+1
+regnetx_120,39.687,60.313,55.633,44.367,46.11,224,0.875,bicubic,-54.583,-43.557,-100
+hrnet_w44,39.677,60.323,55.333,44.667,67.06,224,0.875,bilinear,-53.943,-43.367,-15
+vit_small_patch32_224.augreg_in21k_ft_in1k,39.670,60.330,55.253,44.747,22.88,224,0.900,bicubic,-52.480,-43.257,+113
+densenet161,39.620,60.380,56.133,43.867,28.68,224,0.875,bicubic,-53.280,-42.677,+60
+resmlp_big_24_224,39.620,60.380,54.817,45.183,129.14,224,0.875,bicubic,-54.640,-44.003,-100
+mixnet_xl.ra_in1k,39.617,60.383,55.887,44.113,11.90,224,0.875,bicubic,-54.613,-43.073,-97
+vit_small_patch16_384.augreg_in1k,39.617,60.383,54.253,45.747,22.20,384,1.000,bicubic,-55.003,-44.727,-154
+xception41,39.610,60.390,55.037,44.963,26.97,299,0.903,bicubic,-53.870,-43.713,-1
+res2net50_26w_8s,39.603,60.397,54.550,45.450,48.40,224,0.875,bilinear,-53.847,-44.230,+1
+tf_efficientnetv2_b1.in1k,39.570,60.430,55.343,44.657,8.14,240,0.882,bicubic,-54.140,-43.457,-38
+dla102x,39.553,60.447,56.323,43.677,26.31,224,0.875,bilinear,-53.977,-42.527,-12
+xcit_tiny_12_p16_224,39.553,60.447,55.027,44.973,6.72,224,1.000,bicubic,-52.907,-43.463,+86
+gcresnet33ts,39.550,60.450,55.830,44.170,19.88,256,0.900,bicubic,-54.270,-43.100,-52
+sehalonet33ts,39.550,60.450,54.020,45.980,13.69,256,0.940,bicubic,-54.970,-44.740,-141
+convnext_pico.d1_in1k,39.503,60.497,55.323,44.677,9.05,288,0.950,bicubic,-54.527,-43.617,-82
+rexnet_130,39.487,60.513,56.640,43.360,7.56,224,0.875,bicubic,-54.183,-42.070,-37
+hrnet_w32,39.463,60.537,56.123,43.877,41.23,224,0.875,bilinear,-53.487,-42.447,+42
+resnetv2_50x1_bitm,39.440,60.560,57.847,42.153,25.55,448,1.000,bilinear,-55.290,-41.343,-185
+levit_128,39.433,60.567,55.350,44.650,9.21,224,0.900,bicubic,-53.617,-43.340,+29
+densenetblur121d,39.380,60.620,56.640,43.360,8.00,224,0.875,bicubic,-53.020,-41.830,+85
+regnety_120,39.347,60.653,55.277,44.723,51.82,224,0.875,bicubic,-54.663,-43.743,-84
+mobilevitv2_150,39.333,60.667,55.210,44.790,10.59,256,0.888,bicubic,-54.717,-43.690,-91
+tv_resnet101,39.307,60.693,55.803,44.197,44.55,224,0.875,bilinear,-53.573,-42.857,+44
+tf_efficientnet_el.in1k,39.303,60.697,55.387,44.613,10.59,300,0.904,bicubic,-55.057,-43.653,-133
+tf_inception_v3,39.237,60.763,54.303,45.697,23.83,299,0.875,bicubic,-53.963,-44.177,+11
+gluon_resnet50_v1s,39.233,60.767,55.010,44.990,25.68,224,0.875,bicubic,-54.357,-44.060,-35
+tf_efficientnetv2_b2.in1k,39.180,60.820,54.570,45.430,10.10,260,0.890,bicubic,-54.890,-44.350,-99
+densenet169,39.167,60.833,55.843,44.157,14.15,224,0.875,bicubic,-53.133,-42.747,+81
+legacy_seresnet101,39.037,60.963,55.003,44.997,49.33,224,0.875,bilinear,-54.223,-43.737,0
+efficientnet_b1_pruned.in1k,39.010,60.990,55.647,44.353,6.33,240,0.882,bicubic,-53.970,-42.973,+28
+repvgg_b1g4,38.990,61.010,56.350,43.650,39.97,224,0.875,bilinear,-54.040,-42.360,+19
+crossvit_9_dagger_240,38.977,61.023,54.850,45.150,8.78,240,0.875,bicubic,-53.783,-43.810,+47
+inception_v3,38.963,61.037,53.850,46.150,23.83,299,0.875,bicubic,-53.937,-44.870,+33
+dpn68,38.933,61.067,54.933,45.067,12.61,224,0.875,bicubic,-53.307,-43.677,+78
+resnet33ts,38.920,61.080,55.580,44.420,19.68,256,0.900,bicubic,-54.710,-43.220,-49
+legacy_seresnext50_32x4d,38.877,61.123,54.593,45.407,27.56,224,0.875,bilinear,-54.553,-44.207,-20
+dla102,38.833,61.167,55.323,44.677,33.27,224,0.875,bilinear,-54.427,-43.457,-9
+densenet121,38.783,61.217,56.273,43.727,7.98,224,0.875,bicubic,-53.157,-42.127,+87
+resnet32ts,38.770,61.230,55.813,44.187,17.96,256,0.900,bicubic,-54.800,-42.937,-45
+res2net50_14w_8s,38.710,61.290,54.077,45.923,25.06,224,0.875,bilinear,-54.320,-44.623,+12
+regnetx_040,38.703,61.297,55.340,44.660,22.12,224,0.875,bicubic,-54.977,-43.600,-63
+res2net50_26w_6s,38.687,61.313,53.743,46.257,37.05,224,0.875,bilinear,-54.903,-45.007,-50
+regnetx_032,38.680,61.320,55.157,44.843,15.30,224,0.875,bicubic,-54.570,-43.693,-11
+selecsls60,38.623,61.377,55.630,44.370,30.67,224,0.875,bicubic,-54.387,-42.860,+9
+dla60x,38.617,61.383,55.383,44.617,17.35,224,0.875,bilinear,-54.573,-43.327,-8
+tf_efficientnet_b0.aa_in1k,38.600,61.400,55.957,44.043,5.29,224,0.875,bicubic,-53.800,-42.453,+58
+dla60_res2net,38.590,61.410,54.560,45.440,20.85,224,0.875,bilinear,-54.790,-44.300,-25
+selecsls60b,38.573,61.427,55.307,44.693,32.77,224,0.875,bicubic,-54.927,-43.473,-45
+repvgg_a2,38.563,61.437,55.770,44.230,28.21,224,0.875,bilinear,-54.117,-42.750,+33
+hardcorenas_f,38.500,61.500,55.657,44.343,8.20,224,0.875,bilinear,-54.480,-42.873,+7
+dla60_res2next,38.450,61.550,54.950,45.050,17.03,224,0.875,bilinear,-55.120,-43.850,-58
+resmlp_12_224,38.443,61.557,56.327,43.673,15.35,224,0.875,bicubic,-53.677,-42.243,+66
+regnetx_064,38.430,61.570,54.990,45.010,26.21,224,0.875,bicubic,-55.200,-44.060,-69
+tf_efficientnet_cc_b0_4e.in1k,38.413,61.587,55.150,44.850,13.31,224,0.875,bicubic,-54.427,-43.290,+18
+gluon_resnet50_v1b,38.407,61.593,54.833,45.167,25.56,224,0.875,bicubic,-54.153,-43.717,+38
+hrnet_w18,38.277,61.723,55.643,44.357,21.30,224,0.875,bilinear,-54.483,-42.967,+21
+tinynet_a.in1k,38.220,61.780,55.177,44.823,6.19,192,0.875,bicubic,-54.580,-43.383,+18
+poolformer_s12,38.173,61.827,56.187,43.813,11.92,224,0.900,bicubic,-54.297,-42.163,+39
+mixnet_l.ft_in1k,38.160,61.840,54.757,45.243,7.33,224,0.875,bicubic,-55.100,-43.943,-29
+hardcorenas_e,38.137,61.863,55.173,44.827,8.07,224,0.875,bilinear,-54.813,-43.667,0
+efficientnet_b1.ft_in1k,38.087,61.913,54.010,45.990,7.79,256,1.000,bicubic,-54.943,-44.810,-10
+coat_lite_tiny,38.070,61.930,53.453,46.547,5.72,224,0.900,bicubic,-54.780,-45.187,+8
+gmixer_24_224,38.050,61.950,52.083,47.917,24.72,224,0.875,bicubic,-54.630,-46.497,+20
+vit_base_patch16_224.augreg_in1k,38.037,61.963,50.687,49.313,86.57,224,0.900,bicubic,-55.313,-48.053,-38
+resnetrs50,37.957,62.043,53.310,46.690,35.69,224,0.910,bicubic,-56.063,-45.540,-129
+hardcorenas_c,37.883,62.117,55.717,44.283,5.52,224,0.875,bilinear,-54.447,-42.623,+41
+mobilevitv2_125,37.877,62.123,54.060,45.940,7.48,256,0.888,bicubic,-55.583,-44.800,-59
+gluon_resnet50_v1c,37.843,62.157,54.123,45.877,25.58,224,0.875,bicubic,-55.067,-44.587,-6
+res2net50_26w_4s,37.827,62.173,53.073,46.927,25.70,224,0.875,bilinear,-55.353,-45.597,-31
+efficientnet_es.ra_in1k,37.770,62.230,54.967,45.033,5.44,224,0.875,bicubic,-55.140,-43.813,-7
+resnest14d,37.767,62.233,56.470,43.530,10.61,224,0.875,bilinear,-53.363,-41.860,+84
+tv_resnext50_32x4d,37.750,62.250,54.113,45.887,25.03,224,0.875,bilinear,-55.150,-44.217,-7
+convnext_femto.d1_in1k,37.740,62.260,54.123,45.877,5.22,288,0.950,bicubic,-55.700,-44.727,-60
+resnet26t,37.707,62.293,55.257,44.743,16.01,256,0.940,bicubic,-54.973,-43.023,+7
+ecaresnet26t,37.650,62.350,54.350,45.650,16.01,320,0.950,bicubic,-56.290,-44.570,-130
+vit_base_patch32_224.augreg_in1k,37.557,62.443,51.813,48.187,88.22,224,0.900,bicubic,-53.033,-45.907,+92
+hardcorenas_d,37.550,62.450,54.723,45.277,7.50,224,0.875,bilinear,-55.050,-43.707,+12
+res2next50,37.477,62.523,52.853,47.147,24.67,224,0.875,bilinear,-55.673,-45.807,-35
+resnet34,37.443,62.557,54.297,45.703,21.80,224,0.875,bilinear,-53.757,-43.753,+70
+pit_ti_distilled_224,37.337,62.663,55.137,44.863,5.10,224,0.900,bicubic,-53.563,-43.083,+83
+lambda_resnet26t,37.300,62.700,53.570,46.430,10.96,256,0.940,bicubic,-56.100,-45.170,-61
+convnext_femto_ols.d1_in1k,37.260,62.740,53.053,46.947,5.23,288,0.950,bicubic,-56.130,-45.857,-61
+hardcorenas_b,37.243,62.757,55.073,44.927,5.18,224,0.875,bilinear,-54.697,-43.207,+41
+mobilenetv3_large_100.miil_in21k_ft_in1k,37.210,62.790,53.513,46.487,5.48,224,0.875,bilinear,-55.040,-44.737,+27
+eca_halonext26ts,37.187,62.813,53.113,46.887,10.76,256,0.940,bicubic,-56.363,-45.467,-89
+cs3darknet_focus_m,37.140,62.860,53.910,46.090,9.30,288,0.950,bicubic,-55.970,-44.830,-41
+res2net50_48w_2s,37.117,62.883,53.333,46.667,25.29,224,0.875,bilinear,-55.673,-45.137,-11
+lambda_resnet26rpt_256,37.093,62.907,53.860,46.140,10.99,256,0.940,bicubic,-56.337,-45.020,-73
+vit_small_patch16_224.augreg_in1k,37.093,62.907,51.533,48.467,22.05,224,0.900,bicubic,-56.337,-47.247,-72
+dla60,37.073,62.927,54.200,45.800,22.04,224,0.875,bilinear,-55.597,-44.450,-5
+rexnet_100,37.063,62.937,54.020,45.980,4.80,224,0.875,bicubic,-55.787,-44.600,-21
+bat_resnext26ts,37.063,62.937,53.743,46.257,10.73,256,0.900,bicubic,-56.037,-44.977,-45
+regnety_016,37.017,62.983,54.093,45.907,11.20,224,0.875,bicubic,-55.983,-44.587,-38
+tf_mixnet_l.in1k,36.987,63.013,52.583,47.417,7.33,224,0.875,bicubic,-56.053,-45.957,-45
+botnet26t_256,36.970,63.030,53.083,46.917,12.49,256,0.950,bicubic,-56.480,-45.617,-84
+legacy_seresnet50,36.873,63.127,53.487,46.513,28.09,224,0.875,bilinear,-55.797,-45.143,-12
+halonet26t,36.850,63.150,52.290,47.710,12.48,256,0.950,bicubic,-56.760,-46.350,-108
+tv_densenet121,36.810,63.190,54.033,45.967,7.98,224,0.875,bicubic,-54.590,-44.217,+45
+tf_efficientnet_lite2.in1k,36.807,63.193,53.320,46.680,6.09,260,0.890,bicubic,-55.783,-45.230,-9
+mobilenetv2_120d.ra_in1k,36.780,63.220,54.047,45.953,5.83,224,0.875,bicubic,-55.830,-44.463,-13
+tf_efficientnet_lite1.in1k,36.737,63.263,53.590,46.410,5.42,240,0.882,bicubic,-55.573,-44.900,+7
+regnetx_016,36.683,63.317,53.297,46.703,9.19,224,0.875,bicubic,-55.857,-45.253,-8
+eca_botnext26ts_256,36.670,63.330,52.467,47.533,10.59,256,0.950,bicubic,-56.690,-46.233,-79
+hardcorenas_a,36.640,63.360,54.910,45.090,5.26,224,0.875,bilinear,-54.980,-43.260,+30
+levit_128s,36.620,63.380,53.117,46.883,7.78,224,0.900,bicubic,-54.880,-45.283,+33
+efficientnet_b0.ra_in1k,36.600,63.400,53.497,46.503,5.29,224,0.875,bicubic,-55.880,-45.183,-11
+vit_base_patch32_224.sam,36.550,63.450,53.040,46.960,88.22,224,0.900,bicubic,-53.310,-44.560,+74
+xcit_nano_12_p8_224_dist,36.530,63.470,52.880,47.120,3.05,224,1.000,bicubic,-55.900,-45.650,-6
+cs3darknet_m,36.467,63.533,53.217,46.783,9.31,288,0.950,bicubic,-56.813,-45.503,-82
+mobilevitv2_100,36.387,63.613,53.070,46.930,4.90,256,0.888,bicubic,-56.753,-45.690,-66
+tf_efficientnet_em.in1k,36.380,63.620,52.840,47.160,6.90,240,0.882,bicubic,-56.790,-45.830,-72
+skresnet18,36.320,63.680,54.197,45.803,11.96,224,0.875,bicubic,-53.840,-43.583,+65
+repvgg_b0,36.287,63.713,54.057,45.943,15.82,224,0.875,bilinear,-55.393,-44.393,+18
+tv_resnet50,36.177,63.823,52.803,47.197,25.56,224,0.875,bilinear,-55.963,-45.617,+3
+xcit_nano_12_p16_384_dist,36.153,63.847,53.250,46.750,3.05,384,1.000,bicubic,-55.957,-45.270,+4 +legacy_seresnet34,36.143,63.857,52.553,47.447,21.96,224,0.875,bilinear,-55.337,-45.767,+24 +coat_tiny,36.123,63.877,51.063,48.937,5.50,224,0.900,bicubic,-57.387,-47.627,-115 +tv_resnet34,36.087,63.913,53.533,46.467,21.80,224,0.875,bilinear,-54.203,-44.447,+57 +deit_tiny_distilled_patch16_224,36.023,63.977,54.240,45.760,5.91,224,0.900,bicubic,-55.077,-44.030,+39 +mobilenetv2_140.ra_in1k,36.000,64.000,53.943,46.057,6.11,224,0.875,bicubic,-56.030,-44.307,+1 +tf_efficientnet_lite0.in1k,35.930,64.070,53.480,46.520,4.65,224,0.875,bicubic,-55.370,-44.610,+24 +seresnext26ts,35.833,64.167,53.913,46.087,10.39,256,0.900,bicubic,-56.997,-44.687,-49 +selecsls42b,35.813,64.187,52.487,47.513,32.46,224,0.875,bicubic,-56.667,-45.953,-27 +convnext_atto.d2_in1k,35.787,64.213,52.320,47.680,3.70,288,0.950,bicubic,-56.973,-46.190,-46 +xcit_nano_12_p8_384_dist,35.770,64.230,52.290,47.710,3.05,384,1.000,bicubic,-57.480,-46.440,-94 +gluon_resnet34_v1b,35.760,64.240,52.187,47.813,21.80,224,0.875,bicubic,-55.340,-45.993,+33 +dla34,35.643,64.357,52.783,47.217,15.74,224,0.875,bilinear,-55.597,-45.397,+21 +mixnet_m.ft_in1k,35.640,64.360,52.430,47.570,5.01,224,0.875,bicubic,-56.630,-45.920,-18 +efficientnet_lite0.ra_in1k,35.620,64.380,53.657,46.343,4.65,224,0.875,bicubic,-55.640,-44.593,+18 +ssl_resnet18,35.597,64.403,53.740,46.260,11.69,224,0.875,bilinear,-55.103,-44.280,+36 +mobilenetv3_rw.rmsp_in1k,35.547,64.453,53.713,46.287,5.48,224,0.875,bicubic,-56.003,-44.557,+6 +efficientnet_es_pruned.in1k,35.390,64.610,52.850,47.150,5.44,224,0.875,bicubic,-56.310,-45.570,-2 +convnext_atto_ols.a2_in1k,35.387,64.613,51.390,48.610,3.70,288,0.950,bicubic,-57.593,-47.290,-77 +mobilenetv2_110d.ra_in1k,35.293,64.707,52.830,47.170,4.52,224,0.875,bicubic,-56.057,-45.360,+10 +tf_mixnet_m.in1k,35.180,64.820,50.987,49.013,5.01,224,0.875,bicubic,-57.020,-47.433,-20 +hrnet_w18_small_v2,35.173,64.827,52.440,47.560,15.60,224,0.875,bilinear,-55.997,-45.900,+17 +resnet18d,35.127,64.873,52.890,47.110,11.71,224,0.875,bicubic,-54.863,-44.940,+42 +xcit_nano_12_p16_224_dist,35.123,64.877,52.557,47.443,3.05,224,1.000,bicubic,-55.027,-45.203,+40 +eca_resnext26ts,35.050,64.950,52.303,47.697,10.30,256,0.900,bicubic,-57.370,-46.317,-36 +convit_tiny,35.047,64.953,51.787,48.213,5.71,224,0.875,bicubic,-55.483,-46.423,+31 +resnext26ts,35.040,64.960,53.423,46.577,10.30,256,0.900,bicubic,-57.170,-44.827,-27 +gcresnext26ts,34.933,65.067,51.677,48.323,10.48,256,0.900,bicubic,-57.527,-46.953,-43 +tinynet_b.in1k,34.873,65.127,52.017,47.983,3.73,188,0.875,bicubic,-56.247,-46.223,+14 +ese_vovnet19b_dw,34.840,65.160,52.030,47.970,6.54,224,0.875,bicubic,-57.170,-46.480,-22 +regnety_008,34.807,65.193,51.743,48.257,6.26,224,0.875,bicubic,-57.093,-46.677,-18 +pit_ti_224,34.670,65.330,52.170,47.830,4.85,224,0.900,bicubic,-55.750,-45.840,+27 +mobilenetv3_large_100.ra_in1k,34.603,65.397,52.860,47.140,5.48,224,0.875,bicubic,-56.877,-45.340,-8 +crossvit_9_240,34.590,65.410,51.783,48.217,8.55,240,0.875,bicubic,-56.460,-46.527,+13 +seresnext26d_32x4d,34.543,65.457,51.543,48.457,16.81,224,0.875,bicubic,-57.897,-46.957,-49 +seresnext26t_32x4d,34.540,65.460,51.377,48.623,16.81,224,0.875,bicubic,-58.280,-47.183,-76 +mixer_b16_224,34.427,65.573,48.087,51.913,59.88,224,0.875,bicubic,-56.713,-49.313,+3 +pvt_v2_b0,34.393,65.607,53.093,46.907,3.67,224,0.900,bicubic,-54.587,-44.597,+42 +resnet26d,34.273,65.727,51.687,48.313,16.01,224,0.875,bicubic,-57.957,-46.763,-40 
+tf_efficientnet_es.in1k,34.263,65.737,51.350,48.650,5.44,224,0.875,bicubic,-57.837,-47.090,-34 +fbnetc_100.rmsp_in1k,34.253,65.747,51.180,48.820,5.57,224,0.875,bilinear,-57.017,-46.650,-9 +regnety_006,34.150,65.850,51.277,48.723,6.06,224,0.875,bicubic,-57.420,-46.903,-21 +tf_mobilenetv3_large_100.in1k,33.947,66.053,51.490,48.510,5.48,224,0.875,bilinear,-57.473,-46.770,-15 +semnasnet_075.rmsp_in1k,33.790,66.210,52.427,47.573,2.91,224,0.875,bicubic,-56.410,-45.543,+18 +regnetx_008,33.770,66.230,50.547,49.453,7.26,224,0.875,bicubic,-57.410,-47.833,-7 +mnasnet_100.rmsp_in1k,33.763,66.237,51.170,48.830,4.38,224,0.875,bicubic,-57.437,-47.070,-10 +lcnet_100.ra2_in1k,33.750,66.250,52.103,47.897,2.95,224,0.875,bicubic,-55.210,-45.287,+35 +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,33.650,66.350,50.683,49.317,6.36,384,1.000,bicubic,-58.080,-47.747,-33 +mobilevit_s,33.637,66.363,49.277,50.723,5.58,256,0.900,bicubic,-59.523,-49.493,-123 +xcit_nano_12_p8_224,33.583,66.417,50.220,49.780,3.05,224,1.000,bicubic,-57.537,-47.850,-8 +vit_tiny_patch16_384.augreg_in21k_ft_in1k,33.550,66.450,51.077,48.923,5.79,384,1.000,bicubic,-59.870,-47.753,-147 +semnasnet_100.rmsp_in1k,33.520,66.480,50.787,49.213,3.89,224,0.875,bicubic,-58.140,-47.483,-33 +resnet26,33.500,66.500,50.927,49.073,16.00,224,0.875,bicubic,-57.940,-47.353,-26 +mixnet_s.ft_in1k,33.480,66.520,50.997,49.003,4.13,224,0.875,bicubic,-58.300,-47.303,-40 +spnasnet_100.rmsp_in1k,33.477,66.523,51.267,48.733,4.42,224,0.875,bilinear,-57.133,-46.683,-2 +mobilevitv2_075,33.360,66.640,50.100,49.900,2.87,256,0.888,bicubic,-58.620,-48.200,-47 +crossvit_tiny_240,33.357,66.643,49.900,50.100,7.01,240,0.875,bicubic,-57.183,-48.090,-1 +vgg19_bn,33.230,66.770,50.803,49.197,143.68,224,0.875,bilinear,-57.760,-47.307,-10 +ghostnet_100,33.207,66.793,51.163,48.837,5.18,224,0.875,bilinear,-57.233,-46.667,-1 +regnetx_006,33.157,66.843,50.250,49.750,6.20,224,0.875,bicubic,-57.603,-47.850,-9 +edgenext_x_small,33.113,66.887,48.977,51.023,2.34,288,1.000,bicubic,-58.457,-49.453,-39 +resnet18,33.067,66.933,51.170,48.830,11.69,224,0.875,bilinear,-55.083,-45.950,+25 +xcit_nano_12_p16_224,32.963,67.037,49.987,50.013,3.05,224,1.000,bicubic,-55.997,-47.373,+18 +legacy_seresnext26_32x4d,32.757,67.243,49.237,50.763,16.79,224,0.875,bicubic,-59.813,-49.283,-86 +hrnet_w18_small,32.667,67.333,50.587,49.413,13.19,224,0.875,bilinear,-57.213,-47.313,0 +deit_tiny_patch16_224,32.667,67.333,50.273,49.727,5.72,224,0.900,bicubic,-56.953,-47.687,+6 +legacy_seresnet18,32.600,67.400,50.340,49.660,11.78,224,0.875,bicubic,-56.670,-47.340,+8 +mobilenetv2_100.ra_in1k,32.523,67.477,50.800,49.200,3.50,224,0.875,bicubic,-57.307,-47.030,0 +regnetx_004,32.517,67.483,49.343,50.657,5.16,224,0.875,bicubic,-56.943,-48.427,+3 gluon_resnet18_v1b,32.407,67.593,49.727,50.273,11.69,224,0.875,bicubic,-56.253,-47.373,+13 -regnety_004,32.340,67.660,49.450,50.550,4.34,224,0.875,bicubic,-58.430,-48.630,-18 -tf_mixnet_s,32.190,67.810,48.503,51.497,4.13,224,0.875,bicubic,-59.500,-49.737,-53 -vit_tiny_patch16_224,32.020,67.980,49.023,50.977,5.72,224,0.900,bicubic,-59.890,-49.317,-59 -tf_mobilenetv3_large_075,31.857,68.143,49.113,50.887,3.99,224,0.875,bilinear,-58.473,-48.767,-13 -tf_mobilenetv3_large_minimal_100,31.593,68.407,49.340,50.660,3.92,224,0.875,bilinear,-57.587,-47.980,+3 -vit_tiny_r_s16_p8_224,30.797,69.203,47.643,52.357,6.34,224,0.900,bicubic,-58.553,-50.057,-2 -tinynet_c,30.510,69.490,48.490,51.510,2.46,184,0.875,bicubic,-57.910,-48.780,+7 
-lcnet_075,30.383,69.617,48.753,51.247,2.36,224,0.875,bicubic,-56.557,-47.777,+16 -vgg16_bn,30.360,69.640,47.263,52.737,138.37,224,0.875,bilinear,-60.180,-50.727,-22 -regnety_002,29.687,70.313,46.800,53.200,3.16,224,0.875,bicubic,-58.503,-50.640,+6 -resnet10t,29.610,70.390,47.837,52.163,5.44,224,0.950,bilinear,-57.120,-48.833,+14 -mobilevit_xs,29.597,70.403,46.040,53.960,2.32,256,0.900,bicubic,-61.603,-52.180,-43 -edgenext_xx_small,29.420,70.580,46.500,53.500,1.33,256,0.900,bicubic,-59.810,-50.760,-7 -mobilenetv3_small_100,29.050,70.950,47.190,52.810,2.54,224,0.875,bicubic,-57.130,-49.270,+12 -mnasnet_small,28.950,71.050,47.267,52.733,2.03,224,0.875,bicubic,-56.560,-48.713,+13 -vgg13_bn,28.893,71.107,46.737,53.263,133.05,224,0.875,bilinear,-60.317,-50.783,-9 -regnetx_002,28.847,71.153,45.420,54.580,2.68,224,0.875,bicubic,-58.533,-51.570,+3 -mobilenetv2_050,28.663,71.337,46.593,53.407,1.97,224,0.875,bicubic,-56.347,-49.027,+13 +regnety_004,32.333,67.667,49.453,50.547,4.34,224,0.875,bicubic,-58.447,-48.627,-21 +tf_mixnet_s.in1k,32.183,67.817,48.493,51.507,4.13,224,0.875,bicubic,-59.497,-49.747,-54 +vit_tiny_patch16_224.augreg_in21k_ft_in1k,32.023,67.977,49.017,50.983,5.72,224,0.900,bicubic,-59.907,-49.323,-61 +tf_mobilenetv3_large_075.in1k,31.867,68.133,49.110,50.890,3.99,224,0.875,bilinear,-58.453,-48.760,-14 +tf_mobilenetv3_large_minimal_100.in1k,31.597,68.403,49.337,50.663,3.92,224,0.875,bilinear,-57.583,-47.983,+2 +vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,30.807,69.193,47.657,52.343,6.34,224,0.900,bicubic,-58.533,-50.043,-2 +tinynet_c.in1k,30.507,69.493,48.487,51.513,2.46,184,0.875,bicubic,-57.913,-48.783,+7 +lcnet_075.ra2_in1k,30.367,69.633,48.760,51.240,2.36,224,0.875,bicubic,-56.573,-47.770,+16 +vgg16_bn,30.357,69.643,47.260,52.740,138.37,224,0.875,bilinear,-60.183,-50.680,-24 +edgenext_xx_small,29.730,70.270,46.493,53.507,1.33,288,1.000,bicubic,-60.050,-51.027,-11 +regnety_002,29.687,70.313,46.787,53.213,3.16,224,0.875,bicubic,-58.513,-50.643,+5 +resnet10t,29.613,70.387,47.843,52.157,5.44,224,0.950,bilinear,-57.077,-48.827,+13 +mobilevit_xs,29.590,70.410,46.003,53.997,2.32,256,0.900,bicubic,-61.600,-52.217,-46 +mobilenetv3_small_100.lamb_in1k,29.047,70.953,47.183,52.817,2.54,224,0.875,bicubic,-57.123,-49.287,+12 +mnasnet_small.lamb_in1k,28.950,71.050,47.270,52.730,2.03,224,0.875,bicubic,-56.560,-48.710,+13 +vgg13_bn,28.883,71.117,46.737,53.263,133.05,224,0.875,bilinear,-60.317,-50.793,-10 +regnetx_002,28.860,71.140,45.420,54.580,2.68,224,0.875,bicubic,-58.520,-51.570,+3 +mobilenetv2_050.lamb_in1k,28.677,71.323,46.600,53.400,1.97,224,0.875,bicubic,-56.313,-49.020,+13 vgg19,28.580,71.420,45.170,54.830,143.67,224,0.875,bilinear,-61.100,-52.380,-19 -mobilevitv2_050,28.560,71.440,45.193,54.807,1.37,256,0.888,bicubic,-60.490,-52.397,-10 -dla60x_c,28.437,71.563,46.213,53.787,1.32,224,0.875,bilinear,-58.693,-50.927,+1 -vgg11_bn,28.423,71.577,46.447,53.553,132.87,224,0.875,bilinear,-59.967,-50.823,-7 -resnet14t,28.097,71.903,45.297,54.703,10.08,224,0.950,bilinear,-61.013,-52.073,-14 -tinynet_d,27.963,72.037,45.863,54.137,2.34,152,0.875,bicubic,-57.457,-50.157,+6 +mobilevitv2_050,28.563,71.437,45.197,54.803,1.37,256,0.888,bicubic,-60.467,-52.393,-11 +dla60x_c,28.447,71.553,46.193,53.807,1.32,224,0.875,bilinear,-58.663,-50.947,+1 +vgg11_bn,28.423,71.577,46.453,53.547,132.87,224,0.875,bilinear,-59.967,-50.817,-7 +resnet14t,28.087,71.913,45.303,54.697,10.08,224,0.950,bilinear,-61.023,-52.067,-15 +tinynet_d.in1k,27.960,72.040,45.853,54.147,2.34,152,0.875,bicubic,-57.470,-50.157,+6 
vgg16,27.877,72.123,44.673,55.327,138.36,224,0.875,bilinear,-61.483,-52.847,-22 -tf_mobilenetv3_small_100,27.287,72.713,44.420,55.580,2.54,224,0.875,bilinear,-58.683,-51.980,+1 -mixer_l16_224,26.857,73.143,37.923,62.077,208.20,224,0.875,bicubic,-60.113,-56.127,-4 -mobilenetv3_small_075,26.533,73.467,43.887,56.113,2.04,224,0.875,bicubic,-57.587,-51.613,+6 -vgg11,26.533,73.467,43.460,56.540,132.86,224,0.875,bilinear,-60.807,-53.650,-8 -mobilevit_xxs,26.347,73.653,43.030,56.970,1.27,256,0.900,bicubic,-61.603,-54.150,-12 -vgg13,26.270,73.730,43.373,56.627,133.05,224,0.875,bilinear,-61.300,-53.747,-12 -lcnet_050,26.220,73.780,44.607,55.393,1.88,224,0.875,bicubic,-56.790,-50.403,+3 -dla46x_c,26.220,73.780,43.770,56.230,1.07,224,0.875,bilinear,-59.240,-52.680,-4 -tf_mobilenetv3_small_075,26.197,73.803,43.640,56.360,2.04,224,0.875,bilinear,-58.323,-52.250,-1 -dla46_c,25.497,74.503,43.790,56.210,1.30,224,0.875,bilinear,-59.163,-52.420,-3 -tf_mobilenetv3_small_minimal_100,25.097,74.903,42.923,57.077,2.04,224,0.875,bilinear,-57.593,-52.077,0 -tinynet_e,23.363,76.637,41.083,58.917,2.04,106,0.875,bicubic,-56.437,-52.897,0 -mobilenetv3_small_050,21.743,78.257,38.757,61.243,1.59,224,0.875,bicubic,-56.357,-54.253,0 +tf_mobilenetv3_small_100.in1k,27.297,72.703,44.420,55.580,2.54,224,0.875,bilinear,-58.663,-51.980,+1 +mixer_l16_224,26.853,73.147,37.923,62.077,208.20,224,0.875,bicubic,-60.117,-56.137,-4 +vgg11,26.533,73.467,43.460,56.540,132.86,224,0.875,bilinear,-60.807,-53.650,-7 +mobilenetv3_small_075.lamb_in1k,26.530,73.470,43.887,56.113,2.04,224,0.875,bicubic,-57.590,-51.613,+5 +mobilevit_xxs,26.340,73.660,43.033,56.967,1.27,256,0.900,bicubic,-61.610,-54.147,-12 +vgg13,26.267,73.733,43.370,56.630,133.05,224,0.875,bilinear,-61.303,-53.750,-12 +lcnet_050.ra2_in1k,26.217,73.783,44.577,55.423,1.88,224,0.875,bicubic,-56.783,-50.433,+2 +dla46x_c,26.217,73.783,43.780,56.220,1.07,224,0.875,bilinear,-59.263,-52.660,-3 +tf_mobilenetv3_small_075.in1k,26.200,73.800,43.637,56.363,2.04,224,0.875,bilinear,-58.330,-52.253,-1 +dla46_c,25.490,74.510,43.800,56.200,1.30,224,0.875,bilinear,-59.170,-52.400,-3 +tf_mobilenetv3_small_minimal_100.in1k,25.087,74.913,42.930,57.070,2.04,224,0.875,bilinear,-57.583,-52.070,0 +tinynet_e.in1k,23.363,76.637,41.080,58.920,2.04,106,0.875,bicubic,-56.447,-52.900,0 +mobilenetv3_small_050.lamb_in1k,21.740,78.260,38.760,61.240,1.59,224,0.875,bicubic,-56.360,-54.250,0 diff --git a/results/results-imagenet-real.csv b/results/results-imagenet-real.csv index c0bc6be6..d6ce74ee 100644 --- a/results/results-imagenet-real.csv +++ b/results/results-imagenet-real.csv @@ -1,669 +1,791 @@ model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation,top1_diff,top5_diff,rank_diff -beit_large_patch16_512,90.691,9.309,98.751,1.249,305.67,512,1.000,bicubic,+2.089,+0.095,0 -beit_large_patch16_384,90.610,9.390,98.764,1.236,305.00,384,1.000,bicubic,+2.204,+0.158,0 -volo_d5_512,90.610,9.390,98.698,1.302,296.09,512,1.150,bicubic,+3.570,+0.730,+11 -volo_d5_448,90.584,9.416,98.685,1.315,295.91,448,1.150,bicubic,+3.630,+0.745,+13 -tf_efficientnet_l2_ns,90.563,9.437,98.779,1.221,480.31,800,0.960,bicubic,+2.213,+0.129,-2 -tf_efficientnet_l2_ns_475,90.540,9.460,98.710,1.290,480.31,475,0.936,bicubic,+2.308,+0.164,-2 -volo_d4_448,90.507,9.492,98.591,1.409,193.41,448,1.150,bicubic,+3.715,+0.709,+14 -convnext_xlarge_384_in22ft1k,90.450,9.550,98.672,1.328,350.20,384,1.000,bicubic,+2.906,+0.186,-2 -swinv2_base_window12to24_192to384_22kft1k,90.401,9.599,98.740,1.260,87.92,384,1.000,bicubic,+3.293,+0.504,+3 
-beit_base_patch16_384,90.371,9.629,98.725,1.275,86.74,384,1.000,bicubic,+3.573,+0.589,+10 -convnext_large_384_in22ft1k,90.258,9.742,98.663,1.337,197.77,384,1.000,bicubic,+2.862,+0.297,-2 +eva_giant_patch14_336.clip_ft_in1k,91.054,8.946,98.597,1.403,"1,013.01",336,1.000,bicubic,+1.578,-0.227,+2 +eva_giant_patch14_560.m30m_ft_in22k_in1k,90.973,9.027,98.678,1.322,"1,014.45",560,1.000,bicubic,+1.177,-0.314,-1 +eva_giant_patch14_224.clip_ft_in1k,90.954,9.046,98.723,1.277,"1,012.56",224,1.000,bicubic,+1.854,+0.007,+2 +eva_large_patch14_336.in22k_ft_in1k,90.905,9.095,98.785,1.215,304.53,336,1.000,bicubic,+2.241,+0.065,+2 +eva_giant_patch14_336.m30m_ft_in22k_in1k,90.903,9.098,98.663,1.337,"1,013.01",336,1.000,bicubic,+1.335,-0.289,-3 +eva_large_patch14_336.in22k_ft_in22k_in1k,90.871,9.130,98.721,1.279,304.53,336,1.000,bicubic,+1.667,-0.129,-2 +beit_large_patch16_512.in22k_ft_in22k_in1k,90.687,9.313,98.751,1.249,305.67,512,1.000,bicubic,+2.089,+0.095,0 +beit_large_patch16_384.in22k_ft_in22k_in1k,90.610,9.390,98.766,1.234,305.00,384,1.000,bicubic,+2.206,+0.158,+3 +volo_d5_512,90.608,9.392,98.698,1.302,296.09,512,1.150,bicubic,+3.564,+0.730,+32 +volo_d5_448,90.584,9.416,98.685,1.315,295.91,448,1.150,bicubic,+3.630,+0.747,+35 +eva_large_patch14_196.in22k_ft_in22k_in1k,90.567,9.433,98.698,1.302,304.14,196,1.000,bicubic,+1.981,+0.042,-3 +tf_efficientnet_l2.ns_jft_in1k,90.563,9.437,98.779,1.221,480.31,800,0.960,bicubic,+2.211,+0.129,+1 +maxvit_base_tf_512.in21k_ft_in1k,90.561,9.439,98.700,1.300,119.88,512,1.000,bicubic,+2.349,+0.168,+6 +vit_large_patch14_clip_336.openai_ft_in12k_in1k,90.552,9.448,98.683,1.317,304.53,336,1.000,bicubic,+2.286,+0.151,+1 +tf_efficientnet_l2.ns_jft_in1k_475,90.537,9.463,98.710,1.290,480.31,475,0.936,bicubic,+2.303,+0.164,+2 +eva_large_patch14_196.in22k_ft_in1k,90.533,9.467,98.779,1.221,304.14,196,1.000,bicubic,+2.595,+0.287,+7 +volo_d4_448,90.510,9.490,98.591,1.409,193.41,448,1.150,bicubic,+3.720,+0.709,+34 +maxvit_xlarge_tf_512.in21k_ft_in1k,90.503,9.497,98.580,1.420,475.77,512,1.000,bicubic,+1.965,-0.064,-8 +vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,90.501,9.499,98.640,1.360,632.46,336,1.000,bicubic,+1.927,-0.020,-10 +convnext_xlarge.fb_in22k_ft_in1k_384,90.495,9.505,98.766,1.234,350.20,384,1.000,bicubic,+2.747,+0.212,+8 +vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,90.420,9.580,98.638,1.362,304.53,336,1.000,bicubic,+2.238,+0.066,-1 +vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,90.414,9.586,98.648,1.351,632.05,224,1.000,bicubic,+2.168,+0.099,-6 +swinv2_base_window12to24_192to384_22kft1k,90.403,9.597,98.740,1.260,87.92,384,1.000,bicubic,+3.295,+0.504,+16 +maxvit_xlarge_tf_384.in21k_ft_in1k,90.379,9.621,98.587,1.413,475.32,384,1.000,bicubic,+2.073,+0.043,-10 +beit_base_patch16_384.in22k_ft_in22k_in1k,90.373,9.627,98.725,1.275,86.74,384,1.000,bicubic,+3.573,+0.587,+24 +maxvit_base_tf_384.in21k_ft_in1k,90.367,9.633,98.680,1.319,119.65,384,1.000,bicubic,+2.445,+0.138,-2 +vit_large_patch14_clip_224.openai_ft_in12k_in1k,90.367,9.633,98.657,1.343,304.20,224,1.000,bicubic,+2.199,+0.113,-6 +maxvit_large_tf_512.in21k_ft_in1k,90.360,9.640,98.642,1.358,212.33,512,1.000,bicubic,+2.142,+0.044,-10 +beitv2_large_patch16_224.in1k_ft_in22k_in1k,90.354,9.646,98.582,1.418,304.43,224,0.950,bicubic,+1.968,-0.016,-17 +vit_large_patch14_clip_336.laion2b_ft_in1k,90.341,9.659,98.593,1.407,304.53,336,1.000,bicubic,+2.493,+0.223,-3 +maxvit_large_tf_384.in21k_ft_in1k,90.317,9.682,98.685,1.315,212.03,384,1.000,bicubic,+2.325,+0.119,-9 
+vit_large_patch14_clip_224.openai_ft_in1k,90.305,9.695,98.636,1.364,304.20,224,1.000,bicubic,+2.453,+0.208,-6 +vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,90.302,9.697,98.661,1.339,304.20,224,1.000,bicubic,+2.412,+0.251,-8 +convnext_large.fb_in22k_ft_in1k_384,90.279,9.721,98.655,1.345,197.77,384,1.000,bicubic,+2.807,+0.269,-2 +convnext_base.fb_in22k_ft_in1k_384,90.277,9.723,98.800,1.200,88.59,384,1.000,bicubic,+3.483,+0.536,+15 deit3_large_patch16_384_in21ft1k,90.249,9.751,98.625,1.375,304.76,384,1.000,bicubic,+2.533,+0.113,-7 -deit3_huge_patch14_224_in21ft1k,90.213,9.787,98.638,1.362,632.13,224,1.000,bicubic,+3.033,+0.378,-3 -vit_large_patch16_384,90.198,9.802,98.661,1.339,304.72,384,1.000,bicubic,+3.118,+0.361,-1 -cait_m48_448,90.189,9.811,98.484,1.516,356.46,448,1.000,bicubic,+3.701,+0.734,+11 -volo_d3_448,90.168,9.832,98.550,1.450,86.63,448,1.000,bicubic,+3.672,+0.840,+9 -swinv2_large_window12to24_192to384_22kft1k,90.157,9.843,98.604,1.396,196.74,384,1.000,bicubic,+2.701,+0.352,-9 -convnext_base_384_in22ft1k,90.151,9.849,98.728,1.272,88.59,384,1.000,bicubic,+3.609,+0.538,+6 -beit_large_patch16_224,90.151,9.849,98.723,1.277,304.43,224,0.900,bicubic,+2.675,+0.419,-12 -tf_efficientnet_b7_ns,90.093,9.907,98.614,1.386,66.35,600,0.949,bicubic,+3.261,+0.518,-1 -cait_m36_384,90.049,9.951,98.493,1.507,271.22,384,1.000,bicubic,+3.995,+0.763,+16 -dm_nfnet_f6,90.044,9.956,98.546,1.454,438.36,576,0.956,bicubic,+3.902,+0.816,+12 -swin_large_patch4_window12_384,90.027,9.973,98.663,1.337,196.74,384,1.000,bicubic,+2.875,+0.423,-12 -deit3_large_patch16_224_in21ft1k,90.006,9.994,98.661,1.339,304.37,224,1.000,bicubic,+3.024,+0.423,-8 -tf_efficientnetv2_l_in21ft1k,90.004,9.996,98.623,1.377,118.52,480,1.000,bicubic,+3.700,+0.643,+6 -swin_base_patch4_window12_384,89.995,10.005,98.695,1.304,87.90,384,1.000,bicubic,+3.563,+0.639,+2 -vit_base_patch16_384,89.987,10.014,98.680,1.319,86.86,384,1.000,bicubic,+3.981,+0.676,+12 -convnext_xlarge_in22ft1k,89.933,10.067,98.570,1.431,350.20,224,0.875,bicubic,+2.931,+0.358,-13 -swinv2_large_window12to16_192to256_22kft1k,89.922,10.078,98.510,1.490,196.74,256,0.900,bicubic,+2.976,+0.400,-11 -xcit_large_24_p8_384_dist,89.886,10.114,98.384,1.616,188.93,384,1.000,bicubic,+3.888,+0.700,+10 -deit3_base_patch16_384_in21ft1k,89.884,10.116,98.602,1.399,86.88,384,1.000,bicubic,+3.142,+0.490,-9 -volo_d5_224,89.882,10.118,98.493,1.507,295.46,224,0.960,bicubic,+3.812,+0.915,+4 -swinv2_base_window12to16_192to256_22kft1k,89.873,10.127,98.657,1.343,87.92,256,0.900,bicubic,+3.603,+0.761,-1 -cait_s36_384,89.844,10.156,98.424,1.576,68.37,384,1.000,bicubic,+4.384,+0.946,+22 -volo_d4_224,89.814,10.186,98.424,1.576,192.96,224,0.960,bicubic,+3.938,+0.956,+6 -convnext_large_in22ft1k,89.811,10.188,98.493,1.507,197.77,224,0.875,bicubic,+3.175,+0.465,-13 -xcit_medium_24_p8_384_dist,89.811,10.188,98.362,1.638,84.32,384,1.000,bicubic,+3.995,+0.770,+7 -convnext_small_384_in22ft1k,89.803,10.197,98.655,1.345,50.22,384,1.000,bicubic,+4.079,+0.791,+11 -swin_large_patch4_window7_224,89.794,10.206,98.642,1.358,196.53,224,0.900,bicubic,+3.474,+0.750,-9 -vit_large_r50_s32_384,89.792,10.208,98.516,1.484,329.09,384,1.000,bicubic,+3.612,+0.596,-7 -tf_efficientnet_b6_ns,89.784,10.216,98.512,1.488,43.04,528,0.942,bicubic,+3.334,+0.626,-14 -volo_d2_384,89.784,10.216,98.401,1.599,58.87,384,1.000,bicubic,+3.748,+0.827,-4 -tf_efficientnetv2_m_in21ft1k,89.779,10.221,98.501,1.499,54.14,480,1.000,bicubic,+4.193,+0.755,+9 -xcit_small_24_p8_384_dist,89.739,10.261,98.422,1.578,47.63,384,1.000,bicubic,+4.185,+0.850,+9
-volo_d1_384,89.698,10.302,98.294,1.706,26.78,384,1.000,bicubic,+4.448,+1.080,+20 -deit3_large_patch16_384,89.681,10.319,98.392,1.608,304.76,384,1.000,bicubic,+3.875,+0.796,0 -xcit_large_24_p16_384_dist,89.662,10.338,98.401,1.599,189.10,384,1.000,bicubic,+3.910,+0.863,+1 -tf_efficientnet_b5_ns,89.649,10.351,98.482,1.518,30.39,456,0.934,bicubic,+3.561,+0.730,-13 -convnext_base_in22ft1k,89.628,10.372,98.537,1.462,88.59,224,0.875,bicubic,+3.804,+0.671,-6 -tf_efficientnetv2_xl_in21ft1k,89.589,10.411,98.174,1.825,208.12,512,1.000,bicubic,+3.169,+0.306,-21 -tf_efficientnet_b8_ap,89.581,10.419,98.303,1.697,87.41,672,0.954,bicubic,+4.209,+1.009,+11 -volo_d3_224,89.557,10.443,98.375,1.625,86.33,224,0.960,bicubic,+4.145,+1.095,+8 -dm_nfnet_f4,89.557,10.443,98.303,1.697,316.07,512,0.951,bicubic,+3.843,+0.783,-2 -xcit_large_24_p8_224_dist,89.517,10.483,98.224,1.776,188.93,224,1.000,bicubic,+4.119,+0.814,+7 -xcit_small_12_p8_384_dist,89.515,10.485,98.303,1.697,26.21,384,1.000,bicubic,+4.435,+1.023,+18 -cait_s24_384,89.506,10.494,98.367,1.633,47.06,384,1.000,bicubic,+4.456,+1.019,+21 -dm_nfnet_f3,89.485,10.515,98.399,1.601,254.92,416,0.940,bicubic,+3.963,+0.937,-3 -xcit_medium_24_p16_384_dist,89.474,10.526,98.296,1.704,84.40,384,1.000,bicubic,+4.052,+0.890,0 -dm_nfnet_f5,89.463,10.537,98.324,1.676,377.21,544,0.954,bicubic,+3.647,+0.838,-14 -deit3_base_patch16_224_in21ft1k,89.451,10.549,98.557,1.443,86.59,224,1.000,bicubic,+3.735,+0.813,-10 -deit_base_distilled_patch16_384,89.429,10.571,98.439,1.561,87.63,384,1.000,bicubic,+4.007,+1.107,-2 -tf_efficientnet_b7_ap,89.429,10.571,98.345,1.655,66.35,600,0.949,bicubic,+4.309,+1.093,+8 -vit_base_patch8_224,89.427,10.573,98.486,1.514,86.58,224,0.900,bicubic,+3.637,+0.694,-16 -beit_base_patch16_224,89.410,10.590,98.525,1.475,86.53,224,0.900,bicubic,+4.182,+0.869,+2 -regnetz_e8,89.380,10.620,98.459,1.542,57.70,320,1.000,bicubic,+4.350,+1.195,+14 -tf_efficientnetv2_l,89.374,10.626,98.271,1.729,118.52,480,1.000,bicubic,+3.886,+0.899,-11 -deit3_small_patch16_384_in21ft1k,89.367,10.633,98.382,1.618,22.21,384,1.000,bicubic,+4.543,+0.898,+20 -tf_efficientnet_b8,89.352,10.648,98.303,1.697,87.41,672,0.954,bicubic,+3.984,+0.911,-5 -tf_efficientnet_b6_ap,89.344,10.656,98.281,1.719,43.04,528,0.942,bicubic,+4.558,+1.143,+20 -volo_d2_224,89.327,10.673,98.209,1.791,58.68,224,0.960,bicubic,+4.133,+1.021,-2 -vit_large_patch16_224,89.314,10.686,98.394,1.606,304.33,224,0.900,bicubic,+3.470,+0.572,-29 -tf_efficientnet_b4_ns,89.303,10.697,98.347,1.653,19.34,380,0.922,bicubic,+4.143,+0.877,-3 -xcit_small_24_p16_384_dist,89.295,10.705,98.328,1.672,47.67,384,1.000,bicubic,+4.207,+1.020,-1 -xcit_medium_24_p8_224_dist,89.290,10.710,98.192,1.808,84.32,224,1.000,bicubic,+4.220,+0.912,+1 -tf_efficientnetv2_m,89.286,10.714,98.236,1.764,54.14,480,1.000,bicubic,+4.250,+0.958,+3 -deit3_huge_patch14_224,89.212,10.789,98.166,1.834,632.13,224,0.900,bicubic,+4.006,+0.808,-9 -xcit_small_24_p8_224_dist,89.201,10.799,98.245,1.755,47.63,224,1.000,bicubic,+4.325,+1.057,+9 -xcit_small_12_p16_384_dist,89.194,10.806,98.219,1.781,26.25,384,1.000,bicubic,+4.486,+1.103,+14 -swin_base_patch4_window7_224,89.147,10.852,98.424,1.576,87.77,224,0.900,bicubic,+3.897,+0.862,-15 -eca_nfnet_l2,89.141,10.859,98.315,1.685,56.72,384,1.000,bicubic,+4.445,+1.051,+13 -cait_xs24_384,89.139,10.861,98.290,1.710,26.67,384,1.000,bicubic,+5.075,+1.400,+46 -convnext_small_in22ft1k,89.122,10.878,98.322,1.678,50.22,224,0.875,bicubic,+4.554,+0.926,+15 
-ig_resnext101_32x48d,89.115,10.885,98.132,1.868,828.41,224,0.875,bilinear,+3.679,+0.556,-26 -ig_resnext101_32x32d,89.109,10.891,98.183,1.817,468.53,224,0.875,bilinear,+4.009,+0.749,-13 -tf_efficientnet_b7,89.083,10.917,98.185,1.815,66.35,600,0.949,bicubic,+4.149,+0.979,-2 -ecaresnet269d,89.069,10.931,98.232,1.768,102.09,352,1.000,bicubic,+4.095,+1.006,-4 -xcit_large_24_p16_224_dist,89.041,10.959,98.061,1.939,189.10,224,1.000,bicubic,+4.121,+0.929,-3 -resmlp_big_24_224_in22ft1k,89.011,10.989,98.215,1.785,129.14,224,0.875,bicubic,+4.613,+1.097,+18 -dm_nfnet_f2,89.011,10.989,98.189,1.810,193.78,352,0.920,bicubic,+3.945,+0.947,-13 -xcit_small_12_p8_224_dist,89.002,10.998,98.078,1.922,26.21,224,1.000,bicubic,+4.772,+1.204,+28 -efficientnetv2_rw_m,88.990,11.011,98.213,1.787,53.24,416,1.000,bicubic,+4.178,+1.067,-3 -regnetz_040h,88.953,11.047,98.202,1.798,28.94,320,1.000,bicubic,+4.457,+1.196,+9 -tf_efficientnet_b5_ap,88.942,11.057,98.164,1.836,30.39,456,0.934,bicubic,+4.688,+1.186,+23 -deit3_base_patch16_384,88.928,11.072,98.046,1.954,86.88,384,1.000,bicubic,+3.852,+0.792,-20 -dm_nfnet_f1,88.925,11.075,98.115,1.885,132.63,320,0.910,bicubic,+4.301,+1.017,-1 -volo_d1_224,88.906,11.094,98.031,1.968,26.63,224,0.960,bicubic,+4.742,+1.257,+27 -tf_efficientnetv2_s_in21ft1k,88.904,11.096,98.279,1.721,21.46,384,1.000,bicubic,+4.608,+1.025,+13 -vit_base_patch16_224,88.864,11.136,98.230,1.770,86.57,224,0.900,bicubic,+4.334,+0.934,0 -regnetz_d8,88.855,11.145,98.189,1.810,23.37,320,1.000,bicubic,+4.803,+1.193,+29 -resnetrs420,88.842,11.158,98.034,1.966,191.89,416,1.000,bicubic,+3.834,+0.910,-20 -regnetz_d8_evos,88.838,11.162,98.132,1.868,23.46,320,0.950,bicubic,+4.788,+1.136,+28 -resnetrs270,88.834,11.166,98.136,1.864,129.86,352,1.000,bicubic,+4.398,+1.162,+2 -ig_resnext101_32x16d,88.825,11.175,98.049,1.951,194.03,224,0.875,bilinear,+4.655,+0.851,+19 -vit_small_r26_s32_384,88.812,11.188,98.343,1.657,36.47,384,1.000,bicubic,+4.764,+1.015,+26 -vit_base_r50_s16_384,88.804,11.196,98.232,1.768,98.95,384,1.000,bicubic,+3.828,+0.942,-24 -xcit_medium_24_p16_224_dist,88.804,11.196,98.038,1.962,84.40,224,1.000,bicubic,+4.526,+1.098,+7 -seresnet152d,88.797,11.203,98.174,1.825,66.84,320,1.000,bicubic,+4.433,+1.130,+1 -xcit_tiny_24_p8_384_dist,88.778,11.222,98.164,1.836,12.11,384,1.000,bicubic,+5.032,+1.452,+43 -swsl_resnext101_32x8d,88.778,11.222,98.149,1.851,88.79,224,0.875,bilinear,+4.488,+0.967,+3 -convnext_tiny_384_in22ft1k,88.772,11.228,98.298,1.702,28.59,384,1.000,bicubic,+4.696,+1.140,+16 -resnetrs200,88.759,11.241,98.113,1.887,93.21,320,1.000,bicubic,+4.319,+1.033,-8 -tf_efficientnet_b6,88.759,11.241,98.068,1.932,43.04,528,0.942,bicubic,+4.651,+1.180,+13 -deit3_large_patch16_224,88.759,11.241,97.912,2.088,304.37,224,0.900,bicubic,+3.997,+0.874,-23 -resnetrs350,88.755,11.245,98.031,1.968,163.96,384,1.000,bicubic,+4.043,+1.041,-23 -vit_base_patch16_224_miil,88.742,11.258,98.027,1.973,86.54,224,0.875,bilinear,+4.470,+1.225,-1 -regnetz_040,88.727,11.273,98.091,1.909,27.12,320,1.000,bicubic,+4.491,+1.159,+1 -resnetv2_152x2_bitm,88.725,11.275,98.307,1.693,236.34,448,1.000,bilinear,+4.215,+0.873,-17 -regnety_160,88.699,11.301,98.068,1.932,83.59,288,1.000,bicubic,+5.007,+1.292,+38 -pit_b_distilled_224,88.674,11.326,98.091,1.909,74.79,224,0.900,bicubic,+4.532,+1.235,+5 -regnetz_d32,88.652,11.348,98.081,1.919,27.58,320,0.950,bicubic,+4.628,+1.213,+12 -vit_small_patch16_384,88.648,11.352,98.230,1.770,22.20,384,1.000,bicubic,+4.848,+1.130,+27 -regnety_080,88.635,11.365,97.972,2.028,39.18,288,1.000,bicubic,+4.707,+1.084,+15 
-eca_nfnet_l1,88.624,11.376,98.134,1.866,41.41,320,1.000,bicubic,+4.612,+1.102,+11 -swinv2_base_window16_256,88.584,11.416,97.895,2.105,87.92,256,0.900,bicubic,+3.992,+0.821,-29 -convnext_large,88.577,11.423,97.854,2.146,197.77,224,0.875,bicubic,+4.281,+0.960,-14 -resnetv2_152x4_bitm,88.552,11.448,98.189,1.810,936.53,480,1.000,bilinear,+3.634,+0.747,-41 -resnet200d,88.545,11.455,97.959,2.041,64.69,320,1.000,bicubic,+4.585,+1.135,+8 -seresnextaa101d_32x8d,88.543,11.457,98.002,1.998,93.59,288,1.000,bicubic,+3.971,+0.932,-32 -xcit_small_24_p16_224_dist,88.535,11.465,98.002,1.998,47.67,224,1.000,bicubic,+4.665,+1.270,+10 -resnest269e,88.522,11.478,98.027,1.973,110.93,416,0.928,bicubic,+4.004,+1.041,-31 -swinv2_base_window8_256,88.518,11.482,97.893,2.107,87.92,256,0.900,bicubic,+4.256,+0.971,-16 -seresnext101_32x8d,88.505,11.495,97.888,2.112,93.57,288,1.000,bicubic,+4.301,+1.014,-12 -efficientnetv2_rw_s,88.475,11.525,97.972,2.028,23.94,384,1.000,bicubic,+4.665,+1.248,+14 -crossvit_18_dagger_408,88.475,11.525,97.893,2.107,44.61,408,1.000,bicubic,+4.281,+1.075,-13 -resnetv2_101x3_bitm,88.469,11.531,98.157,1.843,387.93,448,1.000,bilinear,+4.025,+0.775,-33 -cait_s24_224,88.451,11.549,97.957,2.043,46.92,224,1.000,bicubic,+4.993,+1.395,+27 -resnetv2_50x3_bitm,88.445,11.555,98.198,1.802,217.32,448,1.000,bilinear,+4.433,+1.072,-4 -resmlp_big_24_distilled_224,88.441,11.559,97.940,2.060,129.14,224,0.875,bicubic,+4.853,+1.292,+21 -regnetv_064,88.432,11.568,98.064,1.937,30.58,288,1.000,bicubic,+4.720,+1.318,+16 -resnest200e,88.430,11.570,98.044,1.956,70.20,320,0.909,bicubic,+4.602,+1.152,+5 -tf_efficientnet_b3_ns,88.428,11.572,98.027,1.973,12.23,300,0.904,bicubic,+4.380,+1.115,-10 -vit_large_r50_s32_224,88.424,11.576,98.085,1.915,328.99,224,0.900,bicubic,+3.994,+0.919,-37 -seresnext101d_32x8d,88.424,11.576,97.955,2.045,93.59,288,1.000,bicubic,+4.062,+1.037,-34 -tf_efficientnetv2_s,88.396,11.604,97.927,2.073,21.46,384,1.000,bicubic,+4.512,+1.229,-6 -regnetz_c16_evos,88.379,11.621,98.042,1.958,13.49,320,0.950,bicubic,+5.747,+1.566,+66 -efficientnet_b4,88.368,11.632,97.961,2.039,19.34,384,1.000,bicubic,+4.944,+1.363,+19 -swinv2_small_window16_256,88.364,11.636,97.852,2.148,49.73,256,0.900,bicubic,+4.154,+0.982,-28 -resnet152d,88.353,11.647,97.938,2.062,60.21,320,1.000,bicubic,+4.675,+1.198,+10 -tf_efficientnet_b4_ap,88.351,11.649,97.893,2.107,19.34,380,0.922,bicubic,+5.103,+1.501,+25 -convnext_base,88.347,11.653,97.784,2.216,88.59,224,0.875,bicubic,+4.507,+1.034,-8 -deit3_small_patch16_224_in21ft1k,88.334,11.666,98.127,1.873,22.06,224,1.000,bicubic,+5.258,+1.351,+36 -tf_efficientnet_b5,88.323,11.677,97.912,2.088,30.39,456,0.934,bicubic,+4.509,+1.164,-6 -regnety_064,88.319,11.681,97.861,2.139,30.58,288,1.000,bicubic,+4.599,+1.135,0 -crossvit_15_dagger_408,88.308,11.692,97.869,2.131,28.50,408,1.000,bicubic,+4.470,+1.089,-10 -deit3_small_patch16_384,88.298,11.702,97.888,2.112,22.21,384,1.000,bicubic,+4.870,+1.212,+9 -cs3se_edgenet_x,88.291,11.709,97.931,2.069,50.72,320,1.000,bicubic,+4.743,+1.265,+5 -resnetrs152,88.255,11.745,97.737,2.263,86.62,320,1.000,bicubic,+4.541,+1.123,-3 -deit3_base_patch16_224,88.251,11.749,97.807,2.193,86.59,224,0.900,bicubic,+4.459,+1.223,-9 -xcit_small_12_p16_224_dist,88.246,11.754,97.846,2.154,26.25,224,1.000,bicubic,+4.900,+1.428,+13 -regnetv_040,88.219,11.781,97.972,2.028,20.64,288,1.000,bicubic,+5.021,+1.308,+17 -deit_base_distilled_patch16_224,88.214,11.786,97.920,2.080,87.34,224,0.900,bicubic,+4.826,+1.432,+7 
-xception65p,88.185,11.815,97.790,2.210,39.82,299,0.940,bicubic,+5.055,+1.310,+22 -swinv2_small_window8_256,88.185,11.815,97.775,2.225,49.73,256,0.900,bicubic,+4.331,+1.133,-23 -xcit_tiny_24_p16_384_dist,88.161,11.839,97.946,2.054,12.12,384,1.000,bicubic,+5.589,+1.658,+53 -xcit_large_24_p8_224,88.157,11.843,97.389,2.611,188.93,224,1.000,bicubic,+3.765,+0.731,-58 -ig_resnext101_32x8d,88.155,11.845,97.856,2.144,88.79,224,0.875,bilinear,+5.457,+1.224,+42 -resnetv2_152x2_bit_teacher_384,88.150,11.850,98.053,1.947,236.34,384,1.000,bicubic,+4.306,+0.937,-26 -cait_xxs36_384,88.138,11.862,97.908,2.092,17.37,384,1.000,bicubic,+5.946,+1.764,+82 -dm_nfnet_f0,88.125,11.875,97.854,2.146,71.49,256,0.900,bicubic,+4.741,+1.280,0 -xcit_tiny_12_p8_384_dist,88.101,11.899,97.923,2.077,6.71,384,1.000,bicubic,+5.715,+1.701,+59 -swsl_resnext101_32x4d,88.099,11.901,97.970,2.030,44.18,224,0.875,bilinear,+4.859,+1.210,+4 -xception65,88.071,11.929,97.750,2.250,39.92,299,0.940,bicubic,+4.897,+1.158,+6 -convnext_small,88.050,11.950,97.788,2.212,50.22,224,0.875,bicubic,+4.900,+1.358,+6 -swin_s3_base_224,88.050,11.950,97.660,2.340,71.13,224,0.900,bicubic,+4.118,+1.000,-38 -xcit_tiny_24_p8_224_dist,88.035,11.965,97.812,2.188,12.11,224,1.000,bicubic,+5.475,+1.644,+44 -convnext_tiny_in22ft1k,87.997,12.003,97.920,2.080,28.59,224,0.875,bicubic,+5.085,+1.296,+18 -cs3sedarknet_x,87.995,12.005,97.790,2.210,35.40,288,1.000,bicubic,+5.341,+1.444,+32 -eca_nfnet_l0,87.978,12.023,97.871,2.129,24.14,288,1.000,bicubic,+5.400,+1.381,+38 -nfnet_l0,87.971,12.029,97.867,2.133,35.07,288,1.000,bicubic,+5.219,+1.349,+25 -xcit_small_24_p8_224,87.969,12.031,97.581,2.419,47.63,224,1.000,bicubic,+4.129,+0.945,-37 -tf_efficientnet_b4,87.967,12.033,97.739,2.261,19.34,380,0.922,bicubic,+4.943,+1.439,+10 -regnety_032,87.941,12.059,97.888,2.112,19.44,288,1.000,bicubic,+5.217,+1.466,+23 -resnet101d,87.937,12.063,97.908,2.092,44.57,320,1.000,bicubic,+4.915,+1.462,+9 -mobilevitv2_200_384_in22ft1k,87.935,12.065,97.822,2.178,18.45,384,1.000,bicubic,+4.535,+1.240,-17 -swinv2_cr_small_ns_224,87.922,12.078,97.666,2.334,49.70,224,0.900,bicubic,+4.436,+1.182,-23 -sequencer2d_l,87.915,12.085,97.698,2.302,54.30,224,0.875,bicubic,+4.509,+1.198,-20 -regnety_040,87.913,12.087,97.884,2.116,20.65,288,1.000,bicubic,+4.877,+1.374,+3 -vit_base_patch32_384,87.911,12.089,98.012,1.988,88.30,384,1.000,bicubic,+4.559,+1.176,-18 -twins_svt_large,87.901,12.099,97.581,2.419,99.27,224,0.900,bicubic,+4.221,+0.987,-32 -twins_pcpvt_large,87.877,12.123,97.859,2.142,60.99,224,0.900,bicubic,+4.741,+1.255,-7 -swin_s3_small_224,87.860,12.140,97.434,2.566,49.74,224,0.900,bicubic,+4.086,+0.982,-41 -regnetz_c16,87.858,12.142,97.818,2.182,13.46,320,0.940,bicubic,+5.338,+1.458,+28 -deit_base_patch16_384,87.841,12.159,97.510,2.490,86.86,384,1.000,bicubic,+4.735,+1.140,-7 -mobilevitv2_175_384_in22ft1k,87.837,12.164,97.726,2.274,14.25,384,1.000,bicubic,+4.903,+1.296,-1 -xcit_small_12_p8_224,87.828,12.172,97.566,2.434,26.21,224,1.000,bicubic,+4.488,+1.086,-22 -tresnet_xl_448,87.796,12.204,97.459,2.541,78.44,448,0.875,bilinear,+4.748,+1.289,-7 -resnetv2_50x1_bit_distilled,87.792,12.208,97.899,2.101,25.55,224,0.875,bicubic,+4.970,+1.377,+1 -tresnet_m,87.740,12.259,97.523,2.477,31.39,224,0.875,bilinear,+4.666,+1.403,-10 -twins_pcpvt_base,87.732,12.268,97.728,2.272,43.83,224,0.900,bicubic,+5.024,+1.378,+8 -gc_efficientnetv2_rw_t,87.717,12.283,97.807,2.193,13.68,288,1.000,bicubic,+5.251,+1.509,+24 -resnetv2_101x1_bitm,87.683,12.317,97.938,2.062,44.54,448,1.000,bilinear,+5.351,+1.422,+35 
-swin_small_patch4_window7_224,87.670,12.330,97.568,2.432,49.61,224,0.900,bicubic,+4.452,+1.242,-26 -mobilevitv2_150_384_in22ft1k,87.653,12.347,97.649,2.351,10.59,384,1.000,bicubic,+5.063,+1.333,+10 -twins_svt_base,87.644,12.356,97.525,2.474,56.07,224,0.900,bicubic,+4.506,+1.105,-23 -efficientnetv2_rw_t,87.642,12.358,97.688,2.312,13.65,288,1.000,bicubic,+5.298,+1.492,+28 -pnasnet5large,87.640,12.360,97.485,2.515,86.06,331,0.911,bicubic,+4.858,+1.443,-4 -cs3edgenet_x,87.632,12.368,97.662,2.338,47.82,288,1.000,bicubic,+4.910,+1.286,-1 -swinv2_tiny_window16_256,87.617,12.383,97.562,2.438,28.35,256,0.900,bicubic,+4.807,+1.332,-8 -swsl_resnext101_32x16d,87.608,12.392,97.820,2.180,194.03,224,0.875,bilinear,+4.258,+0.976,-38 -jx_nest_base,87.608,12.392,97.515,2.485,67.72,224,0.875,bicubic,+4.054,+1.151,-50 -xcit_medium_24_p8_224,87.606,12.394,97.197,2.803,84.32,224,1.000,bicubic,+3.868,+0.803,-59 -swsl_resnext50_32x4d,87.602,12.398,97.654,2.346,25.03,224,0.875,bilinear,+5.426,+1.422,+39 -sequencer2d_m,87.565,12.435,97.581,2.419,38.31,224,0.875,bicubic,+4.757,+1.313,-12 -tf_efficientnet_b2_ns,87.559,12.441,97.628,2.372,9.11,260,0.890,bicubic,+5.175,+1.382,+16 -levit_384,87.555,12.445,97.545,2.455,39.13,224,0.900,bicubic,+4.967,+1.527,-1 -ecaresnet50t,87.542,12.458,97.645,2.355,25.57,320,0.950,bicubic,+5.194,+1.507,+16 -vit_base_patch16_rpn_224,87.506,12.494,97.489,2.511,86.54,224,0.900,bicubic,+5.306,+1.493,+32 -edgenext_small,87.504,12.496,97.587,2.413,5.59,320,1.000,bicubic,+5.930,+1.873,+66 -resnetv2_152x2_bit_teacher,87.493,12.507,97.812,2.188,236.34,224,0.875,bicubic,+4.625,+1.244,-22 -jx_nest_small,87.491,12.509,97.521,2.479,38.35,224,0.875,bicubic,+4.371,+1.191,-35 -vit_relpos_base_patch16_clsgap_224,87.469,12.531,97.525,2.474,86.43,224,0.900,bicubic,+4.709,+1.351,-18 -vit_relpos_base_patch16_224,87.463,12.537,97.560,2.440,86.43,224,0.900,bicubic,+4.977,+1.418,+1 -resnet152,87.454,12.546,97.400,2.600,60.19,224,0.950,bicubic,+4.636,+1.268,-24 -fbnetv3_g,87.446,12.554,97.545,2.455,16.62,288,0.950,bilinear,+5.412,+1.479,+36 -resnext101_64x4d,87.444,12.556,97.442,2.558,83.46,288,1.000,bicubic,+4.300,+1.068,-45 -efficientnet_b3,87.435,12.565,97.679,2.321,12.23,320,1.000,bicubic,+5.195,+1.561,+18 -resnet61q,87.431,12.569,97.598,2.402,36.85,288,1.000,bicubic,+4.913,+1.468,-5 -cait_xxs24_384,87.414,12.586,97.619,2.381,12.03,384,1.000,bicubic,+6.452,+1.975,+101 -cs3sedarknet_l,87.407,12.593,97.572,2.428,21.91,288,0.950,bicubic,+5.631,+1.602,+47 -cs3darknet_x,87.399,12.601,97.607,2.393,35.05,288,1.000,bicubic,+5.175,+1.377,+16 -resnet51q,87.392,12.608,97.581,2.419,35.70,288,1.000,bilinear,+5.034,+1.403,0 -xcit_tiny_24_p8_224,87.380,12.620,97.626,2.374,12.11,224,1.000,bicubic,+5.484,+1.652,+37 -tresnet_l_448,87.380,12.620,97.487,2.513,55.99,448,0.875,bilinear,+5.110,+1.507,+10 -coat_lite_small,87.377,12.623,97.372,2.628,19.84,224,0.900,bicubic,+5.073,+1.522,+4 -sequencer2d_s,87.375,12.625,97.391,2.609,27.65,224,0.875,bicubic,+5.031,+1.357,-1 -swinv2_cr_small_224,87.371,12.629,97.344,2.656,49.70,224,0.900,bicubic,+4.233,+1.246,-54 -vit_relpos_medium_patch16_cls_224,87.369,12.631,97.453,2.547,38.76,224,0.900,bicubic,+4.807,+1.387,-19 -nasnetalarge,87.348,12.652,97.417,2.583,88.75,331,0.911,bicubic,+4.730,+1.373,-26 -crossvit_18_dagger_240,87.346,12.655,97.455,2.545,44.27,240,0.875,bicubic,+4.826,+1.387,-18 -resnetv2_101,87.322,12.678,97.325,2.675,44.54,224,0.950,bicubic,+5.276,+1.463,+19 -crossvit_18_240,87.316,12.684,97.487,2.513,43.27,240,0.875,bicubic,+4.918,+1.433,-13 
-convnext_tiny,87.313,12.687,97.449,2.551,28.59,224,0.875,bicubic,+5.251,+1.595,+16 -resnest101e,87.286,12.714,97.560,2.440,48.28,256,0.875,bilinear,+4.398,+1.240,-47 -ecaresnet101d,87.284,12.716,97.562,2.438,44.57,224,0.875,bicubic,+5.114,+1.514,+8 -pit_s_distilled_224,87.275,12.725,97.500,2.500,24.04,224,0.900,bicubic,+5.281,+1.704,+16 -resnetv2_50d_gn,87.269,12.731,97.513,2.487,25.57,288,0.950,bicubic,+5.445,+1.589,+26 -vit_relpos_medium_patch16_rpn_224,87.256,12.744,97.442,2.558,38.73,224,0.900,bicubic,+4.962,+1.470,-7 -resnetrs101,87.243,12.757,97.457,2.543,63.62,288,0.940,bicubic,+4.959,+1.449,-6 -poolformer_m48,87.239,12.761,97.308,2.692,73.47,224,0.950,bicubic,+4.779,+1.350,-23 -mixer_b16_224_miil,87.230,12.770,97.410,2.590,59.88,224,0.875,bilinear,+4.926,+1.690,-11 -tresnet_xl,87.226,12.774,97.400,2.600,78.44,224,0.875,bilinear,+5.164,+1.464,+6 -xcit_tiny_12_p8_224_dist,87.219,12.780,97.449,2.551,6.71,224,1.000,bicubic,+6.011,+1.843,+58 -convit_base,87.207,12.793,97.286,2.714,86.54,224,0.875,bicubic,+4.915,+1.348,-12 -xcit_tiny_12_p16_384_dist,87.202,12.798,97.468,2.532,6.72,384,1.000,bicubic,+6.260,+2.060,+76 -resnetv2_50d_evos,87.194,12.806,97.359,2.641,25.59,288,0.950,bicubic,+5.216,+1.447,+8 -tf_efficientnet_b3_ap,87.188,12.812,97.380,2.620,12.23,300,0.904,bicubic,+5.364,+1.756,+17 -visformer_small,87.185,12.815,97.325,2.675,40.22,224,0.900,bicubic,+5.077,+1.449,-3 -crossvit_15_dagger_240,87.170,12.830,97.438,2.562,28.21,240,0.875,bicubic,+4.844,+1.482,-21 -vit_srelpos_medium_patch16_224,87.168,12.832,97.312,2.688,38.74,224,0.900,bicubic,+4.932,+1.378,-14 -vit_relpos_medium_patch16_224,87.138,12.862,97.506,2.494,38.75,224,0.900,bicubic,+4.676,+1.420,-35 -xcit_small_24_p16_224,87.134,12.866,97.263,2.737,47.67,224,1.000,bicubic,+4.550,+1.263,-46 -swin_s3_tiny_224,87.130,12.870,97.303,2.697,28.33,224,0.900,bicubic,+5.006,+1.353,-9 -resnet101,87.081,12.919,97.265,2.735,44.55,224,0.950,bicubic,+5.151,+1.499,+5 -swinv2_tiny_window8_256,87.079,12.921,97.517,2.483,28.35,256,0.900,bicubic,+5.269,+1.523,+10 -mobilevitv2_200_in22ft1k,87.059,12.941,97.425,2.575,18.45,256,0.888,bicubic,+4.725,+1.487,-30 -xception41p,87.057,12.943,97.201,2.799,26.91,299,0.940,bicubic,+5.089,+1.407,-1 -crossvit_15_240,87.055,12.945,97.423,2.577,27.53,240,0.875,bicubic,+5.511,+1.733,+19 -convit_small,87.051,12.949,97.350,2.650,27.78,224,0.875,bicubic,+5.623,+1.608,+28 -tf_efficientnetv2_b3,87.029,12.970,97.303,2.697,14.36,300,0.904,bicubic,+5.063,+1.521,-3 -xcit_small_12_p16_224,87.017,12.983,97.242,2.759,26.25,224,1.000,bicubic,+5.045,+1.430,-6 -regnetz_b16,87.012,12.988,97.425,2.575,9.72,288,0.940,bicubic,+6.300,+1.951,+73 -jx_nest_tiny,87.008,12.992,97.378,2.622,17.06,224,0.875,bicubic,+5.590,+1.760,+25 -deit3_small_patch16_224,87.004,12.996,97.167,2.833,22.06,224,0.900,bicubic,+5.622,+1.717,+28 -deit_small_distilled_patch16_224,87.002,12.998,97.316,2.684,22.44,224,0.900,bicubic,+5.794,+1.942,+37 -swinv2_cr_tiny_ns_224,86.998,13.002,97.282,2.718,28.33,224,0.900,bicubic,+5.212,+1.460,0 -resmlp_36_distilled_224,86.989,13.011,97.276,2.724,44.69,224,0.875,bicubic,+5.833,+1.790,+37 -xcit_large_24_p16_224,86.955,13.045,96.919,3.081,189.10,224,1.000,bicubic,+4.063,+1.041,-82 -mobilevitv2_175_in22ft1k,86.953,13.047,97.333,2.667,14.25,256,0.888,bicubic,+5.013,+1.543,-11 -poolformer_m36,86.946,13.054,97.148,2.852,56.17,224,0.950,bicubic,+4.838,+1.458,-24 -xcit_medium_24_p16_224,86.938,13.062,97.098,2.902,84.40,224,1.000,bicubic,+4.300,+1.120,-70 
-convnext_tiny_hnf,86.918,13.082,97.280,2.720,28.59,224,0.950,bicubic,+4.698,+1.414,-34 -tnt_s_patch16_224,86.906,13.094,97.365,2.635,23.76,224,0.900,bicubic,+5.388,+1.619,+6 -vit_relpos_small_patch16_224,86.891,13.109,97.491,2.509,21.98,224,0.900,bicubic,+5.437,+1.663,+12 -vit_small_patch16_224,86.865,13.135,97.613,2.387,22.05,224,0.900,bicubic,+5.469,+1.475,+15 -ssl_resnext101_32x16d,86.865,13.135,97.519,2.481,194.03,224,0.875,bilinear,+5.009,+1.423,-14 -vit_small_r26_s32_224,86.856,13.143,97.528,2.472,36.43,224,0.900,bicubic,+4.994,+1.506,-16 -convmixer_1536_20,86.854,13.146,97.346,2.654,51.63,224,0.960,bicubic,+5.484,+1.734,+16 -rexnet_200,86.842,13.158,97.276,2.724,16.37,224,0.875,bicubic,+5.214,+1.608,-7 -tf_efficientnet_b3,86.837,13.163,97.297,2.703,12.23,300,0.904,bicubic,+5.199,+1.579,-9 -swsl_resnet50,86.835,13.165,97.493,2.507,25.56,224,0.875,bilinear,+5.655,+1.513,+22 -deit_base_patch16_224,86.827,13.173,97.052,2.949,86.57,224,0.900,bicubic,+4.833,+1.320,-29 -tresnet_m_448,86.814,13.186,97.216,2.784,31.39,448,0.875,bilinear,+5.108,+1.644,-15 -ssl_resnext101_32x8d,86.801,13.199,97.472,2.528,88.79,224,0.875,bilinear,+5.193,+1.430,-10 -tf_efficientnet_lite4,86.801,13.199,97.263,2.737,13.01,380,0.920,bilinear,+5.267,+1.597,-7 -coat_mini,86.790,13.210,97.158,2.842,10.34,224,0.900,bicubic,+5.524,+1.766,+12 -resnetaa50,86.771,13.229,97.389,2.611,25.56,288,1.000,bicubic,+5.153,+1.579,-14 -tresnet_l,86.763,13.237,97.271,2.729,55.99,224,0.875,bilinear,+5.273,+1.645,-6 -twins_svt_small,86.756,13.244,97.177,2.823,24.06,224,0.900,bicubic,+5.074,+1.511,-20 -cs3darknet_l,86.748,13.252,97.463,2.537,21.16,288,0.950,bicubic,+5.862,+1.795,+35 -mobilevitv2_150_in22ft1k,86.743,13.257,97.218,2.782,10.59,256,0.888,bicubic,+5.273,+1.550,-7 -levit_256,86.739,13.261,97.259,2.741,18.89,224,0.900,bicubic,+5.223,+1.769,-12 -cs3darknet_focus_l,86.735,13.265,97.380,2.620,21.15,288,0.950,bicubic,+5.861,+1.688,+34 -crossvit_base_240,86.735,13.265,97.122,2.878,105.03,240,0.875,bicubic,+4.519,+1.290,-55 -vit_srelpos_small_patch16_224,86.703,13.297,97.250,2.750,21.97,224,0.900,bicubic,+5.605,+1.678,+14 -halo2botnet50ts_256,86.692,13.308,97.096,2.904,22.64,256,0.950,bicubic,+4.624,+1.454,-49 -seresnext50_32x4d,86.690,13.310,97.222,2.778,27.56,224,0.875,bicubic,+5.428,+1.594,+2 -crossvit_small_240,86.688,13.312,97.273,2.727,26.86,240,0.875,bicubic,+5.672,+1.817,+17 -pit_b_224,86.688,13.312,96.898,3.102,73.76,224,0.900,bicubic,+4.244,+1.186,-81 -tf_efficientnet_b1_ns,86.666,13.334,97.378,2.622,7.79,240,0.882,bicubic,+5.280,+1.642,-9 -swin_tiny_patch4_window7_224,86.658,13.342,97.197,2.803,28.29,224,0.900,bicubic,+5.282,+1.655,-8 -wide_resnet50_2,86.641,13.359,97.212,2.788,68.88,224,0.875,bicubic,+5.185,+1.682,-17 -gernet_l,86.641,13.359,97.190,2.810,31.08,256,0.875,bilinear,+5.291,+1.654,-8 -poolformer_s36,86.639,13.361,97.158,2.842,30.86,224,0.900,bicubic,+5.221,+1.710,-15 -efficientnet_el,86.635,13.366,97.180,2.820,10.59,300,0.904,bicubic,+5.329,+1.646,-8 -twins_pcpvt_small,86.626,13.374,97.340,2.660,24.11,224,0.900,bicubic,+5.536,+1.698,+6 -resmlp_24_distilled_224,86.620,13.380,97.139,2.861,30.02,224,0.875,bicubic,+5.856,+1.917,+25 -nf_resnet50,86.605,13.395,97.293,2.707,25.56,288,0.940,bicubic,+5.951,+1.959,+28 -resnest50d_4s2x40d,86.583,13.417,97.269,2.731,30.42,224,0.875,bicubic,+5.475,+1.707,-1 -efficientnet_b3_pruned,86.579,13.421,97.188,2.812,9.86,300,0.904,bicubic,+5.721,+1.944,+19 -sebotnet33ts_256,86.573,13.427,96.791,3.209,13.70,256,0.940,bicubic,+5.419,+1.625,-6 
-sehalonet33ts,86.570,13.430,97.009,2.991,13.69,256,0.940,bicubic,+5.598,+1.737,+7 -repvgg_b3,86.564,13.436,97.141,2.859,123.09,224,0.875,bilinear,+6.068,+1.877,+32 -xcit_tiny_24_p16_224_dist,86.534,13.466,97.216,2.784,12.12,224,1.000,bicubic,+6.086,+2.004,+38 -convnext_nano,86.532,13.468,97.182,2.818,15.59,288,1.000,bicubic,+5.056,+1.522,-32 -halonet50ts,86.500,13.500,97.152,2.848,22.73,256,0.940,bicubic,+4.848,+1.540,-46 -ssl_resnext101_32x4d,86.477,13.524,97.470,2.530,44.18,224,0.875,bilinear,+5.553,+1.744,+6 -gcresnet50t,86.474,13.526,97.141,2.859,25.90,256,0.900,bicubic,+5.540,+1.687,+4 -ecaresnet50d,86.472,13.528,97.184,2.816,25.58,224,0.875,bicubic,+5.874,+1.866,+20 -gluon_resnet152_v1s,86.462,13.538,97.109,2.891,60.32,224,0.875,bicubic,+5.448,+1.695,-4 -haloregnetz_b,86.462,13.538,96.943,3.057,11.68,224,0.940,bicubic,+5.418,+1.745,-8 -mobilevitv2_200,86.455,13.545,96.970,3.030,18.45,256,0.888,bicubic,+5.315,+1.602,-15 -resnetv2_50x1_bitm,86.442,13.558,97.600,2.400,25.55,448,1.000,bilinear,+6.100,+1.914,+36 -resnest50d_1s4x24d,86.440,13.560,97.152,2.848,25.68,224,0.875,bicubic,+5.456,+1.828,-7 -repvgg_b3g4,86.368,13.632,97.054,2.946,83.83,224,0.875,bilinear,+6.152,+1.946,+47 -darknetaa53,86.361,13.639,97.165,2.835,36.02,288,1.000,bilinear,+5.839,+1.839,+18 -darknet53,86.359,13.641,97.113,2.887,41.61,288,1.000,bicubic,+5.821,+1.693,+15 -lamhalobotnet50ts_256,86.357,13.643,97.062,2.938,22.57,256,0.950,bicubic,+4.805,+1.558,-52 -legacy_senet154,86.340,13.660,96.925,3.075,115.09,224,0.875,bilinear,+5.032,+1.429,-33 -cait_xxs36_224,86.338,13.662,97.111,2.889,17.30,224,1.000,bicubic,+6.590,+2.243,+67 -resnext50_32x4d,86.329,13.671,96.964,3.036,25.03,224,0.950,bicubic,+5.233,+1.638,-20 -pit_s_224,86.325,13.675,97.049,2.951,23.46,224,0.900,bicubic,+5.227,+1.717,-22 -vit_small_patch32_384,86.316,13.684,97.419,2.581,22.92,384,1.000,bicubic,+5.826,+1.819,+13 -gernet_m,86.316,13.684,97.098,2.902,21.14,224,0.875,bilinear,+5.586,+1.912,-1 -mobilevitv2_175,86.316,13.684,96.990,3.010,14.25,256,0.888,bicubic,+5.454,+1.728,-7 -efficientnet_b2,86.310,13.690,96.987,3.013,9.11,288,1.000,bicubic,+5.694,+1.671,+1 -gluon_senet154,86.278,13.722,96.945,3.055,115.09,224,0.875,bicubic,+5.048,+1.599,-37 -resnest50d,86.240,13.761,97.071,2.929,27.48,224,0.875,bilinear,+5.266,+1.691,-20 -convmixer_768_32,86.225,13.775,97.034,2.966,21.11,224,0.960,bicubic,+6.061,+1.962,+37 -ecaresnet101d_pruned,86.210,13.790,97.338,2.662,24.88,224,0.875,bicubic,+5.400,+1.710,-10 -efficientnet_el_pruned,86.195,13.805,97.022,2.978,10.59,300,0.904,bicubic,+5.897,+1.808,+24 -cspdarknet53,86.184,13.816,97.013,2.987,27.64,256,0.887,bilinear,+6.128,+1.927,+40 -inception_v4,86.167,13.833,96.915,3.085,42.68,299,0.875,bicubic,+5.999,+1.951,+31 -rexnet_150,86.156,13.844,97.060,2.940,9.73,224,0.875,bicubic,+5.842,+1.894,+19 -inception_resnet_v2,86.137,13.863,97.043,2.957,55.84,299,0.897,bicubic,+5.677,+1.737,+4 -xcit_tiny_12_p8_224,86.114,13.886,97.086,2.914,6.71,224,1.000,bicubic,+6.420,+2.038,+54 -tf_efficientnet_el,86.086,13.914,96.964,3.036,10.59,300,0.904,bicubic,+5.832,+1.836,+21 -ssl_resnext50_32x4d,86.084,13.916,97.212,2.788,25.03,224,0.875,bilinear,+5.758,+1.800,+13 -cspresnext50,86.073,13.927,97.103,2.897,20.57,256,0.887,bilinear,+5.529,+1.779,-8 -mobilevitv2_150,86.073,13.927,96.853,3.147,10.59,256,0.888,bicubic,+5.705,+1.789,+7 -ecaresnetlight,86.054,13.946,97.071,2.929,30.16,224,0.875,bicubic,+5.598,+1.825,-1 -gluon_resnet101_v1s,86.054,13.946,97.024,2.976,44.67,224,0.875,bicubic,+5.756,+1.862,+14 
-edgenext_small_rw,86.049,13.950,96.925,3.075,7.83,320,1.000,bicubic,+5.597,+1.735,-2 -lambda_resnet50ts,86.039,13.961,96.746,3.254,21.54,256,0.950,bicubic,+4.887,+1.644,-48 -poolformer_s24,86.037,13.963,97.030,2.970,21.39,224,0.900,bicubic,+5.721,+1.988,+7 -gluon_seresnext101_32x4d,86.032,13.968,96.977,3.023,48.96,224,0.875,bicubic,+5.126,+1.681,-32 -resnetv2_50,86.015,13.985,96.902,3.098,25.55,224,0.950,bicubic,+5.603,+1.830,-3 -seresnet33ts,86.009,13.991,97.011,2.989,19.78,256,0.900,bicubic,+5.655,+1.905,0 -gcresnext50ts,86.009,13.991,96.966,3.034,15.67,256,0.900,bicubic,+5.431,+1.796,-19 -resnet50d,86.002,13.998,96.987,3.013,25.58,224,0.875,bicubic,+5.474,+1.819,-17 -ecaresnet26t,85.985,14.015,97.037,2.963,16.01,320,0.950,bicubic,+6.133,+1.953,+31 -tf_efficientnet_b2_ap,85.973,14.027,96.808,3.192,9.11,260,0.890,bicubic,+5.671,+1.780,+2 -vit_base_patch32_224,85.958,14.042,97.130,2.869,88.22,224,0.900,bicubic,+5.234,+1.564,-29 -gluon_seresnext101_64x4d,85.958,14.042,96.981,3.019,88.23,224,0.875,bicubic,+5.078,+1.685,-38 -fbnetv3_d,85.924,14.076,97.028,2.972,10.31,256,0.950,bilinear,+6.244,+2.088,+38 -vit_large_patch32_384,85.911,14.089,97.368,2.632,306.63,384,1.000,bicubic,+4.403,+1.278,-85 -tf_efficientnet_b2,85.909,14.091,96.862,3.139,9.11,260,0.890,bicubic,+5.821,+1.954,+11 -gluon_resnet152_v1d,85.906,14.094,96.806,3.194,60.21,224,0.875,bicubic,+5.430,+1.606,-20 -tf_efficientnetv2_b2,85.902,14.098,96.885,3.115,10.10,260,0.890,bicubic,+5.694,+1.841,+4 -resnet50_gn,85.881,14.119,96.849,3.151,25.56,224,0.940,bicubic,+5.821,+1.901,+11 -vit_base_patch16_224_sam,85.879,14.121,96.695,3.305,86.57,224,0.900,bicubic,+5.635,+1.941,-1 -seresnet50,85.853,14.147,97.007,2.993,28.09,224,0.875,bicubic,+5.587,+1.937,-5 -gluon_resnet101_v1d,85.851,14.149,96.663,3.337,44.57,224,0.875,bicubic,+5.433,+1.649,-20 -repvgg_b2g4,85.847,14.153,96.812,3.188,61.76,224,0.875,bilinear,+6.481,+2.124,+44 -gcresnet33ts,85.804,14.196,96.902,3.098,19.88,256,0.900,bicubic,+5.728,+1.908,+5 -mixnet_xl,85.798,14.202,96.710,3.290,11.90,224,0.875,bicubic,+5.320,+1.776,-29 -ens_adv_inception_resnet_v2,85.768,14.232,96.761,3.239,55.84,299,0.897,bicubic,+5.794,+1.819,+7 -tf_efficientnet_lite3,85.761,14.239,96.889,3.111,8.20,300,0.904,bilinear,+5.943,+1.975,+17 -legacy_seresnext101_32x4d,85.744,14.256,96.755,3.245,48.96,224,0.875,bilinear,+5.522,+1.741,-8 -ese_vovnet39b,85.742,14.258,96.894,3.107,24.57,224,0.875,bicubic,+6.430,+2.180,+42 -gluon_resnext101_32x4d,85.742,14.258,96.635,3.365,44.18,224,0.875,bicubic,+5.402,+1.709,-21 -eca_resnet33ts,85.740,14.260,96.902,3.098,19.68,256,0.900,bicubic,+5.660,+1.930,-3 -xcit_tiny_24_p16_224,85.736,14.264,96.938,3.062,12.12,224,1.000,bicubic,+6.292,+2.050,+32 -cspresnet50,85.727,14.273,96.799,3.200,21.62,256,0.887,bilinear,+6.145,+2.091,+24 -regnety_320,85.719,14.281,96.723,3.277,145.05,224,0.875,bicubic,+4.915,+1.479,-54 -resnet50,85.719,14.281,96.492,3.508,25.56,224,0.950,bicubic,+5.345,+1.878,-30 -gluon_resnext101_64x4d,85.693,14.307,96.644,3.356,83.46,224,0.875,bicubic,+5.089,+1.652,-49 -resmlp_big_24_224,85.693,14.307,96.424,3.576,129.14,224,0.875,bicubic,+4.663,+1.404,-74 -xception71,85.691,14.309,96.774,3.226,42.34,299,0.903,bicubic,+5.821,+1.850,0 -efficientnet_em,85.686,14.313,96.936,3.064,6.90,240,0.882,bicubic,+6.434,+2.144,+41 -deit_small_patch16_224,85.678,14.322,96.904,3.096,22.05,224,0.900,bicubic,+5.814,+1.856,-1 -pit_xs_distilled_224,85.659,14.341,96.665,3.335,11.00,224,0.900,bicubic,+6.351,+2.299,+31 
-dpn107,85.650,14.350,96.725,3.275,86.92,224,0.875,bicubic,+5.482,+1.819,-18 -efficientnet_b2_pruned,85.640,14.360,96.746,3.254,8.31,260,0.890,bicubic,+5.722,+1.896,-9 -resmlp_36_224,85.625,14.375,96.795,3.205,44.69,224,0.875,bicubic,+5.855,+1.909,+1 -mobilevitv2_125,85.584,14.416,96.665,3.335,7.48,256,0.888,bicubic,+5.902,+1.817,+5 -gluon_resnet152_v1c,85.582,14.418,96.646,3.354,60.21,224,0.875,bicubic,+5.670,+1.804,-11 -levit_192,85.578,14.422,96.744,3.256,10.95,224,0.900,bicubic,+5.742,+1.954,-5 -ecaresnet50d_pruned,85.576,14.425,96.932,3.068,19.94,224,0.875,bicubic,+5.858,+2.056,0 -resnext50d_32x4d,85.571,14.429,96.748,3.252,25.05,224,0.875,bicubic,+5.895,+1.882,+4 -tf_efficientnetv2_b1,85.561,14.439,96.727,3.273,8.14,240,0.882,bicubic,+6.095,+2.005,+13 -regnety_120,85.543,14.457,96.785,3.215,51.82,224,0.875,bicubic,+5.167,+1.663,-47 -regnetx_320,85.522,14.478,96.669,3.331,107.81,224,0.875,bicubic,+5.278,+1.649,-34 -fbnetv3_b,85.514,14.486,96.862,3.139,8.60,256,0.950,bilinear,+6.372,+2.112,+33 -nf_regnet_b1,85.514,14.486,96.795,3.205,10.22,288,0.900,bicubic,+6.214,+2.041,+20 -dpn92,85.501,14.499,96.631,3.369,37.67,224,0.875,bicubic,+5.481,+1.801,-23 -rexnet_130,85.475,14.525,96.686,3.314,7.56,224,0.875,bicubic,+5.973,+2.004,+3 -gluon_resnet152_v1b,85.465,14.536,96.556,3.444,60.19,224,0.875,bicubic,+5.783,+1.820,-6 -resnetrs50,85.462,14.538,96.738,3.262,35.69,224,0.910,bicubic,+5.576,+1.768,-21 -dpn131,85.400,14.600,96.631,3.369,79.25,224,0.875,bicubic,+5.574,+1.923,-16 -regnetx_160,85.390,14.610,96.637,3.363,54.28,224,0.875,bicubic,+5.536,+1.807,-20 -dla102x2,85.377,14.623,96.629,3.371,41.28,224,0.875,bilinear,+5.935,+1.983,+4 -gmlp_s16_224,85.351,14.649,96.646,3.354,19.42,224,0.875,bicubic,+5.711,+2.022,-7 -gluon_seresnext50_32x4d,85.334,14.666,96.671,3.329,27.56,224,0.875,bicubic,+5.422,+1.839,-27 -botnet26t_256,85.332,14.668,96.631,3.369,12.49,256,0.950,bicubic,+6.074,+2.103,+15 -skresnext50_32x4d,85.317,14.683,96.394,3.606,27.48,224,0.875,bicubic,+5.163,+1.748,-39 -gluon_resnet101_v1c,85.311,14.689,96.407,3.593,44.57,224,0.875,bicubic,+5.775,+1.829,-8 -dpn98,85.304,14.696,96.466,3.534,61.57,224,0.875,bicubic,+5.660,+1.866,-13 -lambda_resnet26t,85.302,14.698,96.727,3.273,10.96,256,0.940,bicubic,+6.204,+2.137,+20 -resnetblur50,85.291,14.709,96.520,3.480,25.56,224,0.875,bicubic,+5.997,+1.886,+7 -dpn68b,85.291,14.709,96.464,3.536,12.61,224,0.875,bicubic,+6.075,+2.050,+12 -resmlp_24_224,85.264,14.736,96.496,3.504,30.02,224,0.875,bicubic,+5.886,+1.950,-5 -coat_lite_mini,85.255,14.745,96.680,3.320,11.01,224,0.900,bicubic,+6.167,+2.072,+17 -cait_xxs24_224,85.225,14.775,96.716,3.284,11.96,224,1.000,bicubic,+6.839,+2.408,+50 -resnet33ts,85.225,14.775,96.627,3.373,19.68,256,0.900,bicubic,+6.017,+2.053,+9 -xcit_tiny_12_p16_224_dist,85.215,14.785,96.599,3.401,6.72,224,1.000,bicubic,+6.637,+2.401,+35 -halonet26t,85.202,14.798,96.464,3.536,12.48,256,0.950,bicubic,+6.090,+2.150,+11 -resnext101_32x8d,85.195,14.805,96.451,3.549,88.79,224,0.875,bilinear,+5.879,+1.933,-8 -gluon_inception_v3,85.180,14.819,96.526,3.474,23.83,299,0.875,bicubic,+6.374,+2.156,+24 -resnet32ts,85.168,14.832,96.622,3.378,17.96,256,0.900,bicubic,+6.154,+2.266,+15 -gluon_xception65,85.155,14.845,96.597,3.403,39.92,299,0.903,bicubic,+5.433,+1.737,-33 -hrnet_w48,85.148,14.851,96.492,3.508,77.47,224,0.875,bilinear,+5.848,+1.978,-6 -gluon_resnet101_v1b,85.142,14.858,96.368,3.632,44.55,224,0.875,bicubic,+5.838,+1.848,-9 -eca_halonext26ts,85.127,14.873,96.586,3.414,10.76,256,0.940,bicubic,+5.639,+1.982,-23 
-regnetx_120,85.127,14.873,96.473,3.527,46.11,224,0.875,bicubic,+5.535,+1.739,-28 -eca_botnext26ts_256,85.125,14.875,96.507,3.493,10.59,256,0.950,bicubic,+5.849,+1.891,-8 -tf_efficientnet_b1_ap,85.125,14.875,96.407,3.593,7.79,240,0.882,bicubic,+5.851,+2.099,-8 -xception,85.123,14.877,96.471,3.529,22.86,299,0.897,bicubic,+6.079,+2.077,+6 -hrnet_w64,85.114,14.886,96.746,3.254,128.06,224,0.875,bilinear,+5.644,+2.092,-26 -lambda_resnet26rpt_256,85.095,14.905,96.560,3.440,10.99,256,0.940,bicubic,+6.131,+2.134,+7 -res2net101_26w_4s,85.095,14.905,96.383,3.617,45.21,224,0.875,bilinear,+5.899,+1.947,-5 -ssl_resnet50,85.091,14.909,96.862,3.139,25.56,224,0.875,bilinear,+5.867,+2.032,-10 -tf_efficientnet_cc_b1_8e,85.065,14.935,96.422,3.578,39.72,240,0.882,bicubic,+5.751,+2.052,-22 -xcit_nano_12_p8_384_dist,85.025,14.975,96.631,3.369,3.05,384,1.000,bicubic,+7.209,+2.585,+62 -resnest26d,85.010,14.990,96.637,3.363,17.07,224,0.875,bilinear,+6.526,+2.343,+21 -gluon_resnext50_32x4d,85.008,14.992,96.428,3.572,25.03,224,0.875,bicubic,+5.648,+2.002,-27 -tf_efficientnet_b0_ns,84.997,15.003,96.505,3.495,5.29,224,0.875,bicubic,+6.333,+2.129,+12 -coat_tiny,84.980,15.020,96.409,3.591,5.50,224,0.900,bicubic,+6.544,+2.371,+23 -dla169,84.922,15.078,96.535,3.465,53.39,224,0.875,bilinear,+6.240,+2.199,+9 -tf_efficientnet_b1,84.914,15.086,96.362,3.638,7.79,240,0.882,bicubic,+6.086,+2.164,+2 -mobilevitv2_100,84.905,15.095,96.390,3.610,4.90,256,0.888,bicubic,+6.819,+2.230,+39 -legacy_seresnext50_32x4d,84.899,15.101,96.428,3.572,27.56,224,0.875,bilinear,+5.823,+1.994,-11 -hrnet_w44,84.886,15.114,96.437,3.563,67.06,224,0.875,bilinear,+5.990,+2.067,-3 -regnetx_080,84.867,15.133,96.428,3.572,39.57,224,0.875,bicubic,+5.665,+1.876,-19 -gluon_resnet50_v1s,84.858,15.142,96.441,3.559,25.68,224,0.875,bicubic,+6.152,+2.203,+2 -res2net50_26w_8s,84.847,15.153,96.355,3.645,48.40,224,0.875,bilinear,+5.895,+2.049,-8 -levit_128,84.839,15.161,96.353,3.647,9.21,224,0.900,bicubic,+6.357,+2.341,+10 -vit_tiny_patch16_384,84.832,15.168,96.712,3.288,5.79,384,1.000,bicubic,+6.402,+2.168,+14 -gluon_resnet50_v1d,84.830,15.170,96.398,3.602,25.58,224,0.875,bicubic,+5.760,+1.932,-16 -dla60_res2next,84.826,15.174,96.411,3.589,17.03,224,0.875,bilinear,+6.370,+2.265,+9 -mixnet_l,84.824,15.176,96.328,3.672,7.33,224,0.875,bicubic,+5.848,+2.150,-15 -tv_resnet152,84.818,15.182,96.221,3.779,60.19,224,0.875,bilinear,+6.498,+2.187,+16 -dla102x,84.807,15.193,96.548,3.452,26.31,224,0.875,bilinear,+6.295,+2.320,+1 -dla60_res2net,84.803,15.197,96.479,3.521,20.85,224,0.875,bilinear,+6.345,+2.283,+4 -pit_xs_224,84.794,15.206,96.494,3.506,10.62,224,0.900,bicubic,+6.604,+2.328,+20 -xception41,84.792,15.208,96.417,3.583,26.97,299,0.903,bicubic,+6.276,+2.137,-3 -regnetx_064,84.779,15.221,96.492,3.508,26.21,224,0.875,bicubic,+5.705,+2.032,-25 -hrnet_w40,84.741,15.259,96.554,3.446,57.56,224,0.875,bilinear,+5.819,+2.084,-19 -res2net50_26w_6s,84.726,15.274,96.281,3.719,37.05,224,0.875,bilinear,+6.156,+2.157,-7 -repvgg_b2,84.722,15.278,96.469,3.531,89.02,224,0.875,bilinear,+5.928,+2.051,-16 -resmlp_12_distilled_224,84.715,15.285,96.221,3.779,15.35,224,0.875,bicubic,+6.769,+2.661,+27 -legacy_seresnet152,84.702,15.298,96.415,3.585,66.82,224,0.875,bilinear,+6.050,+2.045,-12 -cs3darknet_m,84.692,15.308,96.492,3.508,9.31,288,0.950,bicubic,+7.066,+2.478,+39 -hrnet_w32,84.655,15.345,96.411,3.589,41.23,224,0.875,bilinear,+6.203,+2.223,-4 -selecsls60b,84.651,15.349,96.304,3.696,32.77,224,0.875,bicubic,+6.247,+2.132,-2 
-bat_resnext26ts,84.636,15.364,96.268,3.732,10.73,256,0.900,bicubic,+6.388,+2.172,+5 -tf_efficientnetv2_b0,84.617,15.383,96.274,3.726,7.14,224,0.875,bicubic,+6.265,+2.248,0 -regnetx_040,84.604,15.396,96.379,3.621,22.12,224,0.875,bicubic,+6.116,+2.141,-13 -efficientnet_b1,84.604,15.396,96.336,3.664,7.79,256,1.000,bicubic,+5.816,+1.990,-24 -vit_relpos_base_patch32_plus_rpn_256,84.593,15.407,96.010,3.990,119.42,256,0.900,bicubic,+5.107,+1.870,-68 -efficientnet_es,84.581,15.419,96.317,3.683,5.44,224,0.875,bicubic,+6.523,+2.373,+10 -hrnet_w30,84.576,15.424,96.383,3.617,37.71,224,0.875,bilinear,+6.378,+2.159,+2 -tf_mixnet_l,84.564,15.437,96.242,3.758,7.33,224,0.875,bicubic,+5.786,+2.244,-27 -wide_resnet101_2,84.549,15.451,96.353,3.647,126.89,224,0.875,bilinear,+5.697,+2.065,-33 -dla60x,84.521,15.479,96.289,3.711,17.35,224,0.875,bilinear,+6.293,+2.265,-2 -legacy_seresnet101,84.506,15.494,96.330,3.670,49.33,224,0.875,bilinear,+6.126,+2.068,-11 -cs3darknet_focus_m,84.482,15.518,96.422,3.578,9.30,288,0.950,bicubic,+7.200,+2.450,+42 -resnet26t,84.467,15.533,96.217,3.783,16.01,256,0.940,bicubic,+6.603,+2.375,+13 -coat_lite_tiny,84.459,15.541,96.370,3.630,5.72,224,0.900,bicubic,+6.943,+2.456,+30 -tf_efficientnet_em,84.448,15.552,96.183,3.817,6.90,240,0.882,bicubic,+6.322,+2.137,-2 -repvgg_b1,84.416,15.584,96.215,3.785,57.42,224,0.875,bilinear,+6.048,+2.121,-15 -efficientnet_b1_pruned,84.397,15.603,96.140,3.860,6.33,240,0.882,bicubic,+6.153,+2.306,-10 -res2net50_26w_4s,84.363,15.637,96.080,3.920,25.70,224,0.875,bilinear,+6.401,+2.228,+4 -hardcorenas_f,84.329,15.671,96.025,3.975,8.20,224,0.875,bilinear,+6.227,+2.223,-5 -res2net50_14w_8s,84.305,15.695,96.072,3.929,25.06,224,0.875,bilinear,+6.161,+2.220,-8 -selecsls60,84.297,15.703,96.101,3.899,30.67,224,0.875,bicubic,+6.313,+2.269,-1 -mobilevit_s,84.269,15.731,96.266,3.734,5.58,256,0.900,bicubic,+5.959,+2.114,-18 -regnetx_032,84.243,15.757,96.251,3.749,15.30,224,0.875,bicubic,+6.059,+2.163,-12 -res2next50,84.237,15.763,95.999,4.001,24.67,224,0.875,bilinear,+5.979,+2.111,-19 -gluon_resnet50_v1c,84.211,15.789,96.163,3.837,25.58,224,0.875,bicubic,+6.203,+2.173,-6 -dla102,84.190,15.810,96.208,3.792,33.27,224,0.875,bilinear,+6.162,+2.258,-8 -gcresnext26ts,84.171,15.829,96.084,3.916,10.48,256,0.900,bicubic,+6.357,+2.248,+5 -rexnet_100,84.168,15.832,96.255,3.745,4.80,224,0.875,bicubic,+6.308,+2.381,-1 -seresnext26ts,84.147,15.853,96.069,3.931,10.39,256,0.900,bicubic,+6.289,+2.279,-1 -tf_inception_v3,84.139,15.861,95.918,4.082,23.83,299,0.875,bicubic,+6.287,+2.278,0 -res2net50_48w_2s,84.128,15.872,95.965,4.035,25.29,224,0.875,bilinear,+6.604,+2.415,+12 -resnet34d,84.096,15.904,95.978,4.022,21.82,224,0.875,bicubic,+6.980,+2.596,+27 -xcit_tiny_12_p16_224,84.094,15.906,96.234,3.766,6.72,224,1.000,bicubic,+6.970,+2.522,+25 -tf_efficientnet_lite2,84.085,15.915,96.076,3.924,6.09,260,0.890,bicubic,+6.619,+2.318,+11 -poolformer_s12,84.036,15.964,96.163,3.837,11.92,224,0.900,bicubic,+6.798,+2.657,+21 -efficientnet_b0,84.032,15.968,95.958,4.042,5.29,224,0.875,bicubic,+6.332,+2.426,-2 -crossvit_9_dagger_240,84.015,15.985,96.084,3.916,8.78,240,0.875,bicubic,+7.037,+2.470,+27 -tf_efficientnet_cc_b0_8e,83.970,16.030,96.074,3.926,24.01,224,0.875,bicubic,+6.070,+2.416,-13 -hardcorenas_e,83.966,16.034,95.903,4.097,8.07,224,0.875,bilinear,+6.180,+2.199,-6 -gmixer_24_224,83.966,16.034,95.854,4.146,24.72,224,0.875,bicubic,+5.930,+2.184,-23 -regnety_016,83.957,16.043,96.005,3.995,11.20,224,0.875,bicubic,+6.101,+2.285,-12 
-tv_resnext50_32x4d,83.957,16.043,95.967,4.033,25.03,224,0.875,bilinear,+6.339,+2.267,-4 -gluon_resnet50_v1b,83.936,16.064,96.014,3.986,25.56,224,0.875,bicubic,+6.352,+2.294,-2 -densenet161,83.906,16.094,96.014,3.986,28.68,224,0.875,bicubic,+6.552,+2.378,+6 -adv_inception_v3,83.897,16.103,95.933,4.067,23.83,299,0.875,bicubic,+6.319,+2.195,-3 -mobilenetv2_120d,83.889,16.111,95.909,4.091,5.83,224,0.875,bicubic,+6.599,+2.409,+6 -seresnext26t_32x4d,83.874,16.126,95.935,4.065,16.81,224,0.875,bicubic,+5.906,+2.187,-26 -tv_resnet101,83.853,16.148,95.892,4.108,44.55,224,0.875,bilinear,+6.473,+2.348,+1 -tinynet_a,83.833,16.167,95.817,4.183,6.19,192,0.875,bicubic,+6.185,+2.281,-14 -inception_v3,83.763,16.237,95.877,4.123,23.83,299,0.875,bicubic,+6.325,+2.401,-3 -hardcorenas_d,83.759,16.241,95.736,4.264,7.50,224,0.875,bilinear,+6.329,+2.252,-3 -seresnext26d_32x4d,83.750,16.250,95.852,4.148,16.81,224,0.875,bicubic,+6.144,+2.246,-13 -xcit_nano_12_p8_224_dist,83.731,16.269,95.958,4.042,3.05,224,1.000,bicubic,+7.403,+2.864,+31 -dla60,83.720,16.280,95.926,4.074,22.04,224,0.875,bilinear,+6.698,+2.606,+9 -eca_resnext26ts,83.705,16.295,95.948,4.052,10.30,256,0.900,bicubic,+6.247,+2.380,-9 -repvgg_b1g4,83.697,16.303,96.025,3.975,39.97,224,0.875,bilinear,+6.109,+2.195,-16 -convmixer_1024_20_ks9_p14,83.686,16.314,95.894,4.106,24.38,224,0.960,bicubic,+6.744,+2.536,+10 -legacy_seresnet50,83.665,16.335,95.978,4.022,28.09,224,0.875,bilinear,+6.033,+2.228,-22 -tf_efficientnet_b0_ap,83.652,16.348,95.781,4.219,5.29,224,0.875,bicubic,+6.564,+2.523,+2 -tf_efficientnet_cc_b0_4e,83.639,16.361,95.743,4.257,13.31,224,0.875,bicubic,+6.329,+2.403,-9 -skresnet34,83.635,16.365,95.928,4.072,22.28,224,0.875,bicubic,+6.731,+2.608,+8 -resmlp_12_224,83.573,16.427,95.762,4.238,15.35,224,0.875,bicubic,+6.917,+2.582,+13 -mobilenetv3_large_100_miil,83.558,16.442,95.452,4.548,5.48,224,0.875,bilinear,+5.636,+2.532,-39 -densenet201,83.554,16.446,95.811,4.189,20.01,224,0.875,bicubic,+6.266,+2.331,-11 -mixnet_m,83.526,16.474,95.685,4.315,5.01,224,0.875,bicubic,+6.264,+2.263,-10 -legacy_seresnext26_32x4d,83.522,16.478,95.717,4.283,16.79,224,0.875,bicubic,+6.418,+2.401,-6 -gernet_s,83.517,16.483,95.796,4.204,8.17,224,0.875,bilinear,+6.601,+2.662,+1 -tf_efficientnet_b0,83.511,16.489,95.704,4.296,5.29,224,0.875,bicubic,+6.671,+2.486,+2 -hrnet_w18,83.502,16.498,95.909,4.091,21.30,224,0.875,bilinear,+6.742,+2.465,+4 -densenetblur121d,83.470,16.530,95.817,4.183,8.00,224,0.875,bicubic,+6.890,+2.629,+9 -resnext26ts,83.464,16.536,95.726,4.274,10.30,256,0.900,bicubic,+6.684,+2.594,+1 -selecsls42b,83.460,16.540,95.743,4.257,32.46,224,0.875,bicubic,+6.282,+2.351,-15 -hardcorenas_c,83.336,16.664,95.713,4.287,5.52,224,0.875,bilinear,+6.284,+2.553,-11 -tf_efficientnet_lite1,83.332,16.668,95.640,4.360,5.42,240,0.882,bicubic,+6.694,+2.416,+2 -regnetx_016,83.193,16.807,95.743,4.257,9.19,224,0.875,bicubic,+6.251,+2.319,-9 -dpn68,83.184,16.816,95.600,4.400,12.61,224,0.875,bicubic,+6.874,+2.622,+10 -mobilenetv2_140,83.180,16.820,95.687,4.313,6.11,224,0.875,bicubic,+6.668,+2.689,+5 -tf_efficientnet_es,83.176,16.824,95.585,4.415,5.44,224,0.875,bicubic,+6.578,+2.381,0 -tf_mixnet_m,83.176,16.824,95.459,4.541,5.01,224,0.875,bicubic,+6.230,+2.307,-14 -xcit_nano_12_p16_384_dist,83.174,16.826,95.751,4.249,3.05,384,1.000,bicubic,+7.718,+3.061,+22 -ese_vovnet19b_dw,83.109,16.890,95.775,4.225,6.54,224,0.875,bicubic,+6.315,+2.509,-10 -levit_128s,83.058,16.942,95.531,4.469,7.78,224,0.900,bicubic,+6.544,+2.661,-1 
-resnet26d,83.056,16.944,95.610,4.390,16.01,224,0.875,bicubic,+6.354,+2.458,-9 -repvgg_a2,83.001,16.999,95.593,4.407,28.21,224,0.875,bilinear,+6.541,+2.583,-1 -tv_resnet50,82.956,17.044,95.474,4.526,25.56,224,0.875,bilinear,+6.822,+2.606,+2 -hardcorenas_b,82.866,17.134,95.390,4.610,5.18,224,0.875,bilinear,+6.330,+2.636,-6 -densenet121,82.826,17.174,95.580,4.420,7.98,224,0.875,bicubic,+7.246,+2.932,+10 -mobilevitv2_075,82.806,17.194,95.572,4.428,2.87,256,0.888,bicubic,+7.198,+2.814,+8 -vit_tiny_r_s16_p8_384,82.687,17.313,95.849,4.151,6.36,384,1.000,bicubic,+6.735,+2.587,+1 -densenet169,82.683,17.317,95.597,4.402,14.15,224,0.875,bicubic,+6.779,+2.573,+2 -mixnet_s,82.527,17.473,95.356,4.644,4.13,224,0.875,bicubic,+6.531,+2.556,-3 -vit_small_patch32_224,82.514,17.486,95.664,4.336,22.88,224,0.900,bicubic,+6.524,+2.396,-3 -regnety_008,82.493,17.508,95.491,4.509,6.26,224,0.875,bicubic,+6.179,+2.421,-8 -efficientnet_lite0,82.371,17.629,95.284,4.716,4.65,224,0.875,bicubic,+6.903,+2.768,+6 -resnest14d,82.354,17.646,95.346,4.654,10.61,224,0.875,bilinear,+6.846,+2.822,+4 -hardcorenas_a,82.324,17.676,95.290,4.710,5.26,224,0.875,bilinear,+6.394,+2.780,-5 -efficientnet_es_pruned,82.292,17.708,95.303,4.697,5.44,224,0.875,bicubic,+7.292,+2.861,+15 -mobilenetv3_rw,82.266,17.734,95.234,4.766,5.48,224,0.875,bicubic,+6.632,+2.526,-3 -semnasnet_100,82.251,17.749,95.226,4.774,3.89,224,0.875,bicubic,+6.801,+2.626,+4 -mobilenetv3_large_100,82.170,17.830,95.196,4.804,5.48,224,0.875,bicubic,+6.394,+2.656,-7 -resnet34,82.144,17.855,95.128,4.872,21.80,224,0.875,bilinear,+7.032,+2.844,+7 -vit_tiny_patch16_224,82.076,17.924,95.482,4.518,5.72,224,0.900,bicubic,+6.612,+2.638,-1 -mobilenetv2_110d,82.070,17.930,95.079,4.921,4.52,224,0.875,bicubic,+7.034,+2.887,+7 -tf_mixnet_s,82.040,17.960,95.121,4.879,4.13,224,0.875,bicubic,+6.388,+2.495,-10 -repvgg_b0,82.006,17.994,95.098,4.902,15.82,224,0.875,bilinear,+6.852,+2.682,+1 -deit_tiny_distilled_patch16_224,81.993,18.007,95.138,4.862,5.91,224,0.900,bicubic,+7.481,+3.248,+17 -mixer_b16_224,81.987,18.014,94.449,5.551,59.88,224,0.875,bicubic,+5.377,+2.219,-30 -pit_ti_distilled_224,81.969,18.031,95.147,4.853,5.10,224,0.900,bicubic,+7.435,+3.051,+14 -hrnet_w18_small_v2,81.961,18.039,95.164,4.836,15.60,224,0.875,bilinear,+6.851,+2.748,0 -tf_efficientnet_lite0,81.959,18.041,95.162,4.838,4.65,224,0.875,bicubic,+7.127,+2.988,+5 -resnet26,81.957,18.043,95.252,4.748,16.00,224,0.875,bicubic,+6.657,+2.672,-7 -edgenext_x_small,81.897,18.103,95.032,4.968,2.34,256,0.900,bicubic,+7.033,+2.732,+2 -tinynet_b,81.871,18.129,94.878,5.122,3.73,188,0.875,bicubic,+6.897,+2.696,0 -tf_mobilenetv3_large_100,81.848,18.152,95.066,4.934,5.48,224,0.875,bilinear,+6.336,+2.460,-16 -tv_densenet121,81.722,18.278,95.034,4.966,7.98,224,0.875,bicubic,+6.982,+2.886,+2 -regnety_006,81.703,18.297,95.121,4.879,6.06,224,0.875,bicubic,+6.451,+2.589,-11 -dla34,81.660,18.340,94.876,5.124,15.74,224,0.875,bilinear,+7.036,+2.804,+3 -xcit_nano_12_p8_224,81.645,18.355,95.267,4.733,3.05,224,1.000,bicubic,+7.729,+3.099,+11 -crossvit_9_240,81.613,18.387,94.974,5.026,8.55,240,0.875,bicubic,+7.653,+3.010,+9 -mobilevit_xs,81.574,18.426,95.030,4.970,2.32,256,0.900,bicubic,+6.940,+2.684,-1 -fbnetc_100,81.559,18.441,94.959,5.041,5.57,224,0.875,bilinear,+6.443,+2.573,-14 -legacy_seresnet34,81.538,18.462,94.897,5.103,21.96,224,0.875,bilinear,+6.728,+2.771,-6 -gluon_resnet34_v1b,81.498,18.503,94.808,5.192,21.80,224,0.875,bicubic,+6.906,+2.820,-2 -regnetx_008,81.481,18.520,95.064,4.936,7.26,224,0.875,bicubic,+6.447,+2.724,-13 
-mnasnet_100,81.451,18.549,94.904,5.096,4.38,224,0.875,bicubic,+6.801,+2.790,-7 -vgg19_bn,81.442,18.558,94.767,5.233,143.68,224,0.875,bilinear,+7.228,+2.923,-2 -convit_tiny,81.126,18.874,95.047,4.953,5.71,224,0.875,bicubic,+8.012,+3.327,+10 -crossvit_tiny_240,81.096,18.904,94.985,5.015,7.01,240,0.875,bicubic,+7.758,+3.071,+6 -spnasnet_100,80.880,19.119,94.530,5.470,4.42,224,0.875,bilinear,+6.790,+2.714,-4 -ghostnet_100,80.703,19.297,94.291,5.709,5.18,224,0.875,bilinear,+6.723,+2.833,-3 -regnety_004,80.650,19.350,94.688,5.312,4.34,224,0.875,bicubic,+6.626,+2.932,-5 -skresnet18,80.639,19.361,94.376,5.624,11.96,224,0.875,bicubic,+7.605,+3.210,+6 -regnetx_006,80.633,19.367,94.526,5.474,6.20,224,0.875,bicubic,+6.777,+2.854,-3 -pit_ti_224,80.614,19.386,94.620,5.380,4.85,224,0.900,bicubic,+7.702,+3.214,+7 -swsl_resnet18,80.573,19.427,94.743,5.256,11.69,224,0.875,bilinear,+7.299,+3.007,+1 -vgg16_bn,80.556,19.444,94.592,5.408,138.37,224,0.875,bilinear,+7.206,+3.088,-3 -semnasnet_075,80.475,19.525,94.319,5.681,2.91,224,0.875,bicubic,+7.501,+3.185,+2 -resnet18d,80.392,19.608,94.246,5.754,11.71,224,0.875,bicubic,+8.134,+3.558,+10 -tv_resnet34,80.389,19.611,94.436,5.564,21.80,224,0.875,bilinear,+7.081,+3.012,-4 -mobilenetv2_100,80.236,19.764,94.193,5.807,3.50,224,0.875,bicubic,+7.280,+3.183,0 -xcit_nano_12_p16_224_dist,80.214,19.786,94.355,5.645,3.05,224,1.000,bicubic,+7.912,+3.493,+6 -vit_base_patch32_224_sam,80.208,19.792,93.821,6.179,88.22,224,0.900,bicubic,+6.516,+2.809,-11 -ssl_resnet18,80.099,19.901,94.590,5.410,11.69,224,0.875,bilinear,+7.495,+3.166,-1 -tf_mobilenetv3_large_075,80.093,19.907,94.184,5.816,3.99,224,0.875,bilinear,+6.653,+2.836,-12 -deit_tiny_patch16_224,80.018,19.982,94.447,5.553,5.72,224,0.900,bicubic,+7.844,+3.333,+5 -hrnet_w18_small,79.557,20.443,93.902,6.098,13.19,224,0.875,bilinear,+7.221,+3.222,0 -vgg19,79.476,20.524,93.870,6.130,143.67,224,0.875,bilinear,+7.110,+2.998,-3 -regnetx_004,79.429,20.571,93.853,6.147,5.16,224,0.875,bicubic,+7.033,+3.015,-5 -resnet14t,79.243,20.757,93.603,6.397,10.08,224,0.950,bilinear,+6.887,+3.263,-4 -tf_mobilenetv3_large_minimal_100,79.228,20.772,93.693,6.307,3.92,224,0.875,bilinear,+6.978,+3.073,-1 -legacy_seresnet18,79.155,20.845,93.781,6.219,11.78,224,0.875,bicubic,+7.415,+3.451,+3 -vgg16,79.034,20.966,93.646,6.354,138.36,224,0.875,bilinear,+7.444,+3.264,+4 -vgg13_bn,79.006,20.994,93.661,6.339,133.05,224,0.875,bilinear,+7.408,+3.285,+2 -vit_tiny_r_s16_p8_224,78.993,21.007,93.898,6.102,6.34,224,0.900,bicubic,+7.199,+3.080,-1 -lcnet_100,78.912,21.088,93.561,6.439,2.95,224,0.875,bicubic,+6.802,+3.183,-4 -edgenext_xx_small,78.698,21.302,93.503,6.497,1.33,256,0.900,bicubic,+7.592,+3.471,+2 -tinynet_c,78.436,21.564,93.140,6.860,2.46,184,0.875,bicubic,+7.208,+3.392,0 -gluon_resnet18_v1b,78.376,21.624,93.136,6.864,11.69,224,0.875,bicubic,+7.538,+3.374,+1 -mobilevitv2_050,78.124,21.876,93.573,6.426,1.37,256,0.888,bicubic,+7.984,+3.643,+3 +vit_base_patch16_clip_384.laion2b_ft_in1k,90.219,9.781,98.704,1.296,86.86,384,1.000,bicubic,+3.599,+0.694,+17 +deit3_huge_patch14_224_in21ft1k,90.215,9.785,98.638,1.362,632.13,224,1.000,bicubic,+3.031,+0.378,-1 +vit_base_patch16_clip_384.openai_ft_in12k_in1k,90.211,9.789,98.653,1.347,86.86,384,0.950,bicubic,+3.177,+0.473,+3 +tf_efficientnetv2_l.in21k_ft_in1k,90.207,9.793,98.717,1.283,118.52,480,1.000,bicubic,+3.401,+0.583,+8 +vit_large_patch16_384.augreg_in21k_ft_in1k,90.196,9.804,98.661,1.339,304.72,384,1.000,bicubic,+3.116,+0.361,-2 +cait_m48_448,90.196,9.804,98.484,1.516,356.46,448,1.000,bicubic,+3.712,+0.730,+17 
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,90.187,9.813,98.585,1.415,86.86,384,1.000,bicubic,+2.969,+0.550,-7 +vit_huge_patch14_clip_224.laion2b_ft_in1k,90.172,9.828,98.542,1.458,632.05,224,1.000,bicubic,+2.578,+0.322,-14 +volo_d3_448,90.168,9.832,98.550,1.450,86.63,448,1.000,bicubic,+3.674,+0.840,+12 +swinv2_large_window12to24_192to384_22kft1k,90.157,9.843,98.608,1.392,196.74,384,1.000,bicubic,+2.699,+0.356,-13 +beit_large_patch16_224.in22k_ft_in22k_in1k,90.151,9.849,98.723,1.277,304.43,224,0.900,bicubic,+2.675,+0.419,-16 +vit_large_patch14_clip_224.laion2b_ft_in1k,90.106,9.894,98.561,1.439,304.20,224,1.000,bicubic,+2.814,+0.315,-13 +tf_efficientnet_b7.ns_jft_in1k,90.100,9.900,98.614,1.386,66.35,600,0.949,bicubic,+3.260,+0.520,-2 +convnext_xlarge.fb_in22k_ft_in1k,90.066,9.934,98.619,1.381,350.20,288,1.000,bicubic,+2.728,+0.291,-16 +dm_nfnet_f6,90.046,9.954,98.546,1.454,438.36,576,0.956,bicubic,+3.902,+0.816,+19 +cait_m36_384,90.046,9.954,98.493,1.507,271.22,384,1.000,bicubic,+3.992,+0.763,+24 +swin_large_patch4_window12_384,90.027,9.973,98.657,1.343,196.74,384,1.000,bicubic,+2.879,+0.423,-15 +tf_efficientnetv2_m.in21k_ft_in1k,90.023,9.977,98.663,1.337,54.14,480,1.000,bicubic,+4.019,+0.721,+24 +deit3_large_patch16_224_in21ft1k,89.999,10.001,98.661,1.339,304.37,224,1.000,bicubic,+3.021,+0.423,-11 +swin_base_patch4_window12_384,89.995,10.005,98.695,1.304,87.90,384,1.000,bicubic,+3.563,+0.637,+5 +vit_base_patch16_384.augreg_in21k_ft_in1k,89.989,10.011,98.678,1.322,86.86,384,1.000,bicubic,+3.983,+0.678,+20 +maxvit_base_tf_512.in1k,89.980,10.020,98.435,1.565,119.88,512,1.000,bicubic,+3.382,+0.515,-3 +swinv2_large_window12to16_192to256_22kft1k,89.920,10.080,98.508,1.492,196.74,256,0.900,bicubic,+2.984,+0.400,-13 +convnext_small.fb_in22k_ft_in1k_384,89.916,10.084,98.680,1.319,50.22,384,1.000,bicubic,+4.138,+0.788,+29 +efficientnet_b5.in12k_ft_in1k,89.903,10.097,98.570,1.431,30.39,448,1.000,bicubic,+4.015,+0.838,+20 +xcit_large_24_p8_384_dist,89.886,10.114,98.384,1.616,188.93,384,1.000,bicubic,+3.886,+0.698,+17 +deit3_base_patch16_384_in21ft1k,89.884,10.116,98.602,1.399,86.88,384,1.000,bicubic,+3.140,+0.490,-10 +volo_d5_224,89.882,10.118,98.493,1.507,295.46,224,0.960,bicubic,+3.814,+0.915,+10 +swinv2_base_window12to16_192to256_22kft1k,89.876,10.124,98.657,1.343,87.92,256,0.900,bicubic,+3.602,+0.761,0 +convnext_large.fb_in22k_ft_in1k,89.871,10.129,98.597,1.403,197.77,288,1.000,bicubic,+2.855,+0.391,-23 +convnext_base.fb_in22k_ft_in1k,89.856,10.144,98.689,1.311,88.59,288,1.000,bicubic,+3.576,+0.599,-3 +cait_s36_384,89.844,10.156,98.427,1.573,68.37,384,1.000,bicubic,+4.384,+0.947,+34 +xcit_medium_24_p8_384_dist,89.811,10.188,98.362,1.638,84.32,384,1.000,bicubic,+3.995,+0.770,+15 +volo_d4_224,89.809,10.191,98.424,1.576,192.96,224,0.960,bicubic,+3.937,+0.956,+12 +swin_large_patch4_window7_224,89.796,10.204,98.640,1.360,196.53,224,0.900,bicubic,+3.477,+0.744,-9 +vit_large_r50_s32_384.augreg_in21k_ft_in1k,89.794,10.206,98.514,1.486,329.09,384,1.000,bicubic,+3.610,+0.596,-4 +maxvit_large_tf_512.in1k,89.794,10.206,98.328,1.672,212.33,512,1.000,bicubic,+3.276,+0.444,-16 +volo_d2_384,89.784,10.216,98.399,1.601,58.87,384,1.000,bicubic,+3.748,+0.827,+2 +tf_efficientnet_b6.ns_jft_in1k,89.782,10.218,98.510,1.490,43.04,528,0.942,bicubic,+3.330,+0.628,-15 +tf_efficientnetv2_xl.in21k_ft_in1k,89.773,10.227,98.294,1.706,208.12,512,1.000,bicubic,+3.025,+0.276,-24 +beitv2_base_patch16_224.in1k_ft_in22k_in1k,89.747,10.253,98.580,1.420,86.53,224,0.900,bicubic,+3.267,+0.532,-18 
+xcit_small_24_p8_384_dist,89.739,10.261,98.422,1.578,47.63,384,1.000,bicubic,+4.183,+0.850,+18 +vit_base_patch8_224.augreg2_in21k_ft_in1k,89.728,10.272,98.510,1.490,86.58,224,0.900,bicubic,+3.516,+0.678,-12 +vit_base_patch16_clip_384.openai_ft_in1k,89.709,10.291,98.510,1.490,86.86,384,1.000,bicubic,+3.503,+0.636,-12 +volo_d1_384,89.698,10.302,98.292,1.708,26.78,384,1.000,bicubic,+4.448,+1.096,+34 +deit3_large_patch16_384,89.679,10.321,98.392,1.608,304.76,384,1.000,bicubic,+3.869,+0.796,+4 +xcit_large_24_p16_384_dist,89.662,10.338,98.401,1.599,189.10,384,1.000,bicubic,+3.908,+0.863,+7 +tf_efficientnet_b5.ns_jft_in1k,89.651,10.349,98.482,1.518,30.39,456,0.934,bicubic,+3.563,+0.730,-11 +tf_efficientnet_b8.ap_in1k,89.581,10.419,98.305,1.695,87.41,672,0.954,bicubic,+4.211,+0.915,+25 +maxvit_base_tf_384.in1k,89.579,10.421,98.324,1.676,119.65,384,1.000,bicubic,+3.285,+0.520,-23 +maxvit_tiny_tf_512.in1k,89.564,10.436,98.335,1.665,31.05,512,1.000,bicubic,+3.902,+0.755,+7 +vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,89.559,10.441,98.414,1.586,86.57,224,0.950,bicubic,+3.389,+0.660,-18 +dm_nfnet_f4,89.557,10.443,98.303,1.697,316.07,512,0.951,bicubic,+3.843,+0.783,+3 +volo_d3_224,89.555,10.445,98.375,1.625,86.33,224,0.960,bicubic,+4.147,+1.095,+15 +maxvit_large_tf_384.in1k,89.555,10.445,98.187,1.813,212.03,384,1.000,bicubic,+3.319,+0.497,-24 +tf_efficientnetv2_l.in1k,89.549,10.451,98.339,1.661,118.52,480,1.000,bicubic,+3.879,+0.865,+1 +flexivit_large.1200ep_in1k,89.540,10.460,98.418,1.582,304.36,240,0.950,bicubic,+3.896,+0.876,+2 +xcit_small_12_p8_384_dist,89.517,10.483,98.305,1.695,26.21,384,1.000,bicubic,+4.429,+1.023,+33 +xcit_large_24_p8_224_dist,89.517,10.483,98.224,1.776,188.93,224,1.000,bicubic,+4.121,+0.814,+13 +flexivit_large.600ep_in1k,89.510,10.490,98.394,1.606,304.36,240,0.950,bicubic,+3.972,+0.902,+1 +cait_s24_384,89.502,10.498,98.362,1.638,47.06,384,1.000,bicubic,+4.456,+1.016,+35 +dm_nfnet_f3,89.485,10.515,98.399,1.601,254.92,416,0.940,bicubic,+3.963,+0.937,+2 +xcit_medium_24_p16_384_dist,89.468,10.532,98.296,1.704,84.40,384,1.000,bicubic,+4.056,+0.890,+6 +dm_nfnet_f5,89.461,10.539,98.324,1.676,377.21,544,0.954,bicubic,+3.647,+0.836,-15 +deit3_base_patch16_224_in21ft1k,89.457,10.543,98.557,1.443,86.59,224,1.000,bicubic,+3.743,+0.813,-10 +maxvit_small_tf_512.in1k,89.451,10.549,98.350,1.650,69.13,512,1.000,bicubic,+3.363,+0.592,-30 +vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,89.448,10.552,98.401,1.599,88.34,448,1.000,bicubic,+3.664,+0.767,-15 +vit_base_patch16_224.augreg2_in21k_ft_in1k,89.446,10.554,98.439,1.561,86.57,224,0.900,bicubic,+4.340,+1.059,+20 +deit_base_distilled_patch16_384,89.429,10.571,98.441,1.559,87.63,384,1.000,bicubic,+4.007,+1.109,-1 +tf_efficientnet_b7.ap_in1k,89.429,10.571,98.347,1.653,66.35,600,0.949,bicubic,+4.309,+1.096,+17 +vit_base_patch8_224.augreg_in21k_ft_in1k,89.427,10.573,98.486,1.514,86.58,224,0.900,bicubic,+3.631,+0.696,-20 +vit_base_patch16_clip_224.laion2b_ft_in1k,89.427,10.573,98.473,1.527,86.57,224,1.000,bicubic,+3.959,+0.897,-7 +vit_base_patch16_clip_224.openai_ft_in12k_in1k,89.410,10.590,98.401,1.599,86.57,224,0.950,bicubic,+3.480,+0.677,-29 +beit_base_patch16_224.in22k_ft_in22k_in1k,89.401,10.598,98.525,1.475,86.53,224,0.900,bicubic,+4.165,+0.869,+7 +regnetz_e8,89.382,10.618,98.459,1.542,57.70,320,1.000,bicubic,+4.352,+1.195,+22 +deit3_small_patch16_384_in21ft1k,89.367,10.633,98.384,1.616,22.21,384,1.000,bicubic,+4.543,+0.898,+32 +tf_efficientnetv2_m.in1k,89.355,10.645,98.330,1.670,54.14,480,1.000,bicubic,+4.147,+0.962,+5 
+tf_efficientnet_b8.ra_in1k,89.355,10.645,98.303,1.697,87.41,672,0.954,bicubic,+3.985,+1.009,-4 +vit_medium_patch16_gap_384.in12k_ft_in1k,89.342,10.658,98.493,1.507,39.03,384,0.950,bicubic,+3.806,+0.859,-18 +tf_efficientnet_b6.ap_in1k,89.342,10.658,98.281,1.719,43.04,528,0.942,bicubic,+4.554,+1.143,+31 +volo_d2_224,89.327,10.673,98.209,1.791,58.68,224,0.960,bicubic,+4.131,+1.021,+4 +vit_large_patch16_224.augreg_in21k_ft_in1k,89.314,10.686,98.392,1.608,304.33,224,0.900,bicubic,+3.472,+0.568,-35 +convnext_small.fb_in22k_ft_in1k,89.305,10.694,98.360,1.640,50.22,288,1.000,bicubic,+4.043,+0.676,-6 +tf_efficientnet_b4.ns_jft_in1k,89.305,10.694,98.347,1.653,19.34,380,0.922,bicubic,+4.143,+0.877,+2 +flexivit_large.300ep_in1k,89.303,10.697,98.324,1.676,304.36,240,0.950,bicubic,+4.023,+0.884,-9 +xcit_small_24_p16_384_dist,89.299,10.701,98.330,1.670,47.67,384,1.000,bicubic,+4.201,+1.020,+4 +xcit_medium_24_p8_224_dist,89.293,10.707,98.187,1.813,84.32,224,1.000,bicubic,+4.221,+0.933,+6 +deit3_huge_patch14_224,89.214,10.786,98.166,1.834,632.13,224,0.900,bicubic,+4.010,+0.808,-4 +vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,89.201,10.799,98.356,1.644,88.30,384,1.000,bicubic,+3.829,+0.692,-17 +xcit_small_24_p8_224_dist,89.201,10.799,98.243,1.757,47.63,224,1.000,bicubic,+4.325,+1.055,+15 +xcit_small_12_p16_384_dist,89.197,10.803,98.219,1.781,26.25,384,1.000,bicubic,+4.491,+1.101,+22 +vit_base_patch16_clip_224.openai_ft_in1k,89.154,10.846,98.273,1.727,86.57,224,0.900,bicubic,+3.874,+0.867,-17 +swin_base_patch4_window7_224,89.145,10.855,98.429,1.571,87.77,224,0.900,bicubic,+3.893,+0.867,-15 +eca_nfnet_l2,89.141,10.859,98.315,1.685,56.72,384,1.000,bicubic,+4.443,+1.051,+20 +cait_xs24_384,89.139,10.861,98.290,1.710,26.67,384,1.000,bicubic,+5.077,+1.402,+65 +maxvit_tiny_tf_384.in1k,89.120,10.880,98.211,1.789,30.98,384,1.000,bicubic,+4.014,+0.677,-8 +ig_resnext101_32x48d,89.120,10.880,98.130,1.870,828.41,224,0.875,bilinear,+3.692,+0.558,-29 +ig_resnext101_32x32d,89.111,10.889,98.181,1.819,468.53,224,0.875,bilinear,+4.017,+0.743,-7 +maxvit_small_tf_384.in1k,89.103,10.897,98.164,1.836,69.02,384,1.000,bicubic,+3.569,+0.700,-36 +tf_efficientnet_b7.ra_in1k,89.086,10.914,98.183,1.817,66.35,600,0.949,bicubic,+4.150,+0.979,+1 +ecaresnet269d,89.069,10.931,98.234,1.766,102.09,352,1.000,bicubic,+4.093,+1.008,-2 +vit_base_patch32_clip_384.openai_ft_in12k_in1k,89.051,10.949,98.285,1.714,88.30,384,0.950,bicubic,+3.839,+0.883,-20 +xcit_large_24_p16_224_dist,89.041,10.959,98.064,1.937,189.10,224,1.000,bicubic,+4.123,+0.931,0 +resmlp_big_24_224_in22ft1k,89.011,10.989,98.215,1.785,129.14,224,0.875,bicubic,+4.617,+1.333,+33 +dm_nfnet_f2,89.009,10.991,98.189,1.810,193.78,352,0.920,bicubic,+3.945,+0.950,-10 +xcit_small_12_p8_224_dist,89.002,10.998,98.078,1.922,26.21,224,1.000,bicubic,+4.770,+1.300,+43 +efficientnetv2_rw_m.agc_in1k,88.987,11.013,98.213,1.787,53.24,416,1.000,bicubic,+4.179,+1.065,+2 +convnext_large.fb_in1k,88.985,11.015,98.040,1.960,197.77,288,1.000,bicubic,+4.139,+0.828,-1 +regnetz_040h,88.951,11.049,98.202,1.798,28.94,320,1.000,bicubic,+4.457,+1.196,+17 +mvitv2_large,88.945,11.055,97.967,2.033,217.99,224,0.900,bicubic,+3.695,+0.753,-30 +tf_efficientnet_b5.ap_in1k,88.938,11.062,98.164,1.836,30.39,456,0.934,bicubic,+4.686,+1.190,+36 +dm_nfnet_f1,88.925,11.075,98.115,1.885,132.63,320,0.910,bicubic,+4.299,+1.015,+5 +deit3_medium_patch16_224_in21ft1k,88.921,11.079,98.300,1.700,38.85,224,1.000,bicubic,+4.361,+1.112,+7 +deit3_base_patch16_384,88.921,11.079,98.046,1.954,86.88,384,1.000,bicubic,+3.849,+0.768,-19 
+volo_d1_224,88.908,11.092,98.031,1.968,26.63,224,0.960,bicubic,+4.744,+1.255,+41 +tf_efficientnetv2_s.in21k_ft_in1k,88.904,11.096,98.277,1.723,21.46,384,1.000,bicubic,+4.602,+1.025,+26 +vit_base_patch16_224.augreg_in21k_ft_in1k,88.866,11.134,98.230,1.770,86.57,224,0.900,bicubic,+4.334,+0.936,+5 +regnetz_d8,88.855,11.145,98.189,1.810,23.37,320,1.000,bicubic,+4.805,+1.191,+43 +convnext_tiny.fb_in22k_ft_in1k_384,88.846,11.154,98.298,1.702,28.59,384,1.000,bicubic,+4.766,+1.156,+40 +regnetz_d8_evos,88.842,11.158,98.132,1.868,23.46,320,0.950,bicubic,+4.792,+1.138,+42 +resnetrs420,88.840,11.160,98.034,1.966,191.89,416,1.000,bicubic,+3.832,+0.910,-23 +resnetrs270,88.834,11.166,98.136,1.864,129.86,352,1.000,bicubic,+4.400,+1.166,+10 +ig_resnext101_32x16d,88.834,11.166,98.049,1.951,194.03,224,0.875,bilinear,+4.664,+0.853,+33 +vit_small_r26_s32_384.augreg_in21k_ft_in1k,88.819,11.181,98.337,1.663,36.47,384,1.000,bicubic,+4.773,+1.009,+40 +vit_base_r50_s16_384.orig_in21k_ft_in1k,88.808,11.192,98.232,1.768,98.95,384,1.000,bicubic,+3.836,+0.944,-25 +xcit_medium_24_p16_224_dist,88.797,11.203,98.036,1.964,84.40,224,1.000,bicubic,+4.523,+1.096,+18 +seresnet152d,88.795,11.205,98.172,1.828,66.84,320,1.000,bicubic,+4.433,+1.132,+14 +maxxvit_rmlp_small_rw_256,88.791,11.209,98.061,1.939,66.01,256,0.950,bicubic,+4.163,+0.999,-12 +xcit_tiny_24_p8_384_dist,88.774,11.226,98.160,1.840,12.11,384,1.000,bicubic,+5.034,+1.526,+58 +swsl_resnext101_32x8d,88.770,11.230,98.147,1.853,88.79,224,0.875,bilinear,+4.486,+0.971,+13 +resnetrs200,88.763,11.237,98.113,1.887,93.21,320,1.000,bicubic,+4.315,+1.269,-3 +tf_efficientnet_b6.aa_in1k,88.761,11.239,98.064,1.937,43.04,528,0.942,bicubic,+4.651,+1.178,+25 +convnext_base.fb_in1k,88.761,11.239,97.931,2.069,88.59,288,1.000,bicubic,+4.327,+1.111,0 +resnetrs350,88.759,11.241,98.029,1.971,163.96,384,1.000,bicubic,+4.039,+1.041,-22 +deit3_large_patch16_224,88.755,11.245,97.914,2.086,304.37,224,0.900,bicubic,+3.991,+0.876,-24 +edgenext_base,88.744,11.256,98.145,1.855,18.51,320,1.000,bicubic,+4.784,+1.377,+33 +vit_base_patch16_224_miil.in21k_ft_in1k,88.737,11.262,98.027,1.973,86.54,224,0.875,bilinear,+4.469,+1.225,+8 +regnetz_040,88.731,11.269,98.091,1.909,27.12,320,1.000,bicubic,+4.495,+1.159,+10 +resnetv2_152x2_bitm,88.725,11.275,98.307,1.693,236.34,448,1.000,bilinear,+4.215,+0.875,-14 +regnety_160,88.697,11.303,98.068,1.932,83.59,288,1.000,bicubic,+5.011,+1.292,+55 +pit_b_distilled_224,88.676,11.324,98.093,1.907,74.79,224,0.900,bicubic,+4.532,+1.237,+16 +vit_small_patch16_384.augreg_in21k_ft_in1k,88.652,11.348,98.232,1.768,22.20,384,1.000,bicubic,+4.850,+1.130,+41 +regnetz_d32,88.650,11.350,98.081,1.919,27.58,320,0.950,bicubic,+4.628,+1.215,+22 +flexivit_base.1200ep_in1k,88.646,11.354,97.935,2.065,86.59,240,0.950,bicubic,+3.982,+0.943,-29 +regnety_080,88.635,11.365,97.970,2.030,39.18,288,1.000,bicubic,+4.703,+1.082,+25 +vit_medium_patch16_gap_256.in12k_ft_in1k,88.631,11.369,98.189,1.810,38.86,256,0.950,bicubic,+4.201,+0.977,-11 +eca_nfnet_l1,88.624,11.376,98.132,1.868,41.41,320,1.000,bicubic,+4.614,+1.104,+20 +mvitv2_base,88.620,11.380,97.820,2.180,51.47,224,0.900,bicubic,+4.198,+0.956,-12 +maxvit_base_tf_224.in1k,88.586,11.414,97.848,2.152,119.47,224,0.950,bicubic,+3.726,+0.858,-43 +swinv2_base_window16_256,88.584,11.416,97.893,2.107,87.92,256,0.900,bicubic,+3.990,+0.819,-31 +flexivit_base.600ep_in1k,88.550,11.450,97.935,2.065,86.59,240,0.950,bicubic,+4.032,+0.949,-27 +resnetv2_152x4_bitm,88.545,11.455,98.189,1.810,936.53,480,1.000,bilinear,+3.629,+0.749,-48 
+seresnextaa101d_32x8d,88.545,11.455,98.002,1.998,93.59,288,1.000,bicubic,+3.977,+0.932,-33 +resnet200d,88.543,11.457,97.959,2.041,64.69,320,1.000,bicubic,+4.581,+1.135,+14 +xcit_small_24_p16_224_dist,88.530,11.470,97.999,2.001,47.67,224,1.000,bicubic,+4.668,+1.271,+20 +resnest269e,88.522,11.478,98.027,1.973,110.93,416,0.928,bicubic,+4.004,+1.091,-33 +maxvit_rmlp_small_rw_224,88.520,11.480,97.775,2.225,64.90,224,0.900,bicubic,+4.036,+1.013,-30 +swinv2_base_window8_256,88.513,11.487,97.893,2.107,87.92,256,0.900,bicubic,+4.251,+0.971,-12 +coatnet_rmlp_2_rw_224,88.509,11.491,97.572,2.428,73.88,224,0.950,bicubic,+3.909,+0.836,-41 +seresnext101_32x8d,88.501,11.499,97.888,2.112,93.57,288,1.000,bicubic,+4.309,+1.014,-6 +gcvit_base,88.498,11.502,97.775,2.225,90.32,224,0.875,bicubic,+4.050,+0.693,-32 +crossvit_18_dagger_408,88.477,11.523,97.893,2.107,44.61,408,1.000,bicubic,+4.281,+1.075,-9 +efficientnetv2_rw_s.ra2_in1k,88.473,11.527,97.974,2.026,23.94,384,1.000,bicubic,+4.665,+1.250,+19 +flexivit_base.300ep_in1k,88.471,11.529,97.841,2.159,86.59,240,0.950,bicubic,+4.077,+0.721,-26 +resnetv2_101x3_bitm,88.464,11.536,98.157,1.843,387.93,448,1.000,bilinear,+4.024,+0.775,-35 +maxvit_small_tf_224.in1k,88.458,11.542,97.880,2.120,68.93,224,0.950,bicubic,+4.024,+0.716,-32 +maxvit_large_tf_224.in1k,88.456,11.544,97.805,2.195,211.79,224,0.950,bicubic,+3.530,+0.833,-65 +cait_s24_224,88.447,11.553,97.957,2.043,46.92,224,1.000,bicubic,+4.995,+1.393,+36 +resnetv2_50x3_bitm,88.443,11.557,98.200,1.800,217.32,448,1.000,bilinear,+4.429,+1.076,-4 +resmlp_big_24_distilled_224,88.443,11.557,97.940,2.060,129.14,224,0.875,bicubic,+4.855,+1.292,+30 +regnetv_064,88.432,11.568,98.061,1.939,30.58,288,1.000,bicubic,+4.720,+1.313,+21 +resnest200e,88.432,11.568,98.042,1.958,70.20,320,0.909,bicubic,+4.600,+1.148,+8 +seresnext101d_32x8d,88.428,11.572,97.957,2.043,93.59,288,1.000,bicubic,+4.058,+1.041,-33 +vit_large_r50_s32_224.augreg_in21k_ft_in1k,88.426,11.574,98.085,1.915,328.99,224,0.900,bicubic,+3.992,+1.113,-44 +tf_efficientnet_b3.ns_jft_in1k,88.426,11.574,98.029,1.971,12.23,300,0.904,bicubic,+4.378,+1.119,-11 +convnext_small.fb_in1k,88.413,11.587,98.008,1.992,50.22,288,1.000,bicubic,+4.707,+1.198,+18 +tf_efficientnetv2_s.in1k,88.402,11.598,97.927,2.073,21.46,384,1.000,bicubic,+4.508,+1.229,-4 +vit_base_patch16_384.orig_in21k_ft_in1k,88.389,11.611,98.155,1.845,86.86,384,1.000,bicubic,+4.180,+0.937,-27 +tresnet_v2_l,88.381,11.619,97.925,2.075,46.17,224,0.875,bilinear,+4.479,+1.433,-7 +regnetz_c16_evos,88.377,11.623,98.040,1.960,13.49,320,0.950,bicubic,+5.747,+1.566,+83 +efficientnet_b4.ra2_in1k,88.372,11.628,97.961,2.039,19.34,384,1.000,bicubic,+4.944,+1.365,+24 +resnet152d,88.355,11.645,97.935,2.065,60.21,320,1.000,bicubic,+4.675,+1.197,+14 +swinv2_small_window16_256,88.355,11.645,97.848,2.152,49.73,256,0.900,bicubic,+4.149,+0.978,-31 +tf_efficientnet_b4.ap_in1k,88.349,11.651,97.893,2.107,19.34,380,0.922,bicubic,+5.101,+1.501,+36 +maxvit_rmlp_tiny_rw_256,88.345,11.655,97.824,2.176,29.15,256,0.950,bicubic,+4.113,+0.948,-35 +deit3_small_patch16_224_in21ft1k,88.328,11.672,98.130,1.870,22.06,224,1.000,bicubic,+5.258,+1.350,+49 +tf_efficientnet_b5.ra_in1k,88.321,11.679,97.912,2.088,30.39,456,0.934,bicubic,+4.509,+1.164,-6 +regnety_064,88.317,11.683,97.859,2.142,30.58,288,1.000,bicubic,+4.601,+1.184,+2 +crossvit_15_dagger_408,88.308,11.692,97.871,2.129,28.50,408,1.000,bicubic,+4.470,+1.089,-11 +deit3_small_patch16_384,88.300,11.700,97.888,2.112,22.21,384,1.000,bicubic,+4.874,+1.212,+16 
+cs3se_edgenet_x,88.291,11.709,97.935,2.065,50.72,320,1.000,bicubic,+4.743,+1.265,+10 +pvt_v2_b4,88.285,11.715,97.814,2.186,62.56,224,0.900,bicubic,+4.569,+1.094,-1 +efficientformer_l7,88.278,11.722,97.882,2.118,82.23,224,0.950,bicubic,+4.892,+1.342,+20 +mvitv2_small,88.261,11.739,97.688,2.312,34.87,224,0.900,bicubic,+4.493,+1.118,-8 +xcit_small_12_p16_224_dist,88.251,11.749,97.844,2.156,26.25,224,1.000,bicubic,+4.901,+1.430,+20 +resnetrs152,88.251,11.749,97.737,2.263,86.62,320,1.000,bicubic,+4.539,+1.123,-2 +deit3_base_patch16_224,88.249,11.751,97.809,2.191,86.59,224,0.900,bicubic,+4.457,+1.225,-13 +gcvit_small,88.219,11.781,97.786,2.214,51.09,224,0.875,bicubic,+4.335,+1.128,-24 +regnetv_040,88.214,11.786,97.972,2.028,20.64,288,1.000,bicubic,+5.020,+1.312,+23 +deit_base_distilled_patch16_224,88.214,11.786,97.914,2.086,87.34,224,0.900,bicubic,+4.826,+1.426,+12 +xception65p,88.180,11.820,97.790,2.210,39.82,299,0.940,bicubic,+5.050,+1.310,+27 +swinv2_small_window8_256,88.180,11.820,97.775,2.225,49.73,256,0.900,bicubic,+4.324,+1.135,-25 +xcit_tiny_24_p16_384_dist,88.161,11.839,97.946,2.054,12.12,384,1.000,bicubic,+5.591,+1.660,+68 +xcit_large_24_p8_224,88.157,11.843,97.387,2.613,188.93,224,1.000,bicubic,+3.765,+0.731,-65 +resnetv2_152x2_bit_teacher_384,88.150,11.850,98.051,1.949,236.34,384,1.000,bicubic,+4.306,+0.933,-28 +ig_resnext101_32x8d,88.146,11.854,97.856,2.144,88.79,224,0.875,bilinear,+5.458,+1.220,+53 +cait_xxs36_384,88.140,11.860,97.908,2.092,17.37,384,1.000,bicubic,+5.946,+1.760,+103 +dm_nfnet_f0,88.125,11.875,97.854,2.146,71.49,256,0.900,bicubic,+4.739,+1.282,+4 +pvt_v2_b3,88.112,11.888,97.779,2.220,45.24,224,0.900,bicubic,+4.986,+1.224,+21 +pvt_v2_b5,88.108,11.892,97.701,2.300,81.96,224,0.900,bicubic,+4.368,+0.989,-22 +xcit_tiny_12_p8_384_dist,88.103,11.897,97.918,2.082,6.71,384,1.000,bicubic,+5.715,+1.694,+77 +swsl_resnext101_32x4d,88.099,11.901,97.967,2.033,44.18,224,0.875,bilinear,+4.869,+1.207,+9 +xception65,88.076,11.924,97.752,2.248,39.92,299,0.940,bicubic,+4.896,+1.160,+11 +swin_s3_base_224,88.048,11.952,97.656,2.344,71.13,224,0.900,bicubic,+4.118,+0.994,-43 +xcit_tiny_24_p8_224_dist,88.041,11.959,97.814,2.186,12.11,224,1.000,bicubic,+5.480,+1.748,+57 +maxvit_tiny_tf_224.in1k,88.020,11.980,97.814,2.186,30.92,224,0.950,bicubic,+4.622,+1.226,-7 +gcvit_tiny,88.003,11.997,97.728,2.272,28.22,224,0.875,bicubic,+4.603,+1.330,-9 +cs3sedarknet_x,87.984,12.016,97.799,2.201,35.40,288,1.000,bicubic,+5.330,+1.445,+43 +eca_nfnet_l0,87.980,12.020,97.871,2.129,24.14,288,1.000,bicubic,+5.400,+1.381,+50 +efficientformer_l3,87.971,12.029,97.709,2.291,31.41,224,0.950,bicubic,+5.421,+1.461,+54 +nfnet_l0,87.967,12.033,97.867,2.133,35.07,288,1.000,bicubic,+5.217,+1.351,+33 +xcit_small_24_p8_224,87.965,12.035,97.581,2.419,47.63,224,1.000,bicubic,+4.127,+0.945,-43 +tf_efficientnet_b4.aa_in1k,87.963,12.037,97.739,2.261,19.34,380,0.922,bicubic,+4.941,+1.439,+17 +coatnet_1_rw_224,87.948,12.052,97.455,2.545,41.72,224,0.950,bicubic,+4.340,+1.067,-26 +resnet101d,87.941,12.059,97.908,2.092,44.57,320,1.000,bicubic,+4.919,+1.462,+14 +regnety_032,87.937,12.063,97.891,2.109,19.44,288,1.000,bicubic,+5.213,+1.467,+29 +mobilevitv2_200_384_in22ft1k,87.926,12.074,97.820,2.180,18.45,384,1.000,bicubic,+4.532,+1.240,-17 +swinv2_cr_small_ns_224,87.920,12.080,97.668,2.332,49.70,224,0.900,bicubic,+4.432,+1.182,-25 +regnety_040,87.915,12.085,97.884,2.116,20.65,288,1.000,bicubic,+4.877,+1.374,+8 +sequencer2d_l,87.915,12.085,97.696,2.304,54.30,224,0.875,bicubic,+4.509,+1.190,-23 
+vit_base_patch32_384.augreg_in21k_ft_in1k,87.909,12.091,98.012,1.988,88.30,384,1.000,bicubic,+4.559,+1.176,-16 +twins_svt_large,87.901,12.099,97.581,2.419,99.27,224,0.900,bicubic,+4.223,+0.987,-35 +coatnet_rmlp_1_rw_224,87.892,12.108,97.628,2.372,41.69,224,0.950,bicubic,+4.534,+1.172,-19 +twins_pcpvt_large,87.877,12.123,97.856,2.144,60.99,224,0.900,bicubic,+4.737,+1.258,-7 +regnetz_c16,87.860,12.140,97.818,2.182,13.46,320,0.940,bicubic,+5.342,+1.746,+42 +swin_s3_small_224,87.854,12.146,97.431,2.568,49.74,224,0.900,bicubic,+4.084,+0.981,-51 +maxvit_tiny_rw_224,87.852,12.149,97.643,2.357,29.06,224,0.950,bicubic,+4.347,+1.141,-35 +deit_base_patch16_384,87.845,12.155,97.510,2.490,86.86,384,1.000,bicubic,+4.739,+1.138,-6 +mobilevitv2_175_384_in22ft1k,87.841,12.159,97.726,2.274,14.25,384,1.000,bicubic,+4.899,+1.300,+3 +convnext_nano.in12k_ft_in1k,87.834,12.166,97.888,2.112,15.59,288,1.000,bicubic,+4.976,+1.332,+7 +xcit_small_12_p8_224,87.828,12.172,97.568,2.432,26.21,224,1.000,bicubic,+4.484,+1.088,-23 +flexivit_small.1200ep_in1k,87.815,12.185,97.615,2.385,22.06,240,0.950,bicubic,+5.289,+1.479,+33 +tf_efficientnetv2_b3.in21k_ft_in1k,87.813,12.187,97.893,2.107,14.36,300,0.900,bicubic,+5.141,+1.269,+17 +vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,87.813,12.187,97.754,2.246,88.22,224,0.900,bicubic,+4.507,+1.224,-25 +flexivit_small.600ep_in1k,87.811,12.189,97.581,2.419,22.06,240,0.950,bicubic,+5.457,+1.495,+46 +maxxvit_rmlp_nano_rw_256,87.802,12.198,97.756,2.244,16.78,256,0.950,bicubic,+4.772,+1.412,-9 +deit3_medium_patch16_224,87.802,12.198,97.649,2.351,38.85,224,0.900,bicubic,+4.722,+1.357,-13 +tresnet_xl_448,87.796,12.204,97.459,2.541,78.44,448,0.875,bilinear,+4.746,+1.285,-12 +resnetv2_50x1_bit_distilled,87.787,12.213,97.899,2.101,25.55,224,0.875,bicubic,+4.969,+1.377,0 +convnext_tiny.fb_in1k,87.762,12.238,97.585,2.415,28.59,288,1.000,bicubic,+5.062,+1.449,+8 +twins_pcpvt_base,87.736,12.264,97.726,2.274,43.83,224,0.900,bicubic,+5.028,+1.380,+4 +tresnet_m,87.736,12.264,97.523,2.477,31.39,224,0.875,bilinear,+4.656,+1.405,-17 +mvitv2_tiny,87.721,12.279,97.555,2.445,24.17,224,0.900,bicubic,+5.317,+1.399,+31 +maxvit_rmlp_nano_rw_256,87.719,12.281,97.579,2.421,15.50,256,0.950,bicubic,+4.757,+1.309,-13 +gc_efficientnetv2_rw_t.agc_in1k,87.715,12.285,97.807,2.193,13.68,288,1.000,bicubic,+5.251,+1.509,+26 +resnetv2_101x1_bitm,87.681,12.319,97.940,2.060,44.54,448,1.000,bilinear,+5.349,+1.984,+39 +swin_small_patch4_window7_224,87.664,12.336,97.566,2.434,49.61,224,0.900,bicubic,+4.452,+1.244,-35 +mobilevitv2_150_384_in22ft1k,87.657,12.343,97.649,2.351,10.59,384,1.000,bicubic,+5.063,+1.331,+7 +efficientnetv2_rw_t.ra2_in1k,87.644,12.356,97.688,2.312,13.65,288,1.000,bicubic,+5.296,+1.492,+33 +twins_svt_base,87.638,12.362,97.523,2.477,56.07,224,0.900,bicubic,+4.502,+1.105,-32 +pnasnet5large,87.636,12.364,97.485,2.515,86.06,331,0.911,bicubic,+4.854,+1.445,-9 +cs3edgenet_x,87.619,12.381,97.654,2.346,47.82,288,1.000,bicubic,+4.917,+1.284,-5 +swsl_resnext101_32x16d,87.615,12.386,97.820,2.180,194.03,224,0.875,bilinear,+4.269,+0.974,-48 +flexivit_small.300ep_in1k,87.615,12.386,97.613,2.387,22.06,240,0.950,bicubic,+5.443,+1.589,+50 +swinv2_tiny_window16_256,87.615,12.386,97.562,2.438,28.35,256,0.900,bicubic,+4.805,+1.330,-14 +maxvit_nano_rw_256,87.610,12.390,97.523,2.477,15.45,256,0.950,bicubic,+4.678,+1.301,-23 +jx_nest_base,87.606,12.394,97.521,2.479,67.72,224,0.875,bicubic,+4.054,+1.151,-67 +swsl_resnext50_32x4d,87.600,12.400,97.651,2.349,25.03,224,0.875,bilinear,+5.418,+1.421,+43 
+xcit_medium_24_p8_224,87.600,12.400,97.199,2.801,84.32,224,1.000,bicubic,+3.866,+0.805,-80 +sequencer2d_m,87.565,12.435,97.581,2.419,38.31,224,0.875,bicubic,+4.759,+1.313,-19 +tf_efficientnet_b2.ns_jft_in1k,87.557,12.443,97.628,2.372,9.11,260,0.890,bicubic,+5.177,+1.380,+18 +levit_384,87.553,12.447,97.545,2.455,39.13,224,0.900,bicubic,+4.967,+1.529,-5 +ecaresnet50t,87.538,12.462,97.643,2.357,25.57,320,0.950,bicubic,+5.192,+1.505,+20 +vit_base_patch32_clip_224.laion2b_ft_in1k,87.535,12.464,97.553,2.447,88.22,224,0.900,bicubic,+4.954,+1.351,-6 +vit_base_patch16_rpn_224.in1k,87.508,12.492,97.489,2.511,86.54,224,0.900,bicubic,+5.306,+1.493,+33 +pvt_v2_b2_li,87.504,12.496,97.478,2.522,22.55,224,0.900,bicubic,+5.308,+1.374,+33 +edgenext_small,87.501,12.499,97.583,2.417,5.59,320,1.000,bicubic,+5.933,+1.877,+75 +coatnet_bn_0_rw_224,87.499,12.501,97.547,2.453,27.44,224,0.950,bicubic,+5.101,+1.365,+8 +resnetv2_152x2_bit_teacher,87.493,12.507,97.812,2.188,236.34,224,0.875,bicubic,+4.631,+1.244,-33 +jx_nest_small,87.489,12.511,97.517,2.483,38.35,224,0.875,bicubic,+4.368,+1.189,-49 +vit_relpos_base_patch16_clsgap_224.sw_in1k,87.469,12.531,97.525,2.474,86.43,224,0.900,bicubic,+4.707,+1.351,-28 +vit_relpos_base_patch16_224.sw_in1k,87.463,12.537,97.558,2.442,86.43,224,0.900,bicubic,+4.979,+1.416,-3 +resnet152,87.461,12.539,97.402,2.598,60.19,224,0.950,bicubic,+4.639,+1.276,-35 +fbnetv3_g.ra2_in1k,87.452,12.548,97.547,2.453,16.62,288,0.950,bilinear,+5.404,+1.483,+37 +resnext101_64x4d,87.444,12.556,97.442,2.558,83.46,288,1.000,bicubic,+4.296,+1.070,-60 +efficientnet_b3.ra2_in1k,87.435,12.565,97.681,2.319,12.23,320,1.000,bicubic,+5.193,+1.567,+18 +resnet61q,87.435,12.565,97.598,2.402,36.85,288,1.000,bicubic,+4.911,+1.468,-11 +cait_xxs24_384,87.416,12.584,97.619,2.381,12.03,384,1.000,bicubic,+6.450,+1.973,+110 +cs3darknet_x,87.401,12.599,97.607,2.393,35.05,288,1.000,bicubic,+5.173,+1.373,+17 +cs3sedarknet_l,87.397,12.603,97.570,2.430,21.91,288,0.950,bicubic,+5.623,+1.602,+52 +resnet51q,87.395,12.605,97.587,2.413,35.70,288,1.000,bilinear,+5.035,+1.407,-1 +xcit_tiny_24_p8_224,87.380,12.620,97.628,2.372,12.11,224,1.000,bicubic,+5.480,+1.652,+41 +coat_lite_small,87.380,12.620,97.365,2.635,19.84,224,0.900,bicubic,+5.072,+1.515,+6 +tresnet_l_448,87.377,12.623,97.485,2.515,55.99,448,0.875,bilinear,+5.109,+1.509,+9 +sequencer2d_s,87.375,12.625,97.389,2.611,27.65,224,0.875,bicubic,+5.033,+1.359,-1 +pvt_v2_b2,87.373,12.627,97.517,2.483,25.36,224,0.900,bicubic,+5.297,+1.555,+21 +swinv2_cr_small_224,87.371,12.629,97.344,2.656,49.70,224,0.900,bicubic,+4.225,+1.250,-71 +vit_relpos_medium_patch16_cls_224.sw_in1k,87.369,12.631,97.453,2.547,38.76,224,0.900,bicubic,+4.807,+1.283,-25 +nasnetalarge,87.350,12.650,97.417,2.583,88.75,331,0.911,bicubic,+4.730,+1.371,-35 +crossvit_18_dagger_240,87.346,12.655,97.457,2.543,44.27,240,0.875,bicubic,+4.828,+1.097,-22 +crossvit_18_240,87.316,12.684,97.483,2.517,43.27,240,0.875,bicubic,+4.916,+1.429,-16 +resnetv2_101,87.307,12.693,97.323,2.677,44.54,224,0.950,bicubic,+5.277,+1.463,+20 +ecaresnet101d,87.288,12.712,97.562,2.438,44.57,224,0.875,bicubic,+5.116,+1.516,+9 +resnest101e,87.284,12.716,97.560,2.440,48.28,256,0.875,bilinear,+4.394,+1.240,-59 +gcvit_xtiny,87.281,12.719,97.481,2.519,19.98,224,0.875,bicubic,+5.329,+1.515,+24 +pit_s_distilled_224,87.277,12.723,97.500,2.500,24.04,224,0.900,bicubic,+5.281,+1.702,+18 +coatnet_rmlp_nano_rw_224,87.275,12.725,97.440,2.560,15.15,224,0.900,bicubic,+5.211,+1.570,+11 
+resnetv2_50d_gn,87.260,12.740,97.513,2.487,25.57,288,0.950,bicubic,+5.444,+1.589,+30 +vit_relpos_medium_patch16_rpn_224.sw_in1k,87.258,12.742,97.442,2.558,38.73,224,0.900,bicubic,+4.960,+1.468,-9 +resnetrs101,87.247,12.753,97.457,2.543,63.62,288,0.940,bicubic,+4.959,+1.519,-9 +coatnext_nano_rw_224,87.239,12.761,97.549,2.451,14.70,224,0.900,bicubic,+5.291,+1.631,+19 +poolformer_m48,87.239,12.761,97.308,2.692,73.47,224,0.950,bicubic,+4.777,+1.350,-30 +mixer_b16_224_miil,87.226,12.774,97.410,2.590,59.88,224,0.875,bilinear,+4.918,+1.694,-14 +xcit_tiny_12_p8_224_dist,87.224,12.776,97.444,2.556,6.71,224,1.000,bicubic,+6.012,+1.844,+63 +tresnet_xl,87.224,12.776,97.400,2.600,78.44,224,0.875,bilinear,+5.170,+1.463,+6 +xcit_tiny_12_p16_384_dist,87.202,12.798,97.466,2.534,6.72,384,1.000,bicubic,+6.262,+2.056,+85 +convit_base,87.200,12.800,97.286,2.714,86.54,224,0.875,bicubic,+4.912,+1.278,-15 +resnetv2_50d_evos,87.194,12.806,97.361,2.639,25.59,288,0.950,bicubic,+5.218,+1.445,+7 +tf_efficientnet_b3.ap_in1k,87.192,12.808,97.380,2.620,12.23,300,0.904,bicubic,+5.370,+1.756,+18 +visformer_small,87.181,12.819,97.323,2.677,40.22,224,0.900,bicubic,+5.075,+1.451,-4 +vit_base_patch32_clip_224.openai_ft_in1k,87.177,12.823,97.463,2.537,88.22,224,0.900,bicubic,+5.247,+1.495,+12 +crossvit_15_dagger_240,87.172,12.828,97.438,2.562,28.21,240,0.875,bicubic,+4.841,+0.920,-26 +vit_srelpos_medium_patch16_224.sw_in1k,87.168,12.832,97.314,2.686,38.74,224,0.900,bicubic,+4.932,+1.380,-18 +convnext_tiny_hnf.a2h_in1k,87.143,12.857,97.293,2.707,28.59,288,1.000,bicubic,+4.553,+1.277,-58 +vit_relpos_medium_patch16_224.sw_in1k,87.141,12.860,97.504,2.496,38.75,224,0.900,bicubic,+4.675,+1.416,-45 +xcit_small_24_p16_224,87.132,12.868,97.259,2.741,47.67,224,1.000,bicubic,+4.552,+1.255,-56 +swin_s3_tiny_224,87.128,12.872,97.303,2.697,28.33,224,0.900,bicubic,+5.006,+1.355,-13 +coatnet_0_rw_224,87.119,12.881,97.224,2.776,27.44,224,0.950,bicubic,+4.729,+1.388,-41 +swinv2_tiny_window8_256,87.076,12.924,97.515,2.485,28.35,256,0.900,bicubic,+5.270,+1.521,+10 +resnet101,87.068,12.932,97.263,2.737,44.55,224,0.950,bicubic,+5.130,+1.509,+2 +mobilevitv2_200_in22ft1k,87.053,12.947,97.429,2.571,18.45,256,0.888,bicubic,+4.729,+1.489,-35 +convit_small,87.053,12.947,97.350,2.650,27.78,224,0.875,bicubic,+5.627,+1.606,+33 +crossvit_15_240,87.051,12.949,97.423,2.577,27.53,240,0.875,bicubic,+5.515,+1.731,+21 +xception41p,87.049,12.951,97.205,2.795,26.91,299,0.940,bicubic,+5.091,+1.411,-6 +tf_efficientnetv2_b3.in1k,87.032,12.968,97.303,2.697,14.36,300,0.904,bicubic,+5.062,+1.521,-8 +regnetz_b16,87.017,12.983,97.429,2.571,9.72,288,0.940,bicubic,+6.301,+1.951,+78 +xcit_small_12_p16_224,87.010,12.990,97.248,2.752,26.25,224,1.000,bicubic,+5.036,+1.432,-12 +deit3_small_patch16_224,87.010,12.990,97.169,2.831,22.06,224,0.900,bicubic,+5.624,+1.719,+32 +jx_nest_tiny,87.008,12.992,97.378,2.622,17.06,224,0.875,bicubic,+5.594,+1.762,+27 +swinv2_cr_tiny_ns_224,87.000,13.000,97.284,2.716,28.33,224,0.900,bicubic,+5.210,+1.460,0 +deit_small_distilled_patch16_224,86.993,13.007,97.316,2.684,22.44,224,0.900,bicubic,+5.793,+1.938,+38 +resmlp_36_distilled_224,86.993,13.007,97.278,2.722,44.69,224,0.875,bicubic,+5.833,+1.790,+40 +coatnet_nano_rw_224,86.989,13.011,97.242,2.759,15.14,224,0.900,bicubic,+5.289,+1.603,+1 +xcit_large_24_p16_224,86.955,13.045,96.921,3.079,189.10,224,1.000,bicubic,+4.059,+1.039,-100 +poolformer_m36,86.948,13.052,97.148,2.852,56.17,224,0.950,bicubic,+4.838,+1.460,-30 
+mobilevitv2_175_in22ft1k,86.944,13.056,97.333,2.667,14.25,256,0.888,bicubic,+5.000,+1.541,-15 +xcit_medium_24_p16_224,86.936,13.065,97.101,2.899,84.40,224,1.000,bicubic,+4.300,+1.125,-85 +tnt_s_patch16_224,86.903,13.097,97.368,2.632,23.76,224,0.900,bicubic,+5.385,+1.620,+8 +vit_relpos_small_patch16_224.sw_in1k,86.899,13.101,97.489,2.511,21.98,224,0.900,bicubic,+5.437,+1.661,+13 +vit_small_patch16_224.augreg_in21k_ft_in1k,86.869,13.131,97.613,2.387,22.05,224,0.900,bicubic,+5.467,+1.479,+17 +vit_small_r26_s32_224.augreg_in21k_ft_in1k,86.863,13.137,97.528,2.472,36.43,224,0.900,bicubic,+5.005,+1.506,-16 +ssl_resnext101_32x16d,86.856,13.143,97.517,2.483,194.03,224,0.875,bilinear,+5.013,+1.421,-16 +convmixer_1536_20,86.852,13.148,97.346,2.654,51.63,224,0.960,bicubic,+5.476,+1.732,+18 +rexnet_200,86.846,13.154,97.276,2.724,16.37,224,0.875,bicubic,+5.214,+1.608,-6 +tf_efficientnet_b3.aa_in1k,86.835,13.165,97.297,2.703,12.23,300,0.904,bicubic,+5.199,+1.579,-8 +deit_base_patch16_224,86.829,13.171,97.049,2.951,86.57,224,0.900,bicubic,+4.831,+1.315,-33 +tresnet_m_448,86.820,13.180,97.212,2.788,31.39,448,0.875,bilinear,+5.106,+1.640,-14 +swsl_resnet50,86.807,13.193,97.498,2.502,25.56,224,0.875,bilinear,+5.641,+2.402,+21 +ssl_resnext101_32x8d,86.807,13.193,97.466,2.534,88.79,224,0.875,bilinear,+5.191,+1.428,-8 +tf_efficientnet_lite4.in1k,86.803,13.197,97.263,2.737,13.01,380,0.920,bilinear,+5.267,+1.595,-5 +coat_mini,86.793,13.207,97.162,2.837,10.34,224,0.900,bicubic,+5.525,+1.770,+14 +vit_base_patch16_224.orig_in21k_ft_in1k,86.778,13.223,97.438,2.562,86.57,224,0.900,bicubic,+4.992,+1.316,-22 +resnetaa50,86.778,13.223,97.395,2.605,25.56,288,1.000,bicubic,+5.156,+1.587,-13 +tresnet_l,86.767,13.233,97.271,2.729,55.99,224,0.875,bilinear,+5.279,+1.647,-5 +cs3darknet_l,86.756,13.244,97.466,2.534,21.16,288,0.950,bicubic,+5.860,+1.796,+39 +twins_svt_small,86.756,13.244,97.175,2.825,24.06,224,0.900,bicubic,+5.074,+1.505,-21 +cs3darknet_focus_l,86.743,13.257,97.376,2.624,21.15,288,0.950,bicubic,+5.859,+1.694,+39 +mobilevitv2_150_in22ft1k,86.739,13.261,97.222,2.778,10.59,256,0.888,bicubic,+5.261,+1.548,-8 +crossvit_base_240,86.733,13.267,97.120,2.880,105.03,240,0.875,bicubic,+4.517,+1.290,-62 +levit_256,86.728,13.272,97.259,2.741,18.89,224,0.900,bicubic,+5.218,+1.769,-13 +convnext_nano_ols.d1h_in1k,86.722,13.278,97.049,2.951,15.65,288,1.000,bicubic,+5.112,+1.409,-20 +vit_srelpos_small_patch16_224.sw_in1k,86.699,13.301,97.250,2.750,21.97,224,0.900,bicubic,+5.605,+1.918,+16 +seresnext50_32x4d,86.699,13.301,97.214,2.786,27.56,224,0.875,bicubic,+5.433,+1.594,+4 +crossvit_small_240,86.686,13.314,97.273,2.727,26.86,240,0.875,bicubic,+5.666,+1.814,+20 +pit_b_224,86.686,13.314,96.898,3.102,73.76,224,0.900,bicubic,+4.240,+1.188,-92 +halo2botnet50ts_256,86.681,13.319,97.090,2.910,22.64,256,0.950,bicubic,+4.621,+1.454,-57 +tf_efficientnet_b1.ns_jft_in1k,86.669,13.331,97.378,2.622,7.79,240,0.882,bicubic,+5.281,+1.640,-9 +swin_tiny_patch4_window7_224,86.664,13.336,97.197,2.803,28.29,224,0.900,bicubic,+5.286,+1.657,-8 +gernet_l,86.654,13.346,97.186,2.814,31.08,256,0.875,bilinear,+5.300,+1.650,-7 +wide_resnet50_2,86.647,13.353,97.214,2.786,68.88,224,0.875,bicubic,+5.191,+1.682,-17 +poolformer_s36,86.639,13.361,97.158,2.842,30.86,224,0.900,bicubic,+5.223,+1.712,-16 +efficientnet_el.ra_in1k,86.635,13.366,97.175,2.825,10.59,300,0.904,bicubic,+5.319,+1.649,-9 +resmlp_24_distilled_224,86.622,13.378,97.135,2.865,30.02,224,0.875,bicubic,+5.856,+1.917,+28 
+twins_pcpvt_small,86.620,13.380,97.340,2.660,24.11,224,0.900,bicubic,+5.532,+1.698,+7 +nf_resnet50,86.609,13.391,97.293,2.707,25.56,288,0.940,bicubic,+5.947,+1.957,+30 +resnest50d_4s2x40d,86.592,13.408,97.269,2.731,30.42,224,0.875,bicubic,+5.484,+1.711,+1 +efficientnet_b3_pruned.in1k,86.581,13.419,97.190,2.810,9.86,300,0.904,bicubic,+5.723,+1.948,+21 +repvgg_b3,86.566,13.434,97.139,2.861,123.09,224,0.875,bilinear,+6.074,+1.879,+38 +sehalonet33ts,86.564,13.436,97.004,2.995,13.69,256,0.940,bicubic,+5.606,+1.728,+10 +sebotnet33ts_256,86.558,13.442,96.785,3.215,13.70,256,0.940,bicubic,+5.408,+1.611,-7 +xcit_tiny_24_p16_224_dist,86.536,13.464,97.218,2.782,12.12,224,1.000,bicubic,+6.090,+2.000,+43 +convnext_nano.d1h_in1k,86.530,13.470,97.177,2.823,15.59,288,1.000,bicubic,+5.060,+1.519,-31 +vit_small_patch16_384.augreg_in1k,86.496,13.504,97.182,2.818,22.20,384,1.000,bicubic,+5.376,+1.608,-8 +halonet50ts,86.483,13.517,97.152,2.848,22.73,256,0.940,bicubic,+4.839,+1.544,-48 +ssl_resnext101_32x4d,86.479,13.521,97.468,2.532,44.18,224,0.875,bilinear,+5.555,+1.740,+6 +maxvit_rmlp_pico_rw_256,86.479,13.521,97.203,2.797,7.52,256,0.950,bicubic,+5.963,+1.991,+29 +ecaresnet50d,86.470,13.530,97.186,2.814,25.58,224,0.875,bicubic,+5.878,+1.866,+21 +gcresnet50t,86.470,13.530,97.141,2.859,25.90,256,0.900,bicubic,+5.530,+1.687,+2 +gluon_resnet152_v1s,86.468,13.532,97.109,2.891,60.32,224,0.875,bicubic,+5.452,+1.697,-4 +haloregnetz_b,86.464,13.536,96.943,3.057,11.68,224,0.940,bicubic,+5.414,+1.747,-8 +mobilevitv2_200,86.451,13.549,96.972,3.027,18.45,256,0.888,bicubic,+5.315,+1.606,-17 +resnest50d_1s4x24d,86.447,13.553,97.148,2.852,25.68,224,0.875,bicubic,+5.459,+1.826,-6 +resnetv2_50x1_bitm,86.436,13.564,97.602,2.398,25.55,448,1.000,bilinear,+6.094,+1.918,+39 +repvgg_b3g4,86.361,13.639,97.054,2.946,83.83,224,0.875,bilinear,+6.149,+1.944,+51 +darknetaa53,86.359,13.641,97.158,2.842,36.02,288,1.000,bilinear,+5.837,+1.836,+18 +lamhalobotnet50ts_256,86.357,13.643,97.062,2.938,22.57,256,0.950,bicubic,+4.813,+1.558,-53 +darknet53,86.355,13.645,97.115,2.885,41.61,288,1.000,bicubic,+5.821,+1.695,+14 +efficientformer_l1,86.344,13.656,97.024,2.976,12.29,224,0.950,bicubic,+5.842,+2.026,+17 +legacy_senet154,86.342,13.658,96.928,3.072,115.09,224,0.875,bilinear,+5.032,+1.432,-35 +cait_xxs36_224,86.340,13.660,97.111,2.889,17.30,224,1.000,bicubic,+6.590,+2.245,+70 +resnext50_32x4d,86.340,13.660,96.972,3.027,25.03,224,0.950,bicubic,+5.222,+1.641,-25 +gernet_m,86.319,13.681,97.096,2.904,21.14,224,0.875,bilinear,+5.587,+1.912,0 +pit_s_224,86.316,13.684,97.045,2.955,23.46,224,0.900,bicubic,+5.222,+1.475,-24 +mobilevitv2_175,86.316,13.684,96.985,3.015,14.25,256,0.888,bicubic,+5.456,+1.731,-6 +vit_small_patch32_384.augreg_in21k_ft_in1k,86.312,13.688,97.417,2.583,22.92,384,1.000,bicubic,+5.832,+1.819,+12 +efficientnet_b2.ra_in1k,86.304,13.696,96.990,3.010,9.11,288,1.000,bicubic,+5.692,+1.672,0 +gluon_senet154,86.278,13.722,96.949,3.051,115.09,224,0.875,bicubic,+5.044,+1.601,-40 +gcvit_xxtiny,86.242,13.758,97.109,2.891,12.00,224,0.875,bicubic,+6.528,+2.029,+65 +resnest50d,86.240,13.761,97.073,2.927,27.48,224,0.875,bilinear,+5.266,+1.695,-22 +convmixer_768_32,86.229,13.771,97.034,2.966,21.11,224,0.960,bicubic,+6.065,+1.962,+38 +vit_base_patch16_384.augreg_in1k,86.227,13.773,96.968,3.032,86.86,384,1.000,bicubic,+5.125,+1.636,-33 +ecaresnet101d_pruned,86.210,13.790,97.335,2.665,24.88,224,0.875,bicubic,+5.392,+1.707,-13 +efficientnet_el_pruned.in1k,86.192,13.807,97.026,2.974,10.59,300,0.904,bicubic,+5.892,+1.808,+25 
+cspdarknet53,86.182,13.818,97.013,2.987,27.64,256,0.887,bilinear,+6.124,+1.929,+40 +inception_v4,86.169,13.831,96.919,3.081,42.68,299,0.875,bicubic,+6.001,+1.951,+32 +rexnet_150,86.154,13.846,97.058,2.942,9.73,224,0.875,bicubic,+5.844,+1.892,+20 +inception_resnet_v2,86.133,13.867,97.043,2.957,55.84,299,0.897,bicubic,+5.675,+1.737,+5 +xcit_tiny_12_p8_224,86.105,13.895,97.084,2.917,6.71,224,1.000,bicubic,+6.411,+2.031,+56 +ssl_resnext50_32x4d,86.086,13.914,97.212,2.788,25.03,224,0.875,bilinear,+5.768,+1.806,+15 +tf_efficientnet_el.in1k,86.084,13.916,96.964,3.036,10.59,300,0.904,bicubic,+5.834,+1.836,+21 +mobilevitv2_150,86.075,13.925,96.849,3.151,10.59,256,0.888,bicubic,+5.699,+1.789,+7 +cspresnext50,86.073,13.927,97.101,2.899,20.57,256,0.887,bilinear,+5.527,+1.781,-12 +convnext_pico_ols.d1_in1k,86.067,13.933,97.017,2.983,9.06,288,1.000,bicubic,+5.603,+1.775,-3 +gluon_resnet101_v1s,86.054,13.946,97.022,2.978,44.67,224,0.875,bicubic,+5.752,+1.862,+13 +lambda_resnet50ts,86.054,13.946,96.736,3.264,21.54,256,0.950,bicubic,+4.888,+0.764,-54 +ecaresnetlight,86.052,13.948,97.069,2.931,30.16,224,0.875,bicubic,+5.590,+1.821,-5 +edgenext_small_rw,86.047,13.953,96.925,3.075,7.83,320,1.000,bicubic,+5.591,+1.734,-4 +poolformer_s24,86.032,13.968,97.028,2.972,21.39,224,0.900,bicubic,+5.716,+1.990,+6 +gluon_seresnext101_32x4d,86.032,13.968,96.977,3.023,48.96,224,0.875,bicubic,+5.128,+1.683,-35 +resnetv2_50,86.032,13.968,96.902,3.098,25.55,224,0.950,bicubic,+5.600,+1.822,-5 +convnext_pico.d1_in1k,86.020,13.980,96.941,3.059,9.05,288,0.950,bicubic,+5.594,+1.882,-5 +resnet50d,86.009,13.991,96.979,3.021,25.58,224,0.875,bicubic,+5.479,+1.819,-20 +seresnet33ts,86.007,13.993,97.011,2.989,19.78,256,0.900,bicubic,+5.655,+1.905,-3 +gcresnext50ts,86.007,13.993,96.966,3.034,15.67,256,0.900,bicubic,+5.427,+1.796,-24 +ecaresnet26t,85.983,14.017,97.041,2.959,16.01,320,0.950,bicubic,+6.129,+1.957,+30 +tf_efficientnet_b2.ap_in1k,85.975,14.025,96.810,3.190,9.11,260,0.890,bicubic,+5.675,+1.782,+3 +gluon_seresnext101_64x4d,85.960,14.040,96.979,3.021,88.23,224,0.875,bicubic,+5.066,+1.671,-42 +vit_base_patch32_224.augreg_in21k_ft_in1k,85.956,14.044,97.130,2.869,88.22,224,0.900,bicubic,+5.231,+1.562,-35 +fbnetv3_d.ra2_in1k,85.924,14.076,97.026,2.974,10.31,256,0.950,bilinear,+6.243,+2.082,+38 +gluon_resnet152_v1d,85.917,14.083,96.812,3.188,60.21,224,0.875,bicubic,+5.443,+1.606,-21 +vit_large_patch32_384.orig_in21k_ft_in1k,85.909,14.091,97.368,2.632,306.63,384,1.000,bicubic,+4.403,+1.276,-93 +tf_efficientnet_b2.aa_in1k,85.902,14.098,96.862,3.139,9.11,260,0.890,bicubic,+5.816,+1.954,+9 +tf_efficientnetv2_b2.in1k,85.900,14.100,96.889,3.111,10.10,260,0.890,bicubic,+5.692,+1.847,+3 +resnet50_gn,85.881,14.119,96.851,3.149,25.56,224,0.940,bicubic,+5.829,+1.905,+11 +vit_base_patch16_224.sam,85.877,14.123,96.697,3.303,86.57,224,0.900,bicubic,+5.635,+1.941,-2 +seresnet50,85.857,14.143,97.004,2.995,28.09,224,0.875,bicubic,+5.583,+1.934,-6 +repvgg_b2g4,85.855,14.145,96.812,3.188,61.76,224,0.875,bilinear,+6.489,+2.124,+45 +gluon_resnet101_v1d,85.849,14.151,96.663,3.337,44.57,224,0.875,bicubic,+5.435,+1.649,-21 +gcresnet33ts,85.815,14.185,96.898,3.102,19.88,256,0.900,bicubic,+5.733,+1.900,+3 +mixnet_xl.ra_in1k,85.798,14.202,96.712,3.288,11.90,224,0.875,bicubic,+5.322,+1.776,-32 +ens_adv_inception_resnet_v2,85.781,14.220,96.761,3.239,55.84,299,0.897,bicubic,+5.799,+1.823,+6 +tf_efficientnet_lite3.in1k,85.755,14.245,96.887,3.113,8.20,300,0.904,bilinear,+5.935,+1.973,+16 
+ese_vovnet39b,85.751,14.249,96.891,3.109,24.57,224,0.875,bicubic,+6.431,+2.179,+41 +legacy_seresnext101_32x4d,85.746,14.254,96.757,3.243,48.96,224,0.875,bilinear,+5.518,+1.739,-11 +gluon_resnext101_32x4d,85.746,14.254,96.635,3.365,44.18,224,0.875,bicubic,+5.412,+1.709,-21 +eca_resnet33ts,85.740,14.260,96.900,3.100,19.68,256,0.900,bicubic,+5.662,+1.930,-3 +xcit_tiny_24_p16_224,85.734,14.267,96.938,3.062,12.12,224,1.000,bicubic,+6.290,+2.056,+33 +regnety_320,85.725,14.275,96.725,3.275,145.05,224,0.875,bicubic,+4.915,+1.481,-58 +cspresnet50,85.721,14.279,96.795,3.205,21.62,256,0.887,bilinear,+6.147,+2.083,+23 +resnet50,85.706,14.294,96.494,3.506,25.56,224,0.950,bicubic,+5.332,+1.880,-31 +xception71,85.697,14.303,96.776,3.224,42.34,299,0.903,bicubic,+5.823,+1.854,+1 +resmlp_big_24_224,85.695,14.305,96.426,3.574,129.14,224,0.875,bicubic,+4.667,+1.404,-79 +gluon_resnext101_64x4d,85.693,14.307,96.644,3.356,83.46,224,0.875,bicubic,+5.089,+1.656,-56 +efficientnet_em.ra2_in1k,85.684,14.316,96.938,3.062,6.90,240,0.882,bicubic,+6.432,+2.144,+41 +deit_small_patch16_224,85.678,14.322,96.906,3.094,22.05,224,0.900,bicubic,+5.822,+1.854,-2 +pit_xs_distilled_224,85.657,14.343,96.667,3.333,11.00,224,0.900,bicubic,+6.351,+2.303,+32 +efficientnet_b2_pruned.in1k,85.642,14.358,96.746,3.254,8.31,260,0.890,bicubic,+5.726,+1.890,-8 +dpn107,85.640,14.360,96.729,3.271,86.92,224,0.875,bicubic,+5.484,+1.819,-19 +resmlp_36_224,85.620,14.380,96.795,3.205,44.69,224,0.875,bicubic,+5.850,+1.909,0 +mobilevitv2_125,85.584,14.416,96.665,3.335,7.48,256,0.888,bicubic,+5.900,+1.815,+6 +ecaresnet50d_pruned,85.580,14.420,96.936,3.064,19.94,224,0.875,bicubic,+5.864,+2.056,0 +levit_192,85.580,14.420,96.740,3.260,10.95,224,0.900,bicubic,+5.738,+1.954,-7 +gluon_resnet152_v1c,85.580,14.420,96.646,3.354,60.21,224,0.875,bicubic,+5.670,+1.806,-12 +resnext50d_32x4d,85.569,14.431,96.748,3.252,25.05,224,0.875,bicubic,+5.893,+1.882,+4 +tf_efficientnetv2_b1.in1k,85.561,14.439,96.727,3.273,8.14,240,0.882,bicubic,+6.099,+2.005,+13 +regnety_120,85.543,14.457,96.785,3.215,51.82,224,0.875,bicubic,+5.177,+1.659,-46 +fbnetv3_b.ra2_in1k,85.524,14.476,96.862,3.139,8.60,256,0.950,bilinear,+6.374,+2.116,+36 +regnetx_320,85.524,14.476,96.669,3.331,107.81,224,0.875,bicubic,+5.278,+1.643,-36 +nf_regnet_b1,85.505,14.495,96.791,3.209,10.22,288,0.900,bicubic,+6.213,+2.043,+21 +dpn92,85.494,14.506,96.635,3.365,37.67,224,0.875,bicubic,+5.486,+1.799,-24 +gluon_resnet152_v1b,85.475,14.525,96.550,3.450,60.19,224,0.875,bicubic,+5.789,+1.814,-6 +rexnet_130,85.473,14.527,96.684,3.316,7.56,224,0.875,bicubic,+5.973,+2.002,+2 +resnetrs50,85.462,14.538,96.736,3.264,35.69,224,0.910,bicubic,+5.570,+1.767,-22 +dpn131,85.398,14.602,96.639,3.361,79.25,224,0.875,bicubic,+5.576,+1.929,-17 +convnext_tiny.fb_in22k_ft_in1k,85.390,14.610,96.799,3.200,28.59,288,1.000,bicubic,+6.482,+2.125,+41 +regnetx_160,85.390,14.610,96.637,3.363,54.28,224,0.875,bicubic,+5.534,+1.807,-22 +dla102x2,85.366,14.634,96.629,3.371,41.28,224,0.875,bilinear,+5.918,+1.989,+2 +gmlp_s16_224,85.353,14.646,96.648,3.352,19.42,224,0.875,bicubic,+5.712,+2.050,-9 +botnet26t_256,85.343,14.657,96.629,3.371,12.49,256,0.950,bicubic,+6.071,+2.101,+15 +gluon_seresnext50_32x4d,85.336,14.664,96.667,3.333,27.56,224,0.875,bicubic,+5.418,+1.845,-32 +skresnext50_32x4d,85.313,14.687,96.390,3.610,27.48,224,0.875,bicubic,+5.157,+1.748,-41 +dpn98,85.311,14.689,96.469,3.531,61.57,224,0.875,bicubic,+5.669,+1.841,-12 +gluon_resnet101_v1c,85.304,14.696,96.405,3.595,44.57,224,0.875,bicubic,+5.770,+1.827,-10 
+lambda_resnet26t,85.300,14.700,96.718,3.282,10.96,256,0.940,bicubic,+6.204,+2.126,+21 +dpn68b,85.291,14.709,96.464,3.536,12.61,224,0.875,bicubic,+6.076,+2.050,+12 +resnetblur50,85.283,14.717,96.531,3.470,25.56,224,0.875,bicubic,+5.997,+1.892,+5 +resmlp_24_224,85.268,14.732,96.492,3.508,30.02,224,0.875,bicubic,+5.894,+1.946,-6 +convnext_femto_ols.d1_in1k,85.255,14.745,96.770,3.230,5.23,288,0.950,bicubic,+6.321,+2.238,+26 +coat_lite_mini,85.251,14.749,96.680,3.320,11.01,224,0.900,bicubic,+6.163,+2.076,+17 +cait_xxs24_224,85.228,14.773,96.712,3.288,11.96,224,1.000,bicubic,+6.842,+2.402,+55 +resnet33ts,85.228,14.773,96.627,3.373,19.68,256,0.900,bicubic,+6.014,+2.053,+7 +xcit_tiny_12_p16_224_dist,85.208,14.792,96.597,3.403,6.72,224,1.000,bicubic,+6.630,+2.401,+40 +pvt_v2_b1,85.198,14.802,96.622,3.378,14.01,224,0.900,bicubic,+6.504,+2.130,+35 +halonet26t,85.195,14.805,96.462,3.538,12.48,256,0.950,bicubic,+6.096,+2.150,+10 +resnext101_32x8d,85.187,14.813,96.445,3.555,88.79,224,0.875,bilinear,+5.879,+1.927,-10 +gluon_inception_v3,85.183,14.817,96.526,3.474,23.83,299,0.875,bicubic,+6.377,+2.156,+25 +resnet32ts,85.155,14.845,96.624,3.376,17.96,256,0.900,bicubic,+6.151,+2.269,+14 +convnext_femto.d1_in1k,85.151,14.849,96.708,3.292,5.22,288,0.950,bicubic,+6.447,+2.274,+29 +hrnet_w48,85.151,14.849,96.492,3.508,77.47,224,0.875,bilinear,+5.851,+1.980,-10 +gluon_xception65,85.148,14.851,96.597,3.403,39.92,299,0.903,bicubic,+5.433,+1.737,-38 +gluon_resnet101_v1b,85.142,14.858,96.366,3.634,44.55,224,0.875,bicubic,+5.836,+1.842,-14 +eca_halonext26ts,85.131,14.869,96.584,3.416,10.76,256,0.940,bicubic,+5.645,+1.986,-27 +regnetx_120,85.131,14.869,96.477,3.523,46.11,224,0.875,bicubic,+5.535,+1.739,-32 +xception,85.129,14.871,96.471,3.529,22.86,299,0.897,bicubic,+6.077,+2.079,+6 +tf_efficientnet_b1.ap_in1k,85.127,14.873,96.405,3.595,7.79,240,0.882,bicubic,+5.847,+2.099,-13 +eca_botnext26ts_256,85.125,14.875,96.509,3.491,10.59,256,0.950,bicubic,+5.851,+1.895,-13 +hrnet_w64,85.119,14.881,96.744,3.256,128.06,224,0.875,bilinear,+5.645,+2.092,-30 +ssl_resnet50,85.097,14.903,96.866,3.134,25.56,224,0.875,bilinear,+5.875,+2.034,-12 +lambda_resnet26rpt_256,85.095,14.905,96.560,3.440,10.99,256,0.940,bicubic,+6.125,+2.130,+4 +res2net101_26w_4s,85.093,14.907,96.381,3.619,45.21,224,0.875,bilinear,+5.895,+1.949,-10 +tf_efficientnet_cc_b1_8e.in1k,85.063,14.937,96.422,3.578,39.72,240,0.882,bicubic,+5.755,+2.052,-25 +res2net50_26w_8s,85.031,14.969,96.419,3.580,48.40,224,0.875,bilinear,+5.831,+2.052,-13 +xcit_nano_12_p8_384_dist,85.029,14.971,96.629,3.371,3.05,384,1.000,bicubic,+7.209,+2.593,+64 +resnest26d,85.008,14.992,96.637,3.363,17.07,224,0.875,bilinear,+6.530,+2.339,+24 +gluon_resnext50_32x4d,84.995,15.005,96.426,3.574,25.03,224,0.875,bicubic,+5.641,+2.000,-32 +tf_efficientnet_b0.ns_jft_in1k,84.984,15.016,96.503,3.497,5.29,224,0.875,bicubic,+6.326,+2.127,+15 +coat_tiny,84.976,15.024,96.409,3.591,5.50,224,0.900,bicubic,+6.542,+2.371,+25 +dla169,84.920,15.080,96.535,3.465,53.39,224,0.875,bilinear,+6.232,+2.199,+11 +tf_efficientnet_b1.aa_in1k,84.918,15.082,96.364,3.636,7.79,240,0.882,bicubic,+6.092,+2.166,+1 +mobilevitv2_100,84.905,15.095,96.385,3.615,4.90,256,0.888,bicubic,+6.815,+2.221,+41 +legacy_seresnext50_32x4d,84.901,15.099,96.434,3.566,27.56,224,0.875,bilinear,+5.823,+1.998,-14 +hrnet_w44,84.884,15.116,96.434,3.566,67.06,224,0.875,bilinear,+5.988,+2.066,-5 +gluon_resnet50_v1s,84.862,15.138,96.443,3.557,25.68,224,0.875,bicubic,+6.150,+2.205,+3 
+regnetx_080,84.862,15.138,96.434,3.566,39.57,224,0.875,bicubic,+5.668,+1.874,-23 +levit_128,84.843,15.157,96.360,3.640,9.21,224,0.900,bicubic,+6.357,+2.350,+11 +gluon_resnet50_v1d,84.832,15.168,96.398,3.602,25.58,224,0.875,bicubic,+5.758,+1.928,-18 +dla60_res2next,84.830,15.170,96.411,3.589,17.03,224,0.875,bilinear,+6.390,+2.259,+14 +vit_tiny_patch16_384.augreg_in21k_ft_in1k,84.828,15.172,96.708,3.292,5.79,384,1.000,bicubic,+6.398,+2.166,+15 +mixnet_l.ft_in1k,84.822,15.178,96.328,3.672,7.33,224,0.875,bicubic,+5.846,+2.146,-17 +tv_resnet152,84.815,15.185,96.225,3.775,60.19,224,0.875,bilinear,+6.503,+2.187,+20 +dla102x,84.813,15.187,96.552,3.448,26.31,224,0.875,bilinear,+6.303,+2.324,+3 +dla60_res2net,84.813,15.187,96.481,3.519,20.85,224,0.875,bilinear,+6.349,+2.275,+8 +pit_xs_224,84.792,15.208,96.492,3.508,10.62,224,0.900,bicubic,+6.610,+2.324,+23 +xception41,84.792,15.208,96.413,3.587,26.97,299,0.903,bicubic,+6.276,+2.135,0 +regnetx_064,84.781,15.219,96.490,3.510,26.21,224,0.875,bicubic,+5.709,+2.032,-26 +hrnet_w40,84.743,15.257,96.554,3.446,57.56,224,0.875,bilinear,+5.823,+2.084,-21 +res2net50_26w_6s,84.726,15.274,96.281,3.719,37.05,224,0.875,bilinear,+6.156,+2.157,-4 +repvgg_b2,84.724,15.276,96.469,3.531,89.02,224,0.875,bilinear,+5.932,+2.055,-15 +vit_base_patch32_384.augreg_in1k,84.722,15.278,96.319,3.681,88.30,384,1.000,bicubic,+5.962,+2.091,-14 +resmlp_12_distilled_224,84.713,15.287,96.225,3.775,15.35,224,0.875,bicubic,+6.769,+2.667,+29 +cs3darknet_m,84.704,15.296,96.488,3.512,9.31,288,0.950,bicubic,+7.068,+2.474,+41 +legacy_seresnet152,84.704,15.296,96.417,3.583,66.82,224,0.875,bilinear,+6.044,+2.047,-12 +selecsls60b,84.657,15.343,96.300,3.700,32.77,224,0.875,bicubic,+6.245,+2.126,+1 +hrnet_w32,84.651,15.349,96.407,3.593,41.23,224,0.875,bilinear,+6.201,+2.221,-4 +bat_resnext26ts,84.638,15.362,96.272,3.728,10.73,256,0.900,bicubic,+6.396,+2.172,+8 +tf_efficientnetv2_b0.in1k,84.625,15.375,96.274,3.726,7.14,224,0.875,bicubic,+6.269,+2.250,+2 +efficientnet_b1.ft_in1k,84.608,15.392,96.332,3.668,7.79,256,1.000,bicubic,+5.814,+1.990,-25 +regnetx_040,84.600,15.400,96.383,3.617,22.12,224,0.875,bicubic,+6.118,+2.139,-11 +efficientnet_es.ra_in1k,84.591,15.409,96.311,3.689,5.44,224,0.875,bicubic,+6.525,+2.385,+13 +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,84.591,15.409,96.005,3.995,119.42,256,0.900,bicubic,+5.111,+1.867,-74 +hrnet_w30,84.572,15.428,96.388,3.612,37.71,224,0.875,bilinear,+6.366,+2.166,+4 +tf_mixnet_l.in1k,84.564,15.437,96.244,3.756,7.33,224,0.875,bicubic,+5.790,+2.246,-28 +wide_resnet101_2,84.557,15.443,96.349,3.651,126.89,224,0.875,bilinear,+5.701,+2.067,-35 +vit_small_patch16_224.augreg_in1k,84.538,15.462,96.276,3.724,22.05,224,0.900,bicubic,+5.692,+1.992,-35 +dla60x,84.523,15.477,96.285,3.715,17.35,224,0.875,bilinear,+6.277,+2.267,-4 +legacy_seresnet101,84.504,15.496,96.330,3.670,49.33,224,0.875,bilinear,+6.122,+2.066,-10 +cs3darknet_focus_m,84.478,15.522,96.419,3.580,9.30,288,0.950,bicubic,+7.200,+2.450,+43 +resnet26t,84.476,15.524,96.217,3.783,16.01,256,0.940,bicubic,+6.594,+2.377,+14 +coat_lite_tiny,84.450,15.550,96.368,3.632,5.72,224,0.900,bicubic,+6.938,+2.452,+31 +tf_efficientnet_em.in1k,84.450,15.550,96.180,3.820,6.90,240,0.882,bicubic,+6.320,+2.136,-1 +repvgg_b1,84.416,15.584,96.221,3.779,57.42,224,0.875,bilinear,+6.050,+2.123,-14 +efficientnet_b1_pruned.in1k,84.393,15.607,96.140,3.860,6.33,240,0.882,bicubic,+6.157,+2.306,-8 +vit_base_patch16_224.augreg_in1k,84.391,15.610,96.046,3.954,86.57,224,0.900,bicubic,+5.237,+1.946,-61 
+res2net50_26w_4s,84.365,15.635,96.082,3.918,25.70,224,0.875,bilinear,+6.401,+2.228,+4 +hardcorenas_f,84.326,15.674,96.025,3.975,8.20,224,0.875,bilinear,+6.222,+2.222,-5 +res2net50_14w_8s,84.309,15.691,96.072,3.929,25.06,224,0.875,bilinear,+6.159,+2.224,-8 +selecsls60,84.288,15.712,96.095,3.905,30.67,224,0.875,bicubic,+6.306,+2.267,0 +mobilevit_s,84.271,15.729,96.264,3.736,5.58,256,0.900,bicubic,+5.959,+2.118,-19 +regnetx_032,84.237,15.763,96.247,3.753,15.30,224,0.875,bicubic,+6.065,+2.159,-12 +res2next50,84.226,15.774,95.997,4.003,24.67,224,0.875,bilinear,+5.980,+2.105,-18 +convnext_atto_ols.a2_in1k,84.220,15.780,96.223,3.777,3.70,288,0.950,bicubic,+7.004,+2.543,+32 +gluon_resnet50_v1c,84.207,15.793,96.161,3.839,25.58,224,0.875,bicubic,+6.195,+2.173,-7 +dla102,84.190,15.810,96.206,3.794,33.27,224,0.875,bilinear,+6.158,+2.260,-9 +gcresnext26ts,84.164,15.836,96.086,3.914,10.48,256,0.900,bicubic,+6.350,+2.252,+4 +rexnet_100,84.162,15.838,96.255,3.745,4.80,224,0.875,bicubic,+6.304,+2.385,+1 +seresnext26ts,84.149,15.851,96.074,3.926,10.39,256,0.900,bicubic,+6.283,+2.284,-3 +convnext_atto.d2_in1k,84.147,15.853,96.200,3.800,3.70,288,0.950,bicubic,+7.133,+2.500,+34 +tf_inception_v3,84.136,15.864,95.920,4.080,23.83,299,0.875,bicubic,+6.276,+2.280,-3 +res2net50_48w_2s,84.126,15.874,95.965,4.035,25.29,224,0.875,bilinear,+6.604,+2.411,+10 +resnet34d,84.098,15.902,95.978,4.022,21.82,224,0.875,bicubic,+6.982,+2.596,+26 +tf_efficientnet_lite2.in1k,84.094,15.906,96.069,3.931,6.09,260,0.890,bicubic,+6.626,+2.315,+10 +xcit_tiny_12_p16_224,84.090,15.911,96.234,3.766,6.72,224,1.000,bicubic,+6.969,+2.522,+23 +efficientnet_b0.ra_in1k,84.038,15.962,95.956,4.044,5.29,224,0.875,bicubic,+6.340,+2.424,-3 +poolformer_s12,84.032,15.968,96.161,3.839,11.92,224,0.900,bicubic,+6.802,+2.657,+18 +crossvit_9_dagger_240,84.015,15.985,96.084,3.916,8.78,240,0.875,bicubic,+7.035,+2.474,+27 +hardcorenas_e,83.968,16.032,95.898,4.101,8.07,224,0.875,bilinear,+6.174,+2.204,-8 +gmixer_24_224,83.968,16.032,95.849,4.151,24.72,224,0.875,bicubic,+5.932,+2.185,-23 +tf_efficientnet_cc_b0_8e.in1k,83.966,16.034,96.065,3.935,24.01,224,0.875,bicubic,+6.058,+2.411,-17 +tv_resnext50_32x4d,83.959,16.041,95.960,4.040,25.03,224,0.875,bilinear,+6.339,+2.264,-5 +regnety_016,83.955,16.045,96.005,3.995,11.20,224,0.875,bicubic,+6.093,+2.285,-16 +gluon_resnet50_v1b,83.940,16.060,96.012,3.988,25.56,224,0.875,bicubic,+6.360,+2.296,-3 +densenet161,83.906,16.094,96.010,3.990,28.68,224,0.875,bicubic,+6.548,+2.372,+4 +adv_inception_v3,83.902,16.098,95.935,4.065,23.83,299,0.875,bicubic,+6.320,+2.199,-6 +mobilenetv2_120d.ra_in1k,83.893,16.107,95.909,4.091,5.83,224,0.875,bicubic,+6.609,+2.417,+5 +seresnext26t_32x4d,83.878,16.122,95.931,4.069,16.81,224,0.875,bicubic,+5.892,+2.185,-29 +tv_resnet101,83.848,16.152,95.892,4.108,44.55,224,0.875,bilinear,+6.474,+2.352,-1 +tinynet_a.in1k,83.827,16.173,95.820,4.181,6.19,192,0.875,bicubic,+6.175,+2.284,-16 +inception_v3,83.761,16.239,95.879,4.121,23.83,299,0.875,bicubic,+6.321,+2.403,-5 +hardcorenas_d,83.759,16.241,95.734,4.266,7.50,224,0.875,bilinear,+6.327,+2.250,-5 +seresnext26d_32x4d,83.754,16.246,95.849,4.151,16.81,224,0.875,bicubic,+6.152,+2.241,-15 +xcit_nano_12_p8_224_dist,83.731,16.269,95.960,4.040,3.05,224,1.000,bicubic,+7.407,+2.870,+31 +dla60,83.729,16.271,95.933,4.067,22.04,224,0.875,bilinear,+6.697,+2.615,+8 +eca_resnext26ts,83.701,16.299,95.948,4.052,10.30,256,0.900,bicubic,+6.249,+2.382,-11 +repvgg_b1g4,83.699,16.301,96.020,3.980,39.97,224,0.875,bilinear,+6.105,+2.194,-18 
+convmixer_1024_20_ks9_p14,83.682,16.318,95.896,4.104,24.38,224,0.960,bicubic,+6.736,+2.538,+9 +legacy_seresnet50,83.662,16.337,95.973,4.027,28.09,224,0.875,bilinear,+6.032,+2.225,-23 +tf_efficientnet_b0.ap_in1k,83.650,16.350,95.779,4.221,5.29,224,0.875,bicubic,+6.564,+2.523,+1 +skresnet34,83.641,16.359,95.933,4.067,22.28,224,0.875,bicubic,+6.729,+2.611,+9 +tf_efficientnet_cc_b0_4e.in1k,83.639,16.361,95.740,4.260,13.31,224,0.875,bicubic,+6.333,+2.406,-12 +resmlp_12_224,83.571,16.429,95.760,4.240,15.35,224,0.875,bicubic,+6.917,+2.580,+13 +densenet201,83.556,16.444,95.811,4.189,20.01,224,0.875,bicubic,+6.270,+2.333,-13 +mobilenetv3_large_100.miil_in21k_ft_in1k,83.556,16.444,95.452,4.548,5.48,224,0.875,bilinear,+5.640,+2.542,-42 +gernet_s,83.522,16.478,95.794,4.206,8.17,224,0.875,bilinear,+6.606,+2.662,+3 +legacy_seresnext26_32x4d,83.517,16.483,95.719,4.281,16.79,224,0.875,bicubic,+6.413,+2.403,-7 +tf_efficientnet_b0.aa_in1k,83.515,16.485,95.719,4.281,5.29,224,0.875,bicubic,+6.667,+2.491,+2 +mixnet_m.ft_in1k,83.515,16.485,95.689,4.311,5.01,224,0.875,bicubic,+6.255,+2.265,-14 +hrnet_w18,83.500,16.500,95.907,4.093,21.30,224,0.875,bilinear,+6.742,+2.463,+4 +densenetblur121d,83.472,16.527,95.822,4.178,8.00,224,0.875,bicubic,+6.885,+2.630,+9 +resnext26ts,83.464,16.536,95.728,4.272,10.30,256,0.900,bicubic,+6.684,+2.598,+1 +selecsls42b,83.457,16.543,95.745,4.255,32.46,224,0.875,bicubic,+6.283,+2.355,-16 +tf_efficientnet_lite1.in1k,83.344,16.656,95.642,4.358,5.42,240,0.882,bicubic,+6.702,+2.416,+3 +hardcorenas_c,83.342,16.658,95.706,4.294,5.52,224,0.875,bilinear,+6.288,+2.548,-13 +regnetx_016,83.195,16.805,95.740,4.260,9.19,224,0.875,bicubic,+6.245,+2.320,-10 +mobilenetv2_140.ra_in1k,83.182,16.818,95.689,4.311,6.11,224,0.875,bicubic,+6.666,+2.693,+6 +dpn68,83.178,16.822,95.597,4.402,12.61,224,0.875,bicubic,+6.860,+2.620,+8 +tf_efficientnet_es.in1k,83.178,16.822,95.585,4.415,5.44,224,0.875,bicubic,+6.584,+2.383,0 +xcit_nano_12_p16_384_dist,83.176,16.824,95.753,4.247,3.05,384,1.000,bicubic,+7.718,+3.059,+22 +tf_mixnet_m.in1k,83.176,16.824,95.461,4.539,5.01,224,0.875,bicubic,+6.234,+2.309,-12 +ese_vovnet19b_dw,83.109,16.890,95.779,4.221,6.54,224,0.875,bicubic,+6.311,+2.511,-10 +levit_128s,83.069,16.931,95.531,4.469,7.78,224,0.900,bicubic,+6.539,+2.665,-1 +resnet26d,83.050,16.950,95.604,4.396,16.01,224,0.875,bicubic,+6.354,+2.454,-9 +repvgg_a2,83.001,16.999,95.589,4.411,28.21,224,0.875,bilinear,+6.541,+2.585,-1 +tv_resnet50,82.958,17.042,95.467,4.533,25.56,224,0.875,bilinear,+6.820,+2.603,+2 +hardcorenas_b,82.873,17.128,95.392,4.607,5.18,224,0.875,bilinear,+6.335,+2.638,-6 +densenet121,82.823,17.177,95.585,4.415,7.98,224,0.875,bicubic,+7.245,+2.933,+11 +mobilevitv2_075,82.813,17.187,95.576,4.424,2.87,256,0.888,bicubic,+7.191,+2.808,+9 +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,82.691,17.309,95.845,4.155,6.36,384,1.000,bicubic,+6.739,+2.585,+1 +densenet169,82.683,17.317,95.600,4.400,14.15,224,0.875,bicubic,+6.776,+2.574,+2 +edgenext_x_small,82.572,17.428,95.461,4.539,2.34,288,1.000,bicubic,+6.883,+2.695,+3 +mixnet_s.ft_in1k,82.525,17.476,95.356,4.644,4.13,224,0.875,bicubic,+6.532,+2.560,-4 +vit_small_patch32_224.augreg_in21k_ft_in1k,82.514,17.486,95.670,4.330,22.88,224,0.900,bicubic,+6.524,+2.398,-4 +regnety_008,82.493,17.508,95.487,4.513,6.26,224,0.875,bicubic,+6.177,+2.421,-8 +efficientnet_lite0.ra_in1k,82.382,17.619,95.279,4.721,4.65,224,0.875,bicubic,+6.898,+2.769,+6 +resnest14d,82.352,17.648,95.339,4.661,10.61,224,0.875,bilinear,+6.846,+2.821,+4 
+hardcorenas_a,82.313,17.687,95.294,4.706,5.26,224,0.875,bilinear,+6.397,+2.780,-6 +efficientnet_es_pruned.in1k,82.296,17.704,95.303,4.697,5.44,224,0.875,bicubic,+7.296,+2.855,+15 +mobilenetv3_rw.rmsp_in1k,82.275,17.725,95.234,4.766,5.48,224,0.875,bicubic,+6.641,+2.526,-3 +semnasnet_100.rmsp_in1k,82.251,17.749,95.230,4.770,3.89,224,0.875,bicubic,+6.803,+2.626,+4 +mobilenetv3_large_100.ra_in1k,82.177,17.823,95.196,4.804,5.48,224,0.875,bicubic,+6.410,+2.654,-8 +resnet34,82.138,17.862,95.130,4.870,21.80,224,0.875,bilinear,+7.028,+2.846,+8 +mobilenetv2_110d.ra_in1k,82.070,17.930,95.076,4.923,4.52,224,0.875,bicubic,+7.034,+2.890,+9 +vit_tiny_patch16_224.augreg_in21k_ft_in1k,82.066,17.934,95.489,4.511,5.72,224,0.900,bicubic,+6.612,+2.641,-1 +tf_mixnet_s.in1k,82.038,17.962,95.121,4.879,4.13,224,0.875,bicubic,+6.388,+2.493,-10 +repvgg_b0,82.001,17.999,95.100,4.900,15.82,224,0.875,bilinear,+6.849,+2.682,+1 +deit_tiny_distilled_patch16_224,81.997,18.003,95.141,4.859,5.91,224,0.900,bicubic,+7.487,+3.251,+17 +mixer_b16_224,81.976,18.024,94.449,5.551,59.88,224,0.875,bicubic,+5.376,+2.221,-31 +pit_ti_distilled_224,81.967,18.033,95.145,4.855,5.10,224,0.900,bicubic,+7.437,+3.049,+14 +hrnet_w18_small_v2,81.961,18.039,95.164,4.836,15.60,224,0.875,bilinear,+6.847,+2.748,-1 +tf_efficientnet_lite0.in1k,81.952,18.048,95.168,4.832,4.65,224,0.875,bicubic,+7.122,+2.992,+5 +resnet26,81.944,18.056,95.241,4.759,16.00,224,0.875,bicubic,+6.652,+2.671,-7 +tinynet_b.in1k,81.871,18.129,94.878,5.122,3.73,188,0.875,bicubic,+6.897,+2.690,+1 +tf_mobilenetv3_large_100.in1k,81.848,18.152,95.070,4.930,5.48,224,0.875,bilinear,+6.330,+2.464,-15 +tv_densenet121,81.726,18.274,95.034,4.966,7.98,224,0.875,bicubic,+6.988,+2.884,+3 +regnety_006,81.700,18.300,95.115,4.885,6.06,224,0.875,bicubic,+6.454,+2.583,-10 +dla34,81.658,18.342,94.878,5.122,15.74,224,0.875,bilinear,+7.028,+2.800,+4 +xcit_nano_12_p8_224,81.643,18.357,95.273,4.727,3.05,224,1.000,bicubic,+7.729,+3.101,+12 +crossvit_9_240,81.615,18.385,94.978,5.022,8.55,240,0.875,bicubic,+7.651,+3.010,+10 +mobilevit_xs,81.566,18.434,95.034,4.966,2.32,256,0.900,bicubic,+6.922,+2.682,0 +fbnetc_100.rmsp_in1k,81.559,18.441,94.970,5.030,5.57,224,0.875,bilinear,+6.436,+2.584,-13 +legacy_seresnet34,81.534,18.466,94.899,5.101,21.96,224,0.875,bilinear,+6.726,+2.775,-5 +gluon_resnet34_v1b,81.500,18.500,94.810,5.190,21.80,224,0.875,bicubic,+6.912,+2.820,-1 +regnetx_008,81.485,18.515,95.059,4.941,7.26,224,0.875,bicubic,+6.447,+2.724,-13 +mnasnet_100.rmsp_in1k,81.459,18.541,94.899,5.101,4.38,224,0.875,bicubic,+6.801,+2.785,-6 +vgg19_bn,81.446,18.554,94.763,5.237,143.68,224,0.875,bilinear,+7.232,+2.921,-1 +vit_base_patch32_224.augreg_in1k,81.143,18.857,94.427,5.572,88.22,224,0.900,bicubic,+6.239,+2.649,-12 +convit_tiny,81.126,18.874,95.044,4.955,5.71,224,0.875,bicubic,+8.010,+3.331,+10 +crossvit_tiny_240,81.088,18.912,94.987,5.013,7.01,240,0.875,bicubic,+7.764,+3.071,+6 +spnasnet_100.rmsp_in1k,80.878,19.122,94.526,5.474,4.42,224,0.875,bilinear,+6.794,+2.708,-4 +ghostnet_100,80.699,19.301,94.291,5.709,5.18,224,0.875,bilinear,+6.721,+2.835,-3 +regnety_004,80.659,19.341,94.686,5.314,4.34,224,0.875,bicubic,+6.624,+2.934,-5 +skresnet18,80.637,19.363,94.378,5.622,11.96,224,0.875,bicubic,+7.599,+3.210,+6 +regnetx_006,80.629,19.371,94.524,5.476,6.20,224,0.875,bicubic,+6.777,+2.852,-3 +pit_ti_224,80.605,19.395,94.618,5.383,4.85,224,0.900,bicubic,+7.693,+3.216,+7 +swsl_resnet18,80.575,19.425,94.743,5.256,11.69,224,0.875,bilinear,+7.299,+3.010,+1 
+vgg16_bn,80.556,19.444,94.592,5.408,138.37,224,0.875,bilinear,+7.206,+3.086,-3
+semnasnet_075.rmsp_in1k,80.477,19.523,94.321,5.679,2.91,224,0.875,bicubic,+7.503,+3.185,+2
+tv_resnet34,80.389,19.611,94.436,5.564,21.80,224,0.875,bilinear,+7.077,+3.010,-3
+resnet18d,80.387,19.613,94.252,5.748,11.71,224,0.875,bicubic,+8.127,+3.556,+9
+mobilenetv2_100.ra_in1k,80.257,19.743,94.195,5.805,3.50,224,0.875,bicubic,+7.287,+3.179,0
+xcit_nano_12_p16_224_dist,80.212,19.788,94.361,5.639,3.05,224,1.000,bicubic,+7.910,+3.499,+6
+vit_base_patch32_224.sam,80.208,19.792,93.825,6.175,88.22,224,0.900,bicubic,+6.518,+2.811,-11
+ssl_resnet18,80.101,19.899,94.590,5.410,11.69,224,0.875,bilinear,+7.491,+3.174,-1
+tf_mobilenetv3_large_075.in1k,80.093,19.907,94.184,5.816,3.99,224,0.875,bilinear,+6.655,+2.834,-12
+deit_tiny_patch16_224,80.018,19.982,94.449,5.551,5.72,224,0.900,bicubic,+7.850,+3.331,+5
+hrnet_w18_small,79.555,20.445,93.898,6.102,13.19,224,0.875,bilinear,+7.213,+3.220,0
+vgg19,79.480,20.520,93.870,6.130,143.67,224,0.875,bilinear,+7.112,+2.998,-3
+regnetx_004,79.435,20.565,93.853,6.147,5.16,224,0.875,bicubic,+7.039,+3.023,-5
+resnet14t,79.239,20.761,93.606,6.394,10.08,224,0.950,bilinear,+6.889,+3.266,-4
+tf_mobilenetv3_large_minimal_100.in1k,79.222,20.778,93.706,6.294,3.92,224,0.875,bilinear,+6.974,+3.076,-1
+edgenext_xx_small,79.175,20.825,93.821,6.179,1.33,288,1.000,bicubic,+7.309,+3.277,+2
+legacy_seresnet18,79.153,20.847,93.783,6.217,11.78,224,0.875,bicubic,+7.409,+3.449,+3
+vgg16,79.038,20.962,93.646,6.354,138.36,224,0.875,bilinear,+7.444,+3.270,+3
+vgg13_bn,79.006,20.994,93.655,6.345,133.05,224,0.875,bilinear,+7.412,+3.273,+3
+vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,78.991,21.009,93.902,6.098,6.34,224,0.900,bicubic,+7.203,+3.074,-1
+lcnet_100.ra2_in1k,78.914,21.086,93.550,6.450,2.95,224,0.875,bicubic,+6.800,+3.172,-5
+pvt_v2_b0,78.752,21.248,93.849,6.151,3.67,224,0.900,bicubic,+8.096,+3.641,+3
+tinynet_c.in1k,78.438,21.562,93.140,6.860,2.46,184,0.875,bicubic,+7.206,+3.392,0
+gluon_resnet18_v1b,78.372,21.628,93.136,6.864,11.69,224,0.875,bicubic,+7.536,+3.376,0
+mobilevitv2_050,78.118,21.882,93.573,6.426,1.37,256,0.888,bicubic,+7.978,+3.647,+3
 vgg11_bn,77.926,22.074,93.230,6.770,132.87,224,0.875,bilinear,+7.566,+3.428,0
-xcit_nano_12_p16_224,77.900,22.100,93.430,6.570,3.05,224,1.000,bicubic,+7.946,+3.674,+2
-regnety_002,77.411,22.589,92.912,7.088,3.16,224,0.875,bicubic,+7.155,+3.378,-1
-mixer_l16_224,77.287,22.713,90.574,9.426,208.20,224,0.875,bicubic,+5.221,+2.908,-11
-resnet18,77.279,22.721,92.760,7.240,11.69,224,0.875,bilinear,+7.531,+3.676,+1
-vgg13,77.227,22.773,92.689,7.311,133.05,224,0.875,bilinear,+7.301,+3.443,-1
-mobilevit_xxs,76.602,23.398,92.694,7.306,1.27,256,0.900,bicubic,+7.682,+3.748,+1
-vgg11,76.393,23.607,92.154,7.846,132.86,224,0.875,bilinear,+7.365,+3.526,-1
-resnet10t,76.222,23.778,92.224,7.776,5.44,224,0.950,bilinear,+7.914,+4.144,+2
-regnetx_002,76.119,23.881,92.211,7.789,2.68,224,0.875,bicubic,+7.365,+3.655,0
-lcnet_075,76.051,23.949,92.068,7.932,2.36,224,0.875,bicubic,+7.237,+3.704,-2
-dla60x_c,75.618,24.382,92.179,7.821,1.32,224,0.875,bilinear,+7.738,+3.745,+1
-mobilenetv3_small_100,74.911,25.089,91.496,8.504,2.54,224,0.875,bicubic,+7.253,+3.862,+1
-tf_mobilenetv3_small_100,74.717,25.283,91.257,8.743,2.54,224,0.875,bilinear,+6.791,+3.589,-2
-tinynet_d,74.283,25.717,90.926,9.074,2.34,152,0.875,bicubic,+7.321,+3.862,0
-mnasnet_small,73.816,26.184,90.727,9.273,2.03,224,0.875,bicubic,+7.610,+4.221,0
-dla46x_c,73.632,26.368,91.110,8.890,1.07,224,0.875,bilinear,+7.680,+4.124,0
-mobilenetv2_050,73.468,26.532,90.317,9.682,1.97,224,0.875,bicubic,+7.524,+4.237,0 -tf_mobilenetv3_small_075,72.812,27.188,90.038,9.962,2.04,224,0.875,bilinear,+7.100,+3.908,0 -dla46_c,72.611,27.389,90.503,9.497,1.30,224,0.875,bilinear,+7.739,+4.201,+1 -mobilenetv3_small_075,72.323,27.677,89.671,10.329,2.04,224,0.875,bicubic,+7.085,+4.231,-1 -lcnet_050,70.385,29.616,88.821,11.179,1.88,224,0.875,bicubic,+7.291,+4.439,0 -tf_mobilenetv3_small_minimal_100,70.113,29.887,88.505,11.495,2.04,224,0.875,bilinear,+7.213,+4.271,0 -tinynet_e,66.813,33.187,86.276,13.724,2.04,106,0.875,bicubic,+6.957,+4.510,0 -mobilenetv3_small_050,64.671,35.329,84.867,15.133,1.59,224,0.875,bicubic,+6.781,+4.673,0 +xcit_nano_12_p16_224,77.900,22.100,93.430,6.570,3.05,224,1.000,bicubic,+7.946,+3.676,+2 +regnety_002,77.405,22.595,92.914,7.086,3.16,224,0.875,bicubic,+7.153,+3.374,-1 +mixer_l16_224,77.285,22.715,90.582,9.418,208.20,224,0.875,bicubic,+5.227,+2.914,-12 +resnet18,77.276,22.724,92.756,7.244,11.69,224,0.875,bilinear,+7.528,+3.678,+1 +vgg13,77.230,22.770,92.689,7.311,133.05,224,0.875,bilinear,+7.303,+3.444,-1 +mobilevit_xxs,76.595,23.405,92.685,7.315,1.27,256,0.900,bicubic,+7.683,+3.747,+1 +vgg11,76.384,23.616,92.154,7.846,132.86,224,0.875,bilinear,+7.360,+3.526,-1 +resnet10t,76.215,23.785,92.224,7.776,5.44,224,0.950,bilinear,+7.921,+4.146,+2 +regnetx_002,76.124,23.876,92.211,7.789,2.68,224,0.875,bicubic,+7.362,+3.655,0 +lcnet_075.ra2_in1k,76.053,23.947,92.066,7.934,2.36,224,0.875,bicubic,+7.235,+3.696,-2 +dla60x_c,75.637,24.363,92.177,7.823,1.32,224,0.875,bilinear,+7.745,+3.751,+1 +mobilenetv3_small_100.lamb_in1k,74.911,25.089,91.498,8.502,2.54,224,0.875,bicubic,+7.259,+3.862,+1 +tf_mobilenetv3_small_100.in1k,74.717,25.283,91.257,8.743,2.54,224,0.875,bilinear,+6.795,+3.593,-2 +tinynet_d.in1k,74.285,25.715,90.924,9.076,2.34,152,0.875,bicubic,+7.323,+3.858,0 +mnasnet_small.lamb_in1k,73.816,26.184,90.732,9.268,2.03,224,0.875,bicubic,+7.610,+4.224,0 +dla46x_c,73.647,26.353,91.095,8.905,1.07,224,0.875,bilinear,+7.677,+4.115,0 +mobilenetv2_050.lamb_in1k,73.465,26.535,90.320,9.680,1.97,224,0.875,bicubic,+7.523,+4.238,0 +tf_mobilenetv3_small_075.in1k,72.812,27.188,90.036,9.964,2.04,224,0.875,bilinear,+7.096,+3.906,0 +dla46_c,72.603,27.397,90.499,9.501,1.30,224,0.875,bilinear,+7.737,+4.207,+1 +mobilenetv3_small_075.lamb_in1k,72.330,27.670,89.666,10.334,2.04,224,0.875,bicubic,+7.084,+4.230,-1 +lcnet_050.ra2_in1k,70.400,29.601,88.823,11.177,1.88,224,0.875,bicubic,+7.300,+4.443,0 +tf_mobilenetv3_small_minimal_100.in1k,70.111,29.889,88.505,11.495,2.04,224,0.875,bilinear,+7.205,+4.275,0 +tinynet_e.in1k,66.810,33.190,86.274,13.726,2.04,106,0.875,bicubic,+6.954,+4.512,0 +mobilenetv3_small_050.lamb_in1k,64.669,35.331,84.865,15.136,1.59,224,0.875,bicubic,+6.779,+4.671,0 diff --git a/results/results-imagenet.csv b/results/results-imagenet.csv index d475f4da..199dfd6e 100644 --- a/results/results-imagenet.csv +++ b/results/results-imagenet.csv @@ -1,669 +1,791 @@ model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation -beit_large_patch16_512,88.602,11.398,98.656,1.344,305.67,512,1.000,bicubic -beit_large_patch16_384,88.406,11.594,98.606,1.394,305.00,384,1.000,bicubic -tf_efficientnet_l2_ns,88.350,11.650,98.650,1.350,480.31,800,0.960,bicubic -tf_efficientnet_l2_ns_475,88.232,11.768,98.546,1.454,480.31,475,0.936,bicubic +eva_giant_patch14_560.m30m_ft_in22k_in1k,89.796,10.204,98.992,1.008,"1,014.45",560,1.000,bicubic +eva_giant_patch14_336.m30m_ft_in22k_in1k,89.568,10.432,98.952,1.048,"1,013.01",336,1.000,bicubic 
+eva_giant_patch14_336.clip_ft_in1k,89.476,10.524,98.824,1.176,"1,013.01",336,1.000,bicubic +eva_large_patch14_336.in22k_ft_in22k_in1k,89.204,10.796,98.850,1.150,304.53,336,1.000,bicubic +eva_giant_patch14_224.clip_ft_in1k,89.100,10.900,98.716,1.284,"1,012.56",224,1.000,bicubic +eva_large_patch14_336.in22k_ft_in1k,88.664,11.336,98.720,1.280,304.53,336,1.000,bicubic +beit_large_patch16_512.in22k_ft_in22k_in1k,88.598,11.402,98.656,1.344,305.67,512,1.000,bicubic +eva_large_patch14_196.in22k_ft_in22k_in1k,88.586,11.414,98.656,1.344,304.14,196,1.000,bicubic +vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,88.574,11.426,98.660,1.340,632.46,336,1.000,bicubic +maxvit_xlarge_tf_512.in21k_ft_in1k,88.538,11.462,98.644,1.356,475.77,512,1.000,bicubic +beit_large_patch16_384.in22k_ft_in22k_in1k,88.404,11.596,98.608,1.392,305.00,384,1.000,bicubic +beitv2_large_patch16_224.in1k_ft_in22k_in1k,88.386,11.614,98.598,1.402,304.43,224,0.950,bicubic +tf_efficientnet_l2.ns_jft_in1k,88.352,11.648,98.650,1.350,480.31,800,0.960,bicubic +maxvit_xlarge_tf_384.in21k_ft_in1k,88.306,11.694,98.544,1.456,475.32,384,1.000,bicubic +vit_large_patch14_clip_336.openai_ft_in12k_in1k,88.266,11.734,98.532,1.468,304.53,336,1.000,bicubic +vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,88.246,11.754,98.550,1.450,632.05,224,1.000,bicubic +tf_efficientnet_l2.ns_jft_in1k_475,88.234,11.766,98.546,1.454,480.31,475,0.936,bicubic +maxvit_large_tf_512.in21k_ft_in1k,88.218,11.782,98.598,1.402,212.33,512,1.000,bicubic +maxvit_base_tf_512.in21k_ft_in1k,88.212,11.788,98.532,1.468,119.88,512,1.000,bicubic +vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,88.182,11.818,98.572,1.428,304.53,336,1.000,bicubic +vit_large_patch14_clip_224.openai_ft_in12k_in1k,88.168,11.832,98.544,1.456,304.20,224,1.000,bicubic +maxvit_large_tf_384.in21k_ft_in1k,87.992,12.008,98.566,1.434,212.03,384,1.000,bicubic +eva_large_patch14_196.in22k_ft_in1k,87.938,12.062,98.492,1.508,304.14,196,1.000,bicubic +maxvit_base_tf_384.in21k_ft_in1k,87.922,12.078,98.542,1.458,119.65,384,1.000,bicubic +vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,87.890,12.110,98.410,1.590,304.20,224,1.000,bicubic +vit_large_patch14_clip_224.openai_ft_in1k,87.852,12.148,98.428,1.572,304.20,224,1.000,bicubic +vit_large_patch14_clip_336.laion2b_ft_in1k,87.848,12.152,98.370,1.630,304.53,336,1.000,bicubic +convnext_xlarge.fb_in22k_ft_in1k_384,87.748,12.252,98.554,1.446,350.20,384,1.000,bicubic deit3_large_patch16_384_in21ft1k,87.716,12.284,98.512,1.488,304.76,384,1.000,bicubic -convnext_xlarge_384_in22ft1k,87.544,12.456,98.486,1.514,350.20,384,1.000,bicubic -beit_large_patch16_224,87.476,12.524,98.304,1.696,304.43,224,0.900,bicubic -swinv2_large_window12to24_192to384_22kft1k,87.456,12.544,98.252,1.748,196.74,384,1.000,bicubic -convnext_large_384_in22ft1k,87.396,12.604,98.366,1.634,197.77,384,1.000,bicubic -deit3_huge_patch14_224_in21ft1k,87.180,12.820,98.260,1.740,632.13,224,1.000,bicubic -swin_large_patch4_window12_384,87.152,12.848,98.240,1.760,196.74,384,1.000,bicubic +vit_huge_patch14_clip_224.laion2b_ft_in1k,87.594,12.406,98.220,1.780,632.05,224,1.000,bicubic +beit_large_patch16_224.in22k_ft_in22k_in1k,87.476,12.524,98.304,1.696,304.43,224,0.900,bicubic +convnext_large.fb_in22k_ft_in1k_384,87.472,12.528,98.386,1.614,197.77,384,1.000,bicubic +swinv2_large_window12to24_192to384_22kft1k,87.458,12.542,98.252,1.748,196.74,384,1.000,bicubic +convnext_xlarge.fb_in22k_ft_in1k,87.338,12.662,98.328,1.672,350.20,288,1.000,bicubic 
+vit_large_patch14_clip_224.laion2b_ft_in1k,87.292,12.708,98.246,1.754,304.20,224,1.000,bicubic +vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,87.218,12.782,98.034,1.966,86.86,384,1.000,bicubic +deit3_huge_patch14_224_in21ft1k,87.184,12.816,98.260,1.740,632.13,224,1.000,bicubic +swin_large_patch4_window12_384,87.148,12.852,98.234,1.766,196.74,384,1.000,bicubic swinv2_base_window12to24_192to384_22kft1k,87.108,12.892,98.236,1.764,87.92,384,1.000,bicubic -vit_large_patch16_384,87.080,12.920,98.300,1.700,304.72,384,1.000,bicubic -volo_d5_512,87.040,12.960,97.968,2.032,296.09,512,1.150,bicubic -convnext_xlarge_in22ft1k,87.002,12.998,98.212,1.788,350.20,224,0.875,bicubic -deit3_large_patch16_224_in21ft1k,86.982,13.018,98.238,1.762,304.37,224,1.000,bicubic -volo_d5_448,86.954,13.046,97.940,2.060,295.91,448,1.150,bicubic -swinv2_large_window12to16_192to256_22kft1k,86.946,13.054,98.110,1.890,196.74,256,0.900,bicubic -tf_efficientnet_b7_ns,86.832,13.168,98.096,1.904,66.35,600,0.949,bicubic -beit_base_patch16_384,86.798,13.202,98.136,1.864,86.74,384,1.000,bicubic -volo_d4_448,86.792,13.208,97.882,2.118,193.41,448,1.150,bicubic -deit3_base_patch16_384_in21ft1k,86.742,13.258,98.112,1.888,86.88,384,1.000,bicubic -convnext_large_in22ft1k,86.636,13.364,98.028,1.972,197.77,224,0.875,bicubic -convnext_base_384_in22ft1k,86.542,13.458,98.190,1.810,88.59,384,1.000,bicubic -volo_d3_448,86.496,13.504,97.710,2.290,86.63,448,1.000,bicubic -cait_m48_448,86.488,13.512,97.750,2.250,356.46,448,1.000,bicubic -tf_efficientnet_b6_ns,86.450,13.550,97.886,2.114,43.04,528,0.942,bicubic -swin_base_patch4_window12_384,86.432,13.568,98.056,1.944,87.90,384,1.000,bicubic -tf_efficientnetv2_xl_in21ft1k,86.420,13.580,97.868,2.132,208.12,512,1.000,bicubic -swin_large_patch4_window7_224,86.320,13.680,97.892,2.108,196.53,224,0.900,bicubic -tf_efficientnetv2_l_in21ft1k,86.304,13.696,97.980,2.020,118.52,480,1.000,bicubic -swinv2_base_window12to16_192to256_22kft1k,86.270,13.730,97.896,2.104,87.92,256,0.900,bicubic -vit_large_r50_s32_384,86.180,13.820,97.920,2.080,329.09,384,1.000,bicubic -dm_nfnet_f6,86.142,13.858,97.730,2.270,438.36,576,0.956,bicubic -tf_efficientnet_b5_ns,86.088,13.912,97.752,2.248,30.39,456,0.934,bicubic -volo_d5_224,86.070,13.930,97.578,2.422,295.46,224,0.960,bicubic +vit_large_patch16_384.augreg_in21k_ft_in1k,87.080,12.920,98.300,1.700,304.72,384,1.000,bicubic +volo_d5_512,87.044,12.956,97.968,2.032,296.09,512,1.150,bicubic +vit_base_patch16_clip_384.openai_ft_in12k_in1k,87.034,12.966,98.180,1.820,86.86,384,0.950,bicubic +convnext_large.fb_in22k_ft_in1k,87.016,12.984,98.206,1.794,197.77,288,1.000,bicubic +deit3_large_patch16_224_in21ft1k,86.978,13.022,98.238,1.762,304.37,224,1.000,bicubic +volo_d5_448,86.954,13.046,97.938,2.062,295.91,448,1.150,bicubic +swinv2_large_window12to16_192to256_22kft1k,86.936,13.064,98.108,1.892,196.74,256,0.900,bicubic +tf_efficientnet_b7.ns_jft_in1k,86.840,13.160,98.094,1.906,66.35,600,0.949,bicubic +tf_efficientnetv2_l.in21k_ft_in1k,86.806,13.194,98.134,1.866,118.52,480,1.000,bicubic +beit_base_patch16_384.in22k_ft_in22k_in1k,86.800,13.200,98.138,1.862,86.74,384,1.000,bicubic +convnext_base.fb_in22k_ft_in1k_384,86.794,13.206,98.264,1.736,88.59,384,1.000,bicubic +volo_d4_448,86.790,13.210,97.882,2.118,193.41,448,1.150,bicubic +tf_efficientnetv2_xl.in21k_ft_in1k,86.748,13.252,98.018,1.982,208.12,512,1.000,bicubic +deit3_base_patch16_384_in21ft1k,86.744,13.256,98.112,1.888,86.88,384,1.000,bicubic 
+vit_base_patch16_clip_384.laion2b_ft_in1k,86.620,13.380,98.010,1.990,86.86,384,1.000,bicubic +maxvit_base_tf_512.in1k,86.598,13.402,97.920,2.080,119.88,512,1.000,bicubic +maxvit_large_tf_512.in1k,86.518,13.482,97.884,2.116,212.33,512,1.000,bicubic +volo_d3_448,86.494,13.506,97.710,2.290,86.63,448,1.000,bicubic +cait_m48_448,86.484,13.516,97.754,2.246,356.46,448,1.000,bicubic +beitv2_base_patch16_224.in1k_ft_in22k_in1k,86.480,13.520,98.048,1.952,86.53,224,0.900,bicubic +tf_efficientnet_b6.ns_jft_in1k,86.452,13.548,97.882,2.118,43.04,528,0.942,bicubic +swin_base_patch4_window12_384,86.432,13.568,98.058,1.942,87.90,384,1.000,bicubic +swin_large_patch4_window7_224,86.320,13.680,97.896,2.104,196.53,224,0.900,bicubic +maxvit_base_tf_384.in1k,86.294,13.706,97.804,2.196,119.65,384,1.000,bicubic +convnext_base.fb_in22k_ft_in1k,86.280,13.720,98.090,1.910,88.59,288,1.000,bicubic +swinv2_base_window12to16_192to256_22kft1k,86.274,13.726,97.896,2.104,87.92,256,0.900,bicubic +maxvit_large_tf_384.in1k,86.236,13.764,97.690,2.310,212.03,384,1.000,bicubic +vit_base_patch8_224.augreg2_in21k_ft_in1k,86.212,13.788,97.832,2.168,86.58,224,0.900,bicubic +vit_base_patch16_clip_384.openai_ft_in1k,86.206,13.794,97.874,2.126,86.86,384,1.000,bicubic +vit_large_r50_s32_384.augreg_in21k_ft_in1k,86.184,13.816,97.918,2.082,329.09,384,1.000,bicubic +vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,86.170,13.830,97.754,2.246,86.57,224,0.950,bicubic +dm_nfnet_f6,86.144,13.856,97.730,2.270,438.36,576,0.956,bicubic +maxvit_small_tf_512.in1k,86.088,13.912,97.758,2.242,69.13,512,1.000,bicubic +tf_efficientnet_b5.ns_jft_in1k,86.088,13.912,97.752,2.248,30.39,456,0.934,bicubic +volo_d5_224,86.068,13.932,97.578,2.422,295.46,224,0.960,bicubic cait_m36_384,86.054,13.946,97.730,2.270,271.22,384,1.000,bicubic -volo_d2_384,86.036,13.964,97.574,2.426,58.87,384,1.000,bicubic -vit_base_patch16_384,86.006,13.994,98.004,1.996,86.86,384,1.000,bicubic -xcit_large_24_p8_384_dist,85.998,14.002,97.684,2.316,188.93,384,1.000,bicubic -volo_d4_224,85.876,14.124,97.468,2.532,192.96,224,0.960,bicubic -vit_large_patch16_224,85.844,14.156,97.822,2.178,304.33,224,0.900,bicubic -convnext_base_in22ft1k,85.824,14.176,97.866,2.134,88.59,224,0.875,bicubic +volo_d2_384,86.036,13.964,97.572,2.428,58.87,384,1.000,bicubic +vit_base_patch16_384.augreg_in21k_ft_in1k,86.006,13.994,98.000,2.000,86.86,384,1.000,bicubic +tf_efficientnetv2_m.in21k_ft_in1k,86.004,13.996,97.942,2.058,54.14,480,1.000,bicubic +xcit_large_24_p8_384_dist,86.000,14.000,97.686,2.314,188.93,384,1.000,bicubic +vit_base_patch16_clip_224.openai_ft_in12k_in1k,85.930,14.070,97.724,2.276,86.57,224,0.950,bicubic +efficientnet_b5.in12k_ft_in1k,85.888,14.112,97.732,2.268,30.39,448,1.000,bicubic +volo_d4_224,85.872,14.128,97.468,2.532,192.96,224,0.960,bicubic +vit_large_patch16_224.augreg_in21k_ft_in1k,85.842,14.158,97.824,2.176,304.33,224,0.900,bicubic xcit_medium_24_p8_384_dist,85.816,14.184,97.592,2.408,84.32,384,1.000,bicubic -dm_nfnet_f5,85.816,14.184,97.486,2.514,377.21,544,0.954,bicubic -deit3_large_patch16_384,85.806,14.194,97.596,2.404,304.76,384,1.000,bicubic -vit_base_patch8_224,85.790,14.210,97.792,2.208,86.58,224,0.900,bicubic -xcit_large_24_p16_384_dist,85.752,14.248,97.538,2.462,189.10,384,1.000,bicubic -convnext_small_384_in22ft1k,85.724,14.276,97.864,2.136,50.22,384,1.000,bicubic -deit3_base_patch16_224_in21ft1k,85.716,14.284,97.744,2.256,86.59,224,1.000,bicubic +dm_nfnet_f5,85.814,14.186,97.488,2.512,377.21,544,0.954,bicubic 
+deit3_large_patch16_384,85.810,14.190,97.596,2.404,304.76,384,1.000,bicubic +vit_base_patch8_224.augreg_in21k_ft_in1k,85.796,14.204,97.790,2.210,86.58,224,0.900,bicubic +vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,85.784,14.216,97.634,2.366,88.34,448,1.000,bicubic +convnext_small.fb_in22k_ft_in1k_384,85.778,14.222,97.892,2.108,50.22,384,1.000,bicubic +xcit_large_24_p16_384_dist,85.754,14.246,97.538,2.462,189.10,384,1.000,bicubic +deit3_base_patch16_224_in21ft1k,85.714,14.286,97.744,2.256,86.59,224,1.000,bicubic dm_nfnet_f4,85.714,14.286,97.520,2.480,316.07,512,0.951,bicubic -tf_efficientnetv2_m_in21ft1k,85.586,14.414,97.746,2.254,54.14,480,1.000,bicubic -xcit_small_24_p8_384_dist,85.554,14.446,97.572,2.428,47.63,384,1.000,bicubic +tf_efficientnetv2_l.in1k,85.670,14.330,97.474,2.526,118.52,480,1.000,bicubic +maxvit_tiny_tf_512.in1k,85.662,14.338,97.580,2.420,31.05,512,1.000,bicubic +flexivit_large.1200ep_in1k,85.644,14.356,97.542,2.458,304.36,240,0.950,bicubic +xcit_small_24_p8_384_dist,85.556,14.444,97.572,2.428,47.63,384,1.000,bicubic +flexivit_large.600ep_in1k,85.538,14.462,97.492,2.508,304.36,240,0.950,bicubic +vit_medium_patch16_gap_384.in12k_ft_in1k,85.536,14.464,97.634,2.366,39.03,384,0.950,bicubic +maxvit_small_tf_384.in1k,85.534,14.466,97.464,2.536,69.02,384,1.000,bicubic dm_nfnet_f3,85.522,14.478,97.462,2.538,254.92,416,0.940,bicubic -tf_efficientnetv2_l,85.488,14.512,97.372,2.628,118.52,480,1.000,bicubic -cait_s36_384,85.460,14.540,97.478,2.522,68.37,384,1.000,bicubic -ig_resnext101_32x48d,85.436,14.564,97.576,2.424,828.41,224,0.875,bilinear -xcit_medium_24_p16_384_dist,85.422,14.578,97.406,2.594,84.40,384,1.000,bicubic +vit_base_patch16_clip_224.laion2b_ft_in1k,85.468,14.532,97.576,2.424,86.57,224,1.000,bicubic +cait_s36_384,85.460,14.540,97.480,2.520,68.37,384,1.000,bicubic +ig_resnext101_32x48d,85.428,14.572,97.572,2.428,828.41,224,0.875,bilinear deit_base_distilled_patch16_384,85.422,14.578,97.332,2.668,87.63,384,1.000,bicubic -volo_d3_224,85.412,14.588,97.280,2.720,86.33,224,0.960,bicubic -xcit_large_24_p8_224_dist,85.398,14.602,97.410,2.590,188.93,224,1.000,bicubic -tf_efficientnet_b8_ap,85.372,14.628,97.294,2.706,87.41,672,0.954,bicubic -tf_efficientnet_b8,85.368,14.632,97.392,2.608,87.41,672,0.954,bicubic -swin_base_patch4_window7_224,85.250,14.750,97.562,2.438,87.77,224,0.900,bicubic +xcit_medium_24_p16_384_dist,85.412,14.588,97.406,2.594,84.40,384,1.000,bicubic +volo_d3_224,85.408,14.592,97.280,2.720,86.33,224,0.960,bicubic +xcit_large_24_p8_224_dist,85.396,14.604,97.410,2.590,188.93,224,1.000,bicubic +vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,85.372,14.628,97.664,2.336,88.30,384,1.000,bicubic +tf_efficientnet_b8.ra_in1k,85.370,14.630,97.390,2.610,87.41,672,0.954,bicubic +tf_efficientnet_b8.ap_in1k,85.370,14.630,97.294,2.706,87.41,672,0.954,bicubic +vit_base_patch16_clip_224.openai_ft_in1k,85.280,14.720,97.440,2.560,86.57,224,0.900,bicubic +flexivit_large.300ep_in1k,85.280,14.720,97.406,2.594,304.36,240,0.950,bicubic +convnext_small.fb_in22k_ft_in1k,85.262,14.738,97.684,2.316,50.22,288,1.000,bicubic +swin_base_patch4_window7_224,85.252,14.748,97.562,2.438,87.77,224,0.900,bicubic volo_d1_384,85.250,14.750,97.214,2.786,26.78,384,1.000,bicubic -beit_base_patch16_224,85.228,14.772,97.656,2.344,86.53,224,0.900,bicubic -deit3_huge_patch14_224,85.206,14.794,97.358,2.642,632.13,224,0.900,bicubic -volo_d2_224,85.194,14.806,97.188,2.812,58.68,224,0.960,bicubic -tf_efficientnet_b4_ns,85.160,14.840,97.470,2.530,19.34,380,0.922,bicubic 
-tf_efficientnet_b7_ap,85.120,14.880,97.252,2.748,66.35,600,0.949,bicubic -ig_resnext101_32x32d,85.100,14.900,97.434,2.566,468.53,224,0.875,bilinear -xcit_small_24_p16_384_dist,85.088,14.912,97.308,2.692,47.67,384,1.000,bicubic -xcit_small_12_p8_384_dist,85.080,14.920,97.280,2.720,26.21,384,1.000,bicubic -deit3_base_patch16_384,85.076,14.924,97.254,2.746,86.88,384,1.000,bicubic -xcit_medium_24_p8_224_dist,85.070,14.930,97.280,2.720,84.32,224,1.000,bicubic -dm_nfnet_f2,85.066,14.934,97.242,2.758,193.78,352,0.920,bicubic -cait_s24_384,85.050,14.950,97.348,2.652,47.06,384,1.000,bicubic -tf_efficientnetv2_m,85.036,14.964,97.278,2.722,54.14,480,1.000,bicubic +mvitv2_large,85.250,14.750,97.196,2.804,217.99,224,0.900,bicubic +beit_base_patch16_224.in22k_ft_in22k_in1k,85.236,14.764,97.656,2.344,86.53,224,0.900,bicubic +vit_base_patch32_clip_384.openai_ft_in12k_in1k,85.212,14.788,97.402,2.598,88.30,384,0.950,bicubic +tf_efficientnetv2_m.in1k,85.208,14.792,97.368,2.632,54.14,480,1.000,bicubic +deit3_huge_patch14_224,85.204,14.796,97.358,2.642,632.13,224,0.900,bicubic +volo_d2_224,85.196,14.804,97.188,2.812,58.68,224,0.960,bicubic +tf_efficientnet_b4.ns_jft_in1k,85.162,14.838,97.470,2.530,19.34,380,0.922,bicubic +tf_efficientnet_b7.ap_in1k,85.120,14.880,97.252,2.748,66.35,600,0.949,bicubic +vit_base_patch16_224.augreg2_in21k_ft_in1k,85.106,14.894,97.534,2.466,86.57,224,0.900,bicubic +maxvit_tiny_tf_384.in1k,85.106,14.894,97.380,2.620,30.98,384,1.000,bicubic +xcit_small_24_p16_384_dist,85.098,14.902,97.310,2.690,47.67,384,1.000,bicubic +ig_resnext101_32x32d,85.094,14.906,97.438,2.562,468.53,224,0.875,bilinear +xcit_small_12_p8_384_dist,85.088,14.912,97.282,2.718,26.21,384,1.000,bicubic +xcit_medium_24_p8_224_dist,85.072,14.928,97.278,2.722,84.32,224,1.000,bicubic +deit3_base_patch16_384,85.072,14.928,97.254,2.746,86.88,384,1.000,bicubic +dm_nfnet_f2,85.064,14.936,97.240,2.760,193.78,352,0.920,bicubic +cait_s24_384,85.046,14.954,97.346,2.654,47.06,384,1.000,bicubic regnetz_e8,85.030,14.970,97.264,2.736,57.70,320,1.000,bicubic resnetrs420,85.008,14.992,97.124,2.876,191.89,416,1.000,bicubic -vit_base_r50_s16_384,84.976,15.024,97.290,2.710,98.95,384,1.000,bicubic -ecaresnet269d,84.974,15.026,97.226,2.774,102.09,352,1.000,bicubic -tf_efficientnet_b7,84.934,15.066,97.206,2.794,66.35,600,0.949,bicubic -xcit_large_24_p16_224_dist,84.920,15.080,97.132,2.868,189.10,224,1.000,bicubic -resnetv2_152x4_bitm,84.918,15.082,97.442,2.558,936.53,480,1.000,bilinear +ecaresnet269d,84.976,15.024,97.226,2.774,102.09,352,1.000,bicubic +vit_base_r50_s16_384.orig_in21k_ft_in1k,84.972,15.028,97.288,2.712,98.95,384,1.000,bicubic +tf_efficientnet_b7.ra_in1k,84.936,15.064,97.204,2.796,66.35,600,0.949,bicubic +maxvit_large_tf_224.in1k,84.926,15.074,96.972,3.028,211.79,224,0.950,bicubic +xcit_large_24_p16_224_dist,84.918,15.082,97.132,2.868,189.10,224,1.000,bicubic +resnetv2_152x4_bitm,84.916,15.084,97.440,2.560,936.53,480,1.000,bilinear xcit_small_24_p8_224_dist,84.876,15.124,97.188,2.812,47.63,224,1.000,bicubic -deit3_small_patch16_384_in21ft1k,84.824,15.176,97.484,2.516,22.21,384,1.000,bicubic -efficientnetv2_rw_m,84.812,15.188,97.146,2.854,53.24,416,1.000,bicubic -tf_efficientnet_b6_ap,84.786,15.214,97.138,2.862,43.04,528,0.942,bicubic -deit3_large_patch16_224,84.762,15.238,97.038,2.962,304.37,224,0.900,bicubic -resnetrs350,84.712,15.288,96.990,3.010,163.96,384,1.000,bicubic -xcit_small_12_p16_384_dist,84.708,15.292,97.116,2.884,26.25,384,1.000,bicubic -eca_nfnet_l2,84.696,15.304,97.264,2.736,56.72,384,1.000,bicubic 
-dm_nfnet_f1,84.624,15.376,97.098,2.902,132.63,320,0.910,bicubic -swinv2_base_window16_256,84.592,15.408,97.074,2.926,87.92,256,0.900,bicubic -seresnextaa101d_32x8d,84.572,15.428,97.070,2.930,93.59,288,1.000,bicubic -convnext_small_in22ft1k,84.568,15.432,97.396,2.604,50.22,224,0.875,bicubic -vit_base_patch16_224,84.530,15.470,97.296,2.704,86.57,224,0.900,bicubic +maxvit_base_tf_224.in1k,84.860,15.140,96.990,3.010,119.47,224,0.950,bicubic +convnext_large.fb_in1k,84.846,15.154,97.212,2.788,197.77,288,1.000,bicubic +deit3_small_patch16_384_in21ft1k,84.824,15.176,97.486,2.514,22.21,384,1.000,bicubic +efficientnetv2_rw_m.agc_in1k,84.808,15.192,97.148,2.852,53.24,416,1.000,bicubic +tf_efficientnet_b6.ap_in1k,84.788,15.212,97.138,2.862,43.04,528,0.942,bicubic +deit3_large_patch16_224,84.764,15.236,97.038,2.962,304.37,224,0.900,bicubic +resnetrs350,84.720,15.280,96.988,3.012,163.96,384,1.000,bicubic +xcit_small_12_p16_384_dist,84.706,15.294,97.118,2.882,26.25,384,1.000,bicubic +eca_nfnet_l2,84.698,15.302,97.264,2.736,56.72,384,1.000,bicubic +flexivit_base.1200ep_in1k,84.664,15.336,96.992,3.008,86.59,240,0.950,bicubic +maxxvit_rmlp_small_rw_256,84.628,15.372,97.062,2.938,66.01,256,0.950,bicubic +dm_nfnet_f1,84.626,15.374,97.100,2.900,132.63,320,0.910,bicubic +coatnet_rmlp_2_rw_224,84.600,15.400,96.736,3.264,73.88,224,0.950,bicubic +swinv2_base_window16_256,84.594,15.406,97.074,2.926,87.92,256,0.900,bicubic +seresnextaa101d_32x8d,84.568,15.432,97.070,2.930,93.59,288,1.000,bicubic +deit3_medium_patch16_224_in21ft1k,84.560,15.440,97.188,2.812,38.85,224,1.000,bicubic +vit_base_patch16_224.augreg_in21k_ft_in1k,84.532,15.468,97.294,2.706,86.57,224,0.900,bicubic resnest269e,84.518,15.482,96.986,3.014,110.93,416,0.928,bicubic -resnetv2_152x2_bitm,84.510,15.490,97.434,2.566,236.34,448,1.000,bilinear -regnetz_040h,84.496,15.504,97.006,2.994,28.94,320,1.000,bicubic -resnetv2_101x3_bitm,84.444,15.556,97.382,2.618,387.93,448,1.000,bilinear -resnetrs200,84.440,15.560,97.080,2.920,93.21,320,1.000,bicubic -resnetrs270,84.436,15.564,96.974,3.026,129.86,352,1.000,bicubic -vit_large_r50_s32_224,84.430,15.570,97.166,2.834,328.99,224,0.900,bicubic -resmlp_big_24_224_in22ft1k,84.398,15.602,97.118,2.882,129.14,224,0.875,bicubic -xcit_large_24_p8_224,84.392,15.608,96.658,3.342,188.93,224,1.000,bicubic -seresnet152d,84.364,15.636,97.044,2.956,66.84,320,1.000,bicubic -seresnext101d_32x8d,84.362,15.638,96.918,3.082,93.59,288,1.000,bicubic -tf_efficientnetv2_s_in21ft1k,84.296,15.704,97.254,2.746,21.46,384,1.000,bicubic -convnext_large,84.296,15.704,96.894,3.106,197.77,224,0.875,bicubic -swsl_resnext101_32x8d,84.290,15.710,97.182,2.818,88.79,224,0.875,bilinear -xcit_medium_24_p16_224_dist,84.278,15.722,96.940,3.060,84.40,224,1.000,bicubic -vit_base_patch16_224_miil,84.272,15.728,96.802,3.198,86.54,224,0.875,bilinear +flexivit_base.600ep_in1k,84.518,15.482,96.936,3.064,86.59,240,0.950,bicubic +resnetv2_152x2_bitm,84.510,15.490,97.432,2.568,236.34,448,1.000,bilinear +regnetz_040h,84.494,15.506,97.006,2.994,28.94,320,1.000,bicubic +maxvit_rmlp_small_rw_224,84.484,15.516,96.762,3.238,64.90,224,0.900,bicubic +resnetrs200,84.448,15.552,97.082,2.918,93.21,320,1.000,bicubic +gcvit_base,84.448,15.552,96.844,3.156,90.32,224,0.875,bicubic +resnetv2_101x3_bitm,84.440,15.560,97.382,2.618,387.93,448,1.000,bilinear +vit_large_r50_s32_224.augreg_in21k_ft_in1k,84.434,15.566,97.164,2.836,328.99,224,0.900,bicubic +convnext_base.fb_in1k,84.434,15.566,96.972,3.028,88.59,288,1.000,bicubic 
+resnetrs270,84.434,15.566,96.970,3.030,129.86,352,1.000,bicubic +maxvit_small_tf_224.in1k,84.434,15.566,96.820,3.180,68.93,224,0.950,bicubic +vit_medium_patch16_gap_256.in12k_ft_in1k,84.430,15.570,97.212,2.788,38.86,256,0.950,bicubic +mvitv2_base,84.422,15.578,96.864,3.136,51.47,224,0.900,bicubic +resmlp_big_24_224_in22ft1k,84.394,15.606,97.120,2.880,129.14,224,0.875,bicubic +flexivit_base.300ep_in1k,84.394,15.606,96.882,3.118,86.59,240,0.950,bicubic +xcit_large_24_p8_224,84.392,15.608,96.656,3.344,188.93,224,1.000,bicubic +seresnext101d_32x8d,84.370,15.630,96.916,3.084,93.59,288,1.000,bicubic +seresnet152d,84.362,15.638,97.040,2.960,66.84,320,1.000,bicubic +tf_efficientnetv2_s.in21k_ft_in1k,84.302,15.698,97.252,2.748,21.46,384,1.000,bicubic +swsl_resnext101_32x8d,84.284,15.716,97.176,2.824,88.79,224,0.875,bilinear +xcit_medium_24_p16_224_dist,84.274,15.726,96.940,3.060,84.40,224,1.000,bicubic +vit_base_patch16_224_miil.in21k_ft_in1k,84.268,15.732,96.802,3.198,86.54,224,0.875,bilinear swinv2_base_window8_256,84.262,15.738,96.922,3.078,87.92,256,0.900,bicubic -tf_efficientnet_b5_ap,84.254,15.746,96.978,3.022,30.39,456,0.934,bicubic +tf_efficientnet_b5.ap_in1k,84.252,15.748,96.974,3.026,30.39,456,0.934,bicubic regnetz_040,84.236,15.764,96.932,3.068,27.12,320,1.000,bicubic -xcit_small_12_p8_224_dist,84.230,15.770,96.874,3.126,26.21,224,1.000,bicubic -swinv2_small_window16_256,84.210,15.790,96.870,3.130,49.73,256,0.900,bicubic -seresnext101_32x8d,84.204,15.796,96.874,3.126,93.57,288,1.000,bicubic -crossvit_18_dagger_408,84.194,15.806,96.818,3.182,44.61,408,1.000,bicubic -ig_resnext101_32x16d,84.170,15.830,97.198,2.802,194.03,224,0.875,bilinear -volo_d1_224,84.164,15.836,96.774,3.226,26.63,224,0.960,bicubic -pit_b_distilled_224,84.142,15.858,96.856,3.144,74.79,224,0.900,bicubic -tf_efficientnet_b6,84.108,15.892,96.888,3.112,43.04,528,0.942,bicubic -convnext_tiny_384_in22ft1k,84.076,15.924,97.158,2.842,28.59,384,1.000,bicubic -cait_xs24_384,84.064,15.936,96.890,3.110,26.67,384,1.000,bicubic -regnetz_d8,84.052,15.948,96.996,3.004,23.37,320,1.000,bicubic -regnetz_d8_evos,84.050,15.950,96.996,3.004,23.46,320,0.950,bicubic -vit_small_r26_s32_384,84.048,15.952,97.328,2.672,36.47,384,1.000,bicubic -tf_efficientnet_b3_ns,84.048,15.952,96.912,3.088,12.23,300,0.904,bicubic -regnetz_d32,84.024,15.976,96.868,3.132,27.58,320,0.950,bicubic -resnetv2_50x3_bitm,84.012,15.988,97.126,2.874,217.32,448,1.000,bilinear -eca_nfnet_l1,84.012,15.988,97.032,2.968,41.41,320,1.000,bicubic -resnet200d,83.960,16.040,96.824,3.176,64.69,320,1.000,bicubic -swin_s3_base_224,83.932,16.068,96.660,3.340,71.13,224,0.900,bicubic -regnety_080,83.928,16.072,96.888,3.112,39.18,288,1.000,bicubic -tf_efficientnetv2_s,83.884,16.116,96.698,3.302,21.46,384,1.000,bicubic -xcit_small_24_p16_224_dist,83.870,16.130,96.732,3.268,47.67,224,1.000,bicubic -swinv2_small_window8_256,83.854,16.146,96.642,3.358,49.73,256,0.900,bicubic -resnetv2_152x2_bit_teacher_384,83.844,16.156,97.116,2.884,236.34,384,1.000,bicubic -convnext_base,83.840,16.160,96.750,3.250,88.59,224,0.875,bicubic -xcit_small_24_p8_224,83.840,16.160,96.636,3.364,47.63,224,1.000,bicubic -crossvit_15_dagger_408,83.838,16.162,96.780,3.220,28.50,408,1.000,bicubic -resnest200e,83.828,16.172,96.892,3.108,70.20,320,0.909,bicubic -tf_efficientnet_b5,83.814,16.186,96.748,3.252,30.39,456,0.934,bicubic -efficientnetv2_rw_s,83.810,16.190,96.724,3.276,23.94,384,1.000,bicubic -vit_small_patch16_384,83.800,16.200,97.100,2.900,22.20,384,1.000,bicubic 
+xcit_small_12_p8_224_dist,84.232,15.768,96.876,3.124,26.21,224,1.000,bicubic +maxvit_rmlp_tiny_rw_256,84.232,15.768,96.778,3.222,29.15,256,0.950,bicubic +vit_base_patch16_384.orig_in21k_ft_in1k,84.210,15.790,97.218,2.782,86.86,384,1.000,bicubic +swinv2_small_window16_256,84.206,15.794,96.870,3.130,49.73,256,0.900,bicubic +crossvit_18_dagger_408,84.196,15.804,96.818,3.182,44.61,408,1.000,bicubic +seresnext101_32x8d,84.192,15.808,96.874,3.126,93.57,288,1.000,bicubic +ig_resnext101_32x16d,84.170,15.830,97.196,2.804,194.03,224,0.875,bilinear +volo_d1_224,84.164,15.836,96.776,3.224,26.63,224,0.960,bicubic +pit_b_distilled_224,84.144,15.856,96.856,3.144,74.79,224,0.900,bicubic +tf_efficientnet_b6.aa_in1k,84.110,15.890,96.886,3.114,43.04,528,0.942,bicubic +convnext_tiny.fb_in22k_ft_in1k_384,84.080,15.920,97.142,2.858,28.59,384,1.000,bicubic +cait_xs24_384,84.062,15.938,96.888,3.112,26.67,384,1.000,bicubic +regnetz_d8,84.050,15.950,96.998,3.002,23.37,320,1.000,bicubic +regnetz_d8_evos,84.050,15.950,96.994,3.006,23.46,320,0.950,bicubic +tf_efficientnet_b3.ns_jft_in1k,84.048,15.952,96.910,3.090,12.23,300,0.904,bicubic +vit_small_r26_s32_384.augreg_in21k_ft_in1k,84.046,15.954,97.328,2.672,36.47,384,1.000,bicubic +regnetz_d32,84.022,15.978,96.866,3.134,27.58,320,0.950,bicubic +resnetv2_50x3_bitm,84.014,15.986,97.124,2.876,217.32,448,1.000,bilinear +eca_nfnet_l1,84.010,15.990,97.028,2.972,41.41,320,1.000,bicubic +resnet200d,83.962,16.038,96.824,3.176,64.69,320,1.000,bicubic +edgenext_base,83.960,16.040,96.768,3.232,18.51,320,1.000,bicubic +regnety_080,83.932,16.068,96.888,3.112,39.18,288,1.000,bicubic +swin_s3_base_224,83.930,16.070,96.662,3.338,71.13,224,0.900,bicubic +tresnet_v2_l,83.902,16.098,96.492,3.508,46.17,224,0.875,bilinear +tf_efficientnetv2_s.in1k,83.894,16.106,96.698,3.302,21.46,384,1.000,bicubic +gcvit_small,83.884,16.116,96.658,3.342,51.09,224,0.875,bicubic +xcit_small_24_p16_224_dist,83.862,16.138,96.728,3.272,47.67,224,1.000,bicubic +swinv2_small_window8_256,83.856,16.144,96.640,3.360,49.73,256,0.900,bicubic +resnetv2_152x2_bit_teacher_384,83.844,16.156,97.118,2.882,236.34,384,1.000,bicubic +crossvit_15_dagger_408,83.838,16.162,96.782,3.218,28.50,408,1.000,bicubic +xcit_small_24_p8_224,83.838,16.162,96.636,3.364,47.63,224,1.000,bicubic +resnest200e,83.832,16.168,96.894,3.106,70.20,320,0.909,bicubic +tf_efficientnet_b5.ra_in1k,83.812,16.188,96.748,3.252,30.39,456,0.934,bicubic +efficientnetv2_rw_s.ra2_in1k,83.808,16.192,96.724,3.276,23.94,384,1.000,bicubic +vit_small_patch16_384.augreg_in21k_ft_in1k,83.802,16.198,97.102,2.898,22.20,384,1.000,bicubic deit3_base_patch16_224,83.792,16.208,96.584,3.416,86.59,224,0.900,bicubic -swin_s3_small_224,83.774,16.226,96.452,3.548,49.74,224,0.900,bicubic -xcit_tiny_24_p8_384_dist,83.746,16.254,96.712,3.288,12.11,384,1.000,bicubic -xcit_medium_24_p8_224,83.738,16.262,96.394,3.606,84.32,224,1.000,bicubic -regnety_064,83.720,16.280,96.726,3.274,30.58,288,1.000,bicubic -resnetrs152,83.714,16.286,96.614,3.386,86.62,320,1.000,bicubic -regnetv_064,83.712,16.288,96.746,3.254,30.58,288,1.000,bicubic -regnety_160,83.692,16.308,96.776,3.224,83.59,288,1.000,bicubic -twins_svt_large,83.680,16.320,96.594,3.406,99.27,224,0.900,bicubic -resnet152d,83.678,16.322,96.740,3.260,60.21,320,1.000,bicubic +swin_s3_small_224,83.770,16.230,96.450,3.550,49.74,224,0.900,bicubic +mvitv2_small,83.768,16.232,96.570,3.430,34.87,224,0.900,bicubic +xcit_tiny_24_p8_384_dist,83.740,16.260,96.712,3.288,12.11,384,1.000,bicubic 
+pvt_v2_b5,83.740,16.260,96.634,3.366,81.96,224,0.900,bicubic +xcit_medium_24_p8_224,83.734,16.266,96.394,3.606,84.32,224,1.000,bicubic +regnety_064,83.716,16.284,96.720,3.280,30.58,288,1.000,bicubic +pvt_v2_b4,83.716,16.284,96.674,3.326,62.56,224,0.900,bicubic +regnetv_064,83.712,16.288,96.748,3.252,30.58,288,1.000,bicubic +resnetrs152,83.712,16.288,96.614,3.386,86.62,320,1.000,bicubic +convnext_small.fb_in1k,83.706,16.294,96.810,3.190,50.22,288,1.000,bicubic +regnety_160,83.686,16.314,96.776,3.224,83.59,288,1.000,bicubic +resnet152d,83.680,16.320,96.738,3.262,60.21,320,1.000,bicubic +twins_svt_large,83.678,16.322,96.594,3.406,99.27,224,0.900,bicubic +coatnet_1_rw_224,83.608,16.392,96.388,3.612,41.72,224,0.950,bicubic resmlp_big_24_distilled_224,83.588,16.412,96.648,3.352,129.14,224,0.875,bicubic -jx_nest_base,83.554,16.446,96.364,3.636,67.72,224,0.875,bicubic -cs3se_edgenet_x,83.548,16.452,96.666,3.334,50.72,320,1.000,bicubic -swinv2_cr_small_ns_224,83.486,16.514,96.484,3.516,49.70,224,0.900,bicubic -cait_s24_224,83.458,16.542,96.562,3.438,46.92,224,1.000,bicubic -deit3_small_patch16_384,83.428,16.572,96.676,3.324,22.21,384,1.000,bicubic -efficientnet_b4,83.424,16.576,96.598,3.402,19.34,384,1.000,bicubic -sequencer2d_l,83.406,16.594,96.500,3.500,54.30,224,0.875,bicubic -mobilevitv2_200_384_in22ft1k,83.400,16.600,96.582,3.418,18.45,384,1.000,bicubic +jx_nest_base,83.552,16.448,96.370,3.630,67.72,224,0.875,bicubic +cs3se_edgenet_x,83.548,16.452,96.670,3.330,50.72,320,1.000,bicubic +maxvit_tiny_rw_224,83.504,16.496,96.502,3.498,29.06,224,0.950,bicubic +swinv2_cr_small_ns_224,83.488,16.512,96.486,3.514,49.70,224,0.900,bicubic +cait_s24_224,83.452,16.548,96.564,3.436,46.92,224,1.000,bicubic +efficientnet_b4.ra2_in1k,83.428,16.572,96.596,3.404,19.34,384,1.000,bicubic +deit3_small_patch16_384,83.426,16.574,96.676,3.324,22.21,384,1.000,bicubic +sequencer2d_l,83.406,16.594,96.506,3.494,54.30,224,0.875,bicubic +gcvit_tiny,83.400,16.600,96.398,3.602,28.22,224,0.875,bicubic +maxvit_tiny_tf_224.in1k,83.398,16.602,96.588,3.412,30.92,224,0.950,bicubic +mobilevitv2_200_384_in22ft1k,83.394,16.606,96.580,3.420,18.45,384,1.000,bicubic deit_base_distilled_patch16_224,83.388,16.612,96.488,3.512,87.34,224,0.900,bicubic -dm_nfnet_f0,83.384,16.616,96.574,3.426,71.49,256,0.900,bicubic -vit_base_patch32_384,83.352,16.648,96.836,3.164,88.30,384,1.000,bicubic -swsl_resnext101_32x16d,83.350,16.650,96.844,3.156,194.03,224,0.875,bilinear -xcit_small_12_p16_224_dist,83.346,16.654,96.418,3.582,26.25,224,1.000,bicubic -xcit_small_12_p8_224,83.340,16.660,96.480,3.520,26.21,224,1.000,bicubic -tf_efficientnet_b4_ap,83.248,16.752,96.392,3.608,19.34,380,0.922,bicubic -swsl_resnext101_32x4d,83.240,16.760,96.760,3.240,44.18,224,0.875,bilinear -swin_small_patch4_window7_224,83.218,16.782,96.326,3.674,49.61,224,0.900,bicubic -regnetv_040,83.198,16.802,96.664,3.336,20.64,288,1.000,bicubic -xception65,83.174,16.826,96.592,3.408,39.92,299,0.940,bicubic -convnext_small,83.150,16.850,96.430,3.570,50.22,224,0.875,bicubic -resnext101_64x4d,83.144,16.856,96.374,3.626,83.46,288,1.000,bicubic -twins_svt_base,83.138,16.862,96.420,3.580,56.07,224,0.900,bicubic -swinv2_cr_small_224,83.138,16.862,96.098,3.902,49.70,224,0.900,bicubic -twins_pcpvt_large,83.136,16.864,96.604,3.396,60.99,224,0.900,bicubic +dm_nfnet_f0,83.386,16.614,96.572,3.428,71.49,256,0.900,bicubic +efficientformer_l7,83.386,16.614,96.540,3.460,82.23,224,0.950,bicubic +coatnet_rmlp_1_rw_224,83.358,16.642,96.456,3.544,41.69,224,0.950,bicubic 
+vit_base_patch32_384.augreg_in21k_ft_in1k,83.350,16.650,96.836,3.164,88.30,384,1.000,bicubic +xcit_small_12_p16_224_dist,83.350,16.650,96.414,3.586,26.25,224,1.000,bicubic +swsl_resnext101_32x16d,83.346,16.654,96.846,3.154,194.03,224,0.875,bilinear +xcit_small_12_p8_224,83.344,16.656,96.480,3.520,26.21,224,1.000,bicubic +vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,83.306,16.694,96.530,3.470,88.22,224,0.900,bicubic +tf_efficientnet_b4.ap_in1k,83.248,16.752,96.392,3.608,19.34,380,0.922,bicubic +swsl_resnext101_32x4d,83.230,16.770,96.760,3.240,44.18,224,0.875,bilinear +swin_small_patch4_window7_224,83.212,16.788,96.322,3.678,49.61,224,0.900,bicubic +regnetv_040,83.194,16.806,96.660,3.340,20.64,288,1.000,bicubic +xception65,83.180,16.820,96.592,3.408,39.92,299,0.940,bicubic +resnext101_64x4d,83.148,16.852,96.372,3.628,83.46,288,1.000,bicubic +swinv2_cr_small_224,83.146,16.854,96.094,3.906,49.70,224,0.900,bicubic +twins_pcpvt_large,83.140,16.860,96.598,3.402,60.99,224,0.900,bicubic +twins_svt_base,83.136,16.864,96.418,3.582,56.07,224,0.900,bicubic xception65p,83.130,16.870,96.480,3.520,39.82,299,0.940,bicubic -jx_nest_small,83.120,16.880,96.330,3.670,38.35,224,0.875,bicubic -deit_base_patch16_384,83.106,16.894,96.370,3.630,86.86,384,1.000,bicubic -deit3_small_patch16_224_in21ft1k,83.076,16.924,96.776,3.224,22.06,224,1.000,bicubic -tresnet_m,83.074,16.926,96.120,3.880,31.39,224,0.875,bilinear -tresnet_xl_448,83.048,16.952,96.170,3.830,78.44,448,0.875,bilinear -regnety_040,83.036,16.964,96.510,3.490,20.65,288,1.000,bicubic -tf_efficientnet_b4,83.024,16.976,96.300,3.700,19.34,380,0.922,bicubic +pvt_v2_b3,83.126,16.874,96.556,3.444,45.24,224,0.900,bicubic +jx_nest_small,83.120,16.880,96.328,3.672,38.35,224,0.875,bicubic +deit_base_patch16_384,83.106,16.894,96.372,3.628,86.86,384,1.000,bicubic +deit3_medium_patch16_224,83.080,16.920,96.292,3.708,38.85,224,0.900,bicubic +tresnet_m,83.080,16.920,96.118,3.882,31.39,224,0.875,bilinear +deit3_small_patch16_224_in21ft1k,83.070,16.930,96.780,3.220,22.06,224,1.000,bicubic +tresnet_xl_448,83.050,16.950,96.174,3.826,78.44,448,0.875,bilinear +regnety_040,83.038,16.962,96.510,3.490,20.65,288,1.000,bicubic +maxxvit_rmlp_nano_rw_256,83.030,16.970,96.344,3.656,16.78,256,0.950,bicubic resnet101d,83.022,16.978,96.446,3.554,44.57,320,1.000,bicubic -mobilevitv2_175_384_in22ft1k,82.934,17.066,96.430,3.570,14.25,384,1.000,bicubic -convnext_tiny_in22ft1k,82.912,17.088,96.624,3.376,28.59,224,0.875,bicubic -xcit_large_24_p16_224,82.892,17.108,95.878,4.122,189.10,224,1.000,bicubic -resnest101e,82.888,17.112,96.320,3.680,48.28,256,0.875,bilinear -resnetv2_152x2_bit_teacher,82.868,17.132,96.568,3.432,236.34,224,0.875,bicubic -resnetv2_50x1_bit_distilled,82.822,17.178,96.522,3.478,25.55,224,0.875,bicubic -resnet152,82.818,17.182,96.132,3.868,60.19,224,0.950,bicubic -swinv2_tiny_window16_256,82.810,17.190,96.230,3.770,28.35,256,0.900,bicubic -sequencer2d_m,82.808,17.192,96.268,3.732,38.31,224,0.875,bicubic -pnasnet5large,82.782,17.218,96.042,3.958,86.06,331,0.911,bicubic -vit_relpos_base_patch16_clsgap_224,82.760,17.240,96.174,3.826,86.43,224,0.900,bicubic -nfnet_l0,82.752,17.248,96.518,3.482,35.07,288,1.000,bicubic -regnety_032,82.724,17.276,96.422,3.578,19.44,288,1.000,bicubic -cs3edgenet_x,82.722,17.278,96.376,3.624,47.82,288,1.000,bicubic -twins_pcpvt_base,82.708,17.292,96.350,3.650,43.83,224,0.900,bicubic -ig_resnext101_32x8d,82.698,17.302,96.632,3.368,88.79,224,0.875,bilinear -cs3sedarknet_x,82.654,17.346,96.346,3.654,35.40,288,1.000,bicubic 
-xcit_medium_24_p16_224,82.638,17.362,95.978,4.022,84.40,224,1.000,bicubic -regnetz_c16_evos,82.632,17.368,96.476,3.524,13.49,320,0.950,bicubic -nasnetalarge,82.618,17.382,96.044,3.956,88.75,331,0.911,bicubic -mobilevitv2_150_384_in22ft1k,82.590,17.410,96.316,3.684,10.59,384,1.000,bicubic -levit_384,82.588,17.412,96.018,3.982,39.13,224,0.900,bicubic -xcit_small_24_p16_224,82.584,17.416,96.000,4.000,47.67,224,1.000,bicubic -eca_nfnet_l0,82.578,17.422,96.490,3.510,24.14,288,1.000,bicubic -xcit_tiny_24_p16_384_dist,82.572,17.428,96.288,3.712,12.12,384,1.000,bicubic -vit_relpos_medium_patch16_cls_224,82.562,17.438,96.066,3.934,38.76,224,0.900,bicubic -xcit_tiny_24_p8_224_dist,82.560,17.440,96.168,3.832,12.11,224,1.000,bicubic -regnetz_c16,82.520,17.480,96.360,3.640,13.46,320,0.940,bicubic -crossvit_18_dagger_240,82.520,17.480,96.068,3.932,44.27,240,0.875,bicubic -resnet61q,82.518,17.482,96.130,3.870,36.85,288,1.000,bicubic -vit_relpos_base_patch16_224,82.486,17.514,96.142,3.858,86.43,224,0.900,bicubic -gc_efficientnetv2_rw_t,82.466,17.534,96.298,3.702,13.68,288,1.000,bicubic -vit_relpos_medium_patch16_224,82.462,17.538,96.086,3.914,38.75,224,0.900,bicubic -poolformer_m48,82.460,17.540,95.958,4.042,73.47,224,0.950,bicubic -pit_b_224,82.444,17.556,95.712,4.288,73.76,224,0.900,bicubic -crossvit_18_240,82.398,17.602,96.054,3.946,43.27,240,0.875,bicubic -xcit_tiny_12_p8_384_dist,82.386,17.614,96.222,3.778,6.71,384,1.000,bicubic -tf_efficientnet_b2_ns,82.384,17.616,96.246,3.754,9.11,260,0.890,bicubic -resnet51q,82.358,17.642,96.178,3.822,35.70,288,1.000,bilinear -ecaresnet50t,82.348,17.652,96.138,3.862,25.57,320,0.950,bicubic -efficientnetv2_rw_t,82.344,17.656,96.196,3.804,13.65,288,1.000,bicubic -sequencer2d_s,82.344,17.656,96.034,3.966,27.65,224,0.875,bicubic -mobilevitv2_200_in22ft1k,82.334,17.666,95.938,4.062,18.45,256,0.888,bicubic -resnetv2_101x1_bitm,82.332,17.668,96.516,3.484,44.54,448,1.000,bilinear -crossvit_15_dagger_240,82.326,17.674,95.956,4.044,28.21,240,0.875,bicubic -coat_lite_small,82.304,17.696,95.850,4.150,19.84,224,0.900,bicubic -mixer_b16_224_miil,82.304,17.696,95.720,4.280,59.88,224,0.875,bilinear -vit_relpos_medium_patch16_rpn_224,82.294,17.706,95.972,4.028,38.73,224,0.900,bicubic -convit_base,82.292,17.708,95.938,4.062,86.54,224,0.875,bicubic -resnetrs101,82.284,17.716,96.008,3.992,63.62,288,0.940,bicubic -tresnet_l_448,82.270,17.730,95.980,4.020,55.99,448,0.875,bilinear -efficientnet_b3,82.240,17.760,96.118,3.882,12.23,320,1.000,bicubic -vit_srelpos_medium_patch16_224,82.236,17.764,95.934,4.066,38.74,224,0.900,bicubic -cs3darknet_x,82.224,17.776,96.230,3.770,35.05,288,1.000,bicubic -convnext_tiny_hnf,82.220,17.780,95.866,4.134,28.59,224,0.950,bicubic -crossvit_base_240,82.216,17.784,95.832,4.168,105.03,240,0.875,bicubic -vit_base_patch16_rpn_224,82.200,17.800,95.996,4.004,86.54,224,0.900,bicubic -cait_xxs36_384,82.192,17.808,96.144,3.856,17.37,384,1.000,bicubic -swsl_resnext50_32x4d,82.176,17.824,96.232,3.768,25.03,224,0.875,bilinear -ecaresnet101d,82.170,17.830,96.048,3.952,44.57,224,0.875,bicubic -swin_s3_tiny_224,82.124,17.876,95.950,4.050,28.33,224,0.900,bicubic -visformer_small,82.108,17.892,95.876,4.124,40.22,224,0.900,bicubic -poolformer_m36,82.108,17.892,95.690,4.310,56.17,224,0.950,bicubic -halo2botnet50ts_256,82.068,17.932,95.642,4.358,22.64,256,0.950,bicubic -tresnet_xl,82.062,17.938,95.936,4.064,78.44,224,0.875,bilinear -convnext_tiny,82.062,17.938,95.854,4.146,28.59,224,0.875,bicubic -resnetv2_101,82.046,17.954,95.862,4.138,44.54,224,0.950,bicubic 
-fbnetv3_g,82.034,17.966,96.066,3.934,16.62,288,0.950,bilinear -pit_s_distilled_224,81.994,18.006,95.796,4.204,24.04,224,0.900,bicubic -deit_base_patch16_224,81.994,18.006,95.732,4.268,86.57,224,0.900,bicubic -resnetv2_50d_evos,81.978,18.022,95.912,4.088,25.59,288,0.950,bicubic -xcit_small_12_p16_224,81.972,18.028,95.812,4.188,26.25,224,1.000,bicubic -xception41p,81.968,18.032,95.794,4.206,26.91,299,0.940,bicubic -tf_efficientnetv2_b3,81.966,18.034,95.782,4.218,14.36,300,0.904,bicubic -mobilevitv2_175_in22ft1k,81.940,18.060,95.790,4.210,14.25,256,0.888,bicubic -resnet101,81.930,18.070,95.766,4.234,44.55,224,0.950,bicubic -xcit_tiny_24_p8_224,81.896,18.104,95.974,4.026,12.11,224,1.000,bicubic -vit_small_r26_s32_224,81.862,18.138,96.022,3.978,36.43,224,0.900,bicubic -ssl_resnext101_32x16d,81.856,18.144,96.096,3.904,194.03,224,0.875,bilinear -resnetv2_50d_gn,81.824,18.176,95.924,4.076,25.57,288,0.950,bicubic -tf_efficientnet_b3_ap,81.824,18.176,95.624,4.376,12.23,300,0.904,bicubic -swinv2_tiny_window8_256,81.810,18.190,95.994,4.006,28.35,256,0.900,bicubic -swinv2_cr_tiny_ns_224,81.786,18.214,95.822,4.178,28.33,224,0.900,bicubic -cs3sedarknet_l,81.776,18.224,95.970,4.030,21.91,288,0.950,bicubic -tresnet_m_448,81.706,18.294,95.572,4.428,31.39,448,0.875,bilinear -twins_svt_small,81.682,18.318,95.666,4.334,24.06,224,0.900,bicubic -halonet50ts,81.652,18.348,95.612,4.388,22.73,256,0.940,bicubic -tf_efficientnet_b3,81.638,18.362,95.718,4.282,12.23,300,0.904,bicubic -rexnet_200,81.628,18.372,95.668,4.332,16.37,224,0.875,bicubic -resnetaa50,81.618,18.382,95.810,4.190,25.56,288,1.000,bicubic -ssl_resnext101_32x8d,81.608,18.392,96.042,3.958,88.79,224,0.875,bilinear -edgenext_small,81.574,18.426,95.714,4.286,5.59,320,1.000,bicubic -lamhalobotnet50ts_256,81.552,18.448,95.504,4.496,22.57,256,0.950,bicubic -crossvit_15_240,81.544,18.456,95.690,4.310,27.53,240,0.875,bicubic -tf_efficientnet_lite4,81.534,18.466,95.666,4.334,13.01,380,0.920,bilinear -tnt_s_patch16_224,81.518,18.482,95.746,4.254,23.76,224,0.900,bicubic -levit_256,81.516,18.484,95.490,4.510,18.89,224,0.900,bicubic -vit_large_patch32_384,81.508,18.492,96.090,3.910,306.63,384,1.000,bicubic -tresnet_l,81.490,18.510,95.626,4.374,55.99,224,0.875,bilinear -convnext_nano,81.476,18.524,95.660,4.340,15.59,288,1.000,bicubic -mobilevitv2_150_in22ft1k,81.470,18.530,95.668,4.332,10.59,256,0.888,bicubic -wide_resnet50_2,81.456,18.544,95.530,4.470,68.88,224,0.875,bicubic -vit_relpos_small_patch16_224,81.454,18.546,95.828,4.172,21.98,224,0.900,bicubic -convit_small,81.428,18.572,95.742,4.258,27.78,224,0.875,bicubic -jx_nest_tiny,81.418,18.582,95.618,4.382,17.06,224,0.875,bicubic -poolformer_s36,81.418,18.582,95.448,4.552,30.86,224,0.900,bicubic -vit_small_patch16_224,81.396,18.604,96.138,3.862,22.05,224,0.900,bicubic -tf_efficientnet_b1_ns,81.386,18.614,95.736,4.264,7.79,240,0.882,bicubic -deit3_small_patch16_224,81.382,18.618,95.450,4.550,22.06,224,0.900,bicubic -swin_tiny_patch4_window7_224,81.376,18.624,95.542,4.458,28.29,224,0.900,bicubic -convmixer_1536_20,81.370,18.630,95.612,4.388,51.63,224,0.960,bicubic -gernet_l,81.350,18.650,95.536,4.464,31.08,256,0.875,bilinear -legacy_senet154,81.308,18.692,95.496,4.504,115.09,224,0.875,bilinear -efficientnet_el,81.306,18.694,95.534,4.466,10.59,300,0.904,bicubic -coat_mini,81.266,18.734,95.392,4.608,10.34,224,0.900,bicubic -seresnext50_32x4d,81.262,18.738,95.628,4.372,27.56,224,0.875,bicubic -gluon_senet154,81.230,18.770,95.346,4.654,115.09,224,0.875,bicubic 
-xcit_tiny_12_p8_224_dist,81.208,18.792,95.606,4.394,6.71,224,1.000,bicubic -deit_small_distilled_patch16_224,81.208,18.792,95.374,4.626,22.44,224,0.900,bicubic -swsl_resnet50,81.180,18.820,95.980,4.020,25.56,224,0.875,bilinear -resmlp_36_distilled_224,81.156,18.844,95.486,4.514,44.69,224,0.875,bicubic -sebotnet33ts_256,81.154,18.846,95.166,4.834,13.70,256,0.940,bicubic -lambda_resnet50ts,81.152,18.848,95.102,4.898,21.54,256,0.950,bicubic -mobilevitv2_200,81.140,18.860,95.368,4.632,18.45,256,0.888,bicubic -resnest50d_4s2x40d,81.108,18.892,95.562,4.438,30.42,224,0.875,bicubic -vit_srelpos_small_patch16_224,81.098,18.902,95.572,4.428,21.97,224,0.900,bicubic -pit_s_224,81.098,18.902,95.332,4.668,23.46,224,0.900,bicubic -resnext50_32x4d,81.096,18.904,95.326,4.674,25.03,224,0.950,bicubic -twins_pcpvt_small,81.090,18.910,95.642,4.358,24.11,224,0.900,bicubic -haloregnetz_b,81.044,18.956,95.198,4.802,11.68,224,0.940,bicubic -resmlp_big_24_224,81.030,18.970,95.020,4.980,129.14,224,0.875,bicubic -crossvit_small_240,81.016,18.984,95.456,4.544,26.86,240,0.875,bicubic -gluon_resnet152_v1s,81.014,18.986,95.414,4.586,60.32,224,0.875,bicubic -resnest50d_1s4x24d,80.984,19.016,95.324,4.676,25.68,224,0.875,bicubic -resnest50d,80.974,19.026,95.380,4.620,27.48,224,0.875,bilinear -sehalonet33ts,80.972,19.028,95.272,4.728,13.69,256,0.940,bicubic -cait_xxs24_384,80.962,19.038,95.644,4.356,12.03,384,1.000,bicubic -xcit_tiny_12_p16_384_dist,80.942,19.058,95.408,4.592,6.72,384,1.000,bicubic -gcresnet50t,80.934,19.066,95.454,4.546,25.90,256,0.900,bicubic -ssl_resnext101_32x4d,80.924,19.076,95.726,4.274,44.18,224,0.875,bilinear -gluon_seresnext101_32x4d,80.906,19.094,95.296,4.704,48.96,224,0.875,bicubic -cs3darknet_l,80.886,19.114,95.668,4.332,21.16,288,0.950,bicubic -gluon_seresnext101_64x4d,80.880,19.120,95.296,4.704,88.23,224,0.875,bicubic -cs3darknet_focus_l,80.874,19.126,95.692,4.308,21.15,288,0.950,bicubic -mobilevitv2_175,80.862,19.138,95.262,4.738,14.25,256,0.888,bicubic -efficientnet_b3_pruned,80.858,19.142,95.244,4.756,9.86,300,0.904,bicubic -ecaresnet101d_pruned,80.810,19.190,95.628,4.372,24.88,224,0.875,bicubic -regnety_320,80.804,19.196,95.244,4.756,145.05,224,0.875,bicubic -resmlp_24_distilled_224,80.764,19.236,95.222,4.778,30.02,224,0.875,bicubic -gernet_m,80.730,19.270,95.186,4.814,21.14,224,0.875,bilinear -vit_base_patch32_224,80.724,19.276,95.566,4.434,88.22,224,0.900,bicubic -regnetz_b16,80.712,19.288,95.474,4.526,9.72,288,0.940,bicubic -nf_resnet50,80.654,19.346,95.334,4.666,25.56,288,0.940,bicubic -efficientnet_b2,80.616,19.384,95.316,4.684,9.11,288,1.000,bicubic -gluon_resnext101_64x4d,80.604,19.396,94.992,5.008,83.46,224,0.875,bicubic -ecaresnet50d,80.598,19.402,95.318,4.682,25.58,224,0.875,bicubic -gcresnext50ts,80.578,19.422,95.170,4.830,15.67,256,0.900,bicubic -cspresnext50,80.544,19.456,95.324,4.676,20.57,256,0.887,bilinear -darknet53,80.538,19.462,95.420,4.580,41.61,288,1.000,bicubic -resnet50d,80.528,19.472,95.168,4.832,25.58,224,0.875,bicubic -darknetaa53,80.522,19.478,95.326,4.674,36.02,288,1.000,bilinear -repvgg_b3,80.496,19.504,95.264,4.736,123.09,224,0.875,bilinear -vit_small_patch32_384,80.490,19.510,95.600,4.400,22.92,384,1.000,bicubic -mixnet_xl,80.478,19.522,94.934,5.066,11.90,224,0.875,bicubic -gluon_resnet152_v1d,80.476,19.524,95.200,4.800,60.21,224,0.875,bicubic -inception_resnet_v2,80.460,19.540,95.306,4.694,55.84,299,0.897,bicubic -ecaresnetlight,80.456,19.544,95.246,4.754,30.16,224,0.875,bicubic -edgenext_small_rw,80.452,19.548,95.190,4.810,7.83,320,1.000,bicubic 
-xcit_tiny_24_p16_224_dist,80.448,19.552,95.212,4.788,12.12,224,1.000,bicubic -gluon_resnet101_v1d,80.418,19.582,95.014,4.986,44.57,224,0.875,bicubic -resnetv2_50,80.412,19.588,95.072,4.928,25.55,224,0.950,bicubic -regnety_120,80.376,19.624,95.122,4.878,51.82,224,0.875,bicubic +tf_efficientnet_b4.aa_in1k,83.022,16.978,96.300,3.700,19.34,380,0.922,bicubic +maxvit_rmlp_nano_rw_256,82.962,17.038,96.270,3.730,15.50,256,0.950,bicubic +mobilevitv2_175_384_in22ft1k,82.942,17.058,96.426,3.574,14.25,384,1.000,bicubic +maxvit_nano_rw_256,82.932,17.068,96.222,3.778,15.45,256,0.950,bicubic +xcit_large_24_p16_224,82.896,17.104,95.882,4.118,189.10,224,1.000,bicubic +resnest101e,82.890,17.110,96.320,3.680,48.28,256,0.875,bilinear +resnetv2_152x2_bit_teacher,82.862,17.138,96.568,3.432,236.34,224,0.875,bicubic +convnext_nano.in12k_ft_in1k,82.858,17.142,96.556,3.444,15.59,288,1.000,bicubic +resnet152,82.822,17.178,96.126,3.874,60.19,224,0.950,bicubic +resnetv2_50x1_bit_distilled,82.818,17.182,96.522,3.478,25.55,224,0.875,bicubic +swinv2_tiny_window16_256,82.810,17.190,96.232,3.768,28.35,256,0.900,bicubic +sequencer2d_m,82.806,17.194,96.268,3.732,38.31,224,0.875,bicubic +pnasnet5large,82.782,17.218,96.040,3.960,86.06,331,0.911,bicubic +vit_relpos_base_patch16_clsgap_224.sw_in1k,82.762,17.238,96.174,3.826,86.43,224,0.900,bicubic +nfnet_l0,82.750,17.250,96.516,3.484,35.07,288,1.000,bicubic +regnety_032,82.724,17.276,96.424,3.576,19.44,288,1.000,bicubic +twins_pcpvt_base,82.708,17.292,96.346,3.654,43.83,224,0.900,bicubic +cs3edgenet_x,82.702,17.298,96.370,3.630,47.82,288,1.000,bicubic +convnext_tiny.fb_in1k,82.700,17.300,96.136,3.864,28.59,288,1.000,bicubic +ig_resnext101_32x8d,82.688,17.312,96.636,3.364,88.79,224,0.875,bilinear +tf_efficientnetv2_b3.in21k_ft_in1k,82.672,17.328,96.624,3.376,14.36,300,0.900,bicubic +cs3sedarknet_x,82.654,17.346,96.354,3.646,35.40,288,1.000,bicubic +xcit_medium_24_p16_224,82.636,17.364,95.976,4.024,84.40,224,1.000,bicubic +regnetz_c16_evos,82.630,17.370,96.474,3.526,13.49,320,0.950,bicubic +nasnetalarge,82.620,17.380,96.046,3.954,88.75,331,0.911,bicubic +mobilevitv2_150_384_in22ft1k,82.594,17.406,96.318,3.682,10.59,384,1.000,bicubic +convnext_tiny_hnf.a2h_in1k,82.590,17.410,96.016,3.984,28.59,288,1.000,bicubic +levit_384,82.586,17.414,96.016,3.984,39.13,224,0.900,bicubic +vit_base_patch32_clip_224.laion2b_ft_in1k,82.582,17.418,96.202,3.798,88.22,224,0.900,bicubic +eca_nfnet_l0,82.580,17.420,96.490,3.510,24.14,288,1.000,bicubic +xcit_small_24_p16_224,82.580,17.420,96.004,3.996,47.67,224,1.000,bicubic +xcit_tiny_24_p16_384_dist,82.570,17.430,96.286,3.714,12.12,384,1.000,bicubic +xcit_tiny_24_p8_224_dist,82.562,17.438,96.170,3.830,12.11,224,1.000,bicubic +vit_relpos_medium_patch16_cls_224.sw_in1k,82.562,17.438,96.066,3.934,38.76,224,0.900,bicubic +efficientformer_l3,82.550,17.450,96.248,3.752,31.41,224,0.950,bicubic +flexivit_small.1200ep_in1k,82.526,17.474,96.136,3.864,22.06,240,0.950,bicubic +resnet61q,82.524,17.476,96.130,3.870,36.85,288,1.000,bicubic +regnetz_c16,82.518,17.482,96.360,3.640,13.46,320,0.940,bicubic +crossvit_18_dagger_240,82.518,17.482,96.072,3.928,44.27,240,0.875,bicubic +vit_relpos_base_patch16_224.sw_in1k,82.484,17.516,96.142,3.858,86.43,224,0.900,bicubic +vit_relpos_medium_patch16_224.sw_in1k,82.466,17.534,96.088,3.912,38.75,224,0.900,bicubic +gc_efficientnetv2_rw_t.agc_in1k,82.464,17.536,96.298,3.702,13.68,288,1.000,bicubic +poolformer_m48,82.462,17.538,95.958,4.042,73.47,224,0.950,bicubic +pit_b_224,82.446,17.554,95.710,4.290,73.76,224,0.900,bicubic 
+mvitv2_tiny,82.404,17.596,96.156,3.844,24.17,224,0.900,bicubic +crossvit_18_240,82.400,17.600,96.054,3.946,43.27,240,0.875,bicubic +coatnet_bn_0_rw_224,82.398,17.602,96.182,3.818,27.44,224,0.950,bicubic +coatnet_0_rw_224,82.390,17.610,95.836,4.164,27.44,224,0.950,bicubic +xcit_tiny_12_p8_384_dist,82.388,17.612,96.224,3.776,6.71,384,1.000,bicubic +tf_efficientnet_b2.ns_jft_in1k,82.380,17.620,96.248,3.752,9.11,260,0.890,bicubic +resnet51q,82.360,17.640,96.180,3.820,35.70,288,1.000,bilinear +flexivit_small.600ep_in1k,82.354,17.646,96.086,3.914,22.06,240,0.950,bicubic +efficientnetv2_rw_t.ra2_in1k,82.348,17.652,96.196,3.804,13.65,288,1.000,bicubic +ecaresnet50t,82.346,17.654,96.138,3.862,25.57,320,0.950,bicubic +sequencer2d_s,82.342,17.658,96.030,3.970,27.65,224,0.875,bicubic +resnetv2_101x1_bitm,82.332,17.668,96.518,3.482,44.54,448,1.000,bilinear +crossvit_15_dagger_240,82.332,17.668,95.956,4.044,28.21,240,0.875,bicubic +mobilevitv2_200_in22ft1k,82.324,17.676,95.940,4.060,18.45,256,0.888,bicubic +coat_lite_small,82.308,17.692,95.850,4.150,19.84,224,0.900,bicubic +mixer_b16_224_miil,82.308,17.692,95.716,4.284,59.88,224,0.875,bilinear +vit_relpos_medium_patch16_rpn_224.sw_in1k,82.298,17.702,95.974,4.026,38.73,224,0.900,bicubic +resnetrs101,82.288,17.712,96.008,3.992,63.62,288,0.940,bicubic +convit_base,82.288,17.712,95.938,4.062,86.54,224,0.875,bicubic +tresnet_l_448,82.268,17.732,95.976,4.024,55.99,448,0.875,bilinear +efficientnet_b3.ra2_in1k,82.242,17.758,96.114,3.886,12.23,320,1.000,bicubic +vit_srelpos_medium_patch16_224.sw_in1k,82.236,17.764,95.934,4.066,38.74,224,0.900,bicubic +cs3darknet_x,82.228,17.772,96.234,3.766,35.05,288,1.000,bicubic +crossvit_base_240,82.216,17.784,95.830,4.170,105.03,240,0.875,bicubic +vit_base_patch16_rpn_224.in1k,82.202,17.798,95.996,4.004,86.54,224,0.900,bicubic +pvt_v2_b2_li,82.196,17.804,96.104,3.896,22.55,224,0.900,bicubic +cait_xxs36_384,82.194,17.806,96.148,3.852,17.37,384,1.000,bicubic +swsl_resnext50_32x4d,82.182,17.818,96.230,3.770,25.03,224,0.875,bilinear +ecaresnet101d,82.172,17.828,96.046,3.954,44.57,224,0.875,bicubic +flexivit_small.300ep_in1k,82.172,17.828,96.024,3.976,22.06,240,0.950,bicubic +swin_s3_tiny_224,82.122,17.878,95.948,4.052,28.33,224,0.900,bicubic +poolformer_m36,82.110,17.890,95.688,4.312,56.17,224,0.950,bicubic +visformer_small,82.106,17.894,95.872,4.128,40.22,224,0.900,bicubic +pvt_v2_b2,82.076,17.924,95.962,4.038,25.36,224,0.900,bicubic +coatnet_rmlp_nano_rw_224,82.064,17.936,95.870,4.130,15.15,224,0.900,bicubic +halo2botnet50ts_256,82.060,17.940,95.636,4.364,22.64,256,0.950,bicubic +tresnet_xl,82.054,17.946,95.936,4.064,78.44,224,0.875,bilinear +fbnetv3_g.ra2_in1k,82.048,17.952,96.064,3.936,16.62,288,0.950,bilinear +resnetv2_101,82.030,17.970,95.860,4.140,44.54,224,0.950,bicubic +deit_base_patch16_224,81.998,18.002,95.734,4.266,86.57,224,0.900,bicubic +pit_s_distilled_224,81.996,18.004,95.798,4.202,24.04,224,0.900,bicubic +resnetv2_50d_evos,81.976,18.024,95.916,4.084,25.59,288,0.950,bicubic +xcit_small_12_p16_224,81.974,18.026,95.816,4.184,26.25,224,1.000,bicubic +tf_efficientnetv2_b3.in1k,81.970,18.030,95.782,4.218,14.36,300,0.904,bicubic +xception41p,81.958,18.042,95.794,4.206,26.91,299,0.940,bicubic +gcvit_xtiny,81.952,18.048,95.966,4.034,19.98,224,0.875,bicubic +coatnext_nano_rw_224,81.948,18.052,95.918,4.082,14.70,224,0.900,bicubic +mobilevitv2_175_in22ft1k,81.944,18.056,95.792,4.208,14.25,256,0.888,bicubic +resnet101,81.938,18.062,95.754,4.246,44.55,224,0.950,bicubic 
+vit_base_patch32_clip_224.openai_ft_in1k,81.930,18.070,95.968,4.032,88.22,224,0.900,bicubic +xcit_tiny_24_p8_224,81.900,18.100,95.976,4.024,12.11,224,1.000,bicubic +vit_small_r26_s32_224.augreg_in21k_ft_in1k,81.858,18.142,96.022,3.978,36.43,224,0.900,bicubic +ssl_resnext101_32x16d,81.844,18.156,96.096,3.904,194.03,224,0.875,bilinear +tf_efficientnet_b3.ap_in1k,81.822,18.178,95.624,4.376,12.23,300,0.904,bicubic +resnetv2_50d_gn,81.816,18.184,95.924,4.076,25.57,288,0.950,bicubic +swinv2_tiny_window8_256,81.806,18.194,95.994,4.006,28.35,256,0.900,bicubic +swinv2_cr_tiny_ns_224,81.790,18.210,95.824,4.176,28.33,224,0.900,bicubic +vit_base_patch16_224.orig_in21k_ft_in1k,81.786,18.214,96.122,3.878,86.57,224,0.900,bicubic +cs3sedarknet_l,81.774,18.226,95.968,4.032,21.91,288,0.950,bicubic +tresnet_m_448,81.714,18.286,95.572,4.428,31.39,448,0.875,bilinear +coatnet_nano_rw_224,81.700,18.300,95.638,4.362,15.14,224,0.900,bicubic +twins_svt_small,81.682,18.318,95.670,4.330,24.06,224,0.900,bicubic +halonet50ts,81.644,18.356,95.608,4.392,22.73,256,0.940,bicubic +tf_efficientnet_b3.aa_in1k,81.636,18.364,95.718,4.282,12.23,300,0.904,bicubic +rexnet_200,81.632,18.368,95.668,4.332,16.37,224,0.875,bicubic +resnetaa50,81.622,18.378,95.808,4.192,25.56,288,1.000,bicubic +ssl_resnext101_32x8d,81.616,18.384,96.038,3.962,88.79,224,0.875,bilinear +convnext_nano_ols.d1h_in1k,81.610,18.390,95.640,4.360,15.65,288,1.000,bicubic +edgenext_small,81.568,18.432,95.706,4.294,5.59,320,1.000,bicubic +lamhalobotnet50ts_256,81.544,18.456,95.504,4.496,22.57,256,0.950,bicubic +crossvit_15_240,81.536,18.464,95.692,4.308,27.53,240,0.875,bicubic +tf_efficientnet_lite4.in1k,81.536,18.464,95.668,4.332,13.01,380,0.920,bilinear +tnt_s_patch16_224,81.518,18.482,95.748,4.252,23.76,224,0.900,bicubic +levit_256,81.510,18.490,95.490,4.510,18.89,224,0.900,bicubic +vit_large_patch32_384.orig_in21k_ft_in1k,81.506,18.494,96.092,3.908,306.63,384,1.000,bicubic +tresnet_l,81.488,18.512,95.624,4.376,55.99,224,0.875,bilinear +mobilevitv2_150_in22ft1k,81.478,18.522,95.674,4.326,10.59,256,0.888,bicubic +convnext_nano.d1h_in1k,81.470,18.530,95.658,4.342,15.59,288,1.000,bicubic +vit_relpos_small_patch16_224.sw_in1k,81.462,18.538,95.828,4.172,21.98,224,0.900,bicubic +wide_resnet50_2,81.456,18.544,95.532,4.468,68.88,224,0.875,bicubic +convit_small,81.426,18.574,95.744,4.256,27.78,224,0.875,bicubic +poolformer_s36,81.416,18.584,95.446,4.554,30.86,224,0.900,bicubic +jx_nest_tiny,81.414,18.586,95.616,4.384,17.06,224,0.875,bicubic +vit_small_patch16_224.augreg_in21k_ft_in1k,81.402,18.598,96.134,3.866,22.05,224,0.900,bicubic +tf_efficientnet_b1.ns_jft_in1k,81.388,18.612,95.738,4.262,7.79,240,0.882,bicubic +deit3_small_patch16_224,81.386,18.614,95.450,4.550,22.06,224,0.900,bicubic +swin_tiny_patch4_window7_224,81.378,18.622,95.540,4.460,28.29,224,0.900,bicubic +convmixer_1536_20,81.376,18.624,95.614,4.386,51.63,224,0.960,bicubic +gernet_l,81.354,18.646,95.536,4.464,31.08,256,0.875,bilinear +efficientnet_el.ra_in1k,81.316,18.684,95.526,4.474,10.59,300,0.904,bicubic +legacy_senet154,81.310,18.690,95.496,4.504,115.09,224,0.875,bilinear +coat_mini,81.268,18.732,95.392,4.608,10.34,224,0.900,bicubic +seresnext50_32x4d,81.266,18.734,95.620,4.380,27.56,224,0.875,bicubic +gluon_senet154,81.234,18.766,95.348,4.652,115.09,224,0.875,bicubic +xcit_tiny_12_p8_224_dist,81.212,18.788,95.600,4.400,6.71,224,1.000,bicubic +deit_small_distilled_patch16_224,81.200,18.800,95.378,4.622,22.44,224,0.900,bicubic +swsl_resnet50,81.166,18.834,95.972,4.028,25.56,224,0.875,bilinear 
+lambda_resnet50ts,81.166,18.834,95.096,4.904,21.54,256,0.950,bicubic +resmlp_36_distilled_224,81.160,18.840,95.488,4.512,44.69,224,0.875,bicubic +sebotnet33ts_256,81.150,18.850,95.174,4.826,13.70,256,0.940,bicubic +mobilevitv2_200,81.136,18.864,95.366,4.634,18.45,256,0.888,bicubic +vit_small_patch16_384.augreg_in1k,81.120,18.880,95.574,4.426,22.20,384,1.000,bicubic +resnext50_32x4d,81.118,18.882,95.332,4.668,25.03,224,0.950,bicubic +resnest50d_4s2x40d,81.108,18.892,95.558,4.442,30.42,224,0.875,bicubic +vit_base_patch16_384.augreg_in1k,81.102,18.898,95.332,4.668,86.86,384,1.000,bicubic +vit_srelpos_small_patch16_224.sw_in1k,81.094,18.906,95.570,4.430,21.97,224,0.900,bicubic +pit_s_224,81.094,18.906,95.332,4.668,23.46,224,0.900,bicubic +twins_pcpvt_small,81.088,18.912,95.642,4.358,24.11,224,0.900,bicubic +haloregnetz_b,81.050,18.950,95.196,4.804,11.68,224,0.940,bicubic +resmlp_big_24_224,81.028,18.972,95.022,4.978,129.14,224,0.875,bicubic +crossvit_small_240,81.020,18.980,95.460,4.540,26.86,240,0.875,bicubic +gluon_resnet152_v1s,81.016,18.984,95.412,4.588,60.32,224,0.875,bicubic +resnest50d_1s4x24d,80.988,19.012,95.322,4.678,25.68,224,0.875,bicubic +resnest50d,80.974,19.026,95.378,4.622,27.48,224,0.875,bilinear +cait_xxs24_384,80.966,19.034,95.646,4.354,12.03,384,1.000,bicubic +sehalonet33ts,80.958,19.042,95.276,4.724,13.69,256,0.940,bicubic +gcresnet50t,80.940,19.060,95.454,4.546,25.90,256,0.900,bicubic +xcit_tiny_12_p16_384_dist,80.940,19.060,95.410,4.590,6.72,384,1.000,bicubic +ssl_resnext101_32x4d,80.924,19.076,95.728,4.272,44.18,224,0.875,bilinear +gluon_seresnext101_32x4d,80.904,19.096,95.294,4.706,48.96,224,0.875,bicubic +cs3darknet_l,80.896,19.104,95.670,4.330,21.16,288,0.950,bicubic +gluon_seresnext101_64x4d,80.894,19.106,95.308,4.692,88.23,224,0.875,bicubic +cs3darknet_focus_l,80.884,19.116,95.682,4.318,21.15,288,0.950,bicubic +mobilevitv2_175,80.860,19.140,95.254,4.746,14.25,256,0.888,bicubic +efficientnet_b3_pruned.in1k,80.858,19.142,95.242,4.758,9.86,300,0.904,bicubic +ecaresnet101d_pruned,80.818,19.182,95.628,4.372,24.88,224,0.875,bicubic +regnety_320,80.810,19.190,95.244,4.756,145.05,224,0.875,bicubic +resmlp_24_distilled_224,80.766,19.234,95.218,4.782,30.02,224,0.875,bicubic +gernet_m,80.732,19.268,95.184,4.816,21.14,224,0.875,bilinear +vit_base_patch32_224.augreg_in21k_ft_in1k,80.724,19.276,95.568,4.432,88.22,224,0.900,bicubic +regnetz_b16,80.716,19.284,95.478,4.522,9.72,288,0.940,bicubic +nf_resnet50,80.662,19.338,95.336,4.664,25.56,288,0.940,bicubic +efficientnet_b2.ra_in1k,80.612,19.388,95.318,4.682,9.11,288,1.000,bicubic +gluon_resnext101_64x4d,80.604,19.396,94.988,5.012,83.46,224,0.875,bicubic +ecaresnet50d,80.592,19.408,95.320,4.680,25.58,224,0.875,bicubic +gcresnext50ts,80.580,19.420,95.170,4.830,15.67,256,0.900,bicubic +cspresnext50,80.546,19.454,95.320,4.680,20.57,256,0.887,bilinear +darknet53,80.534,19.466,95.420,4.580,41.61,288,1.000,bicubic +resnet50d,80.530,19.470,95.160,4.840,25.58,224,0.875,bicubic +darknetaa53,80.522,19.478,95.322,4.678,36.02,288,1.000,bilinear +maxvit_rmlp_pico_rw_256,80.516,19.484,95.212,4.788,7.52,256,0.950,bicubic +efficientformer_l1,80.502,19.498,94.998,5.002,12.29,224,0.950,bicubic +repvgg_b3,80.492,19.508,95.260,4.740,123.09,224,0.875,bilinear +vit_small_patch32_384.augreg_in21k_ft_in1k,80.480,19.520,95.598,4.402,22.92,384,1.000,bicubic +mixnet_xl.ra_in1k,80.476,19.524,94.936,5.064,11.90,224,0.875,bicubic +gluon_resnet152_v1d,80.474,19.526,95.206,4.794,60.21,224,0.875,bicubic 
+convnext_pico_ols.d1_in1k,80.464,19.536,95.242,4.758,9.06,288,1.000,bicubic +ecaresnetlight,80.462,19.538,95.248,4.752,30.16,224,0.875,bicubic +inception_resnet_v2,80.458,19.542,95.306,4.694,55.84,299,0.897,bicubic +edgenext_small_rw,80.456,19.544,95.192,4.808,7.83,320,1.000,bicubic +xcit_tiny_24_p16_224_dist,80.446,19.554,95.218,4.782,12.12,224,1.000,bicubic +resnetv2_50,80.432,19.568,95.080,4.920,25.55,224,0.950,bicubic +convnext_pico.d1_in1k,80.426,19.574,95.058,4.942,9.05,288,0.950,bicubic +gluon_resnet101_v1d,80.414,19.586,95.014,4.986,44.57,224,0.875,bicubic +mobilevitv2_150,80.376,19.624,95.060,4.940,10.59,256,0.888,bicubic resnet50,80.374,19.626,94.614,5.386,25.56,224,0.950,bicubic -mobilevitv2_150,80.368,19.632,95.064,4.936,10.59,256,0.888,bicubic -seresnet33ts,80.354,19.646,95.106,4.894,19.78,256,0.900,bicubic -resnetv2_50x1_bitm,80.342,19.658,95.686,4.314,25.55,448,1.000,bilinear -gluon_resnext101_32x4d,80.340,19.660,94.926,5.074,44.18,224,0.875,bicubic -ssl_resnext50_32x4d,80.326,19.674,95.412,4.588,25.03,224,0.875,bilinear -poolformer_s24,80.316,19.684,95.042,4.958,21.39,224,0.900,bicubic -rexnet_150,80.314,19.686,95.166,4.834,9.73,224,0.875,bicubic -tf_efficientnet_b2_ap,80.302,19.698,95.028,4.972,9.11,260,0.890,bicubic -efficientnet_el_pruned,80.298,19.702,95.214,4.786,10.59,300,0.904,bicubic -gluon_resnet101_v1s,80.298,19.702,95.162,4.838,44.67,224,0.875,bicubic -seresnet50,80.266,19.734,95.070,4.930,28.09,224,0.875,bicubic -tf_efficientnet_el,80.254,19.746,95.128,4.872,10.59,300,0.904,bicubic -regnetx_320,80.244,19.756,95.020,4.980,107.81,224,0.875,bicubic -vit_base_patch16_224_sam,80.244,19.756,94.754,5.246,86.57,224,0.900,bicubic -legacy_seresnext101_32x4d,80.222,19.778,95.014,4.986,48.96,224,0.875,bilinear -repvgg_b3g4,80.216,19.784,95.108,4.892,83.83,224,0.875,bilinear -tf_efficientnetv2_b2,80.208,19.792,95.044,4.956,10.10,260,0.890,bicubic -inception_v4,80.168,19.832,94.964,5.036,42.68,299,0.875,bicubic -dpn107,80.168,19.832,94.906,5.094,86.92,224,0.875,bicubic +regnety_120,80.366,19.634,95.126,4.874,51.82,224,0.875,bicubic +seresnet33ts,80.352,19.648,95.106,4.894,19.78,256,0.900,bicubic +resnetv2_50x1_bitm,80.342,19.658,95.684,4.316,25.55,448,1.000,bilinear +gluon_resnext101_32x4d,80.334,19.666,94.926,5.074,44.18,224,0.875,bicubic +ssl_resnext50_32x4d,80.318,19.682,95.406,4.594,25.03,224,0.875,bilinear +poolformer_s24,80.316,19.684,95.038,4.962,21.39,224,0.900,bicubic +rexnet_150,80.310,19.690,95.166,4.834,9.73,224,0.875,bicubic +gluon_resnet101_v1s,80.302,19.698,95.160,4.840,44.67,224,0.875,bicubic +efficientnet_el_pruned.in1k,80.300,19.700,95.218,4.782,10.59,300,0.904,bicubic +tf_efficientnet_b2.ap_in1k,80.300,19.700,95.028,4.972,9.11,260,0.890,bicubic +seresnet50,80.274,19.726,95.070,4.930,28.09,224,0.875,bicubic +tf_efficientnet_el.in1k,80.250,19.750,95.128,4.872,10.59,300,0.904,bicubic +regnetx_320,80.246,19.754,95.026,4.974,107.81,224,0.875,bicubic +vit_base_patch16_224.sam,80.242,19.758,94.756,5.244,86.57,224,0.900,bicubic +legacy_seresnext101_32x4d,80.228,19.772,95.018,4.982,48.96,224,0.875,bilinear +repvgg_b3g4,80.212,19.788,95.110,4.890,83.83,224,0.875,bilinear +tf_efficientnetv2_b2.in1k,80.208,19.792,95.042,4.958,10.10,260,0.890,bicubic +inception_v4,80.168,19.832,94.968,5.032,42.68,299,0.875,bicubic convmixer_768_32,80.164,19.836,95.072,4.928,21.11,224,0.960,bicubic -skresnext50_32x4d,80.154,19.846,94.646,5.354,27.48,224,0.875,bicubic -tf_efficientnet_b2,80.088,19.912,94.908,5.092,9.11,260,0.890,bicubic 
-eca_resnet33ts,80.080,19.920,94.972,5.028,19.68,256,0.900,bicubic -gcresnet33ts,80.076,19.924,94.994,5.006,19.88,256,0.900,bicubic -resnet50_gn,80.060,19.940,94.948,5.052,25.56,224,0.940,bicubic -cspdarknet53,80.056,19.944,95.086,4.914,27.64,256,0.887,bilinear -dpn92,80.020,19.980,94.830,5.170,37.67,224,0.875,bicubic -ens_adv_inception_resnet_v2,79.974,20.026,94.942,5.058,55.84,299,0.897,bicubic -efficientnet_b2_pruned,79.918,20.082,94.850,5.150,8.31,260,0.890,bicubic -gluon_resnet152_v1c,79.912,20.088,94.842,5.158,60.21,224,0.875,bicubic -gluon_seresnext50_32x4d,79.912,20.088,94.832,5.168,27.56,224,0.875,bicubic -resnetrs50,79.886,20.114,94.970,5.030,35.69,224,0.910,bicubic -xception71,79.870,20.130,94.924,5.076,42.34,299,0.903,bicubic -deit_small_patch16_224,79.864,20.136,95.048,4.952,22.05,224,0.900,bicubic -regnetx_160,79.854,20.146,94.830,5.170,54.28,224,0.875,bicubic -ecaresnet26t,79.852,20.148,95.084,4.916,16.01,320,0.950,bicubic -levit_192,79.836,20.164,94.790,5.210,10.95,224,0.900,bicubic -dpn131,79.826,20.174,94.708,5.292,79.25,224,0.875,bicubic -tf_efficientnet_lite3,79.818,20.182,94.914,5.086,8.20,300,0.904,bilinear +dpn107,80.156,19.844,94.910,5.090,86.92,224,0.875,bicubic +skresnext50_32x4d,80.156,19.844,94.642,5.358,27.48,224,0.875,bicubic +tf_efficientnet_b2.aa_in1k,80.086,19.914,94.908,5.092,9.11,260,0.890,bicubic +gcresnet33ts,80.082,19.918,94.998,5.002,19.88,256,0.900,bicubic +eca_resnet33ts,80.078,19.922,94.970,5.030,19.68,256,0.900,bicubic +cspdarknet53,80.058,19.942,95.084,4.916,27.64,256,0.887,bilinear +resnet50_gn,80.052,19.948,94.946,5.054,25.56,224,0.940,bicubic +dpn92,80.008,19.992,94.836,5.164,37.67,224,0.875,bicubic +ens_adv_inception_resnet_v2,79.982,20.018,94.938,5.062,55.84,299,0.897,bicubic +gluon_seresnext50_32x4d,79.918,20.082,94.822,5.178,27.56,224,0.875,bicubic +efficientnet_b2_pruned.in1k,79.916,20.084,94.856,5.144,8.31,260,0.890,bicubic +gluon_resnet152_v1c,79.910,20.090,94.840,5.160,60.21,224,0.875,bicubic +resnetrs50,79.892,20.108,94.968,5.032,35.69,224,0.910,bicubic +xception71,79.874,20.126,94.922,5.078,42.34,299,0.903,bicubic +deit_small_patch16_224,79.856,20.144,95.052,4.948,22.05,224,0.900,bicubic +regnetx_160,79.856,20.144,94.830,5.170,54.28,224,0.875,bicubic +ecaresnet26t,79.854,20.146,95.084,4.916,16.01,320,0.950,bicubic +levit_192,79.842,20.158,94.786,5.214,10.95,224,0.900,bicubic +dpn131,79.822,20.178,94.710,5.290,79.25,224,0.875,bicubic +tf_efficientnet_lite3.in1k,79.820,20.180,94.914,5.086,8.20,300,0.904,bilinear resmlp_36_224,79.770,20.230,94.886,5.114,44.69,224,0.875,bicubic -cait_xxs36_224,79.748,20.252,94.868,5.132,17.30,224,1.000,bicubic -gluon_xception65,79.722,20.278,94.860,5.140,39.92,299,0.903,bicubic -ecaresnet50d_pruned,79.718,20.282,94.876,5.124,19.94,224,0.875,bicubic -xcit_tiny_12_p8_224,79.694,20.306,95.048,4.952,6.71,224,1.000,bicubic -mobilevitv2_125,79.682,20.318,94.848,5.152,7.48,256,0.888,bicubic -gluon_resnet152_v1b,79.682,20.318,94.736,5.264,60.19,224,0.875,bicubic -fbnetv3_d,79.680,20.320,94.940,5.060,10.31,256,0.950,bilinear +cait_xxs36_224,79.750,20.250,94.866,5.134,17.30,224,1.000,bicubic +ecaresnet50d_pruned,79.716,20.284,94.880,5.120,19.94,224,0.875,bicubic +gluon_xception65,79.716,20.284,94.860,5.140,39.92,299,0.903,bicubic +gcvit_xxtiny,79.714,20.286,95.080,4.920,12.00,224,0.875,bicubic +xcit_tiny_12_p8_224,79.694,20.306,95.052,4.948,6.71,224,1.000,bicubic +gluon_resnet152_v1b,79.686,20.314,94.736,5.264,60.19,224,0.875,bicubic +mobilevitv2_125,79.684,20.316,94.850,5.150,7.48,256,0.888,bicubic 
+fbnetv3_d.ra2_in1k,79.680,20.320,94.944,5.056,10.31,256,0.950,bilinear resnext50d_32x4d,79.676,20.324,94.866,5.134,25.05,224,0.875,bicubic -dpn98,79.644,20.356,94.600,5.400,61.57,224,0.875,bicubic -gmlp_s16_224,79.640,20.360,94.624,5.376,19.42,224,0.875,bicubic -regnetx_120,79.592,20.408,94.734,5.266,46.11,224,0.875,bicubic -cspresnet50,79.582,20.418,94.708,5.292,21.62,256,0.887,bilinear -gluon_resnet101_v1c,79.536,20.464,94.578,5.422,44.57,224,0.875,bicubic -rexnet_130,79.502,20.498,94.682,5.318,7.56,224,0.875,bicubic -eca_halonext26ts,79.488,20.512,94.604,5.396,10.76,256,0.940,bicubic -vit_relpos_base_patch32_plus_rpn_256,79.486,20.514,94.140,5.860,119.42,256,0.900,bicubic -hrnet_w64,79.470,20.530,94.654,5.346,128.06,224,0.875,bilinear -tf_efficientnetv2_b1,79.466,20.534,94.722,5.278,8.14,240,0.882,bicubic -xcit_tiny_24_p16_224,79.444,20.556,94.888,5.112,12.12,224,1.000,bicubic -dla102x2,79.442,20.558,94.646,5.354,41.28,224,0.875,bilinear -resmlp_24_224,79.378,20.622,94.546,5.454,30.02,224,0.875,bicubic +gmlp_s16_224,79.642,20.358,94.628,5.372,19.42,224,0.875,bicubic +dpn98,79.642,20.358,94.598,5.402,61.57,224,0.875,bicubic +regnetx_120,79.596,20.404,94.738,5.262,46.11,224,0.875,bicubic +cspresnet50,79.574,20.426,94.712,5.288,21.62,256,0.887,bilinear +gluon_resnet101_v1c,79.534,20.466,94.578,5.422,44.57,224,0.875,bicubic +rexnet_130,79.500,20.500,94.682,5.318,7.56,224,0.875,bicubic +eca_halonext26ts,79.486,20.514,94.598,5.402,10.76,256,0.940,bicubic +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,79.480,20.520,94.138,5.862,119.42,256,0.900,bicubic +hrnet_w64,79.474,20.526,94.652,5.348,128.06,224,0.875,bilinear +tf_efficientnetv2_b1.in1k,79.462,20.538,94.722,5.278,8.14,240,0.882,bicubic +dla102x2,79.448,20.552,94.640,5.360,41.28,224,0.875,bilinear +xcit_tiny_24_p16_224,79.444,20.556,94.882,5.118,12.12,224,1.000,bicubic +resmlp_24_224,79.374,20.626,94.546,5.454,30.02,224,0.875,bicubic repvgg_b2g4,79.366,20.634,94.688,5.312,61.76,224,0.875,bilinear -gluon_resnext50_32x4d,79.360,20.640,94.426,5.574,25.03,224,0.875,bicubic -resnext101_32x8d,79.316,20.684,94.518,5.482,88.79,224,0.875,bilinear -tf_efficientnet_cc_b1_8e,79.314,20.686,94.370,5.630,39.72,240,0.882,bicubic -ese_vovnet39b,79.312,20.688,94.714,5.286,24.57,224,0.875,bicubic -pit_xs_distilled_224,79.308,20.692,94.366,5.634,11.00,224,0.900,bicubic -gluon_resnet101_v1b,79.304,20.696,94.520,5.480,44.55,224,0.875,bicubic -nf_regnet_b1,79.300,20.700,94.754,5.246,10.22,288,0.900,bicubic -hrnet_w48,79.300,20.700,94.514,5.486,77.47,224,0.875,bilinear -resnetblur50,79.294,20.706,94.634,5.366,25.56,224,0.875,bicubic -eca_botnext26ts_256,79.276,20.724,94.616,5.384,10.59,256,0.950,bicubic -tf_efficientnet_b1_ap,79.274,20.726,94.308,5.692,7.79,240,0.882,bicubic -botnet26t_256,79.258,20.742,94.528,5.472,12.49,256,0.950,bicubic -efficientnet_em,79.252,20.748,94.792,5.208,6.90,240,0.882,bicubic -ssl_resnet50,79.224,20.776,94.830,5.170,25.56,224,0.875,bilinear +gluon_resnext50_32x4d,79.354,20.646,94.426,5.574,25.03,224,0.875,bicubic +ese_vovnet39b,79.320,20.680,94.712,5.288,24.57,224,0.875,bicubic +resnext101_32x8d,79.308,20.692,94.518,5.482,88.79,224,0.875,bilinear +tf_efficientnet_cc_b1_8e.in1k,79.308,20.692,94.370,5.630,39.72,240,0.882,bicubic +gluon_resnet101_v1b,79.306,20.694,94.524,5.476,44.55,224,0.875,bicubic +pit_xs_distilled_224,79.306,20.694,94.364,5.636,11.00,224,0.900,bicubic +hrnet_w48,79.300,20.700,94.512,5.488,77.47,224,0.875,bilinear +nf_regnet_b1,79.292,20.708,94.748,5.252,10.22,288,0.900,bicubic 
+resnetblur50,79.286,20.714,94.638,5.362,25.56,224,0.875,bicubic +tf_efficientnet_b1.ap_in1k,79.280,20.720,94.306,5.694,7.79,240,0.882,bicubic +eca_botnext26ts_256,79.274,20.726,94.614,5.386,10.59,256,0.950,bicubic +botnet26t_256,79.272,20.728,94.528,5.472,12.49,256,0.950,bicubic +efficientnet_em.ra2_in1k,79.252,20.748,94.794,5.206,6.90,240,0.882,bicubic +ssl_resnet50,79.222,20.778,94.832,5.168,25.56,224,0.875,bilinear dpn68b,79.216,20.784,94.414,5.586,12.61,224,0.875,bicubic -resnet33ts,79.208,20.792,94.574,5.426,19.68,256,0.900,bicubic -regnetx_080,79.202,20.798,94.552,5.448,39.57,224,0.875,bicubic -res2net101_26w_4s,79.196,20.804,94.436,5.564,45.21,224,0.875,bilinear -fbnetv3_b,79.142,20.858,94.750,5.250,8.60,256,0.950,bilinear -halonet26t,79.112,20.888,94.314,5.686,12.48,256,0.950,bicubic -lambda_resnet26t,79.098,20.902,94.590,5.410,10.96,256,0.940,bicubic -coat_lite_mini,79.088,20.912,94.608,5.392,11.01,224,0.900,bicubic -legacy_seresnext50_32x4d,79.076,20.924,94.434,5.566,27.56,224,0.875,bilinear -regnetx_064,79.074,20.926,94.460,5.540,26.21,224,0.875,bicubic -gluon_resnet50_v1d,79.070,20.930,94.466,5.534,25.58,224,0.875,bicubic -xception,79.044,20.956,94.394,5.606,22.86,299,0.897,bicubic -resnet32ts,79.014,20.986,94.356,5.644,17.96,256,0.900,bicubic -mixnet_l,78.976,21.024,94.178,5.822,7.33,224,0.875,bicubic -lambda_resnet26rpt_256,78.964,21.036,94.426,5.574,10.99,256,0.940,bicubic -res2net50_26w_8s,78.952,21.048,94.306,5.694,48.40,224,0.875,bilinear -hrnet_w40,78.922,21.078,94.470,5.530,57.56,224,0.875,bilinear -hrnet_w44,78.896,21.104,94.370,5.630,67.06,224,0.875,bilinear -wide_resnet101_2,78.852,21.148,94.288,5.712,126.89,224,0.875,bilinear -tf_efficientnet_b1,78.828,21.172,94.198,5.802,7.79,240,0.882,bicubic +resnet33ts,79.214,20.786,94.574,5.426,19.68,256,0.900,bicubic +res2net50_26w_8s,79.200,20.800,94.368,5.632,48.40,224,0.875,bilinear +res2net101_26w_4s,79.198,20.802,94.432,5.568,45.21,224,0.875,bilinear +regnetx_080,79.194,20.806,94.560,5.440,39.57,224,0.875,bicubic +vit_base_patch16_224.augreg_in1k,79.154,20.846,94.100,5.900,86.57,224,0.900,bicubic +fbnetv3_b.ra2_in1k,79.150,20.850,94.746,5.254,8.60,256,0.950,bilinear +halonet26t,79.100,20.900,94.312,5.688,12.48,256,0.950,bicubic +lambda_resnet26t,79.096,20.904,94.592,5.408,10.96,256,0.940,bicubic +coat_lite_mini,79.088,20.912,94.604,5.396,11.01,224,0.900,bicubic +legacy_seresnext50_32x4d,79.078,20.922,94.436,5.564,27.56,224,0.875,bilinear +gluon_resnet50_v1d,79.074,20.926,94.470,5.530,25.58,224,0.875,bicubic +regnetx_064,79.072,20.928,94.458,5.542,26.21,224,0.875,bicubic +xception,79.052,20.948,94.392,5.608,22.86,299,0.897,bicubic +resnet32ts,79.004,20.996,94.356,5.644,17.96,256,0.900,bicubic +mixnet_l.ft_in1k,78.976,21.024,94.182,5.818,7.33,224,0.875,bicubic +lambda_resnet26rpt_256,78.970,21.030,94.430,5.570,10.99,256,0.940,bicubic +convnext_femto_ols.d1_in1k,78.934,21.066,94.532,5.468,5.23,288,0.950,bicubic +hrnet_w40,78.920,21.080,94.470,5.530,57.56,224,0.875,bilinear +convnext_tiny.fb_in22k_ft_in1k,78.908,21.092,94.674,5.326,28.59,288,1.000,bicubic +hrnet_w44,78.896,21.104,94.368,5.632,67.06,224,0.875,bilinear +wide_resnet101_2,78.856,21.144,94.282,5.718,126.89,224,0.875,bilinear +vit_small_patch16_224.augreg_in1k,78.846,21.154,94.284,5.716,22.05,224,0.900,bicubic +tf_efficientnet_b1.aa_in1k,78.826,21.174,94.198,5.802,7.79,240,0.882,bicubic gluon_inception_v3,78.806,21.194,94.370,5.630,23.83,299,0.875,bicubic -repvgg_b2,78.794,21.206,94.418,5.582,89.02,224,0.875,bilinear 
-efficientnet_b1,78.788,21.212,94.346,5.654,7.79,256,1.000,bicubic -tf_mixnet_l,78.778,21.222,93.998,6.002,7.33,224,0.875,bicubic -gluon_resnet50_v1s,78.706,21.294,94.238,5.762,25.68,224,0.875,bicubic -dla169,78.682,21.318,94.336,5.664,53.39,224,0.875,bilinear -tf_efficientnet_b0_ns,78.664,21.336,94.376,5.624,5.29,224,0.875,bicubic -legacy_seresnet152,78.652,21.348,94.370,5.630,66.82,224,0.875,bilinear -xcit_tiny_12_p16_224_dist,78.578,21.422,94.198,5.802,6.72,224,1.000,bicubic +efficientnet_b1.ft_in1k,78.794,21.206,94.342,5.658,7.79,256,1.000,bicubic +repvgg_b2,78.792,21.208,94.414,5.586,89.02,224,0.875,bilinear +tf_mixnet_l.in1k,78.774,21.226,93.998,6.002,7.33,224,0.875,bicubic +vit_base_patch32_384.augreg_in1k,78.760,21.240,94.228,5.772,88.30,384,1.000,bicubic +gluon_resnet50_v1s,78.712,21.288,94.238,5.762,25.68,224,0.875,bicubic +convnext_femto.d1_in1k,78.704,21.296,94.434,5.566,5.22,288,0.950,bicubic +pvt_v2_b1,78.694,21.306,94.492,5.508,14.01,224,0.900,bicubic +dla169,78.688,21.312,94.336,5.664,53.39,224,0.875,bilinear +legacy_seresnet152,78.660,21.340,94.370,5.630,66.82,224,0.875,bilinear +tf_efficientnet_b0.ns_jft_in1k,78.658,21.342,94.376,5.624,5.29,224,0.875,bicubic +xcit_tiny_12_p16_224_dist,78.578,21.422,94.196,5.804,6.72,224,1.000,bicubic res2net50_26w_6s,78.570,21.430,94.124,5.876,37.05,224,0.875,bilinear -xception41,78.516,21.484,94.280,5.720,26.97,299,0.903,bicubic -dla102x,78.512,21.488,94.228,5.772,26.31,224,0.875,bilinear -regnetx_040,78.488,21.512,94.238,5.762,22.12,224,0.875,bicubic -resnest26d,78.484,21.516,94.294,5.706,17.07,224,0.875,bilinear -levit_128,78.482,21.518,94.012,5.988,9.21,224,0.900,bicubic -dla60_res2net,78.458,21.542,94.196,5.804,20.85,224,0.875,bilinear -dla60_res2next,78.456,21.544,94.146,5.854,17.03,224,0.875,bilinear -hrnet_w32,78.452,21.548,94.188,5.812,41.23,224,0.875,bilinear -coat_tiny,78.436,21.564,94.038,5.962,5.50,224,0.900,bicubic -vit_tiny_patch16_384,78.430,21.570,94.544,5.456,5.79,384,1.000,bicubic -selecsls60b,78.404,21.596,94.172,5.828,32.77,224,0.875,bicubic -cait_xxs24_224,78.386,21.614,94.308,5.692,11.96,224,1.000,bicubic -legacy_seresnet101,78.380,21.620,94.262,5.738,49.33,224,0.875,bilinear -repvgg_b1,78.368,21.632,94.094,5.906,57.42,224,0.875,bilinear -tf_efficientnetv2_b0,78.352,21.648,94.026,5.974,7.14,224,0.875,bicubic -tv_resnet152,78.320,21.680,94.034,5.966,60.19,224,0.875,bilinear -mobilevit_s,78.310,21.690,94.152,5.848,5.58,256,0.900,bicubic -res2next50,78.258,21.742,93.888,6.112,24.67,224,0.875,bilinear -bat_resnext26ts,78.248,21.752,94.096,5.904,10.73,256,0.900,bicubic -efficientnet_b1_pruned,78.244,21.756,93.834,6.166,6.33,240,0.882,bicubic -dla60x,78.228,21.772,94.024,5.976,17.35,224,0.875,bilinear -hrnet_w30,78.198,21.802,94.224,5.776,37.71,224,0.875,bilinear -pit_xs_224,78.190,21.810,94.166,5.834,10.62,224,0.900,bicubic -regnetx_032,78.184,21.816,94.088,5.912,15.30,224,0.875,bicubic -res2net50_14w_8s,78.144,21.856,93.852,6.148,25.06,224,0.875,bilinear -tf_efficientnet_em,78.126,21.874,94.046,5.954,6.90,240,0.882,bicubic -hardcorenas_f,78.102,21.898,93.802,6.198,8.20,224,0.875,bilinear -mobilevitv2_100,78.086,21.914,94.160,5.840,4.90,256,0.888,bicubic -efficientnet_es,78.058,21.942,93.944,6.056,5.44,224,0.875,bicubic -gmixer_24_224,78.036,21.964,93.670,6.330,24.72,224,0.875,bicubic -dla102,78.028,21.972,93.950,6.050,33.27,224,0.875,bilinear -gluon_resnet50_v1c,78.008,21.992,93.990,6.010,25.58,224,0.875,bicubic -selecsls60,77.984,22.016,93.832,6.168,30.67,224,0.875,bicubic 
-seresnext26t_32x4d,77.968,22.032,93.748,6.252,16.81,224,0.875,bicubic -res2net50_26w_4s,77.962,22.038,93.852,6.148,25.70,224,0.875,bilinear -resmlp_12_distilled_224,77.946,22.054,93.560,6.440,15.35,224,0.875,bicubic -mobilenetv3_large_100_miil,77.922,22.078,92.920,7.080,5.48,224,0.875,bilinear -tf_efficientnet_cc_b0_8e,77.900,22.100,93.658,6.342,24.01,224,0.875,bicubic -resnet26t,77.864,22.136,93.842,6.158,16.01,256,0.940,bicubic -rexnet_100,77.860,22.140,93.874,6.126,4.80,224,0.875,bicubic -seresnext26ts,77.858,22.142,93.790,6.210,10.39,256,0.900,bicubic -regnety_016,77.856,22.144,93.720,6.280,11.20,224,0.875,bicubic -tf_inception_v3,77.852,22.148,93.640,6.360,23.83,299,0.875,bicubic -xcit_nano_12_p8_384_dist,77.816,22.184,94.046,5.954,3.05,384,1.000,bicubic -gcresnext26ts,77.814,22.186,93.836,6.164,10.48,256,0.900,bicubic -hardcorenas_e,77.786,22.214,93.704,6.296,8.07,224,0.875,bilinear -efficientnet_b0,77.700,22.300,93.532,6.468,5.29,224,0.875,bicubic -tinynet_a,77.648,22.352,93.536,6.464,6.19,192,0.875,bicubic -legacy_seresnet50,77.632,22.368,93.750,6.250,28.09,224,0.875,bilinear -cs3darknet_m,77.626,22.374,94.014,5.986,9.31,288,0.950,bicubic -tv_resnext50_32x4d,77.618,22.382,93.700,6.300,25.03,224,0.875,bilinear -seresnext26d_32x4d,77.606,22.394,93.606,6.394,16.81,224,0.875,bicubic -repvgg_b1g4,77.588,22.412,93.830,6.170,39.97,224,0.875,bilinear -gluon_resnet50_v1b,77.584,22.416,93.720,6.280,25.56,224,0.875,bicubic -adv_inception_v3,77.578,22.422,93.738,6.262,23.83,299,0.875,bicubic -res2net50_48w_2s,77.524,22.476,93.550,6.450,25.29,224,0.875,bilinear -coat_lite_tiny,77.516,22.484,93.914,6.086,5.72,224,0.900,bicubic -tf_efficientnet_lite2,77.466,22.534,93.758,6.242,6.09,260,0.890,bicubic -eca_resnext26ts,77.458,22.542,93.568,6.432,10.30,256,0.900,bicubic -inception_v3,77.438,22.562,93.476,6.524,23.83,299,0.875,bicubic -hardcorenas_d,77.430,22.570,93.484,6.516,7.50,224,0.875,bilinear -tv_resnet101,77.380,22.620,93.544,6.456,44.55,224,0.875,bilinear -densenet161,77.354,22.646,93.636,6.364,28.68,224,0.875,bicubic -tf_efficientnet_cc_b0_4e,77.310,22.690,93.340,6.660,13.31,224,0.875,bicubic -mobilenetv2_120d,77.290,22.710,93.500,6.500,5.83,224,0.875,bicubic -densenet201,77.288,22.712,93.480,6.520,20.01,224,0.875,bicubic -cs3darknet_focus_m,77.282,22.718,93.972,6.028,9.30,288,0.950,bicubic -mixnet_m,77.262,22.738,93.422,6.578,5.01,224,0.875,bicubic -poolformer_s12,77.238,22.762,93.506,6.494,11.92,224,0.900,bicubic -selecsls42b,77.178,22.822,93.392,6.608,32.46,224,0.875,bicubic -xcit_tiny_12_p16_224,77.124,22.876,93.712,6.288,6.72,224,1.000,bicubic +xception41,78.516,21.484,94.278,5.722,26.97,299,0.903,bicubic +dla102x,78.510,21.490,94.228,5.772,26.31,224,0.875,bilinear +levit_128,78.486,21.514,94.010,5.990,9.21,224,0.900,bicubic +regnetx_040,78.482,21.518,94.244,5.756,22.12,224,0.875,bicubic +resnest26d,78.478,21.522,94.298,5.702,17.07,224,0.875,bilinear +dla60_res2net,78.464,21.536,94.206,5.794,20.85,224,0.875,bilinear +hrnet_w32,78.450,21.550,94.186,5.814,41.23,224,0.875,bilinear +dla60_res2next,78.440,21.560,94.152,5.848,17.03,224,0.875,bilinear +coat_tiny,78.434,21.566,94.038,5.962,5.50,224,0.900,bicubic +vit_tiny_patch16_384.augreg_in21k_ft_in1k,78.430,21.570,94.542,5.458,5.79,384,1.000,bicubic +selecsls60b,78.412,21.588,94.174,5.826,32.77,224,0.875,bicubic +cait_xxs24_224,78.386,21.614,94.310,5.690,11.96,224,1.000,bicubic +legacy_seresnet101,78.382,21.618,94.264,5.736,49.33,224,0.875,bilinear +repvgg_b1,78.366,21.634,94.098,5.902,57.42,224,0.875,bilinear 
+tf_efficientnetv2_b0.in1k,78.356,21.644,94.024,5.976,7.14,224,0.875,bicubic +mobilevit_s,78.312,21.688,94.146,5.854,5.58,256,0.900,bicubic +tv_resnet152,78.312,21.688,94.038,5.962,60.19,224,0.875,bilinear +dla60x,78.246,21.754,94.018,5.982,17.35,224,0.875,bilinear +res2next50,78.246,21.754,93.892,6.108,24.67,224,0.875,bilinear +bat_resnext26ts,78.242,21.758,94.100,5.900,10.73,256,0.900,bicubic +efficientnet_b1_pruned.in1k,78.236,21.764,93.834,6.166,6.33,240,0.882,bicubic +hrnet_w30,78.206,21.794,94.222,5.778,37.71,224,0.875,bilinear +pit_xs_224,78.182,21.818,94.168,5.832,10.62,224,0.900,bicubic +regnetx_032,78.172,21.828,94.088,5.912,15.30,224,0.875,bicubic +res2net50_14w_8s,78.150,21.850,93.848,6.152,25.06,224,0.875,bilinear +tf_efficientnet_em.in1k,78.130,21.870,94.044,5.956,6.90,240,0.882,bicubic +hardcorenas_f,78.104,21.896,93.802,6.198,8.20,224,0.875,bilinear +mobilevitv2_100,78.090,21.910,94.164,5.836,4.90,256,0.888,bicubic +efficientnet_es.ra_in1k,78.066,21.934,93.926,6.074,5.44,224,0.875,bicubic +gmixer_24_224,78.036,21.964,93.664,6.336,24.72,224,0.875,bicubic +dla102,78.032,21.968,93.946,6.054,33.27,224,0.875,bilinear +gluon_resnet50_v1c,78.012,21.988,93.988,6.012,25.58,224,0.875,bicubic +seresnext26t_32x4d,77.986,22.014,93.746,6.254,16.81,224,0.875,bicubic +selecsls60,77.982,22.018,93.828,6.172,30.67,224,0.875,bicubic +res2net50_26w_4s,77.964,22.036,93.854,6.146,25.70,224,0.875,bilinear +resmlp_12_distilled_224,77.944,22.056,93.558,6.442,15.35,224,0.875,bicubic +mobilenetv3_large_100.miil_in21k_ft_in1k,77.916,22.084,92.910,7.090,5.48,224,0.875,bilinear +tf_efficientnet_cc_b0_8e.in1k,77.908,22.092,93.654,6.346,24.01,224,0.875,bicubic +resnet26t,77.882,22.118,93.840,6.160,16.01,256,0.940,bicubic +seresnext26ts,77.866,22.134,93.790,6.210,10.39,256,0.900,bicubic +regnety_016,77.862,22.138,93.720,6.280,11.20,224,0.875,bicubic +tf_inception_v3,77.860,22.140,93.640,6.360,23.83,299,0.875,bicubic +rexnet_100,77.858,22.142,93.870,6.130,4.80,224,0.875,bicubic +xcit_nano_12_p8_384_dist,77.820,22.180,94.036,5.964,3.05,384,1.000,bicubic +gcresnext26ts,77.814,22.186,93.834,6.166,10.48,256,0.900,bicubic +hardcorenas_e,77.794,22.206,93.694,6.306,8.07,224,0.875,bilinear +efficientnet_b0.ra_in1k,77.698,22.302,93.532,6.468,5.29,224,0.875,bicubic +tinynet_a.in1k,77.652,22.348,93.536,6.464,6.19,192,0.875,bicubic +cs3darknet_m,77.636,22.364,94.014,5.986,9.31,288,0.950,bicubic +legacy_seresnet50,77.630,22.370,93.748,6.252,28.09,224,0.875,bilinear +tv_resnext50_32x4d,77.620,22.380,93.696,6.304,25.03,224,0.875,bilinear +seresnext26d_32x4d,77.602,22.398,93.608,6.392,16.81,224,0.875,bicubic +repvgg_b1g4,77.594,22.406,93.826,6.174,39.97,224,0.875,bilinear +adv_inception_v3,77.582,22.418,93.736,6.264,23.83,299,0.875,bicubic +gluon_resnet50_v1b,77.580,22.420,93.716,6.284,25.56,224,0.875,bicubic +res2net50_48w_2s,77.522,22.478,93.554,6.446,25.29,224,0.875,bilinear +coat_lite_tiny,77.512,22.488,93.916,6.084,5.72,224,0.900,bicubic +tf_efficientnet_lite2.in1k,77.468,22.532,93.754,6.246,6.09,260,0.890,bicubic +eca_resnext26ts,77.452,22.548,93.566,6.434,10.30,256,0.900,bicubic +inception_v3,77.440,22.560,93.476,6.524,23.83,299,0.875,bicubic +hardcorenas_d,77.432,22.568,93.484,6.516,7.50,224,0.875,bilinear +tv_resnet101,77.374,22.626,93.540,6.460,44.55,224,0.875,bilinear +densenet161,77.358,22.642,93.638,6.362,28.68,224,0.875,bicubic +tf_efficientnet_cc_b0_4e.in1k,77.306,22.694,93.334,6.666,13.31,224,0.875,bicubic +densenet201,77.286,22.714,93.478,6.522,20.01,224,0.875,bicubic 
+mobilenetv2_120d.ra_in1k,77.284,22.716,93.492,6.508,5.83,224,0.875,bicubic +cs3darknet_focus_m,77.278,22.722,93.970,6.030,9.30,288,0.950,bicubic +mixnet_m.ft_in1k,77.260,22.740,93.424,6.576,5.01,224,0.875,bicubic +poolformer_s12,77.230,22.770,93.504,6.496,11.92,224,0.900,bicubic +convnext_atto_ols.a2_in1k,77.216,22.784,93.680,6.320,3.70,288,0.950,bicubic +selecsls42b,77.174,22.826,93.390,6.610,32.46,224,0.875,bicubic +xcit_tiny_12_p16_224,77.120,22.880,93.712,6.288,6.72,224,1.000,bicubic resnet34d,77.116,22.884,93.382,6.618,21.82,224,0.875,bicubic legacy_seresnext26_32x4d,77.104,22.896,93.316,6.684,16.79,224,0.875,bicubic -tf_efficientnet_b0_ap,77.088,22.912,93.258,6.742,5.29,224,0.875,bicubic -hardcorenas_c,77.052,22.948,93.160,6.840,5.52,224,0.875,bilinear -dla60,77.022,22.978,93.320,6.680,22.04,224,0.875,bilinear -crossvit_9_dagger_240,76.978,23.022,93.614,6.386,8.78,240,0.875,bicubic -tf_mixnet_m,76.946,23.054,93.152,6.848,5.01,224,0.875,bicubic -regnetx_016,76.942,23.058,93.424,6.576,9.19,224,0.875,bicubic -convmixer_1024_20_ks9_p14,76.942,23.058,93.358,6.642,24.38,224,0.960,bicubic -gernet_s,76.916,23.084,93.134,6.866,8.17,224,0.875,bilinear -skresnet34,76.904,23.096,93.320,6.680,22.28,224,0.875,bicubic -tf_efficientnet_b0,76.840,23.160,93.218,6.782,5.29,224,0.875,bicubic -ese_vovnet19b_dw,76.794,23.206,93.266,6.734,6.54,224,0.875,bicubic -resnext26ts,76.780,23.220,93.132,6.868,10.30,256,0.900,bicubic -hrnet_w18,76.760,23.240,93.444,6.556,21.30,224,0.875,bilinear -resnet26d,76.702,23.298,93.152,6.848,16.01,224,0.875,bicubic -resmlp_12_224,76.656,23.344,93.180,6.820,15.35,224,0.875,bicubic -tf_efficientnet_lite1,76.638,23.362,93.224,6.776,5.42,240,0.882,bicubic -mixer_b16_224,76.610,23.390,92.230,7.770,59.88,224,0.875,bicubic -tf_efficientnet_es,76.598,23.402,93.204,6.796,5.44,224,0.875,bicubic -densenetblur121d,76.580,23.420,93.188,6.812,8.00,224,0.875,bicubic -hardcorenas_b,76.536,23.464,92.754,7.246,5.18,224,0.875,bilinear -levit_128s,76.514,23.486,92.870,7.130,7.78,224,0.900,bicubic -mobilenetv2_140,76.512,23.488,92.998,7.002,6.11,224,0.875,bicubic -repvgg_a2,76.460,23.540,93.010,6.990,28.21,224,0.875,bilinear -xcit_nano_12_p8_224_dist,76.328,23.672,93.094,6.906,3.05,224,1.000,bicubic -regnety_008,76.314,23.686,93.070,6.930,6.26,224,0.875,bicubic -dpn68,76.310,23.690,92.978,7.022,12.61,224,0.875,bicubic -tv_resnet50,76.134,23.866,92.868,7.132,25.56,224,0.875,bilinear -mixnet_s,75.996,24.004,92.800,7.200,4.13,224,0.875,bicubic -vit_small_patch32_224,75.990,24.010,93.268,6.732,22.88,224,0.900,bicubic -vit_tiny_r_s16_p8_384,75.952,24.048,93.262,6.738,6.36,384,1.000,bicubic -hardcorenas_a,75.930,24.070,92.510,7.490,5.26,224,0.875,bilinear -densenet169,75.904,24.096,93.024,6.976,14.15,224,0.875,bicubic -mobilenetv3_large_100,75.776,24.224,92.540,7.460,5.48,224,0.875,bicubic -tf_mixnet_s,75.652,24.348,92.626,7.374,4.13,224,0.875,bicubic -mobilenetv3_rw,75.634,24.366,92.708,7.292,5.48,224,0.875,bicubic -mobilevitv2_075,75.608,24.392,92.758,7.242,2.87,256,0.888,bicubic -densenet121,75.580,24.420,92.648,7.352,7.98,224,0.875,bicubic -tf_mobilenetv3_large_100,75.512,24.488,92.606,7.394,5.48,224,0.875,bilinear -resnest14d,75.508,24.492,92.524,7.476,10.61,224,0.875,bilinear -efficientnet_lite0,75.468,24.532,92.516,7.484,4.65,224,0.875,bicubic -vit_tiny_patch16_224,75.464,24.536,92.844,7.156,5.72,224,0.900,bicubic -xcit_nano_12_p16_384_dist,75.456,24.544,92.690,7.310,3.05,384,1.000,bicubic -semnasnet_100,75.450,24.550,92.600,7.400,3.89,224,0.875,bicubic 
-resnet26,75.300,24.700,92.580,7.420,16.00,224,0.875,bicubic -regnety_006,75.252,24.748,92.532,7.468,6.06,224,0.875,bicubic -repvgg_b0,75.154,24.846,92.416,7.584,15.82,224,0.875,bilinear -fbnetc_100,75.116,24.884,92.386,7.614,5.57,224,0.875,bilinear -resnet34,75.112,24.888,92.284,7.716,21.80,224,0.875,bilinear -hrnet_w18_small_v2,75.110,24.890,92.416,7.584,15.60,224,0.875,bilinear -mobilenetv2_110d,75.036,24.964,92.192,7.808,4.52,224,0.875,bicubic -regnetx_008,75.034,24.966,92.340,7.660,7.26,224,0.875,bicubic -efficientnet_es_pruned,75.000,25.000,92.442,7.558,5.44,224,0.875,bicubic -tinynet_b,74.974,25.026,92.182,7.818,3.73,188,0.875,bicubic -edgenext_x_small,74.864,25.136,92.300,7.700,2.34,256,0.900,bicubic -tf_efficientnet_lite0,74.832,25.168,92.174,7.826,4.65,224,0.875,bicubic -legacy_seresnet34,74.810,25.190,92.126,7.874,21.96,224,0.875,bilinear -tv_densenet121,74.740,25.260,92.148,7.852,7.98,224,0.875,bicubic -mnasnet_100,74.650,25.350,92.114,7.886,4.38,224,0.875,bicubic -mobilevit_xs,74.634,25.366,92.346,7.654,2.32,256,0.900,bicubic -dla34,74.624,25.376,92.072,7.928,15.74,224,0.875,bilinear -gluon_resnet34_v1b,74.592,25.408,91.988,8.012,21.80,224,0.875,bicubic -pit_ti_distilled_224,74.534,25.466,92.096,7.904,5.10,224,0.900,bicubic -deit_tiny_distilled_patch16_224,74.512,25.488,91.890,8.110,5.91,224,0.900,bicubic -vgg19_bn,74.214,25.786,91.844,8.156,143.68,224,0.875,bilinear -spnasnet_100,74.090,25.910,91.816,8.184,4.42,224,0.875,bilinear -regnety_004,74.024,25.976,91.756,8.244,4.34,224,0.875,bicubic -ghostnet_100,73.980,26.020,91.458,8.542,5.18,224,0.875,bilinear -crossvit_9_240,73.960,26.040,91.964,8.036,8.55,240,0.875,bicubic -xcit_nano_12_p8_224,73.916,26.084,92.168,7.832,3.05,224,1.000,bicubic -regnetx_006,73.856,26.144,91.672,8.328,6.20,224,0.875,bicubic -vit_base_patch32_224_sam,73.692,26.308,91.012,8.988,88.22,224,0.900,bicubic -tf_mobilenetv3_large_075,73.440,26.560,91.348,8.652,3.99,224,0.875,bilinear -vgg16_bn,73.350,26.650,91.504,8.496,138.37,224,0.875,bilinear -crossvit_tiny_240,73.338,26.662,91.914,8.086,7.01,240,0.875,bicubic -tv_resnet34,73.308,26.692,91.424,8.576,21.80,224,0.875,bilinear -swsl_resnet18,73.274,26.726,91.736,8.264,11.69,224,0.875,bilinear -convit_tiny,73.114,26.886,91.720,8.280,5.71,224,0.875,bicubic -skresnet18,73.034,26.966,91.166,8.834,11.96,224,0.875,bicubic -semnasnet_075,72.974,27.026,91.134,8.866,2.91,224,0.875,bicubic -mobilenetv2_100,72.956,27.044,91.010,8.990,3.50,224,0.875,bicubic -pit_ti_224,72.912,27.088,91.406,8.594,4.85,224,0.900,bicubic -ssl_resnet18,72.604,27.396,91.424,8.576,11.69,224,0.875,bilinear -regnetx_004,72.396,27.604,90.838,9.162,5.16,224,0.875,bicubic -vgg19,72.366,27.634,90.872,9.128,143.67,224,0.875,bilinear -resnet14t,72.356,27.644,90.340,9.660,10.08,224,0.950,bilinear -hrnet_w18_small,72.336,27.664,90.680,9.320,13.19,224,0.875,bilinear +tf_efficientnet_b0.ap_in1k,77.086,22.914,93.256,6.744,5.29,224,0.875,bicubic +hardcorenas_c,77.054,22.946,93.158,6.842,5.52,224,0.875,bilinear +dla60,77.032,22.968,93.318,6.682,22.04,224,0.875,bilinear +convnext_atto.d2_in1k,77.014,22.986,93.700,6.300,3.70,288,0.950,bicubic +crossvit_9_dagger_240,76.980,23.020,93.610,6.390,8.78,240,0.875,bicubic +regnetx_016,76.950,23.050,93.420,6.580,9.19,224,0.875,bicubic +convmixer_1024_20_ks9_p14,76.946,23.054,93.358,6.642,24.38,224,0.960,bicubic +tf_mixnet_m.in1k,76.942,23.058,93.152,6.848,5.01,224,0.875,bicubic +gernet_s,76.916,23.084,93.132,6.868,8.17,224,0.875,bilinear +skresnet34,76.912,23.088,93.322,6.678,22.28,224,0.875,bicubic 
+tf_efficientnet_b0.aa_in1k,76.848,23.152,93.228,6.772,5.29,224,0.875,bicubic +ese_vovnet19b_dw,76.798,23.202,93.268,6.732,6.54,224,0.875,bicubic +resnext26ts,76.780,23.220,93.130,6.870,10.30,256,0.900,bicubic +hrnet_w18,76.758,23.242,93.444,6.556,21.30,224,0.875,bilinear +resnet26d,76.696,23.304,93.150,6.850,16.01,224,0.875,bicubic +resmlp_12_224,76.654,23.346,93.180,6.820,15.35,224,0.875,bicubic +tf_efficientnet_lite1.in1k,76.642,23.358,93.226,6.774,5.42,240,0.882,bicubic +mixer_b16_224,76.600,23.400,92.228,7.772,59.88,224,0.875,bicubic +tf_efficientnet_es.in1k,76.594,23.406,93.202,6.798,5.44,224,0.875,bicubic +densenetblur121d,76.588,23.412,93.192,6.808,8.00,224,0.875,bicubic +hardcorenas_b,76.538,23.462,92.754,7.246,5.18,224,0.875,bilinear +levit_128s,76.530,23.470,92.866,7.134,7.78,224,0.900,bicubic +mobilenetv2_140.ra_in1k,76.516,23.484,92.996,7.004,6.11,224,0.875,bicubic +repvgg_a2,76.460,23.540,93.004,6.996,28.21,224,0.875,bilinear +xcit_nano_12_p8_224_dist,76.324,23.676,93.090,6.910,3.05,224,1.000,bicubic +dpn68,76.318,23.682,92.978,7.022,12.61,224,0.875,bicubic +regnety_008,76.316,23.684,93.066,6.934,6.26,224,0.875,bicubic +tv_resnet50,76.138,23.862,92.864,7.136,25.56,224,0.875,bilinear +mixnet_s.ft_in1k,75.992,24.008,92.796,7.204,4.13,224,0.875,bicubic +vit_small_patch32_224.augreg_in21k_ft_in1k,75.990,24.010,93.272,6.728,22.88,224,0.900,bicubic +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,75.952,24.048,93.260,6.740,6.36,384,1.000,bicubic +hardcorenas_a,75.916,24.084,92.514,7.486,5.26,224,0.875,bilinear +densenet169,75.906,24.094,93.026,6.974,14.15,224,0.875,bicubic +mobilenetv3_large_100.ra_in1k,75.766,24.234,92.542,7.458,5.48,224,0.875,bicubic +edgenext_x_small,75.688,24.312,92.766,7.234,2.34,288,1.000,bicubic +tf_mixnet_s.in1k,75.650,24.350,92.628,7.372,4.13,224,0.875,bicubic +mobilenetv3_rw.rmsp_in1k,75.634,24.366,92.708,7.292,5.48,224,0.875,bicubic +mobilevitv2_075,75.622,24.378,92.768,7.232,2.87,256,0.888,bicubic +densenet121,75.578,24.422,92.652,7.348,7.98,224,0.875,bicubic +tf_mobilenetv3_large_100.in1k,75.518,24.482,92.606,7.394,5.48,224,0.875,bilinear +resnest14d,75.506,24.494,92.518,7.482,10.61,224,0.875,bilinear +efficientnet_lite0.ra_in1k,75.484,24.516,92.510,7.490,4.65,224,0.875,bicubic +xcit_nano_12_p16_384_dist,75.458,24.542,92.694,7.306,3.05,384,1.000,bicubic +vit_tiny_patch16_224.augreg_in21k_ft_in1k,75.454,24.546,92.848,7.152,5.72,224,0.900,bicubic +semnasnet_100.rmsp_in1k,75.448,24.552,92.604,7.396,3.89,224,0.875,bicubic +resnet26,75.292,24.708,92.570,7.430,16.00,224,0.875,bicubic +regnety_006,75.246,24.754,92.532,7.468,6.06,224,0.875,bicubic +repvgg_b0,75.152,24.848,92.418,7.582,15.82,224,0.875,bilinear +fbnetc_100.rmsp_in1k,75.124,24.876,92.386,7.614,5.57,224,0.875,bilinear +hrnet_w18_small_v2,75.114,24.886,92.416,7.584,15.60,224,0.875,bilinear +resnet34,75.110,24.890,92.284,7.716,21.80,224,0.875,bilinear +regnetx_008,75.038,24.962,92.336,7.664,7.26,224,0.875,bicubic +mobilenetv2_110d.ra_in1k,75.036,24.964,92.186,7.814,4.52,224,0.875,bicubic +efficientnet_es_pruned.in1k,75.000,25.000,92.448,7.552,5.44,224,0.875,bicubic +tinynet_b.in1k,74.974,25.026,92.188,7.812,3.73,188,0.875,bicubic +vit_base_patch32_224.augreg_in1k,74.904,25.096,91.778,8.222,88.22,224,0.900,bicubic +tf_efficientnet_lite0.in1k,74.830,25.170,92.176,7.824,4.65,224,0.875,bicubic +legacy_seresnet34,74.808,25.192,92.124,7.876,21.96,224,0.875,bilinear +tv_densenet121,74.738,25.262,92.150,7.850,7.98,224,0.875,bicubic +mnasnet_100.rmsp_in1k,74.658,25.342,92.114,7.886,4.38,224,0.875,bicubic 
+mobilevit_xs,74.644,25.356,92.352,7.648,2.32,256,0.900,bicubic +dla34,74.630,25.370,92.078,7.922,15.74,224,0.875,bilinear +gluon_resnet34_v1b,74.588,25.412,91.990,8.010,21.80,224,0.875,bicubic +pit_ti_distilled_224,74.530,25.470,92.096,7.904,5.10,224,0.900,bicubic +deit_tiny_distilled_patch16_224,74.510,25.490,91.890,8.110,5.91,224,0.900,bicubic +vgg19_bn,74.214,25.786,91.842,8.158,143.68,224,0.875,bilinear +spnasnet_100.rmsp_in1k,74.084,25.916,91.818,8.182,4.42,224,0.875,bilinear +regnety_004,74.034,25.966,91.752,8.248,4.34,224,0.875,bicubic +ghostnet_100,73.978,26.022,91.456,8.544,5.18,224,0.875,bilinear +crossvit_9_240,73.964,26.036,91.968,8.032,8.55,240,0.875,bicubic +xcit_nano_12_p8_224,73.914,26.086,92.172,7.828,3.05,224,1.000,bicubic +regnetx_006,73.852,26.148,91.672,8.328,6.20,224,0.875,bicubic +vit_base_patch32_224.sam,73.690,26.310,91.014,8.986,88.22,224,0.900,bicubic +tf_mobilenetv3_large_075.in1k,73.438,26.562,91.350,8.650,3.99,224,0.875,bilinear +vgg16_bn,73.350,26.650,91.506,8.494,138.37,224,0.875,bilinear +crossvit_tiny_240,73.324,26.676,91.916,8.084,7.01,240,0.875,bicubic +tv_resnet34,73.312,26.688,91.426,8.574,21.80,224,0.875,bilinear +swsl_resnet18,73.276,26.724,91.734,8.266,11.69,224,0.875,bilinear +convit_tiny,73.116,26.884,91.714,8.286,5.71,224,0.875,bicubic +skresnet18,73.038,26.962,91.168,8.832,11.96,224,0.875,bicubic +semnasnet_075.rmsp_in1k,72.974,27.026,91.136,8.864,2.91,224,0.875,bicubic +mobilenetv2_100.ra_in1k,72.970,27.030,91.016,8.984,3.50,224,0.875,bicubic +pit_ti_224,72.912,27.088,91.402,8.598,4.85,224,0.900,bicubic +ssl_resnet18,72.610,27.390,91.416,8.584,11.69,224,0.875,bilinear +regnetx_004,72.396,27.604,90.830,9.170,5.16,224,0.875,bicubic +vgg19,72.368,27.632,90.872,9.128,143.67,224,0.875,bilinear +resnet14t,72.350,27.650,90.340,9.660,10.08,224,0.950,bilinear +hrnet_w18_small,72.342,27.658,90.678,9.322,13.19,224,0.875,bilinear xcit_nano_12_p16_224_dist,72.302,27.698,90.862,9.138,3.05,224,1.000,bicubic -resnet18d,72.258,27.742,90.688,9.312,11.71,224,0.875,bicubic -tf_mobilenetv3_large_minimal_100,72.250,27.750,90.620,9.380,3.92,224,0.875,bilinear -deit_tiny_patch16_224,72.174,27.826,91.114,8.886,5.72,224,0.900,bicubic -lcnet_100,72.110,27.890,90.378,9.622,2.95,224,0.875,bicubic -mixer_l16_224,72.066,27.934,87.666,12.334,208.20,224,0.875,bicubic -vit_tiny_r_s16_p8_224,71.794,28.206,90.818,9.182,6.34,224,0.900,bicubic -legacy_seresnet18,71.740,28.260,90.330,9.670,11.78,224,0.875,bicubic -vgg13_bn,71.598,28.402,90.376,9.624,133.05,224,0.875,bilinear -vgg16,71.590,28.410,90.382,9.618,138.36,224,0.875,bilinear -tinynet_c,71.228,28.772,89.748,10.252,2.46,184,0.875,bicubic -edgenext_xx_small,71.106,28.894,90.032,9.968,1.33,256,0.900,bicubic -gluon_resnet18_v1b,70.838,29.162,89.762,10.238,11.69,224,0.875,bicubic +resnet18d,72.260,27.740,90.696,9.304,11.71,224,0.875,bicubic +tf_mobilenetv3_large_minimal_100.in1k,72.248,27.752,90.630,9.370,3.92,224,0.875,bilinear +deit_tiny_patch16_224,72.168,27.832,91.118,8.882,5.72,224,0.900,bicubic +lcnet_100.ra2_in1k,72.114,27.886,90.378,9.622,2.95,224,0.875,bicubic +mixer_l16_224,72.058,27.942,87.668,12.332,208.20,224,0.875,bicubic +edgenext_xx_small,71.866,28.134,90.544,9.456,1.33,288,1.000,bicubic +vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,71.788,28.212,90.828,9.172,6.34,224,0.900,bicubic +legacy_seresnet18,71.744,28.256,90.334,9.666,11.78,224,0.875,bicubic +vgg16,71.594,28.406,90.382,9.618,138.36,224,0.875,bilinear +vgg13_bn,71.594,28.406,90.376,9.624,133.05,224,0.875,bilinear 
+tinynet_c.in1k,71.232,28.768,89.748,10.252,2.46,184,0.875,bicubic
+gluon_resnet18_v1b,70.836,29.164,89.760,10.240,11.69,224,0.875,bicubic
+pvt_v2_b0,70.656,29.344,90.208,9.792,3.67,224,0.900,bicubic
 vgg11_bn,70.360,29.640,89.802,10.198,132.87,224,0.875,bilinear
-regnety_002,70.256,29.744,89.534,10.466,3.16,224,0.875,bicubic
-mobilevitv2_050,70.140,29.860,89.930,10.070,1.37,256,0.888,bicubic
-xcit_nano_12_p16_224,69.954,30.046,89.756,10.244,3.05,224,1.000,bicubic
+regnety_002,70.252,29.748,89.540,10.460,3.16,224,0.875,bicubic
+mobilevitv2_050,70.140,29.860,89.926,10.074,1.37,256,0.888,bicubic
+xcit_nano_12_p16_224,69.954,30.046,89.754,10.246,3.05,224,1.000,bicubic
 vgg13,69.926,30.074,89.246,10.754,133.05,224,0.875,bilinear
-resnet18,69.748,30.252,89.084,10.916,11.69,224,0.875,bilinear
-vgg11,69.028,30.972,88.628,11.372,132.86,224,0.875,bilinear
-mobilevit_xxs,68.920,31.080,88.946,11.054,1.27,256,0.900,bicubic
-lcnet_075,68.814,31.186,88.364,11.636,2.36,224,0.875,bicubic
-regnetx_002,68.754,31.246,88.556,11.444,2.68,224,0.875,bicubic
-resnet10t,68.308,31.692,88.080,11.920,5.44,224,0.950,bilinear
-tf_mobilenetv3_small_100,67.926,32.074,87.668,12.332,2.54,224,0.875,bilinear
-dla60x_c,67.880,32.120,88.434,11.566,1.32,224,0.875,bilinear
-mobilenetv3_small_100,67.658,32.342,87.634,12.366,2.54,224,0.875,bicubic
-tinynet_d,66.962,33.038,87.064,12.936,2.34,152,0.875,bicubic
-mnasnet_small,66.206,33.794,86.506,13.494,2.03,224,0.875,bicubic
-dla46x_c,65.952,34.048,86.986,13.014,1.07,224,0.875,bilinear
-mobilenetv2_050,65.944,34.056,86.080,13.920,1.97,224,0.875,bicubic
-tf_mobilenetv3_small_075,65.712,34.288,86.130,13.870,2.04,224,0.875,bilinear
-mobilenetv3_small_075,65.238,34.762,85.440,14.560,2.04,224,0.875,bicubic
-dla46_c,64.872,35.128,86.302,13.698,1.30,224,0.875,bilinear
-lcnet_050,63.094,36.906,84.382,15.618,1.88,224,0.875,bicubic
-tf_mobilenetv3_small_minimal_100,62.900,37.100,84.234,15.766,2.04,224,0.875,bilinear
-tinynet_e,59.856,40.144,81.766,18.234,2.04,106,0.875,bicubic
-mobilenetv3_small_050,57.890,42.110,80.194,19.806,1.59,224,0.875,bicubic
+resnet18,69.748,30.252,89.078,10.922,11.69,224,0.875,bilinear
+vgg11,69.024,30.976,88.628,11.372,132.86,224,0.875,bilinear
+mobilevit_xxs,68.912,31.088,88.938,11.062,1.27,256,0.900,bicubic
+lcnet_075.ra2_in1k,68.818,31.182,88.370,11.630,2.36,224,0.875,bicubic
+regnetx_002,68.762,31.238,88.556,11.444,2.68,224,0.875,bicubic
+resnet10t,68.294,31.706,88.078,11.922,5.44,224,0.950,bilinear
+tf_mobilenetv3_small_100.in1k,67.922,32.078,87.664,12.336,2.54,224,0.875,bilinear
+dla60x_c,67.892,32.108,88.426,11.574,1.32,224,0.875,bilinear
+mobilenetv3_small_100.lamb_in1k,67.652,32.348,87.636,12.364,2.54,224,0.875,bicubic
+tinynet_d.in1k,66.962,33.038,87.066,12.934,2.34,152,0.875,bicubic
+mnasnet_small.lamb_in1k,66.206,33.794,86.508,13.492,2.03,224,0.875,bicubic
+dla46x_c,65.970,34.030,86.980,13.020,1.07,224,0.875,bilinear
+mobilenetv2_050.lamb_in1k,65.942,34.058,86.082,13.918,1.97,224,0.875,bicubic
+tf_mobilenetv3_small_075.in1k,65.716,34.284,86.130,13.870,2.04,224,0.875,bilinear
+mobilenetv3_small_075.lamb_in1k,65.246,34.754,85.436,14.564,2.04,224,0.875,bicubic
+dla46_c,64.866,35.134,86.292,13.708,1.30,224,0.875,bilinear
+lcnet_050.ra2_in1k,63.100,36.900,84.380,15.620,1.88,224,0.875,bicubic
+tf_mobilenetv3_small_minimal_100.in1k,62.906,37.094,84.230,15.770,2.04,224,0.875,bilinear
+tinynet_e.in1k,59.856,40.144,81.762,18.238,2.04,106,0.875,bicubic
+mobilenetv3_small_050.lamb_in1k,57.890,42.110,80.194,19.806,1.59,224,0.875,bicubic
diff --git a/results/results-imagenetv2-matched-frequency.csv b/results/results-imagenetv2-matched-frequency.csv
index 131dd32e..71aa3f5a 100644
--- a/results/results-imagenetv2-matched-frequency.csv
+++ b/results/results-imagenetv2-matched-frequency.csv
@@ -1,669 +1,791 @@
 model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation,top1_diff,top5_diff,rank_diff
-tf_efficientnet_l2_ns_475,80.460,19.540,95.730,4.270,480.31,475,0.936,bicubic,-7.772,-2.816,+3
-tf_efficientnet_l2_ns,80.250,19.750,95.840,4.160,480.31,800,0.960,bicubic,-8.100,-2.810,+1
-beit_large_patch16_512,79.940,20.060,95.350,4.650,305.67,512,1.000,bicubic,-8.662,-3.306,-2
-beit_large_patch16_384,79.500,20.500,95.170,4.830,305.00,384,1.000,bicubic,-8.906,-3.436,-2
-deit3_huge_patch14_224_in21ft1k,79.160,20.840,94.860,5.140,632.13,224,1.000,bicubic,-8.020,-3.400,+5
-deit3_large_patch16_384_in21ft1k,79.100,20.900,94.880,5.120,304.76,384,1.000,bicubic,-8.616,-3.632,-1
-beit_large_patch16_224,78.820,21.180,94.610,5.390,304.43,224,0.900,bicubic,-8.656,-3.694,0
-deit3_large_patch16_224_in21ft1k,78.630,21.370,94.720,5.280,304.37,224,1.000,bicubic,-8.352,-3.518,+8
-tf_efficientnet_b7_ns,78.520,21.480,94.390,5.610,66.35,600,0.949,bicubic,-8.312,-3.706,+10
-volo_d5_512,77.970,22.030,94.160,5.840,296.09,512,1.150,bicubic,-9.070,-3.808,+4
-vit_large_patch16_384,77.940,22.060,94.440,5.560,304.72,384,1.000,bicubic,-9.140,-3.860,+2
-deit3_base_patch16_384_in21ft1k,77.880,22.120,94.030,5.970,86.88,384,1.000,bicubic,-8.862,-4.082,+10
-volo_d5_448,77.770,22.230,94.050,5.950,295.91,448,1.150,bicubic,-9.184,-3.890,+4
-volo_d4_448,77.750,22.250,93.930,6.070,193.41,448,1.150,bicubic,-9.042,-3.952,+7
-convnext_large_384_in22ft1k,77.730,22.270,94.080,5.920,197.77,384,1.000,bicubic,-9.666,-4.286,-6
-convnext_xlarge_384_in22ft1k,77.710,22.290,94.200,5.800,350.20,384,1.000,bicubic,-9.834,-4.286,-10
-swinv2_large_window12to24_192to384_22kft1k,77.310,22.690,93.930,6.070,196.74,384,1.000,bicubic,-10.146,-4.322,-9
-tf_efficientnet_b6_ns,77.280,22.720,93.890,6.110,43.04,528,0.942,bicubic,-9.170,-3.996,+9
-swinv2_base_window12to24_192to384_22kft1k,77.170,22.830,94.260,5.740,87.92,384,1.000,bicubic,-9.938,-3.976,-7
-volo_d3_448,77.070,22.930,94.110,5.890,86.63,448,1.000,bicubic,-9.426,-3.600,+5
-vit_large_r50_s32_384,77.070,22.930,93.720,6.280,329.09,384,1.000,bicubic,-9.110,-4.200,+12
-tf_efficientnetv2_xl_in21ft1k,77.040,22.960,93.270,6.730,208.12,512,1.000,bicubic,-9.380,-4.598,+7
-swin_large_patch4_window12_384,77.030,22.970,93.750,6.250,196.74,384,1.000,bicubic,-10.122,-4.490,-12
-tf_efficientnetv2_l_in21ft1k,76.940,23.060,93.960,6.040,118.52,480,1.000,bicubic,-9.364,-4.020,+7
-swinv2_large_window12to16_192to256_22kft1k,76.930,23.070,93.540,6.460,196.74,256,0.900,bicubic,-10.016,-4.570,-7
-beit_base_patch16_384,76.900,23.100,93.910,6.090,86.74,384,1.000,bicubic,-9.898,-4.226,-6
-ig_resnext101_32x48d,76.880,23.120,93.310,6.690,828.41,224,0.875,bilinear,-8.556,-4.266,+30
-cait_m48_448,76.870,23.130,93.370,6.630,356.46,448,1.000,bicubic,-9.618,-4.380,-2
-ig_resnext101_32x32d,76.820,23.180,93.200,6.800,468.53,224,0.875,bilinear,-8.280,-4.234,+42
-tf_efficientnet_b5_ns,76.810,23.190,93.580,6.420,30.39,456,0.934,bicubic,-9.278,-4.172,+5
-convnext_xlarge_in22ft1k,76.770,23.230,93.550,6.450,350.20,224,0.875,bicubic,-10.232,-4.662,-16
-deit3_large_patch16_384,76.690,23.310,93.350,6.650,304.76,384,1.000,bicubic,-9.116,-4.246,+14
-xcit_large_24_p8_384_dist,76.620,23.380,93.090,6.910,188.93,384,1.000,bicubic,-9.378,-4.594,+7
-convnext_base_384_in22ft1k,76.580,23.420,93.720,6.280,88.59,384,1.000,bicubic,-9.962,-4.470,-10 -volo_d5_224,76.580,23.420,93.300,6.700,295.46,224,0.960,bicubic,-9.490,-4.278,+1 -deit3_base_patch16_224_in21ft1k,76.540,23.460,93.560,6.440,86.59,224,1.000,bicubic,-9.176,-4.184,+14 -vit_base_patch16_384,76.480,23.520,93.770,6.230,86.86,384,1.000,bicubic,-9.526,-4.234,+2 -swinv2_base_window12to16_192to256_22kft1k,76.430,23.570,93.690,6.310,87.92,256,0.900,bicubic,-9.840,-4.206,-6 -convnext_large_in22ft1k,76.430,23.570,93.470,6.530,197.77,224,0.875,bicubic,-10.206,-4.558,-16 -cait_m36_384,76.330,23.670,93.050,6.950,271.22,384,1.000,bicubic,-9.724,-4.680,-3 -vit_large_patch16_224,76.300,23.700,93.600,6.400,304.33,224,0.900,bicubic,-9.544,-4.222,+1 -swin_base_patch4_window12_384,76.290,23.710,93.320,6.680,87.90,384,1.000,bicubic,-10.142,-4.736,-14 -tf_efficientnetv2_l,76.280,23.720,92.970,7.030,118.52,480,1.000,bicubic,-9.208,-4.402,+12 -swin_large_patch4_window7_224,76.270,23.730,93.410,6.590,196.53,224,0.900,bicubic,-10.050,-4.482,-14 -cait_s36_384,76.210,23.790,92.970,7.030,68.37,384,1.000,bicubic,-9.250,-4.508,+11 -xcit_medium_24_p8_384_dist,76.140,23.860,92.980,7.020,84.32,384,1.000,bicubic,-9.676,-4.612,-2 -dm_nfnet_f6,76.130,23.870,93.110,6.890,438.36,576,0.956,bicubic,-10.012,-4.620,-13 -tf_efficientnet_b7_ap,76.100,23.900,92.970,7.030,66.35,600,0.949,bicubic,-9.020,-4.282,+22 -volo_d2_384,76.090,23.910,93.130,6.870,58.87,384,1.000,bicubic,-9.946,-4.444,-11 -tf_efficientnet_b8_ap,76.080,23.920,92.730,7.270,87.41,672,0.954,bicubic,-9.292,-4.564,+12 -vit_base_patch8_224,76.010,23.990,93.370,6.630,86.58,224,0.900,bicubic,-9.780,-4.422,-4 -volo_d4_224,76.010,23.990,93.010,6.990,192.96,224,0.960,bicubic,-9.866,-4.458,-11 -xcit_large_24_p8_224_dist,75.990,24.010,92.730,7.270,188.93,224,1.000,bicubic,-9.408,-4.680,+8 -tf_efficientnetv2_m_in21ft1k,75.920,24.080,93.280,6.720,54.14,480,1.000,bicubic,-9.666,-4.466,-2 -dm_nfnet_f4,75.850,24.150,92.970,7.030,316.07,512,0.951,bicubic,-9.864,-4.550,-4 -xcit_large_24_p16_384_dist,75.820,24.180,92.750,7.250,189.10,384,1.000,bicubic,-9.932,-4.788,-8 -deit3_huge_patch14_224,75.790,24.210,92.760,7.240,632.13,224,0.900,bicubic,-9.416,-4.598,+10 -xcit_small_24_p8_384_dist,75.770,24.230,92.970,7.030,47.63,384,1.000,bicubic,-9.784,-4.602,-5 -ig_resnext101_32x16d,75.750,24.250,92.880,7.120,194.03,224,0.875,bilinear,-8.420,-4.318,+63 -tf_efficientnet_b4_ns,75.670,24.330,93.050,6.950,19.34,380,0.922,bicubic,-9.490,-4.420,+9 -volo_d1_384,75.620,24.380,93.060,6.940,26.78,384,1.000,bicubic,-9.630,-4.154,+4 -volo_d3_224,75.610,24.390,93.000,7.000,86.33,224,0.960,bicubic,-9.802,-4.280,-2 -convnext_base_in22ft1k,75.580,24.420,93.130,6.870,88.59,224,0.875,bicubic,-10.244,-4.736,-20 -vit_base_r50_s16_384,75.580,24.420,92.790,7.210,98.95,384,1.000,bicubic,-9.396,-4.500,+17 +eva_giant_patch14_336.clip_ft_in1k,82.190,17.810,96.280,3.720,"1,013.01",336,1.000,bicubic,-7.286,-2.544,+2 +eva_giant_patch14_560.m30m_ft_in22k_in1k,82.050,17.950,96.440,3.560,"1,014.45",560,1.000,bicubic,-7.746,-2.552,-1 +eva_giant_patch14_336.m30m_ft_in22k_in1k,81.840,18.160,96.290,3.710,"1,013.01",336,1.000,bicubic,-7.728,-2.662,-1 +eva_giant_patch14_224.clip_ft_in1k,81.590,18.410,96.140,3.860,"1,012.56",224,1.000,bicubic,-7.510,-2.576,+1 +eva_large_patch14_336.in22k_ft_in1k,81.180,18.820,95.880,4.120,304.53,336,1.000,bicubic,-7.484,-2.840,+1 +eva_large_patch14_336.in22k_ft_in22k_in1k,80.940,19.060,96.010,3.990,304.53,336,1.000,bicubic,-8.264,-2.840,-2 
+vit_large_patch14_clip_336.openai_ft_in12k_in1k,80.530,19.470,95.500,4.500,304.53,336,1.000,bicubic,-7.736,-3.032,+8 +tf_efficientnet_l2.ns_jft_in1k_475,80.460,19.540,95.730,4.270,480.31,475,0.936,bicubic,-7.774,-2.816,+9 +beitv2_large_patch16_224.in1k_ft_in22k_in1k,80.260,19.740,95.160,4.840,304.43,224,0.950,bicubic,-8.126,-3.438,+3 +tf_efficientnet_l2.ns_jft_in1k,80.250,19.750,95.840,4.160,480.31,800,0.960,bicubic,-8.102,-2.810,+3 +eva_large_patch14_196.in22k_ft_in1k,80.170,19.830,95.450,4.550,304.14,196,1.000,bicubic,-7.768,-3.042,+12 +maxvit_base_tf_512.in21k_ft_in1k,80.160,19.840,95.490,4.510,119.88,512,1.000,bicubic,-8.052,-3.042,+6 +eva_large_patch14_196.in22k_ft_in22k_in1k,80.160,19.840,95.390,4.610,304.14,196,1.000,bicubic,-8.426,-3.266,-4 +maxvit_xlarge_tf_512.in21k_ft_in1k,80.100,19.900,95.490,4.510,475.77,512,1.000,bicubic,-8.438,-3.154,-4 +maxvit_large_tf_512.in21k_ft_in1k,79.990,20.010,95.150,4.850,212.33,512,1.000,bicubic,-8.228,-3.448,+3 +beit_large_patch16_512.in22k_ft_in22k_in1k,79.940,20.060,95.350,4.650,305.67,512,1.000,bicubic,-8.658,-3.306,-9 +maxvit_xlarge_tf_384.in21k_ft_in1k,79.690,20.310,95.160,4.840,475.32,384,1.000,bicubic,-8.616,-3.384,-3 +maxvit_large_tf_384.in21k_ft_in1k,79.600,20.400,95.080,4.920,212.03,384,1.000,bicubic,-8.392,-3.486,+4 +vit_large_patch14_clip_224.openai_ft_in1k,79.590,20.410,94.990,5.010,304.20,224,1.000,bicubic,-8.262,-3.438,+7 +vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,79.530,20.470,94.990,5.010,632.46,336,1.000,bicubic,-9.044,-3.670,-11 +beit_large_patch16_384.in22k_ft_in22k_in1k,79.500,20.500,95.180,4.820,305.00,384,1.000,bicubic,-8.904,-3.428,-10 +vit_large_patch14_clip_224.openai_ft_in12k_in1k,79.390,20.610,95.070,4.930,304.20,224,1.000,bicubic,-8.778,-3.474,-1 +vit_huge_patch14_clip_224.laion2b_ft_in1k,79.370,20.630,94.920,5.080,632.05,224,1.000,bicubic,-8.224,-3.300,+7 +maxvit_base_tf_384.in21k_ft_in1k,79.340,20.660,95.080,4.920,119.65,384,1.000,bicubic,-8.582,-3.462,0 +vit_large_patch14_clip_336.laion2b_ft_in1k,79.230,20.770,94.980,5.020,304.53,336,1.000,bicubic,-8.618,-3.390,+2 +vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,79.220,20.780,95.090,4.910,632.05,224,1.000,bicubic,-9.026,-3.460,-10 +deit3_huge_patch14_224_in21ft1k,79.160,20.840,94.860,5.140,632.13,224,1.000,bicubic,-8.024,-3.400,+10 +deit3_large_patch16_384_in21ft1k,79.090,20.910,94.880,5.120,304.76,384,1.000,bicubic,-8.626,-3.632,+1 +vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,78.990,21.010,94.910,5.090,304.53,336,1.000,bicubic,-9.192,-3.662,-9 +beit_large_patch16_224.in22k_ft_in22k_in1k,78.820,21.180,94.600,5.400,304.43,224,0.900,bicubic,-8.656,-3.704,+1 +deit3_large_patch16_224_in21ft1k,78.630,21.370,94.720,5.280,304.37,224,1.000,bicubic,-8.348,-3.518,+13 +vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,78.510,21.490,94.640,5.360,304.20,224,1.000,bicubic,-9.380,-3.770,-8 +tf_efficientnet_b7.ns_jft_in1k,78.510,21.490,94.380,5.620,66.35,600,0.949,bicubic,-8.330,-3.714,+15 +vit_large_patch14_clip_224.laion2b_ft_in1k,78.410,21.590,94.570,5.430,304.20,224,1.000,bicubic,-8.882,-3.676,+1 +volo_d5_512,77.970,22.030,94.170,5.830,296.09,512,1.150,bicubic,-9.074,-3.798,+6 +convnext_xlarge.fb_in22k_ft_in1k_384,77.960,22.040,94.460,5.540,350.20,384,1.000,bicubic,-9.788,-4.094,-8 +vit_large_patch16_384.augreg_in21k_ft_in1k,77.940,22.060,94.450,5.550,304.72,384,1.000,bicubic,-9.140,-3.850,+3 +deit3_base_patch16_384_in21ft1k,77.890,22.110,94.030,5.970,86.88,384,1.000,bicubic,-8.854,-4.082,+15 
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,77.790,22.210,94.170,5.830,86.86,384,1.000,bicubic,-9.428,-3.864,-3 +volo_d5_448,77.770,22.230,94.050,5.950,295.91,448,1.150,bicubic,-9.184,-3.888,+5 +volo_d4_448,77.750,22.250,93.930,6.070,193.41,448,1.150,bicubic,-9.040,-3.952,+10 +tf_efficientnetv2_xl.in21k_ft_in1k,77.640,22.360,93.970,6.030,208.12,512,1.000,bicubic,-9.108,-4.048,+10 +tf_efficientnetv2_l.in21k_ft_in1k,77.580,22.420,94.280,5.720,118.52,480,1.000,bicubic,-9.226,-3.854,+5 +maxvit_base_tf_512.in1k,77.460,22.540,93.960,6.040,119.88,512,1.000,bicubic,-9.138,-3.960,+11 +convnext_large.fb_in22k_ft_in1k_384,77.420,22.580,94.200,5.800,197.77,384,1.000,bicubic,-10.052,-4.186,-13 +vit_base_patch16_clip_384.laion2b_ft_in1k,77.340,22.660,93.860,6.140,86.86,384,1.000,bicubic,-9.280,-4.150,+8 +swinv2_large_window12to24_192to384_22kft1k,77.310,22.690,93.930,6.070,196.74,384,1.000,bicubic,-10.148,-4.322,-14 +tf_efficientnet_b6.ns_jft_in1k,77.280,22.720,93.890,6.110,43.04,528,0.942,bicubic,-9.172,-3.992,+11 +maxvit_large_tf_512.in1k,77.280,22.720,93.780,6.220,212.33,512,1.000,bicubic,-9.238,-4.104,+8 +swinv2_base_window12to24_192to384_22kft1k,77.180,22.820,94.260,5.740,87.92,384,1.000,bicubic,-9.928,-3.976,-11 +maxvit_large_tf_384.in1k,77.120,22.880,93.460,6.540,212.03,384,1.000,bicubic,-9.116,-4.230,+15 +beitv2_base_patch16_224.in1k_ft_in22k_in1k,77.090,22.910,94.020,5.980,86.53,224,0.900,bicubic,-9.390,-4.028,+7 +volo_d3_448,77.070,22.930,94.110,5.890,86.63,448,1.000,bicubic,-9.424,-3.600,+4 +vit_large_r50_s32_384.augreg_in21k_ft_in1k,77.060,22.940,93.720,6.280,329.09,384,1.000,bicubic,-9.124,-4.198,+15 +swin_large_patch4_window12_384,77.040,22.960,93.750,6.250,196.74,384,1.000,bicubic,-10.108,-4.484,-17 +vit_base_patch16_clip_384.openai_ft_in1k,76.960,23.040,93.750,6.250,86.86,384,1.000,bicubic,-9.246,-4.124,+12 +swinv2_large_window12to16_192to256_22kft1k,76.930,23.070,93.530,6.470,196.74,256,0.900,bicubic,-10.006,-4.578,-11 +beit_base_patch16_384.in22k_ft_in22k_in1k,76.890,23.110,93.910,6.090,86.74,384,1.000,bicubic,-9.910,-4.228,-9 +tf_efficientnetv2_m.in21k_ft_in1k,76.890,23.110,93.650,6.350,54.14,480,1.000,bicubic,-9.114,-4.292,+19 +cait_m48_448,76.870,23.130,93.370,6.630,356.46,448,1.000,bicubic,-9.614,-4.384,-2 +ig_resnext101_32x48d,76.870,23.130,93.310,6.690,828.41,224,0.875,bilinear,-8.558,-4.262,+42 +vit_base_patch16_clip_384.openai_ft_in12k_in1k,76.850,23.150,93.790,6.210,86.86,384,0.950,bicubic,-10.184,-4.390,-20 +ig_resnext101_32x32d,76.840,23.160,93.200,6.800,468.53,224,0.875,bilinear,-8.254,-4.238,+64 +tf_efficientnet_b5.ns_jft_in1k,76.810,23.190,93.580,6.420,30.39,456,0.934,bicubic,-9.278,-4.172,+9 +maxvit_base_tf_384.in1k,76.770,23.230,93.420,6.580,119.65,384,1.000,bicubic,-9.524,-4.384,-2 +convnext_large.fb_in22k_ft_in1k,76.730,23.270,93.710,6.290,197.77,288,1.000,bicubic,-10.286,-4.496,-23 +deit3_large_patch16_384,76.690,23.310,93.350,6.650,304.76,384,1.000,bicubic,-9.120,-4.246,+19 +convnext_base.fb_in22k_ft_in1k_384,76.640,23.360,93.700,6.300,88.59,384,1.000,bicubic,-10.154,-4.564,-18 +xcit_large_24_p8_384_dist,76.630,23.370,93.090,6.910,188.93,384,1.000,bicubic,-9.370,-4.596,+10 +convnext_xlarge.fb_in22k_ft_in1k,76.620,23.380,93.850,6.150,350.20,288,1.000,bicubic,-10.718,-4.478,-36 +volo_d5_224,76.580,23.420,93.300,6.700,295.46,224,0.960,bicubic,-9.488,-4.278,+3 +vit_base_patch8_224.augreg2_in21k_ft_in1k,76.570,23.430,93.330,6.670,86.58,224,0.900,bicubic,-9.642,-4.502,-5 
+deit3_base_patch16_224_in21ft1k,76.550,23.450,93.560,6.440,86.59,224,1.000,bicubic,-9.164,-4.184,+18 +vit_base_patch16_384.augreg_in21k_ft_in1k,76.500,23.500,93.750,6.250,86.86,384,1.000,bicubic,-9.506,-4.250,+3 +maxvit_small_tf_512.in1k,76.490,23.510,93.390,6.610,69.13,512,1.000,bicubic,-9.598,-4.368,-3 +swinv2_base_window12to16_192to256_22kft1k,76.450,23.550,93.670,6.330,87.92,256,0.900,bicubic,-9.824,-4.226,-11 +cait_m36_384,76.320,23.680,93.050,6.950,271.22,384,1.000,bicubic,-9.734,-4.680,-2 +vit_large_patch16_224.augreg_in21k_ft_in1k,76.290,23.710,93.600,6.400,304.33,224,0.900,bicubic,-9.552,-4.224,+5 +swin_base_patch4_window12_384,76.280,23.720,93.320,6.680,87.90,384,1.000,bicubic,-10.152,-4.738,-18 +swin_large_patch4_window7_224,76.270,23.730,93.420,6.580,196.53,224,0.900,bicubic,-10.050,-4.476,-18 +cait_s36_384,76.210,23.790,92.970,7.030,68.37,384,1.000,bicubic,-9.250,-4.510,+21 +xcit_medium_24_p8_384_dist,76.140,23.860,92.980,7.020,84.32,384,1.000,bicubic,-9.676,-4.612,+2 +dm_nfnet_f6,76.130,23.870,93.110,6.890,438.36,576,0.956,bicubic,-10.014,-4.620,-12 +flexivit_large.1200ep_in1k,76.100,23.900,93.010,6.990,304.36,240,0.950,bicubic,-9.544,-4.532,+11 +tf_efficientnet_b7.ap_in1k,76.090,23.910,92.970,7.030,66.35,600,0.949,bicubic,-9.030,-4.282,+38 +tf_efficientnet_b8.ap_in1k,76.090,23.910,92.730,7.270,87.41,672,0.954,bicubic,-9.280,-4.660,+24 +volo_d2_384,76.080,23.920,93.130,6.870,58.87,384,1.000,bicubic,-9.956,-4.442,-11 +maxvit_small_tf_384.in1k,76.070,23.930,92.620,7.380,69.02,384,1.000,bicubic,-9.464,-4.844,+11 +maxvit_tiny_tf_512.in1k,76.050,23.950,93.160,6.840,31.05,512,1.000,bicubic,-9.612,-4.420,+5 +flexivit_large.600ep_in1k,76.040,23.960,92.960,7.040,304.36,240,0.950,bicubic,-9.498,-4.532,+7 +vit_base_patch8_224.augreg_in21k_ft_in1k,76.010,23.990,93.380,6.620,86.58,224,0.900,bicubic,-9.786,-4.410,-4 +tf_efficientnetv2_l.in1k,76.000,24.000,93.080,6.920,118.52,480,1.000,bicubic,-9.670,-4.394,+1 +volo_d4_224,76.000,24.000,93.000,7.000,192.96,224,0.960,bicubic,-9.872,-4.468,-11 +xcit_large_24_p8_224_dist,75.990,24.010,92.730,7.270,188.93,224,1.000,bicubic,-9.406,-4.680,+13 +flexivit_large.300ep_in1k,75.930,24.070,92.650,7.350,304.36,240,0.950,bicubic,-9.350,-4.790,+17 +convnext_base.fb_in22k_ft_in1k,75.910,24.090,93.580,6.420,88.59,288,1.000,bicubic,-10.370,-4.510,-32 +dm_nfnet_f4,75.850,24.150,92.950,7.050,316.07,512,0.951,bicubic,-9.864,-4.570,-5 +xcit_large_24_p16_384_dist,75.820,24.180,92.750,7.250,189.10,384,1.000,bicubic,-9.934,-4.788,-8 +deit3_huge_patch14_224,75.790,24.210,92.760,7.240,632.13,224,0.900,bicubic,-9.414,-4.598,+21 +xcit_small_24_p8_384_dist,75.770,24.230,92.980,7.020,47.63,384,1.000,bicubic,-9.786,-4.592,-4 +vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,75.740,24.260,92.750,7.250,86.57,224,0.950,bicubic,-10.430,-5.004,-31 +ig_resnext101_32x16d,75.720,24.280,92.910,7.090,194.03,224,0.875,bilinear,-8.450,-4.286,+89 +efficientnet_b5.in12k_ft_in1k,75.680,24.320,93.040,6.960,30.39,448,1.000,bicubic,-10.208,-4.692,-22 +tf_efficientnet_b4.ns_jft_in1k,75.670,24.330,93.050,6.950,19.34,380,0.922,bicubic,-9.492,-4.420,+18 +vit_medium_patch16_gap_384.in12k_ft_in1k,75.660,24.340,92.970,7.030,39.03,384,0.950,bicubic,-9.876,-4.664,-7 +volo_d1_384,75.610,24.390,93.070,6.930,26.78,384,1.000,bicubic,-9.640,-4.126,+9 +volo_d3_224,75.610,24.390,93.000,7.000,86.33,224,0.960,bicubic,-9.798,-4.280,-1 +vit_base_patch16_clip_224.openai_ft_in1k,75.590,24.410,92.970,7.030,86.57,224,0.900,bicubic,-9.690,-4.436,+3 
+vit_base_r50_s16_384.orig_in21k_ft_in1k,75.590,24.410,92.790,7.210,98.95,384,1.000,bicubic,-9.382,-4.498,+27 deit_base_distilled_patch16_384,75.550,24.450,92.500,7.500,87.63,384,1.000,bicubic,-9.872,-4.832,-6 -tf_efficientnetv2_m,75.520,24.480,92.620,7.380,54.14,480,1.000,bicubic,-9.516,-4.658,+12 -regnetz_e8,75.490,24.510,92.710,7.290,57.70,320,1.000,bicubic,-9.540,-4.554,+12 -cait_s24_384,75.480,24.520,92.600,7.400,47.06,384,1.000,bicubic,-9.570,-4.748,+9 -xcit_medium_24_p8_224_dist,75.470,24.530,92.900,7.100,84.32,224,1.000,bicubic,-9.600,-4.380,+6 -swsl_resnext101_32x8d,75.420,24.580,92.750,7.250,88.79,224,0.875,bilinear,-8.870,-4.432,+42 -tf_efficientnet_b6_ap,75.380,24.620,92.440,7.560,43.04,528,0.942,bicubic,-9.406,-4.698,+18 -beit_base_patch16_224,75.370,24.630,93.040,6.960,86.53,224,0.900,bicubic,-9.858,-4.616,-6 -volo_d2_224,75.300,24.700,92.510,7.490,58.68,224,0.960,bicubic,-9.894,-4.678,-5 -dm_nfnet_f3,75.200,24.800,92.940,7.060,254.92,416,0.940,bicubic,-10.322,-4.522,-20 -efficientnetv2_rw_m,75.160,24.840,92.570,7.430,53.24,416,1.000,bicubic,-9.652,-4.576,+13 -deit3_large_patch16_224,75.140,24.860,92.280,7.720,304.37,224,0.900,bicubic,-9.622,-4.758,+14 -ecaresnet269d,75.120,24.880,92.840,7.160,102.09,352,1.000,bicubic,-9.854,-4.386,+5 -xcit_medium_24_p16_384_dist,75.110,24.890,92.440,7.560,84.40,384,1.000,bicubic,-10.312,-4.966,-20 -deit3_small_patch16_384_in21ft1k,75.090,24.910,92.800,7.200,22.21,384,1.000,bicubic,-9.734,-4.684,+8 -convnext_small_384_in22ft1k,75.050,24.950,93.010,6.990,50.22,384,1.000,bicubic,-10.674,-4.854,-31 -dm_nfnet_f5,75.010,24.990,92.600,7.400,377.21,544,0.954,bicubic,-10.806,-4.886,-36 -xcit_small_24_p8_224_dist,74.980,25.020,92.300,7.700,47.63,224,1.000,bicubic,-9.896,-4.888,+4 -tf_efficientnet_b8,74.930,25.070,92.320,7.680,87.41,672,0.954,bicubic,-10.438,-5.072,-20 -xcit_small_12_p8_384_dist,74.860,25.140,92.460,7.540,26.21,384,1.000,bicubic,-10.220,-4.820,-11 -eca_nfnet_l2,74.830,25.170,92.650,7.350,56.72,384,1.000,bicubic,-9.866,-4.614,+8 -deit3_base_patch16_384,74.790,25.210,92.240,7.760,86.88,384,1.000,bicubic,-10.286,-5.014,-12 -tf_efficientnet_b7,74.720,25.280,92.220,7.780,66.35,600,0.949,bicubic,-10.214,-4.986,-4 -xcit_large_24_p16_224_dist,74.670,25.330,91.860,8.140,189.10,224,1.000,bicubic,-10.250,-5.272,-4 -dm_nfnet_f2,74.620,25.380,92.250,7.750,193.78,352,0.920,bicubic,-10.446,-4.992,-13 -tf_efficientnet_b5_ap,74.590,25.410,91.990,8.010,30.39,456,0.934,bicubic,-9.664,-4.988,+26 -xcit_small_24_p16_384_dist,74.580,25.420,92.450,7.550,47.67,384,1.000,bicubic,-10.508,-4.858,-19 -dm_nfnet_f1,74.570,25.430,92.260,7.740,132.63,320,0.910,bicubic,-10.054,-4.838,+2 -swin_base_patch4_window7_224,74.540,25.460,92.560,7.440,87.77,224,0.900,bicubic,-10.710,-5.002,-29 -seresnet152d,74.520,25.480,92.080,7.920,66.84,320,1.000,bicubic,-9.844,-4.964,+14 -regnetz_040,74.460,25.540,91.900,8.100,27.12,320,1.000,bicubic,-9.776,-5.032,+22 -resnest200e,74.460,25.540,91.860,8.140,70.20,320,0.909,bicubic,-9.368,-5.032,+49 -tf_efficientnetv2_s_in21ft1k,74.450,25.550,92.500,7.500,21.46,384,1.000,bicubic,-9.846,-4.754,+13 -regnetz_040h,74.440,25.560,92.240,7.760,28.94,320,1.000,bicubic,-10.056,-4.766,+3 -resnetrs200,74.360,25.640,91.940,8.060,93.21,320,1.000,bicubic,-10.080,-5.140,+4 -seresnextaa101d_32x8d,74.320,25.680,91.720,8.280,93.59,288,1.000,bicubic,-10.252,-5.350,-4 -convnext_small_in22ft1k,74.210,25.790,92.550,7.450,50.22,224,0.875,bicubic,-10.358,-4.846,-4 -seresnext101d_32x8d,74.210,25.790,91.860,8.140,93.59,288,1.000,bicubic,-10.152,-5.058,+7 
-efficientnetv2_rw_s,74.180,25.820,91.710,8.290,23.94,384,1.000,bicubic,-9.630,-5.014,+44 -resnest269e,74.170,25.830,91.930,8.070,110.93,416,0.928,bicubic,-10.348,-5.056,-5 -cait_xs24_384,74.170,25.830,91.910,8.090,26.67,384,1.000,bicubic,-9.894,-4.980,+22 -pit_b_distilled_224,74.160,25.840,91.660,8.340,74.79,224,0.900,bicubic,-9.982,-5.196,+18 -swsl_resnext101_32x4d,74.140,25.860,91.990,8.010,44.18,224,0.875,bilinear,-9.100,-4.770,+68 -vit_large_r50_s32_224,74.120,25.880,92.380,7.620,328.99,224,0.900,bicubic,-10.310,-4.786,-3 -eca_nfnet_l1,74.120,25.880,92.070,7.930,41.41,320,1.000,bicubic,-9.892,-4.962,+25 -xcit_small_12_p16_384_dist,74.120,25.880,92.070,7.930,26.25,384,1.000,bicubic,-10.588,-5.046,-18 -volo_d1_224,74.120,25.880,92.030,7.970,26.63,224,0.960,bicubic,-10.044,-4.744,+12 -convnext_large,74.070,25.930,91.550,8.450,197.77,224,0.875,bicubic,-10.226,-5.344,-1 -xcit_large_24_p8_224,74.070,25.930,90.890,9.110,188.93,224,1.000,bicubic,-10.322,-5.768,-6 -vit_base_patch16_224_miil,74.040,25.960,91.700,8.300,86.54,224,0.875,bilinear,-10.232,-5.102,0 -resnetv2_152x4_bitm,74.010,25.990,92.340,7.660,936.53,480,1.000,bilinear,-10.908,-5.102,-30 -swinv2_base_window16_256,74.010,25.990,91.750,8.250,87.92,256,0.900,bicubic,-10.582,-5.324,-21 -vit_base_patch16_224,74.000,26.000,92.470,7.530,86.57,224,0.900,bicubic,-10.530,-4.826,-19 -tf_efficientnetv2_s,74.000,26.000,91.530,8.470,21.46,384,1.000,bicubic,-9.884,-5.168,+20 -swsl_resnext101_32x16d,73.990,26.010,92.180,7.820,194.03,224,0.875,bilinear,-9.360,-4.664,+52 -regnetz_d32,73.970,26.030,91.950,8.050,27.58,320,0.950,bicubic,-10.054,-4.918,+12 -crossvit_18_dagger_408,73.970,26.030,91.410,8.590,44.61,408,1.000,bicubic,-10.224,-5.408,0 -seresnext101_32x8d,73.940,26.060,91.450,8.550,93.57,288,1.000,bicubic,-10.264,-5.424,-2 -resnetv2_152x2_bitm,73.920,26.080,92.670,7.330,236.34,448,1.000,bilinear,-10.590,-4.764,-23 -resnetrs420,73.920,26.080,91.760,8.240,191.89,416,1.000,bicubic,-11.088,-5.364,-44 -xcit_small_12_p8_224_dist,73.920,26.080,91.720,8.280,26.21,224,1.000,bicubic,-10.310,-5.154,-7 -resmlp_big_24_224_in22ft1k,73.900,26.100,91.750,8.250,129.14,224,0.875,bicubic,-10.498,-5.368,-20 -tf_efficientnet_b6,73.890,26.110,91.750,8.250,43.04,528,0.942,bicubic,-10.218,-5.138,-2 -tf_efficientnet_b3_ns,73.880,26.120,91.870,8.130,12.23,300,0.904,bicubic,-10.168,-5.042,+3 -convnext_base,73.870,26.130,91.320,8.680,88.59,224,0.875,bicubic,-9.970,-5.430,+13 -deit3_small_patch16_224_in21ft1k,73.840,26.160,91.960,8.040,22.06,224,1.000,bicubic,-9.236,-4.816,+57 -vit_small_r26_s32_384,73.800,26.200,92.290,7.710,36.47,384,1.000,bicubic,-10.248,-5.038,-1 -regnetz_d8,73.760,26.240,92.020,7.980,23.37,320,1.000,bicubic,-10.292,-4.976,-4 -regnety_080,73.730,26.270,91.790,8.210,39.18,288,1.000,bicubic,-10.198,-5.098,+4 -resnetrs270,73.690,26.310,91.570,8.430,129.86,352,1.000,bicubic,-10.746,-5.404,-30 -resnetv2_101x3_bitm,73.680,26.320,92.470,7.530,387.93,448,1.000,bilinear,-10.764,-4.912,-33 -resnet200d,73.680,26.320,91.570,8.430,64.69,320,1.000,bicubic,-10.280,-5.254,-1 -ig_resnext101_32x8d,73.660,26.340,92.160,7.840,88.79,224,0.875,bilinear,-9.038,-4.472,+71 -xcit_medium_24_p16_224_dist,73.650,26.350,91.580,8.420,84.40,224,1.000,bicubic,-10.628,-5.360,-25 -regnety_064,73.590,26.410,91.350,8.650,30.58,288,1.000,bicubic,-10.130,-5.376,+14 -tf_efficientnet_b5,73.560,26.440,91.460,8.540,30.39,456,0.934,bicubic,-10.254,-5.288,+6 -swinv2_base_window8_256,73.540,26.460,91.520,8.480,87.92,256,0.900,bicubic,-10.722,-5.402,-26 
-resnet152d,73.530,26.470,91.230,8.770,60.21,320,1.000,bicubic,-10.148,-5.510,+16 -regnetv_064,73.490,26.510,91.590,8.410,30.58,288,1.000,bicubic,-10.222,-5.156,+12 -deit3_base_patch16_224,73.480,26.520,91.290,8.710,86.59,224,0.900,bicubic,-10.312,-5.294,+5 -sequencer2d_l,73.480,26.520,91.100,8.900,54.30,224,0.875,bicubic,-9.926,-5.400,+21 -xcit_tiny_24_p8_384_dist,73.420,26.580,91.560,8.440,12.11,384,1.000,bicubic,-10.326,-5.152,+5 -resnetrs350,73.400,26.600,91.310,8.690,163.96,384,1.000,bicubic,-11.312,-5.680,-56 -twins_svt_large,73.390,26.610,90.910,9.090,99.27,224,0.900,bicubic,-10.290,-5.684,+9 -regnetz_d8_evos,73.370,26.630,91.640,8.360,23.46,320,0.950,bicubic,-10.680,-5.356,-20 -regnety_160,73.360,26.640,91.700,8.300,83.59,288,1.000,bicubic,-10.332,-5.076,+6 -swin_s3_base_224,73.320,26.680,91.190,8.810,71.13,224,0.900,bicubic,-10.612,-5.470,-15 -efficientnet_b4,73.310,26.690,91.280,8.720,19.34,384,1.000,bicubic,-10.114,-5.318,+13 -vit_small_patch16_384,73.300,26.700,92.000,8.000,22.20,384,1.000,bicubic,-10.500,-5.100,-5 -resmlp_big_24_distilled_224,73.290,26.710,91.160,8.840,129.14,224,0.875,bicubic,-10.298,-5.488,+5 -swinv2_small_window16_256,73.270,26.730,91.270,8.730,49.73,256,0.900,bicubic,-10.940,-5.600,-36 -xcit_small_24_p16_224_dist,73.260,26.740,91.460,8.540,47.67,224,1.000,bicubic,-10.610,-5.272,-17 -deit_base_distilled_patch16_224,73.240,26.760,91.000,9.000,87.34,224,0.900,bicubic,-10.148,-5.488,+11 -resnetrs152,73.200,26.800,91.260,8.740,86.62,320,1.000,bicubic,-10.514,-5.354,-4 -xcit_medium_24_p8_224,73.150,26.850,90.280,9.720,84.32,224,1.000,bicubic,-10.588,-6.114,-7 -vit_base_patch32_384,73.130,26.870,91.240,8.760,88.30,384,1.000,bicubic,-10.222,-5.596,+10 -jx_nest_base,73.120,26.880,91.060,8.940,67.72,224,0.875,bicubic,-10.434,-5.304,-1 -swinv2_small_window8_256,73.110,26.890,90.930,9.070,49.73,256,0.900,bicubic,-10.744,-5.712,-22 -cs3se_edgenet_x,73.100,26.900,91.260,8.740,50.72,320,1.000,bicubic,-10.448,-5.406,-2 -deit3_small_patch16_384,73.090,26.910,91.240,8.760,22.21,384,1.000,bicubic,-10.338,-5.436,0 -xcit_small_24_p8_224,73.080,26.920,91.140,8.860,47.63,224,1.000,bicubic,-10.760,-5.496,-22 -cait_s24_224,73.070,26.930,91.120,8.880,46.92,224,1.000,bicubic,-10.388,-5.442,-3 -crossvit_15_dagger_408,72.950,27.050,91.090,8.910,28.50,408,1.000,bicubic,-10.888,-5.690,-23 -resnetv2_152x2_bit_teacher_384,72.900,27.100,91.550,8.450,236.34,384,1.000,bicubic,-10.944,-5.566,-27 -regnetv_040,72.880,27.120,91.110,8.890,20.64,288,1.000,bicubic,-10.318,-5.554,+8 -dm_nfnet_f0,72.880,27.120,91.080,8.920,71.49,256,0.900,bicubic,-10.504,-5.494,-1 -tf_efficientnet_b4_ap,72.880,27.120,90.980,9.020,19.34,380,0.922,bicubic,-10.368,-5.412,+3 -convnext_tiny_384_in22ft1k,72.850,27.150,91.560,8.440,28.59,384,1.000,bicubic,-11.226,-5.598,-46 -swinv2_cr_small_ns_224,72.800,27.200,90.800,9.200,49.70,224,0.900,bicubic,-10.686,-5.684,-11 -xception65p,72.790,27.210,90.910,9.090,39.82,299,0.940,bicubic,-10.340,-5.570,+10 -regnety_032,72.760,27.240,90.960,9.040,19.44,288,1.000,bicubic,-9.964,-5.462,+30 -regnety_040,72.720,27.280,90.730,9.270,20.65,288,1.000,bicubic,-10.316,-5.780,+14 -swin_s3_small_224,72.690,27.310,90.560,9.440,49.74,224,0.900,bicubic,-11.084,-5.892,-27 -resnext101_64x4d,72.620,27.380,90.840,9.160,83.46,288,1.000,bicubic,-10.524,-5.534,+2 -xcit_small_12_p8_224,72.620,27.380,90.670,9.330,26.21,224,1.000,bicubic,-10.720,-5.810,-6 -nfnet_l0,72.610,27.390,91.000,9.000,35.07,288,1.000,bicubic,-10.142,-5.518,+24 
-pnasnet5large,72.610,27.390,90.510,9.490,86.06,331,0.911,bicubic,-10.172,-5.532,+21 -xception65,72.600,27.400,90.820,9.180,39.92,299,0.940,bicubic,-10.574,-5.772,-4 -resnest101e,72.580,27.420,90.820,9.180,48.28,256,0.875,bilinear,-10.308,-5.500,+13 -twins_pcpvt_large,72.570,27.430,90.700,9.300,60.99,224,0.900,bicubic,-10.566,-5.904,-1 -swsl_resnext50_32x4d,72.560,27.440,90.850,9.150,25.03,224,0.875,bilinear,-9.616,-5.382,+66 -gc_efficientnetv2_rw_t,72.550,27.450,90.830,9.170,13.68,288,1.000,bicubic,-9.916,-5.468,+38 -twins_svt_base,72.550,27.450,90.450,9.550,56.07,224,0.900,bicubic,-10.588,-5.970,-6 -tresnet_xl_448,72.550,27.450,90.310,9.690,78.44,448,0.875,bilinear,-10.498,-5.860,+1 -deit_base_patch16_384,72.540,27.460,90.270,9.730,86.86,384,1.000,bicubic,-10.566,-6.100,-3 -resnetv2_50x3_bitm,72.520,27.480,91.760,8.240,217.32,448,1.000,bilinear,-11.492,-5.366,-57 -xcit_small_12_p16_224_dist,72.500,27.500,91.110,8.890,26.25,224,1.000,bicubic,-10.846,-5.308,-19 -xcit_tiny_24_p8_224_dist,72.430,27.570,90.920,9.080,12.11,224,1.000,bicubic,-10.130,-5.248,+27 -resnet101d,72.420,27.580,90.650,9.350,44.57,320,1.000,bicubic,-10.602,-5.796,-1 -sequencer2d_m,72.410,27.590,90.710,9.290,38.31,224,0.875,bicubic,-10.398,-5.558,+7 -cs3sedarknet_x,72.380,27.620,91.020,8.980,35.40,288,1.000,bicubic,-10.274,-5.326,+14 -jx_nest_small,72.360,27.640,90.690,9.310,38.35,224,0.875,bicubic,-10.760,-5.640,-11 -convnext_small,72.330,27.670,90.850,9.150,50.22,224,0.875,bicubic,-10.820,-5.580,-18 -regnetz_c16,72.310,27.690,90.820,9.180,13.46,320,0.940,bicubic,-10.210,-5.540,+22 -tf_efficientnet_b4,72.290,27.710,90.590,9.410,19.34,380,0.922,bicubic,-10.734,-5.710,-8 -tf_efficientnet_b2_ns,72.280,27.720,91.090,8.910,9.11,260,0.890,bicubic,-10.104,-5.156,+30 -tresnet_m,72.260,27.740,90.230,9.770,31.39,224,0.875,bilinear,-10.814,-5.890,-13 -resnetv2_50x1_bit_distilled,72.250,27.750,91.010,8.990,25.55,224,0.875,bicubic,-10.572,-5.512,-4 -crossvit_18_240,72.250,27.750,90.270,9.730,43.27,240,0.875,bicubic,-10.148,-5.784,+25 -regnetz_c16_evos,72.230,27.770,91.230,8.770,13.49,320,0.950,bicubic,-10.402,-5.246,+7 -nasnetalarge,72.230,27.770,90.460,9.540,88.75,331,0.911,bicubic,-10.388,-5.584,+7 -efficientnetv2_rw_t,72.230,27.770,90.410,9.590,13.65,288,1.000,bicubic,-10.114,-5.786,+27 -cait_xxs36_384,72.200,27.800,90.840,9.160,17.37,384,1.000,bicubic,-9.992,-5.304,+43 -twins_pcpvt_base,72.190,27.810,90.510,9.490,43.83,224,0.900,bicubic,-10.518,-5.840,-1 -crossvit_18_dagger_240,72.130,27.870,90.070,9.930,44.27,240,0.875,bicubic,-10.390,-5.998,+12 -xcit_tiny_24_p16_384_dist,72.080,27.920,90.590,9.410,12.12,384,1.000,bicubic,-10.492,-5.698,+7 -resnet152,72.060,27.940,90.340,9.660,60.19,224,0.950,bicubic,-10.758,-5.792,-12 -mobilevitv2_200_384_in22ft1k,72.000,28.000,90.630,9.370,18.45,384,1.000,bicubic,-11.400,-5.952,-45 -vit_relpos_base_patch16_clsgap_224,72.000,28.000,90.250,9.750,86.43,224,0.900,bicubic,-10.760,-5.924,-10 -vit_relpos_medium_patch16_cls_224,71.990,28.010,90.290,9.710,38.76,224,0.900,bicubic,-10.572,-5.776,+4 -sequencer2d_s,71.940,28.060,90.490,9.510,27.65,224,0.875,bicubic,-10.404,-5.544,+19 -swinv2_cr_small_224,71.880,28.120,90.260,9.740,49.70,224,0.900,bicubic,-11.258,-5.838,-34 -eca_nfnet_l0,71.840,28.160,91.110,8.890,24.14,288,1.000,bicubic,-10.738,-5.380,-1 -convnext_tiny_in22ft1k,71.830,28.170,90.920,9.080,28.59,224,0.875,bicubic,-11.082,-5.704,-24 -vit_relpos_base_patch16_224,71.830,28.170,90.260,9.740,86.43,224,0.900,bicubic,-10.656,-5.882,+4 
-mobilevitv2_175_384_in22ft1k,71.810,28.190,90.780,9.220,14.25,384,1.000,bicubic,-11.124,-5.650,-27 -cs3edgenet_x,71.810,28.190,90.360,9.640,47.82,288,1.000,bicubic,-10.912,-6.016,-15 -swin_small_patch4_window7_224,71.750,28.250,90.240,9.760,49.61,224,0.900,bicubic,-11.468,-6.086,-46 -pit_b_224,71.710,28.290,89.250,10.750,73.76,224,0.900,bicubic,-10.734,-6.462,+4 -xcit_large_24_p16_224,71.700,28.300,89.170,10.830,189.10,224,1.000,bicubic,-11.192,-6.708,-29 -swsl_resnet50,71.690,28.310,90.470,9.530,25.56,224,0.875,bilinear,-9.490,-5.510,+87 -resnet61q,71.670,28.330,90.270,9.730,36.85,288,1.000,bicubic,-10.848,-5.860,-4 -tresnet_xl,71.660,28.340,89.630,10.370,78.44,224,0.875,bilinear,-10.402,-6.306,+30 -tresnet_l_448,71.610,28.390,90.060,9.940,55.99,448,0.875,bilinear,-10.660,-5.920,+15 -convit_base,71.590,28.410,90.160,9.840,86.54,224,0.875,bicubic,-10.702,-5.778,+12 -xcit_tiny_12_p8_384_dist,71.580,28.420,90.710,9.290,6.71,384,1.000,bicubic,-10.806,-5.512,-1 -swinv2_tiny_window16_256,71.570,28.430,90.350,9.650,28.35,256,0.900,bicubic,-11.240,-5.880,-31 -poolformer_m48,71.550,28.450,89.760,10.240,73.47,224,0.950,bicubic,-10.910,-6.198,-6 -fbnetv3_g,71.520,28.480,90.380,9.620,16.62,288,0.950,bilinear,-10.514,-5.686,+27 -crossvit_15_dagger_240,71.520,28.480,89.860,10.140,28.21,240,0.875,bicubic,-10.806,-6.096,+3 -ssl_resnext101_32x8d,71.510,28.490,90.470,9.530,88.79,224,0.875,bilinear,-10.098,-5.572,+48 -efficientnet_b3,71.480,28.520,90.060,9.940,12.23,320,1.000,bicubic,-10.760,-6.058,+8 -ecaresnet101d,71.470,28.530,90.330,9.670,44.57,224,0.875,bicubic,-10.700,-5.718,+15 -mobilevitv2_150_384_in22ft1k,71.460,28.540,90.420,9.580,10.59,384,1.000,bicubic,-11.130,-5.896,-25 -ssl_resnext101_32x16d,71.430,28.570,90.520,9.480,194.03,224,0.875,bilinear,-10.426,-5.576,+32 -resnet51q,71.420,28.580,90.180,9.820,35.70,288,1.000,bilinear,-10.938,-5.998,-9 -vit_relpos_medium_patch16_224,71.370,28.630,89.950,10.050,38.75,224,0.900,bicubic,-11.092,-6.136,-16 -pit_s_distilled_224,71.360,28.640,89.780,10.220,24.04,224,0.900,bicubic,-10.634,-6.016,+19 -xcit_tiny_24_p8_224,71.330,28.670,90.240,9.760,12.11,224,1.000,bicubic,-10.566,-5.734,+26 -mixer_b16_224_miil,71.310,28.690,89.650,10.350,59.88,224,0.875,bilinear,-10.994,-6.070,-5 -resnetv2_152x2_bit_teacher,71.290,28.710,90.430,9.570,236.34,224,0.875,bicubic,-11.578,-6.138,-48 -resnetv2_101,71.280,28.720,89.940,10.060,44.54,224,0.950,bicubic,-10.766,-5.922,+13 -convnext_tiny_hnf,71.280,28.720,89.400,10.600,28.59,224,0.950,bicubic,-10.940,-6.466,0 -ecaresnet50t,71.260,28.740,90.420,9.580,25.57,320,0.950,bicubic,-11.088,-5.718,-16 -convmixer_1536_20,71.230,28.770,89.440,10.560,51.63,224,0.960,bicubic,-10.140,-6.172,+54 -xcit_small_12_p16_224,71.200,28.800,89.750,10.250,26.25,224,1.000,bicubic,-10.772,-6.062,+14 -deit_base_patch16_224,71.200,28.800,89.200,10.800,86.57,224,0.900,bicubic,-10.794,-6.532,+11 -crossvit_base_240,71.180,28.820,89.840,10.160,105.03,240,0.875,bicubic,-11.036,-5.992,-4 -vit_relpos_medium_patch16_rpn_224,71.170,28.830,90.090,9.910,38.73,224,0.900,bicubic,-11.124,-5.882,-13 -mobilevitv2_200_in22ft1k,71.140,28.860,89.680,10.320,18.45,256,0.888,bicubic,-11.194,-6.258,-19 -resnetv2_50d_evos,71.120,28.880,90.030,9.970,25.59,288,0.950,bicubic,-10.858,-5.882,+8 -swin_s3_tiny_224,71.120,28.880,89.720,10.280,28.33,224,0.900,bicubic,-11.004,-6.230,-3 -halo2botnet50ts_256,71.110,28.890,89.630,10.370,22.64,256,0.950,bicubic,-10.958,-6.012,-1 -cs3darknet_x,71.080,28.920,90.150,9.850,35.05,288,1.000,bicubic,-11.144,-6.080,-12 
-cs3sedarknet_l,71.070,28.930,90.350,9.650,21.91,288,0.950,bicubic,-10.706,-5.620,+17 -xcit_small_24_p16_224,71.040,28.960,89.700,10.300,47.67,224,1.000,bicubic,-11.544,-6.300,-45 -xcit_tiny_12_p8_224_dist,71.030,28.970,89.890,10.110,6.71,224,1.000,bicubic,-10.178,-5.716,+49 -xcit_medium_24_p16_224,71.010,28.990,89.520,10.480,84.40,224,1.000,bicubic,-11.628,-6.458,-52 -visformer_small,71.000,29.000,89.450,10.550,40.22,224,0.900,bicubic,-11.108,-6.426,-9 -resnetv2_101x1_bitm,70.990,29.010,91.090,8.910,44.54,448,1.000,bilinear,-11.342,-5.426,-28 -edgenext_small,70.990,29.010,89.870,10.130,5.59,320,1.000,bicubic,-10.584,-5.844,+19 -resnetv2_50d_gn,70.990,29.010,89.770,10.230,25.57,288,0.950,bicubic,-10.834,-6.154,+6 -lamhalobotnet50ts_256,70.990,29.010,89.070,10.930,22.57,256,0.950,bicubic,-10.562,-6.434,+18 -tresnet_m_448,70.990,29.010,88.690,11.310,31.39,448,0.875,bilinear,-10.716,-6.882,+9 -resnest50d_4s2x40d,70.950,29.050,89.720,10.280,30.42,224,0.875,bicubic,-10.158,-5.842,+48 -tnt_s_patch16_224,70.950,29.050,89.600,10.400,23.76,224,0.900,bicubic,-10.568,-6.146,+18 -wide_resnet50_2,70.940,29.060,89.230,10.770,68.88,224,0.875,bicubic,-10.516,-6.300,+23 -convnext_tiny,70.930,29.070,89.750,10.250,28.59,224,0.875,bicubic,-11.132,-6.104,-14 -tf_efficientnet_b3_ap,70.920,29.080,89.430,10.570,12.23,300,0.904,bicubic,-10.904,-6.194,0 -vit_small_patch16_224,70.910,29.090,90.150,9.850,22.05,224,0.900,bicubic,-10.486,-5.988,+25 -vit_srelpos_medium_patch16_224,70.910,29.090,89.960,10.040,38.74,224,0.900,bicubic,-11.326,-5.974,-30 -vit_base_patch16_rpn_224,70.870,29.130,89.770,10.230,86.54,224,0.900,bicubic,-11.330,-6.226,-27 -resnet101,70.870,29.130,89.510,10.490,44.55,224,0.950,bicubic,-11.060,-6.256,-9 -vit_large_patch32_384,70.860,29.140,90.570,9.430,306.63,384,1.000,bicubic,-10.648,-5.520,+12 -tf_efficientnet_b1_ns,70.860,29.140,90.140,9.860,7.79,240,0.882,bicubic,-10.526,-5.596,+21 -jx_nest_tiny,70.860,29.140,89.940,10.060,17.06,224,0.875,bicubic,-10.558,-5.678,+17 -resnetrs101,70.860,29.140,89.830,10.170,63.62,288,0.940,bicubic,-11.424,-6.178,-39 -rexnet_200,70.850,29.150,89.710,10.290,16.37,224,0.875,bicubic,-10.778,-5.958,-1 -tresnet_l,70.840,29.160,89.630,10.370,55.99,224,0.875,bilinear,-10.650,-5.996,+8 -tf_efficientnetv2_b3,70.830,29.170,89.510,10.490,14.36,300,0.904,bicubic,-11.136,-6.272,-18 -poolformer_m36,70.800,29.200,89.510,10.490,56.17,224,0.950,bicubic,-11.308,-6.180,-30 -coat_lite_small,70.780,29.220,89.580,10.420,19.84,224,0.900,bicubic,-11.524,-6.270,-48 -deit3_small_patch16_224,70.760,29.240,89.440,10.560,22.06,224,0.900,bicubic,-10.622,-6.010,+14 -levit_384,70.760,29.240,89.290,10.710,39.13,224,0.900,bicubic,-11.828,-6.728,-74 -convnext_nano,70.730,29.270,89.350,10.650,15.59,288,1.000,bicubic,-10.746,-6.310,+3 -swinv2_cr_tiny_ns_224,70.720,29.280,89.380,10.620,28.33,224,0.900,bicubic,-11.066,-6.442,-15 -vit_relpos_small_patch16_224,70.710,29.290,90.000,10.000,21.98,224,0.900,bicubic,-10.744,-5.828,+4 -mobilevitv2_175_in22ft1k,70.660,29.340,89.710,10.290,14.25,256,0.888,bicubic,-11.280,-6.080,-25 -tf_efficientnet_b3,70.640,29.360,89.450,10.550,12.23,300,0.904,bicubic,-10.998,-6.268,-13 -gluon_senet154,70.620,29.380,88.920,11.080,115.09,224,0.875,bicubic,-10.610,-6.426,+15 -crossvit_small_240,70.610,29.390,89.360,10.640,26.86,240,0.875,bicubic,-10.406,-6.096,+29 -cait_xxs24_384,70.600,29.400,89.720,10.280,12.03,384,1.000,bicubic,-10.362,-5.924,+33 -convit_small,70.590,29.410,89.580,10.420,27.78,224,0.875,bicubic,-10.838,-6.162,-1 
-twins_pcpvt_small,70.560,29.440,89.070,10.930,24.11,224,0.900,bicubic,-10.530,-6.572,+23 -swinv2_tiny_window8_256,70.540,29.460,89.490,10.510,28.35,256,0.900,bicubic,-11.270,-6.504,-25 -ssl_resnext101_32x4d,70.530,29.470,89.760,10.240,44.18,224,0.875,bilinear,-10.394,-5.966,+32 -vit_small_r26_s32_224,70.520,29.480,90.110,9.890,36.43,224,0.900,bicubic,-11.342,-5.912,-31 -deit_small_distilled_patch16_224,70.520,29.480,89.470,10.530,22.44,224,0.900,bicubic,-10.688,-5.904,+9 -legacy_senet154,70.500,29.500,89.010,10.990,115.09,224,0.875,bilinear,-10.808,-6.486,+2 -halonet50ts,70.490,29.510,89.330,10.670,22.73,256,0.940,bicubic,-11.162,-6.282,-25 -regnetz_b16,70.460,29.540,89.540,10.460,9.72,288,0.940,bicubic,-10.252,-5.934,+39 -crossvit_15_240,70.450,29.550,89.530,10.470,27.53,240,0.875,bicubic,-11.094,-6.160,-20 -gluon_seresnext101_64x4d,70.440,29.560,89.360,10.640,88.23,224,0.875,bicubic,-10.440,-5.936,+28 -twins_svt_small,70.440,29.560,89.350,10.650,24.06,224,0.900,bicubic,-11.242,-6.316,-30 -tf_efficientnet_lite4,70.430,29.570,89.110,10.890,13.01,380,0.920,bilinear,-11.104,-6.556,-22 -resnetaa50,70.410,29.590,89.970,10.030,25.56,288,1.000,bicubic,-11.208,-5.840,-28 -resnest50d,70.410,29.590,88.760,11.240,27.48,224,0.875,bilinear,-10.564,-6.620,+16 -resnest50d_1s4x24d,70.400,29.600,89.240,10.760,25.68,224,0.875,bicubic,-10.584,-6.084,+14 -seresnext50_32x4d,70.390,29.610,89.110,10.890,27.56,224,0.875,bicubic,-10.872,-6.518,-5 -cs3darknet_l,70.370,29.630,89.750,10.250,21.16,288,0.950,bicubic,-10.516,-5.918,+20 -gernet_l,70.360,29.640,88.980,11.020,31.08,256,0.875,bilinear,-10.990,-6.556,-11 -vit_srelpos_small_patch16_224,70.290,29.710,89.580,10.420,21.97,224,0.900,bicubic,-10.808,-5.992,+2 -gluon_resnet152_v1s,70.290,29.710,88.850,11.150,60.32,224,0.875,bicubic,-10.724,-6.564,+8 -repvgg_b3,70.250,29.750,88.740,11.260,123.09,224,0.875,bilinear,-10.246,-6.524,+36 -coat_mini,70.210,29.790,89.450,10.550,10.34,224,0.900,bicubic,-11.056,-5.942,-12 -xception41p,70.200,29.800,89.090,10.910,26.91,299,0.940,bicubic,-11.768,-6.704,-55 -sebotnet33ts_256,70.150,29.850,88.800,11.200,13.70,256,0.940,bicubic,-11.004,-6.366,-7 -efficientnet_el,70.120,29.880,89.290,10.710,10.59,300,0.904,bicubic,-11.186,-6.244,-16 -inception_resnet_v2,70.120,29.880,88.700,11.300,55.84,299,0.897,bicubic,-10.340,-6.606,+35 -resmlp_36_distilled_224,70.110,29.890,89.100,10.900,44.69,224,0.875,bicubic,-11.046,-6.386,-11 -ecaresnet101d_pruned,70.100,29.900,89.580,10.420,24.88,224,0.875,bicubic,-10.710,-6.048,+14 -haloregnetz_b,70.070,29.930,88.870,11.130,11.68,224,0.940,bicubic,-10.974,-6.328,-4 -poolformer_s36,70.030,29.970,89.190,10.810,30.86,224,0.900,bicubic,-11.388,-6.258,-29 -gluon_seresnext101_32x4d,70.030,29.970,88.910,11.090,48.96,224,0.875,bicubic,-10.876,-6.386,+5 -sehalonet33ts,70.020,29.980,88.710,11.290,13.69,256,0.940,bicubic,-10.952,-6.562,-1 -regnety_320,70.010,29.990,88.890,11.110,145.05,224,0.875,bicubic,-10.794,-6.354,+10 -gluon_resnet152_v1d,69.970,30.030,88.490,11.510,60.21,224,0.875,bicubic,-10.506,-6.710,+26 -levit_256,69.950,30.050,89.240,10.760,18.89,224,0.900,bicubic,-11.566,-6.250,-43 -pit_s_224,69.890,30.110,88.930,11.070,23.46,224,0.900,bicubic,-11.208,-6.402,-14 -ecaresnet50d,69.840,30.160,89.390,10.610,25.58,224,0.875,bicubic,-10.758,-5.928,+14 -mobilevitv2_150_in22ft1k,69.830,30.170,89.160,10.840,10.59,256,0.888,bicubic,-11.640,-6.508,-42 -mobilevitv2_200,69.760,30.240,88.620,11.380,18.45,256,0.888,bicubic,-11.380,-6.748,-20 
-ssl_resnext50_32x4d,69.730,30.270,89.430,10.570,25.03,224,0.875,bilinear,-10.596,-5.982,+33 -gluon_resnext101_64x4d,69.710,30.290,88.270,11.730,83.46,224,0.875,bicubic,-10.894,-6.722,+9 -lambda_resnet50ts,69.700,30.300,88.820,11.180,21.54,256,0.950,bicubic,-11.452,-6.282,-24 -xcit_tiny_24_p16_224_dist,69.700,30.300,88.710,11.290,12.12,224,1.000,bicubic,-10.748,-6.502,+21 -xcit_tiny_12_p16_384_dist,69.690,30.310,89.010,10.990,6.72,384,1.000,bicubic,-11.252,-6.398,-11 -resnext50_32x4d,69.680,30.320,88.660,11.340,25.03,224,0.950,bicubic,-11.416,-6.666,-22 -resmlp_24_distilled_224,69.670,30.330,89.050,10.950,30.02,224,0.875,bicubic,-11.094,-6.172,-2 -efficientnet_b3_pruned,69.590,30.410,88.980,11.020,9.86,300,0.904,bicubic,-11.268,-6.264,-6 -gernet_m,69.560,30.440,88.700,11.300,21.14,224,0.875,bilinear,-11.170,-6.486,-3 -nf_resnet50,69.540,30.460,88.730,11.270,25.56,288,0.940,bicubic,-11.114,-6.604,-1 -gcresnext50ts,69.530,30.470,88.840,11.160,15.67,256,0.900,bicubic,-11.048,-6.330,+2 -efficientnet_el_pruned,69.520,30.480,88.930,11.070,10.59,300,0.904,bicubic,-10.778,-6.284,+26 -repvgg_b3g4,69.520,30.480,88.450,11.550,83.83,224,0.875,bilinear,-10.696,-6.658,+32 -gcresnet50t,69.510,30.490,89.050,10.950,25.90,256,0.900,bicubic,-11.424,-6.404,-19 -ens_adv_inception_resnet_v2,69.510,30.490,88.520,11.480,55.84,299,0.897,bicubic,-10.464,-6.422,+42 -efficientnet_b2,69.490,30.510,88.690,11.310,9.11,288,1.000,bicubic,-11.126,-6.626,-6 -rexnet_150,69.460,30.540,88.980,11.020,9.73,224,0.875,bicubic,-10.854,-6.186,+19 -regnetx_320,69.450,30.550,88.270,11.730,107.81,224,0.875,bicubic,-10.794,-6.750,+24 -swin_tiny_patch4_window7_224,69.440,30.560,89.020,10.980,28.29,224,0.900,bicubic,-11.936,-6.522,-53 -vit_base_patch32_224,69.420,30.580,89.430,10.570,88.22,224,0.900,bicubic,-11.304,-6.136,-13 -cspresnext50,69.420,30.580,88.610,11.390,20.57,256,0.887,bilinear,-11.124,-6.714,-7 -convmixer_768_32,69.400,30.600,88.870,11.130,21.11,224,0.960,bicubic,-10.764,-6.202,+27 -darknet53,69.370,30.630,88.760,11.240,41.61,288,1.000,bicubic,-11.168,-6.660,-8 -legacy_seresnext101_32x4d,69.370,30.630,88.060,11.940,48.96,224,0.875,bilinear,-10.852,-6.954,+20 -inception_v4,69.360,30.640,88.780,11.220,42.68,299,0.875,bicubic,-10.808,-6.184,+22 -ecaresnetlight,69.350,30.650,89.230,10.770,30.16,224,0.875,bicubic,-11.106,-6.016,-3 -resnet50d,69.350,30.650,88.230,11.770,25.58,224,0.875,bicubic,-11.178,-6.938,-11 -cs3darknet_focus_l,69.330,30.670,89.440,10.560,21.15,288,0.950,bicubic,-11.544,-6.252,-28 -xception71,69.320,30.680,88.270,11.730,42.34,299,0.903,bicubic,-10.550,-6.654,+33 -mobilevitv2_175,69.300,30.700,88.940,11.060,14.25,256,0.888,bicubic,-11.562,-6.322,-29 -vit_small_patch32_384,69.290,30.710,89.820,10.180,22.92,384,1.000,bicubic,-11.200,-5.780,-12 -edgenext_small_rw,69.210,30.790,88.760,11.240,7.83,320,1.000,bicubic,-11.242,-6.430,-8 -gluon_xception65,69.160,30.840,88.080,11.920,39.92,299,0.903,bicubic,-10.562,-6.780,+38 -gluon_resnet152_v1c,69.150,30.850,87.870,12.130,60.21,224,0.875,bicubic,-10.762,-6.972,+25 -mixnet_xl,69.110,30.890,88.310,11.690,11.90,224,0.875,bicubic,-11.368,-6.624,-15 -seresnet33ts,69.100,30.900,88.490,11.510,19.78,256,0.900,bicubic,-11.254,-6.616,-5 -tf_efficientnetv2_b2,69.100,30.900,88.220,11.780,10.10,260,0.890,bicubic,-11.108,-6.824,+9 -resnetv2_50,69.070,30.930,88.440,11.560,25.55,224,0.950,bicubic,-11.342,-6.632,-11 -gcresnet33ts,69.010,30.990,88.470,11.530,19.88,256,0.900,bicubic,-11.066,-6.524,+14 
-gluon_resnet101_v1d,69.010,30.990,88.100,11.900,44.57,224,0.875,bicubic,-11.408,-6.914,-14 -repvgg_b2g4,69.000,31.000,88.340,11.660,61.76,224,0.875,bilinear,-10.366,-6.348,+50 -seresnet50,68.950,31.050,88.700,11.300,28.09,224,0.875,bicubic,-11.316,-6.370,-2 -gluon_resnext101_32x4d,68.950,31.050,88.370,11.630,44.18,224,0.875,bicubic,-11.390,-6.556,-10 -cspdarknet53,68.930,31.070,88.600,11.400,27.64,256,0.887,bilinear,-11.126,-6.486,+11 -tf_efficientnet_b2_ap,68.930,31.070,88.350,11.650,9.11,260,0.890,bicubic,-11.372,-6.678,-8 -regnety_120,68.870,31.130,88.330,11.670,51.82,224,0.875,bicubic,-11.506,-6.792,-18 -mobilevitv2_150,68.850,31.150,88.080,11.920,10.59,256,0.888,bicubic,-11.518,-6.984,-17 -resnet50_gn,68.840,31.160,88.420,11.580,25.56,224,0.940,bicubic,-11.220,-6.528,+6 -gluon_resnet152_v1b,68.820,31.180,87.720,12.280,60.19,224,0.875,bicubic,-10.862,-7.016,+26 -eca_resnet33ts,68.800,31.200,88.580,11.420,19.68,256,0.900,bicubic,-11.280,-6.392,+2 -gmlp_s16_224,68.760,31.240,88.080,11.920,19.42,224,0.875,bicubic,-10.880,-6.544,+28 -dpn131,68.760,31.240,87.460,12.540,79.25,224,0.875,bicubic,-11.066,-7.248,+15 -poolformer_s24,68.750,31.250,88.210,11.790,21.39,224,0.900,bicubic,-11.566,-6.832,-18 -darknetaa53,68.740,31.260,88.720,11.280,36.02,288,1.000,bilinear,-11.782,-6.606,-37 -tf_efficientnet_b2,68.740,31.260,87.980,12.020,9.11,260,0.890,bicubic,-11.348,-6.928,-4 -resnet50,68.740,31.260,87.680,12.320,25.56,224,0.950,bicubic,-11.634,-6.934,-27 -resnext50d_32x4d,68.730,31.270,88.300,11.700,25.05,224,0.875,bicubic,-10.946,-6.566,+20 -deit_small_patch16_224,68.710,31.290,88.200,11.800,22.05,224,0.900,bicubic,-11.154,-6.848,+5 -gluon_resnet101_v1s,68.710,31.290,87.910,12.090,44.67,224,0.875,bicubic,-11.588,-7.252,-20 -dpn107,68.700,31.300,88.140,11.860,86.92,224,0.875,bicubic,-11.468,-6.766,-12 -gluon_seresnext50_32x4d,68.680,31.320,88.330,11.670,27.56,224,0.875,bicubic,-11.232,-6.502,-1 -hrnet_w64,68.630,31.370,88.050,11.950,128.06,224,0.875,bilinear,-10.840,-6.604,+24 -dpn98,68.600,31.400,87.660,12.340,61.57,224,0.875,bicubic,-11.044,-6.940,+15 -xcit_tiny_12_p8_224,68.570,31.430,88.690,11.310,6.71,224,1.000,bicubic,-11.124,-6.358,+9 -regnetx_160,68.530,31.470,88.440,11.560,54.28,224,0.875,bicubic,-11.324,-6.390,-1 -xcit_tiny_24_p16_224,68.440,31.560,88.290,11.710,12.12,224,1.000,bicubic,-11.004,-6.598,+22 -rexnet_130,68.440,31.560,88.040,11.960,7.56,224,0.875,bicubic,-11.062,-6.642,+16 -tf_efficientnet_el,68.430,31.570,88.210,11.790,10.59,300,0.904,bicubic,-11.824,-6.918,-27 -cspresnet50,68.420,31.580,87.970,12.030,21.62,256,0.887,bilinear,-11.162,-6.738,+12 -cait_xxs36_224,68.400,31.600,88.640,11.360,17.30,224,1.000,bicubic,-11.348,-6.228,0 -ecaresnet50d_pruned,68.400,31.600,88.370,11.630,19.94,224,0.875,bicubic,-11.318,-6.506,+1 -dla102x2,68.380,31.620,87.890,12.110,41.28,224,0.875,bilinear,-11.062,-6.756,+17 -skresnext50_32x4d,68.370,31.630,87.560,12.440,27.48,224,0.875,bicubic,-11.784,-7.086,-23 -ssl_resnet50,68.360,31.640,88.530,11.470,25.56,224,0.875,bilinear,-10.864,-6.300,+31 -fbnetv3_d,68.350,31.650,88.450,11.550,10.31,256,0.950,bilinear,-11.330,-6.490,+1 -efficientnet_b2_pruned,68.320,31.680,88.100,11.900,8.31,260,0.890,bicubic,-11.598,-6.750,-18 -resmlp_big_24_224,68.320,31.680,87.520,12.480,129.14,224,0.875,bicubic,-12.710,-7.500,-90 -gluon_resnext50_32x4d,68.310,31.690,87.300,12.700,25.03,224,0.875,bicubic,-11.050,-7.126,+14 -vit_base_patch16_224_sam,68.270,31.730,87.730,12.270,86.57,224,0.900,bicubic,-11.974,-7.024,-36 
-ecaresnet26t,68.230,31.770,88.800,11.200,16.01,320,0.950,bicubic,-11.622,-6.284,-15 -tf_efficientnet_lite3,68.230,31.770,87.740,12.260,8.20,300,0.904,bilinear,-11.588,-7.174,-13 -ese_vovnet39b,68.200,31.800,88.260,11.740,24.57,224,0.875,bicubic,-11.112,-6.454,+13 -fbnetv3_b,68.190,31.810,87.930,12.070,8.60,256,0.950,bilinear,-10.952,-6.820,+27 -regnetx_120,68.170,31.830,87.660,12.340,46.11,224,0.875,bicubic,-11.422,-7.074,-4 -resmlp_36_224,68.060,31.940,88.190,11.810,44.69,224,0.875,bicubic,-11.710,-6.696,-16 -resnetrs50,68.030,31.970,87.710,12.290,35.69,224,0.910,bicubic,-11.856,-7.260,-25 -pit_xs_distilled_224,68.000,32.000,87.720,12.280,11.00,224,0.900,bicubic,-11.308,-6.646,+9 -nf_regnet_b1,67.970,32.030,88.190,11.810,10.22,288,0.900,bicubic,-11.330,-6.564,+10 -dpn92,67.970,32.030,87.540,12.460,37.67,224,0.875,bicubic,-12.050,-7.290,-33 -gluon_resnet50_v1d,67.950,32.050,87.130,12.870,25.58,224,0.875,bicubic,-11.120,-7.336,+26 -resnetv2_50x1_bitm,67.930,32.070,89.290,10.710,25.55,448,1.000,bilinear,-12.412,-6.396,-59 -levit_192,67.900,32.100,87.900,12.100,10.95,224,0.900,bicubic,-11.936,-6.890,-26 -tf_efficientnetv2_b1,67.890,32.110,87.800,12.200,8.14,240,0.882,bicubic,-11.576,-6.922,-6 -regnetx_080,67.880,32.120,86.990,13.010,39.57,224,0.875,bicubic,-11.322,-7.562,+14 -resnext101_32x8d,67.860,32.140,87.490,12.510,88.79,224,0.875,bilinear,-11.456,-7.028,-2 -efficientnet_em,67.840,32.160,88.120,11.880,6.90,240,0.882,bicubic,-11.412,-6.672,+8 -legacy_seresnext50_32x4d,67.840,32.160,87.620,12.380,27.56,224,0.875,bilinear,-11.236,-6.814,+17 -lambda_resnet26t,67.810,32.190,87.780,12.220,10.96,256,0.940,bicubic,-11.288,-6.810,+14 -resmlp_24_224,67.800,32.200,87.600,12.400,30.02,224,0.875,bicubic,-11.578,-6.946,-9 -hrnet_w48,67.770,32.230,87.410,12.590,77.47,224,0.875,bilinear,-11.530,-7.104,-1 -hrnet_w44,67.740,32.260,87.550,12.450,67.06,224,0.875,bilinear,-11.156,-6.820,+22 -tf_efficientnet_b0_ns,67.720,32.280,88.060,11.940,5.29,224,0.875,bicubic,-10.944,-6.316,+30 -coat_lite_mini,67.720,32.280,87.700,12.300,11.01,224,0.900,bicubic,-11.368,-6.908,+10 -eca_botnext26ts_256,67.690,32.310,87.050,12.950,10.59,256,0.950,bicubic,-11.586,-7.566,-3 -xception,67.680,32.320,87.580,12.420,22.86,299,0.897,bicubic,-11.364,-6.814,+12 -regnetx_064,67.670,32.330,87.540,12.460,26.21,224,0.875,bicubic,-11.404,-6.920,+9 -halonet26t,67.620,32.380,87.260,12.740,12.48,256,0.950,bicubic,-11.492,-7.054,+4 -dpn68b,67.610,32.390,87.670,12.330,12.61,224,0.875,bicubic,-11.606,-6.744,-2 -dla169,67.600,32.400,87.550,12.450,53.39,224,0.875,bilinear,-11.082,-6.786,+22 -gluon_inception_v3,67.590,32.410,87.470,12.530,23.83,299,0.875,bicubic,-11.216,-6.900,+16 -gluon_resnet101_v1c,67.580,32.420,87.180,12.820,44.57,224,0.875,bicubic,-11.956,-7.398,-29 -hrnet_w40,67.550,32.450,87.140,12.860,57.56,224,0.875,bilinear,-11.372,-7.330,+10 -legacy_seresnet152,67.530,32.470,87.400,12.600,66.82,224,0.875,bilinear,-11.122,-6.970,+20 -tf_efficientnet_b1_ap,67.520,32.480,87.760,12.240,7.79,240,0.882,bicubic,-11.754,-6.548,-12 -eca_halonext26ts,67.480,32.520,87.240,12.760,10.76,256,0.940,bicubic,-12.008,-7.364,-31 -efficientnet_b1,67.470,32.530,87.500,12.500,7.79,256,1.000,bicubic,-11.318,-6.846,+12 -gluon_resnet101_v1b,67.470,32.530,87.230,12.770,44.55,224,0.875,bicubic,-11.834,-7.290,-20 -mobilevitv2_125,67.460,32.540,87.570,12.430,7.48,256,0.888,bicubic,-12.222,-7.278,-44 -resnetblur50,67.460,32.540,87.440,12.560,25.56,224,0.875,bicubic,-11.834,-7.194,-19 
-tf_efficientnet_cc_b1_8e,67.460,32.540,87.310,12.690,39.72,240,0.882,bicubic,-11.854,-7.060,-26 -res2net101_26w_4s,67.460,32.540,87.010,12.990,45.21,224,0.875,bilinear,-11.736,-7.426,-12 -res2net50_26w_8s,67.430,32.570,87.270,12.730,48.40,224,0.875,bilinear,-11.522,-7.036,-1 -resnet33ts,67.380,32.620,87.590,12.410,19.68,256,0.900,bicubic,-11.828,-6.984,-16 -cait_xxs24_224,67.350,32.650,87.520,12.480,11.96,224,1.000,bicubic,-11.036,-6.788,+23 -regnetx_032,67.290,32.710,87.000,13.000,15.30,224,0.875,bicubic,-10.894,-7.088,+34 -xception41,67.250,32.750,87.210,12.790,26.97,299,0.903,bicubic,-11.266,-7.070,+10 -coat_tiny,67.240,32.760,87.280,12.720,5.50,224,0.900,bicubic,-11.196,-6.758,+17 -resnest26d,67.190,32.810,87.170,12.830,17.07,224,0.875,bilinear,-11.294,-7.124,+11 -repvgg_b2,67.160,32.840,87.330,12.670,89.02,224,0.875,bilinear,-11.634,-7.088,-2 -legacy_seresnet101,67.140,32.860,87.040,12.960,49.33,224,0.875,bilinear,-11.240,-7.222,+18 -botnet26t_256,67.130,32.870,87.530,12.470,12.49,256,0.950,bicubic,-12.128,-6.998,-28 -vit_relpos_base_patch32_plus_rpn_256,67.130,32.870,86.500,13.500,119.42,256,0.900,bicubic,-12.356,-7.640,-47 -dla60x,67.080,32.920,87.180,12.820,17.35,224,0.875,bilinear,-11.148,-6.844,+23 -gluon_resnet50_v1s,67.060,32.940,86.860,13.140,25.68,224,0.875,bicubic,-11.646,-7.378,-4 -tv_resnet152,67.030,32.970,87.550,12.450,60.19,224,0.875,bilinear,-11.290,-6.484,+16 -dla60_res2net,67.030,32.970,87.160,12.840,20.85,224,0.875,bilinear,-11.428,-7.036,+5 -xcit_tiny_12_p16_224_dist,67.010,32.990,87.410,12.590,6.72,224,1.000,bicubic,-11.568,-6.788,-3 -dla102x,66.980,33.020,86.770,13.230,26.31,224,0.875,bilinear,-11.532,-7.458,-1 -lambda_resnet26rpt_256,66.960,33.040,87.130,12.870,10.99,256,0.940,bicubic,-12.004,-7.296,-19 -mixnet_l,66.960,33.040,86.920,13.080,7.33,224,0.875,bicubic,-12.016,-7.258,-21 -pit_xs_224,66.920,33.080,87.290,12.710,10.62,224,0.900,bicubic,-11.270,-6.876,+17 -res2net50_26w_6s,66.920,33.080,86.860,13.140,37.05,224,0.875,bilinear,-11.650,-7.264,-7 -repvgg_b1,66.900,33.100,86.790,13.210,57.42,224,0.875,bilinear,-11.468,-7.304,+6 -tf_efficientnet_b1,66.890,33.110,87.020,12.980,7.79,240,0.882,bicubic,-11.938,-7.178,-19 -xcit_nano_12_p8_384_dist,66.880,33.120,87.110,12.890,3.05,384,1.000,bicubic,-10.936,-6.936,+34 -efficientnet_es,66.870,33.130,86.730,13.270,5.44,224,0.875,bicubic,-11.188,-7.214,+18 -mobilevit_s,66.860,33.140,87.080,12.920,5.58,256,0.900,bicubic,-11.450,-7.072,+5 -resnet32ts,66.850,33.150,87.260,12.740,17.96,256,0.900,bicubic,-12.164,-7.096,-30 -regnetx_040,66.830,33.170,86.740,13.260,22.12,224,0.875,bicubic,-11.658,-7.498,-11 -tf_mixnet_l,66.780,33.220,86.460,13.540,7.33,224,0.875,bicubic,-11.998,-7.538,-21 -hrnet_w32,66.770,33.230,87.310,12.690,41.23,224,0.875,bilinear,-11.682,-6.878,-8 -hrnet_w30,66.770,33.230,86.790,13.210,37.71,224,0.875,bilinear,-11.428,-7.434,+5 -selecsls60b,66.750,33.250,86.530,13.470,32.77,224,0.875,bicubic,-11.654,-7.642,-7 -wide_resnet101_2,66.720,33.280,87.020,12.980,126.89,224,0.875,bilinear,-12.132,-7.268,-30 -tf_efficientnetv2_b0,66.690,33.310,86.710,13.290,7.14,224,0.875,bicubic,-11.662,-7.316,-5 -adv_inception_v3,66.650,33.350,86.540,13.460,23.83,299,0.875,bicubic,-10.928,-7.198,+34 -dla60_res2next,66.640,33.360,87.030,12.970,17.03,224,0.875,bilinear,-11.816,-7.116,-15 -mobilevitv2_100,66.590,33.410,87.020,12.980,4.90,256,0.888,bicubic,-11.496,-7.140,+5 -vit_tiny_patch16_384,66.570,33.430,87.270,12.730,5.79,384,1.000,bicubic,-11.860,-7.274,-14 
-levit_128,66.570,33.430,86.750,13.250,9.21,224,0.900,bicubic,-11.912,-7.262,-20 -cs3darknet_m,66.560,33.440,87.180,12.820,9.31,288,0.950,bicubic,-11.066,-6.834,+24 -gluon_resnet50_v1c,66.560,33.440,86.180,13.820,25.58,224,0.875,bicubic,-11.448,-7.810,+5 -dla102,66.520,33.480,86.910,13.090,33.27,224,0.875,bilinear,-11.508,-7.040,+3 -gmixer_24_224,66.430,33.570,86.160,13.840,24.72,224,0.875,bicubic,-11.606,-7.510,+1 -tf_inception_v3,66.410,33.590,86.660,13.340,23.83,299,0.875,bicubic,-11.442,-6.980,+13 -bat_resnext26ts,66.380,33.620,86.830,13.170,10.73,256,0.900,bicubic,-11.868,-7.266,-12 -hardcorenas_f,66.380,33.620,86.190,13.810,8.20,224,0.875,bilinear,-11.722,-7.612,-5 -coat_lite_tiny,66.310,33.690,86.980,13.020,5.72,224,0.900,bicubic,-11.206,-6.934,+24 -efficientnet_b0,66.290,33.710,85.960,14.040,5.29,224,0.875,bicubic,-11.410,-7.572,+13 -cs3darknet_focus_m,66.260,33.740,87.090,12.910,9.30,288,0.950,bicubic,-11.022,-6.882,+32 -legacy_seresnet50,66.260,33.740,86.310,13.690,28.09,224,0.875,bilinear,-11.372,-7.440,+13 -selecsls60,66.200,33.800,86.340,13.660,30.67,224,0.875,bicubic,-11.784,-7.492,-4 -tf_efficientnet_em,66.170,33.830,86.360,13.640,6.90,240,0.882,bicubic,-11.956,-7.686,-12 -tf_efficientnet_cc_b0_8e,66.170,33.830,86.220,13.780,24.01,224,0.875,bicubic,-11.730,-7.438,-1 -tv_resnext50_32x4d,66.160,33.840,86.040,13.960,25.03,224,0.875,bilinear,-11.458,-7.660,+11 -inception_v3,66.150,33.850,86.330,13.670,23.83,299,0.875,bicubic,-11.288,-7.146,+19 -resmlp_12_distilled_224,66.130,33.870,86.620,13.380,15.35,224,0.875,bicubic,-11.816,-6.940,-6 -res2net50_26w_4s,66.130,33.870,86.590,13.410,25.70,224,0.875,bilinear,-11.832,-7.262,-8 -regnety_016,66.100,33.900,86.380,13.620,11.20,224,0.875,bicubic,-11.756,-7.340,-2 -efficientnet_b1_pruned,66.080,33.920,86.570,13.430,6.33,240,0.882,bicubic,-12.164,-7.264,-25 -gluon_resnet50_v1b,66.080,33.920,86.260,13.740,25.56,224,0.875,bicubic,-11.504,-7.460,+8 -rexnet_100,66.060,33.940,86.490,13.510,4.80,224,0.875,bicubic,-11.800,-7.384,-7 -tinynet_a,66.020,33.980,85.790,14.210,6.19,192,0.875,bicubic,-11.628,-7.746,0 -res2net50_14w_8s,66.010,33.990,86.250,13.750,25.06,224,0.875,bilinear,-12.134,-7.602,-24 -gcresnext26ts,65.970,34.030,85.910,14.090,10.48,256,0.900,bicubic,-11.844,-7.926,-5 -seresnext26t_32x4d,65.880,34.120,85.670,14.330,16.81,224,0.875,bicubic,-12.088,-8.078,-17 -res2next50,65.850,34.150,85.830,14.170,24.67,224,0.875,bilinear,-12.408,-8.058,-34 -repvgg_b1g4,65.840,34.160,86.110,13.890,39.97,224,0.875,bilinear,-11.748,-7.720,0 -densenet161,65.830,34.170,86.450,13.550,28.68,224,0.875,bicubic,-11.524,-7.186,+9 -hardcorenas_e,65.810,34.190,85.980,14.020,8.07,224,0.875,bilinear,-11.976,-7.724,-9 -resnet34d,65.790,34.210,86.720,13.280,21.82,224,0.875,bicubic,-11.326,-6.662,+16 -xcit_tiny_12_p16_224,65.770,34.230,86.230,13.770,6.72,224,1.000,bicubic,-11.354,-7.482,+14 -eca_resnext26ts,65.770,34.230,85.840,14.160,10.30,256,0.900,bicubic,-11.688,-7.728,+1 -skresnet34,65.740,34.260,85.960,14.040,22.28,224,0.875,bicubic,-11.164,-7.360,+23 -mobilenetv3_large_100_miil,65.740,34.260,85.170,14.830,5.48,224,0.875,bilinear,-12.182,-7.750,-23 -tv_resnet101,65.690,34.310,85.980,14.020,44.55,224,0.875,bilinear,-11.690,-7.564,+1 -seresnext26ts,65.650,34.350,86.150,13.850,10.39,256,0.900,bicubic,-12.208,-7.640,-21 -hardcorenas_d,65.620,34.380,85.470,14.530,7.50,224,0.875,bilinear,-11.810,-8.014,-2 -selecsls42b,65.600,34.400,85.790,14.210,32.46,224,0.875,bicubic,-11.578,-7.602,+6 
-poolformer_s12,65.580,34.420,86.130,13.870,11.92,224,0.900,bicubic,-11.658,-7.376,+4 -tf_efficientnet_b0_ap,65.500,34.500,85.580,14.420,5.29,224,0.875,bicubic,-11.588,-7.678,+8 -seresnext26d_32x4d,65.410,34.590,85.960,14.040,16.81,224,0.875,bicubic,-12.196,-7.646,-15 -convmixer_1024_20_ks9_p14,65.410,34.590,85.590,14.410,24.38,224,0.960,bicubic,-11.532,-7.768,+12 -resnet26t,65.400,34.600,86.110,13.890,16.01,256,0.940,bicubic,-12.464,-7.732,-30 -tf_efficientnet_lite2,65.390,34.610,85.990,14.010,6.09,260,0.890,bicubic,-12.076,-7.768,-12 -res2net50_48w_2s,65.370,34.630,85.960,14.040,25.29,224,0.875,bilinear,-12.154,-7.590,-15 -densenetblur121d,65.290,34.710,85.700,14.300,8.00,224,0.875,bicubic,-11.290,-7.488,+20 -densenet201,65.290,34.710,85.670,14.330,20.01,224,0.875,bicubic,-11.998,-7.810,-7 -dla60,65.210,34.790,85.740,14.260,22.04,224,0.875,bilinear,-11.812,-7.580,+2 -crossvit_9_dagger_240,65.200,34.800,86.590,13.410,8.78,240,0.875,bicubic,-11.778,-7.024,+2 -ese_vovnet19b_dw,65.190,34.810,85.460,14.540,6.54,224,0.875,bicubic,-11.604,-7.806,+8 -tf_efficientnet_cc_b0_4e,65.140,34.860,85.160,14.840,13.31,224,0.875,bicubic,-12.170,-8.180,-13 -gernet_s,65.130,34.870,85.520,14.480,8.17,224,0.875,bilinear,-11.786,-7.614,+3 -legacy_seresnext26_32x4d,65.070,34.930,85.640,14.360,16.79,224,0.875,bicubic,-12.034,-7.676,-6 -mobilenetv2_120d,65.000,35.000,85.960,14.040,5.83,224,0.875,bicubic,-12.290,-7.540,-15 -hrnet_w18,64.920,35.080,85.740,14.260,21.30,224,0.875,bilinear,-11.840,-7.704,+5 -hardcorenas_c,64.880,35.120,85.260,14.740,5.52,224,0.875,bilinear,-12.172,-7.900,-7 -densenet169,64.750,35.250,85.260,14.740,14.15,224,0.875,bicubic,-11.154,-7.764,+22 -mixnet_m,64.710,35.290,85.450,14.550,5.01,224,0.875,bicubic,-12.552,-7.972,-16 -resnet26d,64.680,35.320,85.100,14.900,16.01,224,0.875,bicubic,-12.022,-8.052,+2 -resnext26ts,64.590,35.410,85.120,14.880,10.30,256,0.900,bicubic,-12.190,-8.012,-1 -levit_128s,64.590,35.410,84.730,15.270,7.78,224,0.900,bicubic,-11.924,-8.140,+7 -xcit_nano_12_p8_224_dist,64.550,35.450,85.990,14.010,3.05,224,1.000,bicubic,-11.778,-7.104,+9 -repvgg_a2,64.430,35.570,85.130,14.870,28.21,224,0.875,bilinear,-12.030,-7.880,+7 -xcit_nano_12_p16_384_dist,64.420,35.580,85.310,14.690,3.05,384,1.000,bicubic,-11.036,-7.380,+25 -hardcorenas_b,64.410,35.590,84.900,15.100,5.18,224,0.875,bilinear,-12.126,-7.854,+2 -tf_efficientnet_lite1,64.400,35.600,85.490,14.510,5.42,240,0.882,bicubic,-12.238,-7.734,-3 -regnetx_016,64.370,35.630,85.450,14.550,9.19,224,0.875,bicubic,-12.572,-7.974,-14 -resmlp_12_224,64.350,35.650,85.580,14.420,15.35,224,0.875,bicubic,-12.306,-7.600,-6 -tf_efficientnet_b0,64.320,35.680,85.280,14.720,5.29,224,0.875,bicubic,-12.520,-7.938,-12 -tf_mixnet_m,64.270,35.730,85.100,14.900,5.01,224,0.875,bicubic,-12.676,-8.052,-18 -dpn68,64.240,35.760,85.180,14.820,12.61,224,0.875,bicubic,-12.070,-7.798,+2 -tf_efficientnet_es,64.230,35.770,84.740,15.260,5.44,224,0.875,bicubic,-12.368,-8.464,-7 -regnety_008,64.140,35.860,85.280,14.720,6.26,224,0.875,bicubic,-12.174,-7.790,-1 -vit_small_patch32_224,64.070,35.930,85.570,14.430,22.88,224,0.900,bicubic,-11.920,-7.698,+2 -mobilenetv2_140,64.040,35.960,85.040,14.960,6.11,224,0.875,bicubic,-12.472,-7.958,-6 -densenet121,63.740,36.260,84.610,15.390,7.98,224,0.875,bicubic,-11.840,-8.038,+8 -hardcorenas_a,63.710,36.290,84.400,15.600,5.26,224,0.875,bilinear,-12.220,-8.110,+1 -resnest14d,63.610,36.390,84.250,15.750,10.61,224,0.875,bilinear,-11.898,-8.274,+8 -mobilevitv2_075,63.590,36.410,84.950,15.050,2.87,256,0.888,bicubic,-12.018,-7.808,+4 
-tf_mixnet_s,63.560,36.440,84.280,15.720,4.13,224,0.875,bicubic,-12.092,-8.346,+1 -resnet26,63.450,36.550,84.250,15.750,16.00,224,0.875,bicubic,-11.850,-8.330,+10 -mixnet_s,63.390,36.610,84.750,15.250,4.13,224,0.875,bicubic,-12.606,-8.050,-7 -mobilenetv3_large_100,63.360,36.640,84.060,15.940,5.48,224,0.875,bicubic,-12.416,-8.480,-3 -vit_tiny_r_s16_p8_384,63.340,36.660,85.270,14.730,6.36,384,1.000,bicubic,-12.612,-7.992,-7 -tv_resnet50,63.340,36.660,84.650,15.350,25.56,224,0.875,bilinear,-12.794,-8.218,-11 -efficientnet_es_pruned,63.300,36.700,84.960,15.040,5.44,224,0.875,bicubic,-11.700,-7.482,+13 -efficientnet_lite0,63.270,36.730,84.440,15.560,4.65,224,0.875,bicubic,-12.198,-8.076,0 -mixer_b16_224,63.250,36.750,83.310,16.690,59.88,224,0.875,bicubic,-13.360,-8.920,-24 -mobilenetv3_rw,63.240,36.760,84.500,15.500,5.48,224,0.875,bicubic,-12.394,-8.208,-7 -semnasnet_100,63.160,36.840,84.540,15.460,3.89,224,0.875,bicubic,-12.290,-8.060,0 -pit_ti_distilled_224,63.140,36.860,83.950,16.050,5.10,224,0.900,bicubic,-11.394,-8.146,+18 -vit_tiny_patch16_224,63.100,36.900,84.850,15.150,5.72,224,0.900,bicubic,-12.364,-7.994,-4 -regnety_006,63.090,36.910,84.260,15.740,6.06,224,0.875,bicubic,-12.162,-8.272,-1 -mobilevit_xs,62.930,37.070,84.830,15.170,2.32,256,0.900,bicubic,-11.704,-7.516,+12 -tv_densenet121,62.930,37.070,84.250,15.750,7.98,224,0.875,bicubic,-11.810,-7.898,+9 -resnet34,62.860,37.140,84.130,15.870,21.80,224,0.875,bilinear,-12.252,-8.154,-1 -mobilenetv2_110d,62.840,37.160,84.500,15.500,4.52,224,0.875,bicubic,-12.196,-7.692,0 -legacy_seresnet34,62.840,37.160,84.220,15.780,21.96,224,0.875,bilinear,-11.970,-7.906,+5 -hrnet_w18_small_v2,62.800,37.200,83.970,16.030,15.60,224,0.875,bilinear,-12.310,-8.446,-3 -deit_tiny_distilled_patch16_224,62.800,37.200,83.920,16.080,5.91,224,0.900,bicubic,-11.712,-7.970,+10 -swsl_resnet18,62.760,37.240,84.300,15.700,11.69,224,0.875,bilinear,-10.514,-7.436,+22 -tinynet_b,62.730,37.270,84.250,15.750,3.73,188,0.875,bicubic,-12.244,-7.932,-2 -repvgg_b0,62.730,37.270,83.880,16.120,15.82,224,0.875,bilinear,-12.424,-8.536,-10 -xcit_nano_12_p8_224,62.570,37.430,84.200,15.800,3.05,224,1.000,bicubic,-11.346,-7.968,+12 -gluon_resnet34_v1b,62.570,37.430,83.990,16.010,21.80,224,0.875,bicubic,-12.022,-7.998,+3 -tf_efficientnet_lite0,62.550,37.450,84.230,15.770,4.65,224,0.875,bicubic,-12.282,-7.944,-4 -regnetx_008,62.490,37.510,84.020,15.980,7.26,224,0.875,bicubic,-12.544,-8.320,-9 -dla34,62.470,37.530,83.900,16.100,15.74,224,0.875,bilinear,-12.154,-8.172,-1 -fbnetc_100,62.470,37.530,83.380,16.620,5.57,224,0.875,bilinear,-12.646,-9.006,-15 -tf_mobilenetv3_large_100,62.450,37.550,83.950,16.050,5.48,224,0.875,bilinear,-13.062,-8.656,-25 -crossvit_9_240,62.270,37.730,84.240,15.760,8.55,240,0.875,bicubic,-11.690,-7.724,+4 -edgenext_x_small,62.160,37.840,84.050,15.950,2.34,256,0.900,bicubic,-12.704,-8.250,-11 -crossvit_tiny_240,62.070,37.930,83.610,16.390,7.01,240,0.875,bicubic,-11.268,-8.304,+8 -mnasnet_100,61.920,38.080,83.690,16.310,4.38,224,0.875,bicubic,-12.730,-8.424,-9 -regnety_004,61.860,38.140,83.410,16.590,4.34,224,0.875,bicubic,-12.164,-8.346,-2 -vgg19_bn,61.850,38.150,83.450,16.550,143.68,224,0.875,bilinear,-12.364,-8.394,-5 -convit_tiny,61.560,38.440,84.120,15.880,5.71,224,0.875,bicubic,-11.554,-7.600,+7 -ssl_resnet18,61.470,38.530,83.300,16.700,11.69,224,0.875,bilinear,-11.134,-8.124,+11 -regnetx_006,61.360,38.640,83.460,16.540,6.20,224,0.875,bicubic,-12.496,-8.212,-2 -spnasnet_100,61.250,38.750,82.790,17.210,4.42,224,0.875,bilinear,-12.840,-9.026,-8 
-tv_resnet34,61.190,38.810,82.730,17.270,21.80,224,0.875,bilinear,-12.118,-8.694,+1 -pit_ti_224,60.970,39.030,83.860,16.140,4.85,224,0.900,bicubic,-11.942,-7.546,+6 -skresnet18,60.880,39.120,82.870,17.130,11.96,224,0.875,bicubic,-12.154,-8.296,+2 -ghostnet_100,60.830,39.170,82.370,17.630,5.18,224,0.875,bilinear,-13.150,-9.088,-10 -vgg16_bn,60.760,39.240,82.950,17.050,138.37,224,0.875,bilinear,-12.590,-8.554,-5 -semnasnet_075,60.700,39.300,82.510,17.490,2.91,224,0.875,bicubic,-12.274,-8.624,0 -tf_mobilenetv3_large_075,60.390,39.610,81.940,18.060,3.99,224,0.875,bilinear,-13.050,-9.408,-8 -xcit_nano_12_p16_224_dist,60.260,39.740,82.500,17.500,3.05,224,1.000,bicubic,-12.042,-8.362,+6 -mobilenetv2_100,60.190,39.810,82.220,17.780,3.50,224,0.875,bicubic,-12.766,-8.790,-2 -resnet18d,60.170,39.830,82.300,17.700,11.71,224,0.875,bicubic,-12.088,-8.388,+5 -vit_base_patch32_224_sam,60.010,39.990,81.240,18.760,88.22,224,0.900,bicubic,-13.682,-9.772,-13 -deit_tiny_patch16_224,59.850,40.150,82.680,17.320,5.72,224,0.900,bicubic,-12.324,-8.434,+5 -legacy_seresnet18,59.810,40.190,81.690,18.310,11.78,224,0.875,bicubic,-11.930,-8.640,+8 -vgg19,59.710,40.290,81.450,18.550,143.67,224,0.875,bilinear,-12.656,-9.422,-3 -regnetx_004,59.400,40.600,81.700,18.300,5.16,224,0.875,bicubic,-12.996,-9.138,-5 -vit_tiny_r_s16_p8_224,59.070,40.930,81.770,18.230,6.34,224,0.900,bicubic,-12.724,-9.048,+4 -tf_mobilenetv3_large_minimal_100,59.070,40.930,81.160,18.840,3.92,224,0.875,bilinear,-13.180,-9.460,-1 -vgg13_bn,59.000,41.000,81.080,18.920,133.05,224,0.875,bilinear,-12.598,-9.296,+4 -hrnet_w18_small,58.960,41.040,81.340,18.660,13.19,224,0.875,bilinear,-13.376,-9.340,-6 -lcnet_100,58.880,41.120,81.180,18.820,2.95,224,0.875,bicubic,-13.230,-9.198,-2 -vgg16,58.840,41.160,81.660,18.340,138.36,224,0.875,bilinear,-12.750,-8.722,+2 -xcit_nano_12_p16_224,58.340,41.660,80.880,19.120,3.05,224,1.000,bicubic,-11.614,-8.876,+8 -gluon_resnet18_v1b,58.330,41.670,80.970,19.030,11.69,224,0.875,bicubic,-12.508,-8.792,+3 -edgenext_xx_small,58.170,41.830,81.350,18.650,1.33,256,0.900,bicubic,-12.936,-8.682,+1 -tinynet_c,58.150,41.850,80.290,19.710,2.46,184,0.875,bicubic,-13.078,-9.458,-1 -resnet14t,57.800,42.200,79.910,20.090,10.08,224,0.950,bilinear,-14.556,-10.430,-14 -mobilevitv2_050,57.730,42.270,80.920,19.080,1.37,256,0.888,bicubic,-12.410,-9.010,+2 +vit_base_patch16_clip_224.openai_ft_in12k_in1k,75.500,24.500,92.760,7.240,86.57,224,0.950,bicubic,-10.430,-4.964,-31 +regnetz_e8,75.490,24.510,92.690,7.310,57.70,320,1.000,bicubic,-9.540,-4.574,+21 +cait_s24_384,75.480,24.520,92.600,7.400,47.06,384,1.000,bicubic,-9.566,-4.746,+19 +xcit_medium_24_p8_224_dist,75.470,24.530,92.900,7.100,84.32,224,1.000,bicubic,-9.602,-4.354,+15 +swsl_resnext101_32x8d,75.430,24.570,92.760,7.240,88.79,224,0.875,bilinear,-8.854,-4.416,+64 +vit_base_patch16_224.augreg2_in21k_ft_in1k,75.410,24.590,93.230,6.770,86.57,224,0.900,bicubic,-9.696,-4.150,+8 +vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,75.410,24.590,92.710,7.290,88.34,448,1.000,bicubic,-10.374,-4.924,-29 +tf_efficientnetv2_m.in1k,75.390,24.610,92.760,7.240,54.14,480,1.000,bicubic,-9.818,-4.608,+1 +tf_efficientnet_b6.ap_in1k,75.380,24.620,92.440,7.560,43.04,528,0.942,bicubic,-9.408,-4.698,+27 +beit_base_patch16_224.in22k_ft_in22k_in1k,75.370,24.630,93.040,6.960,86.53,224,0.900,bicubic,-9.866,-4.616,-3 +volo_d2_224,75.300,24.700,92.510,7.490,58.68,224,0.960,bicubic,-9.896,-4.678,0 +mvitv2_large,75.280,24.720,92.340,7.660,217.99,224,0.900,bicubic,-9.970,-4.874,-6 
+dm_nfnet_f3,75.210,24.790,92.940,7.060,254.92,416,0.940,bicubic,-10.312,-4.522,-23 +efficientnetv2_rw_m.agc_in1k,75.170,24.830,92.570,7.430,53.24,416,1.000,bicubic,-9.638,-4.578,+21 +convnext_small.fb_in22k_ft_in1k_384,75.150,24.850,93.050,6.950,50.22,384,1.000,bicubic,-10.628,-4.842,-36 +deit3_large_patch16_224,75.140,24.860,92.280,7.720,304.37,224,0.900,bicubic,-9.624,-4.758,+21 +ecaresnet269d,75.120,24.880,92.840,7.160,102.09,352,1.000,bicubic,-9.856,-4.386,+8 +vit_base_patch16_clip_224.laion2b_ft_in1k,75.120,24.880,92.710,7.290,86.57,224,1.000,bicubic,-10.348,-4.866,-27 +xcit_medium_24_p16_384_dist,75.120,24.880,92.440,7.560,84.40,384,1.000,bicubic,-10.292,-4.966,-24 +deit3_small_patch16_384_in21ft1k,75.090,24.910,92.790,7.210,22.21,384,1.000,bicubic,-9.734,-4.696,+14 +vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,75.080,24.920,92.590,7.410,88.30,384,1.000,bicubic,-10.292,-5.074,-23 +maxvit_tiny_tf_384.in1k,75.010,24.990,92.470,7.530,30.98,384,1.000,bicubic,-10.096,-5.064,-7 +dm_nfnet_f5,75.000,25.000,92.600,7.400,377.21,544,0.954,bicubic,-10.814,-4.888,-48 +xcit_small_24_p8_224_dist,74.980,25.020,92.310,7.690,47.63,224,1.000,bicubic,-9.896,-4.878,+7 +tf_efficientnet_b8.ra_in1k,74.940,25.060,92.310,7.690,87.41,672,0.954,bicubic,-10.430,-4.984,-26 +xcit_small_12_p8_384_dist,74.880,25.120,92.460,7.540,26.21,384,1.000,bicubic,-10.208,-4.822,-8 +eca_nfnet_l2,74.830,25.170,92.650,7.350,56.72,384,1.000,bicubic,-9.868,-4.614,+13 +deit3_base_patch16_384,74.790,25.210,92.240,7.760,86.88,384,1.000,bicubic,-10.282,-5.038,-8 +tf_efficientnet_b7.ra_in1k,74.720,25.280,92.220,7.780,66.35,600,0.949,bicubic,-10.216,-4.984,-2 +deit3_medium_patch16_224_in21ft1k,74.680,25.320,92.480,7.520,38.85,224,1.000,bicubic,-9.880,-4.708,+17 +xcit_large_24_p16_224_dist,74.670,25.330,91.860,8.140,189.10,224,1.000,bicubic,-10.248,-5.272,-2 +convnext_large.fb_in1k,74.640,25.360,91.970,8.030,197.77,288,1.000,bicubic,-10.206,-5.242,+1 +dm_nfnet_f2,74.620,25.380,92.260,7.740,193.78,352,0.920,bicubic,-10.444,-4.980,-12 +xcit_small_24_p16_384_dist,74.610,25.390,92.450,7.550,47.67,384,1.000,bicubic,-10.488,-4.860,-18 +tf_efficientnet_b5.ap_in1k,74.600,25.400,91.990,8.010,30.39,456,0.934,bicubic,-9.652,-4.984,+38 +vit_medium_patch16_gap_256.in12k_ft_in1k,74.580,25.420,91.950,8.050,38.86,256,0.950,bicubic,-9.850,-5.262,+24 +maxvit_large_tf_224.in1k,74.580,25.420,91.700,8.300,211.79,224,0.950,bicubic,-10.346,-5.272,-8 +swin_base_patch4_window7_224,74.570,25.430,92.560,7.440,87.77,224,0.900,bicubic,-10.682,-5.002,-35 +dm_nfnet_f1,74.570,25.430,92.260,7.740,132.63,320,0.910,bicubic,-10.056,-4.840,+5 +maxxvit_rmlp_small_rw_256,74.530,25.470,91.980,8.020,66.01,256,0.950,bicubic,-10.098,-5.082,+2 +convnext_small.fb_in22k_ft_in1k,74.510,25.490,92.700,7.300,50.22,288,1.000,bicubic,-10.752,-4.984,-38 +seresnet152d,74.510,25.490,92.080,7.920,66.84,320,1.000,bicubic,-9.852,-4.960,+25 +gcvit_base,74.500,25.500,91.750,8.250,90.32,224,0.875,bicubic,-9.948,-5.332,+12 +resnest200e,74.480,25.520,91.860,8.140,70.20,320,0.909,bicubic,-9.352,-5.034,+62 +regnetz_040,74.460,25.540,91.890,8.110,27.12,320,1.000,bicubic,-9.776,-5.042,+29 +tf_efficientnetv2_s.in21k_ft_in1k,74.450,25.550,92.510,7.490,21.46,384,1.000,bicubic,-9.852,-4.742,+22 +regnetz_040h,74.430,25.570,92.240,7.760,28.94,320,1.000,bicubic,-10.064,-4.766,+5 +resnetrs200,74.370,25.630,91.940,8.060,93.21,320,1.000,bicubic,-10.078,-4.904,+6 +maxvit_rmlp_small_rw_224,74.360,25.640,91.410,8.590,64.90,224,0.900,bicubic,-10.124,-5.352,+4 
+flexivit_base.1200ep_in1k,74.350,25.650,91.800,8.200,86.59,240,0.950,bicubic,-10.314,-5.192,-9 +maxvit_base_tf_224.in1k,74.330,25.670,91.750,8.250,119.47,224,0.950,bicubic,-10.530,-5.240,-19 +seresnextaa101d_32x8d,74.330,25.670,91.720,8.280,93.59,288,1.000,bicubic,-10.238,-5.350,-6 +mvitv2_base,74.240,25.760,91.620,8.380,51.47,224,0.900,bicubic,-10.182,-5.244,+9 +seresnext101d_32x8d,74.210,25.790,91.860,8.140,93.59,288,1.000,bicubic,-10.160,-5.056,+12 +coatnet_rmlp_2_rw_224,74.200,25.800,91.280,8.720,73.88,224,0.950,bicubic,-10.400,-5.456,-11 +resnest269e,74.170,25.830,91.950,8.050,110.93,416,0.928,bicubic,-10.348,-4.986,-8 +efficientnetv2_rw_s.ra2_in1k,74.170,25.830,91.710,8.290,23.94,384,1.000,bicubic,-9.638,-5.014,+52 +cait_xs24_384,74.160,25.840,91.910,8.090,26.67,384,1.000,bicubic,-9.902,-4.978,+28 +pit_b_distilled_224,74.160,25.840,91.680,8.320,74.79,224,0.900,bicubic,-9.984,-5.176,+24 +swsl_resnext101_32x4d,74.140,25.860,91.990,8.010,44.18,224,0.875,bilinear,-9.090,-4.770,+86 +flexivit_base.300ep_in1k,74.140,25.860,91.380,8.620,86.59,240,0.950,bicubic,-10.254,-5.740,+4 +vit_base_patch16_384.orig_in21k_ft_in1k,74.130,25.870,92.360,7.640,86.86,384,1.000,bicubic,-10.080,-4.858,+15 +eca_nfnet_l1,74.120,25.880,92.070,7.930,41.41,320,1.000,bicubic,-9.890,-4.958,+29 +xcit_small_12_p16_384_dist,74.120,25.880,92.070,7.930,26.25,384,1.000,bicubic,-10.586,-5.048,-26 +convnext_base.fb_in1k,74.120,25.880,91.730,8.270,88.59,288,1.000,bicubic,-10.314,-5.090,-5 +volo_d1_224,74.110,25.890,92.030,7.970,26.63,224,0.960,bicubic,-10.054,-4.746,+15 +flexivit_base.600ep_in1k,74.110,25.890,91.750,8.250,86.59,240,0.950,bicubic,-10.408,-5.236,-16 +vit_large_r50_s32_224.augreg_in21k_ft_in1k,74.100,25.900,92.390,7.610,328.99,224,0.900,bicubic,-10.334,-4.582,-11 +xcit_large_24_p8_224,74.060,25.940,90.890,9.110,188.93,224,1.000,bicubic,-10.332,-5.766,-4 +vit_base_patch16_224_miil.in21k_ft_in1k,74.040,25.960,91.700,8.300,86.54,224,0.875,bilinear,-10.228,-5.102,+1 +vit_base_patch32_clip_384.openai_ft_in12k_in1k,74.030,25.970,92.440,7.560,88.30,384,0.950,bicubic,-11.182,-4.962,-63 +swsl_resnext101_32x16d,74.020,25.980,92.160,7.840,194.03,224,0.875,bilinear,-9.326,-4.686,+71 +resnetv2_152x4_bitm,74.010,25.990,92.340,7.660,936.53,480,1.000,bilinear,-10.906,-5.100,-43 +vit_base_patch16_224.augreg_in21k_ft_in1k,74.000,26.000,92.470,7.530,86.57,224,0.900,bicubic,-10.532,-4.824,-28 +swinv2_base_window16_256,74.000,26.000,91.740,8.260,87.92,256,0.900,bicubic,-10.594,-5.334,-29 +tf_efficientnetv2_s.in1k,74.000,26.000,91.530,8.470,21.46,384,1.000,bicubic,-9.894,-5.168,+24 +regnetz_d32,73.970,26.030,91.950,8.050,27.58,320,0.950,bicubic,-10.052,-4.916,+14 +crossvit_18_dagger_408,73.960,26.040,91.420,8.580,44.61,408,1.000,bicubic,-10.236,-5.398,+1 +seresnext101_32x8d,73.940,26.060,91.450,8.550,93.57,288,1.000,bicubic,-10.252,-5.424,+1 +xcit_small_12_p8_224_dist,73.930,26.070,91.720,8.280,26.21,224,1.000,bicubic,-10.302,-5.058,-5 +resnetv2_152x2_bitm,73.920,26.080,92.670,7.330,236.34,448,1.000,bilinear,-10.590,-4.762,-31 +resnetrs420,73.920,26.080,91.760,8.240,191.89,416,1.000,bicubic,-11.088,-5.364,-57 +tf_efficientnet_b6.aa_in1k,73.900,26.100,91.750,8.250,43.04,528,0.942,bicubic,-10.210,-5.136,+1 +tf_efficientnet_b3.ns_jft_in1k,73.890,26.110,91.870,8.130,12.23,300,0.904,bicubic,-10.158,-5.040,+5 +edgenext_base,73.880,26.120,91.770,8.230,18.51,320,1.000,bicubic,-10.080,-4.998,+10 +resmlp_big_24_224_in22ft1k,73.880,26.120,91.750,8.250,129.14,224,0.875,bicubic,-10.514,-5.132,-23 
+deit3_small_patch16_224_in21ft1k,73.850,26.150,91.960,8.040,22.06,224,1.000,bicubic,-9.220,-4.820,+74 +vit_small_r26_s32_384.augreg_in21k_ft_in1k,73.800,26.200,92.300,7.700,36.47,384,1.000,bicubic,-10.246,-5.028,+1 +maxvit_rmlp_tiny_rw_256,73.800,26.200,91.440,8.560,29.15,256,0.950,bicubic,-10.432,-5.436,-12 +maxvit_small_tf_224.in1k,73.780,26.220,91.440,8.560,68.93,224,0.950,bicubic,-10.654,-5.724,-30 +regnetz_d8,73.760,26.240,92.010,7.990,23.37,320,1.000,bicubic,-10.290,-4.988,-5 +regnety_080,73.760,26.240,91.800,8.200,39.18,288,1.000,bicubic,-10.172,-5.088,+5 +resnetrs270,73.710,26.290,91.580,8.420,129.86,352,1.000,bicubic,-10.724,-5.390,-34 +gcvit_small,73.690,26.310,91.240,8.760,51.09,224,0.875,bicubic,-10.194,-5.418,+6 +resnet200d,73.680,26.320,91.570,8.430,64.69,320,1.000,bicubic,-10.282,-5.254,-1 +resnetv2_101x3_bitm,73.670,26.330,92.470,7.530,387.93,448,1.000,bilinear,-10.770,-4.912,-40 +ig_resnext101_32x8d,73.650,26.350,92.190,7.810,88.79,224,0.875,bilinear,-9.038,-4.446,+88 +xcit_medium_24_p16_224_dist,73.650,26.350,91.570,8.430,84.40,224,1.000,bicubic,-10.624,-5.370,-28 +regnety_064,73.580,26.420,91.340,8.660,30.58,288,1.000,bicubic,-10.136,-5.334,+17 +tresnet_v2_l,73.570,26.430,90.960,9.040,46.17,224,0.875,bilinear,-10.332,-5.532,-2 +tf_efficientnet_b5.ra_in1k,73.550,26.450,91.460,8.540,30.39,456,0.934,bicubic,-10.262,-5.288,+6 +swinv2_base_window8_256,73.530,26.470,91.530,8.470,87.92,256,0.900,bicubic,-10.732,-5.392,-30 +resnet152d,73.520,26.480,91.230,8.770,60.21,320,1.000,bicubic,-10.160,-5.508,+19 +cs3se_edgenet_x,73.510,26.490,91.500,8.500,50.72,320,1.000,bicubic,-10.038,-5.170,+23 +regnetv_064,73.480,26.520,91.590,8.410,30.58,288,1.000,bicubic,-10.232,-5.158,+12 +convnext_small.fb_in1k,73.480,26.520,91.330,8.670,50.22,288,1.000,bicubic,-10.226,-5.480,+15 +deit3_base_patch16_224,73.470,26.530,91.290,8.710,86.59,224,0.900,bicubic,-10.322,-5.294,+3 +sequencer2d_l,73.470,26.530,91.100,8.900,54.30,224,0.875,bicubic,-9.936,-5.406,+25 +xcit_tiny_24_p8_384_dist,73.400,26.600,91.570,8.430,12.11,384,1.000,bicubic,-10.340,-5.064,+3 +resnetrs350,73.400,26.600,91.310,8.690,163.96,384,1.000,bicubic,-11.320,-5.678,-71 +twins_svt_large,73.390,26.610,90.910,9.090,99.27,224,0.900,bicubic,-10.288,-5.684,+12 +regnetz_d8_evos,73.380,26.620,91.640,8.360,23.46,320,0.950,bicubic,-10.670,-5.354,-25 +pvt_v2_b4,73.380,26.620,91.080,8.920,62.56,224,0.900,bicubic,-10.336,-5.640,+5 +regnety_160,73.360,26.640,91.690,8.310,83.59,288,1.000,bicubic,-10.326,-5.086,+7 +efficientnet_b4.ra2_in1k,73.320,26.680,91.280,8.720,19.34,384,1.000,bicubic,-10.108,-5.316,+16 +swin_s3_base_224,73.320,26.680,91.180,8.820,71.13,224,0.900,bicubic,-10.610,-5.482,-19 +vit_small_patch16_384.augreg_in21k_ft_in1k,73.290,26.710,91.990,8.010,22.20,384,1.000,bicubic,-10.512,-5.112,-9 +xcit_small_24_p16_224_dist,73.290,26.710,91.450,8.550,47.67,224,1.000,bicubic,-10.572,-5.278,-18 +resmlp_big_24_distilled_224,73.290,26.710,91.160,8.840,129.14,224,0.875,bicubic,-10.298,-5.488,+8 +swinv2_small_window16_256,73.280,26.720,91.270,8.730,49.73,256,0.900,bicubic,-10.926,-5.600,-42 +mvitv2_small,73.270,26.730,91.200,8.800,34.87,224,0.900,bicubic,-10.498,-5.370,-10 +gcvit_tiny,73.270,26.730,90.970,9.030,28.22,224,0.875,bicubic,-10.130,-5.428,+13 +pvt_v2_b5,73.240,26.760,91.090,8.910,81.96,224,0.900,bicubic,-10.500,-5.622,-10 +deit_base_distilled_patch16_224,73.240,26.760,91.000,9.000,87.34,224,0.900,bicubic,-10.148,-5.488,+14 +pvt_v2_b3,73.210,26.790,91.010,8.990,45.24,224,0.900,bicubic,-9.916,-5.546,+31 
+resnetrs152,73.200,26.800,91.260,8.740,86.62,320,1.000,bicubic,-10.512,-5.354,-8 +maxvit_tiny_rw_224,73.200,26.800,90.820,9.180,29.06,224,0.950,bicubic,-10.304,-5.682,+2 +xcit_medium_24_p8_224,73.140,26.860,90.270,9.730,84.32,224,1.000,bicubic,-10.594,-6.124,-13 +vit_base_patch32_384.augreg_in21k_ft_in1k,73.130,26.870,91.240,8.760,88.30,384,1.000,bicubic,-10.220,-5.596,+12 +jx_nest_base,73.110,26.890,91.070,8.930,67.72,224,0.875,bicubic,-10.442,-5.300,-4 +swinv2_small_window8_256,73.110,26.890,90.940,9.060,49.73,256,0.900,bicubic,-10.746,-5.700,-29 +xcit_small_24_p8_224,73.090,26.910,91.150,8.850,47.63,224,1.000,bicubic,-10.748,-5.486,-27 +deit3_small_patch16_384,73.080,26.920,91.230,8.770,22.21,384,1.000,bicubic,-10.346,-5.446,-1 +cait_s24_224,73.070,26.930,91.130,8.870,46.92,224,1.000,bicubic,-10.382,-5.434,-4 +coatnet_rmlp_1_rw_224,73.020,26.980,90.890,9.110,41.69,224,0.950,bicubic,-10.338,-5.566,+5 +crossvit_15_dagger_408,72.950,27.050,91.080,8.920,28.50,408,1.000,bicubic,-10.888,-5.702,-32 +maxvit_tiny_tf_224.in1k,72.910,27.090,90.810,9.190,30.92,224,0.950,bicubic,-10.488,-5.778,-3 +coatnet_1_rw_224,72.910,27.090,90.790,9.210,41.72,224,0.950,bicubic,-10.698,-5.598,-13 +resnetv2_152x2_bit_teacher_384,72.890,27.110,91.550,8.450,236.34,384,1.000,bicubic,-10.954,-5.568,-36 +tf_efficientnet_b4.ap_in1k,72.890,27.110,90.980,9.020,19.34,380,0.922,bicubic,-10.358,-5.412,+6 +regnetv_040,72.880,27.120,91.100,8.900,20.64,288,1.000,bicubic,-10.314,-5.560,+7 +dm_nfnet_f0,72.880,27.120,91.080,8.920,71.49,256,0.900,bicubic,-10.506,-5.492,-3 +swinv2_cr_small_ns_224,72.800,27.200,90.800,9.200,49.70,224,0.900,bicubic,-10.688,-5.686,-14 +xception65p,72.790,27.210,90.910,9.090,39.82,299,0.940,bicubic,-10.340,-5.570,+11 +regnety_032,72.770,27.230,90.950,9.050,19.44,288,1.000,bicubic,-9.954,-5.474,+36 +regnety_040,72.710,27.290,90.720,9.280,20.65,288,1.000,bicubic,-10.328,-5.790,+17 +efficientformer_l7,72.690,27.310,90.810,9.190,82.23,224,0.950,bicubic,-10.696,-5.730,-8 +swin_s3_small_224,72.670,27.330,90.560,9.440,49.74,224,0.900,bicubic,-11.100,-5.890,-37 +nfnet_l0,72.610,27.390,91.010,8.990,35.07,288,1.000,bicubic,-10.140,-5.506,+31 +resnext101_64x4d,72.610,27.390,90.840,9.160,83.46,288,1.000,bicubic,-10.538,-5.532,0 +xcit_small_12_p8_224,72.610,27.390,90.680,9.320,26.21,224,1.000,bicubic,-10.734,-5.800,-8 +pnasnet5large,72.610,27.390,90.510,9.490,86.06,331,0.911,bicubic,-10.172,-5.530,+28 +xception65,72.600,27.400,90.830,9.170,39.92,299,0.940,bicubic,-10.580,-5.762,-3 +cs3sedarknet_x,72.580,27.420,91.050,8.950,35.40,288,1.000,bicubic,-10.074,-5.304,+33 +twins_pcpvt_large,72.580,27.420,90.700,9.300,60.99,224,0.900,bicubic,-10.560,-5.898,-2 +resnest101e,72.570,27.430,90.820,9.180,48.28,256,0.875,bilinear,-10.320,-5.500,+15 +swsl_resnext50_32x4d,72.560,27.440,90.870,9.130,25.03,224,0.875,bilinear,-9.622,-5.360,+80 +twins_svt_base,72.550,27.450,90.460,9.540,56.07,224,0.900,bicubic,-10.586,-5.958,-5 +tresnet_xl_448,72.550,27.450,90.310,9.690,78.44,448,0.875,bilinear,-10.500,-5.864,+4 +resnetv2_50x3_bitm,72.530,27.470,91.760,8.240,217.32,448,1.000,bilinear,-11.484,-5.364,-70 +gc_efficientnetv2_rw_t.agc_in1k,72.530,27.470,90.820,9.180,13.68,288,1.000,bicubic,-9.934,-5.478,+46 +deit_base_patch16_384,72.530,27.470,90.250,9.750,86.86,384,1.000,bicubic,-10.576,-6.122,-2 +deit3_medium_patch16_224,72.520,27.480,90.780,9.220,38.85,224,0.900,bicubic,-10.560,-5.512,-4 +xcit_small_12_p16_224_dist,72.500,27.500,91.120,8.880,26.25,224,1.000,bicubic,-10.850,-5.294,-23 
+vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,72.500,27.500,90.870,9.130,88.22,224,0.900,bicubic,-10.806,-5.660,-19 +convnext_tiny.fb_in22k_ft_in1k_384,72.490,27.510,91.540,8.460,28.59,384,1.000,bicubic,-11.590,-5.602,-81 +xcit_tiny_24_p8_224_dist,72.440,27.560,90.920,9.080,12.11,224,1.000,bicubic,-10.122,-5.146,+31 +resnet101d,72.410,27.590,90.650,9.350,44.57,320,1.000,bicubic,-10.612,-5.796,-3 +sequencer2d_m,72.400,27.600,90.690,9.310,38.31,224,0.875,bicubic,-10.406,-5.578,+8 +maxxvit_rmlp_nano_rw_256,72.380,27.620,90.750,9.250,16.78,256,0.950,bicubic,-10.650,-5.594,-8 +jx_nest_small,72.380,27.620,90.690,9.310,38.35,224,0.875,bicubic,-10.740,-5.638,-13 +maxvit_rmlp_nano_rw_256,72.380,27.620,90.450,9.550,15.50,256,0.950,bicubic,-10.582,-5.820,-4 +regnetz_c16,72.320,27.680,90.820,9.180,13.46,320,0.940,bicubic,-10.198,-5.252,+30 +tf_efficientnet_b4.aa_in1k,72.290,27.710,90.590,9.410,19.34,380,0.922,bicubic,-10.732,-5.710,-8 +tf_efficientnet_b2.ns_jft_in1k,72.280,27.720,91.090,8.910,9.11,260,0.890,bicubic,-10.100,-5.158,+39 +maxvit_nano_rw_256,72.280,27.720,90.550,9.450,15.45,256,0.950,bicubic,-10.652,-5.672,-6 +tresnet_m,72.270,27.730,90.240,9.760,31.39,224,0.875,bilinear,-10.810,-5.878,-17 +resnetv2_50x1_bit_distilled,72.260,27.740,91.010,8.990,25.55,224,0.875,bicubic,-10.558,-5.512,-3 +crossvit_18_240,72.250,27.750,90.270,9.730,43.27,240,0.875,bicubic,-10.150,-5.784,+32 +regnetz_c16_evos,72.240,27.760,91.230,8.770,13.49,320,0.950,bicubic,-10.390,-5.244,+8 +efficientnetv2_rw_t.ra2_in1k,72.240,27.760,90.420,9.580,13.65,288,1.000,bicubic,-10.108,-5.776,+38 +nasnetalarge,72.230,27.770,90.470,9.530,88.75,331,0.911,bicubic,-10.390,-5.576,+8 +cait_xxs36_384,72.190,27.810,90.840,9.160,17.37,384,1.000,bicubic,-10.004,-5.308,+53 +twins_pcpvt_base,72.180,27.820,90.510,9.490,43.83,224,0.900,bicubic,-10.528,-5.836,-2 +crossvit_18_dagger_240,72.150,27.850,90.070,9.930,44.27,240,0.875,bicubic,-10.368,-6.290,+19 +xcit_tiny_24_p16_384_dist,72.080,27.920,90.580,9.420,12.12,384,1.000,bicubic,-10.490,-5.706,+11 +resnet152,72.070,27.930,90.350,9.650,60.19,224,0.950,bicubic,-10.752,-5.776,-13 +cs3edgenet_x,72.050,27.950,90.370,9.630,47.82,288,1.000,bicubic,-10.652,-6.000,-5 +vit_relpos_base_patch16_clsgap_224.sw_in1k,72.010,27.990,90.250,9.750,86.43,224,0.900,bicubic,-10.752,-5.924,-10 +mobilevitv2_200_384_in22ft1k,72.000,28.000,90.630,9.370,18.45,384,1.000,bicubic,-11.394,-5.950,-55 +efficientformer_l3,72.000,28.000,90.280,9.720,31.41,224,0.950,bicubic,-10.550,-5.968,+10 +vit_relpos_medium_patch16_cls_224.sw_in1k,71.980,28.020,90.290,9.710,38.76,224,0.900,bicubic,-10.582,-5.880,+5 +convnext_tiny.fb_in1k,71.980,28.020,90.210,9.790,28.59,288,1.000,bicubic,-10.720,-5.926,-8 +convnext_tiny_hnf.a2h_in1k,71.980,28.020,89.770,10.230,28.59,288,1.000,bicubic,-10.610,-6.246,-1 +sequencer2d_s,71.940,28.060,90.480,9.520,27.65,224,0.875,bicubic,-10.402,-5.550,+25 +convnext_nano.in12k_ft_in1k,71.890,28.110,91.000,9.000,15.59,288,1.000,bicubic,-10.968,-5.556,-23 +swinv2_cr_small_224,71.890,28.110,90.270,9.730,49.70,224,0.900,bicubic,-11.256,-5.824,-45 +eca_nfnet_l0,71.840,28.160,91.110,8.890,24.14,288,1.000,bicubic,-10.740,-5.380,-3 +vit_relpos_base_patch16_224.sw_in1k,71.830,28.170,90.260,9.740,86.43,224,0.900,bicubic,-10.654,-5.882,+6 +mobilevitv2_175_384_in22ft1k,71.810,28.190,90.780,9.220,14.25,384,1.000,bicubic,-11.132,-5.646,-32 +flexivit_small.1200ep_in1k,71.740,28.260,90.270,9.730,22.06,240,0.950,bicubic,-10.786,-5.866,0 
+swin_small_patch4_window7_224,71.740,28.260,90.240,9.760,49.61,224,0.900,bicubic,-11.472,-6.082,-54 +mvitv2_tiny,71.720,28.280,90.300,9.700,24.17,224,0.900,bicubic,-10.684,-5.856,+6 +flexivit_small.300ep_in1k,71.720,28.280,89.970,10.030,22.06,240,0.950,bicubic,-10.452,-6.054,+36 +swsl_resnet50,71.700,28.300,90.500,9.500,25.56,224,0.875,bilinear,-9.466,-4.596,+99 +pvt_v2_b2_li,71.700,28.300,90.010,9.990,22.55,224,0.900,bicubic,-10.496,-6.094,+29 +pit_b_224,71.700,28.300,89.250,10.750,73.76,224,0.900,bicubic,-10.746,-6.460,+4 +xcit_large_24_p16_224,71.700,28.300,89.170,10.830,189.10,224,1.000,bicubic,-11.196,-6.712,-38 +coatnet_bn_0_rw_224,71.690,28.310,90.380,9.620,27.44,224,0.950,bicubic,-10.708,-5.802,+3 +flexivit_small.600ep_in1k,71.690,28.310,90.160,9.840,22.06,240,0.950,bicubic,-10.664,-5.926,+7 +resnet61q,71.680,28.320,90.270,9.730,36.85,288,1.000,bicubic,-10.844,-5.860,-9 +gcvit_xtiny,71.660,28.340,90.230,9.770,19.98,224,0.875,bicubic,-10.292,-5.736,+43 +tresnet_xl,71.660,28.340,89.630,10.370,78.44,224,0.875,bilinear,-10.394,-6.306,+33 +convit_base,71.600,28.400,90.150,9.850,86.54,224,0.875,bicubic,-10.688,-5.858,+14 +tresnet_l_448,71.600,28.400,90.050,9.950,55.99,448,0.875,bilinear,-10.668,-5.926,+14 +xcit_tiny_12_p8_384_dist,71.580,28.420,90.710,9.290,6.71,384,1.000,bicubic,-10.808,-5.514,-2 +swinv2_tiny_window16_256,71.570,28.430,90.340,9.660,28.35,256,0.900,bicubic,-11.240,-5.892,-41 +poolformer_m48,71.550,28.450,89.760,10.240,73.47,224,0.950,bicubic,-10.912,-6.198,-10 +coatnet_0_rw_224,71.540,28.460,89.420,10.580,27.44,224,0.950,bicubic,-10.850,-6.416,-6 +crossvit_15_dagger_240,71.520,28.480,89.860,10.140,28.21,240,0.875,bicubic,-10.812,-6.658,+2 +ssl_resnext101_32x8d,71.500,28.500,90.460,9.540,88.79,224,0.875,bilinear,-10.116,-5.578,+54 +fbnetv3_g.ra2_in1k,71.500,28.500,90.370,9.630,16.62,288,0.950,bilinear,-10.548,-5.694,+26 +mobilevitv2_150_384_in22ft1k,71.490,28.510,90.420,9.580,10.59,384,1.000,bicubic,-11.104,-5.898,-33 +ecaresnet101d,71.490,28.510,90.330,9.670,44.57,224,0.875,bicubic,-10.682,-5.716,+15 +efficientnet_b3.ra2_in1k,71.480,28.520,90.060,9.940,12.23,320,1.000,bicubic,-10.762,-6.054,+5 +resnet51q,71.430,28.570,90.180,9.820,35.70,288,1.000,bilinear,-10.930,-6.000,-11 +pvt_v2_b2,71.430,28.570,90.040,9.960,25.36,224,0.900,bicubic,-10.646,-5.922,+17 +ssl_resnext101_32x16d,71.410,28.590,90.560,9.440,194.03,224,0.875,bilinear,-10.434,-5.536,+34 +pit_s_distilled_224,71.380,28.620,89.780,10.220,24.04,224,0.900,bicubic,-10.616,-6.018,+21 +vit_relpos_medium_patch16_224.sw_in1k,71.350,28.650,89.950,10.050,38.75,224,0.900,bicubic,-11.116,-6.138,-24 +xcit_tiny_24_p8_224,71.340,28.660,90.240,9.760,12.11,224,1.000,bicubic,-10.560,-5.736,+29 +vit_base_patch16_224.orig_in21k_ft_in1k,71.330,28.670,90.460,9.540,86.57,224,0.900,bicubic,-10.456,-5.662,+35 +tf_efficientnetv2_b3.in21k_ft_in1k,71.310,28.690,90.760,9.240,14.36,300,0.900,bicubic,-11.362,-5.864,-47 +mixer_b16_224_miil,71.300,28.700,89.650,10.350,59.88,224,0.875,bilinear,-11.008,-6.066,-9 +resnetv2_152x2_bit_teacher,71.290,28.710,90.430,9.570,236.34,224,0.875,bicubic,-11.572,-6.138,-63 +ecaresnet50t,71.280,28.720,90.420,9.580,25.57,320,0.950,bicubic,-11.066,-5.718,-17 +resnetv2_101,71.270,28.730,89.920,10.080,44.54,224,0.950,bicubic,-10.760,-5.940,+11 +convmixer_1536_20,71.240,28.760,89.430,10.570,51.63,224,0.960,bicubic,-10.136,-6.184,+59 +vit_base_patch32_clip_224.laion2b_ft_in1k,71.200,28.800,90.200,9.800,88.22,224,0.900,bicubic,-11.382,-6.002,-45 
+xcit_small_12_p16_224,71.200,28.800,89.750,10.250,26.25,224,1.000,bicubic,-10.774,-6.066,+12 +crossvit_base_240,71.190,28.810,89.840,10.160,105.03,240,0.875,bicubic,-11.026,-5.990,-8 +vit_relpos_medium_patch16_rpn_224.sw_in1k,71.170,28.830,90.080,9.920,38.73,224,0.900,bicubic,-11.128,-5.894,-17 +deit_base_patch16_224,71.170,28.830,89.200,10.800,86.57,224,0.900,bicubic,-10.828,-6.534,+7 +mobilevitv2_200_in22ft1k,71.140,28.860,89.700,10.300,18.45,256,0.888,bicubic,-11.184,-6.240,-21 +swin_s3_tiny_224,71.130,28.870,89.720,10.280,28.33,224,0.900,bicubic,-10.992,-6.228,-5 +resnetv2_50d_evos,71.110,28.890,90.030,9.970,25.59,288,0.950,bicubic,-10.866,-5.886,+5 +halo2botnet50ts_256,71.100,28.900,89.610,10.390,22.64,256,0.950,bicubic,-10.960,-6.026,-2 +cs3sedarknet_l,71.080,28.920,90.330,9.670,21.91,288,0.950,bicubic,-10.694,-5.638,+20 +cs3darknet_x,71.070,28.930,90.120,9.880,35.05,288,1.000,bicubic,-11.158,-6.114,-17 +xcit_tiny_12_p8_224_dist,71.040,28.960,89.880,10.120,6.71,224,1.000,bicubic,-10.172,-5.720,+54 +xcit_medium_24_p16_224,71.020,28.980,89.520,10.480,84.40,224,1.000,bicubic,-11.616,-6.456,-63 +resnetv2_101x1_bitm,71.010,28.990,91.090,8.910,44.54,448,1.000,bilinear,-11.322,-4.866,-32 +resnetv2_50d_gn,71.010,28.990,89.770,10.230,25.57,288,0.950,bicubic,-10.806,-6.154,+12 +xcit_small_24_p16_224,71.010,28.990,89.700,10.300,47.67,224,1.000,bicubic,-11.570,-6.304,-59 +visformer_small,71.010,28.990,89.460,10.540,40.22,224,0.900,bicubic,-11.096,-6.412,-12 +lamhalobotnet50ts_256,70.990,29.010,89.060,10.940,22.57,256,0.950,bicubic,-10.554,-6.444,+23 +tresnet_m_448,70.990,29.010,88.680,11.320,31.39,448,0.875,bilinear,-10.724,-6.892,+12 +edgenext_small,70.980,29.020,89.870,10.130,5.59,320,1.000,bicubic,-10.588,-5.836,+20 +resnest50d_4s2x40d,70.950,29.050,89.710,10.290,30.42,224,0.875,bicubic,-10.158,-5.848,+54 +wide_resnet50_2,70.950,29.050,89.230,10.770,68.88,224,0.875,bicubic,-10.506,-6.302,+29 +convnext_nano.d1h_in1k,70.940,29.060,89.420,10.580,15.59,288,1.000,bicubic,-10.530,-6.238,+26 +vit_small_patch16_224.augreg_in21k_ft_in1k,70.930,29.070,90.140,9.860,22.05,224,0.900,bicubic,-10.472,-5.994,+30 +tnt_s_patch16_224,70.930,29.070,89.600,10.400,23.76,224,0.900,bicubic,-10.588,-6.148,+20 +coatnet_nano_rw_224,70.920,29.080,89.710,10.290,15.14,224,0.900,bicubic,-10.780,-5.928,+6 +tf_efficientnet_b3.ap_in1k,70.920,29.080,89.430,10.570,12.23,300,0.904,bicubic,-10.902,-6.194,-2 +coatnext_nano_rw_224,70.910,29.090,90.250,9.750,14.70,224,0.900,bicubic,-11.038,-5.668,-11 +coatnet_rmlp_nano_rw_224,70.910,29.090,89.920,10.080,15.15,224,0.900,bicubic,-11.154,-5.950,-22 +vit_srelpos_medium_patch16_224.sw_in1k,70.900,29.100,89.960,10.040,38.74,224,0.900,bicubic,-11.336,-5.974,-37 +tf_efficientnet_b1.ns_jft_in1k,70.870,29.130,90.120,9.880,7.79,240,0.882,bicubic,-10.518,-5.618,+25 +vit_base_patch16_rpn_224.in1k,70.870,29.130,89.770,10.230,86.54,224,0.900,bicubic,-11.332,-6.226,-36 +vit_large_patch32_384.orig_in21k_ft_in1k,70.860,29.140,90.570,9.430,306.63,384,1.000,bicubic,-10.646,-5.522,+13 +jx_nest_tiny,70.850,29.150,89.940,10.060,17.06,224,0.875,bicubic,-10.564,-5.676,+20 +resnetrs101,70.840,29.160,89.830,10.170,63.62,288,0.940,bicubic,-11.448,-6.108,-47 +rexnet_200,70.840,29.160,89.700,10.300,16.37,224,0.875,bicubic,-10.792,-5.968,-1 +tresnet_l,70.840,29.160,89.630,10.370,55.99,224,0.875,bilinear,-10.648,-5.994,+9 +resnet101,70.840,29.160,89.510,10.490,44.55,224,0.950,bicubic,-11.098,-6.244,-15 +tf_efficientnetv2_b3.in1k,70.830,29.170,89.500,10.500,14.36,300,0.904,bicubic,-11.140,-6.282,-24 
+coat_lite_small,70.800,29.200,89.570,10.430,19.84,224,0.900,bicubic,-11.508,-6.280,-54 +poolformer_m36,70.790,29.210,89.510,10.490,56.17,224,0.950,bicubic,-11.320,-6.178,-38 +deit3_small_patch16_224,70.760,29.240,89.460,10.540,22.06,224,0.900,bicubic,-10.626,-5.990,+15 +levit_384,70.750,29.250,89.300,10.700,39.13,224,0.900,bicubic,-11.836,-6.716,-88 +swinv2_cr_tiny_ns_224,70.720,29.280,89.370,10.630,28.33,224,0.900,bicubic,-11.070,-6.454,-16 +vit_relpos_small_patch16_224.sw_in1k,70.710,29.290,90.000,10.000,21.98,224,0.900,bicubic,-10.752,-5.828,+4 +vit_base_patch32_clip_224.openai_ft_in1k,70.710,29.290,89.830,10.170,88.22,224,0.900,bicubic,-11.220,-6.138,-24 +mobilevitv2_175_in22ft1k,70.650,29.350,89.710,10.290,14.25,256,0.888,bicubic,-11.294,-6.082,-28 +tf_efficientnet_b3.aa_in1k,70.640,29.360,89.440,10.560,12.23,300,0.904,bicubic,-10.996,-6.278,-13 +crossvit_small_240,70.610,29.390,89.360,10.640,26.86,240,0.875,bicubic,-10.410,-6.100,+33 +cait_xxs24_384,70.600,29.400,89.720,10.280,12.03,384,1.000,bicubic,-10.366,-5.926,+36 +gluon_senet154,70.600,29.400,88.920,11.080,115.09,224,0.875,bicubic,-10.634,-6.428,+14 +convit_small,70.580,29.420,89.580,10.420,27.78,224,0.875,bicubic,-10.846,-6.164,0 +convnext_nano_ols.d1h_in1k,70.570,29.430,89.090,10.910,15.65,288,1.000,bicubic,-11.040,-6.550,-14 +twins_pcpvt_small,70.550,29.450,89.070,10.930,24.11,224,0.900,bicubic,-10.538,-6.572,+25 +swinv2_tiny_window8_256,70.540,29.460,89.500,10.500,28.35,256,0.900,bicubic,-11.266,-6.494,-28 +ssl_resnext101_32x4d,70.530,29.470,89.760,10.240,44.18,224,0.875,bilinear,-10.394,-5.968,+34 +vit_small_r26_s32_224.augreg_in21k_ft_in1k,70.520,29.480,90.110,9.890,36.43,224,0.900,bicubic,-11.338,-5.912,-35 +deit_small_distilled_patch16_224,70.520,29.480,89.470,10.530,22.44,224,0.900,bicubic,-10.680,-5.908,+10 +legacy_senet154,70.500,29.500,89.010,10.990,115.09,224,0.875,bilinear,-10.810,-6.486,+3 +regnetz_b16,70.450,29.550,89.530,10.470,9.72,288,0.940,bicubic,-10.266,-5.948,+42 +twins_svt_small,70.440,29.560,89.360,10.640,24.06,224,0.900,bicubic,-11.242,-6.310,-28 +crossvit_15_240,70.430,29.570,89.530,10.470,27.53,240,0.875,bicubic,-11.106,-6.162,-20 +gluon_seresnext101_64x4d,70.430,29.570,89.350,10.650,88.23,224,0.875,bicubic,-10.464,-5.958,+30 +halonet50ts,70.430,29.570,89.320,10.680,22.73,256,0.940,bicubic,-11.214,-6.288,-30 +tf_efficientnet_lite4.in1k,70.430,29.570,89.110,10.890,13.01,380,0.920,bilinear,-11.106,-6.558,-22 +resnetaa50,70.420,29.580,90.000,10.000,25.56,288,1.000,bicubic,-11.202,-5.808,-29 +resnest50d,70.410,29.590,88.760,11.240,27.48,224,0.875,bilinear,-10.564,-6.618,+18 +resnest50d_1s4x24d,70.400,29.600,89.220,10.780,25.68,224,0.875,bicubic,-10.588,-6.102,+16 +seresnext50_32x4d,70.400,29.600,89.110,10.890,27.56,224,0.875,bicubic,-10.866,-6.510,-5 +cs3darknet_l,70.360,29.640,89.750,10.250,21.16,288,0.950,bicubic,-10.536,-5.920,+22 +gernet_l,70.350,29.650,88.980,11.020,31.08,256,0.875,bilinear,-11.004,-6.556,-11 +vit_srelpos_small_patch16_224.sw_in1k,70.290,29.710,89.580,10.420,21.97,224,0.900,bicubic,-10.804,-5.752,+4 +gluon_resnet152_v1s,70.290,29.710,88.850,11.150,60.32,224,0.875,bicubic,-10.726,-6.562,+11 +repvgg_b3,70.250,29.750,88.730,11.270,123.09,224,0.875,bilinear,-10.242,-6.530,+40 +coat_mini,70.220,29.780,89.440,10.560,10.34,224,0.900,bicubic,-11.048,-5.952,-12 +xception41p,70.220,29.780,89.090,10.910,26.91,299,0.940,bicubic,-11.738,-6.704,-60 +sebotnet33ts_256,70.180,29.820,88.790,11.210,13.70,256,0.940,bicubic,-10.970,-6.384,-6 
+ecaresnet101d_pruned,70.130,29.870,89.590,10.410,24.88,224,0.875,bicubic,-10.688,-6.038,+19 +efficientnet_el.ra_in1k,70.120,29.880,89.290,10.710,10.59,300,0.904,bicubic,-11.196,-6.236,-18 +inception_resnet_v2,70.120,29.880,88.700,11.300,55.84,299,0.897,bicubic,-10.338,-6.606,+40 +resmlp_36_distilled_224,70.090,29.910,89.100,10.900,44.69,224,0.875,bicubic,-11.070,-6.388,-12 +haloregnetz_b,70.090,29.910,88.860,11.140,11.68,224,0.940,bicubic,-10.960,-6.336,-1 +poolformer_s36,70.020,29.980,89.190,10.810,30.86,224,0.900,bicubic,-11.396,-6.256,-30 +sehalonet33ts,70.020,29.980,88.710,11.290,13.69,256,0.940,bicubic,-10.938,-6.566,+3 +gluon_seresnext101_32x4d,70.010,29.990,88.900,11.100,48.96,224,0.875,bicubic,-10.894,-6.394,+6 +regnety_320,70.000,30.000,88.890,11.110,145.05,224,0.875,bicubic,-10.810,-6.354,+12 +levit_256,69.970,30.030,89.250,10.750,18.89,224,0.900,bicubic,-11.540,-6.240,-42 +gluon_resnet152_v1d,69.960,30.040,88.490,11.510,60.21,224,0.875,bicubic,-10.514,-6.716,+29 +pit_s_224,69.890,30.110,88.930,11.070,23.46,224,0.900,bicubic,-11.204,-6.640,-11 +maxvit_rmlp_pico_rw_256,69.880,30.120,89.270,10.730,7.52,256,0.950,bicubic,-10.636,-5.942,+22 +ecaresnet50d,69.840,30.160,89.400,10.600,25.58,224,0.875,bicubic,-10.752,-5.920,+15 +mobilevitv2_150_in22ft1k,69.820,30.180,89.170,10.830,10.59,256,0.888,bicubic,-11.658,-6.504,-44 +mobilevitv2_200,69.760,30.240,88.610,11.390,18.45,256,0.888,bicubic,-11.376,-6.756,-21 +ssl_resnext50_32x4d,69.710,30.290,89.440,10.560,25.03,224,0.875,bilinear,-10.608,-5.966,+38 +xcit_tiny_24_p16_224_dist,69.700,30.300,88.710,11.290,12.12,224,1.000,bicubic,-10.746,-6.508,+27 +xcit_tiny_12_p16_384_dist,69.690,30.310,89.030,10.970,6.72,384,1.000,bicubic,-11.250,-6.380,-8 +lambda_resnet50ts,69.690,30.310,88.830,11.170,21.54,256,0.950,bicubic,-11.476,-7.142,-27 +resmlp_24_distilled_224,69.680,30.320,89.050,10.950,30.02,224,0.875,bicubic,-11.086,-6.168,0 +gluon_resnext101_64x4d,69.680,30.320,88.270,11.730,83.46,224,0.875,bicubic,-10.924,-6.718,+7 +resnext50_32x4d,69.660,30.340,88.660,11.340,25.03,224,0.950,bicubic,-11.458,-6.672,-26 +efficientnet_b3_pruned.in1k,69.580,30.420,88.980,11.020,9.86,300,0.904,bicubic,-11.278,-6.262,-5 +gcresnext50ts,69.540,30.460,88.850,11.150,15.67,256,0.900,bicubic,-11.040,-6.320,+5 +nf_resnet50,69.540,30.460,88.730,11.270,25.56,288,0.940,bicubic,-11.122,-6.606,0 +gernet_m,69.530,30.470,88.690,11.310,21.14,224,0.875,bilinear,-11.202,-6.494,-5 +ens_adv_inception_resnet_v2,69.530,30.470,88.510,11.490,55.84,299,0.897,bicubic,-10.452,-6.428,+51 +efficientnet_el_pruned.in1k,69.520,30.480,88.930,11.070,10.59,300,0.904,bicubic,-10.780,-6.288,+30 +repvgg_b3g4,69.520,30.480,88.450,11.550,83.83,224,0.875,bilinear,-10.692,-6.660,+36 +gcresnet50t,69.500,30.500,89.040,10.960,25.90,256,0.900,bicubic,-11.440,-6.414,-21 +efficientnet_b2.ra_in1k,69.500,30.500,88.680,11.320,9.11,288,1.000,bicubic,-11.112,-6.638,-4 +rexnet_150,69.470,30.530,88.980,11.020,9.73,224,0.875,bicubic,-10.840,-6.186,+23 +gcvit_xxtiny,69.470,30.530,88.870,11.130,12.00,224,0.875,bicubic,-10.244,-6.210,+61 +swin_tiny_patch4_window7_224,69.450,30.550,89.020,10.980,28.29,224,0.900,bicubic,-11.928,-6.520,-54 +regnetx_320,69.440,30.560,88.270,11.730,107.81,224,0.875,bicubic,-10.806,-6.756,+27 +cspresnext50,69.430,30.570,88.620,11.380,20.57,256,0.887,bilinear,-11.116,-6.700,-6 +vit_base_patch32_224.augreg_in21k_ft_in1k,69.410,30.590,89.420,10.580,88.22,224,0.900,bicubic,-11.314,-6.148,-14 +convmixer_768_32,69.400,30.600,88.910,11.090,21.11,224,0.960,bicubic,-10.764,-6.162,+30 
+darknet53,69.370,30.630,88.770,11.230,41.61,288,1.000,bicubic,-11.164,-6.650,-8 +inception_v4,69.360,30.640,88.780,11.220,42.68,299,0.875,bicubic,-10.808,-6.188,+27 +legacy_seresnext101_32x4d,69.360,30.640,88.070,11.930,48.96,224,0.875,bilinear,-10.868,-6.948,+23 +ecaresnetlight,69.340,30.660,89.220,10.780,30.16,224,0.875,bicubic,-11.122,-6.028,-1 +resnet50d,69.330,30.670,88.220,11.780,25.58,224,0.875,bicubic,-11.200,-6.940,-11 +cs3darknet_focus_l,69.320,30.680,89.440,10.560,21.15,288,0.950,bicubic,-11.564,-6.242,-28 +xception71,69.320,30.680,88.260,11.740,42.34,299,0.903,bicubic,-10.554,-6.662,+37 +vit_small_patch16_384.augreg_in1k,69.310,30.690,89.020,10.980,22.20,384,1.000,bicubic,-11.810,-6.554,-51 +mobilevitv2_175,69.300,30.700,88.940,11.060,14.25,256,0.888,bicubic,-11.560,-6.314,-30 +vit_small_patch32_384.augreg_in21k_ft_in1k,69.290,30.710,89.820,10.180,22.92,384,1.000,bicubic,-11.190,-5.778,-11 +convnext_pico_ols.d1_in1k,69.240,30.760,88.830,11.170,9.06,288,1.000,bicubic,-11.224,-6.412,-9 +edgenext_small_rw,69.230,30.770,88.750,11.250,7.83,320,1.000,bicubic,-11.226,-6.442,-7 +efficientformer_l1,69.220,30.780,88.540,11.460,12.29,224,0.950,bicubic,-11.282,-6.458,-16 +vit_base_patch16_384.augreg_in1k,69.180,30.820,88.380,11.620,86.86,384,1.000,bicubic,-11.922,-6.952,-54 +gluon_xception65,69.160,30.840,88.090,11.910,39.92,299,0.903,bicubic,-10.556,-6.770,+39 +gluon_resnet152_v1c,69.140,30.860,87.870,12.130,60.21,224,0.875,bicubic,-10.770,-6.970,+26 +mixnet_xl.ra_in1k,69.100,30.900,88.310,11.690,11.90,224,0.875,bicubic,-11.376,-6.626,-17 +seresnet33ts,69.090,30.910,88.490,11.510,19.78,256,0.900,bicubic,-11.262,-6.616,-5 +tf_efficientnetv2_b2.in1k,69.090,30.910,88.220,11.780,10.10,260,0.890,bicubic,-11.118,-6.822,+9 +resnetv2_50,69.040,30.960,88.440,11.560,25.55,224,0.950,bicubic,-11.392,-6.640,-13 +gluon_resnet101_v1d,69.010,30.990,88.100,11.900,44.57,224,0.875,bicubic,-11.404,-6.914,-12 +repvgg_b2g4,69.000,31.000,88.360,11.640,61.76,224,0.875,bilinear,-10.366,-6.328,+52 +seresnet50,68.980,31.020,88.710,11.290,28.09,224,0.875,bicubic,-11.294,-6.360,-2 +gcresnet33ts,68.980,31.020,88.470,11.530,19.88,256,0.900,bicubic,-11.102,-6.528,+11 +gluon_resnext101_32x4d,68.960,31.040,88.360,11.640,44.18,224,0.875,bicubic,-11.374,-6.566,-10 +convnext_pico.d1_in1k,68.930,31.070,88.480,11.520,9.05,288,0.950,bicubic,-11.496,-6.578,-18 +tf_efficientnet_b2.ap_in1k,68.920,31.080,88.350,11.650,9.11,260,0.890,bicubic,-11.380,-6.678,-6 +cspdarknet53,68.890,31.110,88.600,11.400,27.64,256,0.887,bilinear,-11.168,-6.484,+8 +mobilevitv2_150,68.870,31.130,88.090,11.910,10.59,256,0.888,bicubic,-11.506,-6.970,-19 +regnety_120,68.850,31.150,88.330,11.670,51.82,224,0.875,bicubic,-11.516,-6.796,-18 +resnet50_gn,68.830,31.170,88.440,11.560,25.56,224,0.940,bicubic,-11.222,-6.506,+6 +gluon_resnet152_v1b,68.820,31.180,87.710,12.290,60.19,224,0.875,bicubic,-10.866,-7.026,+25 +eca_resnet33ts,68.810,31.190,88.580,11.420,19.68,256,0.900,bicubic,-11.268,-6.390,+2 +dpn131,68.770,31.230,87.470,12.530,79.25,224,0.875,bicubic,-11.052,-7.240,+15 +gmlp_s16_224,68.760,31.240,88.090,11.910,19.42,224,0.875,bicubic,-10.882,-6.508,+26 +darknetaa53,68.750,31.250,88.720,11.280,36.02,288,1.000,bilinear,-11.772,-6.602,-41 +tf_efficientnet_b2.aa_in1k,68.750,31.250,87.990,12.010,9.11,260,0.890,bicubic,-11.336,-6.918,-4 +resnext50d_32x4d,68.740,31.260,88.300,11.700,25.05,224,0.875,bicubic,-10.936,-6.566,+21 +poolformer_s24,68.740,31.260,88.220,11.780,21.39,224,0.900,bicubic,-11.576,-6.818,-21 
+resnet50,68.730,31.270,87.690,12.310,25.56,224,0.950,bicubic,-11.644,-6.924,-29 +deit_small_patch16_224,68.720,31.280,88.200,11.800,22.05,224,0.900,bicubic,-11.136,-6.852,+4 +gluon_resnet101_v1s,68.710,31.290,87.910,12.090,44.67,224,0.875,bicubic,-11.592,-7.250,-23 +dpn107,68.690,31.310,88.130,11.870,86.92,224,0.875,bicubic,-11.466,-6.780,-12 +gluon_seresnext50_32x4d,68.670,31.330,88.310,11.690,27.56,224,0.875,bicubic,-11.248,-6.512,-4 +hrnet_w64,68.640,31.360,88.050,11.950,128.06,224,0.875,bilinear,-10.834,-6.602,+24 +dpn98,68.590,31.410,87.680,12.320,61.57,224,0.875,bicubic,-11.052,-6.948,+16 +xcit_tiny_12_p8_224,68.560,31.440,88.680,11.320,6.71,224,1.000,bicubic,-11.134,-6.372,+9 +regnetx_160,68.530,31.470,88.450,11.550,54.28,224,0.875,bicubic,-11.326,-6.380,-2 +cspresnet50,68.460,31.540,88.010,11.990,21.62,256,0.887,bilinear,-11.114,-6.702,+15 +rexnet_130,68.450,31.550,88.040,11.960,7.56,224,0.875,bicubic,-11.050,-6.642,+16 +xcit_tiny_24_p16_224,68.430,31.570,88.290,11.710,12.12,224,1.000,bicubic,-11.014,-6.592,+21 +ecaresnet50d_pruned,68.420,31.580,88.370,11.630,19.94,224,0.875,bicubic,-11.296,-6.510,+1 +tf_efficientnet_el.in1k,68.420,31.580,88.210,11.790,10.59,300,0.904,bicubic,-11.830,-6.918,-30 +cait_xxs36_224,68.410,31.590,88.630,11.370,17.30,224,1.000,bicubic,-11.340,-6.236,-2 +ssl_resnet50,68.410,31.590,88.560,11.440,25.56,224,0.875,bilinear,-10.812,-6.272,+33 +skresnext50_32x4d,68.350,31.650,87.570,12.430,27.48,224,0.875,bicubic,-11.806,-7.072,-24 +fbnetv3_d.ra2_in1k,68.330,31.670,88.450,11.550,10.31,256,0.950,bilinear,-11.350,-6.494,+1 +dla102x2,68.330,31.670,87.890,12.110,41.28,224,0.875,bilinear,-11.118,-6.750,+14 +efficientnet_b2_pruned.in1k,68.320,31.680,88.100,11.900,8.31,260,0.890,bicubic,-11.596,-6.756,-18 +resmlp_big_24_224,68.320,31.680,87.520,12.480,129.14,224,0.875,bicubic,-12.708,-7.502,-95 +gluon_resnext50_32x4d,68.310,31.690,87.300,12.700,25.03,224,0.875,bicubic,-11.044,-7.126,+14 +vit_base_patch16_224.sam,68.280,31.720,87.720,12.280,86.57,224,0.900,bicubic,-11.962,-7.036,-37 +ecaresnet26t,68.230,31.770,88.790,11.210,16.01,320,0.950,bicubic,-11.624,-6.294,-16 +tf_efficientnet_lite3.in1k,68.230,31.770,87.740,12.260,8.20,300,0.904,bilinear,-11.590,-7.174,-14 +ese_vovnet39b,68.210,31.790,88.240,11.760,24.57,224,0.875,bicubic,-11.110,-6.472,+11 +fbnetv3_b.ra2_in1k,68.180,31.820,87.930,12.070,8.60,256,0.950,bilinear,-10.970,-6.816,+29 +regnetx_120,68.150,31.850,87.660,12.340,46.11,224,0.875,bicubic,-11.446,-7.078,-4 +resmlp_36_224,68.080,31.920,88.190,11.810,44.69,224,0.875,bicubic,-11.690,-6.696,-17 +resnetrs50,68.030,31.970,87.710,12.290,35.69,224,0.910,bicubic,-11.862,-7.258,-26 +pit_xs_distilled_224,68.020,31.980,87.720,12.280,11.00,224,0.900,bicubic,-11.286,-6.644,+10 +dpn92,67.990,32.010,87.580,12.420,37.67,224,0.875,bicubic,-12.018,-7.256,-33 +nf_regnet_b1,67.960,32.040,88.200,11.800,10.22,288,0.900,bicubic,-11.332,-6.548,+10 +gluon_resnet50_v1d,67.940,32.060,87.130,12.870,25.58,224,0.875,bicubic,-11.134,-7.340,+27 +resnetv2_50x1_bitm,67.920,32.080,89.300,10.700,25.55,448,1.000,bilinear,-12.422,-6.384,-60 +levit_192,67.900,32.100,87.890,12.110,10.95,224,0.900,bicubic,-11.942,-6.896,-27 +tf_efficientnetv2_b1.in1k,67.890,32.110,87.800,12.200,8.14,240,0.882,bicubic,-11.572,-6.922,-6 +regnetx_080,67.880,32.120,86.990,13.010,39.57,224,0.875,bicubic,-11.314,-7.570,+16 +resnext101_32x8d,67.860,32.140,87.490,12.510,88.79,224,0.875,bilinear,-11.448,-7.028,-1 +efficientnet_em.ra2_in1k,67.840,32.160,88.120,11.880,6.90,240,0.882,bicubic,-11.412,-6.674,+8 
+legacy_seresnext50_32x4d,67.840,32.160,87.620,12.380,27.56,224,0.875,bilinear,-11.238,-6.816,+19 +lambda_resnet26t,67.810,32.190,87.780,12.220,10.96,256,0.940,bicubic,-11.286,-6.812,+16 +resmlp_24_224,67.810,32.190,87.610,12.390,30.02,224,0.875,bicubic,-11.564,-6.936,-9 +hrnet_w48,67.770,32.230,87.420,12.580,77.47,224,0.875,bilinear,-11.530,-7.092,-2 +hrnet_w44,67.740,32.260,87.560,12.440,67.06,224,0.875,bilinear,-11.156,-6.808,+25 +coat_lite_mini,67.720,32.280,87.700,12.300,11.01,224,0.900,bicubic,-11.368,-6.904,+13 +tf_efficientnet_b0.ns_jft_in1k,67.710,32.290,88.070,11.930,5.29,224,0.875,bicubic,-10.948,-6.306,+37 +regnetx_064,67.680,32.320,87.520,12.480,26.21,224,0.875,bicubic,-11.392,-6.938,+13 +eca_botnext26ts_256,67.680,32.320,87.060,12.940,10.59,256,0.950,bicubic,-11.594,-7.554,-2 +convnext_femto_ols.d1_in1k,67.670,32.330,87.380,12.620,5.23,288,0.950,bicubic,-11.264,-7.152,+17 +xception,67.650,32.350,87.570,12.430,22.86,299,0.897,bicubic,-11.402,-6.822,+12 +dpn68b,67.630,32.370,87.670,12.330,12.61,224,0.875,bicubic,-11.586,-6.744,-2 +halonet26t,67.620,32.380,87.250,12.750,12.48,256,0.950,bicubic,-11.480,-7.062,+4 +dla169,67.610,32.390,87.590,12.410,53.39,224,0.875,bilinear,-11.078,-6.746,+28 +gluon_inception_v3,67.590,32.410,87.470,12.530,23.83,299,0.875,bicubic,-11.216,-6.900,+19 +gluon_resnet101_v1c,67.580,32.420,87.180,12.820,44.57,224,0.875,bicubic,-11.954,-7.398,-30 +res2net50_26w_8s,67.570,32.430,87.280,12.720,48.40,224,0.875,bilinear,-11.630,-7.088,-5 +hrnet_w40,67.560,32.440,87.140,12.860,57.56,224,0.875,bilinear,-11.360,-7.330,+10 +tf_efficientnet_b1.ap_in1k,67.520,32.480,87.760,12.240,7.79,240,0.882,bicubic,-11.760,-6.546,-15 +legacy_seresnet152,67.520,32.480,87.390,12.610,66.82,224,0.875,bilinear,-11.140,-6.980,+24 +mobilevitv2_125,67.470,32.530,87.570,12.430,7.48,256,0.888,bicubic,-12.214,-7.280,-44 +efficientnet_b1.ft_in1k,67.470,32.530,87.510,12.490,7.79,256,1.000,bicubic,-11.324,-6.832,+13 +eca_halonext26ts,67.470,32.530,87.230,12.770,10.76,256,0.940,bicubic,-12.016,-7.368,-33 +gluon_resnet101_v1b,67.460,32.540,87.240,12.760,44.55,224,0.875,bicubic,-11.846,-7.284,-24 +tf_efficientnet_cc_b1_8e.in1k,67.450,32.550,87.310,12.690,39.72,240,0.882,bicubic,-11.858,-7.060,-26 +res2net101_26w_4s,67.440,32.560,87.010,12.990,45.21,224,0.875,bilinear,-11.758,-7.422,-13 +resnetblur50,67.430,32.570,87.440,12.560,25.56,224,0.875,bicubic,-11.856,-7.198,-23 +resnet33ts,67.370,32.630,87.580,12.420,19.68,256,0.900,bicubic,-11.844,-6.994,-17 +cait_xxs24_224,67.330,32.670,87.510,12.490,11.96,224,1.000,bicubic,-11.056,-6.800,+29 +regnetx_032,67.290,32.710,87.000,13.000,15.30,224,0.875,bicubic,-10.882,-7.088,+40 +coat_tiny,67.250,32.750,87.340,12.660,5.50,224,0.900,bicubic,-11.184,-6.698,+24 +xception41,67.250,32.750,87.200,12.800,26.97,299,0.903,bicubic,-11.266,-7.078,+15 +convnext_femto.d1_in1k,67.200,32.800,87.510,12.490,5.22,288,0.950,bicubic,-11.504,-6.924,+7 +resnest26d,67.200,32.800,87.170,12.830,17.07,224,0.875,bilinear,-11.278,-7.128,+17 +repvgg_b2,67.160,32.840,87.330,12.670,89.02,224,0.875,bilinear,-11.632,-7.084,0 +legacy_seresnet101,67.160,32.840,87.060,12.940,49.33,224,0.875,bilinear,-11.222,-7.204,+24 +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,67.140,32.860,86.490,13.510,119.42,256,0.900,bicubic,-12.340,-7.648,-48 +botnet26t_256,67.130,32.870,87.550,12.450,12.49,256,0.950,bicubic,-12.142,-6.978,-31 +dla60x,67.100,32.900,87.190,12.810,17.35,224,0.875,bilinear,-11.146,-6.828,+25 +gluon_resnet50_v1s,67.060,32.940,86.860,13.140,25.68,224,0.875,bicubic,-11.652,-7.378,-1 
+tv_resnet152,67.050,32.950,87.550,12.450,60.19,224,0.875,bilinear,-11.262,-6.488,+22
+xcit_tiny_12_p16_224_dist,67.020,32.980,87.400,12.600,6.72,224,1.000,bicubic,-11.558,-6.796,+2
+dla60_res2net,67.020,32.980,87.160,12.840,20.85,224,0.875,bilinear,-11.444,-7.046,+10
+dla102x,67.010,32.990,86.770,13.230,26.31,224,0.875,bilinear,-11.500,-7.458,+4
+lambda_resnet26rpt_256,66.960,33.040,87.130,12.870,10.99,256,0.940,bicubic,-12.010,-7.300,-19
+mixnet_l.ft_in1k,66.940,33.060,86.910,13.090,7.33,224,0.875,bicubic,-12.036,-7.272,-21
+pit_xs_224,66.920,33.080,87.280,12.720,10.62,224,0.900,bicubic,-11.262,-6.888,+22
+pvt_v2_b1,66.910,33.090,87.430,12.570,14.01,224,0.900,bicubic,-11.784,-7.062,-7
+res2net50_26w_6s,66.910,33.090,86.860,13.140,37.05,224,0.875,bilinear,-11.660,-7.264,-3
+repvgg_b1,66.900,33.100,86.780,13.220,57.42,224,0.875,bilinear,-11.466,-7.318,+10
+xcit_nano_12_p8_384_dist,66.880,33.120,87.110,12.890,3.05,384,1.000,bicubic,-10.940,-6.926,+37
+tf_efficientnet_b1.aa_in1k,66.880,33.120,87.010,12.990,7.79,240,0.882,bicubic,-11.946,-7.188,-19
+efficientnet_es.ra_in1k,66.880,33.120,86.730,13.270,5.44,224,0.875,bicubic,-11.186,-7.196,+24
+mobilevit_s,66.870,33.130,87.060,12.940,5.58,256,0.900,bicubic,-11.442,-7.086,+8
+resnet32ts,66.860,33.140,87.250,12.750,17.96,256,0.900,bicubic,-12.144,-7.106,-31
+regnetx_040,66.840,33.160,86.730,13.270,22.12,224,0.875,bicubic,-11.642,-7.514,-6
+hrnet_w30,66.780,33.220,86.800,13.200,37.71,224,0.875,bilinear,-11.426,-7.422,+11
+tf_mixnet_l.in1k,66.780,33.220,86.470,13.530,7.33,224,0.875,bicubic,-11.994,-7.528,-21
+selecsls60b,66.760,33.240,86.530,13.470,32.77,224,0.875,bicubic,-11.652,-7.644,-2
+hrnet_w32,66.750,33.250,87.300,12.700,41.23,224,0.875,bilinear,-11.700,-6.886,-7
+wide_resnet101_2,66.730,33.270,87.030,12.970,126.89,224,0.875,bilinear,-12.126,-7.252,-30
+vit_small_patch16_224.augreg_in1k,66.710,33.290,86.710,13.290,22.05,224,0.900,bicubic,-12.136,-7.574,-30
+tf_efficientnetv2_b0.in1k,66.700,33.300,86.710,13.290,7.14,224,0.875,bicubic,-11.656,-7.314,-2
+adv_inception_v3,66.650,33.350,86.530,13.470,23.83,299,0.875,bicubic,-10.932,-7.206,+36
+dla60_res2next,66.640,33.360,87.030,12.970,17.03,224,0.875,bilinear,-11.800,-7.122,-11
+vit_tiny_patch16_384.augreg_in21k_ft_in1k,66.610,33.390,87.260,12.740,5.79,384,1.000,bicubic,-11.820,-7.282,-11
+mobilevitv2_100,66.610,33.390,87.000,13.000,4.90,256,0.888,bicubic,-11.480,-7.164,+8
+cs3darknet_m,66.570,33.430,87.160,12.840,9.31,288,0.950,bicubic,-11.066,-6.854,+27
+gluon_resnet50_v1c,66.560,33.440,86.180,13.820,25.58,224,0.875,bicubic,-11.452,-7.808,+9
+levit_128,66.550,33.450,86.750,13.250,9.21,224,0.900,bicubic,-11.936,-7.260,-21
+dla102,66.540,33.460,86.910,13.090,33.27,224,0.875,bilinear,-11.492,-7.036,+6
+vit_base_patch16_224.augreg_in1k,66.490,33.510,86.250,13.750,86.57,224,0.900,bicubic,-12.664,-7.850,-57
+vit_base_patch32_384.augreg_in1k,66.420,33.580,86.950,13.050,88.30,384,1.000,bicubic,-12.340,-7.278,-36
+gmixer_24_224,66.420,33.580,86.150,13.850,24.72,224,0.875,bicubic,-11.616,-7.514,+3
+tf_inception_v3,66.410,33.590,86.660,13.340,23.83,299,0.875,bicubic,-11.450,-6.980,+13
+bat_resnext26ts,66.400,33.600,86.830,13.170,10.73,256,0.900,bicubic,-11.842,-7.270,-10
+hardcorenas_f,66.370,33.630,86.200,13.800,8.20,224,0.875,bilinear,-11.734,-7.602,-4
+coat_lite_tiny,66.290,33.710,86.980,13.020,5.72,224,0.900,bicubic,-11.222,-6.936,+25
+efficientnet_b0.ra_in1k,66.290,33.710,85.960,14.040,5.29,224,0.875,bicubic,-11.408,-7.572,+14
+cs3darknet_focus_m,66.260,33.740,87.090,12.910,9.30,288,0.950,bicubic,-11.018,-6.880,+33
+legacy_seresnet50,66.250,33.750,86.330,13.670,28.09,224,0.875,bilinear,-11.380,-7.418,+15
+selecsls60,66.210,33.790,86.340,13.660,30.67,224,0.875,bicubic,-11.772,-7.488,-2
+tf_efficientnet_em.in1k,66.180,33.820,86.360,13.640,6.90,240,0.882,bicubic,-11.950,-7.684,-11
+tv_resnext50_32x4d,66.180,33.820,86.040,13.960,25.03,224,0.875,bilinear,-11.440,-7.656,+13
+tf_efficientnet_cc_b0_8e.in1k,66.170,33.830,86.240,13.760,24.01,224,0.875,bicubic,-11.738,-7.414,-1
+inception_v3,66.150,33.850,86.330,13.670,23.83,299,0.875,bicubic,-11.290,-7.146,+20
+res2net50_26w_4s,66.140,33.860,86.600,13.400,25.70,224,0.875,bilinear,-11.824,-7.254,-6
+resmlp_12_distilled_224,66.130,33.870,86.630,13.370,15.35,224,0.875,bicubic,-11.814,-6.928,-6
+efficientnet_b1_pruned.in1k,66.090,33.910,86.570,13.430,6.33,240,0.882,bicubic,-12.146,-7.264,-22
+rexnet_100,66.070,33.930,86.490,13.510,4.80,224,0.875,bicubic,-11.788,-7.380,-2
+gluon_resnet50_v1b,66.070,33.930,86.260,13.740,25.56,224,0.875,bicubic,-11.510,-7.456,+11
+regnety_016,66.060,33.940,86.380,13.620,11.20,224,0.875,bicubic,-11.802,-7.340,-5
+res2net50_14w_8s,66.020,33.980,86.250,13.750,25.06,224,0.875,bilinear,-12.130,-7.598,-22
+tinynet_a.in1k,66.010,33.990,85.790,14.210,6.19,192,0.875,bicubic,-11.642,-7.746,0
+gcresnext26ts,65.940,34.060,85.920,14.080,10.48,256,0.900,bicubic,-11.874,-7.914,-4
+seresnext26t_32x4d,65.880,34.120,85.680,14.320,16.81,224,0.875,bicubic,-12.106,-8.066,-17
+repvgg_b1g4,65.850,34.150,86.120,13.880,39.97,224,0.875,bilinear,-11.744,-7.706,+2
+res2next50,65.850,34.150,85.840,14.160,24.67,224,0.875,bilinear,-12.396,-8.052,-33
+densenet161,65.840,34.160,86.450,13.550,28.68,224,0.875,bicubic,-11.518,-7.188,+10
+hardcorenas_e,65.840,34.160,85.980,14.020,8.07,224,0.875,bilinear,-11.954,-7.714,-8
+resnet34d,65.780,34.220,86.710,13.290,21.82,224,0.875,bicubic,-11.336,-6.672,+18
+xcit_tiny_12_p16_224,65.780,34.220,86.220,13.780,6.72,224,1.000,bicubic,-11.340,-7.492,+16
+eca_resnext26ts,65.760,34.240,85.840,14.160,10.30,256,0.900,bicubic,-11.692,-7.726,+2
+mobilenetv3_large_100.miil_in21k_ft_in1k,65.760,34.240,85.200,14.800,5.48,224,0.875,bilinear,-12.156,-7.710,-21
+skresnet34,65.750,34.250,85.960,14.040,22.28,224,0.875,bicubic,-11.162,-7.362,+25
+tv_resnet101,65.690,34.310,85.980,14.020,44.55,224,0.875,bilinear,-11.684,-7.560,+2
+convnext_tiny.fb_in22k_ft_in1k,65.680,34.320,86.610,13.390,28.59,288,1.000,bicubic,-13.228,-8.064,-78
+seresnext26ts,65.660,34.340,86.140,13.860,10.39,256,0.900,bicubic,-12.206,-7.650,-22
+hardcorenas_d,65.630,34.370,85.460,14.540,7.50,224,0.875,bilinear,-11.802,-8.024,-2
+convnext_atto_ols.a2_in1k,65.610,34.390,86.260,13.740,3.70,288,0.950,bicubic,-11.606,-7.420,+6
+selecsls42b,65.610,34.390,85.810,14.190,32.46,224,0.875,bicubic,-11.564,-7.580,+6
+poolformer_s12,65.600,34.400,86.130,13.870,11.92,224,0.900,bicubic,-11.630,-7.374,+3
+tf_efficientnet_b0.ap_in1k,65.490,34.510,85.580,14.420,5.29,224,0.875,bicubic,-11.596,-7.676,+8
+seresnext26d_32x4d,65.410,34.590,85.970,14.030,16.81,224,0.875,bicubic,-12.192,-7.638,-17
+convmixer_1024_20_ks9_p14,65.410,34.590,85.590,14.410,24.38,224,0.960,bicubic,-11.536,-7.768,+13
+resnet26t,65.400,34.600,86.110,13.890,16.01,256,0.940,bicubic,-12.482,-7.730,-31
+tf_efficientnet_lite2.in1k,65.380,34.620,85.990,14.010,6.09,260,0.890,bicubic,-12.088,-7.764,-13
+res2net50_48w_2s,65.350,34.650,85.960,14.040,25.29,224,0.875,bilinear,-12.172,-7.594,-16
+densenet201,65.290,34.710,85.690,14.310,20.01,224,0.875,bicubic,-11.996,-7.788,-8
+densenetblur121d,65.280,34.720,85.710,14.290,8.00,224,0.875,bicubic,-11.308,-7.482,+20
+dla60,65.200,34.800,85.760,14.240,22.04,224,0.875,bilinear,-11.832,-7.558,+2
+crossvit_9_dagger_240,65.190,34.810,86.600,13.400,8.78,240,0.875,bicubic,-11.790,-7.010,+3
+ese_vovnet19b_dw,65.190,34.810,85.470,14.530,6.54,224,0.875,bicubic,-11.608,-7.798,+9
+tf_efficientnet_cc_b0_4e.in1k,65.150,34.850,85.160,14.840,13.31,224,0.875,bicubic,-12.156,-8.174,-14
+gernet_s,65.120,34.880,85.510,14.490,8.17,224,0.875,bilinear,-11.796,-7.622,+4
+legacy_seresnext26_32x4d,65.050,34.950,85.660,14.340,16.79,224,0.875,bicubic,-12.054,-7.656,-6
+mobilenetv2_120d.ra_in1k,65.030,34.970,85.960,14.040,5.83,224,0.875,bicubic,-12.254,-7.532,-15
+convnext_atto.d2_in1k,64.940,35.060,86.230,13.770,3.70,288,0.950,bicubic,-12.074,-7.470,-4
+hrnet_w18,64.920,35.080,85.740,14.260,21.30,224,0.875,bilinear,-11.838,-7.704,+5
+hardcorenas_c,64.860,35.140,85.250,14.750,5.52,224,0.875,bilinear,-12.194,-7.908,-8
+densenet169,64.760,35.240,85.240,14.760,14.15,224,0.875,bicubic,-11.146,-7.786,+22
+mixnet_m.ft_in1k,64.700,35.300,85.450,14.550,5.01,224,0.875,bicubic,-12.560,-7.974,-18
+resnet26d,64.680,35.320,85.120,14.880,16.01,224,0.875,bicubic,-12.016,-8.030,+2
+levit_128s,64.610,35.390,84.730,15.270,7.78,224,0.900,bicubic,-11.920,-8.136,+8
+resnext26ts,64.590,35.410,85.110,14.890,10.30,256,0.900,bicubic,-12.190,-8.020,-2
+xcit_nano_12_p8_224_dist,64.520,35.480,85.980,14.020,3.05,224,1.000,bicubic,-11.804,-7.110,+9
+repvgg_a2,64.450,35.550,85.130,14.870,28.21,224,0.875,bilinear,-12.010,-7.874,+7
+xcit_nano_12_p16_384_dist,64.430,35.570,85.300,14.700,3.05,384,1.000,bicubic,-11.028,-7.394,+25
+hardcorenas_b,64.420,35.580,84.870,15.130,5.18,224,0.875,bilinear,-12.118,-7.884,+2
+regnetx_016,64.380,35.620,85.470,14.530,9.19,224,0.875,bicubic,-12.570,-7.950,-14
+tf_efficientnet_lite1.in1k,64.380,35.620,85.470,14.530,5.42,240,0.882,bicubic,-12.262,-7.756,-4
+resmlp_12_224,64.350,35.650,85.580,14.420,15.35,224,0.875,bicubic,-12.304,-7.600,-6
+tf_efficientnet_b0.aa_in1k,64.310,35.690,85.280,14.720,5.29,224,0.875,bicubic,-12.538,-7.948,-12
+tf_mixnet_m.in1k,64.270,35.730,85.090,14.910,5.01,224,0.875,bicubic,-12.672,-8.062,-16
+dpn68,64.230,35.770,85.180,14.820,12.61,224,0.875,bicubic,-12.088,-7.798,+1
+tf_efficientnet_es.in1k,64.230,35.770,84.740,15.260,5.44,224,0.875,bicubic,-12.364,-8.462,-7
+regnety_008,64.160,35.840,85.270,14.730,6.26,224,0.875,bicubic,-12.156,-7.796,0
+vit_small_patch32_224.augreg_in21k_ft_in1k,64.070,35.930,85.560,14.440,22.88,224,0.900,bicubic,-11.920,-7.712,+2
+mobilenetv2_140.ra_in1k,64.060,35.940,85.040,14.960,6.11,224,0.875,bicubic,-12.456,-7.956,-6
+densenet121,63.750,36.250,84.590,15.410,7.98,224,0.875,bicubic,-11.828,-8.062,+9
+hardcorenas_a,63.710,36.290,84.400,15.600,5.26,224,0.875,bilinear,-12.206,-8.114,+1
+mobilevitv2_075,63.590,36.410,84.960,15.040,2.87,256,0.888,bicubic,-12.032,-7.808,+6
+resnest14d,63.590,36.410,84.250,15.750,10.61,224,0.875,bilinear,-11.916,-8.268,+8
+tf_mixnet_s.in1k,63.560,36.440,84.270,15.730,4.13,224,0.875,bicubic,-12.090,-8.358,+2
+resnet26,63.470,36.530,84.260,15.740,16.00,224,0.875,bicubic,-11.822,-8.310,+11
+mixnet_s.ft_in1k,63.390,36.610,84.740,15.260,4.13,224,0.875,bicubic,-12.602,-8.056,-7
+mobilenetv3_large_100.ra_in1k,63.360,36.640,84.090,15.910,5.48,224,0.875,bicubic,-12.406,-8.452,-3
+vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,63.340,36.660,85.280,14.720,6.36,384,1.000,bicubic,-12.612,-7.980,-7
+efficientnet_es_pruned.in1k,63.330,36.670,84.950,15.050,5.44,224,0.875,bicubic,-11.670,-7.498,+15
+tv_resnet50,63.330,36.670,84.640,15.360,25.56,224,0.875,bilinear,-12.808,-8.224,-12
+mixer_b16_224,63.270,36.730,83.310,16.690,59.88,224,0.875,bicubic,-13.330,-8.918,-23
+efficientnet_lite0.ra_in1k,63.240,36.760,84.440,15.560,4.65,224,0.875,bicubic,-12.244,-8.070,0
+mobilenetv3_rw.rmsp_in1k,63.220,36.780,84.510,15.490,5.48,224,0.875,bicubic,-12.414,-8.198,-6
+semnasnet_100.rmsp_in1k,63.150,36.850,84.520,15.480,3.89,224,0.875,bicubic,-12.298,-8.084,0
+pit_ti_distilled_224,63.150,36.850,83.960,16.040,5.10,224,0.900,bicubic,-11.380,-8.136,+20
+vit_tiny_patch16_224.augreg_in21k_ft_in1k,63.110,36.890,84.850,15.150,5.72,224,0.900,bicubic,-12.344,-7.998,-3
+regnety_006,63.110,36.890,84.250,15.750,6.06,224,0.875,bicubic,-12.136,-8.282,+1
+mobilevit_xs,62.940,37.060,84.840,15.160,2.32,256,0.900,bicubic,-11.704,-7.512,+13
+tv_densenet121,62.940,37.060,84.250,15.750,7.98,224,0.875,bicubic,-11.798,-7.900,+10
+resnet34,62.870,37.130,84.140,15.860,21.80,224,0.875,bilinear,-12.240,-8.144,+1
+legacy_seresnet34,62.850,37.150,84.210,15.790,21.96,224,0.875,bilinear,-11.958,-7.914,+7
+mobilenetv2_110d.ra_in1k,62.830,37.170,84.500,15.500,4.52,224,0.875,bicubic,-12.206,-7.686,+1
+edgenext_x_small,62.820,37.180,84.680,15.320,2.34,288,1.000,bicubic,-12.868,-8.086,-18
+deit_tiny_distilled_patch16_224,62.810,37.190,83.930,16.070,5.91,224,0.900,bicubic,-11.700,-7.960,+11
+hrnet_w18_small_v2,62.800,37.200,83.980,16.020,15.60,224,0.875,bilinear,-12.314,-8.436,-5
+swsl_resnet18,62.760,37.240,84.300,15.700,11.69,224,0.875,bilinear,-10.516,-7.434,+22
+tinynet_b.in1k,62.730,37.270,84.250,15.750,3.73,188,0.875,bicubic,-12.244,-7.938,-2
+repvgg_b0,62.720,37.280,83.860,16.140,15.82,224,0.875,bilinear,-12.432,-8.558,-10
+gluon_resnet34_v1b,62.570,37.430,83.990,16.010,21.80,224,0.875,bicubic,-12.018,-8.000,+4
+xcit_nano_12_p8_224,62.560,37.440,84.200,15.800,3.05,224,1.000,bicubic,-11.354,-7.972,+11
+tf_efficientnet_lite0.in1k,62.550,37.450,84.220,15.780,4.65,224,0.875,bicubic,-12.280,-7.956,-4
+regnetx_008,62.490,37.510,84.020,15.980,7.26,224,0.875,bicubic,-12.548,-8.316,-10
+dla34,62.480,37.520,83.910,16.090,15.74,224,0.875,bilinear,-12.150,-8.168,-1
+tf_mobilenetv3_large_100.in1k,62.460,37.540,83.970,16.030,5.48,224,0.875,bilinear,-13.058,-8.636,-24
+fbnetc_100.rmsp_in1k,62.440,37.560,83.380,16.620,5.57,224,0.875,bilinear,-12.684,-9.006,-16
+crossvit_9_240,62.260,37.740,84.270,15.730,8.55,240,0.875,bicubic,-11.704,-7.698,+4
+crossvit_tiny_240,62.070,37.930,83.600,16.400,7.01,240,0.875,bicubic,-11.254,-8.316,+9
+mnasnet_100.rmsp_in1k,61.900,38.100,83.710,16.290,4.38,224,0.875,bicubic,-12.758,-8.404,-8
+regnety_004,61.870,38.130,83.430,16.570,4.34,224,0.875,bicubic,-12.164,-8.322,-1
+vgg19_bn,61.860,38.140,83.450,16.550,143.68,224,0.875,bilinear,-12.354,-8.392,-4
+convit_tiny,61.590,38.410,84.120,15.880,5.71,224,0.875,bicubic,-11.526,-7.594,+8
+ssl_resnet18,61.480,38.520,83.300,16.700,11.69,224,0.875,bilinear,-11.130,-8.116,+12
+regnetx_006,61.350,38.650,83.450,16.550,6.20,224,0.875,bicubic,-12.502,-8.222,-1
+spnasnet_100.rmsp_in1k,61.220,38.780,82.790,17.210,4.42,224,0.875,bilinear,-12.864,-9.028,-7
+tv_resnet34,61.190,38.810,82.710,17.290,21.80,224,0.875,bilinear,-12.122,-8.716,+2
+vit_base_patch32_224.augreg_in1k,61.050,38.950,82.740,17.260,88.22,224,0.900,bicubic,-13.854,-9.038,-20
+pit_ti_224,60.980,39.020,83.860,16.140,4.85,224,0.900,bicubic,-11.932,-7.542,+6
+skresnet18,60.860,39.140,82.880,17.120,11.96,224,0.875,bicubic,-12.178,-8.288,+2
+ghostnet_100,60.830,39.170,82.360,17.640,5.18,224,0.875,bilinear,-13.148,-9.096,-10
+vgg16_bn,60.760,39.240,82.950,17.050,138.37,224,0.875,bilinear,-12.590,-8.556,-5
+semnasnet_075.rmsp_in1k,60.710,39.290,82.510,17.490,2.91,224,0.875,bicubic,-12.264,-8.626,0
+tf_mobilenetv3_large_075.in1k,60.400,39.600,81.950,18.050,3.99,224,0.875,bilinear,-13.038,-9.400,-8
+xcit_nano_12_p16_224_dist,60.240,39.760,82.490,17.510,3.05,224,1.000,bicubic,-12.062,-8.372,+6
+mobilenetv2_100.ra_in1k,60.190,39.810,82.240,17.760,3.50,224,0.875,bicubic,-12.780,-8.776,-2
+resnet18d,60.160,39.840,82.300,17.700,11.71,224,0.875,bicubic,-12.100,-8.396,+5
+vit_base_patch32_224.sam,60.010,39.990,81.230,18.770,88.22,224,0.900,bicubic,-13.680,-9.784,-13
+deit_tiny_patch16_224,59.830,40.170,82.670,17.330,5.72,224,0.900,bicubic,-12.338,-8.448,+5
+legacy_seresnet18,59.800,40.200,81.690,18.310,11.78,224,0.875,bicubic,-11.944,-8.644,+9
+vgg19,59.710,40.290,81.450,18.550,143.67,224,0.875,bilinear,-12.658,-9.422,-3
+regnetx_004,59.410,40.590,81.690,18.310,5.16,224,0.875,bicubic,-12.986,-9.140,-5
+edgenext_xx_small,59.390,40.610,81.820,18.180,1.33,288,1.000,bicubic,-12.476,-8.724,+4
+vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,59.070,40.930,81.760,18.240,6.34,224,0.900,bicubic,-12.718,-9.068,+3
+tf_mobilenetv3_large_minimal_100.in1k,59.070,40.930,81.150,18.850,3.92,224,0.875,bilinear,-13.178,-9.480,-1
+vgg13_bn,59.000,41.000,81.070,18.930,133.05,224,0.875,bilinear,-12.594,-9.312,+5
+hrnet_w18_small,58.950,41.050,81.340,18.660,13.19,224,0.875,bilinear,-13.392,-9.338,-7
+lcnet_100.ra2_in1k,58.870,41.130,81.180,18.820,2.95,224,0.875,bicubic,-13.244,-9.198,-3
+vgg16,58.830,41.170,81.660,18.340,138.36,224,0.875,bilinear,-12.764,-8.716,+1
+pvt_v2_b0,58.760,41.240,82.130,17.870,3.67,224,0.900,bicubic,-11.896,-8.078,+4
+xcit_nano_12_p16_224,58.350,41.650,80.880,19.120,3.05,224,1.000,bicubic,-11.604,-8.874,+7
+gluon_resnet18_v1b,58.340,41.660,80.970,19.030,11.69,224,0.875,bicubic,-12.496,-8.790,+1
+tinynet_c.in1k,58.170,41.830,80.280,19.720,2.46,184,0.875,bicubic,-13.062,-9.468,-1
+resnet14t,57.800,42.200,79.920,20.080,10.08,224,0.950,bilinear,-14.550,-10.420,-15
+mobilevitv2_050,57.720,42.280,80.900,19.100,1.37,256,0.888,bicubic,-12.420,-9.026,+2
 vgg11_bn,57.410,42.590,80.020,19.980,132.87,224,0.875,bilinear,-12.950,-9.782,-1
-resnet18,57.170,42.830,80.200,19.800,11.69,224,0.875,bilinear,-12.578,-8.884,+3
-mobilevit_xxs,57.150,42.850,79.740,20.260,1.27,256,0.900,bicubic,-11.770,-9.206,+4
-vgg13,57.150,42.850,79.550,20.450,133.05,224,0.875,bilinear,-12.776,-9.696,0
-regnety_002,56.980,43.020,79.850,20.150,3.16,224,0.875,bicubic,-13.276,-9.684,-4
-mixer_l16_224,56.690,43.310,75.990,24.010,208.20,224,0.875,bicubic,-15.376,-11.676,-14
-regnetx_002,56.050,43.950,79.230,20.770,2.68,224,0.875,bicubic,-12.704,-9.326,+2
-dla60x_c,56.030,43.970,78.920,21.080,1.32,224,0.875,bilinear,-11.850,-9.514,+4
-vgg11,55.790,44.210,78.840,21.160,132.86,224,0.875,bilinear,-13.238,-9.788,-3
-resnet10t,55.660,44.340,78.020,21.980,5.44,224,0.950,bilinear,-12.648,-10.060,0
-lcnet_075,55.360,44.640,78.310,21.690,2.36,224,0.875,bicubic,-13.454,-10.054,-3
-mobilenetv3_small_100,54.700,45.300,77.780,22.220,2.54,224,0.875,bicubic,-12.958,-9.854,+1
-tf_mobilenetv3_small_100,54.510,45.490,77.070,22.930,2.54,224,0.875,bilinear,-13.416,-10.598,-2
-tinynet_d,53.420,46.580,76.350,23.650,2.34,152,0.875,bicubic,-13.542,-10.714,0
-mnasnet_small,53.270,46.730,75.890,24.110,2.03,224,0.875,bicubic,-12.936,-10.616,0
-dla46x_c,53.060,46.940,76.850,23.150,1.07,224,0.875,bilinear,-12.892,-10.136,0
-mobilenetv2_050,52.850,47.150,75.420,24.580,1.97,224,0.875,bicubic,-13.094,-10.660,0
-tf_mobilenetv3_small_075,52.160,47.840,75.480,24.520,2.04,224,0.875,bilinear,-13.552,-10.650,0
-dla46_c,52.120,47.880,75.680,24.320,1.30,224,0.875,bilinear,-12.752,-10.622,+1
-mobilenetv3_small_075,51.890,48.110,74.730,25.270,2.04,224,0.875,bicubic,-13.348,-10.710,-1
-lcnet_050,49.980,50.020,73.430,26.570,1.88,224,0.875,bicubic,-13.114,-10.952,0
-tf_mobilenetv3_small_minimal_100,49.490,50.510,73.030,26.970,2.04,224,0.875,bilinear,-13.410,-11.204,0
-tinynet_e,46.700,53.300,70.360,29.640,2.04,106,0.875,bicubic,-13.156,-11.406,0
-mobilenetv3_small_050,44.890,55.110,67.670,32.330,1.59,224,0.875,bicubic,-13.000,-12.524,0
+resnet18,57.170,42.830,80.200,19.800,11.69,224,0.875,bilinear,-12.578,-8.878,+2
+mobilevit_xxs,57.170,42.830,79.740,20.260,1.27,256,0.900,bicubic,-11.742,-9.198,+5
+vgg13,57.150,42.850,79.540,20.460,133.05,224,0.875,bilinear,-12.776,-9.706,0
+regnety_002,57.000,43.000,79.840,20.160,3.16,224,0.875,bicubic,-13.252,-9.700,-4
+mixer_l16_224,56.690,43.310,75.990,24.010,208.20,224,0.875,bicubic,-15.368,-11.678,-15
+regnetx_002,56.050,43.950,79.210,20.790,2.68,224,0.875,bicubic,-12.712,-9.346,+2
+dla60x_c,56.000,44.000,78.930,21.070,1.32,224,0.875,bilinear,-11.892,-9.496,+4
+vgg11,55.800,44.200,78.830,21.170,132.86,224,0.875,bilinear,-13.224,-9.798,-3
+resnet10t,55.670,44.330,78.020,21.980,5.44,224,0.950,bilinear,-12.624,-10.058,0
+lcnet_075.ra2_in1k,55.410,44.590,78.300,21.700,2.36,224,0.875,bicubic,-13.408,-10.070,-3
+mobilenetv3_small_100.lamb_in1k,54.700,45.300,77.770,22.230,2.54,224,0.875,bicubic,-12.952,-9.866,+1
+tf_mobilenetv3_small_100.in1k,54.530,45.470,77.060,22.940,2.54,224,0.875,bilinear,-13.392,-10.604,-2
+tinynet_d.in1k,53.420,46.580,76.350,23.650,2.34,152,0.875,bicubic,-13.542,-10.716,0
+mnasnet_small.lamb_in1k,53.280,46.720,75.890,24.110,2.03,224,0.875,bicubic,-12.926,-10.618,0
+dla46x_c,53.050,46.950,76.870,23.130,1.07,224,0.875,bilinear,-12.920,-10.110,0
+mobilenetv2_050.lamb_in1k,52.850,47.150,75.440,24.560,1.97,224,0.875,bicubic,-13.092,-10.642,0
+tf_mobilenetv3_small_075.in1k,52.160,47.840,75.470,24.530,2.04,224,0.875,bilinear,-13.556,-10.660,0
+dla46_c,52.130,47.870,75.690,24.310,1.30,224,0.875,bilinear,-12.736,-10.602,+1
+mobilenetv3_small_075.lamb_in1k,51.900,48.100,74.730,25.270,2.04,224,0.875,bicubic,-13.346,-10.706,-1
+lcnet_050.ra2_in1k,49.990,50.010,73.450,26.550,1.88,224,0.875,bicubic,-13.110,-10.930,0
+tf_mobilenetv3_small_minimal_100.in1k,49.500,50.500,73.050,26.950,2.04,224,0.875,bilinear,-13.406,-11.180,0
+tinynet_e.in1k,46.700,53.300,70.360,29.640,2.04,106,0.875,bicubic,-13.156,-11.402,0
+mobilenetv3_small_050.lamb_in1k,44.890,55.110,67.670,32.330,1.59,224,0.875,bicubic,-13.000,-12.524,0
diff --git a/results/results-sketch.csv b/results/results-sketch.csv
index b9024991..cbcf4f8d 100644
--- a/results/results-sketch.csv
+++ b/results/results-sketch.csv
@@ -1,669 +1,791 @@
 model,top1,top1_err,top5,top5_err,param_count,img_size,crop_pct,interpolation,top1_diff,top5_diff,rank_diff
-ig_resnext101_32x48d,58.820,41.180,81.094,18.906,828.41,224,0.875,bilinear,-26.616,-16.482,+56
-ig_resnext101_32x32d,58.382,41.618,80.383,19.617,468.53,224,0.875,bilinear,-26.718,-17.051,+69
-ig_resnext101_32x16d,57.686,42.314,79.909,20.091,194.03,224,0.875,bilinear,-26.484,-17.289,+119
-swsl_resnext101_32x16d,57.464,42.536,80.373,19.627,194.03,224,0.875,bilinear,-25.886,-16.471,+167
-beit_large_patch16_384,56.892,43.108,79.227,20.773,305.00,384,1.000,bicubic,-31.514,-19.379,-3
-beit_large_patch16_512,56.753,43.247,78.897,21.103,305.67,512,1.000,bicubic,-31.849,-19.759,-5
-swsl_resnext101_32x8d,56.431,43.569,78.939,21.061,88.79,224,0.875,bilinear,-27.859,-18.243,+105
-deit3_huge_patch14_224_in21ft1k,55.763,44.237,77.622,22.378,632.13,224,1.000,bicubic,-31.417,-20.638,+2
-beit_large_patch16_224,54.955,45.045,77.606,22.394,304.43,224,0.900,bicubic,-32.521,-20.698,-2
-ig_resnext101_32x8d,54.931,45.069,77.535,22.465,88.79,224,0.875,bilinear,-27.767,-19.097,+198
-deit3_large_patch16_384_in21ft1k,54.878,45.122,77.370,22.630,304.76,384,1.000,bicubic,-32.838,-21.142,-6
-deit3_large_patch16_224_in21ft1k,54.361,45.639,76.563,23.437,304.37,224,1.000,bicubic,-32.621,-21.675,+4
-convnext_xlarge_384_in22ft1k,53.658,46.342,75.895,24.105,350.20,384,1.000,bicubic,-33.886,-22.591,-7
-swsl_resnext101_32x4d,53.601,46.399,76.353,23.648,44.18,224,0.875,bilinear,-29.639,-20.407,+161
-vit_large_patch16_384,52.756,47.244,74.696,25.304,304.72,384,1.000,bicubic,-34.324,-23.604,-2
-convnext_xlarge_in22ft1k,52.565,47.435,74.403,25.597,350.20,224,0.875,bicubic,-34.437,-23.809,-1
-swinv2_large_window12to24_192to384_22kft1k,52.298,47.702,74.411,25.589,196.74,384,1.000,bicubic,-35.158,-23.841,-9
-vit_large_r50_s32_384,52.041,47.959,73.566,26.434,329.09,384,1.000,bicubic,-34.139,-24.354,+15
-vit_large_patch16_224,51.831,48.169,73.692,26.308,304.33,224,0.900,bicubic,-34.013,-24.130,+23
-convnext_large_384_in22ft1k,51.738,48.262,73.896,26.104,197.77,384,1.000,bicubic,-35.658,-24.470,-11
-tf_efficientnet_l2_ns_475,51.489,48.511,73.928,26.072,480.31,475,0.936,bicubic,-36.743,-24.618,-17
-swinv2_base_window12to24_192to384_22kft1k,50.978,49.022,73.311,26.689,87.92,384,1.000,bicubic,-36.130,-24.925,-10
-swinv2_large_window12to16_192to256_22kft1k,50.441,49.559,72.760,27.240,196.74,256,0.900,bicubic,-36.505,-25.350,-5
-swsl_resnext50_32x4d,50.437,49.563,73.356,26.644,25.03,224,0.875,bilinear,-31.739,-22.876,+227
-convnext_base_384_in22ft1k,50.429,49.571,73.562,26.438,88.59,384,1.000,bicubic,-36.113,-24.628,-1
-swin_large_patch4_window12_384,50.402,49.598,72.568,27.432,196.74,384,1.000,bicubic,-36.750,-25.672,-15
-convnext_large_in22ft1k,49.940,50.060,72.206,27.794,197.77,224,0.875,bicubic,-36.696,-25.822,-4
-swsl_resnet50,49.527,50.473,72.324,27.676,25.56,224,0.875,bilinear,-31.653,-23.656,+284
-swin_large_patch4_window7_224,48.991,51.009,71.389,28.611,196.53,224,0.900,bicubic,-37.329,-26.503,+1
-convnext_base_in22ft1k,48.794,51.206,71.941,28.059,88.59,224,0.875,bicubic,-37.030,-25.925,+13
-swinv2_base_window12to16_192to256_22kft1k,48.788,51.212,71.385,28.615,87.92,256,0.900,bicubic,-37.482,-26.511,+1
-beit_base_patch16_384,48.663,51.337,72.084,27.916,86.74,384,1.000,bicubic,-38.135,-26.052,-12
-swin_base_patch4_window12_384,48.543,51.457,71.823,28.177,87.90,384,1.000,bicubic,-37.889,-26.233,-5
-vit_large_r50_s32_224,48.209,51.791,70.872,29.128,328.99,224,0.900,bicubic,-36.221,-26.294,+71
-tf_efficientnet_b7_ns,47.792,52.208,69.626,30.374,66.35,600,0.949,bicubic,-39.040,-28.470,-16
-tf_efficientnet_b6_ns,47.757,52.243,69.966,30.034,43.04,528,0.942,bicubic,-38.693,-27.920,-9
-tf_efficientnetv2_xl_in21ft1k,47.747,52.253,70.119,29.881,208.12,512,1.000,bicubic,-38.673,-27.749,-8
-vit_base_patch8_224,47.741,52.259,70.931,29.069,86.58,224,0.900,bicubic,-38.049,-26.861,+9
-deit3_base_patch16_384_in21ft1k,47.663,52.337,69.750,30.250,86.88,384,1.000,bicubic,-39.079,-28.362,-17
-tf_efficientnet_l2_ns,47.572,52.428,70.021,29.979,480.31,800,0.960,bicubic,-40.778,-28.629,-37
-deit3_base_patch16_224_in21ft1k,47.370,52.630,69.772,30.229,86.59,224,1.000,bicubic,-38.346,-27.972,+9
-tf_efficientnetv2_l_in21ft1k,46.943,53.057,70.308,29.692,118.52,480,1.000,bicubic,-39.361,-27.672,-11
-beit_base_patch16_224,46.242,53.758,69.901,30.099,86.53,224,0.900,bicubic,-38.986,-27.755,+23
-vit_base_patch16_384,45.902,54.098,68.553,31.447,86.86,384,1.000,bicubic,-40.104,-29.451,-5
-convnext_small_384_in22ft1k,45.792,54.208,68.494,31.506,50.22,384,1.000,bicubic,-39.932,-29.370,+4
-tf_efficientnet_b8_ap,45.768,54.232,67.907,32.093,87.41,672,0.954,bicubic,-39.604,-29.387,+16
-tf_efficientnet_b5_ns,45.607,54.393,67.842,32.158,30.39,456,0.934,bicubic,-40.481,-29.910,-12
-tf_efficientnetv2_m_in21ft1k,45.574,54.426,69.135,30.865,54.14,480,1.000,bicubic,-40.012,-28.611,+4
-swin_base_patch4_window7_224,45.564,54.436,68.504,31.496,87.77,224,0.900,bicubic,-39.686,-29.058,+15
-volo_d5_512,44.572,55.428,65.753,34.247,296.09,512,1.150,bicubic,-42.468,-32.215,-36
-cait_m48_448,44.245,55.755,64.653,35.347,356.46,448,1.000,bicubic,-42.243,-33.097,-25
-deit3_large_patch16_384,44.175,55.825,64.853,35.147,304.76,384,1.000,bicubic,-41.631,-32.743,-6
-volo_d5_448,44.098,55.902,65.063,34.937,295.91,448,1.150,bicubic,-42.856,-32.877,-36
-deit3_huge_patch14_224,43.795,56.205,64.348,35.652,632.13,224,0.900,bicubic,-41.411,-33.010,+13
-convnext_small_in22ft1k,43.607,56.393,66.582,33.418,50.22,224,0.875,bicubic,-40.961,-30.814,+42
-deit3_large_patch16_224,43.520,56.480,63.572,36.428,304.37,224,0.900,bicubic,-41.242,-33.466,+34
-vit_base_r50_s16_384,43.518,56.482,66.783,33.217,98.95,384,1.000,bicubic,-41.458,-30.507,+24
-tf_efficientnet_b4_ns,43.446,56.554,65.515,34.485,19.34,380,0.922,bicubic,-41.714,-31.955,+11
-volo_d5_224,43.259,56.741,64.077,35.923,295.46,224,0.960,bicubic,-42.811,-33.501,-23
-vit_base_patch16_224,43.229,56.771,65.710,34.290,86.57,224,0.900,bicubic,-41.301,-31.586,+38
-volo_d4_448,43.139,56.861,64.114,35.886,193.41,448,1.150,bicubic,-43.653,-33.768,-40
-xcit_large_24_p8_384_dist,42.828,57.172,63.403,36.597,188.93,384,1.000,bicubic,-43.170,-34.281,-22
-xcit_large_24_p8_224_dist,42.563,57.437,63.100,36.900,188.93,224,1.000,bicubic,-42.835,-34.310,-2
-tf_efficientnet_b8,42.502,57.498,64.867,35.133,87.41,672,0.954,bicubic,-42.866,-32.525,-1
-cait_m36_384,42.400,57.600,63.326,36.674,271.22,384,1.000,bicubic,-43.654,-34.404,-28
-volo_d4_224,42.284,57.716,63.010,36.990,192.96,224,0.960,bicubic,-43.592,-34.458,-25
-deit3_small_patch16_384_in21ft1k,41.956,58.044,64.550,35.450,22.21,384,1.000,bicubic,-42.868,-32.934,+20
-tf_efficientnet_b7_ap,41.431,58.569,62.870,37.130,66.35,600,0.949,bicubic,-43.689,-34.382,+2
-tf_efficientnet_b7,41.425,58.575,63.020,36.980,66.35,600,0.949,bicubic,-43.509,-34.186,+14
-tf_efficientnet_b5_ap,41.416,58.584,62.084,37.916,30.39,456,0.934,bicubic,-42.838,-34.894,+46
-resnetv2_152x4_bitm,41.308,58.692,64.305,35.695,936.53,480,1.000,bilinear,-43.610,-33.137,+14
-tf_efficientnet_b6_ap,41.099,58.901,62.355,37.645,43.04,528,0.942,bicubic,-43.687,-34.783,+17
-xcit_large_24_p16_384_dist,41.034,58.966,61.241,38.759,189.10,384,1.000,bicubic,-44.718,-36.297,-25
-xcit_large_24_p16_224_dist,40.956,59.044,61.320,38.680,189.10,224,1.000,bicubic,-43.964,-35.812,+10
-tf_efficientnetv2_s_in21ft1k,40.952,59.048,63.851,36.149,21.46,384,1.000,bicubic,-43.344,-33.403,+35
-xcit_medium_24_p8_224_dist,40.494,59.506,60.502,39.498,84.32,224,1.000,bicubic,-44.576,-36.778,-1
-vit_small_r26_s32_384,40.482,59.518,62.740,37.260,36.47,384,1.000,bicubic,-43.566,-34.588,+53
-tf_efficientnet_b4_ap,40.482,59.518,61.721,38.279,19.34,380,0.922,bicubic,-42.766,-34.671,+96
-deit3_base_patch16_224,40.374,59.626,60.186,39.814,86.59,224,0.900,bicubic,-43.418,-36.398,+70
-vit_base_patch16_224_miil,40.170,59.830,60.889,39.111,86.54,224,0.875,bilinear,-44.102,-35.913,+34
-deit3_small_patch16_224_in21ft1k,40.160,59.840,61.866,38.134,22.06,224,1.000,bicubic,-42.916,-34.910,+106
-regnetz_e8,40.146,59.854,61.322,38.678,57.70,320,1.000,bicubic,-44.884,-35.942,-3
-convnext_large,40.119,59.881,60.092,39.908,197.77,224,0.875,bicubic,-44.177,-36.802,+28
-xcit_medium_24_p8_384_dist,40.040,59.960,60.455,39.545,84.32,384,1.000,bicubic,-45.776,-37.137,-40
-xcit_medium_24_p16_384_dist,39.903,60.097,60.115,39.885,84.40,384,1.000,bicubic,-45.519,-37.291,-27
-tf_efficientnetv2_l,39.826,60.174,60.807,39.193,118.52,480,1.000,bicubic,-45.662,-36.565,-31
-dm_nfnet_f3,39.816,60.184,60.610,39.390,254.92,416,0.940,bicubic,-45.706,-36.852,-33
-cait_s36_384,39.755,60.245,60.475,39.525,68.37,384,1.000,bicubic,-45.705,-37.003,-32
-volo_d3_448,39.712,60.288,59.760,40.240,86.63,448,1.000,bicubic,-46.784,-37.950,-64
-efficientnetv2_rw_m,39.673,60.327,59.687,40.313,53.24,416,1.000,bicubic,-45.139,-37.459,-2
-xception65,39.645,60.355,60.907,39.093,39.92,299,0.940,bicubic,-43.529,-35.685,+87
-tf_efficientnet_b3_ns,39.590,60.410,61.451,38.549,12.23,300,0.904,bicubic,-44.458,-35.461,+39
-ecaresnet269d,39.584,60.416,60.343,39.657,102.09,352,1.000,bicubic,-45.390,-36.883,-11
-dm_nfnet_f6,39.578,60.422,60.911,39.089,438.36,576,0.956,bicubic,-46.564,-36.819,-60
-dm_nfnet_f5,39.504,60.496,60.227,39.773,377.21,544,0.954,bicubic,-46.312,-37.259,-50
-volo_d3_224,39.490,60.510,59.871,40.129,86.33,224,0.960,bicubic,-45.922,-37.409,-36
-deit3_base_patch16_384,39.405,60.595,58.946,41.054,86.88,384,1.000,bicubic,-45.671,-38.308,-23
-xcit_small_24_p8_224_dist,39.309,60.691,59.414,40.586,47.63,224,1.000,bicubic,-45.567,-37.774,-12
-xcit_medium_24_p16_224_dist,39.262,60.738,59.463,40.537,84.40,224,1.000,bicubic,-45.016,-37.477,+14
-efficientnet_b4,39.075,60.925,59.606,40.394,19.34,384,1.000,bicubic,-44.349,-36.992,+65
-xcit_small_24_p8_384_dist,38.999,61.001,59.174,40.826,47.63,384,1.000,bicubic,-46.555,-38.398,-48
-resnetv2_152x2_bit_teacher_384,38.977,61.023,62.436,37.564,236.34,384,1.000,bicubic,-44.867,-34.680,+39
-convnext_tiny_384_in22ft1k,38.922,61.078,60.728,39.272,28.59,384,1.000,bicubic,-45.154,-36.430,+23
-vit_base_patch32_384,38.798,61.202,60.327,39.673,88.30,384,1.000,bicubic,-44.554,-36.509,+66
-eca_nfnet_l2,38.661,61.339,59.441,40.559,56.72,384,1.000,bicubic,-46.035,-37.823,-12
-xcit_small_12_p8_384_dist,38.545,61.455,58.803,41.197,26.21,384,1.000,bicubic,-46.535,-38.477,-33
-xcit_small_24_p16_384_dist,38.503,61.497,58.390,41.610,47.67,384,1.000,bicubic,-46.585,-38.918,-35
-convnext_tiny_in22ft1k,38.470,61.530,60.481,39.519,28.59,224,0.875,bicubic,-44.442,-36.143,+86
-xcit_small_12_p8_224_dist,38.370,61.630,58.799,41.201,26.21,224,1.000,bicubic,-45.860,-38.075,+9
-tf_efficientnet_b5,38.358,61.642,59.917,40.083,30.39,456,0.934,bicubic,-45.456,-36.831,+36
-deit_base_distilled_patch16_384,38.256,61.744,57.788,42.212,87.63,384,1.000,bicubic,-47.166,-39.544,-52
-dm_nfnet_f4,38.234,61.766,58.628,41.372,316.07,512,0.951,bicubic,-47.480,-38.892,-61
-convnext_base,38.234,61.766,58.225,41.775,88.59,224,0.875,bicubic,-45.606,-38.525,+29
-xcit_large_24_p8_224,38.118,61.882,57.885,42.115,188.93,224,1.000,bicubic,-46.274,-38.773,-7
-resnetv2_152x2_bitm,37.985,62.015,61.135,38.865,236.34,448,1.000,bilinear,-46.525,-36.299,-15
-cait_s24_384,37.865,62.135,58.079,41.921,47.06,384,1.000,bicubic,-47.185,-39.269,-39
-resnet152d,37.853,62.147,58.356,41.644,60.21,320,1.000,bicubic,-45.825,-38.384,+41
-tf_efficientnetv2_m,37.822,62.178,58.712,41.288,54.14,480,1.000,bicubic,-47.214,-38.566,-40
-resnetrs420,37.753,62.247,58.215,41.785,191.89,416,1.000,bicubic,-47.255,-38.909,-39
-xcit_small_24_p16_224_dist,37.700,62.300,57.358,42.642,47.67,224,1.000,bicubic,-46.170,-39.374,+19
-resnetrs350,37.676,62.324,58.089,41.911,163.96,384,1.000,bicubic,-47.036,-38.901,-30
-xcit_small_12_p16_384_dist,37.582,62.418,57.773,42.227,26.25,384,1.000,bicubic,-47.126,-39.343,-30
-pit_b_distilled_224,37.582,62.418,57.232,42.768,74.79,224,0.900,bicubic,-46.560,-39.624,+1
-resnet200d,37.505,62.495,58.303,41.697,64.69,320,1.000,bicubic,-46.455,-38.521,+11
-resnetv2_152x2_bit_teacher,37.322,62.678,59.406,40.594,236.34,224,0.875,bicubic,-45.546,-37.162,+72
-resnest269e,37.311,62.689,57.470,42.530,110.93,416,0.928,bicubic,-47.207,-39.516,-27
-vit_small_r26_s32_224,37.242,62.758,59.068,40.932,36.43,224,0.900,bicubic,-44.620,-36.954,+143
-resmlp_big_24_224_in22ft1k,37.242,62.758,58.180,41.820,129.14,224,0.875,bicubic,-47.156,-38.938,-22
-cait_s24_224,37.153,62.847,56.729,43.271,46.92,224,1.000,bicubic,-46.305,-39.833,+34
-vit_base_patch32_224,37.081,62.919,59.286,40.714,88.22,224,0.900,bicubic,-43.643,-36.280,+213
-volo_d1_384,37.075,62.925,57.132,42.868,26.78,384,1.000,bicubic,-48.175,-40.082,-66
-convnext_small,37.055,62.945,57.105,42.895,50.22,224,0.875,bicubic,-46.095,-39.325,+47
-tf_efficientnet_b3_ap,37.049,62.951,57.238,42.762,12.23,300,0.904,bicubic,-44.775,-38.386,+140
-efficientnetv2_rw_s,37.049,62.951,56.810,43.190,23.94,384,1.000,bicubic,-46.761,-39.914,+13
-swinv2_base_window16_256,36.992,63.008,56.128,43.872,87.92,256,0.900,bicubic,-47.600,-40.946,-40
-xcit_small_12_p16_224_dist,36.971,63.029,56.733,43.267,26.25,224,1.000,bicubic,-46.375,-39.685,+36
-regnetz_040h,36.965,63.035,57.280,42.720,28.94,320,1.000,bicubic,-47.531,-39.726,-36
-volo_d1_224,36.880,63.120,56.633,43.367,26.63,224,0.960,bicubic,-47.284,-40.141,-15
-seresnet152d,36.788,63.212,56.718,43.282,66.84,320,1.000,bicubic,-47.576,-40.326,-31
-seresnext101d_32x8d,36.637,63.363,56.328,43.672,93.59,288,1.000,bicubic,-47.725,-40.590,-31
-volo_d2_224,36.601,63.399,56.466,43.534,58.68,224,0.960,bicubic,-48.593,-40.722,-73
-xception65p,36.554,63.446,56.423,43.577,39.82,299,0.940,bicubic,-46.576,-40.057,+42
-seresnextaa101d_32x8d,36.527,63.473,56.403,43.597,93.59,288,1.000,bicubic,-48.045,-40.667,-47
-regnetz_d32,36.444,63.556,57.370,42.630,27.58,320,0.950,bicubic,-47.580,-39.498,-12
-cait_xs24_384,36.420,63.580,56.940,43.060,26.67,384,1.000,bicubic,-47.644,-39.950,-18
-volo_d2_384,36.419,63.581,56.313,43.687,58.87,384,1.000,bicubic,-49.617,-41.261,-108
-efficientnet_b3,36.411,63.589,56.843,43.157,12.23,320,1.000,bicubic,-45.829,-39.275,+97
-deit_base_distilled_patch16_224,36.399,63.601,56.621,43.379,87.34,224,0.900,bicubic,-46.989,-39.867,+20
-resnetv2_101x3_bitm,36.385,63.615,59.066,40.934,387.93,448,1.000,bilinear,-48.059,-38.316,-47
-resnetrs270,36.318,63.682,56.566,43.434,129.86,352,1.000,bicubic,-48.118,-40.408,-46
-tresnet_m,36.289,63.711,55.796,44.204,31.39,224,0.875,bilinear,-46.785,-40.324,+37
-mixer_b16_224_miil,36.267,63.733,55.967,44.033,59.88,224,0.875,bilinear,-46.037,-39.753,+87
-deit3_small_patch16_384,36.185,63.815,55.566,44.434,22.21,384,1.000,bicubic,-47.243,-41.110,+11
-tf_efficientnet_b2_ns,36.179,63.821,57.551,42.449,9.11,260,0.890,bicubic,-46.205,-38.695,+76
-resnet152,36.084,63.916,55.566,44.434,60.19,224,0.950,bicubic,-46.734,-40.566,+44
-regnetz_040,36.047,63.953,55.747,44.253,27.12,320,1.000,bicubic,-48.189,-41.185,-39
-ecaresnet101d,36.010,63.990,56.161,43.839,44.57,224,0.875,bicubic,-46.160,-39.887,+95
-dm_nfnet_f2,36.006,63.994,55.458,44.542,193.78,352,0.920,bicubic,-49.060,-41.784,-82
-resnest200e,35.937,64.063,55.841,44.159,70.20,320,0.909,bicubic,-47.891,-41.051,-14
-swsl_resnet18,35.862,64.138,58.457,41.543,11.69,224,0.875,bilinear,-37.412,-33.279,+460
-eca_nfnet_l1,35.823,64.177,55.961,44.039,41.41,320,1.000,bicubic,-48.189,-41.071,-27
-sequencer2d_l,35.819,64.181,55.719,44.281,54.30,224,0.875,bicubic,-47.587,-40.781,+4
-vit_relpos_medium_patch16_cls_224,35.735,64.265,54.923,45.077,38.76,224,0.900,bicubic,-46.827,-41.143,+55
-xcit_small_24_p8_224,35.556,64.444,54.782,45.218,47.63,224,1.000,bicubic,-48.284,-41.854,-21
-xcit_large_24_p16_224,35.524,64.476,54.762,45.238,189.10,224,1.000,bicubic,-47.368,-41.116,+30
-xcit_small_12_p8_224,35.520,64.480,55.505,44.495,26.21,224,1.000,bicubic,-47.820,-40.975,+7
-vit_small_patch16_384,35.473,64.527,57.541,42.459,22.20,384,1.000,bicubic,-48.327,-39.559,-19
-xcit_medium_24_p8_224,35.452,64.548,54.825,45.175,84.32,224,1.000,bicubic,-48.286,-41.569,-16
-swinv2_base_window8_256,35.444,64.556,54.611,45.389,87.92,256,0.900,bicubic,-48.818,-42.311,-54
-swinv2_small_window16_256,35.430,64.570,54.637,45.363,49.73,256,0.900,bicubic,-48.780,-42.233,-51
-resnest101e,35.375,64.625,55.794,44.206,48.28,256,0.875,bilinear,-47.513,-40.526,+25
-convit_base,35.314,64.686,54.931,45.069,86.54,224,0.875,bicubic,-46.978,-41.007,+69
-convnext_tiny_hnf,35.279,64.721,53.849,46.151,28.59,224,0.950,bicubic,-46.941,-42.017,+74
-xcit_tiny_24_p8_224_dist,35.255,64.745,55.254,44.746,12.11,224,1.000,bicubic,-47.305,-40.914,+45
-twins_svt_large,35.088,64.912,54.719,45.281,99.27,224,0.900,bicubic,-48.592,-41.875,-18
-repvgg_b3,35.051,64.949,54.558,45.442,123.09,224,0.875,bilinear,-45.445,-40.706,+178
-repvgg_b3g4,35.039,64.961,54.774,45.226,83.83,224,0.875,bilinear,-45.177,-40.334,+204
-regnetz_d8,34.996,65.004,55.941,44.059,23.37,320,1.000,bicubic,-49.056,-41.055,-50
-dm_nfnet_f1,34.986,65.014,54.110,45.890,132.63,320,0.910,bicubic,-49.638,-42.988,-85
-xcit_tiny_24_p8_384_dist,34.931,65.069,55.151,44.849,12.11,384,1.000,bicubic,-48.815,-41.561,-29
-regnetz_d8_evos,34.894,65.106,55.258,44.742,23.46,320,0.950,bicubic,-49.156,-41.738,-52
-resnet101d,34.870,65.130,54.194,45.806,44.57,320,1.000,bicubic,-48.152,-42.252,+10
-seresnext101_32x8d,34.791,65.209,53.448,46.552,93.57,288,1.000,bicubic,-49.413,-43.426,-63
-resmlp_big_24_distilled_224,34.788,65.213,54.637,45.363,129.14,224,0.875,bicubic,-48.800,-42.011,-25
-swin_s3_base_224,34.788,65.213,53.693,46.307,71.13,224,0.900,bicubic,-49.144,-42.967,-49
-vit_relpos_base_patch16_clsgap_224,34.725,65.275,54.218,45.782,86.43,224,0.900,bicubic,-48.035,-41.956,+17
-vit_base_patch16_rpn_224,34.711,65.289,54.662,45.338,86.54,224,0.900,bicubic,-47.489,-41.334,+62
-sequencer2d_m,34.705,65.295,54.010,45.990,38.31,224,0.875,bicubic,-48.103,-42.258,+13
-deit3_small_patch16_224,34.675,65.325,53.163,46.837,22.06,224,0.900,bicubic,-46.707,-42.287,+112
-vit_large_patch32_384,34.670,65.330,55.731,44.269,306.63,384,1.000,bicubic,-46.838,-40.359,+100
-resnet101,34.658,65.342,54.297,45.703,44.55,224,0.950,bicubic,-47.272,-41.469,+77
-dm_nfnet_f0,34.624,65.376,54.672,45.328,71.49,256,0.900,bicubic,-48.760,-41.902,-23
-vit_relpos_base_patch16_224,34.613,65.387,54.291,45.709,86.43,224,0.900,bicubic,-47.873,-41.851,+30
-ssl_resnext101_32x16d,34.605,65.395,55.937,44.063,194.03,224,0.875,bilinear,-47.251,-40.159,+77
-repvgg_b2g4,34.587,65.413,54.778,45.222,61.76,224,0.875,bilinear,-44.779,-39.910,+232
-resnetv2_101,34.581,65.419,53.153,46.847,44.54,224,0.950,bicubic,-47.465,-42.709,+63
-resnetrs200,34.507,65.493,54.291,45.709,93.21,320,1.000,bicubic,-49.933,-42.789,-94
-resnest50d_4s2x40d,34.357,65.643,54.733,45.267,30.42,224,0.875,bicubic,-46.751,-40.829,+119
-resnetrs152,34.357,65.643,53.562,46.438,86.62,320,1.000,bicubic,-49.357,-43.052,-45
-crossvit_18_dagger_408,34.253,65.747,53.088,46.912,44.61,408,1.000,bicubic,-49.941,-43.730,-79
-xcit_medium_24_p16_224,34.237,65.763,53.165,46.835,84.40,224,1.000,bicubic,-48.401,-42.813,+9
-tf_efficientnet_b1_ns,34.165,65.835,55.495,44.505,7.79,240,0.882,bicubic,-47.221,-40.241,+98
-efficientnetv2_rw_t,34.155,65.845,53.137,46.863,13.65,288,1.000,bicubic,-48.189,-43.059,+30
-twins_pcpvt_large,34.106,65.894,54.126,45.874,60.99,224,0.900,bicubic,-49.030,-42.478,-21
-tf_efficientnet_b4,34.064,65.936,54.196,45.804,19.34,380,0.922,bicubic,-48.960,-42.104,-14
-ssl_resnext101_32x8d,34.029,65.971,55.601,44.399,88.79,224,0.875,bilinear,-47.579,-40.441,+77
-nfnet_l0,34.005,65.995,54.361,45.639,35.07,288,1.000,bicubic,-48.747,-42.157,-3
-xcit_small_24_p16_224,34.005,65.995,53.271,46.729,47.67,224,1.000,bicubic,-48.579,-42.729,+7
-efficientnet_b3_pruned,33.994,66.006,54.106,45.894,9.86,300,0.904,bicubic,-46.864,-41.138,+129
-tf_efficientnet_b6,33.992,66.008,54.542,45.458,43.04,528,0.942,bicubic,-50.116,-42.346,-85
-regnety_160,33.972,66.028,53.540,46.460,83.59,288,1.000,bicubic,-49.720,-43.236,-55
-gc_efficientnetv2_rw_t,33.960,66.040,53.222,46.778,13.68,288,1.000,bicubic,-48.506,-43.076,+12
-pit_s_distilled_224,33.935,66.065,53.267,46.733,24.04,224,0.900,bicubic,-48.059,-42.529,+48
-convnext_tiny,33.838,66.162,53.656,46.344,28.59,224,0.875,bicubic,-48.224,-42.198,+44
-swinv2_cr_small_ns_224,33.836,66.164,52.618,47.382,49.70,224,0.900,bicubic,-49.650,-43.866,-53
-resnext101_64x4d,33.827,66.173,52.172,47.828,83.46,288,1.000,bicubic,-49.317,-44.202,-36
-xcit_small_12_p16_224,33.768,66.232,53.233,46.767,26.25,224,1.000,bicubic,-48.204,-42.579,+47
-swin_s3_small_224,33.701,66.299,52.391,47.609,49.74,224,0.900,bicubic,-50.073,-44.061,-68
-resnetv2_50x3_bitm,33.663,66.337,55.882,44.118,217.32,448,1.000,bilinear,-50.349,-41.244,-86
-swinv2_small_window8_256,33.636,66.364,52.821,47.179,49.73,256,0.900,bicubic,-50.218,-43.821,-80
-resnet51q,33.551,66.448,53.023,46.977,35.70,288,1.000,bilinear,-48.807,-43.155,+10
-xcit_tiny_24_p16_384_dist,33.512,66.488,52.768,47.232,12.12,384,1.000,bicubic,-49.060,-43.520,-5
-vit_relpos_medium_patch16_224,33.500,66.500,52.603,47.397,38.75,224,0.900,bicubic,-48.962,-43.483,+2
-regnety_080,33.469,66.531,52.939,47.061,39.18,288,1.000,bicubic,-50.459,-43.949,-87
-cs3edgenet_x,33.463,66.537,52.939,47.061,47.82,288,1.000,bicubic,-49.259,-43.437,-19
-sequencer2d_s,33.434,66.566,52.404,47.596,27.65,224,0.875,bicubic,-48.910,-43.630,+8
-convmixer_1536_20,33.428,66.572,53.029,46.971,51.63,224,0.960,bicubic,-47.942,-42.583,+76
-regnety_032,33.406,66.594,52.758,47.242,19.44,288,1.000,bicubic,-49.318,-43.664,-23
-crossvit_18_240,33.396,66.604,52.243,47.757,43.27,240,0.875,bicubic,-49.002,-43.811,-1
-vit_srelpos_medium_patch16_224,33.373,66.627,52.453,47.547,38.74,224,0.900,bicubic,-48.863,-43.481,+15
-gernet_l,33.361,66.639,51.909,48.091,31.08,256,0.875,bilinear,-47.989,-43.627,+73
-crossvit_15_dagger_408,33.331,66.669,52.190,47.810,28.50,408,1.000,bicubic,-50.507,-44.590,-88
-crossvit_18_dagger_240,33.284,66.716,52.202,47.798,44.27,240,0.875,bicubic,-49.236,-43.866,-12
-tresnet_xl,33.261,66.739,52.294,47.706,78.44,224,0.875,bilinear,-48.801,-43.642,+23
-jx_nest_base,33.214,66.787,51.809,48.191,67.72,224,0.875,bicubic,-50.340,-44.555,-75
-resnest50d_1s4x24d,33.149,66.851,52.852,47.148,25.68,224,0.875,bicubic,-47.835,-42.472,+90
-vit_relpos_medium_patch16_rpn_224,33.109,66.891,52.347,47.653,38.73,224,0.900,bicubic,-49.185,-43.625,+3
-resnet61q,33.099,66.900,51.758,48.242,36.85,288,1.000,bicubic,-49.419,-44.372,-16
-jx_nest_small,33.048,66.952,51.062,48.938,38.35,224,0.875,bicubic,-50.072,-45.268,-54
-crossvit_base_240,33.037,66.963,51.384,48.616,105.03,240,0.875,bicubic,-49.179,-44.448,+8
-twins_pcpvt_base,33.023,66.977,52.489,47.511,43.83,224,0.900,bicubic,-49.685,-43.861,-34
-xcit_tiny_24_p16_224_dist,32.987,67.013,52.056,47.944,12.12,224,1.000,bicubic,-47.461,-43.156,+119
-rexnet_200,32.980,67.020,52.935,47.065,16.37,224,0.875,bicubic,-48.648,-42.733,+38
-resnest50d,32.976,67.024,52.711,47.289,27.48,224,0.875,bilinear,-47.998,-42.669,+83
-convit_small,32.909,67.091,52.123,47.877,27.78,224,0.875,bicubic,-48.519,-43.619,+51
-crossvit_15_dagger_240,32.907,67.093,51.783,48.217,28.21,240,0.875,bicubic,-49.419,-44.173,-9
-tf_efficientnetv2_s,32.907,67.093,51.730,48.270,21.46,384,1.000,bicubic,-50.977,-44.968,-109
-vit_small_patch16_224,32.877,67.123,53.917,46.083,22.05,224,0.900,bicubic,-48.519,-42.221,+51
-tf_efficientnet_b3,32.862,67.138,52.955,47.045,12.23,300,0.904,bicubic,-48.776,-42.763,+31
-pnasnet5large,32.850,67.150,50.506,49.494,86.06,331,0.911,bicubic,-49.932,-45.536,-48
-twins_svt_base,32.832,67.168,51.563,48.437,56.07,224,0.900,bicubic,-50.306,-44.857,-70
-regnetv_064,32.830,67.170,52.854,47.146,30.58,288,1.000,bicubic,-50.882,-43.892,-97
-regnetz_c16,32.828,67.172,53.750,46.250,13.46,320,0.940,bicubic,-49.692,-42.610,-33
-nasnetalarge,32.773,67.227,50.141,49.859,88.75,331,0.911,bicubic,-49.845,-45.903,-42
-gernet_m,32.744,67.256,51.907,48.093,21.14,224,0.875,bilinear,-47.986,-43.279,+87
-inception_resnet_v2,32.736,67.264,50.653,49.347,55.84,299,0.897,bicubic,-47.724,-44.653,+102
-gluon_resnet152_v1d,32.732,67.268,51.088,48.912,60.21,224,0.875,bicubic,-47.744,-44.112,+100
-pit_b_224,32.718,67.282,49.854,50.146,73.76,224,0.900,bicubic,-49.726,-45.858,-31
-tf_efficientnet_b2_ap,32.685,67.315,52.237,47.763,9.11,260,0.890,bicubic,-47.617,-42.791,+114
-fbnetv3_g,32.634,67.366,52.888,47.112,16.62,288,0.950,bilinear,-49.400,-43.178,0
-tresnet_l,32.559,67.441,51.139,48.861,55.99,224,0.875,bilinear,-48.931,-44.487,+30
-cait_xxs36_384,32.543,67.457,52.233,47.767,17.37,384,1.000,bicubic,-49.649,-43.911,-12
-regnetz_c16_evos,32.530,67.470,52.923,47.077,13.49,320,0.950,bicubic,-50.102,-43.553,-52
-wide_resnet50_2,32.435,67.565,51.453,48.547,68.88,224,0.875,bicubic,-49.021,-44.077,+30
-gmlp_s16_224,32.420,67.580,51.819,48.181,19.42,224,0.875,bicubic,-47.220,-42.805,+150
-ens_adv_inception_resnet_v2,32.374,67.626,50.419,49.581,55.84,299,0.897,bicubic,-47.600,-44.523,+127
-deit_base_patch16_224,32.363,67.637,50.997,49.003,86.57,224,0.900,bicubic,-49.631,-44.735,-5
-swin_small_patch4_window7_224,32.349,67.651,50.911,49.089,49.61,224,0.900,bicubic,-50.869,-45.415,-92
-gluon_resnet152_v1s,32.335,67.665,50.528,49.472,60.32,224,0.875,bicubic,-48.679,-44.886,+56
-deit_small_distilled_patch16_224,32.286,67.714,52.109,47.891,22.44,224,0.900,bicubic,-48.922,-43.265,+41
-xcit_tiny_24_p8_224,32.278,67.722,51.903,48.097,12.11,224,1.000,bicubic,-49.618,-44.071,-2
-gluon_seresnext101_64x4d,32.196,67.804,50.306,49.694,88.23,224,0.875,bicubic,-48.684,-44.990,+63
-coat_lite_small,32.121,67.879,49.934,50.066,19.84,224,0.900,bicubic,-50.183,-45.916,-35
-gluon_seresnext101_32x4d,32.109,67.891,51.235,48.765,48.96,224,0.875,bicubic,-48.797,-44.061,+59
-deit_base_patch16_384,31.989,68.011,50.549,49.451,86.86,384,1.000,bicubic,-51.117,-45.821,-89
-seresnext50_32x4d,31.971,68.028,51.227,48.773,27.56,224,0.875,bicubic,-49.291,-44.401,+32
-xcit_tiny_12_p8_224_dist,31.938,68.062,51.394,48.606,6.71,224,1.000,bicubic,-49.270,-44.212,+33
-levit_384,31.865,68.135,50.593,49.407,39.13,224,0.900,bicubic,-50.723,-45.425,-64
-resnetrs101,31.850,68.150,51.019,48.981,63.62,288,0.940,bicubic,-50.434,-44.989,-37
-cs3se_edgenet_x,31.797,68.203,50.763,49.237,50.72,320,1.000,bicubic,-51.751,-45.903,-119
-vit_relpos_small_patch16_224,31.787,68.213,50.628,49.372,21.98,224,0.900,bicubic,-49.667,-45.200,+14
-poolformer_m48,31.698,68.302,49.889,50.111,73.47,224,0.950,bicubic,-50.762,-46.069,-56
-tnt_s_patch16_224,31.643,68.357,51.137,48.863,23.76,224,0.900,bicubic,-49.875,-44.609,+5
-eca_nfnet_l0,31.604,68.396,51.608,48.392,24.14,288,1.000,bicubic,-50.974,-44.882,-68
-resnetv2_50x1_bit_distilled,31.567,68.433,51.268,48.732,25.55,224,0.875,bicubic,-51.255,-45.254,-87
-mobilevitv2_200_in22ft1k,31.527,68.473,51.772,48.228,18.45,256,0.888,bicubic,-50.807,-44.166,-51
-xception41p,31.510,68.490,50.374,49.626,26.91,299,0.940,bicubic,-50.458,-45.420,-22
-regnety_064,31.472,68.528,50.524,49.476,30.58,288,1.000,bicubic,-52.248,-46.202,-135
-poolformer_m36,31.449,68.551,50.034,49.966,56.17,224,0.950,bicubic,-50.659,-45.656,-34
-ssl_resnext101_32x4d,31.415,68.585,52.133,47.867,44.18,224,0.875,bilinear,-49.509,-43.593,+42
-inception_v4,31.380,68.620,49.242,50.758,42.68,299,0.875,bicubic,-48.788,-45.722,+92
-rexnet_150,31.372,68.628,51.276,48.724,9.73,224,0.875,bicubic,-48.942,-43.890,+80
-crossvit_15_240,31.339,68.661,50.168,49.832,27.53,240,0.875,bicubic,-50.205,-45.522,-7
-pit_s_224,31.335,68.665,49.665,50.335,23.46,224,0.900,bicubic,-49.763,-45.667,+25
-swinv2_tiny_window16_256,31.303,68.697,49.645,50.355,28.35,256,0.900,bicubic,-51.507,-46.585,-95
-crossvit_small_240,31.284,68.716,50.196,49.804,26.86,240,0.875,bicubic,-49.732,-45.260,+28
-cspresnet50,31.278,68.722,51.221,48.779,21.62,256,0.887,bilinear,-48.304,-43.487,+120
-vit_srelpos_small_patch16_224,31.278,68.722,50.247,49.753,21.97,224,0.900,bicubic,-49.820,-45.325,+20
-cait_xxs36_224,31.264,68.736,50.612,49.388,17.30,224,1.000,bicubic,-48.484,-44.256,+107
-swinv2_cr_small_224,31.262,68.738,48.737,51.263,49.70,224,0.900,bicubic,-51.876,-47.361,-118
-convmixer_768_32,31.250,68.750,50.950,49.050,21.11,224,0.960,bicubic,-48.914,-44.122,+84
-swin_s3_tiny_224,31.239,68.761,49.718,50.282,28.33,224,0.900,bicubic,-50.885,-46.232,-49
-cspresnext50,31.221,68.779,50.885,49.115,20.57,256,0.887,bilinear,-49.323,-44.439,+47
-regnetv_040,31.213,68.787,50.111,49.889,20.64,288,1.000,bicubic,-51.985,-46.553,-127
-coat_mini,31.203,68.797,49.773,50.227,10.34,224,0.900,bicubic,-50.063,-45.619,+2
-xcit_tiny_12_p8_384_dist,31.188,68.812,50.524,49.476,6.71,384,1.000,bicubic,-51.198,-45.698,-77
-ecaresnetlight,31.125,68.875,50.239,49.761,30.16,224,0.875,bicubic,-49.331,-45.007,+52
-gluon_resnet101_v1s,31.119,68.881,49.799,50.201,44.67,224,0.875,bicubic,-49.179,-45.363,+67
-edgenext_small,31.101,68.899,50.129,49.871,5.59,320,1.000,bicubic,-50.473,-45.585,-25
-tf_efficientnet_cc_b0_8e,31.091,68.909,50.775,49.225,24.01,224,0.875,bicubic,-46.809,-42.883,+199
-resmlp_36_distilled_224,31.072,68.928,49.691,50.309,44.69,224,0.875,bicubic,-50.084,-45.795,+2
-ecaresnet50t,31.052,68.948,50.577,49.423,25.57,320,0.950,bicubic,-51.296,-45.561,-80
-ecaresnet50d,31.048,68.952,50.844,49.156,25.58,224,0.875,bicubic,-49.550,-44.474,+35
-cspdarknet53,31.018,68.981,50.394,49.606,27.64,256,0.887,bilinear,-49.038,-44.692,+77
-resnet50d,31.018,68.981,49.808,50.192,25.58,224,0.875,bicubic,-49.510,-45.360,+37
-cs3sedarknet_x,31.015,68.985,50.144,49.856,35.40,288,1.000,bicubic,-51.639,-46.202,-107
-gcresnet50t,31.011,68.989,50.121,49.879,25.90,256,0.900,bicubic,-49.923,-45.333,+14
-gluon_resnext101_64x4d,30.993,69.007,48.553,51.447,83.46,224,0.875,bicubic,-49.611,-46.439,+29
-gluon_resnet152_v1c,30.991,69.009,48.934,51.066,60.21,224,0.875,bicubic,-48.921,-45.908,+76
-twins_svt_small,30.975,69.025,49.223,50.777,24.06,224,0.900,bicubic,-50.707,-46.443,-42
-resnext50_32x4d,30.922,69.078,49.266,50.734,25.03,224,0.950,bicubic,-50.174,-46.060,-1
-ecaresnet101d_pruned,30.903,69.097,50.003,49.997,24.88,224,0.875,bicubic,-49.907,-45.625,+17
-resmlp_24_distilled_224,30.899,69.101,50.178,49.822,30.02,224,0.875,bicubic,-49.865,-45.044,+18
-tf_efficientnet_cc_b1_8e,30.897,69.103,50.080,49.920,39.72,240,0.882,bicubic,-48.417,-44.290,+106
-gluon_resnext101_32x4d,30.885,69.115,48.547,51.453,44.18,224,0.875,bicubic,-49.455,-46.379,+44
-tf_efficientnetv2_b3,30.861,69.139,49.820,50.180,14.36,300,0.904,bicubic,-51.105,-45.962,-60
-tf_efficientnet_lite4,30.830,69.170,50.394,49.606,13.01,380,0.920,bilinear,-50.704,-45.272,-40
-nf_resnet50,30.700,69.300,49.956,50.044,25.56,288,0.940,bicubic,-49.954,-45.378,+17
-dpn107,30.680,69.320,48.812,51.188,86.92,224,0.875,bicubic,-49.488,-46.094,+55
-poolformer_s36,30.678,69.322,49.433,50.567,30.86,224,0.900,bicubic,-50.740,-46.015,-32
-xcit_tiny_24_p16_224,30.675,69.325,50.416,49.584,12.12,224,1.000,bicubic,-48.769,-44.472,+93
-ese_vovnet39b,30.667,69.333,49.879,50.121,24.57,224,0.875,bicubic,-48.645,-44.835,+99
-tresnet_xl_448,30.620,69.380,49.068,50.932,78.44,448,0.875,bilinear,-52.428,-47.102,-144
-gluon_resnet152_v1b,30.610,69.390,48.515,51.485,60.19,224,0.875,bicubic,-49.072,-46.221,+77
-haloregnetz_b,30.606,69.394,49.013,50.987,11.68,224,0.940,bicubic,-50.438,-46.185,-13
-ssl_resnext50_32x4d,30.596,69.404,50.655,49.345,25.03,224,0.875,bilinear,-49.730,-44.757,+34
-dpn68b,30.525,69.475,49.172,50.828,12.61,224,0.875,bicubic,-48.691,-45.242,+105
-gluon_resnet101_v1d,30.521,69.479,47.953,52.047,44.57,224,0.875,bicubic,-49.897,-47.061,+24
-mobilevitv2_200_384_in22ft1k,30.498,69.502,50.567,49.433,18.45,384,1.000,bicubic,-52.902,-46.015,-172
-resnest26d,30.490,69.510,50.667,49.333,17.07,224,0.875,bilinear,-47.994,-43.627,+135
-efficientnet_b2,30.439,69.561,49.693,50.307,9.11,288,1.000,bicubic,-50.177,-45.623,+5
-tf_efficientnet_b1_ap,30.421,69.579,49.559,50.441,7.79,240,0.882,bicubic,-48.853,-44.749,+96
-xcit_tiny_12_p16_384_dist,30.403,69.597,50.127,49.873,6.72,384,1.000,bicubic,-50.539,-45.281,-13
-cs3darknet_x,30.398,69.603,49.195,50.805,35.05,288,1.000,bicubic,-51.826,-47.035,-98
-twins_pcpvt_small,30.384,69.616,49.388,50.612,24.11,224,0.900,bicubic,-50.706,-46.254,-24
-resnetv2_50,30.384,69.616,48.828,51.172,25.55,224,0.950,bicubic,-50.028,-46.244,+17
-visformer_small,30.335,69.665,48.291,51.709,40.22,224,0.900,bicubic,-51.773,-47.585,-93
-pit_xs_distilled_224,30.278,69.722,49.838,50.162,11.00,224,0.900,bicubic,-49.030,-44.528,+84
-regnety_040,30.252,69.748,48.918,51.082,20.65,288,1.000,bicubic,-52.784,-47.592,-159
-mobilevitv2_175_in22ft1k,30.213,69.787,49.024,50.976,14.25,256,0.888,bicubic,-51.727,-46.766,-83
-vit_relpos_base_patch32_plus_rpn_256,30.211,69.789,48.700,51.300,119.42,256,0.900,bicubic,-49.275,-45.440,+70
-convmixer_1024_20_ks9_p14,30.101,69.899,49.934,50.066,24.38,224,0.960,bicubic,-46.841,-43.424,+199
-seresnet50,30.073,69.927,49.288,50.712,28.09,224,0.875,bicubic,-50.193,-45.782,+23
-dpn98,30.061,69.939,48.254,51.746,61.57,224,0.875,bicubic,-49.583,-46.346,+60
-tf_efficientnet_b2,30.030,69.970,49.581,50.419,9.11,260,0.890,bicubic,-50.058,-45.327,+32
-efficientnet_el,30.022,69.978,48.832,51.168,10.59,300,0.904,bicubic,-51.284,-46.702,-50
-dpn131,30.016,69.984,48.128,51.872,79.25,224,0.875,bicubic,-49.810,-46.580,+46
-legacy_senet154,30.005,69.996,48.042,51.958,115.09,224,0.875,bilinear,-51.303,-47.454,-53
-xcit_tiny_12_p16_224_dist,30.001,69.999,49.643,50.357,6.72,224,1.000,bicubic,-48.577,-44.555,+111
-halo2botnet50ts_256,29.985,70.015,48.374,51.626,22.64,256,0.950,bicubic,-52.083,-47.268,-104
-mobilevitv2_150_in22ft1k,29.957,70.043,49.219,50.781,10.59,256,0.888,bicubic,-51.513,-46.449,-68
-dpn92,29.955,70.045,49.176,50.824,37.67,224,0.875,bicubic,-50.065,-45.654,+30
-resnetv2_101x1_bitm,29.896,70.104,51.127,48.873,44.54,448,1.000,bilinear,-52.436,-45.389,-127
-gluon_senet154,29.877,70.123,47.892,52.108,115.09,224,0.875,bicubic,-51.353,-47.454,-55
-xception,29.863,70.137,48.681,51.319,22.86,299,0.897,bicubic,-49.181,-45.713,+88
-adv_inception_v3,29.820,70.180,47.843,52.157,23.83,299,0.875,bicubic,-47.758,-45.895,+160
-cs3sedarknet_l,29.812,70.188,48.985,51.015,21.91,288,0.950,bicubic,-51.964,-46.985,-91
-resnetaa50,29.794,70.206,48.018,51.982,25.56,288,1.000,bicubic,-51.824,-47.792,-86
-gluon_xception65,29.786,70.214,47.765,52.235,39.92,299,0.903,bicubic,-49.936,-47.095,+38
-lamhalobotnet50ts_256,29.745,70.255,48.339,51.661,22.57,256,0.950,bicubic,-51.807,-47.165,-85
-fbnetv3_d,29.737,70.263,49.453,50.547,10.31,256,0.950,bilinear,-49.943,-45.487,+41
-resmlp_36_224,29.696,70.304,48.969,51.031,44.69,224,0.875,bicubic,-50.074,-45.917,+33
-convnext_nano,29.694,70.306,47.930,52.070,15.59,288,1.000,bicubic,-51.782,-47.730,-81
-resnet50,29.631,70.369,46.745,53.255,25.56,224,0.950,bicubic,-50.743,-47.869,-9
-resnetblur50,29.610,70.391,48.254,51.746,25.56,224,0.875,bicubic,-49.684,-46.380,+61
-resnetv2_50d_gn,29.608,70.392,47.792,52.208,25.57,288,0.950,bicubic,-52.216,-48.132,-104
-jx_nest_tiny,29.543,70.457,46.985,53.015,17.06,224,0.875,bicubic,-51.875,-48.633,-80
-resnet50_gn,29.535,70.465,48.301,51.699,25.56,224,0.940,bicubic,-50.525,-46.647,+12
-efficientnet_em,29.476,70.524,48.942,51.058,6.90,240,0.882,bicubic,-49.776,-45.850,+61
-cs3darknet_l,29.470,70.530,48.215,51.785,21.16,288,0.950,bicubic,-51.416,-47.453,-46
-resnext101_32x8d,29.439,70.561,48.488,51.512,88.79,224,0.875,bilinear,-49.877,-46.030,+48
-gcresnext50ts,29.429,70.571,47.902,52.098,15.67,256,0.900,bicubic,-51.149,-47.268,-33
-coat_lite_mini,29.429,70.571,47.729,52.271,11.01,224,0.900,bicubic,-49.659,-46.879,+66
-deit_small_patch16_224,29.423,70.577,48.258,51.742,22.05,224,0.900,bicubic,-50.441,-46.790,+15
-sebotnet33ts_256,29.423,70.577,47.146,52.854,13.70,256,0.940,bicubic,-51.731,-48.020,-71
-ssl_resnet50,29.405,70.595,49.787,50.213,25.56,224,0.875,bilinear,-49.819,-45.043,+55
-nf_regnet_b1,29.391,70.609,49.411,50.589,10.22,288,0.900,bicubic,-49.909,-45.343,+47
-cait_xxs24_384,29.387,70.612,48.747,51.253,12.03,384,1.000,bicubic,-51.575,-46.897,-59
-edgenext_small_rw,29.350,70.650,48.737,51.263,7.83,320,1.000,bicubic,-51.102,-46.453,-29
-resnet34d,29.332,70.668,48.411,51.589,21.82,224,0.875,bicubic,-47.784,-44.971,+153
-swin_tiny_patch4_window7_224,29.332,70.668,47.611,52.389,28.29,224,0.900,bicubic,-52.044,-47.931,-89
-cait_xxs24_224,29.303,70.697,48.527,51.473,11.96,224,1.000,bicubic,-49.083,-45.781,+91
-ecaresnet50d_pruned,29.209,70.791,48.443,51.557,19.94,224,0.875,bicubic,-50.509,-46.433,+15
-poolformer_s24,29.175,70.825,48.062,51.938,21.39,224,0.900,bicubic,-51.141,-46.980,-23
-tresnet_l_448,29.165,70.835,47.226,52.774,55.99,448,0.875,bilinear,-53.105,-48.754,-152
-gluon_inception_v3,29.120,70.880,46.955,53.045,23.83,299,0.875,bicubic,-49.686,-47.415,+66
-eca_resnet33ts,29.105,70.895,48.796,51.204,19.68,256,0.900,bicubic,-50.975,-46.176,-9
-lambda_resnet50ts,29.097,70.903,46.981,53.019,21.54,256,0.950,bicubic,-52.055,-48.121,-83
-xception71,29.040,70.960,47.411,52.589,42.34,299,0.903,bicubic,-50.830,-47.513,-1
-hrnet_w64,28.991,71.010,47.130,52.870,128.06,224,0.875,bilinear,-50.479,-47.524,+22
-xcit_tiny_12_p8_224,28.957,71.043,47.511,52.489,6.71,224,1.000,bicubic,-50.737,-47.537,+8
-regnetz_b16,28.943,71.057,47.246,52.754,9.72,288,0.940,bicubic,-51.769,-48.228,-58
-cs3darknet_focus_l,28.926,71.074,47.629,52.371,21.15,288,0.950,bicubic,-51.948,-48.063,-67
-tf_efficientnet_b1,28.886,71.114,47.498,52.502,7.79,240,0.882,bicubic,-49.942,-46.700,+57
-tf_efficientnet_b0_ns,28.884,71.116,48.997,51.003,5.29,224,0.875,bicubic,-49.780,-45.379,+63
-resnetv2_50d_evos,28.878,71.121,46.672,53.328,25.59,288,0.950,bicubic,-53.100,-49.240,-143
-vit_small_patch32_384,28.875,71.125,48.889,51.111,22.92,384,1.000,bicubic,-51.615,-46.711,-52
-gluon_resnet101_v1b,28.873,71.127,46.389,53.611,44.55,224,0.875,bicubic,-50.431,-48.131,+25
-mobilevitv2_150_384_in22ft1k,28.869,71.131,47.916,52.084,10.59,384,1.000,bicubic,-53.721,-48.400,-196
-skresnext50_32x4d,28.826,71.174,46.487,53.513,27.48,224,0.875,bicubic,-51.328,-48.159,-24
-sehalonet33ts,28.778,71.222,46.582,53.418,13.69,256,0.940,bicubic,-52.194,-48.690,-83
-levit_256,28.751,71.249,46.721,53.279,18.89,224,0.900,bicubic,-52.765,-48.769,-123
-tf_efficientnet_lite3,28.660,71.340,47.346,52.654,8.20,300,0.904,bilinear,-51.158,-47.568,-9
-skresnet34,28.654,71.346,47.953,52.047,22.28,224,0.875,bicubic,-48.250,-45.367,+139
-gluon_seresnext50_32x4d,28.649,71.351,46.442,53.558,27.56,224,0.875,bicubic,-51.263,-48.390,-19
-darknetaa53,28.647,71.353,46.949,53.051,36.02,288,1.000,bilinear,-51.875,-48.377,-63
-hrnet_w40,28.635,71.365,47.452,52.548,57.56,224,0.875,bilinear,-50.287,-47.018,+41
-swinv2_tiny_window8_256,28.611,71.389,46.171,53.829,28.35,256,0.900,bicubic,-53.199,-49.823,-144
-mobilevitv2_175_384_in22ft1k,28.605,71.395,47.126,52.874,14.25,384,1.000,bicubic,-54.329,-49.304,-226
-halonet50ts,28.580,71.420,46.169,53.831,22.73,256,0.940,bicubic,-53.072,-49.443,-141
-tf_efficientnetv2_b0,28.570,71.430,47.075,52.925,7.14,224,0.875,bicubic,-49.782,-46.951,+65
-tv_resnet152,28.531,71.469,47.116,52.884,60.19,224,0.875,bilinear,-49.789,-46.918,+65
-xcit_tiny_12_p16_224,28.519,71.481,47.403,52.597,6.72,224,1.000,bicubic,-48.605,-46.309,+119
-repvgg_b2,28.430,71.570,47.038,52.962,89.02,224,0.875,bilinear,-50.364,-47.380,+39
-hrnet_w48,28.409,71.591,47.586,52.414,77.47,224,0.875,bilinear,-50.891,-46.928,+10
-gluon_resnext50_32x4d,28.379,71.621,45.316,54.684,25.03,224,0.875,bicubic,-50.981,-49.110,+2
-swinv2_cr_tiny_ns_224,28.373,71.626,45.920,54.080,28.33,224,0.900,bicubic,-53.413,-49.902,-152
-efficientnet_b2_pruned,28.362,71.638,47.050,52.950,8.31,260,0.890,bicubic,-51.556,-47.800,-34
-seresnet33ts,28.338,71.662,47.753,52.247,19.78,256,0.900,bicubic,-52.016,-47.353,-62
-tf_efficientnet_b0_ap,28.338,71.662,47.527,52.473,5.29,224,0.875,bicubic,-48.750,-45.731,+115
-dla169,28.322,71.678,47.393,52.607,53.39,224,0.875,bilinear,-50.360,-46.943,+36
-dla102x2,28.315,71.685,46.770,53.230,41.28,224,0.875,bilinear,-51.127,-47.876,-7
-tf_efficientnet_cc_b0_4e,28.313,71.687,47.360,52.640,13.31,224,0.875,bicubic,-48.997,-45.980,+102
-darknet53,28.313,71.687,46.873,53.127,41.61,288,1.000,bicubic,-52.225,-48.547,-83
-mixnet_xl,28.291,71.709,46.700,53.300,11.90,224,0.875,bicubic,-52.187,-48.234,-79
-gluon_resnet50_v1d,28.240,71.760,45.867,54.133,25.58,224,0.875,bicubic,-50.830,-48.599,+16
-wide_resnet101_2,28.112,71.888,46.411,53.589,126.89,224,0.875,bilinear,-50.740,-47.877,+23
-gluon_resnet101_v1c,28.104,71.896,45.959,54.041,44.57,224,0.875,bicubic,-51.432,-48.619,-20
-regnetx_320,28.093,71.907,45.120,54.880,107.81,224,0.875,bicubic,-52.151,-49.900,-61
-densenet161,28.081,71.919,46.639,53.361,28.68,224,0.875,bicubic,-49.273,-46.997,+94
-regnety_320,28.061,71.939,45.452,54.548,145.05,224,0.875,bicubic,-52.743,-49.792,-101
-gernet_s,28.038,71.963,46.733,53.267,8.17,224,0.875,bilinear,-48.878,-46.401,+110
-mobilevitv2_175,28.034,71.966,46.085,53.915,14.25,256,0.888,bicubic,-52.828,-49.177,-106
-efficientnet_el_pruned,28.018,71.982,46.788,53.212,10.59,300,0.904,bicubic,-52.280,-48.426,-70
-levit_192,28.014,71.986,45.872,54.128,10.95,224,0.900,bicubic,-51.822,-48.918,-43
-xception41,27.888,72.112,45.896,54.104,26.97,299,0.903,bicubic,-50.628,-48.384,+26
-regnetx_160,27.817,72.183,45.623,54.377,54.28,224,0.875,bicubic,-52.037,-49.207,-47
-tf_inception_v3,27.778,72.222,45.717,54.283,23.83,299,0.875,bicubic,-50.074,-47.923,+66
-res2net101_26w_4s,27.774,72.226,45.167,54.833,45.21,224,0.875,bilinear,-51.422,-49.269,-4
-tf_efficientnetv2_b1,27.762,72.238,46.574,53.426,8.14,240,0.882,bicubic,-51.704,-48.148,-27
-vit_base_patch16_224_sam,27.709,72.291,45.112,54.888,86.57,224,0.900,bicubic,-52.535,-49.642,-72
-fbnetv3_b,27.672,72.328,46.981,53.019,8.60,256,0.950,bilinear,-51.470,-47.769,-6
-repvgg_b1,27.648,72.352,46.521,53.479,57.42,224,0.875,bilinear,-50.720,-47.573,+32
-mobilevitv2_200,27.629,72.371,45.766,54.234,18.45,256,0.888,bicubic,-53.511,-49.602,-138
-hrnet_w44,27.623,72.377,45.845,54.155,67.06,224,0.875,bilinear,-51.273,-48.525,+4
-gcresnet33ts,27.585,72.415,46.199,53.801,19.88,256,0.900,bicubic,-52.491,-48.795,-67
-inception_v3,27.556,72.444,45.265,54.735,23.83,299,0.875,bicubic,-49.882,-48.211,+74
-resmlp_24_224,27.534,72.466,45.697,54.303,30.02,224,0.875,bicubic,-51.844,-48.849,-32
-pit_xs_224,27.497,72.503,45.904,54.096,10.62,224,0.900,bicubic,-50.693,-48.262,+35
-regnetx_080,27.393,72.607,45.002,54.998,39.57,224,0.875,bicubic,-51.809,-49.550,-16
-hrnet_w30,27.389,72.611,46.548,53.452,37.71,224,0.875,bilinear,-50.809,-47.676,+32
-hrnet_w32,27.369,72.631,45.990,54.010,41.23,224,0.875,bilinear,-51.083,-48.198,+17
-gluon_resnet50_v1s,27.322,72.678,45.224,54.776,25.68,224,0.875,bicubic,-51.384,-49.014,+3
-res2net50_26w_8s,27.310,72.690,44.823,55.177,48.40,224,0.875,bilinear,-51.642,-49.483,-7
-densenet201,27.259,72.741,46.220,53.780,20.01,224,0.875,bicubic,-50.029,-47.260,+72
-densenetblur121d,27.228,72.772,46.293,53.707,8.00,224,0.875,bicubic,-49.352,-46.895,+97
-efficientnet_b1_pruned,27.181,72.819,45.872,54.128,6.33,240,0.882,bicubic,-51.063,-47.962,+24
-tf_efficientnetv2_b2,27.173,72.827,44.572,55.428,10.10,260,0.890,bicubic,-53.035,-50.472,-86
-resnet33ts,27.136,72.865,45.332,54.668,19.68,256,0.900,bicubic,-52.072,-49.242,-26
-resnetrs50,27.098,72.902,45.029,54.971,35.69,224,0.910,bicubic,-52.788,-49.941,-73
-rexnet_130,27.096,72.904,45.941,54.059,7.56,224,0.875,bicubic,-52.406,-48.741,-52
-resnet32ts,27.045,72.955,45.263,54.737,17.96,256,0.900,bicubic,-51.969,-49.093,-18
-dla102x,27.039,72.961,45.485,54.515,26.31,224,0.875,bilinear,-51.473,-48.743,0
-gmixer_24_224,27.033,72.967,44.369,55.631,24.72,224,0.875,bicubic,-51.003,-49.301,+27
-tv_resnet101,26.963,73.037,45.236,54.764,44.55,224,0.875,bilinear,-50.417,-48.308,+58
-regnetx_120,26.870,73.130,44.676,55.324,46.11,224,0.875,bicubic,-52.722,-50.058,-60
-resnext50d_32x4d,26.866,73.134,44.446,55.554,25.05,224,0.875,bicubic,-52.810,-50.420,-64
-rexnet_100,26.831,73.169,45.377,54.623,4.80,224,0.875,bicubic,-51.029,-48.497,+33
-densenet169,26.827,73.173,45.385,54.615,14.15,224,0.875,bicubic,-49.077,-47.639,+97
-tinynet_a,26.817,73.183,45.106,54.894,6.19,192,0.875,bicubic,-50.831,-48.430,+39
-legacy_seresnext101_32x4d,26.815,73.185,43.501,56.499,48.96,224,0.875,bilinear,-53.407,-51.513,-101
-regnetx_064,26.790,73.210,44.919,55.081,26.21,224,0.875,bicubic,-52.284,-49.541,-31
-regnety_120,26.784,73.216,44.442,55.558,51.82,224,0.875,bicubic,-53.592,-50.680,-119
-regnetx_032,26.707,73.293,45.228,54.772,15.30,224,0.875,bicubic,-51.477,-48.860,+11
-densenet121,26.674,73.326,45.890,54.110,7.98,224,0.875,bicubic,-48.906,-46.758,+96
-legacy_seresnet152,26.672,73.328,43.953,56.047,66.82,224,0.875,bilinear,-51.980,-50.417,-17
-efficientnet_es,26.619,73.381,45.122,54.878,5.44,224,0.875,bicubic,-51.439,-48.822,+13
-res2net50_26w_6s,26.597,73.403,43.998,56.002,37.05,224,0.875,bilinear,-51.973,-50.126,-17
-repvgg_b1g4,26.581,73.419,45.086,54.914,39.97,224,0.875,bilinear,-51.007,-48.744,+35
-dla60x,26.554,73.446,45.008,54.992,17.35,224,0.875,bilinear,-51.674,-49.016,+2
-coat_lite_tiny,26.509,73.491,44.646,55.354,5.72,224,0.900,bicubic,-51.007,-49.268,+37
-mobilenetv3_large_100_miil,26.507,73.493,44.491,55.509,5.48,224,0.875,bilinear,-51.415,-48.429,+16
-res2net50_14w_8s,26.483,73.517,44.371,55.629,25.06,224,0.875,bilinear,-51.661,-49.481,+3
-tf_efficientnet_b0,26.477,73.523,45.650,54.350,5.29,224,0.875,bicubic,-50.363,-47.568,+60
-gluon_resnet50_v1b,26.440,73.560,44.043,55.957,25.56,224,0.875,bicubic,-51.144,-49.677,+30
-tf_efficientnet_el,26.357,73.643,44.175,55.825,10.59,300,0.904,bicubic,-53.897,-50.953,-119
-lambda_resnet26t,26.342,73.658,44.412,55.588,10.96,256,0.940,bicubic,-52.756,-50.178,-49
-levit_128,26.328,73.672,44.114,55.886,9.21,224,0.900,bicubic,-52.154,-49.898,-22
-resmlp_big_24_224,26.320,73.680,43.557,56.443,129.14,224,0.875,bicubic,-54.710,-51.463,-176
-resmlp_12_distilled_224,26.306,73.694,44.870,55.130,15.35,224,0.875,bicubic,-51.640,-48.690,+7
-regnetx_040,26.241,73.759,44.442,55.558,22.12,224,0.875,bicubic,-52.247,-49.796,-27
-mobilevitv2_150,26.190,73.810,43.768,56.232,10.59,256,0.888,bicubic,-54.178,-51.296,-136
-crossvit_9_dagger_240,26.175,73.825,44.538,55.462,8.78,240,0.875,bicubic,-50.803,-49.076,+45
-vit_small_patch32_224,26.161,73.839,45.110,54.890,22.88,224,0.900,bicubic,-49.829,-48.158,+69
-dpn68,26.135,73.865,44.228,55.772,12.61,224,0.875,bicubic,-50.175,-48.750,+65
-efficientnet_b1,26.061,73.939,44.076,55.924,7.79,256,1.000,bicubic,-52.727,-50.270,-42
-mobilevitv2_125,26.025,73.975,43.666,56.334,7.48,256,0.888,bicubic,-53.657,-51.182,-97
-lambda_resnet26rpt_256,26.017,73.983,44.182,55.818,10.99,256,0.940,bicubic,-52.947,-50.244,-52
-hrnet_w18,25.988,74.012,44.817,55.183,21.30,224,0.875,bilinear,-50.772,-48.627,+48
-hardcorenas_f,25.941,74.059,44.212,55.788,8.20,224,0.875,bilinear,-52.161,-49.590,-12
-resnet34,25.890,74.110,43.988,56.012,21.80,224,0.875,bilinear,-49.222,-48.296,+81
-tresnet_m_448,25.862,74.138,42.872,57.128,31.39,448,0.875,bilinear,-55.844,-52.700,-235
-resnet26t,25.860,74.140,43.953,56.047,16.01,256,0.940,bicubic,-52.004,-49.889,-3
-res2net50_26w_4s,25.858,74.142,43.155,56.845,25.70,224,0.875,bilinear,-52.104,-50.697,-8
-coat_tiny,25.848,74.152,43.279,56.721,5.50,224,0.900,bicubic,-52.588,-50.759,-35
-hardcorenas_c,25.821,74.179,44.770,55.230,5.52,224,0.875,bilinear,-51.231,-48.390,+30
-gluon_resnet50_v1c,25.780,74.220,43.025,56.975,25.58,224,0.875,bicubic,-52.228,-50.965,-14
-halonet26t,25.766,74.234,43.231,56.769,12.48,256,0.950,bicubic,-53.346,-51.083,-71
-selecsls60,25.727,74.273,44.065,55.935,30.67,224,0.875,bicubic,-52.257,-49.767,-15
-hardcorenas_e,25.664,74.336,43.404,56.596,8.07,224,0.875,bilinear,-52.122,-50.300,-3
-dla60_res2next,25.656,74.344,43.664,56.336,17.03,224,0.875,bilinear,-52.800,-50.482,-43
-dla60_res2net,25.646,74.354,43.583,56.417,20.85,224,0.875,bilinear,-52.812,-50.613,-45
-poolformer_s12,25.636,74.364,44.137,55.863,11.92,224,0.900,bicubic,-51.602,-49.369,+17
-ecaresnet26t,25.540,74.460,43.666,56.334,16.01,320,0.950,bicubic,-54.312,-51.418,-123
-resmlp_12_224,25.520,74.480,44.340,55.660,15.35,224,0.875,bicubic,-51.136,-48.840,+34
-mixnet_l,25.514,74.486,43.463,56.537,7.33,224,0.875,bicubic,-53.462,-50.715,-71
-tf_efficientnet_lite1,25.503,74.497,43.579,56.421,5.42,240,0.882,bicubic,-51.135,-49.645,+33
-cs3darknet_focus_m,25.485,74.515,43.762,56.238,9.30,288,0.950,bicubic,-51.797,-50.210,+10
-bat_resnext26ts,25.467,74.533,43.206,56.794,10.73,256,0.900,bicubic,-52.781,-50.890,-39
-eca_halonext26ts,25.455,74.545,43.194,56.806,10.76,256,0.940,bicubic,-54.033,-51.410,-110
-botnet26t_256,25.455,74.545,42.638,57.362,12.49,256,0.950,bicubic,-53.803,-51.890,-92
-tv_resnext50_32x4d,25.450,74.550,42.781,57.219,25.03,224,0.875,bilinear,-52.168,-50.919,-10
-repvgg_a2,25.434,74.566,43.941,56.059,28.21,224,0.875,bilinear,-51.026,-49.069,+34
-tf_mixnet_l,25.420,74.580,42.538,57.462,7.33,224,0.875,bicubic,-53.358,-51.460,-69
-hardcorenas_b,25.400,74.600,44.192,55.808,5.18,224,0.875,bilinear,-51.136,-48.562,+29
-res2next50,25.387,74.613,42.498,57.502,24.67,224,0.875,bilinear,-52.871,-51.390,-47
-legacy_seresnet101,25.334,74.666,42.823,57.177,49.33,224,0.875,bilinear,-53.046,-51.439,-53
-selecsls60b,25.332,74.668,43.559,56.441,32.77,224,0.875,bicubic,-53.072,-50.613,-56
-hardcorenas_d,25.324,74.676,43.123,56.877,7.50,224,0.875,bilinear,-52.106,-50.361,-7
-dla102,25.320,74.680,43.846,56.154,33.27,224,0.875,bilinear,-52.708,-50.104,-38
-resnetv2_50x1_bitm,25.316,74.684,45.358,54.642,25.55,448,1.000,bilinear,-55.026,-50.328,-173
-resnest14d,25.275,74.725,44.090,55.910,10.61,224,0.875,bilinear,-50.233,-48.434,+41
-legacy_seresnext50_32x4d,25.214,74.786,41.942,58.058,27.56,224,0.875,bilinear,-53.862,-52.492,-93
-mixer_b16_224,25.117,74.883,41.217,58.783,59.88,224,0.875,bicubic,-51.493,-51.013,+17
-efficientnet_b0,25.027,74.973,42.795,57.205,5.29,224,0.875,bicubic,-52.673,-50.737,-27
-res2net50_48w_2s,25.025,74.975,42.206,57.794,25.29,224,0.875,bilinear,-52.499,-51.344,-19
-gluon_resnet34_v1b,24.935,75.065,42.241,57.759,21.80,224,0.875,bicubic,-49.657,-49.747,+58
-mobilenetv2_120d,24.931,75.069,43.051,56.949,5.83,224,0.875,bicubic,-52.359,-50.449,-12
-dla60,24.911,75.089,43.294,56.706,22.04,224,0.875,bilinear,-52.111,-50.026,-2
-eca_botnext26ts_256,24.868,75.132,42.950,57.050,10.59,256,0.950,bicubic,-54.408,-51.666,-113
-regnety_016,24.817,75.183,42.610,57.390,11.20,224,0.875,bicubic,-53.039,-51.110,-38
-xcit_nano_12_p8_224_dist,24.811,75.189,43.072,56.928,3.05,224,1.000,bicubic,-51.517,-50.022,+16
-seresnext26ts,24.689,75.311,43.106,56.894,10.39,256,0.900,bicubic,-53.169,-50.684,-41
-eca_resnext26ts,24.658,75.342,42.850,57.150,10.30,256,0.900,bicubic,-52.800,-50.718,-24
-cs3darknet_m,24.630,75.370,42.970,57.030,9.31,288,0.950,bicubic,-52.996,-51.044,-34
-mobilevitv2_100,24.547,75.453,42.919,57.081,4.90,256,0.888,bicubic,-53.539,-51.241,-57
-tf_efficientnet_lite2,24.528,75.472,42.280,57.720,6.09,260,0.890,bicubic,-52.938,-51.478,-28
-regnetx_016,24.487,75.513,42.510,57.490,9.19,224,0.875,bicubic,-52.455,-50.914,-8
-skresnet18,24.483,75.517,42.540,57.460,11.96,224,0.875,bicubic,-48.551,-48.626,+63
-pit_ti_distilled_224,24.408,75.592,42.734,57.266,5.10,224,0.900,bicubic,-50.126,-49.362,+46
-hardcorenas_a,24.371,75.629,43.292,56.708,5.26,224,0.875,bilinear,-51.559,-49.218,+14
-tf_efficientnet_lite0,24.367,75.633,42.504,57.496,4.65,224,0.875,bicubic,-50.465,-49.670,+37
-tv_resnet50,24.084,75.916,41.313,58.687,25.56,224,0.875,bilinear,-52.050,-51.555,+8
-levit_128s,24.056,75.944,41.005,58.995,7.78,224,0.900,bicubic,-52.458,-51.865,+1
-legacy_seresnet34,24.029,75.971,41.905,58.095,21.96,224,0.875,bilinear,-50.781,-50.221,+35
-xcit_nano_12_p16_384_dist,24.011,75.989,42.327,57.673,3.05,384,1.000,bicubic,-51.445,-50.363,+20
-xcit_nano_12_p8_384_dist,23.956,76.044,41.946,58.054,3.05,384,1.000,bicubic,-53.860,-52.100,-52
-gcresnext26ts,23.950,76.050,41.359,58.641,10.48,256,0.900,bicubic,-53.864,-52.477,-52
-resnet18d,23.933,76.067,42.298,57.702,11.71,224,0.875,bicubic,-48.325,-48.390,+63
-efficientnet_lite0,23.907,76.093,42.084,57.916,4.65,224,0.875,bicubic,-51.561,-50.432,+14
-resnext26ts,23.868,76.132,41.109,58.891,10.30,256,0.900,bicubic,-52.912,-52.023,-15
-tv_densenet121,23.840,76.160,41.921,58.079,7.98,224,0.875,bicubic,-50.900,-50.227,+29
-efficientnet_es_pruned,23.838,76.162,41.989,58.011,5.44,224,0.875,bicubic,-51.162,-50.453,+23
-mobilenetv2_140,23.714,76.286,41.477,58.523,6.11,224,0.875,bicubic,-52.798,-51.521,-8
-mixnet_m,23.714,76.286,41.148,58.852,5.01,224,0.875,bicubic,-53.548,-52.274,-36
-dla34,23.679,76.321,41.539,58.461,15.74,224,0.875,bilinear,-50.945,-50.533,+28
-legacy_seresnet50,23.651,76.349,40.091,59.909,28.09,224,0.875,bilinear,-53.981,-53.659,-57
-ese_vovnet19b_dw,23.528,76.472,41.284,58.716,6.54,224,0.875,bicubic,-53.266,-51.982,-23
-tf_mixnet_m,23.484,76.516,41.001,58.999,5.01,224,0.875,bicubic,-53.462,-52.151,-30
-tv_resnet34,23.469,76.531,41.364,58.636,21.80,224,0.875,bilinear,-49.839,-50.060,+39
-tf_efficientnet_em,23.361,76.639,40.400,59.600,6.90,240,0.882,bicubic,-54.765,-53.646,-84
-selecsls42b,23.355,76.645,40.675,59.325,32.46,224,0.875,bicubic,-53.823,-52.717,-41
-repvgg_b0,23.319,76.681,41.172,58.828,15.82,224,0.875,bilinear,-51.835,-51.244,+7
-xcit_nano_12_p16_224_dist,23.264,76.736,41.382,58.618,3.05,224,1.000,bicubic,-49.038,-49.480,+47
-mobilenetv2_110d,23.076,76.924,40.748,59.252,4.52,224,0.875,bicubic,-51.960,-51.444,+9
-vit_base_patch32_224_sam,23.048,76.952,39.574,60.426,88.22,224,0.900,bicubic,-50.644,-51.438,+29
-tinynet_b,23.023,76.977,40.968,59.032,3.73,188,0.875,bicubic,-51.951,-51.214,+10
-deit_tiny_distilled_patch16_224,22.726,77.274,40.773,59.227,5.91,224,0.900,bicubic,-51.786,-51.117,+19
-mobilenetv3_large_100,22.655,77.345,40.775,59.225,5.48,224,0.875,bicubic,-53.121,-51.765,-12
-mobilenetv3_rw,22.626,77.374,40.380,59.620,5.48,224,0.875,bicubic,-53.008,-52.328,-11
-tf_mobilenetv3_large_100,22.565,77.435,39.761,60.239,5.48,224,0.875,bilinear,-52.947,-52.845,-9
-mobilevit_s,22.476,77.524,38.643,61.357,5.58,256,0.900,bicubic,-55.834,-55.509,-104
-tf_efficientnet_es,22.416,77.585,39.093,60.907,5.44,224,0.875,bicubic,-54.182,-54.111,-31
-xcit_nano_12_p8_224,22.412,77.588,40.657,59.343,3.05,224,1.000,bicubic,-51.504,-51.511,+19
-hrnet_w18_small_v2,22.337,77.663,39.869,60.131,15.60,224,0.875,bilinear,-52.773,-52.547,-2
-convit_tiny,22.276,77.724,39.665,60.335,5.71,224,0.875,bicubic,-50.838,-52.055,+25
-edgenext_x_small,22.199,77.801,39.075,60.925,2.34,256,0.900,bicubic,-52.665,-53.225,+1
-regnety_008,22.119,77.881,38.891,61.109,6.26,224,0.875,bicubic,-54.195,-54.179,-29
-seresnext26t_32x4d,21.983,78.017,38.486,61.514,16.81,224,0.875,bicubic,-55.985,-55.262,-94
-regnety_006,21.981,78.019,38.950,61.050,6.06,224,0.875,bicubic,-53.271,-53.582,-11
-vit_tiny_r_s16_p8_384,21.958,78.042,39.403,60.597,6.36,384,1.000,bicubic,-53.994,-53.859,-27
-regnetx_008,21.942,78.058,38.926,61.074,7.26,224,0.875,bicubic,-53.092,-53.414,-7
-resnet26d,21.907,78.094,38.621,61.379,16.01,224,0.875,bicubic,-54.795,-54.531,-45
-semnasnet_100,21.897,78.103,38.602,61.398,3.89,224,0.875,bicubic,-53.553,-53.998,-17
-pit_ti_224,21.869,78.131,39.543,60.457,4.85,224,0.900,bicubic,-51.043,-51.863,+20
-regnetx_006,21.738,78.263,38.916,61.084,6.20,224,0.875,bicubic,-52.118,-52.756,+8
-vit_tiny_patch16_384,21.714,78.286,39.327,60.673,5.79,384,1.000,bicubic,-56.716,-55.217,-126
-crossvit_9_240,21.688,78.312,39.278,60.722,8.55,240,0.875,bicubic,-52.272,-52.686,+4
-vgg19_bn,21.625,78.374,39.280,60.720,143.68,224,0.875,bilinear,-52.589,-52.564,-1
-ghostnet_100,21.614,78.386,38.696,61.304,5.18,224,0.875,bilinear,-52.366,-52.762,+1
-semnasnet_075,21.570,78.430,38.934,61.066,2.91,224,0.875,bicubic,-51.404,-52.200,+12
-gluon_resnet18_v1b,21.557,78.443,38.887,61.113,11.69,224,0.875,bicubic,-49.281,-50.875,+31
-mobilevitv2_075,21.535,78.465,38.635,61.365,2.87,256,0.888,bicubic,-54.073,-54.123,-33
-fbnetc_100,21.508,78.492,38.158,61.842,5.57,224,0.875,bilinear,-53.608,-54.228,-23
-xcit_nano_12_p16_224,21.437,78.563,39.798,60.202,3.05,224,1.000,bicubic,-48.517,-49.958,+32
-mnasnet_100,21.362,78.638,37.721,62.279,4.38,224,0.875,bicubic,-53.288,-54.393,-14
-lcnet_100,21.290,78.710,38.849,61.151,2.95,224,0.875,bicubic,-50.820,-51.529,+18
-resnet26,21.285,78.715,38.020,61.980,16.00,224,0.875,bicubic,-54.015,-54.560,-30
-ssl_resnet18,21.278,78.722,39.107,60.893,11.69,224,0.875,bilinear,-51.326,-52.317,+7
-mixnet_s,21.256,78.744,38.183,61.817,4.13,224,0.875,bicubic,-54.740,-54.617,-48
-seresnext26d_32x4d,21.250,78.750,37.319,62.681,16.81,224,0.875,bicubic,-56.356,-56.287,-98
-legacy_seresnext26_32x4d,21.091,78.909,37.629,62.371,16.79,224,0.875,bicubic,-56.013,-55.687,-78
-crossvit_tiny_240,21.050,78.950,38.053,61.947,7.01,240,0.875,bicubic,-52.288,-53.861,-5
-regnetx_004,20.898,79.102,37.568,62.432,5.16,224,0.875,bicubic,-51.498,-53.270,+3
-spnasnet_100,20.865,79.135,37.888,62.112,4.42,224,0.875,bilinear,-53.225,-53.928,-16
-legacy_seresnet18,20.841,79.159,37.613,62.387,11.78,224,0.875,bicubic,-50.899,-52.717,+12
-mobilenetv2_100,20.777,79.223,37.764,62.236,3.50,224,0.875,bicubic,-52.179,-53.246,-3
-tf_mixnet_s,20.462,79.538,36.615,63.385,4.13,224,0.875,bicubic,-55.190,-56.011,-50
-vit_tiny_patch16_224,20.458,79.542,37.603,62.397,5.72,224,0.900,bicubic,-55.006,-55.241,-44
-regnety_004,20.415,79.585,37.002,62.998,4.34,224,0.875,bicubic,-53.609,-54.754,-20
-hrnet_w18_small,20.364,79.636,37.089,62.911,13.19,224,0.875,bilinear,-51.972,-53.591,-1
-tf_mobilenetv3_large_075,20.364,79.636,36.770,63.230,3.99,224,0.875,bilinear,-53.076,-54.578,-16
-resnet18,20.224,79.776,37.256,62.744,11.69,224,0.875,bilinear,-49.524,-51.828,+16
-mixer_l16_224,20.169,79.831,32.942,67.058,208.20,224,0.875,bicubic,-51.897,-54.724,+2
-deit_tiny_patch16_224,20.166,79.835,37.560,62.440,5.72,224,0.900,bicubic,-52.008,-53.554,-1
-tf_mobilenetv3_large_minimal_100,20.108,79.891,36.906,63.094,3.92,224,0.875,bilinear,-52.142,-53.714,-3
-vgg16_bn,19.957,80.043,36.303,63.697,138.37,224,0.875,bilinear,-53.393,-55.201,-20
-vit_tiny_r_s16_p8_224,19.324,80.676,36.051,63.949,6.34,224,0.900,bicubic,-52.470,-54.767,-1
-tinynet_c,19.260,80.740,35.988,64.012,2.46,184,0.875,bicubic,-51.968,-53.760,+2
-edgenext_xx_small,18.580,81.420,34.693,65.307,1.33,256,0.900,bicubic,-52.526,-55.339,+2
-mobilevit_xs,18.303,81.697,33.227,66.773,2.32,256,0.900,bicubic,-56.331,-59.119,-38
-lcnet_075,18.161,81.839,34.406,65.594,2.36,224,0.875,bicubic,-50.653,-53.958,+10
-vgg19,17.929,82.071,33.054,66.946,143.67,224,0.875,bilinear,-54.437,-57.818,-15
-vgg13_bn,17.803,82.197,34.039,65.961,133.05,224,0.875,bilinear,-53.795,-56.337,-5
-vgg16,17.540,82.460,32.769,67.231,138.36,224,0.875,bilinear,-54.050,-57.613,-5
-regnety_002,17.458,82.542,32.431,67.569,3.16,224,0.875,bicubic,-52.798,-57.103,-1
-vgg11_bn,17.403,82.597,33.009,66.991,132.87,224,0.875,bilinear,-52.957,-56.793,-3
-mobilevitv2_050,17.302,82.698,32.999,67.001,1.37,256,0.888,bicubic,-52.838,-56.931,-2
-resnet10t,17.281,82.719,33.070,66.930,5.44,224,0.950,bilinear,-51.027,-55.010,+5
-regnetx_002,16.962,83.038,32.223,67.777,2.68,224,0.875,bicubic,-51.792,-56.333,+3
-mobilenetv3_small_100,16.815,83.185,32.535,67.465,2.54,224,0.875,bicubic,-50.843,-55.099,+6
-tinynet_d,16.675,83.325,32.459,67.541,2.34,152,0.875,bicubic,-50.287,-54.605,+6
-mobilenetv2_050,16.675,83.325,31.952,68.048,1.97,224,0.875,bicubic,-49.269,-54.128,+8
-mnasnet_small,16.636,83.364,31.922,68.078,2.03,224,0.875,bicubic,-49.570,-54.584,+5
-resnet14t,16.471,83.529,30.722,69.278,10.08,224,0.950,bilinear,-55.885,-59.618,-26
-dla60x_c,16.320,83.680,31.752,68.249,1.32,224,0.875,bilinear,-51.560,-56.682,0
-tf_mobilenetv3_small_100,16.227,83.772,31.225,68.775,2.54,224,0.875,bilinear,-51.699,-56.443,-2
-vgg13,16.104,83.896,30.983,69.017,133.05,224,0.875,bilinear,-53.822,-58.263,-10
-vgg11,15.730,84.270,30.453,69.547,132.86,224,0.875,bilinear,-53.298,-58.175,-9
-mobilenetv3_small_075,14.954,85.046,29.735,70.265,2.04,224,0.875,bicubic,-50.284,-55.705,+3
-tf_mobilenetv3_small_075,14.948,85.052,29.576,70.424,2.04,224,0.875,bilinear,-50.764,-56.554,+1
-dla46_c,14.671,85.329,29.374,70.626,1.30,224,0.875,bilinear,-50.201,-56.928,+2
-mobilevit_xxs,14.508,85.492,28.670,71.330,1.27,256,0.900,bicubic,-54.412,-60.276,-12
-dla46x_c,14.382,85.618,29.179,70.821,1.07,224,0.875,bilinear,-51.570,-57.807,-4
-lcnet_050,14.306,85.694,28.647,71.353,1.88,224,0.875,bicubic,-48.788,-55.735,0
-tf_mobilenetv3_small_minimal_100,13.958,86.042,27.979,72.022,2.04,224,0.875,bilinear,-48.942,-56.255,0
-tinynet_e,12.669,87.331,26.389,73.611,2.04,106,0.875,bicubic,-47.187,-55.377,0
-mobilenetv3_small_050,11.034,88.966,23.471,76.529,1.59,224,0.875,bicubic,-46.856,-56.723,0
+eva_giant_patch14_336.clip_ft_in1k,71.180,28.820,90.291,9.709,"1,013.01",336,1.000,bicubic,-18.296,-8.533,+2
+eva_giant_patch14_224.clip_ft_in1k,70.559,29.441,90.004,9.996,"1,012.56",224,1.000,bicubic,-18.540,-8.712,+3
+eva_giant_patch14_336.m30m_ft_in22k_in1k,68.056,31.944,87.821,12.179,"1,013.01",336,1.000,bicubic,-21.512,-11.131,-1
+eva_giant_patch14_560.m30m_ft_in22k_in1k,67.486,32.514,87.461,12.539,"1,014.45",560,1.000,bicubic,-22.310,-11.531,-3
+vit_huge_patch14_clip_224.laion2b_ft_in1k,67.409,32.590,87.895,12.105,632.05,224,1.000,bicubic,-20.184,-10.325,+25
+vit_large_patch14_clip_336.laion2b_ft_in1k,65.733,34.267,86.913,13.087,304.53,336,1.000,bicubic,-22.115,-11.457,+21
+vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k,65.321,34.679,86.836,13.164,632.05,224,1.000,bicubic,-22.925,-11.714,+9
+vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k,65.264,34.736,86.763,13.237,632.46,336,1.000,bicubic,-23.310,-11.897,+1
+vit_large_patch14_clip_224.laion2b_ft_in1k,64.808,35.192,86.573,13.427,304.20,224,1.000,bicubic,-22.484,-11.673,+26
+vit_large_patch14_clip_336.openai_ft_in12k_in1k,64.065,35.935,85.903,14.097,304.53,336,1.000,bicubic,-24.201,-12.629,+5
+eva_large_patch14_336.in22k_ft_in1k,63.108,36.892,84.386,15.614,304.53,336,1.000,bicubic,-25.556,-14.334,-5
+vit_large_patch14_clip_224.openai_ft_in1k,62.630,37.370,85.117,14.883,304.20,224,1.000,bicubic,-25.222,-13.311,+14
+vit_large_patch14_clip_224.laion2b_ft_in12k_in1k,62.053,37.947,84.317,15.683,304.20,224,1.000,bicubic,-25.837,-14.093,+12
+vit_large_patch14_clip_336.laion2b_ft_in12k_in1k,61.628,38.372,83.659,16.341,304.53,336,1.000,bicubic,-26.554,-14.913,+6
+vit_large_patch14_clip_224.openai_ft_in12k_in1k,61.402,38.598,83.374,16.626,304.20,224,1.000,bicubic,-26.766,-15.170,+6
+eva_large_patch14_196.in22k_ft_in1k,61.111,38.889,82.774,17.226,304.14,196,1.000,bicubic,-26.827,-15.718,+7
+eva_large_patch14_336.in22k_ft_in22k_in1k,60.938,39.062,82.155,17.845,304.53,336,1.000,bicubic,-28.265,-16.695,-13
+eva_large_patch14_196.in22k_ft_in22k_in1k,59.852,40.148,81.124,18.876,304.14,196,1.000,bicubic,-28.734,-17.532,-10
+ig_resnext101_32x48d,58.810,41.190,81.076,18.924,828.41,224,0.875,bilinear,-26.618,-16.496,+84
+ig_resnext101_32x32d,58.386,41.614,80.381,19.619,468.53,224,0.875,bilinear,-26.708,-17.057,+107
+beitv2_large_patch16_224.in1k_ft_in22k_in1k,58.358,41.642,80.226,19.774,304.43,224,0.950,bicubic,-30.028,-18.372,-9
+ig_resnext101_32x16d,57.690,42.310,79.905,20.095,194.03,224,0.875,bilinear,-26.480,-17.291,+169
+swsl_resnext101_32x16d,57.458,42.542,80.385,19.615,194.03,224,0.875,bilinear,-25.888,-16.461,+230
+beit_large_patch16_384.in22k_ft_in22k_in1k,56.894,43.106,79.229,20.771,305.00,384,1.000,bicubic,-31.510,-19.379,-13
+vit_base_patch16_clip_384.laion2b_ft_in1k,56.875,43.125,79.994,20.006,86.86,384,1.000,bicubic,-29.745,-18.016,+29
+beit_large_patch16_512.in22k_ft_in22k_in1k,56.755,43.245,78.899,21.101,305.67,512,1.000,bicubic,-31.843,-19.757,-19
+swsl_resnext101_32x8d,56.438,43.562,78.944,21.056,88.79,224,0.875,bilinear,-27.846,-18.232,+152
+maxvit_xlarge_tf_384.in21k_ft_in1k,56.207,43.793,78.750,21.250,475.32,384,1.000,bicubic,-32.099,-19.794,-14
+maxvit_xlarge_tf_512.in21k_ft_in1k,56.150,43.850,78.630,21.370,475.77,512,1.000,bicubic,-32.388,-20.014,-19
+maxvit_base_tf_512.in21k_ft_in1k,56.089,43.911,78.599,21.401,119.88,512,1.000,bicubic,-32.123,-19.933,-11
+deit3_huge_patch14_224_in21ft1k,55.764,44.236,77.624,22.376,632.13,224,1.000,bicubic,-31.419,-20.636,+6
+maxvit_base_tf_384.in21k_ft_in1k,55.633,44.367,78.064,21.936,119.65,384,1.000,bicubic,-32.289,-20.478,-8
+vit_base_patch16_clip_224.laion2b_ft_in1k,55.413,44.587,79.049,20.951,86.57,224,1.000,bicubic,-30.055,-18.527,+68
+maxvit_large_tf_512.in21k_ft_in1k,55.171,44.829,77.276,22.724,212.33,512,1.000,bicubic,-33.047,-21.322,-16
+maxvit_large_tf_384.in21k_ft_in1k,55.075,44.925,77.142,22.858,212.03,384,1.000,bicubic,-32.917,-21.424,-13
+convnext_xlarge.fb_in22k_ft_in1k_384,54.965,45.035,76.828,23.172,350.20,384,1.000,bicubic,-32.783,-21.726,-8
+beit_large_patch16_224.in22k_ft_in22k_in1k,54.959,45.041,77.610,22.390,304.43,224,0.900,bicubic,-32.517,-20.694,-6
+ig_resnext101_32x8d,54.918,45.082,77.534,22.466,88.79,224,0.875,bilinear,-27.770,-19.102,+257
+deit3_large_patch16_384_in21ft1k,54.878,45.122,77.372,22.628,304.76,384,1.000,bicubic,-32.838,-21.140,-10
+deit3_large_patch16_224_in21ft1k,54.361,45.639,76.561,23.439,304.37,224,1.000,bicubic,-32.616,-21.677,+4
+swsl_resnext101_32x4d,53.603,46.397,76.347,23.653,44.18,224,0.875,bilinear,-29.627,-20.413,+216
+vit_base_patch16_clip_384.laion2b_ft_in12k_in1k,53.493,46.507,75.667,24.333,86.86,384,1.000,bicubic,-33.725,-22.367,-6
+vit_base_patch16_clip_384.openai_ft_in1k,53.080,46.920,76.653,23.347,86.86,384,1.000,bicubic,-33.126,-21.221,+25
+convnext_large.fb_in22k_ft_in1k_384,52.758,47.242,74.704,25.296,197.77,384,1.000,bicubic,-34.714,-23.682,-12
+vit_large_patch16_384.augreg_in21k_ft_in1k,52.754,47.246,74.696,25.304,304.72,384,1.000,bicubic,-34.326,-23.604,-5
+swinv2_large_window12to24_192to384_22kft1k,52.300,47.700,74.423,25.577,196.74,384,1.000,bicubic,-35.158,-23.829,-13
+convnext_xlarge.fb_in22k_ft_in1k,52.229,47.771,73.953,26.047,350.20,288,1.000,bicubic,-35.109,-24.375,-13
+vit_large_r50_s32_384.augreg_in21k_ft_in1k,52.039,47.961,73.558,26.442,329.09,384,1.000,bicubic,-34.145,-24.360,+21
+vit_large_patch16_224.augreg_in21k_ft_in1k,51.832,48.168,73.694,26.306,304.33,224,0.900,bicubic,-34.010,-24.130,+34
+vit_base_patch16_clip_224.laion2b_ft_in12k_in1k,51.760,48.240,74.635,25.365,86.57,224,0.950,bicubic,-34.410,-23.119,+20
+convnext_base.fb_in22k_ft_in1k_384,51.561,48.439,74.535,25.465,88.59,384,1.000,bicubic,-35.233,-23.729,-1
+tf_efficientnet_l2.ns_jft_in1k_475,51.494,48.506,73.928,26.072,480.31,475,0.936,bicubic,-36.740,-24.618,-35
+vit_base_patch16_clip_384.openai_ft_in12k_in1k,51.153,48.847,74.328,25.672,86.86,384,0.950,bicubic,-35.882,-23.852,-11
+swinv2_base_window12to24_192to384_22kft1k,50.974,49.026,73.318,26.682,87.92,384,1.000,bicubic,-36.134,-24.918,-15
+vit_base_patch16_clip_224.openai_ft_in1k,50.936,49.064,74.855,25.145,86.57,224,0.900,bicubic,-34.344,-22.551,+56
+swinv2_large_window12to16_192to256_22kft1k,50.441,49.559,72.752,27.247,196.74,256,0.900,bicubic,-36.495,-25.356,-10
+swsl_resnext50_32x4d,50.437,49.563,73.368,26.633,25.03,224,0.875,bilinear,-31.745,-22.862,+290
+swin_large_patch4_window12_384,50.404,49.596,72.564,27.436,196.74,384,1.000,bicubic,-36.744,-25.670,-20
+convnext_large.fb_in22k_ft_in1k,49.993,50.007,72.251,27.749,197.77,288,1.000,bicubic,-37.023,-25.955,-16
+tf_efficientnetv2_xl.in21k_ft_in1k,49.734,50.266,72.120,27.880,208.12,512,1.000,bicubic,-37.014,-25.898,-8
+vit_base_patch16_clip_224.openai_ft_in12k_in1k,49.691,50.309,72.878,27.122,86.57,224,0.950,bicubic,-36.240,-24.846,+19
+swsl_resnet50,49.541,50.459,72.334,27.666,25.56,224,0.875,bilinear,-31.625,-22.762,+354
+beitv2_base_patch16_224.in1k_ft_in22k_in1k,49.512,50.488,72.383,27.617,86.53,224,0.900,bicubic,-36.968,-25.665,-4
+vit_base_patch32_clip_224.laion2b_ft_in1k,49.066,50.934,72.578,27.422,88.22,224,0.900,bicubic,-33.516,-23.624,+240
+swin_large_patch4_window7_224,48.991,51.009,71.391,28.609,196.53,224,0.900,bicubic,-37.329,-26.505,-3
+convnext_base.fb_in22k_ft_in1k,48.934,51.066,71.733,28.267,88.59,288,1.000,bicubic,-37.346,-26.357,-2
+swinv2_base_window12to16_192to256_22kft1k,48.796,51.204,71.387,28.613,87.92,256,0.900,bicubic,-37.478,-26.509,-2
+tf_efficientnetv2_l.in21k_ft_in1k,48.745,51.255,71.990,28.010,118.52,480,1.000,bicubic,-38.061,-26.144,-20
+beit_base_patch16_384.in22k_ft_in22k_in1k,48.669,51.331,72.088,27.912,86.74,384,1.000,bicubic,-38.131,-26.050,-20
+swin_base_patch4_window12_384,48.553,51.447,71.813,28.187,87.90,384,1.000,bicubic,-37.879,-26.245,-9
+maxvit_base_tf_512.in1k,48.240,51.760,70.799,29.201,119.88,512,1.000,bicubic,-38.358,-27.121,-16
+vit_large_r50_s32_224.augreg_in21k_ft_in1k,48.203,51.797,70.868,29.132,328.99,224,0.900,bicubic,-36.231,-26.104,+95
+vit_base_patch32_clip_384.laion2b_ft_in12k_in1k,47.944,52.056,70.923,29.077,88.30,384,1.000,bicubic,-37.428,-26.741,+35
+tf_efficientnet_b7.ns_jft_in1k,47.800,52.200,69.640,30.360,66.35,600,0.949,bicubic,-39.040,-28.454,-27
+tf_efficientnet_b6.ns_jft_in1k,47.761,52.239,69.968,30.032,43.04,528,0.942,bicubic,-38.691,-27.914,-15
+vit_base_patch8_224.augreg_in21k_ft_in1k,47.731,52.269,70.921,29.079,86.58,224,0.900,bicubic,-38.065,-26.869,+11
+deit3_base_patch16_384_in21ft1k,47.664,52.336,69.748,30.252,86.88,384,1.000,bicubic,-39.080,-28.364,-24
+vit_base_patch32_clip_448.laion2b_ft_in12k_in1k,47.572,52.428,70.060,29.940,88.34,448,1.000,bicubic,-38.212,-27.574,+10
+tf_efficientnet_l2.ns_jft_in1k,47.570,52.430,70.019,29.981,480.31,800,0.960,bicubic,-40.782,-28.631,-66
+vit_base_patch8_224.augreg2_in21k_ft_in1k,47.501,52.499,70.322,29.678,86.58,224,0.900,bicubic,-38.711,-27.510,-13
+tf_efficientnetv2_m.in21k_ft_in1k,47.466,52.534,70.939,29.061,54.14,480,1.000,bicubic,-38.538,-27.003,-3
+deit3_base_patch16_224_in21ft1k,47.372,52.628,69.773,30.227,86.59,224,1.000,bicubic,-38.342,-27.971,+9
+maxvit_large_tf_512.in1k,47.016,52.984,69.498,30.502,212.33,512,1.000,bicubic,-39.502,-28.386,-27
+convnext_small.fb_in22k_ft_in1k_384,46.861,53.139,69.530,30.470,50.22,384,1.000,bicubic,-38.917,-28.362,+5
+beit_base_patch16_224.in22k_ft_in22k_in1k,46.242,53.758,69.895,30.105,86.53,224,0.900,bicubic,-38.994,-27.761,+32
+vit_base_patch32_clip_384.openai_ft_in12k_in1k,46.236,53.764,69.312,30.688,88.30,384,0.950,bicubic,-38.976,-28.090,+32
+maxvit_base_tf_384.in1k,46.220,53.780,68.528,31.472,119.65,384,1.000,bicubic,-40.074,-29.276,-24
+vit_base_patch16_384.augreg_in21k_ft_in1k,45.894,54.106,68.557,31.443,86.86,384,1.000,bicubic,-40.112,-29.443,-11
+tf_efficientnet_b8.ap_in1k,45.774,54.226,67.911,32.089,87.41,672,0.954,bicubic,-39.596,-29.479,+21
+maxvit_large_tf_384.in1k,45.751,54.249,68.146,31.854,212.03,384,1.000,bicubic,-40.485,-29.544,-24
+vit_base_patch32_clip_224.laion2b_ft_in12k_in1k,45.747,54.253,68.875,31.125,88.22,224,0.900,bicubic,-37.559,-27.655,+164
+tf_efficientnet_b5.ns_jft_in1k,45.615,54.385,67.842,32.158,30.39,456,0.934,bicubic,-40.473,-29.910,-19
+swin_base_patch4_window7_224,45.560,54.440,68.512,31.488,87.77,224,0.900,bicubic,-39.692,-29.050,+21
+mvitv2_large,45.285,54.715,65.195,34.805,217.99,224,0.900,bicubic,-39.965,-32.019,+22
+vit_base_patch16_224.augreg2_in21k_ft_in1k,45.110,54.890,67.425,32.575,86.57,224,0.900,bicubic,-39.996,-29.955,+29
+vit_base_patch32_clip_224.openai_ft_in1k,45.037,54.963,68.459,31.541,88.22,224,0.900,bicubic,-36.893,-27.509,+273
+volo_d5_512,44.572,55.428,65.755,34.245,296.09,512,1.150,bicubic,-42.472,-32.213,-56
+cait_m48_448,44.245,55.755,64.653,35.347,356.46,448,1.000,bicubic,-42.239,-33.102,-40
+deit3_large_patch16_384,44.175,55.825,64.843,35.157,304.76,384,1.000,bicubic,-41.635,-32.753,-13
+volo_d5_448,44.096,55.904,65.065,34.935,295.91,448,1.150,bicubic,-42.858,-32.873,-55
+deit3_huge_patch14_224,43.801,56.199,64.350,35.650,632.13,224,0.900,bicubic,-41.403,-33.008,+19
+convnext_small.fb_in22k_ft_in1k,43.603,56.397,66.448,33.551,50.22,288,1.000,bicubic,-41.659,-31.236,+11
+deit3_large_patch16_224,43.520,56.480,63.574,36.426,304.37,224,0.900,bicubic,-41.244,-33.464,+44
+vit_base_r50_s16_384.orig_in21k_ft_in1k,43.512,56.488,66.785,33.215,98.95,384,1.000,bicubic,-41.460,-30.503,+32
+tf_efficientnet_b4.ns_jft_in1k,43.450,56.550,65.519,34.481,19.34,380,0.922,bicubic,-41.713,-31.951,+17
+deit3_medium_patch16_224_in21ft1k,43.271,56.729,64.892,35.108,38.85,224,1.000,bicubic,-41.289,-32.296,+51
+volo_d5_224,43.261,56.739,64.077,35.923,295.46,224,0.960,bicubic,-42.807,-33.501,-33
+vit_base_patch16_224.augreg_in21k_ft_in1k,43.220,56.780,65.708,34.292,86.57,224,0.900,bicubic,-41.312,-31.586,+50
+volo_d4_448,43.135,56.865,64.114,35.886,193.41,448,1.150,bicubic,-43.655,-33.768,-58
+efficientnet_b5.in12k_ft_in1k,42.858,57.142,65.419,34.581,30.39,448,1.000,bicubic,-43.030,-32.313,-29
+xcit_large_24_p8_384_dist,42.831,57.169,63.403,36.597,188.93,384,1.000,bicubic,-43.169,-34.283,-32
+maxvit_small_tf_512.in1k,42.689,57.311,64.546,35.454,69.13,512,1.000,bicubic,-43.399,-33.212,-40
+xcit_large_24_p8_224_dist,42.567,57.433,63.100,36.900,188.93,224,1.000,bicubic,-42.829,-34.310,-6
+tf_efficientnet_b8.ra_in1k,42.508,57.492,64.857,35.143,87.41,672,0.954,bicubic,-42.862,-32.437,-5
+maxvit_large_tf_224.in1k,42.410,57.590,63.401,36.599,211.79,224,0.950,bicubic,-42.516,-33.571,+23
+cait_m36_384,42.398,57.602,63.324,36.676,271.22,384,1.000,bicubic,-43.656,-34.406,-41
+volo_d4_224,42.284,57.716,63.000,37.000,192.96,224,0.960,bicubic,-43.588,-34.468,-35
+deit3_small_patch16_384_in21ft1k,41.946,58.054,64.548,35.452,22.21,384,1.000,bicubic,-42.878,-32.938,+26
+vit_medium_patch16_gap_384.in12k_ft_in1k,41.895,58.105,63.692,36.308,39.03,384,0.950,bicubic,-43.641,-33.942,-21
+maxvit_tiny_tf_512.in1k,41.852,58.148,63.586,36.414,31.05,512,1.000,bicubic,-43.810,-33.994,-26
+tf_efficientnet_b7.ra_in1k,41.431,58.569,63.017,36.983,66.35,600,0.949,bicubic,-43.505,-34.186,+16
+tf_efficientnet_b7.ap_in1k,41.429,58.571,62.874,37.126,66.35,600,0.949,bicubic,-43.691,-34.378,+1
+tf_efficientnet_b5.ap_in1k,41.418,58.582,62.084,37.916,30.39,456,0.934,bicubic,-42.834,-34.890,+60
+resnetv2_152x4_bitm,41.302,58.698,64.307,35.693,936.53,480,1.000,bilinear,-43.614,-33.133,+16
+tf_efficientnet_b6.ap_in1k,41.099,58.901,62.355,37.645,43.04,528,0.942,bicubic,-43.689,-34.783,+21
+xcit_large_24_p16_384_dist,41.025,58.975,61.239,38.761,189.10,384,1.000,bicubic,-44.729,-36.299,-36
+xcit_large_24_p16_224_dist,40.958,59.042,61.322,38.678,189.10,224,1.000,bicubic,-43.960,-35.810,+12
+tf_efficientnetv2_s.in21k_ft_in1k,40.950,59.050,63.849,36.151,21.46,384,1.000,bicubic,-43.352,-33.403,+50
+tf_efficientnetv2_l.in1k,40.940,59.060,62.000,38.000,118.52,480,1.000,bicubic,-44.730,-35.474,-36
+maxvit_small_tf_384.in1k,40.848,59.152,61.962,38.038,69.02,384,1.000,bicubic,-44.686,-35.502,-31
+maxvit_base_tf_224.in1k,40.781,59.219,61.202,38.798,119.47,224,0.950,bicubic,-44.079,-35.788,+11
+xcit_medium_24_p8_224_dist,40.488,59.512,60.502,39.498,84.32,224,1.000,bicubic,-44.584,-36.752,-3
+tf_efficientnet_b4.ap_in1k,40.484,59.516,61.723,38.277,19.34,380,0.922,bicubic,-42.764,-34.669,+123
+vit_small_r26_s32_384.augreg_in21k_ft_in1k,40.476,59.524,62.736,37.264,36.47,384,1.000,bicubic,-43.570,-34.592,+66
+deit3_base_patch16_224,40.376,59.624,60.186,39.814,86.59,224,0.900,bicubic,-43.416,-36.398,+85
+vit_medium_patch16_gap_256.in12k_ft_in1k,40.274,59.726,61.668,38.332,38.86,256,0.950,bicubic,-44.156,-35.544,+35
+flexivit_large.600ep_in1k,40.260,59.740,60.365,39.635,304.36,240,0.950,bicubic,-45.278,-37.127,-40
+vit_base_patch16_224_miil.in21k_ft_in1k,40.168,59.832,60.887,39.113,86.54,224,0.875,bilinear,-44.100,-35.915,+43
+deit3_small_patch16_224_in21ft1k,40.166,59.834,61.864,38.136,22.06,224,1.000,bicubic,-42.904,-34.916,+132
+regnetz_e8,40.142,59.858,61.330,38.670,57.70,320,1.000,bicubic,-44.888,-35.934,-7
+maxvit_rmlp_small_rw_224,40.115,59.885,59.504,40.496,64.90,224,0.900,bicubic,-44.369,-37.258,+22
+flexivit_large.1200ep_in1k,40.093,59.907,60.638,39.362,304.36,240,0.950,bicubic,-45.551,-36.904,-47
+xcit_medium_24_p8_384_dist,40.040,59.960,60.457,39.543,84.32,384,1.000,bicubic,-45.776,-37.135,-59
+flexivit_large.300ep_in1k,40.009,59.991,59.991,40.009,304.36,240,0.950,bicubic,-45.271,-37.449,-32
+maxvit_tiny_tf_384.in1k,39.977,60.023,60.897,39.103,30.98,384,1.000,bicubic,-45.129,-36.637,-20
+xcit_medium_24_p16_384_dist,39.901,60.099,60.107,39.893,84.40,384,1.000,bicubic,-45.511,-37.299,-41
+dm_nfnet_f3,39.818,60.182,60.610,39.390,254.92,416,0.940,bicubic,-45.704,-36.852,-47
+convnext_tiny.fb_in22k_ft_in1k_384,39.798,60.202,61.534,38.466,28.59,384,1.000,bicubic,-44.282,-35.608,+47
+cait_s36_384,39.765,60.235,60.475,39.525,68.37,384,1.000,bicubic,-45.695,-37.005,-47
+volo_d3_448,39.702,60.298,59.758,40.242,86.63,448,1.000,bicubic,-46.792,-37.952,-93
+efficientnetv2_rw_m.agc_in1k,39.667,60.333,59.687,40.313,53.24,416,1.000,bicubic,-45.141,-37.461,-6
+xception65,39.635,60.365,60.911,39.089,39.92,299,0.940,bicubic,-43.545,-35.681,+108
+ecaresnet269d,39.594,60.406,60.343,39.657,102.09,352,1.000,bicubic,-45.382,-36.883,-18
+tf_efficientnet_b3.ns_jft_in1k,39.584,60.416,61.453,38.547,12.23,300,0.904,bicubic,-44.464,-35.457,+45
+dm_nfnet_f6,39.578,60.422,60.911,39.089,438.36,576,0.956,bicubic,-46.566,-36.819,-84
+dm_nfnet_f5,39.508,60.492,60.227,39.773,377.21,544,0.954,bicubic,-46.306,-37.261,-71
+volo_d3_224,39.488,60.512,59.873,40.127,86.33,224,0.960,bicubic,-45.920,-37.407,-51
+convnext_large.fb_in1k,39.460,60.540,59.192,40.808,197.77,288,1.000,bicubic,-45.386,-38.020,-15
+deit3_base_patch16_384,39.407,60.593,58.940,41.060,86.88,384,1.000,bicubic,-45.665,-38.338,-29
+xcit_small_24_p8_224_dist,39.305,60.695,59.404,40.596,47.63,224,1.000,bicubic,-45.571,-37.784,-19
+xcit_medium_24_p16_224_dist,39.272,60.728,59.457,40.543,84.40,224,1.000,bicubic,-45.002,-37.483,+19
+efficientnet_b4.ra2_in1k,39.079,60.921,59.608,40.392,19.34,384,1.000,bicubic,-44.349,-36.988,+79
+xcit_small_24_p8_384_dist,39.001,60.999,59.172,40.828,47.63,384,1.000,bicubic,-46.555,-38.400,-67
+tresnet_v2_l,38.995,61.005,59.471,40.529,46.17,224,0.875,bilinear,-44.907,-37.021,+44
+resnetv2_152x2_bit_teacher_384,38.979,61.021,62.440,37.560,236.34,384,1.000,bicubic,-44.865,-34.678,+48
+maxvit_small_tf_224.in1k,38.881,61.119,59.174,40.826,68.93,224,0.950,bicubic,-45.553,-37.990,+4
+coatnet_rmlp_2_rw_224,38.843,61.157,58.030,41.970,73.88,224,0.950,bicubic,-45.757,-38.706,-13
+vit_base_patch32_384.augreg_in21k_ft_in1k,38.794,61.206,60.329,39.671,88.30,384,1.000,bicubic,-44.556,-36.507,+83
+tf_efficientnetv2_m.in1k,38.720,61.280,59.809,40.191,54.14,480,1.000,bicubic,-46.488,-37.559,-50
+eca_nfnet_l2,38.664,61.336,59.445,40.555,56.72,384,1.000,bicubic,-46.033,-37.819,-20
+mvitv2_small,38.580,61.420,58.123,41.877,34.87,224,0.900,bicubic,-45.188,-38.447,+51
+xcit_small_12_p8_384_dist,38.549,61.451,58.799,41.201,26.21,384,1.000,bicubic,-46.539,-38.483,-44
+xcit_small_24_p16_384_dist,38.503,61.497,58.384,41.616,47.67,384,1.000,bicubic,-46.595,-38.926,-47
+mvitv2_base,38.456,61.544,57.930,42.070,51.47,224,0.900,bicubic,-45.966,-38.934,-2
+xcit_small_12_p8_224_dist,38.372,61.628,58.791,41.209,26.21,224,1.000,bicubic,-45.860,-37.987,+10
+tf_efficientnet_b5.ra_in1k,38.356,61.644,59.913,40.087,30.39,456,0.934,bicubic,-45.456,-36.835,+41
+deit_base_distilled_patch16_384,38.260,61.740,57.783,42.217,87.63,384,1.000,bicubic,-47.162,-39.549,-73
+dm_nfnet_f4,38.224,61.776,58.626,41.374,316.07,512,0.951,bicubic,-47.490,-38.894,-86
+xcit_large_24_p8_224,38.114,61.886,57.873,42.127,188.93,224,1.000,bicubic,-46.278,-38.783,-4
+vit_base_patch16_384.orig_in21k_ft_in1k,38.099,61.901,60.428,39.572,86.86,384,1.000,bicubic,-46.111,-36.790,+7
+resnetv2_152x2_bitm,37.985,62.015,61.135,38.865,236.34,448,1.000,bilinear,-46.525,-36.297,-20
+pvt_v2_b4,37.941,62.059,58.207,41.793,62.56,224,0.900,bicubic,-45.775,-38.513,+45
+cait_s24_384,37.873,62.127,58.079,41.921,47.06,384,1.000,bicubic,-47.173,-39.267,-51
+resnet152d,37.857,62.143,58.356,41.644,60.21,320,1.000,bicubic,-45.823,-38.382,+48
+resnetrs420,37.747,62.253,58.215,41.785,191.89,416,1.000,bicubic,-47.261,-38.909,-51
+xcit_small_24_p16_224_dist,37.717,62.283,57.360,42.640,47.67,224,1.000,bicubic,-46.145,-39.368,+25
+deit3_medium_patch16_224,37.712,62.288,57.087,42.913,38.85,224,0.900,bicubic,-45.368,-39.205,+82
+resnetrs350,37.676,62.324,58.083,41.917,163.96,384,1.000,bicubic,-47.044,-38.905,-40
+pit_b_distilled_224,37.590,62.410,57.238,42.762,74.79,224,0.900,bicubic,-46.554,-39.618,+4
+xcit_small_12_p16_384_dist,37.576,62.424,57.769,42.231,26.25,384,1.000,bicubic,-47.130,-39.349,-41
+pvt_v2_b5,37.527,62.473,57.262,42.738,81.96,224,0.900,bicubic,-46.213,-39.450,+33
+resnet200d,37.505,62.495,58.297,41.703,64.69,320,1.000,bicubic,-46.457,-38.526,+12
+maxvit_rmlp_tiny_rw_256,37.393,62.607,57.187,42.813,29.15,256,0.950,bicubic,-46.839,-39.689,-7
+resnetv2_152x2_bit_teacher,37.324,62.676,59.390,40.610,236.34,224,0.875,bicubic,-45.538,-37.178,+88
+resnest269e,37.315,62.685,57.468,42.532,110.93,416,0.928,bicubic,-47.203,-39.468,-36
+convnext_base.fb_in1k,37.307,62.693,57.317,42.683,88.59,288,1.000,bicubic,-47.127,-39.503,-28
+resmlp_big_24_224_in22ft1k,37.244,62.756,58.184,41.816,129.14,224,0.875,bicubic,-47.150,-38.699,-24
+vit_small_r26_s32_224.augreg_in21k_ft_in1k,37.234,62.766,59.060,40.940,36.43,224,0.900,bicubic,-44.624,-36.962,+173
+cait_s24_224,37.153,62.847,56.724,43.276,46.92,224,1.000,bicubic,-46.299,-39.840,+41
+efficientformer_l7,37.126,62.874,56.896,43.104,82.23,224,0.950,bicubic,-46.260,-39.644,+49
+pvt_v2_b3,37.114,62.886,57.331,42.669,45.24,224,0.900,bicubic,-46.012,-39.225,+65
+volo_d1_384,37.083,62.917,57.130,42.870,26.78,384,1.000,bicubic,-48.167,-40.066,-87
+vit_base_patch32_224.augreg_in21k_ft_in1k,37.077,62.923,59.294,40.706,88.22,224,0.900,bicubic,-43.647,-36.274,+246
+tf_efficientnet_b3.ap_in1k,37.055,62.945,57.240,42.760,12.23,300,0.904,bicubic,-44.767,-38.384,+169
+efficientnetv2_rw_s.ra2_in1k,37.049,62.951,56.814,43.186,23.94,384,1.000,bicubic,-46.759,-39.910,+13
+maxvit_tiny_tf_224.in1k,37.016,62.984,56.902,43.098,30.92,224,0.950,bicubic,-46.382,-39.686,+39
+swinv2_base_window16_256,36.996,63.004,56.138,43.862,87.92,256,0.900,bicubic,-47.598,-40.936,-52
+regnetz_040h,36.973,63.027,57.285,42.715,28.94,320,1.000,bicubic,-47.521,-39.721,-46
+xcit_small_12_p16_224_dist,36.973,63.027,56.733,43.267,26.25,224,1.000,bicubic,-46.377,-39.681,+43
+volo_d1_224,36.884,63.116,56.639,43.361,26.63,224,0.960,bicubic,-47.280,-40.137,-18
+seresnet152d,36.790,63.210,56.718,43.282,66.84,320,1.000,bicubic,-47.572,-40.322,-34
+maxxvit_rmlp_small_rw_256,36.705,63.295,56.022,43.978,66.01,256,0.950,bicubic,-47.923,-41.040,-60
+seresnext101d_32x8d,36.641,63.359,56.336,43.664,93.59,288,1.000,bicubic,-47.730,-40.580,-37
+volo_d2_224,36.595,63.405,56.468,43.532,58.68,224,0.960,bicubic,-48.601,-40.720,-93
+xception65p,36.556,63.444,56.429,43.571,39.82,299,0.940,bicubic,-46.574,-40.051,+50
+seresnextaa101d_32x8d,36.527,63.473,56.403,43.597,93.59,288,1.000,bicubic,-48.041,-40.667,-60
+regnetz_d32,36.444,63.556,57.372,42.628,27.58,320,0.950,bicubic,-47.578,-39.494,-16
+efficientnet_b3.ra2_in1k,36.420,63.580,56.845,43.155,12.23,320,1.000,bicubic,-45.822,-39.269,+122
+cait_xs24_384,36.416,63.584,56.944,43.056,26.67,384,1.000,bicubic,-47.645,-39.945,-23
+volo_d2_384,36.416,63.584,56.311,43.689,58.87,384,1.000,bicubic,-49.620,-41.261,-144
+deit_base_distilled_patch16_224,36.397,63.603,56.617,43.383,87.34,224,0.900,bicubic,-46.991,-39.871,+26
+resnetv2_101x3_bitm,36.381,63.619,59.070,40.930,387.93,448,1.000,bilinear,-48.059,-38.312,-56
+gcvit_base,36.371,63.629,55.896,44.104,90.32,224,0.875,bicubic,-48.077,-41.186,-58
+resnetrs270,36.320,63.680,56.562,43.438,129.86,352,1.000,bicubic,-48.114,-40.408,-55
+tresnet_m,36.285,63.715,55.796,44.204,31.39,224,0.875,bilinear,-46.795,-40.322,+45
+mixer_b16_224_miil,36.269,63.731,55.965,44.035,59.88,224,0.875,bilinear,-46.039,-39.751,+109
+convnext_small.fb_in1k,36.251,63.749,55.914,44.086,50.22,288,1.000,bicubic,-47.455,-40.896,+3
+tf_efficientnet_b2.ns_jft_in1k,36.183,63.817,57.551,42.449,9.11,260,0.890,bicubic,-46.197,-38.697,+96
+deit3_small_patch16_384,36.183,63.817,55.564,44.436,22.21,384,1.000,bicubic,-47.243,-41.112,+14
+mvitv2_tiny,36.161,63.839,55.128,44.872,24.17,224,0.900,bicubic,-46.243,-41.028,+90
+resnet152,36.086,63.914,55.550,44.450,60.19,224,0.950,bicubic,-46.736,-40.576,+53
+regnetz_040,36.051,63.949,55.745,44.255,27.12,320,1.000,bicubic,-48.185,-41.187,-48
+ecaresnet101d,36.004,63.996,56.165,43.835,44.57,224,0.875,bicubic,-46.168,-39.881,+114
+dm_nfnet_f2,36.004,63.996,55.456,44.544,193.78,352,0.920,bicubic,-49.060,-41.784,-102
+resnest200e,35.931,64.069,55.849,44.151,70.20,320,0.909,bicubic,-47.901,-41.045,-19
+swsl_resnet18,35.858,64.142,58.455,41.545,11.69,224,0.875,bilinear,-37.418,-33.279,+505
+sequencer2d_l,35.825,64.175,55.712,44.288,54.30,224,0.875,bicubic,-47.581,-40.794,+6
+eca_nfnet_l1,35.823,64.177,55.957,44.043,41.41,320,1.000,bicubic,-48.187,-41.071,-35
+vit_base_patch16_224.orig_in21k_ft_in1k,35.768,64.232,57.390,42.610,86.57,224,0.900,bicubic,-46.018,-38.732,+138
+gcvit_small,35.746,64.254,54.821,45.179,51.09,224,0.875,bicubic,-48.138,-41.837,-30
+vit_relpos_medium_patch16_cls_224.sw_in1k,35.740,64.260,54.918,45.082,38.76,224,0.900,bicubic,-46.822,-41.252,+68
+xcit_small_24_p8_224,35.546,64.454,54.788,45.212,47.63,224,1.000,bicubic,-48.292,-41.848,-27
+xcit_small_12_p8_224,35.520,64.480,55.511,44.489,26.21,224,1.000,bicubic,-47.824,-40.969,+10
+xcit_large_24_p16_224,35.520,64.480,54.760,45.240,189.10,224,1.000,bicubic,-47.376,-41.122,+37
+flexivit_base.1200ep_in1k,35.519,64.481,53.843,46.157,86.59,240,0.950,bicubic,-49.145,-43.149,-94
+vit_small_patch16_384.augreg_in21k_ft_in1k,35.479,64.521,57.549,42.451,22.20,384,1.000,bicubic,-48.323,-39.553,-27
+xcit_medium_24_p8_224,35.446,64.554,54.827,45.173,84.32,224,1.000,bicubic,-48.288,-41.567,-23
+swinv2_small_window16_256,35.446,64.554,54.641,45.359,49.73,256,0.900,bicubic,-48.760,-42.230,-59
+swinv2_base_window8_256,35.444,64.556,54.617,45.383,87.92,256,0.900,bicubic,-48.818,-42.305,-67
+resnest101e,35.373,64.627,55.780,44.220,48.28,256,0.875,bilinear,-47.517,-40.540,+31
+convit_base,35.314,64.686,54.927,45.073,86.54,224,0.875,bicubic,-46.974,-41.081,+87
+efficientformer_l3,35.263,64.737,54.487,45.513,31.41,224,0.950,bicubic,-47.287,-41.761,+58
+xcit_tiny_24_p8_224_dist,35.253,64.747,55.258,44.742,12.11,224,1.000,bicubic,-47.309,-40.809,+55
+edgenext_base,35.208,64.792,55.128,44.872,18.51,320,1.000,bicubic,-48.752,-41.640,-49
+flexivit_base.600ep_in1k,35.137,64.863,53.662,46.338,86.59,240,0.950,bicubic,-49.381,-43.324,-95
+twins_svt_large,35.086,64.914,54.721,45.279,99.27,224,0.900,bicubic,-48.592,-41.873,-23
+repvgg_b3g4,35.043,64.957,54.772,45.228,83.83,224,0.875,bilinear,-45.169,-40.338,+233
+repvgg_b3,35.043,64.957,54.542,45.458,123.09,224,0.875,bilinear,-45.449,-40.718,+205
+regnetz_d8,34.998,65.002,55.939,44.061,23.37,320,1.000,bicubic,-49.052,-41.059,-62
+dm_nfnet_f1,34.990,65.010,54.108,45.892,132.63,320,0.910,bicubic,-49.636,-42.992,-107
+xcit_tiny_24_p8_384_dist,34.925,65.075,55.153,44.847,12.11,384,1.000,bicubic,-48.815,-41.481,-38
+regnetz_d8_evos,34.898,65.103,55.258,44.742,23.46,320,0.950,bicubic,-49.152,-41.736,-64
+resnet101d,34.872,65.128,54.202,45.798,44.57,320,1.000,bicubic,-48.150,-42.244,+12
+coatnet_1_rw_224,34.840,65.159,53.426,46.574,41.72,224,0.950,bicubic,-48.768,-42.962,-30
+swin_s3_base_224,34.797,65.203,53.707,46.293,71.13,224,0.900,bicubic,-49.133,-42.955,-59
+flexivit_base.300ep_in1k,34.797,65.203,53.171,46.829,86.59,240,0.950,bicubic,-49.597,-43.949,-91
+coatnet_rmlp_1_rw_224,34.795,65.205,53.951,46.049,41.69,224,0.950,bicubic,-48.563,-42.505,-17
+maxvit_tiny_rw_224,34.789,65.211,53.347,46.653,29.06,224,0.950,bicubic,-48.715,-43.154,-30
+resmlp_big_24_distilled_224,34.788,65.213,54.637,45.363,129.14,224,0.875,bicubic,-48.800,-42.011,-34
+seresnext101_32x8d,34.788,65.213,53.462,46.538,93.57,288,1.000,bicubic,-49.404,-43.412,-80
+vit_relpos_base_patch16_clsgap_224.sw_in1k,34.728,65.272,54.218,45.782,86.43,224,0.900,bicubic,-48.034,-41.956,+18
+sequencer2d_m,34.709,65.291,53.998,46.002,38.31,224,0.875,bicubic,-48.097,-42.270,+15
+vit_base_patch16_rpn_224.in1k,34.705,65.295,54.658,45.342,86.54,224,0.900,bicubic,-47.497,-41.338,+71
+resnet101,34.681,65.319,54.318,45.682,44.55,224,0.950,bicubic,-47.257,-41.436,+94
+deit3_small_patch16_224,34.677,65.323,53.159,46.841,22.06,224,0.900,bicubic,-46.709,-42.291,+130
+vit_large_patch32_384.orig_in21k_ft_in1k,34.673,65.326,55.729,44.271,306.63,384,1.000,bicubic,-46.833,-40.363,+118
+dm_nfnet_f0,34.618,65.382,54.672,45.328,71.49,256,0.900,bicubic,-48.767,-41.900,-29
+vit_relpos_base_patch16_224.sw_in1k,34.611,65.389,54.287,45.713,86.43,224,0.900,bicubic,-47.873,-41.855,+37
+ssl_resnext101_32x16d,34.603,65.397,55.931,44.069,194.03,224,0.875,bilinear,-47.241,-40.165,+93
+repvgg_b2g4,34.587,65.413,54.782,45.218,61.76,224,0.875,bilinear,-44.779,-39.906,+258
+resnetv2_101,34.583,65.417,53.155,46.845,44.54,224,0.950,bicubic,-47.447,-42.705,+77
+gcvit_tiny,34.567,65.433,53.245,46.755,28.22,224,0.875,bicubic,-48.833,-43.153,-38
+resnetrs200,34.505,65.496,54.283,45.717,93.21,320,1.000,bicubic,-49.943,-42.561,-119
+resnest50d_4s2x40d,34.355,65.645,54.725,45.275,30.42,224,0.875,bicubic,-46.753,-40.833,+139
+resnetrs152,34.355,65.645,53.562,46.438,86.62,320,1.000,bicubic,-49.357,-43.052,-56
+pvt_v2_b2_li,34.308,65.692,54.094,45.906,22.55,224,0.900,bicubic,-47.888,-42.010,+59
+crossvit_18_dagger_408,34.251,65.749,53.092,46.908,44.61,408,1.000,bicubic,-49.945,-43.726,-98
+xcit_medium_24_p16_224,34.243,65.757,53.159,46.841,84.40,224,1.000,bicubic,-48.393,-42.817,+10
+tf_efficientnet_b1.ns_jft_in1k,34.157,65.843,55.489,44.511,7.79,240,0.882,bicubic,-47.231,-40.249,+115
+efficientnetv2_rw_t.ra2_in1k,34.155,65.845,53.131,46.869,13.65,288,1.000,bicubic,-48.193,-43.065,+38
+twins_pcpvt_large,34.111,65.888,54.128,45.872,60.99,224,0.900,bicubic,-49.029,-42.470,-28
+tf_efficientnet_b4.aa_in1k,34.064,65.936,54.198,45.802,19.34,380,0.922,bicubic,-48.958,-42.102,-16
+ssl_resnext101_32x8d,34.017,65.983,55.601,44.399,88.79,224,0.875,bilinear,-47.599,-40.437,+93
+nfnet_l0,34.002,65.999,54.365,45.635,35.07,288,1.000,bicubic,-48.748,-42.151,-4
+tf_efficientnet_b6.aa_in1k,33.998,66.002,54.544,45.456,43.04,528,0.942,bicubic,-50.112,-42.342,-101
+efficientnet_b3_pruned.in1k,33.996,66.004,54.108,45.892,9.86,300,0.904,bicubic,-46.862,-41.134,+148
+xcit_small_24_p16_224,33.996,66.004,53.285,46.715,47.67,224,1.000,bicubic,-48.584,-42.719,+9
+regnety_160,33.976,66.024,53.546,46.454,83.59,288,1.000,bicubic,-49.710,-43.230,-67
+gc_efficientnetv2_rw_t.agc_in1k,33.952,66.048,53.220,46.780,13.68,288,1.000,bicubic,-48.512,-43.078,+18
+pit_s_distilled_224,33.939,66.061,53.265,46.735,24.04,224,0.900,bicubic,-48.057,-42.533,+60
+swinv2_cr_small_ns_224,33.842,66.158,52.618,47.382,49.70,224,0.900,bicubic,-49.646,-43.868,-62
+resnext101_64x4d,33.833,66.168,52.166,47.834,83.46,288,1.000,bicubic,-49.315,-44.206,-41
+xcit_small_12_p16_224,33.776,66.225,53.233,46.767,26.25,224,1.000,bicubic,-48.199,-42.583,+59
+swin_s3_small_224,33.705,66.295,52.396,47.604,49.74,224,0.900,bicubic,-50.065,-44.054,-83
+resnetv2_50x3_bitm,33.660,66.341,55.882,44.118,217.32,448,1.000,bilinear,-50.354,-41.242,-103
+swinv2_small_window8_256,33.646,66.354,52.813,47.187,49.73,256,0.900,bicubic,-50.210,-43.827,-94
+resnet51q,33.563,66.437,53.021,46.979,35.70,288,1.000,bilinear,-48.797,-43.159,+19
+xcit_tiny_24_p16_384_dist,33.510,66.490,52.774,47.226,12.12,384,1.000,bicubic,-49.060,-43.512,-1
+vit_relpos_medium_patch16_224.sw_in1k,33.498,66.502,52.601,47.399,38.75,224,0.900,bicubic,-48.968,-43.487,+7
+regnety_080,33.467,66.533,52.947,47.053,39.18,288,1.000,bicubic,-50.465,-43.941,-104
+cs3edgenet_x,33.451,66.549,52.921,47.079,47.82,288,1.000,bicubic,-49.251,-43.449,-18
+sequencer2d_s,33.426,66.574,52.398,47.602,27.65,224,0.875,bicubic,-48.916,-43.632,+18
+convmixer_1536_20,33.420,66.580,53.027,46.973,51.63,224,0.960,bicubic,-47.956,-42.587,+94
+regnety_032,33.412,66.588,52.754,47.246,19.44,288,1.000,bicubic,-49.312,-43.670,-23
+crossvit_18_240,33.400,66.600,52.241,47.759,43.27,240,0.875,bicubic,-49.000,-43.813,+6
+vit_srelpos_medium_patch16_224.sw_in1k,33.371,66.629,52.461,47.539,38.74,224,0.900,bicubic,-48.865,-43.473,+25
+tf_efficientnetv2_b3.in21k_ft_in1k,33.365,66.635,54.933,45.067,14.36,300,0.900,bicubic,-49.307,-41.691,-21
+gernet_l,33.357,66.643,51.901,48.099,31.08,256,0.875,bilinear,-47.997,-43.635,+90
+crossvit_15_dagger_408,33.331,66.669,52.194,47.806,28.50,408,1.000,bicubic,-50.507,-44.588,-105
+crossvit_18_dagger_240,33.290,66.710,52.198,47.802,44.27,240,0.875,bicubic,-49.228,-44.162,-6
+tresnet_xl,33.257,66.743,52.294,47.706,78.44,224,0.875,bilinear,-48.797,-43.642,+35
+jx_nest_base,33.214,66.787,51.811,48.189,67.72,224,0.875,bicubic,-50.339,-44.559,-86
+convnext_tiny.fb_in1k,33.164,66.836,52.672,47.328,28.59,288,1.000,bicubic,-49.536,-43.464,-29
+resnest50d_1s4x24d,33.147,66.853,52.839,47.161,25.68,224,0.875,bicubic,-47.841,-42.483,+108
+convnext_nano.in12k_ft_in1k,33.119,66.881,53.970,46.030,15.59,288,1.000,bicubic,-49.739,-42.586,-42
+vit_relpos_medium_patch16_rpn_224.sw_in1k,33.103,66.897,52.353,47.647,38.73,224,0.900,bicubic,-49.195,-43.621,+10
+resnet61q,33.097,66.903,51.754,48.246,36.85,288,1.000,bicubic,-49.427,-44.376,-15
+maxxvit_rmlp_nano_rw_256,33.088,66.912,51.854,48.146,16.78,256,0.950,bicubic,-49.942,-44.490,-54
+jx_nest_small,33.042,66.957,51.062,48.938,38.35,224,0.875,bicubic,-50.078,-45.266,-62
+crossvit_base_240,33.033,66.967,51.394,48.606,105.03,240,0.875,bicubic,-49.183,-44.436,+13
+twins_pcpvt_base,33.021,66.979,52.485,47.515,43.83,224,0.900,bicubic,-49.687,-43.861,-39
+pvt_v2_b2,33.015,66.985,52.037,47.963,25.36,224,0.900,bicubic,-49.061,-43.925,+21
+xcit_tiny_24_p16_224_dist,32.989,67.011,52.056,47.944,12.12,224,1.000,bicubic,-47.457,-43.162,+137
+rexnet_200,32.987,67.013,52.939,47.061,16.37,224,0.875,bicubic,-48.645,-42.729,+50
+resnest50d,32.972,67.028,52.713,47.287,27.48,224,0.875,bilinear,-48.002,-42.665,+98
+tf_efficientnetv2_s.in1k,32.915,67.085,51.726,48.274,21.46,384,1.000,bicubic,-50.979,-44.972,-127
+convit_small,32.913,67.087,52.123,47.877,27.78,224,0.875,bicubic,-48.513,-43.621,+63
+crossvit_15_dagger_240,32.903,67.097,51.783,48.217,28.21,240,0.875,bicubic,-49.429,-44.735,-6
+convnext_tiny_hnf.a2h_in1k,32.895,67.105,51.190,48.810,28.59,288,1.000,bicubic,-49.695,-44.826,-37
+vit_small_patch16_224.augreg_in21k_ft_in1k,32.885,67.115,53.923,46.077,22.05,224,0.900,bicubic,-48.517,-42.211,+63
+tf_efficientnet_b3.aa_in1k,32.860,67.140,52.950,47.050,12.23,300,0.904,bicubic,-48.776,-42.768,+42
+pnasnet5large,32.848,67.152,50.500,49.500,86.06,331,0.911,bicubic,-49.934,-45.540,-54
+regnetv_064,32.836,67.164,52.854,47.146,30.58,288,1.000,bicubic,-50.876,-43.894,-115
+twins_svt_base,32.836,67.164,51.559,48.441,56.07,224,0.900,bicubic,-50.300,-44.859,-80
+regnetz_c16,32.821,67.180,53.744,46.256,13.46,320,0.940,bicubic,-49.697,-42.328,-32
+nasnetalarge,32.775,67.225,50.141,49.859,88.75,331,0.911,bicubic,-49.845,-45.906,-46
+gernet_m,32.740,67.260,51.913,48.087,21.14,224,0.875,bilinear,-47.992,-43.271,+101
+inception_resnet_v2,32.738,67.262,50.648,49.352,55.84,299,0.897,bicubic,-47.720,-44.658,+120
+gluon_resnet152_v1d,32.734,67.266,51.088,48.912,60.21,224,0.875,bicubic,-47.740,-44.118,+116
+pit_b_224,32.718,67.282,49.852,50.148,73.76,224,0.900,bicubic,-49.728,-45.858,-31
+tf_efficientnet_b2.ap_in1k,32.681,67.319,52.239,47.761,9.11,260,0.890,bicubic,-47.619,-42.789,+134
+fbnetv3_g.ra2_in1k,32.630,67.370,52.892,47.108,16.62,288,0.950,bilinear,-49.418,-43.172,+5
+tresnet_l,32.559,67.441,51.139,48.861,55.99,224,0.875,bilinear,-48.929,-44.485,+42
+cait_xxs36_384,32.549,67.451,52.233,47.767,17.37,384,1.000,bicubic,-49.645,-43.915,-8
+regnetz_c16_evos,32.539,67.461,52.915,47.085,13.49,320,0.950,bicubic,-50.091,-43.559,-56
+wide_resnet50_2,32.439,67.561,51.459,48.541,68.88,224,0.875,bicubic,-49.017,-44.073,+43
+gmlp_s16_224,32.418,67.582,51.815,48.185,19.42,224,0.875,bicubic,-47.224,-42.783,+168
+ens_adv_inception_resnet_v2,32.372,67.628,50.427,49.573,55.84,299,0.897,bicubic,-47.610,-44.511,+145
+deit_base_patch16_224,32.363,67.637,51.011,48.989,86.57,224,0.900,bicubic,-49.635,-44.723,0
+maxvit_nano_rw_256,32.357,67.643,50.618,49.382,15.45,256,0.950,bicubic,-50.575,-45.604,-81
+swin_small_patch4_window7_224,32.341,67.659,50.905,49.095,49.61,224,0.900,bicubic,-50.871,-45.417,-103
+gluon_resnet152_v1s,32.331,67.669,50.526,49.474,60.32,224,0.875,bicubic,-48.685,-44.886,+69
+deit_small_distilled_patch16_224,32.284,67.716,52.102,47.898,22.44,224,0.900,bicubic,-48.916,-43.276,+52
+xcit_tiny_24_p8_224,32.274,67.726,51.901,48.099,12.11,224,1.000,bicubic,-49.626,-44.075,+6
+gluon_seresnext101_64x4d,32.205,67.795,50.319,49.681,88.23,224,0.875,bicubic,-48.689,-44.989,+76
+coat_lite_small,32.127,67.873,49.934,50.066,19.84,224,0.900,bicubic,-50.181,-45.916,-32
+gluon_seresnext101_32x4d,32.107,67.893,51.237,48.763,48.96,224,0.875,bicubic,-48.797,-44.057,+72
+flexivit_small.1200ep_in1k,32.087,67.912,50.296,49.704,22.06,240,0.950,bicubic,-50.438,-45.840,-57
+coatnext_nano_rw_224,32.076,67.924,51.019,48.981,14.70,224,0.900,bicubic,-49.872,-44.899,-3
+gcvit_xtiny,32.050,67.950,50.995,49.005,19.98,224,0.875,bicubic,-49.902,-44.971,-5
+deit_base_patch16_384,31.989,68.011,50.547,49.453,86.86,384,1.000,bicubic,-51.117,-45.825,-103
+seresnext50_32x4d,31.985,68.015,51.231,48.769,27.56,224,0.875,bicubic,-49.281,-44.389,+40
+maxvit_rmlp_nano_rw_256,31.966,68.034,50.626,49.374,15.50,256,0.950,bicubic,-50.996,-45.644,-96
+xcit_tiny_12_p8_224_dist,31.944,68.056,51.390,48.610,6.71,224,1.000,bicubic,-49.268,-44.210,+40
+coatnet_bn_0_rw_224,31.883,68.117,51.017,48.983,27.44,224,0.950,bicubic,-50.515,-45.165,-53
+levit_384,31.877,68.123,50.598,49.402,39.13,224,0.900,bicubic,-50.709,-45.418,-73
+resnetrs101,31.858,68.142,51.017,48.983,63.62,288,0.940,bicubic,-50.430,-44.921,-40
+cs3se_edgenet_x,31.803,68.197,50.773,49.227,50.72,320,1.000,bicubic,-51.745,-45.897,-141
+vit_relpos_small_patch16_224.sw_in1k,31.785,68.215,50.622,49.378,21.98,224,0.900,bicubic,-49.677,-45.206,+19
+poolformer_m48,31.702,68.298,49.883,50.117,73.47,224,0.950,bicubic,-50.760,-46.075,-62
+convnext_tiny.fb_in22k_ft_in1k,31.679,68.321,51.785,48.215,28.59,288,1.000,bicubic,-47.229,-42.889,+191
+flexivit_small.600ep_in1k,31.649,68.351,49.366,50.634,22.06,240,0.950,bicubic,-50.705,-46.720,-55
+tnt_s_patch16_224,31.643,68.357,51.143,48.857,23.76,224,0.900,bicubic,-49.875,-44.605,+9
+eca_nfnet_l0,31.612,68.388,51.614,48.386,24.14,288,1.000,bicubic,-50.968,-44.876,-79
+resnetv2_50x1_bit_distilled,31.584,68.416,51.263,48.737,25.55,224,0.875,bicubic,-51.234,-45.259,-100 +coatnet_rmlp_nano_rw_224,31.545,68.455,50.170,49.830,15.15,224,0.900,bicubic,-50.519,-45.700,-32 +xception41p,31.516,68.484,50.374,49.626,26.91,299,0.940,bicubic,-50.442,-45.420,-23 +mobilevitv2_200_in22ft1k,31.510,68.490,51.758,48.242,18.45,256,0.888,bicubic,-50.814,-44.182,-55 +regnety_064,31.474,68.526,50.528,49.472,30.58,288,1.000,bicubic,-52.242,-46.146,-163 +poolformer_m36,31.443,68.557,50.036,49.964,56.17,224,0.950,bicubic,-50.667,-45.652,-39 +flexivit_small.300ep_in1k,31.439,68.561,49.215,50.785,22.06,240,0.950,bicubic,-50.733,-46.809,-42 +ssl_resnext101_32x4d,31.423,68.577,52.121,47.879,44.18,224,0.875,bilinear,-49.501,-43.607,+46 +inception_v4,31.378,68.622,49.244,50.756,42.68,299,0.875,bicubic,-48.790,-45.724,+100 +rexnet_150,31.366,68.634,51.288,48.712,9.73,224,0.875,bicubic,-48.944,-43.878,+88 +crossvit_15_240,31.341,68.659,50.168,49.832,27.53,240,0.875,bicubic,-50.195,-45.524,-5 +efficientformer_l1,31.333,68.667,50.449,49.551,12.29,224,0.950,bicubic,-49.169,-44.549,+65 +pit_s_224,31.333,68.667,49.661,50.339,23.46,224,0.900,bicubic,-49.761,-45.909,+29 +swinv2_tiny_window16_256,31.313,68.687,49.630,50.370,28.35,256,0.900,bicubic,-51.497,-46.602,-112 +vit_srelpos_small_patch16_224.sw_in1k,31.280,68.720,50.243,49.757,21.97,224,0.900,bicubic,-49.814,-45.089,+26 +cait_xxs36_224,31.278,68.722,50.616,49.384,17.30,224,1.000,bicubic,-48.472,-44.250,+116 +crossvit_small_240,31.276,68.724,50.192,49.808,26.86,240,0.875,bicubic,-49.744,-45.268,+29 +cspresnet50,31.270,68.730,51.223,48.777,21.62,256,0.887,bilinear,-48.304,-43.489,+126 +swinv2_cr_small_224,31.256,68.744,48.747,51.253,49.70,224,0.900,bicubic,-51.890,-47.347,-141 +coatnet_0_rw_224,31.250,68.750,48.621,51.379,27.44,224,0.950,bicubic,-51.140,-47.215,-81 +convmixer_768_32,31.248,68.752,50.942,49.058,21.11,224,0.960,bicubic,-48.916,-44.130,+89 +swin_s3_tiny_224,31.242,68.757,49.720,50.280,28.33,224,0.900,bicubic,-50.880,-46.228,-56 +cspresnext50,31.229,68.771,50.889,49.111,20.57,256,0.887,bilinear,-49.317,-44.431,+49 +regnetv_040,31.211,68.789,50.115,49.885,20.64,288,1.000,bicubic,-51.983,-46.545,-149 +coat_mini,31.203,68.797,49.773,50.227,10.34,224,0.900,bicubic,-50.065,-45.619,+2 +xcit_tiny_12_p8_384_dist,31.191,68.809,50.522,49.478,6.71,384,1.000,bicubic,-51.197,-45.702,-86 +ecaresnetlight,31.121,68.879,50.243,49.757,30.16,224,0.875,bicubic,-49.341,-45.005,+56 +gluon_resnet101_v1s,31.115,68.885,49.793,50.207,44.67,224,0.875,bicubic,-49.187,-45.367,+71 +edgenext_small,31.103,68.897,50.131,49.869,5.59,320,1.000,bicubic,-50.465,-45.575,-25 +coatnet_nano_rw_224,31.093,68.907,49.586,50.414,15.14,224,0.900,bicubic,-50.607,-46.052,-34 +tf_efficientnet_cc_b0_8e.in1k,31.087,68.913,50.761,49.239,24.01,224,0.875,bicubic,-46.821,-42.892,+212 +resmlp_36_distilled_224,31.070,68.930,49.683,50.317,44.69,224,0.875,bicubic,-50.090,-45.805,+2 +ecaresnet50d,31.058,68.942,50.848,49.152,25.58,224,0.875,bicubic,-49.534,-44.472,+37 +ecaresnet50t,31.058,68.942,50.577,49.423,25.57,320,0.950,bicubic,-51.288,-45.561,-89 +cs3sedarknet_x,31.028,68.972,50.135,49.865,35.40,288,1.000,bicubic,-51.626,-46.219,-122 +resnet50d,31.020,68.980,49.808,50.192,25.58,224,0.875,bicubic,-49.510,-45.352,+38 +cspdarknet53,31.017,68.984,50.390,49.610,27.64,256,0.887,bilinear,-49.042,-44.694,+79 +gcresnet50t,31.009,68.991,50.123,49.877,25.90,256,0.900,bicubic,-49.931,-45.331,+14 +gluon_resnet152_v1c,30.991,69.009,48.924,51.076,60.21,224,0.875,bicubic,-48.919,-45.916,+83 
+gluon_resnext101_64x4d,30.987,69.013,48.549,51.451,83.46,224,0.875,bicubic,-49.617,-46.439,+29 +twins_svt_small,30.985,69.015,49.223,50.777,24.06,224,0.900,bicubic,-50.697,-46.447,-44 +resnext50_32x4d,30.950,69.050,49.270,50.730,25.03,224,0.950,bicubic,-50.168,-46.062,-4 +resmlp_24_distilled_224,30.901,69.099,50.178,49.822,30.02,224,0.875,bicubic,-49.865,-45.040,+20 +tf_efficientnet_cc_b1_8e.in1k,30.899,69.101,50.080,49.920,39.72,240,0.882,bicubic,-48.409,-44.290,+114 +ecaresnet101d_pruned,30.897,69.103,50.013,49.987,24.88,224,0.875,bicubic,-49.921,-45.615,+16 +gluon_resnext101_32x4d,30.877,69.123,48.537,51.463,44.18,224,0.875,bicubic,-49.457,-46.389,+49 +tf_efficientnetv2_b3.in1k,30.861,69.139,49.814,50.186,14.36,300,0.904,bicubic,-51.109,-45.968,-68 +tf_efficientnet_lite4.in1k,30.830,69.170,50.386,49.614,13.01,380,0.920,bilinear,-50.706,-45.282,-41 +nf_resnet50,30.702,69.298,49.958,50.042,25.56,288,0.940,bicubic,-49.960,-45.378,+18 +dpn107,30.678,69.322,48.810,51.190,86.92,224,0.875,bicubic,-49.478,-46.100,+61 +xcit_tiny_24_p16_224,30.677,69.323,50.410,49.590,12.12,224,1.000,bicubic,-48.767,-44.472,+101 +poolformer_s36,30.667,69.333,49.435,50.565,30.86,224,0.900,bicubic,-50.749,-46.011,-35 +ese_vovnet39b,30.657,69.343,49.875,50.125,24.57,224,0.875,bicubic,-48.663,-44.837,+103 +gluon_resnet152_v1b,30.623,69.376,48.521,51.479,60.19,224,0.875,bicubic,-49.063,-46.215,+83 +tresnet_xl_448,30.614,69.386,49.069,50.931,78.44,448,0.875,bilinear,-52.436,-47.105,-167 +ssl_resnext50_32x4d,30.594,69.406,50.657,49.343,25.03,224,0.875,bilinear,-49.724,-44.749,+39 +haloregnetz_b,30.594,69.406,49.009,50.991,11.68,224,0.940,bicubic,-50.456,-46.187,-12 +gluon_resnet101_v1d,30.523,69.477,47.950,52.050,44.57,224,0.875,bicubic,-49.891,-47.064,+31 +dpn68b,30.517,69.483,49.162,50.838,12.61,224,0.875,bicubic,-48.699,-45.252,+110 +mobilevitv2_200_384_in22ft1k,30.498,69.502,50.575,49.425,18.45,384,1.000,bicubic,-52.896,-46.005,-198 +resnest26d,30.490,69.510,50.677,49.323,17.07,224,0.875,bilinear,-47.988,-43.621,+149 +efficientnet_b2.ra_in1k,30.435,69.565,49.698,50.302,9.11,288,1.000,bicubic,-50.177,-45.620,+6 +tf_efficientnet_b1.ap_in1k,30.421,69.579,49.553,50.447,7.79,240,0.882,bicubic,-48.859,-44.753,+101 +cs3darknet_x,30.409,69.591,49.187,50.813,35.05,288,1.000,bicubic,-51.819,-47.047,-106 +xcit_tiny_12_p16_384_dist,30.405,69.595,50.131,49.869,6.72,384,1.000,bicubic,-50.535,-45.279,-12 +resnetv2_50,30.386,69.614,48.834,51.166,25.55,224,0.950,bicubic,-50.046,-46.246,+21 +twins_pcpvt_small,30.382,69.618,49.386,50.614,24.11,224,0.900,bicubic,-50.706,-46.256,-24 +visformer_small,30.329,69.671,48.285,51.715,40.22,224,0.900,bicubic,-51.777,-47.587,-100 +pit_xs_distilled_224,30.278,69.722,49.836,50.164,11.00,224,0.900,bicubic,-49.028,-44.528,+91 +regnety_040,30.254,69.746,48.910,51.090,20.65,288,1.000,bicubic,-52.784,-47.600,-181 +mobilevitv2_175_in22ft1k,30.209,69.791,49.034,50.966,14.25,256,0.888,bicubic,-51.735,-46.758,-88 +vit_relpos_base_patch32_plus_rpn_256.sw_in1k,30.207,69.793,48.700,51.300,119.42,256,0.900,bicubic,-49.273,-45.438,+76 +convmixer_1024_20_ks9_p14,30.105,69.895,49.932,50.068,24.38,224,0.960,bicubic,-46.841,-43.426,+213 +seresnet50,30.077,69.923,49.292,50.708,28.09,224,0.875,bicubic,-50.197,-45.778,+28 +dpn98,30.067,69.933,48.244,51.756,61.57,224,0.875,bicubic,-49.575,-46.384,+67 +tf_efficientnet_b2.aa_in1k,30.026,69.974,49.581,50.419,9.11,260,0.890,bicubic,-50.060,-45.328,+37 +dpn131,30.024,69.976,48.146,51.854,79.25,224,0.875,bicubic,-49.798,-46.564,+52 
+efficientnet_el.ra_in1k,30.018,69.982,48.834,51.166,10.59,300,0.904,bicubic,-51.298,-46.692,-53 +legacy_senet154,30.001,69.999,48.034,51.966,115.09,224,0.875,bilinear,-51.309,-47.462,-53 +xcit_tiny_12_p16_224_dist,29.997,70.003,49.641,50.359,6.72,224,1.000,bicubic,-48.581,-44.555,+124 +halo2botnet50ts_256,29.983,70.017,48.388,51.612,22.64,256,0.950,bicubic,-52.077,-47.248,-110 +dpn92,29.953,70.047,49.162,50.838,37.67,224,0.875,bicubic,-50.055,-45.674,+36 +mobilevitv2_150_in22ft1k,29.951,70.049,49.215,50.785,10.59,256,0.888,bicubic,-51.526,-46.459,-71 +resnetv2_101x1_bitm,29.898,70.102,51.121,48.879,44.54,448,1.000,bilinear,-52.434,-44.835,-137 +gluon_senet154,29.877,70.123,47.894,52.106,115.09,224,0.875,bicubic,-51.357,-47.454,-56 +xception,29.865,70.135,48.686,51.314,22.86,299,0.897,bicubic,-49.187,-45.706,+96 +cs3sedarknet_l,29.814,70.186,48.987,51.013,21.91,288,0.950,bicubic,-51.960,-46.981,-94 +adv_inception_v3,29.814,70.186,47.847,52.153,23.83,299,0.875,bicubic,-47.768,-45.889,+172 +resnetaa50,29.810,70.190,48.022,51.978,25.56,288,1.000,bicubic,-51.812,-47.786,-88 +vit_base_patch16_384.augreg_in1k,29.792,70.208,48.333,51.667,86.86,384,1.000,bicubic,-51.310,-46.999,-50 +gluon_xception65,29.784,70.216,47.755,52.245,39.92,299,0.903,bicubic,-49.932,-47.105,+43 +lamhalobotnet50ts_256,29.755,70.245,48.344,51.656,22.57,256,0.950,bicubic,-51.789,-47.160,-87 +fbnetv3_d.ra2_in1k,29.743,70.257,49.472,50.528,10.31,256,0.950,bilinear,-49.937,-45.472,+46 +convnext_nano.d1h_in1k,29.698,70.302,47.920,52.080,15.59,288,1.000,bicubic,-51.772,-47.738,-81 +resmlp_36_224,29.692,70.308,48.969,51.031,44.69,224,0.875,bicubic,-50.078,-45.917,+36 +vit_base_patch32_384.augreg_in1k,29.657,70.343,48.985,51.015,88.30,384,1.000,bicubic,-49.103,-45.243,+101 +resnet50,29.639,70.361,46.729,53.271,25.56,224,0.950,bicubic,-50.735,-47.885,-6 +resnetblur50,29.625,70.375,48.250,51.750,25.56,224,0.875,bicubic,-49.661,-46.388,+65 +resnetv2_50d_gn,29.611,70.388,47.784,52.216,25.57,288,0.950,bicubic,-52.205,-48.140,-109 +jx_nest_tiny,29.543,70.457,46.994,53.006,17.06,224,0.875,bicubic,-51.871,-48.622,-82 +resnet50_gn,29.537,70.463,48.305,51.695,25.56,224,0.940,bicubic,-50.515,-46.641,+16 +efficientnet_em.ra2_in1k,29.486,70.514,48.946,51.054,6.90,240,0.882,bicubic,-49.766,-45.848,+65 +cs3darknet_l,29.474,70.526,48.217,51.783,21.16,288,0.950,bicubic,-51.422,-47.453,-47 +resnext101_32x8d,29.439,70.561,48.486,51.514,88.79,224,0.875,bilinear,-49.869,-46.032,+53 +gcresnext50ts,29.433,70.567,47.904,52.096,15.67,256,0.900,bicubic,-51.147,-47.266,-35 +coat_lite_mini,29.433,70.567,47.724,52.276,11.01,224,0.900,bicubic,-49.655,-46.880,+73 +ssl_resnet50,29.423,70.577,49.781,50.219,25.56,224,0.875,bilinear,-49.799,-45.051,+61 +deit_small_patch16_224,29.421,70.579,48.256,51.744,22.05,224,0.900,bicubic,-50.435,-46.796,+17 +sebotnet33ts_256,29.421,70.579,47.156,52.844,13.70,256,0.940,bicubic,-51.729,-48.018,-74 +nf_regnet_b1,29.390,70.611,49.425,50.575,10.22,288,0.900,bicubic,-49.903,-45.323,+52 +cait_xxs24_384,29.387,70.612,48.753,51.247,12.03,384,1.000,bicubic,-51.578,-46.893,-61 +edgenext_small_rw,29.352,70.648,48.743,51.257,7.83,320,1.000,bicubic,-51.104,-46.449,-27 +swin_tiny_patch4_window7_224,29.334,70.666,47.602,52.398,28.29,224,0.900,bicubic,-52.044,-47.938,-91 +resnet34d,29.328,70.671,48.409,51.591,21.82,224,0.875,bicubic,-47.788,-44.973,+164 +convnext_nano_ols.d1h_in1k,29.317,70.683,47.484,52.516,15.65,288,1.000,bicubic,-52.293,-48.156,-112 +cait_xxs24_224,29.303,70.697,48.535,51.465,11.96,224,1.000,bicubic,-49.083,-45.775,+101 
+pvt_v2_b1,29.242,70.758,48.977,51.023,14.01,224,0.900,bicubic,-49.452,-45.515,+83 +maxvit_rmlp_pico_rw_256,29.226,70.774,47.721,52.279,7.52,256,0.950,bicubic,-51.290,-47.491,-42 +gcvit_xxtiny,29.216,70.784,48.372,51.628,12.00,224,0.875,bicubic,-50.498,-46.708,+16 +ecaresnet50d_pruned,29.215,70.785,48.453,51.547,19.94,224,0.875,bicubic,-50.501,-46.427,+13 +poolformer_s24,29.177,70.823,48.069,51.931,21.39,224,0.900,bicubic,-51.139,-46.969,-24 +tresnet_l_448,29.165,70.835,47.232,52.768,55.99,448,0.875,bilinear,-53.103,-48.744,-167 +gluon_inception_v3,29.124,70.876,46.955,53.045,23.83,299,0.875,bicubic,-49.682,-47.415,+70 +lambda_resnet50ts,29.118,70.882,46.973,53.027,21.54,256,0.950,bicubic,-52.048,-48.999,-91 +eca_resnet33ts,29.095,70.905,48.792,51.208,19.68,256,0.900,bicubic,-50.983,-46.178,-10 +xception71,29.047,70.953,47.405,52.595,42.34,299,0.903,bicubic,-50.826,-47.517,-2 +hrnet_w64,28.989,71.011,47.142,52.858,128.06,224,0.875,bilinear,-50.485,-47.510,+22 +xcit_tiny_12_p8_224,28.953,71.047,47.515,52.485,6.71,224,1.000,bicubic,-50.741,-47.537,+8 +regnetz_b16,28.941,71.059,47.246,52.754,9.72,288,0.940,bicubic,-51.775,-48.232,-63 +cs3darknet_focus_l,28.928,71.072,47.633,52.367,21.15,288,0.950,bicubic,-51.956,-48.049,-72 +tf_efficientnet_b0.ns_jft_in1k,28.902,71.098,49.011,50.989,5.29,224,0.875,bicubic,-49.756,-45.365,+72 +tf_efficientnet_b1.aa_in1k,28.886,71.114,47.503,52.497,7.79,240,0.882,bicubic,-49.940,-46.695,+60 +gluon_resnet101_v1b,28.878,71.121,46.389,53.611,44.55,224,0.875,bicubic,-50.427,-48.135,+26 +mobilevitv2_150_384_in22ft1k,28.873,71.127,47.924,52.076,10.59,384,1.000,bicubic,-53.721,-48.394,-217 +vit_small_patch32_384.augreg_in21k_ft_in1k,28.871,71.129,48.887,51.113,22.92,384,1.000,bicubic,-51.609,-46.711,-56 +resnetv2_50d_evos,28.867,71.133,46.672,53.328,25.59,288,0.950,bicubic,-53.109,-49.244,-159 +skresnext50_32x4d,28.818,71.182,46.497,53.503,27.48,224,0.875,bicubic,-51.338,-48.145,-25 +sehalonet33ts,28.778,71.222,46.586,53.414,13.69,256,0.940,bicubic,-52.180,-48.690,-87 +levit_256,28.745,71.255,46.723,53.277,18.89,224,0.900,bicubic,-52.765,-48.767,-130 +tf_efficientnet_lite3.in1k,28.660,71.340,47.354,52.646,8.20,300,0.904,bilinear,-51.160,-47.560,-10 +gluon_seresnext50_32x4d,28.651,71.349,46.436,53.564,27.56,224,0.875,bicubic,-51.267,-48.386,-21 +skresnet34,28.645,71.355,47.953,52.047,22.28,224,0.875,bicubic,-48.267,-45.369,+147 +hrnet_w40,28.641,71.359,47.454,52.546,57.56,224,0.875,bilinear,-50.279,-47.016,+44 +darknetaa53,28.639,71.361,46.945,53.055,36.02,288,1.000,bilinear,-51.883,-48.377,-69 +mobilevitv2_175_384_in22ft1k,28.615,71.385,47.144,52.856,14.25,384,1.000,bicubic,-54.327,-49.282,-251 +swinv2_tiny_window8_256,28.613,71.387,46.177,53.823,28.35,256,0.900,bicubic,-53.193,-49.817,-155 +halonet50ts,28.578,71.422,46.167,53.833,22.73,256,0.940,bicubic,-53.066,-49.441,-149 +tf_efficientnetv2_b0.in1k,28.566,71.434,47.079,52.921,7.14,224,0.875,bicubic,-49.790,-46.945,+72 +tv_resnet152,28.533,71.467,47.118,52.882,60.19,224,0.875,bilinear,-49.779,-46.920,+73 +xcit_tiny_12_p16_224,28.523,71.477,47.403,52.597,6.72,224,1.000,bicubic,-48.597,-46.309,+127 +repvgg_b2,28.427,71.573,47.038,52.962,89.02,224,0.875,bilinear,-50.365,-47.376,+44 +hrnet_w48,28.413,71.587,47.586,52.414,77.47,224,0.875,bilinear,-50.887,-46.926,+9 +swinv2_cr_tiny_ns_224,28.377,71.623,45.920,54.080,28.33,224,0.900,bicubic,-53.413,-49.904,-161 +gluon_resnext50_32x4d,28.375,71.624,45.328,54.672,25.03,224,0.875,bicubic,-50.978,-49.098,+1 
+efficientnet_b2_pruned.in1k,28.362,71.638,47.051,52.949,8.31,260,0.890,bicubic,-51.554,-47.805,-34 +tf_efficientnet_b0.ap_in1k,28.346,71.654,47.531,52.469,5.29,224,0.875,bicubic,-48.740,-45.725,+124 +seresnet33ts,28.340,71.660,47.757,52.243,19.78,256,0.900,bicubic,-52.012,-47.349,-64 +darknet53,28.317,71.683,46.873,53.127,41.61,288,1.000,bicubic,-52.218,-48.547,-85 +tf_efficientnet_cc_b0_4e.in1k,28.315,71.685,47.364,52.636,13.31,224,0.875,bicubic,-48.991,-45.970,+109 +dla102x2,28.315,71.685,46.761,53.239,41.28,224,0.875,bilinear,-51.133,-47.879,-8 +dla169,28.313,71.687,47.391,52.609,53.39,224,0.875,bilinear,-50.375,-46.945,+40 +mixnet_xl.ra_in1k,28.287,71.713,46.702,53.298,11.90,224,0.875,bicubic,-52.189,-48.234,-82 +gluon_resnet50_v1d,28.246,71.754,45.878,54.122,25.58,224,0.875,bicubic,-50.828,-48.592,+17 +wide_resnet101_2,28.108,71.892,46.401,53.599,126.89,224,0.875,bilinear,-50.748,-47.881,+26 +gluon_resnet101_v1c,28.104,71.896,45.961,54.039,44.57,224,0.875,bicubic,-51.430,-48.617,-20 +regnetx_320,28.093,71.907,45.126,54.874,107.81,224,0.875,bicubic,-52.153,-49.900,-62 +densenet161,28.081,71.919,46.641,53.359,28.68,224,0.875,bicubic,-49.277,-46.997,+101 +regnety_320,28.059,71.941,45.444,54.556,145.05,224,0.875,bicubic,-52.751,-49.800,-106 +mobilevitv2_175,28.034,71.966,46.093,53.907,14.25,256,0.888,bicubic,-52.826,-49.162,-110 +gernet_s,28.022,71.978,46.723,53.277,8.17,224,0.875,bilinear,-48.894,-46.409,+118 +efficientnet_el_pruned.in1k,28.016,71.984,46.790,53.210,10.59,300,0.904,bicubic,-52.284,-48.428,-71 +levit_192,28.016,71.984,45.880,54.120,10.95,224,0.900,bicubic,-51.826,-48.906,-44 +vit_base_patch16_224.augreg_in1k,27.963,72.037,45.725,54.275,86.57,224,0.900,bicubic,-51.191,-48.375,+1 +xception41,27.888,72.112,45.890,54.110,26.97,299,0.903,bicubic,-50.628,-48.388,+32 +regnetx_160,27.817,72.183,45.617,54.383,54.28,224,0.875,bicubic,-52.039,-49.213,-49 +tf_inception_v3,27.784,72.216,45.721,54.279,23.83,299,0.875,bicubic,-50.076,-47.919,+71 +res2net101_26w_4s,27.768,72.232,45.179,54.821,45.21,224,0.875,bilinear,-51.430,-49.253,-5 +tf_efficientnetv2_b1.in1k,27.760,72.240,46.580,53.420,8.14,240,0.882,bicubic,-51.702,-48.142,-28 +vit_base_patch16_224.sam,27.709,72.291,45.106,54.894,86.57,224,0.900,bicubic,-52.533,-49.650,-74 +fbnetv3_b.ra2_in1k,27.676,72.324,46.989,53.011,8.60,256,0.950,bilinear,-51.474,-47.757,-5 +repvgg_b1,27.656,72.344,46.531,53.469,57.42,224,0.875,bilinear,-50.710,-47.567,+38 +hrnet_w44,27.621,72.379,45.837,54.163,67.06,224,0.875,bilinear,-51.275,-48.531,+7 +mobilevitv2_200,27.615,72.385,45.762,54.238,18.45,256,0.888,bicubic,-53.521,-49.604,-147 +gcresnet33ts,27.591,72.409,46.191,53.809,19.88,256,0.900,bicubic,-52.491,-48.807,-70 +inception_v3,27.556,72.444,45.267,54.733,23.83,299,0.875,bicubic,-49.884,-48.209,+80 +resmlp_24_224,27.521,72.479,45.696,54.304,30.02,224,0.875,bicubic,-51.853,-48.851,-33 +pit_xs_224,27.491,72.509,45.900,54.100,10.62,224,0.900,bicubic,-50.691,-48.268,+41 +regnetx_080,27.405,72.595,45.002,54.998,39.57,224,0.875,bicubic,-51.789,-49.558,-15 +hrnet_w30,27.381,72.619,46.554,53.446,37.71,224,0.875,bilinear,-50.825,-47.668,+38 +hrnet_w32,27.369,72.631,45.994,54.006,41.23,224,0.875,bilinear,-51.081,-48.192,+22 +convnext_pico.d1_in1k,27.354,72.646,45.648,54.352,9.05,288,0.950,bicubic,-53.072,-49.410,-103 +vit_small_patch16_384.augreg_in1k,27.328,72.672,46.114,53.886,22.20,384,1.000,bicubic,-53.792,-49.460,-155 +gluon_resnet50_v1s,27.326,72.674,45.222,54.778,25.68,224,0.875,bicubic,-51.386,-49.016,+5 
+convnext_pico_ols.d1_in1k,27.305,72.695,45.644,54.356,9.06,288,1.000,bicubic,-53.159,-49.598,-112 +densenet201,27.265,72.735,46.222,53.778,20.01,224,0.875,bicubic,-50.021,-47.256,+75 +densenetblur121d,27.228,72.772,46.299,53.701,8.00,224,0.875,bicubic,-49.360,-46.893,+103 +efficientnet_b1_pruned.in1k,27.181,72.819,45.872,54.128,6.33,240,0.882,bicubic,-51.055,-47.962,+29 +tf_efficientnetv2_b2.in1k,27.163,72.837,44.570,55.430,10.10,260,0.890,bicubic,-53.045,-50.472,-90 +vit_base_patch32_224.augreg_in1k,27.141,72.859,45.175,54.825,88.22,224,0.900,bicubic,-47.763,-46.603,+136 +resnet33ts,27.134,72.866,45.338,54.662,19.68,256,0.900,bicubic,-52.080,-49.236,-30 +resnetrs50,27.110,72.890,45.029,54.971,35.69,224,0.910,bicubic,-52.782,-49.939,-78 +rexnet_130,27.094,72.906,45.933,54.067,7.56,224,0.875,bicubic,-52.406,-48.749,-56 +res2net50_26w_8s,27.078,72.921,44.428,55.572,48.40,224,0.875,bilinear,-52.122,-49.940,-32 +dla102x,27.061,72.939,45.474,54.526,26.31,224,0.875,bilinear,-51.449,-48.754,+3 +resnet32ts,27.037,72.963,45.253,54.747,17.96,256,0.900,bicubic,-51.967,-49.103,-22 +gmixer_24_224,27.027,72.972,44.361,55.639,24.72,224,0.875,bicubic,-51.008,-49.303,+29 +tv_resnet101,26.963,73.037,45.234,54.766,44.55,224,0.875,bilinear,-50.411,-48.306,+60 +resnext50d_32x4d,26.876,73.124,44.436,55.564,25.05,224,0.875,bicubic,-52.800,-50.430,-68 +regnetx_120,26.868,73.132,44.682,55.318,46.11,224,0.875,bicubic,-52.728,-50.056,-66 +rexnet_100,26.831,73.169,45.369,54.631,4.80,224,0.875,bicubic,-51.027,-48.501,+38 +densenet169,26.829,73.171,45.373,54.627,14.15,224,0.875,bicubic,-49.077,-47.653,+101 +legacy_seresnext101_32x4d,26.811,73.189,43.497,56.503,48.96,224,0.875,bilinear,-53.417,-51.521,-106 +tinynet_a.in1k,26.807,73.193,45.098,54.902,6.19,192,0.875,bicubic,-50.845,-48.438,+40 +regnety_120,26.788,73.212,44.454,55.546,51.82,224,0.875,bicubic,-53.578,-50.672,-122 +regnetx_064,26.784,73.216,44.927,55.073,26.21,224,0.875,bicubic,-52.288,-49.531,-34 +regnetx_032,26.703,73.297,45.236,54.764,15.30,224,0.875,bicubic,-51.469,-48.852,+13 +legacy_seresnet152,26.676,73.324,43.947,56.053,66.82,224,0.875,bilinear,-51.984,-50.423,-15 +densenet121,26.664,73.336,45.900,54.100,7.98,224,0.875,bicubic,-48.914,-46.752,+100 +efficientnet_es.ra_in1k,26.621,73.379,45.112,54.888,5.44,224,0.875,bicubic,-51.445,-48.814,+15 +res2net50_26w_6s,26.595,73.405,43.990,56.010,37.05,224,0.875,bilinear,-51.975,-50.134,-15 +repvgg_b1g4,26.579,73.421,45.084,54.916,39.97,224,0.875,bilinear,-51.015,-48.742,+37 +dla60x,26.552,73.448,45.023,54.977,17.35,224,0.875,bilinear,-51.694,-48.995,+1 +coat_lite_tiny,26.507,73.493,44.644,55.356,5.72,224,0.900,bicubic,-51.005,-49.272,+39 +tf_efficientnet_b0.aa_in1k,26.485,73.515,45.646,54.354,5.29,224,0.875,bicubic,-50.363,-47.582,+66 +res2net50_14w_8s,26.483,73.517,44.371,55.629,25.06,224,0.875,bilinear,-51.667,-49.477,+5 +mobilenetv3_large_100.miil_in21k_ft_in1k,26.481,73.519,44.473,55.527,5.48,224,0.875,bilinear,-51.435,-48.437,+16 +gluon_resnet50_v1b,26.436,73.564,44.035,55.965,25.56,224,0.875,bicubic,-51.144,-49.681,+33 +tf_efficientnet_el.in1k,26.357,73.643,44.175,55.825,10.59,300,0.904,bicubic,-53.893,-50.953,-125 +lambda_resnet26t,26.348,73.653,44.408,55.592,10.96,256,0.940,bicubic,-52.749,-50.184,-52 +levit_128,26.332,73.668,44.096,55.904,9.21,224,0.900,bicubic,-52.154,-49.914,-22 +resmlp_big_24_224,26.318,73.682,43.556,56.444,129.14,224,0.875,bicubic,-54.710,-51.466,-186 +resmlp_12_distilled_224,26.314,73.686,44.874,55.126,15.35,224,0.875,bicubic,-51.630,-48.684,+9 
+regnetx_040,26.243,73.757,44.438,55.562,22.12,224,0.875,bicubic,-52.239,-49.806,-24 +mobilevitv2_150,26.190,73.810,43.764,56.236,10.59,256,0.888,bicubic,-54.186,-51.296,-144 +crossvit_9_dagger_240,26.183,73.817,44.544,55.456,8.78,240,0.875,bicubic,-50.797,-49.066,+49 +vit_small_patch32_224.augreg_in21k_ft_in1k,26.151,73.849,45.104,54.896,22.88,224,0.900,bicubic,-49.839,-48.168,+73 +dpn68,26.129,73.871,44.228,55.772,12.61,224,0.875,bicubic,-50.189,-48.750,+68 +efficientnet_b1.ft_in1k,26.061,73.939,44.080,55.920,7.79,256,1.000,bicubic,-52.733,-50.262,-44 +mobilevitv2_125,26.029,73.971,43.670,56.330,7.48,256,0.888,bicubic,-53.655,-51.180,-101 +lambda_resnet26rpt_256,26.025,73.975,44.188,55.812,10.99,256,0.940,bicubic,-52.945,-50.242,-55 +hrnet_w18,25.986,74.014,44.813,55.187,21.30,224,0.875,bilinear,-50.772,-48.631,+52 +hardcorenas_f,25.951,74.049,44.220,55.780,8.20,224,0.875,bilinear,-52.153,-49.582,-10 +vit_small_patch16_224.augreg_in1k,25.949,74.051,43.988,56.012,22.05,224,0.900,bicubic,-52.897,-50.296,-52 +resnet34,25.888,74.112,43.982,56.018,21.80,224,0.875,bilinear,-49.222,-48.302,+86 +res2net50_26w_4s,25.866,74.134,43.155,56.845,25.70,224,0.875,bilinear,-52.098,-50.699,-5 +resnet26t,25.852,74.148,43.953,56.047,16.01,256,0.940,bicubic,-52.030,-49.888,-2 +tresnet_m_448,25.852,74.148,42.874,57.126,31.39,448,0.875,bilinear,-55.862,-52.698,-252 +coat_tiny,25.843,74.157,43.276,56.724,5.50,224,0.900,bicubic,-52.591,-50.761,-34 +hardcorenas_c,25.815,74.185,44.772,55.228,5.52,224,0.875,bilinear,-51.239,-48.386,+32 +gluon_resnet50_v1c,25.784,74.216,43.031,56.969,25.58,224,0.875,bicubic,-52.228,-50.957,-13 +halonet26t,25.764,74.236,43.229,56.771,12.48,256,0.950,bicubic,-53.336,-51.083,-75 +selecsls60,25.729,74.272,44.065,55.935,30.67,224,0.875,bicubic,-52.254,-49.764,-13 +hardcorenas_e,25.662,74.338,43.412,56.588,8.07,224,0.875,bilinear,-52.132,-50.282,-2 +dla60_res2net,25.652,74.348,43.599,56.401,20.85,224,0.875,bilinear,-52.812,-50.607,-43 +dla60_res2next,25.640,74.360,43.670,56.330,17.03,224,0.875,bilinear,-52.800,-50.482,-42 +poolformer_s12,25.630,74.370,44.137,55.863,11.92,224,0.900,bicubic,-51.600,-49.367,+18 +ecaresnet26t,25.538,74.462,43.660,56.340,16.01,320,0.950,bicubic,-54.316,-51.424,-130 +resmlp_12_224,25.518,74.482,44.324,55.676,15.35,224,0.875,bicubic,-51.136,-48.856,+37 +convnext_femto.d1_in1k,25.516,74.484,43.683,56.317,5.22,288,0.950,bicubic,-53.188,-50.751,-60 +mixnet_l.ft_in1k,25.512,74.488,43.455,56.545,7.33,224,0.875,bicubic,-53.464,-50.727,-76 +tf_efficientnet_lite1.in1k,25.499,74.501,43.585,56.415,5.42,240,0.882,bicubic,-51.143,-49.641,+35 +eca_halonext26ts,25.479,74.521,43.194,56.806,10.76,256,0.940,bicubic,-54.007,-51.404,-115 +cs3darknet_focus_m,25.475,74.525,43.750,56.250,9.30,288,0.950,bicubic,-51.803,-50.220,+9 +bat_resnext26ts,25.463,74.537,43.210,56.790,10.73,256,0.900,bicubic,-52.779,-50.890,-39 +tv_resnext50_32x4d,25.455,74.545,42.787,57.213,25.03,224,0.875,bilinear,-52.165,-50.909,-9 +botnet26t_256,25.444,74.556,42.636,57.364,12.49,256,0.950,bicubic,-53.828,-51.892,-100 +repvgg_a2,25.436,74.564,43.939,56.061,28.21,224,0.875,bilinear,-51.024,-49.065,+36 +tf_mixnet_l.in1k,25.422,74.578,42.534,57.466,7.33,224,0.875,bicubic,-53.352,-51.464,-72 +hardcorenas_b,25.402,74.598,44.190,55.810,5.18,224,0.875,bilinear,-51.136,-48.564,+31 +res2next50,25.389,74.611,42.508,57.492,24.67,224,0.875,bilinear,-52.857,-51.384,-46 +convnext_femto_ols.d1_in1k,25.387,74.613,43.153,56.847,5.23,288,0.950,bicubic,-53.547,-51.379,-85 
+legacy_seresnet101,25.334,74.666,42.825,57.175,49.33,224,0.875,bilinear,-53.048,-51.439,-54 +selecsls60b,25.332,74.668,43.559,56.441,32.77,224,0.875,bicubic,-53.080,-50.615,-57 +resnetv2_50x1_bitm,25.324,74.676,45.359,54.641,25.55,448,1.000,bilinear,-55.018,-50.325,-180 +dla102,25.316,74.684,43.827,56.173,33.27,224,0.875,bilinear,-52.716,-50.119,-39 +hardcorenas_d,25.300,74.700,43.121,56.879,7.50,224,0.875,bilinear,-52.132,-50.363,-10 +resnest14d,25.284,74.716,44.114,55.886,10.61,224,0.875,bilinear,-50.222,-48.404,+43 +legacy_seresnext50_32x4d,25.210,74.790,41.936,58.064,27.56,224,0.875,bilinear,-53.868,-52.500,-99 +mixer_b16_224,25.121,74.879,41.229,58.771,59.88,224,0.875,bicubic,-51.479,-50.999,+18 +res2net50_48w_2s,25.027,74.973,42.208,57.792,25.29,224,0.875,bilinear,-52.495,-51.346,-19 +efficientnet_b0.ra_in1k,25.015,74.985,42.787,57.213,5.29,224,0.875,bicubic,-52.683,-50.745,-29 +gluon_resnet34_v1b,24.939,75.061,42.243,57.757,21.80,224,0.875,bicubic,-49.649,-49.747,+60 +mobilenetv2_120d.ra_in1k,24.937,75.063,43.058,56.942,5.83,224,0.875,bicubic,-52.347,-50.434,-12 +dla60,24.933,75.067,43.296,56.704,22.04,224,0.875,bilinear,-52.099,-50.022,-2 +eca_botnext26ts_256,24.870,75.130,42.946,57.054,10.59,256,0.950,bicubic,-54.404,-51.668,-120 +regnety_016,24.811,75.189,42.616,57.384,11.20,224,0.875,bicubic,-53.051,-51.104,-40 +xcit_nano_12_p8_224_dist,24.803,75.197,43.076,56.924,3.05,224,1.000,bicubic,-51.521,-50.014,+17 +seresnext26ts,24.683,75.317,43.098,56.902,10.39,256,0.900,bicubic,-53.183,-50.692,-43 +eca_resnext26ts,24.660,75.340,42.842,57.158,10.30,256,0.900,bicubic,-52.792,-50.724,-25 +cs3darknet_m,24.630,75.370,42.966,57.034,9.31,288,0.950,bicubic,-53.006,-51.048,-36 +mobilevitv2_100,24.552,75.448,42.911,57.089,4.90,256,0.888,bicubic,-53.538,-51.253,-58 +tf_efficientnet_lite2.in1k,24.530,75.470,42.280,57.720,6.09,260,0.890,bicubic,-52.938,-51.474,-29 +skresnet18,24.483,75.517,42.536,57.464,11.96,224,0.875,bicubic,-48.555,-48.632,+66 +regnetx_016,24.473,75.527,42.514,57.486,9.19,224,0.875,bicubic,-52.477,-50.906,-9 +pit_ti_distilled_224,24.406,75.594,42.730,57.270,5.10,224,0.900,bicubic,-50.124,-49.366,+48 +tf_efficientnet_lite0.in1k,24.373,75.627,42.487,57.513,4.65,224,0.875,bicubic,-50.457,-49.689,+40 +hardcorenas_a,24.369,75.631,43.284,56.716,5.26,224,0.875,bilinear,-51.547,-49.230,+14 +tv_resnet50,24.070,75.930,41.313,58.687,25.56,224,0.875,bilinear,-52.068,-51.551,+9 +levit_128s,24.058,75.942,41.007,58.993,7.78,224,0.900,bicubic,-52.472,-51.859,+2 +legacy_seresnet34,24.027,75.973,41.909,58.091,21.96,224,0.875,bilinear,-50.781,-50.215,+37 +xcit_nano_12_p16_384_dist,24.011,75.989,42.324,57.676,3.05,384,1.000,bicubic,-51.447,-50.370,+21 +xcit_nano_12_p8_384_dist,23.950,76.050,41.940,58.060,3.05,384,1.000,bicubic,-53.870,-52.096,-53 +gcresnext26ts,23.944,76.056,41.353,58.647,10.48,256,0.900,bicubic,-53.870,-52.481,-53 +resnet18d,23.929,76.071,42.302,57.698,11.71,224,0.875,bicubic,-48.331,-48.394,+65 +efficientnet_lite0.ra_in1k,23.909,76.091,42.088,57.912,4.65,224,0.875,bicubic,-51.575,-50.422,+16 +resnext26ts,23.864,76.136,41.107,58.893,10.30,256,0.900,bicubic,-52.916,-52.023,-14 +tv_densenet121,23.844,76.156,41.925,58.075,7.98,224,0.875,bicubic,-50.894,-50.225,+31 +efficientnet_es_pruned.in1k,23.828,76.172,41.995,58.005,5.44,224,0.875,bicubic,-51.172,-50.453,+25 +mobilenetv2_140.ra_in1k,23.712,76.288,41.477,58.523,6.11,224,0.875,bicubic,-52.804,-51.519,-7 +mixnet_m.ft_in1k,23.710,76.290,41.141,58.859,5.01,224,0.875,bicubic,-53.550,-52.284,-37 
+dla34,23.669,76.331,41.551,58.449,15.74,224,0.875,bilinear,-50.961,-50.527,+30 +legacy_seresnet50,23.651,76.349,40.091,59.909,28.09,224,0.875,bilinear,-53.978,-53.657,-57 +convnext_atto.d2_in1k,23.591,76.409,41.076,58.924,3.70,288,0.950,bicubic,-53.423,-52.624,-30 +ese_vovnet19b_dw,23.535,76.465,41.288,58.712,6.54,224,0.875,bicubic,-53.263,-51.980,-23 +tf_mixnet_m.in1k,23.484,76.516,40.989,59.011,5.01,224,0.875,bicubic,-53.458,-52.163,-28 +tv_resnet34,23.473,76.527,41.367,58.633,21.80,224,0.875,bilinear,-49.839,-50.059,+40 +tf_efficientnet_em.in1k,23.359,76.641,40.404,59.596,6.90,240,0.882,bicubic,-54.771,-53.640,-86 +selecsls42b,23.357,76.643,40.677,59.323,32.46,224,0.875,bicubic,-53.817,-52.713,-42 +repvgg_b0,23.316,76.684,41.182,58.818,15.82,224,0.875,bilinear,-51.837,-51.236,+8 +xcit_nano_12_p16_224_dist,23.253,76.747,41.376,58.624,3.05,224,1.000,bicubic,-49.049,-49.486,+48 +convnext_atto_ols.a2_in1k,23.131,76.869,40.873,59.127,3.70,288,0.950,bicubic,-54.085,-52.807,-46 +mobilenetv2_110d.ra_in1k,23.066,76.934,40.716,59.284,4.52,224,0.875,bicubic,-51.970,-51.470,+10 +vit_base_patch32_224.sam,23.048,76.952,39.572,60.428,88.22,224,0.900,bicubic,-50.642,-51.442,+29 +tinynet_b.in1k,23.015,76.985,40.975,59.025,3.73,188,0.875,bicubic,-51.959,-51.213,+10 +deit_tiny_distilled_patch16_224,22.718,77.282,40.771,59.229,5.91,224,0.900,bicubic,-51.792,-51.119,+19 +mobilenetv3_large_100.ra_in1k,22.655,77.345,40.781,59.219,5.48,224,0.875,bicubic,-53.111,-51.761,-13 +mobilenetv3_rw.rmsp_in1k,22.630,77.370,40.374,59.626,5.48,224,0.875,bicubic,-53.004,-52.334,-11 +edgenext_x_small,22.598,77.402,39.500,60.500,2.34,288,1.000,bicubic,-53.090,-53.266,-14 +tf_mobilenetv3_large_100.in1k,22.569,77.431,39.767,60.233,5.48,224,0.875,bilinear,-52.949,-52.839,-10 +mobilevit_s,22.468,77.531,38.635,61.365,5.58,256,0.900,bicubic,-55.843,-55.511,-109 +xcit_nano_12_p8_224,22.413,77.587,40.661,59.339,3.05,224,1.000,bicubic,-51.501,-51.511,+18 +tf_efficientnet_es.in1k,22.413,77.587,39.095,60.905,5.44,224,0.875,bicubic,-54.180,-54.107,-33 +hrnet_w18_small_v2,22.337,77.663,39.861,60.139,15.60,224,0.875,bilinear,-52.777,-52.555,-4 +convit_tiny,22.282,77.718,39.669,60.331,5.71,224,0.875,bicubic,-50.834,-52.045,+24 +regnety_008,22.119,77.881,38.900,61.100,6.26,224,0.875,bicubic,-54.197,-54.166,-29 +seresnext26t_32x4d,21.991,78.009,38.482,61.518,16.81,224,0.875,bicubic,-55.995,-55.264,-98 +regnety_006,21.971,78.029,38.955,61.045,6.06,224,0.875,bicubic,-53.275,-53.577,-11 +vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k,21.954,78.046,39.405,60.595,6.36,384,1.000,bicubic,-53.998,-53.855,-28 +regnetx_008,21.940,78.060,38.928,61.072,7.26,224,0.875,bicubic,-53.098,-53.408,-8 +resnet26d,21.907,78.094,38.619,61.381,16.01,224,0.875,bicubic,-54.789,-54.531,-46 +semnasnet_100.rmsp_in1k,21.903,78.097,38.600,61.400,3.89,224,0.875,bicubic,-53.545,-54.004,-17 +pit_ti_224,21.875,78.125,39.541,60.459,4.85,224,0.900,bicubic,-51.037,-51.861,+20 +pvt_v2_b0,21.838,78.162,40.142,59.858,3.67,224,0.900,bicubic,-48.818,-50.066,+38 +regnetx_006,21.738,78.263,38.904,61.096,6.20,224,0.875,bicubic,-52.115,-52.768,+7 +vit_tiny_patch16_384.augreg_in21k_ft_in1k,21.708,78.292,39.329,60.671,5.79,384,1.000,bicubic,-56.722,-55.213,-130 +crossvit_9_240,21.683,78.317,39.270,60.730,8.55,240,0.875,bicubic,-52.281,-52.698,+3 +vgg19_bn,21.628,78.373,39.283,60.717,143.68,224,0.875,bilinear,-52.587,-52.559,-2 +ghostnet_100,21.620,78.380,38.692,61.308,5.18,224,0.875,bilinear,-52.358,-52.764,0 
+semnasnet_075.rmsp_in1k,21.572,78.428,38.928,61.072,2.91,224,0.875,bicubic,-51.402,-52.208,+11 +gluon_resnet18_v1b,21.549,78.451,38.869,61.131,11.69,224,0.875,bicubic,-49.287,-50.891,+30 +mobilevitv2_075,21.541,78.459,38.633,61.367,2.87,256,0.888,bicubic,-54.081,-54.135,-34 +fbnetc_100.rmsp_in1k,21.484,78.516,38.161,61.839,5.57,224,0.875,bilinear,-53.640,-54.224,-24 +xcit_nano_12_p16_224,21.437,78.563,39.796,60.204,3.05,224,1.000,bicubic,-48.517,-49.958,+32 +mnasnet_100.rmsp_in1k,21.350,78.650,37.719,62.281,4.38,224,0.875,bicubic,-53.308,-54.395,-15 +resnet26,21.295,78.705,38.018,61.982,16.00,224,0.875,bicubic,-53.997,-54.552,-30 +lcnet_100.ra2_in1k,21.293,78.707,38.837,61.163,2.95,224,0.875,bicubic,-50.821,-51.541,+16 +ssl_resnet18,21.278,78.722,39.113,60.887,11.69,224,0.875,bilinear,-51.332,-52.303,+6 +mixnet_s.ft_in1k,21.254,78.746,38.187,61.813,4.13,224,0.875,bicubic,-54.738,-54.609,-50 +seresnext26d_32x4d,21.252,78.748,37.311,62.689,16.81,224,0.875,bicubic,-56.350,-56.297,-102 +legacy_seresnext26_32x4d,21.093,78.907,37.633,62.367,16.79,224,0.875,bicubic,-56.011,-55.683,-81 +crossvit_tiny_240,21.050,78.950,38.055,61.945,7.01,240,0.875,bicubic,-52.274,-53.861,-6 +regnetx_004,20.898,79.102,37.566,62.434,5.16,224,0.875,bicubic,-51.498,-53.264,+2 +spnasnet_100.rmsp_in1k,20.863,79.137,37.896,62.104,4.42,224,0.875,bilinear,-53.221,-53.922,-17 +legacy_seresnet18,20.837,79.162,37.619,62.381,11.78,224,0.875,bicubic,-50.907,-52.715,+12 +mobilenetv2_100.ra_in1k,20.773,79.227,37.759,62.241,3.50,224,0.875,bicubic,-52.197,-53.257,-4 +tf_mixnet_s.in1k,20.470,79.530,36.607,63.393,4.13,224,0.875,bicubic,-55.180,-56.021,-51 +vit_tiny_patch16_224.augreg_in21k_ft_in1k,20.458,79.542,37.597,62.403,5.72,224,0.900,bicubic,-54.996,-55.251,-44 +regnety_004,20.415,79.585,37.002,62.998,4.34,224,0.875,bicubic,-53.619,-54.750,-21 +hrnet_w18_small,20.368,79.632,37.093,62.907,13.19,224,0.875,bilinear,-51.974,-53.585,-2 +tf_mobilenetv3_large_075.in1k,20.366,79.634,36.764,63.236,3.99,224,0.875,bilinear,-53.072,-54.586,-17 +resnet18,20.228,79.772,37.261,62.739,11.69,224,0.875,bilinear,-49.520,-51.817,+16 +mixer_l16_224,20.173,79.827,32.952,67.048,208.20,224,0.875,bicubic,-51.885,-54.716,+1 +deit_tiny_patch16_224,20.162,79.838,37.546,62.454,5.72,224,0.900,bicubic,-52.007,-53.572,-2 +tf_mobilenetv3_large_minimal_100.in1k,20.122,79.878,36.908,63.092,3.92,224,0.875,bilinear,-52.126,-53.722,-4 +vgg16_bn,19.959,80.041,36.301,63.699,138.37,224,0.875,bilinear,-53.391,-55.205,-21 +vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k,19.334,80.666,36.047,63.953,6.34,224,0.900,bicubic,-52.454,-54.781,-1 +tinynet_c.in1k,19.252,80.748,35.994,64.006,2.46,184,0.875,bicubic,-51.980,-53.754,+2 +edgenext_xx_small,18.876,81.124,35.155,64.845,1.33,288,1.000,bicubic,-52.990,-55.389,-4 +mobilevit_xs,18.295,81.705,33.214,66.787,2.32,256,0.900,bicubic,-56.349,-59.139,-39 +lcnet_075.ra2_in1k,18.167,81.833,34.410,65.590,2.36,224,0.875,bicubic,-50.651,-53.960,+10 +vgg19,17.929,82.071,33.054,66.946,143.67,224,0.875,bilinear,-54.439,-57.818,-16 +vgg13_bn,17.802,82.198,34.039,65.961,133.05,224,0.875,bilinear,-53.792,-56.343,-4 +vgg16,17.540,82.460,32.773,67.227,138.36,224,0.875,bilinear,-54.054,-57.603,-6 +regnety_002,17.450,82.550,32.431,67.569,3.16,224,0.875,bicubic,-52.802,-57.109,-1 +vgg11_bn,17.403,82.597,33.011,66.989,132.87,224,0.875,bilinear,-52.957,-56.791,-3 +mobilevitv2_050,17.300,82.700,33.003,66.997,1.37,256,0.888,bicubic,-52.840,-56.923,-2 +resnet10t,17.279,82.721,33.078,66.922,5.44,224,0.950,bilinear,-51.015,-55.000,+5 
+regnetx_002,16.962,83.038,32.225,67.775,2.68,224,0.875,bicubic,-51.800,-56.331,+3 +mobilenetv3_small_100.lamb_in1k,16.807,83.193,32.524,67.476,2.54,224,0.875,bicubic,-50.845,-55.112,+6 +tinynet_d.in1k,16.674,83.326,32.457,67.543,2.34,152,0.875,bicubic,-50.288,-54.609,+6 +mobilenetv2_050.lamb_in1k,16.666,83.334,31.952,68.048,1.97,224,0.875,bicubic,-49.276,-54.130,+8 +mnasnet_small.lamb_in1k,16.634,83.366,31.921,68.079,2.03,224,0.875,bicubic,-49.572,-54.587,+5 +resnet14t,16.467,83.533,30.732,69.268,10.08,224,0.950,bilinear,-55.883,-59.608,-27 +dla60x_c,16.310,83.690,31.761,68.239,1.32,224,0.875,bilinear,-51.582,-56.665,0 +tf_mobilenetv3_small_100.in1k,16.227,83.772,31.223,68.777,2.54,224,0.875,bilinear,-51.694,-56.441,-2 +vgg13,16.100,83.900,30.985,69.015,133.05,224,0.875,bilinear,-53.826,-58.261,-10 +vgg11,15.728,84.272,30.453,69.547,132.86,224,0.875,bilinear,-53.296,-58.175,-9 +mobilenetv3_small_075.lamb_in1k,14.954,85.046,29.739,70.261,2.04,224,0.875,bicubic,-50.292,-55.697,+3 +tf_mobilenetv3_small_075.in1k,14.944,85.056,29.572,70.428,2.04,224,0.875,bilinear,-50.772,-56.558,+1 +dla46_c,14.657,85.343,29.380,70.620,1.30,224,0.875,bilinear,-50.209,-56.912,+2 +mobilevit_xxs,14.508,85.492,28.670,71.330,1.27,256,0.900,bicubic,-54.404,-60.268,-12 +dla46x_c,14.382,85.618,29.191,70.809,1.07,224,0.875,bilinear,-51.588,-57.789,-4 +lcnet_050.ra2_in1k,14.316,85.684,28.649,71.351,1.88,224,0.875,bicubic,-48.785,-55.731,0 +tf_mobilenetv3_small_minimal_100.in1k,13.964,86.036,27.988,72.012,2.04,224,0.875,bilinear,-48.942,-56.242,0 +tinynet_e.in1k,12.671,87.329,26.389,73.611,2.04,106,0.875,bicubic,-47.185,-55.373,0 +mobilenetv3_small_050.lamb_in1k,11.034,88.966,23.473,76.527,1.59,224,0.875,bicubic,-46.856,-56.721,0 diff --git a/tests/test_models.py b/tests/test_models.py index 4c848440..141caabb 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -27,9 +27,7 @@ NON_STD_FILTERS = [ 'vit_*', 'tnt_*', 'pit_*', 'swin_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*', 'convit_*', 'levit*', 'visformer*', 'deit*', 'jx_nest_*', 'nest_*', 'xcit_*', 'crossvit_*', 'beit*', 'poolformer_*', 'volo_*', 'sequencer2d_*', 'swinv2_*', 'pvt_v2*', 'mvitv2*', 'gcvit*', 'efficientformer*', - - 'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', 'eva_*' - + 'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', 'eva_*', 'flexivit*' ] NUM_NON_STD = len(NON_STD_FILTERS) @@ -40,7 +38,7 @@ if 'GITHUB_ACTIONS' in os.environ: '*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm', '*101x3_bitm', '*50x3_bitm', '*nfnet_f3*', '*nfnet_f4*', '*nfnet_f5*', '*nfnet_f6*', '*nfnet_f7*', '*efficientnetv2_xl*', '*resnetrs350*', '*resnetrs420*', 'xcit_large_24_p8*', 'vit_huge*', 'vit_gi*', 'swin*huge*', - 'swin*giant*', 'davit*giant', 'davit*huge'] + 'swin*giant*', 'davit_giant', 'davit_huge', 'convnextv2_huge*'] NON_STD_EXCLUDE_FILTERS = ['vit_huge*', 'vit_gi*', 'swin*giant*', 'eva_giant*'] else: EXCLUDE_FILTERS = [] @@ -131,7 +129,7 @@ def test_model_backward(model_name, batch_size): @pytest.mark.timeout(300) -@pytest.mark.parametrize('model_name', list_models(exclude_filters=NON_STD_FILTERS)) +@pytest.mark.parametrize('model_name', list_models(exclude_filters=NON_STD_FILTERS, include_tags=True)) @pytest.mark.parametrize('batch_size', [1]) def test_model_default_cfgs(model_name, batch_size): """Run a single forward pass with each model""" @@ -193,7 +191,7 @@ def test_model_default_cfgs(model_name, batch_size): @pytest.mark.timeout(300) -@pytest.mark.parametrize('model_name', list_models(filter=NON_STD_FILTERS, 
exclude_filters=NON_STD_EXCLUDE_FILTERS))
+@pytest.mark.parametrize('model_name', list_models(filter=NON_STD_FILTERS, exclude_filters=NON_STD_EXCLUDE_FILTERS, include_tags=True))
 @pytest.mark.parametrize('batch_size', [1])
 def test_model_default_cfgs_non_std(model_name, batch_size):
     """Run a single forward pass with each model"""
@@ -306,7 +304,7 @@ if 'GITHUB_ACTIONS' in os.environ:  # and 'Linux' in platform.system():
 @pytest.mark.timeout(120)
-@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FEAT_FILTERS))
+@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FEAT_FILTERS, include_tags=True))
 @pytest.mark.parametrize('batch_size', [1])
 def test_model_forward_features(model_name, batch_size):
     """Run a single forward pass with each model in feature extraction mode"""
diff --git a/timm/data/auto_augment.py b/timm/data/auto_augment.py
index a7701b82..b6eacbf5 100644
--- a/timm/data/auto_augment.py
+++ b/timm/data/auto_augment.py
@@ -741,7 +741,6 @@ class RandAugment:
         self.ops = ops
         self.num_layers = num_layers
         self.choice_weights = choice_weights
-        print(self.ops, self.choice_weights)
 
     def __call__(self, img):
         # no replacement when using weighted choice
diff --git a/timm/data/dataset_factory.py b/timm/data/dataset_factory.py
index 757c2e5d..a4c18e39 100644
--- a/timm/data/dataset_factory.py
+++ b/timm/data/dataset_factory.py
@@ -151,7 +151,7 @@ def create_dataset(
     elif name.startswith('hfds/'):
         # NOTE right now, HF datasets default arrow format is a random-access Dataset,
         # There will be a IterableDataset variant too, TBD
-        ds = ImageDataset(root, reader=name, split=split, **kwargs)
+        ds = ImageDataset(root, reader=name, split=split, class_map=class_map, **kwargs)
     elif name.startswith('tfds/'):
         ds = IterableImageDataset(
             root,
diff --git a/timm/data/readers/reader_factory.py b/timm/data/readers/reader_factory.py
index 58ff56cd..226e3857 100644
--- a/timm/data/readers/reader_factory.py
+++ b/timm/data/readers/reader_factory.py
@@ -6,7 +6,7 @@ from .reader_image_in_tar import ReaderImageInTar
 
 def create_reader(name, root, split='train', **kwargs):
     name = name.lower()
-    name = name.split('/', 2)
+    name = name.split('/', 1)
     prefix = ''
     if len(name) > 1:
         prefix = name[0]
diff --git a/timm/data/readers/reader_hfds.py b/timm/data/readers/reader_hfds.py
index 901cf4bc..62ae5f4d 100644
--- a/timm/data/readers/reader_hfds.py
+++ b/timm/data/readers/reader_hfds.py
@@ -13,13 +13,14 @@ try:
 except ImportError as e:
     print("Please install Hugging Face datasets package `pip install datasets`.")
     exit(1)
+from .class_map import load_class_map
 from .reader import Reader
 
 
-def get_class_labels(info):
-    if 'label' not in info.features:
+def get_class_labels(info, label_key='label'):
+    if label_key not in info.features:
         return {}
-    class_label = info.features['label']
+    class_label = info.features[label_key]
     class_to_idx = {n: class_label.str2int(n) for n in class_label.names}
     return class_to_idx
 
@@ -32,6 +33,7 @@ class ReaderHfds(Reader):
             name,
             split='train',
             class_map=None,
+            label_key='label',
             download=False,
     ):
         """
@@ -43,12 +45,17 @@ class ReaderHfds(Reader):
             name,  # 'name' maps to path arg in hf datasets
             split=split,
             cache_dir=self.root,  # timm doesn't expect hidden cache dir for datasets, specify a path
-            #use_auth_token=True,
         )
         # leave decode for caller, plus we want easy access to original path names...
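+        # usage sketch (dataset and file names below are illustrative only):
+        #   ReaderHfds(root='data', name='beans', split='train', class_map='classes.txt')
+        # an explicit class_map takes precedence over the dataset's own label metadata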
self.dataset = self.dataset.cast_column('image', datasets.Image(decode=False)) - self.class_to_idx = get_class_labels(self.dataset.info) + self.label_key = label_key + self.remap_class = False + if class_map: + self.class_to_idx = load_class_map(class_map) + self.remap_class = True + else: + self.class_to_idx = get_class_labels(self.dataset.info, self.label_key) self.split_info = self.dataset.info.splits[split] self.num_samples = self.split_info.num_examples @@ -60,7 +67,10 @@ class ReaderHfds(Reader): else: assert 'path' in image and image['path'] image = open(image['path'], 'rb') - return image, item['label'] + label = item[self.label_key] + if self.remap_class: + label = self.class_to_idx[label] + return image, label def __len__(self): return len(self.dataset) diff --git a/timm/layers/__init__.py b/timm/layers/__init__.py index 21c641b6..6b2dabba 100644 --- a/timm/layers/__init__.py +++ b/timm/layers/__init__.py @@ -1,6 +1,7 @@ from .activations import * from .adaptive_avgmax_pool import \ adaptive_avgmax_pool2d, select_adaptive_pool2d, AdaptiveAvgMaxPool2d, SelectAdaptivePool2d +from .attention_pool2d import AttentionPool2d, RotAttentionPool2d, RotaryEmbedding from .blur_pool import BlurPool2d from .classifier import ClassifierHead, create_classifier from .cond_conv2d import CondConv2d, get_condconv_initializer @@ -25,13 +26,18 @@ from .helpers import to_ntuple, to_2tuple, to_3tuple, to_4tuple, make_divisible, from .inplace_abn import InplaceAbn from .linear import Linear from .mixed_conv2d import MixedConv2d -from .mlp import Mlp, GluMlp, GatedMlp, ConvMlp +from .mlp import Mlp, GluMlp, GatedMlp, ConvMlp, GlobalResponseNormMlp from .non_local_attn import NonLocalAttn, BatNonLocalAttn from .norm import GroupNorm, GroupNorm1, LayerNorm, LayerNorm2d -from .norm_act import BatchNormAct2d, GroupNormAct, convert_sync_batchnorm +from .norm_act import BatchNormAct2d, GroupNormAct, GroupNorm1Act, LayerNormAct, LayerNormAct2d,\ + SyncBatchNormAct, convert_sync_batchnorm, FrozenBatchNormAct2d, freeze_batch_norm_2d, unfreeze_batch_norm_2d from .padding import get_padding, get_same_padding, pad_same -from .patch_embed import PatchEmbed +from .patch_embed import PatchEmbed, resample_patch_embed from .pool2d_same import AvgPool2dSame, create_pool2d +from .pos_embed import resample_abs_pos_embed +from .pos_embed_rel import RelPosMlp, RelPosBias, RelPosBiasTf, gen_relative_position_index, gen_relative_log_coords +from .pos_embed_sincos import build_sincos2d_pos_embed, build_fourier_pos_embed, build_rotary_pos_embed, \ + FourierEmbed, RotaryEmbedding from .squeeze_excite import SEModule, SqueezeExcite, EffectiveSEModule, EffectiveSqueezeExcite from .selective_kernel import SelectiveKernel from .separable_conv import SeparableConv2d, SeparableConvNormAct diff --git a/timm/layers/attention_pool2d.py b/timm/layers/attention_pool2d.py index a13a6881..765efa08 100644 --- a/timm/layers/attention_pool2d.py +++ b/timm/layers/attention_pool2d.py @@ -13,7 +13,7 @@ import torch import torch.nn as nn from .helpers import to_2tuple -from .pos_embed import apply_rot_embed, RotaryEmbedding +from .pos_embed_sincos import apply_rot_embed, RotaryEmbedding from .weight_init import trunc_normal_ diff --git a/timm/layers/grn.py b/timm/layers/grn.py new file mode 100644 index 00000000..ae71e013 --- /dev/null +++ b/timm/layers/grn.py @@ -0,0 +1,39 @@ +""" Global Response Normalization Module + +Based on the GRN layer presented in +`ConvNeXt-V2 - Co-designing and Scaling ConvNets with Masked Autoencoders` - 
https://arxiv.org/abs/2301.00808 + +This implementation +* works for both NCHW and NHWC tensor layouts +* uses affine param names matching existing torch norm layers +* slightly improves eager mode performance via fused addcmul + +Hacked together by / Copyright 2023 Ross Wightman +""" + +import torch +from torch import nn as nn + + +class GlobalResponseNorm(nn.Module): + """ Global Response Normalization layer + """ + def __init__(self, dim, eps=1e-6, channels_last=True): + super().__init__() + self.eps = eps + if channels_last: + self.spatial_dim = (1, 2) + self.channel_dim = -1 + self.wb_shape = (1, 1, 1, -1) + else: + self.spatial_dim = (2, 3) + self.channel_dim = 1 + self.wb_shape = (1, -1, 1, 1) + + self.weight = nn.Parameter(torch.zeros(dim)) + self.bias = nn.Parameter(torch.zeros(dim)) + + def forward(self, x): + x_g = x.norm(p=2, dim=self.spatial_dim, keepdim=True) + x_n = x_g / (x_g.mean(dim=self.channel_dim, keepdim=True) + self.eps) + return x + torch.addcmul(self.bias.view(self.wb_shape), self.weight.view(self.wb_shape), x * x_n) diff --git a/timm/layers/helpers.py b/timm/layers/helpers.py index 2fa296bc..bc75ef3e 100644 --- a/timm/layers/helpers.py +++ b/timm/layers/helpers.py @@ -10,7 +10,7 @@ import collections.abc def _ntuple(n): def parse(x): if isinstance(x, collections.abc.Iterable) and not isinstance(x, str): - return x + return tuple(x) return tuple(repeat(x, n)) return parse diff --git a/timm/layers/mlp.py b/timm/layers/mlp.py index 91e80a84..d0188291 100644 --- a/timm/layers/mlp.py +++ b/timm/layers/mlp.py @@ -2,25 +2,38 @@ Hacked together by / Copyright 2020 Ross Wightman """ +from functools import partial + from torch import nn as nn +from .grn import GlobalResponseNorm from .helpers import to_2tuple class Mlp(nn.Module): """ MLP as used in Vision Transformer, MLP-Mixer and related networks """ - def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, bias=True, drop=0.): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + bias=True, + drop=0., + use_conv=False, + ): super().__init__() out_features = out_features or in_features hidden_features = hidden_features or in_features bias = to_2tuple(bias) drop_probs = to_2tuple(drop) + linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear - self.fc1 = nn.Linear(in_features, hidden_features, bias=bias[0]) + self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0]) self.act = act_layer() self.drop1 = nn.Dropout(drop_probs[0]) - self.fc2 = nn.Linear(hidden_features, out_features, bias=bias[1]) + self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1]) self.drop2 = nn.Dropout(drop_probs[1]) def forward(self, x): @@ -36,18 +49,29 @@ class GluMlp(nn.Module): """ MLP w/ GLU style gating See: https://arxiv.org/abs/1612.08083, https://arxiv.org/abs/2002.05202 """ - def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.Sigmoid, bias=True, drop=0.): + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.Sigmoid, + bias=True, + drop=0., + use_conv=False, + ): super().__init__() out_features = out_features or in_features hidden_features = hidden_features or in_features assert hidden_features % 2 == 0 bias = to_2tuple(bias) drop_probs = to_2tuple(drop) + linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear + self.chunk_dim = 1 if use_conv else -1 - self.fc1 = nn.Linear(in_features, hidden_features, 
bias=bias[0]) + self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0]) self.act = act_layer() self.drop1 = nn.Dropout(drop_probs[0]) - self.fc2 = nn.Linear(hidden_features // 2, out_features, bias=bias[1]) + self.fc2 = linear_layer(hidden_features // 2, out_features, bias=bias[1]) self.drop2 = nn.Dropout(drop_probs[1]) def init_weights(self): @@ -58,7 +82,7 @@ class GluMlp(nn.Module): def forward(self, x): x = self.fc1(x) - x, gates = x.chunk(2, dim=-1) + x, gates = x.chunk(2, dim=self.chunk_dim) x = x * self.act(gates) x = self.drop1(x) x = self.fc2(x) @@ -70,8 +94,15 @@ class GatedMlp(nn.Module): """ MLP as used in gMLP """ def __init__( - self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, - gate_layer=None, bias=True, drop=0.): + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + gate_layer=None, + bias=True, + drop=0., + ): super().__init__() out_features = out_features or in_features hidden_features = hidden_features or in_features @@ -104,8 +135,15 @@ class ConvMlp(nn.Module): """ MLP using 1x1 convs that keeps spatial dims """ def __init__( - self, in_features, hidden_features=None, out_features=None, act_layer=nn.ReLU, - norm_layer=None, bias=True, drop=0.): + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.ReLU, + norm_layer=None, + bias=True, + drop=0., + ): super().__init__() out_features = out_features or in_features hidden_features = hidden_features or in_features @@ -124,3 +162,40 @@ class ConvMlp(nn.Module): x = self.drop(x) x = self.fc2(x) return x + + +class GlobalResponseNormMlp(nn.Module): + """ MLP w/ Global Response Norm (see grn.py), nn.Linear or 1x1 Conv2d + """ + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + bias=True, + drop=0., + use_conv=False, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + bias = to_2tuple(bias) + drop_probs = to_2tuple(drop) + linear_layer = partial(nn.Conv2d, kernel_size=1) if use_conv else nn.Linear + + self.fc1 = linear_layer(in_features, hidden_features, bias=bias[0]) + self.act = act_layer() + self.drop1 = nn.Dropout(drop_probs[0]) + self.grn = GlobalResponseNorm(hidden_features, channels_last=not use_conv) + self.fc2 = linear_layer(hidden_features, out_features, bias=bias[1]) + self.drop2 = nn.Dropout(drop_probs[1]) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop1(x) + x = self.grn(x) + x = self.fc2(x) + x = self.drop2(x) + return x diff --git a/timm/layers/norm_act.py b/timm/layers/norm_act.py index ff075fbc..5ca21d18 100644 --- a/timm/layers/norm_act.py +++ b/timm/layers/norm_act.py @@ -17,6 +17,7 @@ from typing import Union, List, Optional, Any import torch from torch import nn as nn from torch.nn import functional as F +from torchvision.ops.misc import FrozenBatchNorm2d from .create_act import get_act_layer from .fast_norm import is_fast_norm, fast_group_norm, fast_layer_norm @@ -77,7 +78,7 @@ class BatchNormAct2d(nn.BatchNorm2d): if self.training and self.track_running_stats: # TODO: if statement only here to tell the jit to skip emitting this when it is None if self.num_batches_tracked is not None: # type: ignore[has-type] - self.num_batches_tracked = self.num_batches_tracked + 1 # type: ignore[has-type] + self.num_batches_tracked.add_(1) # type: ignore[has-type] if self.momentum is None: # use cumulative moving average exponential_average_factor = 1.0 / 
float(self.num_batches_tracked)
             else:  # use exponential moving average
@@ -169,6 +170,159 @@ def convert_sync_batchnorm(module, process_group=None):
     return module_output
 
 
+class FrozenBatchNormAct2d(torch.nn.Module):
+    """
+    BatchNormAct2d where the batch statistics and the affine parameters are fixed
+
+    Args:
+        num_features (int): Number of features ``C`` from an expected input of size ``(N, C, H, W)``
+        eps (float): a value added to the denominator for numerical stability. Default: 1e-5
+    """
+
+    def __init__(
+            self,
+            num_features: int,
+            eps: float = 1e-5,
+            apply_act=True,
+            act_layer=nn.ReLU,
+            inplace=True,
+            drop_layer=None,
+    ):
+        super().__init__()
+        self.eps = eps
+        self.register_buffer("weight", torch.ones(num_features))
+        self.register_buffer("bias", torch.zeros(num_features))
+        self.register_buffer("running_mean", torch.zeros(num_features))
+        self.register_buffer("running_var", torch.ones(num_features))
+
+        self.drop = drop_layer() if drop_layer is not None else nn.Identity()
+        act_layer = get_act_layer(act_layer)  # string -> nn.Module
+        if act_layer is not None and apply_act:
+            act_args = dict(inplace=True) if inplace else {}
+            self.act = act_layer(**act_args)
+        else:
+            self.act = nn.Identity()
+
+    def _load_from_state_dict(
+            self,
+            state_dict: dict,
+            prefix: str,
+            local_metadata: dict,
+            strict: bool,
+            missing_keys: List[str],
+            unexpected_keys: List[str],
+            error_msgs: List[str],
+    ):
+        num_batches_tracked_key = prefix + "num_batches_tracked"
+        if num_batches_tracked_key in state_dict:
+            del state_dict[num_batches_tracked_key]
+
+        super()._load_from_state_dict(
+            state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs
+        )
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        # move reshapes to the beginning
+        # to make it fuser-friendly
+        w = self.weight.reshape(1, -1, 1, 1)
+        b = self.bias.reshape(1, -1, 1, 1)
+        rv = self.running_var.reshape(1, -1, 1, 1)
+        rm = self.running_mean.reshape(1, -1, 1, 1)
+        scale = w * (rv + self.eps).rsqrt()
+        bias = b - rm * scale
+        x = x * scale + bias
+        x = self.act(self.drop(x))
+        return x
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.weight.shape[0]}, eps={self.eps}, act={self.act})"
+
+
+def freeze_batch_norm_2d(module):
+    """
+    Converts all `BatchNorm2d` and `SyncBatchNorm` or `BatchNormAct2d` and `SyncBatchNormAct` layers
+    of the provided module into `FrozenBatchNorm2d` or `FrozenBatchNormAct2d` respectively.
+
+    Args:
+        module (torch.nn.Module): Any PyTorch module.
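+
+    Example (usage sketch; assumes a model containing BN layers, e.g. one built
+    with `timm.create_model`)::
+
+        import timm
+        model = timm.create_model('resnet18', pretrained=False)
+        # stats and affine params of every BN layer become fixed buffers
+        model = freeze_batch_norm_2d(model)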
+
+    Returns:
+        torch.nn.Module: Resulting module
+
+    Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762
+    """
+    res = module
+    if isinstance(module, (BatchNormAct2d, SyncBatchNormAct)):
+        res = FrozenBatchNormAct2d(module.num_features)
+        res.num_features = module.num_features
+        res.affine = module.affine
+        if module.affine:
+            res.weight.data = module.weight.data.clone().detach()
+            res.bias.data = module.bias.data.clone().detach()
+        res.running_mean.data = module.running_mean.data
+        res.running_var.data = module.running_var.data
+        res.eps = module.eps
+        res.drop = module.drop
+        res.act = module.act
+    elif isinstance(module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)):
+        res = FrozenBatchNorm2d(module.num_features)
+        res.num_features = module.num_features
+        res.affine = module.affine
+        if module.affine:
+            res.weight.data = module.weight.data.clone().detach()
+            res.bias.data = module.bias.data.clone().detach()
+        res.running_mean.data = module.running_mean.data
+        res.running_var.data = module.running_var.data
+        res.eps = module.eps
+    else:
+        for name, child in module.named_children():
+            new_child = freeze_batch_norm_2d(child)
+            if new_child is not child:
+                res.add_module(name, new_child)
+    return res
+
+
+def unfreeze_batch_norm_2d(module):
+    """
+    Converts all `FrozenBatchNormAct2d` and `FrozenBatchNorm2d` layers of the provided module into
+    `BatchNormAct2d` or `BatchNorm2d` respectively. If `module` is itself an instance of one of these
+    frozen types, it is converted and returned. Otherwise, the module is walked recursively and
+    submodules are converted in place.
+
+    Args:
+        module (torch.nn.Module): Any PyTorch module.
+
+    Returns:
+        torch.nn.Module: Resulting module
+
+    Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762
+    """
+    res = module
+    if isinstance(module, FrozenBatchNormAct2d):
+        res = BatchNormAct2d(module.num_features)
+        if module.affine:
+            res.weight.data = module.weight.data.clone().detach()
+            res.bias.data = module.bias.data.clone().detach()
+        res.running_mean.data = module.running_mean.data
+        res.running_var.data = module.running_var.data
+        res.eps = module.eps
+        res.drop = module.drop
+        res.act = module.act
+    elif isinstance(module, FrozenBatchNorm2d):
+        res = torch.nn.BatchNorm2d(module.num_features)
+        if module.affine:
+            res.weight.data = module.weight.data.clone().detach()
+            res.bias.data = module.bias.data.clone().detach()
+        res.running_mean.data = module.running_mean.data
+        res.running_var.data = module.running_var.data
+        res.eps = module.eps
+    else:
+        for name, child in module.named_children():
+            new_child = unfreeze_batch_norm_2d(child)
+            if new_child is not child:
+                res.add_module(name, new_child)
+    return res
+
+
 def _num_groups(num_channels, num_groups, group_size):
     if group_size:
         assert num_channels % group_size == 0
@@ -179,10 +333,54 @@
 class GroupNormAct(nn.GroupNorm):
     # NOTE num_channel and num_groups order flipped for easier layer swaps / binding of fixed args
     def __init__(
-            self, num_channels, num_groups=32, eps=1e-5, affine=True, group_size=None,
-            apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None):
+            self,
+            num_channels,
+            num_groups=32,
+            eps=1e-5,
+            affine=True,
+            group_size=None,
+            apply_act=True,
+            act_layer=nn.ReLU,
+            inplace=True,
+            drop_layer=None,
+    ):
         super(GroupNormAct, self).__init__(
-            _num_groups(num_channels, num_groups,
group_size), num_channels, eps=eps, affine=affine) + _num_groups(num_channels, num_groups, group_size), + num_channels, + eps=eps, + affine=affine, + ) + self.drop = drop_layer() if drop_layer is not None else nn.Identity() + act_layer = get_act_layer(act_layer) # string -> nn.Module + if act_layer is not None and apply_act: + act_args = dict(inplace=True) if inplace else {} + self.act = act_layer(**act_args) + else: + self.act = nn.Identity() + self._fast_norm = is_fast_norm() + + def forward(self, x): + if self._fast_norm: + x = fast_group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + else: + x = F.group_norm(x, self.num_groups, self.weight, self.bias, self.eps) + x = self.drop(x) + x = self.act(x) + return x + + +class GroupNorm1Act(nn.GroupNorm): + def __init__( + self, + num_channels, + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): + super(GroupNorm1Act, self).__init__(1, num_channels, eps=eps, affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module if act_layer is not None and apply_act: @@ -204,8 +402,15 @@ class GroupNormAct(nn.GroupNorm): class LayerNormAct(nn.LayerNorm): def __init__( - self, normalization_shape: Union[int, List[int], torch.Size], eps=1e-5, affine=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + self, + normalization_shape: Union[int, List[int], torch.Size], + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): super(LayerNormAct, self).__init__(normalization_shape, eps=eps, elementwise_affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module @@ -228,8 +433,15 @@ class LayerNormAct(nn.LayerNorm): class LayerNormAct2d(nn.LayerNorm): def __init__( - self, num_channels, eps=1e-5, affine=True, - apply_act=True, act_layer=nn.ReLU, inplace=True, drop_layer=None): + self, + num_channels, + eps=1e-5, + affine=True, + apply_act=True, + act_layer=nn.ReLU, + inplace=True, + drop_layer=None, + ): super(LayerNormAct2d, self).__init__(num_channels, eps=eps, elementwise_affine=affine) self.drop = drop_layer() if drop_layer is not None else nn.Identity() act_layer = get_act_layer(act_layer) # string -> nn.Module diff --git a/timm/layers/patch_embed.py b/timm/layers/patch_embed.py index be8740ce..764519f2 100644 --- a/timm/layers/patch_embed.py +++ b/timm/layers/patch_embed.py @@ -2,15 +2,24 @@ A convolution based approach to patchifying a 2D image w/ embedding projection. 
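The frozen norm layers above fold the running statistics and affine parameters into a single per-channel scale and bias at forward time. A minimal sanity check of that folding and of the freeze helper (a sketch, assuming a checkout with this patch applied so these names are importable from timm.layers.norm_act):

import torch
from timm.layers.norm_act import BatchNormAct2d, FrozenBatchNormAct2d, freeze_batch_norm_2d

bn = BatchNormAct2d(8).eval()
# give the running stats non-trivial values so the comparison is meaningful
bn.running_mean.uniform_(-1., 1.)
bn.running_var.uniform_(0.5, 2.)

frozen = freeze_batch_norm_2d(bn)
assert isinstance(frozen, FrozenBatchNormAct2d)

x = torch.randn(2, 8, 4, 4)
# scale = w * rsqrt(rv + eps), bias = b - rm * scale, then act(drop(.)),
# so the frozen module should match eval-mode BN up to float rounding
torch.testing.assert_close(frozen(x), bn(x), rtol=1e-5, atol=1e-5)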
-Based on the impl in https://github.com/google-research/vision_transformer
+Based on code in:
+  * https://github.com/google-research/vision_transformer
+  * https://github.com/google-research/big_vision/tree/main/big_vision
 
 Hacked together by / Copyright 2020 Ross Wightman
 """
+import logging
+from typing import List
+
+import torch
 from torch import nn as nn
+import torch.nn.functional as F
 
 from .helpers import to_2tuple
 from .trace_utils import _assert
 
+_logger = logging.getLogger(__name__)
+
 
 class PatchEmbed(nn.Module):
     """ 2D Image to Patch Embedding
@@ -46,3 +55,130 @@ class PatchEmbed(nn.Module):
             x = x.flatten(2).transpose(1, 2)  # BCHW -> BNC
         x = self.norm(x)
         return x
+
+
+def resample_patch_embed(
+        patch_embed,
+        new_size: List[int],
+        interpolation: str = 'bicubic',
+        antialias: bool = True,
+        verbose: bool = False,
+):
+    """Resample the weights of the patch embedding kernel to target resolution.
+    We resample the patch embedding kernel by approximately inverting the effect
+    of patch resizing.
+
+    Code based on:
+      https://github.com/google-research/big_vision/blob/b00544b81f8694488d5f36295aeb7972f3755ffe/big_vision/models/proj/flexi/vit.py
+
+    With this resizing, we can for example load a B/8 filter into a B/16 model
+    and, on a 2x larger input image, the result will match.
+
+    Args:
+        patch_embed: original parameter to be resized.
+        new_size (tuple(int, int)): target shape (height, width) only.
+        interpolation (str): interpolation for resize
+        antialias (bool): use anti-aliasing filter in resize
+        verbose (bool): log operation
+    Returns:
+        Resized patch embedding kernel.
+    """
+    import numpy as np
+    try:
+        import functorch
+        vmap = functorch.vmap
+    except ImportError:
+        if hasattr(torch, 'vmap'):
+            vmap = torch.vmap
+        else:
+            assert False, "functorch or a version of torch with vmap is required for FlexiViT resizing."
+
+    assert len(patch_embed.shape) == 4, "Four dimensions expected"
+    assert len(new_size) == 2, "New shape should only be hw"
+    old_size = patch_embed.shape[-2:]
+    if tuple(old_size) == tuple(new_size):
+        return patch_embed
+
+    if verbose:
+        _logger.info(f"Resize patch embedding {patch_embed.shape} to {new_size}, w/ {interpolation} interpolation.")
+
+    def resize(x_np, _new_size):
+        x_tf = torch.Tensor(x_np)[None, None, ...]
+        x_upsampled = F.interpolate(
+            x_tf, size=_new_size, mode=interpolation, antialias=antialias)[0, 0, ...].numpy()
+        return x_upsampled
+
+    def get_resize_mat(_old_size, _new_size):
+        mat = []
+        for i in range(np.prod(_old_size)):
+            basis_vec = np.zeros(_old_size)
+            basis_vec[np.unravel_index(i, _old_size)] = 1.
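+            # interpolating a one-hot basis vector gives one column of the linear
+            # map realized by F.interpolate; stacking all columns yields the full
+            # resize matrix, whose pseudoinverse is used below to invert the resize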
+ mat.append(resize(basis_vec, _new_size).reshape(-1)) + return np.stack(mat).T + + resize_mat = get_resize_mat(old_size, new_size) + resize_mat_pinv = torch.Tensor(np.linalg.pinv(resize_mat.T)) + + def resample_kernel(kernel): + resampled_kernel = resize_mat_pinv @ kernel.reshape(-1) + return resampled_kernel.reshape(new_size) + + v_resample_kernel = vmap(vmap(resample_kernel, 0, 0), 1, 1) + return v_resample_kernel(patch_embed) + + +# def divs(n, m=None): +# m = m or n // 2 +# if m == 1: +# return [1] +# if n % m == 0: +# return [m] + divs(n, m - 1) +# return divs(n, m - 1) +# +# +# class FlexiPatchEmbed(nn.Module): +# """ 2D Image to Patch Embedding w/ Flexible Patch sizes (FlexiViT) +# FIXME WIP +# """ +# def __init__( +# self, +# img_size=240, +# patch_size=16, +# in_chans=3, +# embed_dim=768, +# base_img_size=240, +# base_patch_size=32, +# norm_layer=None, +# flatten=True, +# bias=True, +# ): +# super().__init__() +# self.img_size = to_2tuple(img_size) +# self.patch_size = to_2tuple(patch_size) +# self.num_patches = 0 +# +# # full range for 240 = (5, 6, 8, 10, 12, 14, 15, 16, 20, 24, 30, 40, 48) +# self.seqhw = (6, 8, 10, 12, 14, 15, 16, 20, 24, 30) +# +# self.base_img_size = to_2tuple(base_img_size) +# self.base_patch_size = to_2tuple(base_patch_size) +# self.base_grid_size = tuple([i // p for i, p in zip(self.base_img_size, self.base_patch_size)]) +# self.base_num_patches = self.base_grid_size[0] * self.base_grid_size[1] +# +# self.flatten = flatten +# self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=self.patch_size, stride=self.patch_size, bias=bias) +# self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() +# +# def forward(self, x): +# B, C, H, W = x.shape +# +# if self.patch_size == self.base_patch_size: +# weight = self.proj.weight +# else: +# weight = resample_patch_embed(self.proj.weight, self.patch_size) +# patch_size = self.patch_size +# x = F.conv2d(x, weight, bias=self.proj.bias, stride=patch_size) +# if self.flatten: +# x = x.flatten(2).transpose(1, 2) # BCHW -> BNC +# x = self.norm(x) +# return x diff --git a/timm/layers/pos_embed.py b/timm/layers/pos_embed.py index 99a122a0..d0e67521 100644 --- a/timm/layers/pos_embed.py +++ b/timm/layers/pos_embed.py @@ -1,207 +1,52 @@ +""" Position Embedding Utilities + +Hacked together by / Copyright 2022 Ross Wightman +""" +import logging import math from typing import List, Tuple, Optional, Union import torch -from torch import nn as nn - - -def pixel_freq_bands( - num_bands: int, - max_freq: float = 224., - linear_bands: bool = True, - dtype: torch.dtype = torch.float32, - device: Optional[torch.device] = None, -): - if linear_bands: - bands = torch.linspace(1.0, max_freq / 2, num_bands, dtype=dtype, device=device) - else: - bands = 2 ** torch.linspace(0, math.log(max_freq, 2) - 1, num_bands, dtype=dtype, device=device) - return bands * torch.pi - - -def inv_freq_bands( - num_bands: int, - temperature: float = 100000., - step: int = 2, - dtype: torch.dtype = torch.float32, - device: Optional[torch.device] = None, -) -> torch.Tensor: - inv_freq = 1. 
/ (temperature ** (torch.arange(0, num_bands, step, dtype=dtype, device=device) / num_bands)) - return inv_freq - - -def build_sincos2d_pos_embed( - feat_shape: List[int], - dim: int = 64, - temperature: float = 10000., - reverse_coord: bool = False, - interleave_sin_cos: bool = False, - dtype: torch.dtype = torch.float32, - device: Optional[torch.device] = None -) -> torch.Tensor: - """ - - Args: - feat_shape: - dim: - temperature: - reverse_coord: stack grid order W, H instead of H, W - interleave_sin_cos: sin, cos, sin, cos stack instead of sin, sin, cos, cos - dtype: - device: - - Returns: - - """ - assert dim % 4 == 0, 'Embed dimension must be divisible by 4 for sin-cos 2D position embedding' - pos_dim = dim // 4 - bands = inv_freq_bands(pos_dim, temperature=temperature, step=1, dtype=dtype, device=device) - - if reverse_coord: - feat_shape = feat_shape[::-1] # stack W, H instead of H, W - grid = torch.stack( - torch.meshgrid([torch.arange(s, device=device, dtype=dtype) for s in feat_shape])).flatten(1).transpose(0, 1) - pos2 = grid.unsqueeze(-1) * bands.unsqueeze(0) - # FIXME add support for unflattened spatial dim? - - stack_dim = 2 if interleave_sin_cos else 1 # stack sin, cos, sin, cos instead of sin sin cos cos - pos_emb = torch.stack([torch.sin(pos2), torch.cos(pos2)], dim=stack_dim).flatten(1) - return pos_emb - - -def build_fourier_pos_embed( - feat_shape: List[int], - bands: Optional[torch.Tensor] = None, - num_bands: int = 64, - max_res: int = 224, - linear_bands: bool = False, - include_grid: bool = False, - concat_out: bool = True, - in_pixels: bool = True, - dtype: torch.dtype = torch.float32, - device: Optional[torch.device] = None, -) -> List[torch.Tensor]: - if bands is None: - if in_pixels: - bands = pixel_freq_bands(num_bands, float(max_res), linear_bands=linear_bands, dtype=dtype, device=device) - else: - bands = inv_freq_bands(num_bands, step=1, dtype=dtype, device=device) - else: - if device is None: - device = bands.device - if dtype is None: - dtype = bands.dtype - - if in_pixels: - grid = torch.stack(torch.meshgrid( - [torch.linspace(-1., 1., steps=s, device=device, dtype=dtype) for s in feat_shape]), dim=-1) - else: - grid = torch.stack(torch.meshgrid( - [torch.arange(s, device=device, dtype=dtype) for s in feat_shape]), dim=-1) - grid = grid.unsqueeze(-1) - pos = grid * bands - - pos_sin, pos_cos = pos.sin(), pos.cos() - out = (grid, pos_sin, pos_cos) if include_grid else (pos_sin, pos_cos) - # FIXME torchscript doesn't like multiple return types, probably need to always cat? 
- if concat_out: - out = torch.cat(out, dim=-1) - return out - - -class FourierEmbed(nn.Module): - - def __init__(self, max_res: int = 224, num_bands: int = 64, concat_grid=True, keep_spatial=False): - super().__init__() - self.max_res = max_res - self.num_bands = num_bands - self.concat_grid = concat_grid - self.keep_spatial = keep_spatial - self.register_buffer('bands', pixel_freq_bands(max_res, num_bands), persistent=False) - - def forward(self, x): - B, C = x.shape[:2] - feat_shape = x.shape[2:] - emb = build_fourier_pos_embed( - feat_shape, - self.bands, - include_grid=self.concat_grid, - dtype=x.dtype, - device=x.device) - emb = emb.transpose(-1, -2).flatten(len(feat_shape)) - batch_expand = (B,) + (-1,) * (x.ndim - 1) - - # FIXME support nD - if self.keep_spatial: - x = torch.cat([x, emb.unsqueeze(0).expand(batch_expand).permute(0, 3, 1, 2)], dim=1) - else: - x = torch.cat([x.permute(0, 2, 3, 1), emb.unsqueeze(0).expand(batch_expand)], dim=-1) - x = x.reshape(B, feat_shape.numel(), -1) - - return x - +import torch.nn.functional as F -def rot(x): - return torch.stack([-x[..., 1::2], x[..., ::2]], -1).reshape(x.shape) +from .helpers import to_2tuple +_logger = logging.getLogger(__name__) -def apply_rot_embed(x: torch.Tensor, sin_emb, cos_emb): - return x * cos_emb + rot(x) * sin_emb - -def apply_rot_embed_list(x: List[torch.Tensor], sin_emb, cos_emb): - if isinstance(x, torch.Tensor): - x = [x] - return [t * cos_emb + rot(t) * sin_emb for t in x] - - -def apply_rot_embed_split(x: torch.Tensor, emb): - split = emb.shape[-1] // 2 - return x * emb[:, :split] + rot(x) * emb[:, split:] - - -def build_rotary_pos_embed( - feat_shape: List[int], - bands: Optional[torch.Tensor] = None, - dim: int = 64, - max_freq: float = 224, - linear_bands: bool = False, - dtype: torch.dtype = torch.float32, - device: Optional[torch.device] = None, +def resample_abs_pos_embed( + posemb, + new_size: List[int], + old_size: Optional[List[int]] = None, + num_prefix_tokens: int = 1, + interpolation: str = 'bicubic', + antialias: bool = True, + verbose: bool = False, ): - """ - NOTE: shape arg should include spatial dim only - """ - feat_shape = torch.Size(feat_shape) - - sin_emb, cos_emb = build_fourier_pos_embed( - feat_shape, bands=bands, num_bands=dim // 4, max_res=max_freq, linear_bands=linear_bands, - concat_out=False, device=device, dtype=dtype) - N = feat_shape.numel() - sin_emb = sin_emb.reshape(N, -1).repeat_interleave(2, -1) - cos_emb = cos_emb.reshape(N, -1).repeat_interleave(2, -1) - return sin_emb, cos_emb - - -class RotaryEmbedding(nn.Module): - """ Rotary position embedding - - NOTE: This is my initial attempt at impl rotary embedding for spatial use, it has not - been well tested, and will likely change. It will be moved to its own file. 
+    # sort out sizes, assume square if old size not provided
+    new_size = to_2tuple(new_size)
+    new_ntok = new_size[0] * new_size[1]
+    if not old_size:
+        old_size = int(math.sqrt(posemb.shape[1] - num_prefix_tokens))
+        old_size = to_2tuple(old_size)
+    if new_size == old_size:  # might not both be same container type
+        return posemb
+
+    if num_prefix_tokens:
+        posemb_prefix, posemb = posemb[:, :num_prefix_tokens], posemb[:, num_prefix_tokens:]
+    else:
+        posemb_prefix, posemb = None, posemb
 
-    The following impl/resources were referenced for this impl:
-    * https://github.com/lucidrains/vit-pytorch/blob/6f3a5fcf0bca1c5ec33a35ef48d97213709df4ba/vit_pytorch/rvt.py
-    * https://blog.eleuther.ai/rotary-embeddings/
-    """
-    def __init__(self, dim, max_res=224, linear_bands: bool = False):
-        super().__init__()
-        self.dim = dim
-        self.register_buffer('bands', pixel_freq_bands(dim // 4, max_res, linear_bands=linear_bands), persistent=False)
+    # do the interpolation
+    posemb = posemb.reshape(1, old_size[0], old_size[1], -1).permute(0, 3, 1, 2)
+    posemb = F.interpolate(posemb, size=new_size, mode=interpolation, antialias=antialias)
+    posemb = posemb.permute(0, 2, 3, 1).reshape(1, new_ntok, -1)
 
-    def get_embed(self, shape: List[int]):
-        return build_rotary_pos_embed(shape, self.bands)
+    if verbose:
+        _logger.info(f'Resized position embedding: {old_size} to {new_size}.')
 
-    def forward(self, x):
-        # assuming channel-first tensor where spatial dim are >= 2
-        sin_emb, cos_emb = self.get_embed(x.shape[2:])
-        return apply_rot_embed(x, sin_emb, cos_emb)
+    # add back extra (class, etc) prefix tokens
+    if posemb_prefix is not None:
+        posemb = torch.cat([posemb_prefix, posemb], dim=1)
+    return posemb
diff --git a/timm/layers/pos_embed_rel.py b/timm/layers/pos_embed_rel.py
new file mode 100644
index 00000000..2ef25670
--- /dev/null
+++ b/timm/layers/pos_embed_rel.py
@@ -0,0 +1,283 @@
+""" Relative position embedding modules and functions
+
+Hacked together by / Copyright 2022 Ross Wightman
+"""
+import math
+from typing import Optional, Tuple
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from .mlp import Mlp
+from .weight_init import trunc_normal_
+
+
+def gen_relative_position_index(
+        q_size: Tuple[int, int],
+        k_size: Tuple[int, int] = None,
+        class_token: bool = False) -> torch.Tensor:
+    # Adapted with significant modifications from Swin / BeiT codebases
+    # get pair-wise relative position index for each token inside the window
+    q_coords = torch.stack(torch.meshgrid([torch.arange(q_size[0]), torch.arange(q_size[1])])).flatten(1)  # 2, Wh, Ww
+    if k_size is None:
+        k_coords = q_coords
+        k_size = q_size
+    else:
+        # different q vs k sizes is a WIP
+        k_coords = torch.stack(torch.meshgrid([torch.arange(k_size[0]), torch.arange(k_size[1])])).flatten(1)
+    relative_coords = q_coords[:, :, None] - k_coords[:, None, :]  # 2, Wh*Ww, Wh*Ww
+    relative_coords = relative_coords.permute(1, 2, 0)  # Wh*Ww, Wh*Ww, 2
+    _, relative_position_index = torch.unique(relative_coords.view(-1, 2), return_inverse=True, dim=0)
+
+    if class_token:
+        # handle cls to token & token to cls & cls to cls as per beit for rel pos bias
+        # NOTE not intended or tested with MLP log-coords
+        max_size = (max(q_size[0], k_size[0]), max(q_size[1], k_size[1]))
+        num_relative_distance = (2 * max_size[0] - 1) * (2 * max_size[1] - 1) + 3
+        relative_position_index = F.pad(relative_position_index, [1, 0, 1, 0])
+        relative_position_index[0, 0:] = num_relative_distance - 3
relative_position_index[0:, 0] = num_relative_distance - 2 + relative_position_index[0, 0] = num_relative_distance - 1 + + return relative_position_index.contiguous() + + +class RelPosBias(nn.Module): + """ Relative Position Bias + Adapted from Swin-V1 relative position bias impl, modularized. + """ + + def __init__(self, window_size, num_heads, prefix_tokens=0): + super().__init__() + assert prefix_tokens <= 1 + self.window_size = window_size + self.window_area = window_size[0] * window_size[1] + self.bias_shape = (self.window_area + prefix_tokens,) * 2 + (num_heads,) + + num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3 * prefix_tokens + self.relative_position_bias_table = nn.Parameter(torch.zeros(num_relative_distance, num_heads)) + self.register_buffer( + "relative_position_index", + gen_relative_position_index(self.window_size, class_token=prefix_tokens > 0), + persistent=False, + ) + + self.init_weights() + + def init_weights(self): + trunc_normal_(self.relative_position_bias_table, std=.02) + + def get_bias(self) -> torch.Tensor: + relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)] + # win_h * win_w, win_h * win_w, num_heads + relative_position_bias = relative_position_bias.view(self.bias_shape).permute(2, 0, 1) + return relative_position_bias.unsqueeze(0).contiguous() + + def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None): + return attn + self.get_bias() + + +def gen_relative_log_coords( + win_size: Tuple[int, int], + pretrained_win_size: Tuple[int, int] = (0, 0), + mode='swin', +): + assert mode in ('swin', 'cr', 'rw') + # as per official swin-v2 impl, supporting timm specific 'cr' and 'rw' log coords as well + relative_coords_h = torch.arange(-(win_size[0] - 1), win_size[0], dtype=torch.float32) + relative_coords_w = torch.arange(-(win_size[1] - 1), win_size[1], dtype=torch.float32) + relative_coords_table = torch.stack(torch.meshgrid([relative_coords_h, relative_coords_w])) + relative_coords_table = relative_coords_table.permute(1, 2, 0).contiguous() # 2*Wh-1, 2*Ww-1, 2 + if mode == 'swin': + if pretrained_win_size[0] > 0: + relative_coords_table[:, :, 0] /= (pretrained_win_size[0] - 1) + relative_coords_table[:, :, 1] /= (pretrained_win_size[1] - 1) + else: + relative_coords_table[:, :, 0] /= (win_size[0] - 1) + relative_coords_table[:, :, 1] /= (win_size[1] - 1) + relative_coords_table *= 8 # normalize to -8, 8 + relative_coords_table = torch.sign(relative_coords_table) * torch.log2( + 1.0 + relative_coords_table.abs()) / math.log2(8) + else: + if mode == 'rw': + # cr w/ window size normalization -> [-1,1] log coords + relative_coords_table[:, :, 0] /= (win_size[0] - 1) + relative_coords_table[:, :, 1] /= (win_size[1] - 1) + relative_coords_table *= 8 # scale to -8, 8 + relative_coords_table = torch.sign(relative_coords_table) * torch.log2( + 1.0 + relative_coords_table.abs()) + relative_coords_table /= math.log2(9) # -> [-1, 1] + else: + # mode == 'cr' + relative_coords_table = torch.sign(relative_coords_table) * torch.log( + 1.0 + relative_coords_table.abs()) + + return relative_coords_table + + +class RelPosMlp(nn.Module): + """ Log-Coordinate Relative Position MLP + Based on ideas presented in Swin-V2 paper (https://arxiv.org/abs/2111.09883) + + This impl covers the 'swin' implementation as well as two timm specific modes ('cr', and 'rw') + """ + def __init__( + self, + window_size, + num_heads=8, + hidden_dim=128, + prefix_tokens=0, + mode='cr', + pretrained_window_size=(0, 0) 
+ ): + super().__init__() + self.window_size = window_size + self.window_area = self.window_size[0] * self.window_size[1] + self.prefix_tokens = prefix_tokens + self.num_heads = num_heads + self.bias_shape = (self.window_area,) * 2 + (num_heads,) + if mode == 'swin': + self.bias_act = nn.Sigmoid() + self.bias_gain = 16 + mlp_bias = (True, False) + elif mode == 'rw': + self.bias_act = nn.Tanh() + self.bias_gain = 4 + mlp_bias = True + else: + self.bias_act = nn.Identity() + self.bias_gain = None + mlp_bias = True + + self.mlp = Mlp( + 2, # x, y + hidden_features=hidden_dim, + out_features=num_heads, + act_layer=nn.ReLU, + bias=mlp_bias, + drop=(0.125, 0.) + ) + + self.register_buffer( + "relative_position_index", + gen_relative_position_index(window_size), + persistent=False) + + # get relative_coords_table + self.register_buffer( + "rel_coords_log", + gen_relative_log_coords(window_size, pretrained_window_size, mode=mode), + persistent=False) + + def get_bias(self) -> torch.Tensor: + relative_position_bias = self.mlp(self.rel_coords_log) + if self.relative_position_index is not None: + relative_position_bias = relative_position_bias.view(-1, self.num_heads)[ + self.relative_position_index.view(-1)] # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.view(self.bias_shape) + relative_position_bias = relative_position_bias.permute(2, 0, 1) + relative_position_bias = self.bias_act(relative_position_bias) + if self.bias_gain is not None: + relative_position_bias = self.bias_gain * relative_position_bias + if self.prefix_tokens: + relative_position_bias = F.pad(relative_position_bias, [self.prefix_tokens, 0, self.prefix_tokens, 0]) + return relative_position_bias.unsqueeze(0).contiguous() + + def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None): + return attn + self.get_bias() + + +def generate_lookup_tensor( + length: int, + max_relative_position: Optional[int] = None, +): + """Generate a one_hot lookup tensor to reindex embeddings along one dimension. + + Args: + length: the length to reindex to. + max_relative_position: the maximum relative position to consider. + Relative position embeddings for distances above this threshold + are zeroed out. + Returns: + a lookup Tensor of size [length, length, vocab_size] that satisfies + ret[n,m,v] = 1{m - n + max_relative_position = v}. + """ + if max_relative_position is None: + max_relative_position = length - 1 + # Return the cached lookup tensor, otherwise compute it and cache it. + vocab_size = 2 * max_relative_position + 1 + ret = torch.zeros(length, length, vocab_size) + for i in range(length): + for x in range(length): + v = x - i + max_relative_position + if abs(x - i) > max_relative_position: + continue + ret[i, x, v] = 1 + return ret + + +def reindex_2d_einsum_lookup( + relative_position_tensor, + height: int, + width: int, + height_lookup: torch.Tensor, + width_lookup: torch.Tensor, +) -> torch.Tensor: + """Reindex 2d relative position bias with 2 independent einsum lookups. + + Adapted from: + https://github.com/google-research/maxvit/blob/2e06a7f1f70c76e64cd3dabe5cd1b8c1a23c9fb7/maxvit/models/attention_utils.py + + Args: + relative_position_tensor: tensor of shape + [..., vocab_height, vocab_width, ...]. + height: height to reindex to. + width: width to reindex to. + height_lookup: one-hot height lookup + width_lookup: one-hot width lookup + Returns: + reindexed_tensor: a Tensor of shape + [..., height * width, height * width, ...] 
+ """ + reindexed_tensor = torch.einsum('nhw,ixh->nixw', relative_position_tensor, height_lookup) + reindexed_tensor = torch.einsum('nixw,jyw->nijxy', reindexed_tensor, width_lookup) + area = height * width + return reindexed_tensor.reshape(relative_position_tensor.shape[0], area, area) + + +class RelPosBiasTf(nn.Module): + """ Relative Position Bias Impl (Compatible with Tensorflow MaxViT models) + Adapted from: + https://github.com/google-research/maxvit/blob/2e06a7f1f70c76e64cd3dabe5cd1b8c1a23c9fb7/maxvit/models/attention_utils.py + """ + def __init__(self, window_size, num_heads, prefix_tokens=0): + super().__init__() + assert prefix_tokens <= 1 + self.window_size = window_size + self.window_area = window_size[0] * window_size[1] + self.num_heads = num_heads + + vocab_height = 2 * window_size[0] - 1 + vocab_width = 2 * window_size[1] - 1 + self.bias_shape = (self.num_heads, vocab_height, vocab_width) + self.relative_position_bias_table = nn.Parameter(torch.zeros(self.bias_shape)) + self.register_buffer('height_lookup', generate_lookup_tensor(window_size[0]), persistent=False) + self.register_buffer('width_lookup', generate_lookup_tensor(window_size[1]), persistent=False) + self.init_weights() + + def init_weights(self): + nn.init.normal_(self.relative_position_bias_table, std=.02) + + def get_bias(self) -> torch.Tensor: + # FIXME change to not use one-hot/einsum? + return reindex_2d_einsum_lookup( + self.relative_position_bias_table, + self.window_size[0], + self.window_size[1], + self.height_lookup, + self.width_lookup + ) + + def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None): + return attn + self.get_bias() diff --git a/timm/layers/pos_embed_sincos.py b/timm/layers/pos_embed_sincos.py new file mode 100644 index 00000000..5603a5cd --- /dev/null +++ b/timm/layers/pos_embed_sincos.py @@ -0,0 +1,219 @@ +""" Sin-cos, fourier, rotary position embedding modules and functions + +Hacked together by / Copyright 2022 Ross Wightman +""" +import math +from typing import List, Tuple, Optional, Union + +import torch +from torch import nn as nn + + +def pixel_freq_bands( + num_bands: int, + max_freq: float = 224., + linear_bands: bool = True, + dtype: torch.dtype = torch.float32, + device: Optional[torch.device] = None, +): + if linear_bands: + bands = torch.linspace(1.0, max_freq / 2, num_bands, dtype=dtype, device=device) + else: + bands = 2 ** torch.linspace(0, math.log(max_freq, 2) - 1, num_bands, dtype=dtype, device=device) + return bands * torch.pi + + +def inv_freq_bands( + num_bands: int, + temperature: float = 100000., + step: int = 2, + dtype: torch.dtype = torch.float32, + device: Optional[torch.device] = None, +) -> torch.Tensor: + inv_freq = 1. 
/ (temperature ** (torch.arange(0, num_bands, step, dtype=dtype, device=device) / num_bands)) + return inv_freq + + +def build_sincos2d_pos_embed( + feat_shape: List[int], + dim: int = 64, + temperature: float = 10000., + reverse_coord: bool = False, + interleave_sin_cos: bool = False, + dtype: torch.dtype = torch.float32, + device: Optional[torch.device] = None +) -> torch.Tensor: + """ + + Args: + feat_shape: + dim: + temperature: + reverse_coord: stack grid order W, H instead of H, W + interleave_sin_cos: sin, cos, sin, cos stack instead of sin, sin, cos, cos + dtype: + device: + + Returns: + + """ + assert dim % 4 == 0, 'Embed dimension must be divisible by 4 for sin-cos 2D position embedding' + pos_dim = dim // 4 + bands = inv_freq_bands(pos_dim, temperature=temperature, step=1, dtype=dtype, device=device) + + if reverse_coord: + feat_shape = feat_shape[::-1] # stack W, H instead of H, W + grid = torch.stack( + torch.meshgrid([torch.arange(s, device=device, dtype=dtype) for s in feat_shape])).flatten(1).transpose(0, 1) + pos2 = grid.unsqueeze(-1) * bands.unsqueeze(0) + # FIXME add support for unflattened spatial dim? + + stack_dim = 2 if interleave_sin_cos else 1 # stack sin, cos, sin, cos instead of sin sin cos cos + pos_emb = torch.stack([torch.sin(pos2), torch.cos(pos2)], dim=stack_dim).flatten(1) + return pos_emb + + +def build_fourier_pos_embed( + feat_shape: List[int], + bands: Optional[torch.Tensor] = None, + num_bands: int = 64, + max_res: int = 224, + linear_bands: bool = False, + include_grid: bool = False, + concat_out: bool = True, + in_pixels: bool = True, + dtype: torch.dtype = torch.float32, + device: Optional[torch.device] = None, +) -> List[torch.Tensor]: + if bands is None: + if in_pixels: + bands = pixel_freq_bands(num_bands, float(max_res), linear_bands=linear_bands, dtype=dtype, device=device) + else: + bands = inv_freq_bands(num_bands, step=1, dtype=dtype, device=device) + else: + if device is None: + device = bands.device + if dtype is None: + dtype = bands.dtype + + if in_pixels: + grid = torch.stack(torch.meshgrid( + [torch.linspace(-1., 1., steps=s, device=device, dtype=dtype) for s in feat_shape]), dim=-1) + else: + grid = torch.stack(torch.meshgrid( + [torch.arange(s, device=device, dtype=dtype) for s in feat_shape]), dim=-1) + grid = grid.unsqueeze(-1) + pos = grid * bands + + pos_sin, pos_cos = pos.sin(), pos.cos() + out = (grid, pos_sin, pos_cos) if include_grid else (pos_sin, pos_cos) + # FIXME torchscript doesn't like multiple return types, probably need to always cat? 
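+    # with concat_out, the (optional) grid and the sin/cos tensors are merged
+    # along the last dim into a single embedding tensor; otherwise the tuple is
+    # returned for callers like build_rotary_pos_embed below to unpack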
+ if concat_out: + out = torch.cat(out, dim=-1) + return out + + +class FourierEmbed(nn.Module): + + def __init__(self, max_res: int = 224, num_bands: int = 64, concat_grid=True, keep_spatial=False): + super().__init__() + self.max_res = max_res + self.num_bands = num_bands + self.concat_grid = concat_grid + self.keep_spatial = keep_spatial + self.register_buffer('bands', pixel_freq_bands(max_res, num_bands), persistent=False) + + def forward(self, x): + B, C = x.shape[:2] + feat_shape = x.shape[2:] + emb = build_fourier_pos_embed( + feat_shape, + self.bands, + include_grid=self.concat_grid, + dtype=x.dtype, + device=x.device) + emb = emb.transpose(-1, -2).flatten(len(feat_shape)) + batch_expand = (B,) + (-1,) * (x.ndim - 1) + + # FIXME support nD + if self.keep_spatial: + x = torch.cat([x, emb.unsqueeze(0).expand(batch_expand).permute(0, 3, 1, 2)], dim=1) + else: + x = torch.cat([x.permute(0, 2, 3, 1), emb.unsqueeze(0).expand(batch_expand)], dim=-1) + x = x.reshape(B, feat_shape.numel(), -1) + + return x + + +def rot(x): + return torch.stack([-x[..., 1::2], x[..., ::2]], -1).reshape(x.shape) + + +def apply_rot_embed(x: torch.Tensor, sin_emb, cos_emb): + return x * cos_emb + rot(x) * sin_emb + + +def apply_rot_embed_list(x: List[torch.Tensor], sin_emb, cos_emb): + if isinstance(x, torch.Tensor): + x = [x] + return [t * cos_emb + rot(t) * sin_emb for t in x] + + +def apply_rot_embed_split(x: torch.Tensor, emb): + split = emb.shape[-1] // 2 + return x * emb[:, :split] + rot(x) * emb[:, split:] + + +def build_rotary_pos_embed( + feat_shape: List[int], + bands: Optional[torch.Tensor] = None, + dim: int = 64, + max_freq: float = 224, + linear_bands: bool = False, + dtype: torch.dtype = torch.float32, + device: Optional[torch.device] = None, +): + """ + NOTE: shape arg should include spatial dim only + """ + feat_shape = torch.Size(feat_shape) + + sin_emb, cos_emb = build_fourier_pos_embed( + feat_shape, + bands=bands, + num_bands=dim // 4, + max_res=max_freq, + linear_bands=linear_bands, + concat_out=False, + device=device, + dtype=dtype, + ) + N = feat_shape.numel() + sin_emb = sin_emb.reshape(N, -1).repeat_interleave(2, -1) + cos_emb = cos_emb.reshape(N, -1).repeat_interleave(2, -1) + return sin_emb, cos_emb + + +class RotaryEmbedding(nn.Module): + """ Rotary position embedding + + NOTE: This is my initial attempt at impl rotary embedding for spatial use, it has not + been well tested, and will likely change. It will be moved to its own file. 
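+
+    In short, each adjacent (even, odd) channel pair is rotated by an angle proportional
+    to its position, so query/key dot products come to depend on relative rather than
+    absolute position.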
+ + The following impl/resources were referenced for this impl: + * https://github.com/lucidrains/vit-pytorch/blob/6f3a5fcf0bca1c5ec33a35ef48d97213709df4ba/vit_pytorch/rvt.py + * https://blog.eleuther.ai/rotary-embeddings/ + """ + + def __init__(self, dim, max_res=224, linear_bands: bool = False): + super().__init__() + self.dim = dim + self.register_buffer('bands', pixel_freq_bands(dim // 4, max_res, linear_bands=linear_bands), persistent=False) + + def get_embed(self, shape: List[int]): + return build_rotary_pos_embed(shape, self.bands) + + def forward(self, x): + # assuming channel-first tensor where spatial dim are >= 2 + sin_emb, cos_emb = self.get_embed(x.shape[2:]) + return apply_rot_embed(x, sin_emb, cos_emb) diff --git a/timm/models/_builder.py b/timm/models/_builder.py index f634650e..901d7d44 100644 --- a/timm/models/_builder.py +++ b/timm/models/_builder.py @@ -1,5 +1,6 @@ import dataclasses import logging +import os from copy import deepcopy from typing import Optional, Dict, Callable, Any, Tuple @@ -9,7 +10,7 @@ from torch.hub import load_state_dict_from_url from timm.models._features import FeatureListNet, FeatureHookNet from timm.models._features_fx import FeatureGraphNet from timm.models._helpers import load_state_dict -from timm.models._hub import has_hf_hub, download_cached_file, load_state_dict_from_hf +from timm.models._hub import has_hf_hub, download_cached_file, check_cached_file, load_state_dict_from_hf from timm.models._manipulate import adapt_input_conv from timm.models._pretrained import PretrainedCfg from timm.models._prune import adapt_model_from_file @@ -32,6 +33,7 @@ def _resolve_pretrained_source(pretrained_cfg): pretrained_url = pretrained_cfg.get('url', None) pretrained_file = pretrained_cfg.get('file', None) hf_hub_id = pretrained_cfg.get('hf_hub_id', None) + # resolve where to load pretrained weights from load_from = '' pretrained_loc = '' @@ -43,15 +45,20 @@ def _resolve_pretrained_source(pretrained_cfg): else: # default source == timm or unspecified if pretrained_file: + # file load override is the highest priority if set load_from = 'file' pretrained_loc = pretrained_file - elif pretrained_url: - load_from = 'url' - pretrained_loc = pretrained_url - elif hf_hub_id and has_hf_hub(necessary=True): - # hf-hub available as alternate weight source in default_cfg - load_from = 'hf-hub' - pretrained_loc = hf_hub_id + else: + # next, HF hub is prioritized unless a valid cached version of weights exists already + cached_url_valid = check_cached_file(pretrained_url) if pretrained_url else False + if hf_hub_id and has_hf_hub(necessary=True) and not cached_url_valid: + # hf-hub available as alternate weight source in default_cfg + load_from = 'hf-hub' + pretrained_loc = hf_hub_id + elif pretrained_url: + load_from = 'url' + pretrained_loc = pretrained_url + if load_from == 'hf-hub' and pretrained_cfg.get('hf_hub_filename', None): # if a filename override is set, return tuple for location w/ (hub_id, filename) pretrained_loc = pretrained_loc, pretrained_cfg['hf_hub_filename'] @@ -105,7 +112,7 @@ def load_custom_pretrained( pretrained_loc = download_cached_file( pretrained_loc, check_hash=_CHECK_HASH, - progress=_DOWNLOAD_PROGRESS + progress=_DOWNLOAD_PROGRESS, ) if load_fn is not None: @@ -146,12 +153,21 @@ def load_pretrained( state_dict = load_state_dict(pretrained_loc) elif load_from == 'url': _logger.info(f'Loading pretrained weights from url ({pretrained_loc})') - state_dict = load_state_dict_from_url( - pretrained_loc, - map_location='cpu', - 
progress=_DOWNLOAD_PROGRESS, - check_hash=_CHECK_HASH, - ) + if pretrained_cfg.get('custom_load', False): + pretrained_loc = download_cached_file( + pretrained_loc, + progress=_DOWNLOAD_PROGRESS, + check_hash=_CHECK_HASH, + ) + model.load_pretrained(pretrained_loc) + return + else: + state_dict = load_state_dict_from_url( + pretrained_loc, + map_location='cpu', + progress=_DOWNLOAD_PROGRESS, + check_hash=_CHECK_HASH, + ) elif load_from == 'hf-hub': _logger.info(f'Loading pretrained weights from Hugging Face hub ({pretrained_loc})') if isinstance(pretrained_loc, (list, tuple)): @@ -364,20 +380,14 @@ def build_model_with_cfg( # For classification models, check class attr, then kwargs, then default to 1k, otherwise 0 for feats num_classes_pretrained = 0 if features else getattr(model, 'num_classes', kwargs.get('num_classes', 1000)) if pretrained: - if pretrained_cfg.get('custom_load', False): - load_custom_pretrained( - model, - pretrained_cfg=pretrained_cfg, - ) - else: - load_pretrained( - model, - pretrained_cfg=pretrained_cfg, - num_classes=num_classes_pretrained, - in_chans=kwargs.get('in_chans', 3), - filter_fn=pretrained_filter_fn, - strict=pretrained_strict, - ) + load_pretrained( + model, + pretrained_cfg=pretrained_cfg, + num_classes=num_classes_pretrained, + in_chans=kwargs.get('in_chans', 3), + filter_fn=pretrained_filter_fn, + strict=pretrained_strict, + ) # Wrap the model in a feature extraction module if enabled if features: diff --git a/timm/models/_hub.py b/timm/models/_hub.py index e6b7d558..df1a1ef7 100644 --- a/timm/models/_hub.py +++ b/timm/models/_hub.py @@ -1,3 +1,4 @@ +import hashlib import json import logging import os @@ -67,6 +68,26 @@ def download_cached_file(url, check_hash=True, progress=False): return cached_file +def check_cached_file(url, check_hash=True): + if isinstance(url, (list, tuple)): + url, filename = url + else: + parts = urlparse(url) + filename = os.path.basename(parts.path) + cached_file = os.path.join(get_cache_dir(), filename) + if os.path.exists(cached_file): + if check_hash: + r = HASH_REGEX.search(filename) # r is Optional[Match[str]] + hash_prefix = r.group(1) if r else None + if hash_prefix: + with open(cached_file, 'rb') as f: + hd = hashlib.sha256(f.read()).hexdigest() + if hd[:len(hash_prefix)] != hash_prefix: + return False + return True + return False + + def has_hf_hub(necessary=False): if not _has_hf_hub and necessary: # if no HF Hub module installed, and it is necessary to continue, raise error @@ -90,14 +111,14 @@ def load_cfg_from_json(json_file: Union[str, os.PathLike]): return json.loads(text) -def _download_from_hf(model_id: str, filename: str): +def download_from_hf(model_id: str, filename: str): hf_model_id, hf_revision = hf_split(model_id) return hf_hub_download(hf_model_id, filename, revision=hf_revision) def load_model_config_from_hf(model_id: str): assert has_hf_hub(True) - cached_file = _download_from_hf(model_id, 'config.json') + cached_file = download_from_hf(model_id, 'config.json') hf_config = load_cfg_from_json(cached_file) if 'pretrained_cfg' not in hf_config: @@ -124,34 +145,28 @@ def load_model_config_from_hf(model_id: str): def load_state_dict_from_hf(model_id: str, filename: str = 'pytorch_model.bin'): assert has_hf_hub(True) - cached_file = _download_from_hf(model_id, filename) + cached_file = download_from_hf(model_id, filename) state_dict = torch.load(cached_file, map_location='cpu') return state_dict -def save_for_hf(model, save_directory, model_config=None): - assert has_hf_hub(True) +def 
save_config_for_hf(model, config_path, model_config=None): model_config = model_config or {} - save_directory = Path(save_directory) - save_directory.mkdir(exist_ok=True, parents=True) - - weights_path = save_directory / 'pytorch_model.bin' - torch.save(model.state_dict(), weights_path) - - config_path = save_directory / 'config.json' hf_config = {} pretrained_cfg = filter_pretrained_cfg(model.pretrained_cfg, remove_source=True, remove_null=True) # set some values at root config level hf_config['architecture'] = pretrained_cfg.pop('architecture') hf_config['num_classes'] = model_config.get('num_classes', model.num_classes) hf_config['num_features'] = model_config.get('num_features', model.num_features) - hf_config['global_pool'] = model_config.get('global_pool', getattr(model, 'global_pool', None)) + global_pool_type = model_config.get('global_pool', getattr(model, 'global_pool', None)) + if isinstance(global_pool_type, str) and global_pool_type: + hf_config['global_pool'] = global_pool_type - if 'label' in model_config: + if 'labels' in model_config: _logger.warning( - "'label' as a config field for timm models is deprecated. Please use 'label_name' and 'display_name'. " + "'labels' as a config field for timm models is deprecated. Please use 'label_name' and 'display_name'. " "Using provided 'label' field as 'label_name'.") - model_config['label_name'] = model_config.pop('label') + model_config['label_name'] = model_config.pop('labels') label_name = model_config.pop('label_name', None) if label_name: @@ -173,6 +188,18 @@ def save_for_hf(model, save_directory, model_config=None): json.dump(hf_config, f, indent=2) +def save_for_hf(model, save_directory, model_config=None): + assert has_hf_hub(True) + save_directory = Path(save_directory) + save_directory.mkdir(exist_ok=True, parents=True) + + weights_path = save_directory / 'pytorch_model.bin' + torch.save(model.state_dict(), weights_path) + + config_path = save_directory / 'config.json' + save_config_for_hf(model, config_path, model_config=model_config) + + def push_to_hf_hub( model, repo_id: str, @@ -182,6 +209,7 @@ def push_to_hf_hub( private: bool = False, create_pr: bool = False, model_config: Optional[dict] = None, + model_card: Optional[dict] = None, ): # Create repo if it doesn't exist yet repo_url = create_repo(repo_id, token=token, private=private, exist_ok=True) @@ -205,9 +233,23 @@ def push_to_hf_hub( # Add readme if it does not exist if not has_readme: + model_card = model_card or {} model_name = repo_id.split('/')[-1] readme_path = Path(tmpdir) / "README.md" - readme_text = f'---\ntags:\n- image-classification\n- timm\nlibrary_tag: timm\n---\n# Model card for {model_name}' + readme_text = "---\n" + readme_text += "tags:\n- image-classification\n- timm\n" + readme_text += "library_tag: timm\n" + readme_text += f"license: {model_card.get('license', 'apache-2.0')}\n" + readme_text += "---\n" + readme_text += f"# Model card for {model_name}\n" + if 'description' in model_card: + readme_text += f"\n{model_card['description']}\n" + if 'details' in model_card: + readme_text += f"\n## Model Details\n" + for k, v in model_card['details'].items(): + readme_text += f"- **{k}:** {v}\n" + if 'citation' in model_card: + readme_text += f"\n## Citation\n```\n{model_card['citation']}```\n" readme_path.write_text(readme_text) # Upload model and return diff --git a/timm/models/_pretrained.py b/timm/models/_pretrained.py index b5ecbc50..dca81eb0 100644 --- a/timm/models/_pretrained.py +++ b/timm/models/_pretrained.py @@ -19,6 +19,7 @@ class 
PretrainedCfg: source: Optional[str] = None # source of cfg / weight location used (url, file, hf-hub) architecture: Optional[str] = None # architecture variant can be set when not implicit + tag: Optional[str] = None # pretrained tag of source custom_load: bool = False # use custom model specific model.load_pretrained() (ie for npz files) # input / data config @@ -44,9 +45,11 @@ class PretrainedCfg: classifier: Optional[str] = None license: Optional[str] = None - source_url: Optional[str] = None - paper: Optional[str] = None - notes: Optional[str] = None + description: Optional[str] = None + origin_url: Optional[str] = None + paper_name: Optional[str] = None + paper_ids: Optional[Union[str, Tuple[str]]] = None + notes: Optional[Tuple[str]] = None @property def has_weights(self): @@ -62,11 +65,11 @@ class PretrainedCfg: def filter_pretrained_cfg(cfg, remove_source=False, remove_null=True): filtered_cfg = {} - keep_none = {'pool_size', 'first_conv', 'classifier'} # always keep these keys, even if none + keep_null = {'pool_size', 'first_conv', 'classifier'} # always keep these keys, even if none for k, v in cfg.items(): if remove_source and k in {'url', 'file', 'hf_hub_id', 'hf_hub_id', 'hf_hub_filename', 'source'}: continue - if remove_null and v is None and k not in keep_none: + if remove_null and v is None and k not in keep_null: continue filtered_cfg[k] = v return filtered_cfg diff --git a/timm/models/_registry.py b/timm/models/_registry.py index fc7b3437..80eb2e94 100644 --- a/timm/models/_registry.py +++ b/timm/models/_registry.py @@ -7,6 +7,7 @@ import re import sys from collections import defaultdict, deque from copy import deepcopy +from dataclasses import replace from typing import List, Optional, Union, Tuple from ._pretrained import PretrainedCfg, DefaultCfg, split_model_name_tag @@ -20,7 +21,7 @@ _model_to_module = {} # mapping of model names to module names _model_entrypoints = {} # mapping of model names to architecture entrypoint fns _model_has_pretrained = set() # set of model names that have pretrained weight url present _model_default_cfgs = dict() # central repo for model arch -> default cfg objects -_model_pretrained_cfgs = dict() # central repo for model arch + tag -> pretrained cfgs +_model_pretrained_cfgs = dict() # central repo for model arch.tag -> pretrained cfgs _model_with_tags = defaultdict(list) # shortcut to map each model arch to all model + tag names @@ -48,24 +49,31 @@ def register_model(fn): if hasattr(mod, 'default_cfgs') and model_name in mod.default_cfgs: # this will catch all models that have entrypoint matching cfg key, but miss any aliasing # entrypoints or non-matching combos - cfg = mod.default_cfgs[model_name] - if not isinstance(cfg, DefaultCfg): + default_cfg = mod.default_cfgs[model_name] + if not isinstance(default_cfg, DefaultCfg): # new style default cfg dataclass w/ multiple entries per model-arch - assert isinstance(cfg, dict) + assert isinstance(default_cfg, dict) # old style cfg dict per model-arch - cfg = PretrainedCfg(**cfg) - cfg = DefaultCfg(tags=deque(['']), cfgs={'': cfg}) + pretrained_cfg = PretrainedCfg(**default_cfg) + default_cfg = DefaultCfg(tags=deque(['']), cfgs={'': pretrained_cfg}) - for tag_idx, tag in enumerate(cfg.tags): + for tag_idx, tag in enumerate(default_cfg.tags): is_default = tag_idx == 0 - pretrained_cfg = cfg.cfgs[tag] + pretrained_cfg = default_cfg.cfgs[tag] + model_name_tag = '.'.join([model_name, tag]) if tag else model_name + replace_items = dict(architecture=model_name, tag=tag if tag else None) + if 
pretrained_cfg.hf_hub_id and pretrained_cfg.hf_hub_id == 'timm/': + # auto-complete hub name w/ architecture.tag + replace_items['hf_hub_id'] = pretrained_cfg.hf_hub_id + model_name_tag + pretrained_cfg = replace(pretrained_cfg, **replace_items) + if is_default: _model_pretrained_cfgs[model_name] = pretrained_cfg if pretrained_cfg.has_weights: # add tagless entry if it's default and has weights _model_has_pretrained.add(model_name) + if tag: - model_name_tag = '.'.join([model_name, tag]) _model_pretrained_cfgs[model_name_tag] = pretrained_cfg if pretrained_cfg.has_weights: # add model w/ tag if tag is valid @@ -74,7 +82,7 @@ def register_model(fn): else: _model_with_tags[model_name].append(model_name) # has empty tag (to slowly remove these instances) - _model_default_cfgs[model_name] = cfg + _model_default_cfgs[model_name] = default_cfg return fn @@ -198,15 +206,21 @@ def is_model_pretrained(model_name): return model_name in _model_has_pretrained -def get_pretrained_cfg(model_name): +def get_pretrained_cfg(model_name, allow_unregistered=True): if model_name in _model_pretrained_cfgs: return deepcopy(_model_pretrained_cfgs[model_name]) - raise RuntimeError(f'No pretrained config exists for model {model_name}.') + arch_name, tag = split_model_name_tag(model_name) + if arch_name in _model_default_cfgs: + # if model arch exists, but the tag is wrong, error out + raise RuntimeError(f'Invalid pretrained tag ({tag}) for {arch_name}.') + if allow_unregistered: + # if model arch doesn't exist, it has no pretrained_cfg registered, allow a default to be created + return None + raise RuntimeError(f'Model architecture ({arch_name}) has no pretrained cfg registered.') def get_pretrained_cfg_value(model_name, cfg_key): """ Get a specific model default_cfg value by key. None if key doesn't exist. 
""" - if model_name in _model_pretrained_cfgs: - return getattr(_model_pretrained_cfgs[model_name], cfg_key, None) - raise RuntimeError(f'No pretrained config exist for model {model_name}.') \ No newline at end of file + cfg = get_pretrained_cfg(model_name, allow_unregistered=False) + return getattr(cfg, cfg_key, None) diff --git a/timm/models/beit.py b/timm/models/beit.py index de71f441..12ec493d 100644 --- a/timm/models/beit.py +++ b/timm/models/beit.py @@ -355,64 +355,76 @@ def _cfg(url='', **kwargs): default_cfgs = generate_default_cfgs({ 'beit_base_patch16_224.in22k_ft_in22k_in1k': _cfg( - url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_224_pt22k_ft22kto1k.pth'), + url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_224_pt22k_ft22kto1k.pth', + hf_hub_id='timm/'), 'beit_base_patch16_384.in22k_ft_in22k_in1k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_384_pt22k_ft22kto1k.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), crop_pct=1.0, ), 'beit_base_patch16_224.in22k_ft_in22k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_base_patch16_224_pt22k_ft22k.pth', + hf_hub_id='timm/', num_classes=21841, ), 'beit_large_patch16_224.in22k_ft_in22k_in1k': _cfg( - url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_224_pt22k_ft22kto1k.pth'), + url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_224_pt22k_ft22kto1k.pth', + hf_hub_id='timm/'), 'beit_large_patch16_384.in22k_ft_in22k_in1k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_384_pt22k_ft22kto1k.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), crop_pct=1.0, ), 'beit_large_patch16_512.in22k_ft_in22k_in1k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_512_pt22k_ft22kto1k.pth', + hf_hub_id='timm/', input_size=(3, 512, 512), crop_pct=1.0, ), 'beit_large_patch16_224.in22k_ft_in22k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beit/beit_large_patch16_224_pt22k_ft22k.pth', + hf_hub_id='timm/', num_classes=21841, ), 'beitv2_base_patch16_224.in1k_ft_in22k_in1k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_base_patch16_224_pt1k_ft21kto1k.pth', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD ), 'beitv2_base_patch16_224.in1k_ft_in22k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_base_patch16_224_pt1k_ft21k.pth', - num_classes=21841, - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD + hf_hub_id='timm/', + num_classes=21841, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD ), 'beitv2_large_patch16_224.in1k_ft_in22k_in1k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_large_patch16_224_pt1k_ft21kto1k.pth', - crop_pct=0.95, - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD + hf_hub_id='timm/', + crop_pct=0.95, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD ), 'beitv2_large_patch16_224.in1k_ft_in22k': _cfg( url='https://conversationhub.blob.core.windows.net/beit-share-public/beitv2/beitv2_large_patch16_224_pt1k_ft21k.pth', - num_classes=21841, - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD + hf_hub_id='timm/', + num_classes=21841, mean=IMAGENET_DEFAULT_MEAN, 
std=IMAGENET_DEFAULT_STD ), 'eva_giant_patch14_224.clip_ft_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz224_ftcls_89p1.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz224_ftcls_89p1.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, ), 'eva_giant_patch14_336.clip_ft_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz336_ftcls_89p4.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_clip_vis_enc_sz336_ftcls_89p4.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'), 'eva_giant_patch14_336.m30m_ft_in22k_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_336px_psz14_ema_89p6.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_336px_psz14_ema_89p6.pt', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'), 'eva_giant_patch14_560.m30m_ft_in22k_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_560px_psz14_ema_89p7.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_21k_1k_560px_psz14_ema_89p7.pt', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, input_size=(3, 560, 560), crop_pct=1.0, crop_mode='squash'), }) diff --git a/timm/models/byobnet.py b/timm/models/byobnet.py index 0e5c9c7f..1c7f1137 100644 --- a/timm/models/byobnet.py +++ b/timm/models/byobnet.py @@ -218,7 +218,10 @@ def _rep_vgg_bcfg(d=(4, 6, 16, 1), wf=(1., 1., 1., 1.), groups=0): def interleave_blocks( - types: Tuple[str, str], d, every: Union[int, List[int]] = 1, first: bool = False, **kwargs + types: Tuple[str, str], d, + every: Union[int, List[int]] = 1, + first: bool = False, + **kwargs, ) -> Tuple[ByoBlockCfg]: """ interleave 2 block types in stack """ @@ -962,9 +965,21 @@ class BasicBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), group_size=None, bottle_ratio=1.0, - downsample='avg', attn_last=True, linear_out=False, layers: LayerFn = None, drop_block=None, - drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + group_size=None, + bottle_ratio=1.0, + downsample='avg', + attn_last=True, + linear_out=False, + layers: LayerFn = None, + drop_block=None, + drop_path_rate=0., + ): super(BasicBlock, self).__init__() layers = layers or LayerFn() mid_chs = make_divisible(out_chs * bottle_ratio) @@ -983,7 +998,7 @@ class BasicBlock(nn.Module): self.act = nn.Identity() if linear_out else layers.act(inplace=True) def init_weights(self, zero_init_last: bool = False): - if zero_init_last and self.shortcut is not None: + if zero_init_last and self.shortcut is not None and getattr(self.conv2_kxk.bn, 'weight', None) is not None: nn.init.zeros_(self.conv2_kxk.bn.weight) for attn in (self.attn, self.attn_last): if hasattr(attn, 'reset_parameters'): @@ -1005,9 +1020,23 @@ class BottleneckBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bottle_ratio=1., group_size=None, - downsample='avg', attn_last=False, linear_out=False, extra_conv=False, bottle_in=False, - layers: LayerFn = None, drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + bottle_ratio=1., + group_size=None, + downsample='avg', + attn_last=False, + linear_out=False, + extra_conv=False, + bottle_in=False, + layers: LayerFn = None, + drop_block=None, + 
drop_path_rate=0., + ): super(BottleneckBlock, self).__init__() layers = layers or LayerFn() mid_chs = make_divisible((in_chs if bottle_in else out_chs) * bottle_ratio) @@ -1031,7 +1060,7 @@ class BottleneckBlock(nn.Module): self.act = nn.Identity() if linear_out else layers.act(inplace=True) def init_weights(self, zero_init_last: bool = False): - if zero_init_last and self.shortcut is not None: + if zero_init_last and self.shortcut is not None and getattr(self.conv3_1x1.bn, 'weight', None) is not None: nn.init.zeros_(self.conv3_1x1.bn.weight) for attn in (self.attn, self.attn_last): if hasattr(attn, 'reset_parameters'): @@ -1063,9 +1092,21 @@ class DarkBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bottle_ratio=1.0, group_size=None, - downsample='avg', attn_last=True, linear_out=False, layers: LayerFn = None, drop_block=None, - drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + bottle_ratio=1.0, + group_size=None, + downsample='avg', + attn_last=True, + linear_out=False, + layers: LayerFn = None, + drop_block=None, + drop_path_rate=0., + ): super(DarkBlock, self).__init__() layers = layers or LayerFn() mid_chs = make_divisible(out_chs * bottle_ratio) @@ -1085,7 +1126,7 @@ class DarkBlock(nn.Module): self.act = nn.Identity() if linear_out else layers.act(inplace=True) def init_weights(self, zero_init_last: bool = False): - if zero_init_last and self.shortcut is not None: + if zero_init_last and self.shortcut is not None and getattr(self.conv2_kxk.bn, 'weight', None) is not None: nn.init.zeros_(self.conv2_kxk.bn.weight) for attn in (self.attn, self.attn_last): if hasattr(attn, 'reset_parameters'): @@ -1114,9 +1155,21 @@ class EdgeBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bottle_ratio=1.0, group_size=None, - downsample='avg', attn_last=False, linear_out=False, layers: LayerFn = None, - drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + bottle_ratio=1.0, + group_size=None, + downsample='avg', + attn_last=False, + linear_out=False, + layers: LayerFn = None, + drop_block=None, + drop_path_rate=0., + ): super(EdgeBlock, self).__init__() layers = layers or LayerFn() mid_chs = make_divisible(out_chs * bottle_ratio) @@ -1135,7 +1188,7 @@ class EdgeBlock(nn.Module): self.act = nn.Identity() if linear_out else layers.act(inplace=True) def init_weights(self, zero_init_last: bool = False): - if zero_init_last and self.shortcut is not None: + if zero_init_last and self.shortcut is not None and getattr(self.conv2_1x1.bn, 'weight', None) is not None: nn.init.zeros_(self.conv2_1x1.bn.weight) for attn in (self.attn, self.attn_last): if hasattr(attn, 'reset_parameters'): @@ -1162,8 +1215,19 @@ class RepVggBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bottle_ratio=1.0, group_size=None, - downsample='', layers: LayerFn = None, drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + bottle_ratio=1.0, + group_size=None, + downsample='', + layers: LayerFn = None, + drop_block=None, + drop_path_rate=0., + ): super(RepVggBlock, self).__init__() layers = layers or LayerFn() groups = num_groups(group_size, in_chs) @@ -1204,9 +1268,24 @@ class SelfAttnBlock(nn.Module): """ def __init__( - self, in_chs, out_chs, kernel_size=3, stride=1, dilation=(1, 1), bottle_ratio=1., 
group_size=None, - downsample='avg', extra_conv=False, linear_out=False, bottle_in=False, post_attn_na=True, - feat_size=None, layers: LayerFn = None, drop_block=None, drop_path_rate=0.): + self, + in_chs, + out_chs, + kernel_size=3, + stride=1, + dilation=(1, 1), + bottle_ratio=1., + group_size=None, + downsample='avg', + extra_conv=False, + linear_out=False, + bottle_in=False, + post_attn_na=True, + feat_size=None, + layers: LayerFn = None, + drop_block=None, + drop_path_rate=0., + ): super(SelfAttnBlock, self).__init__() assert layers is not None mid_chs = make_divisible((in_chs if bottle_in else out_chs) * bottle_ratio) @@ -1233,7 +1312,7 @@ class SelfAttnBlock(nn.Module): self.act = nn.Identity() if linear_out else layers.act(inplace=True) def init_weights(self, zero_init_last: bool = False): - if zero_init_last and self.shortcut is not None: + if zero_init_last and self.shortcut is not None and getattr(self.conv3_1x1.bn, 'weight', None) is not None: nn.init.zeros_(self.conv3_1x1.bn.weight) if hasattr(self.self_attn, 'reset_parameters'): self.self_attn.reset_parameters() @@ -1274,8 +1353,17 @@ def create_block(block: Union[str, nn.Module], **kwargs): class Stem(nn.Sequential): def __init__( - self, in_chs, out_chs, kernel_size=3, stride=4, pool='maxpool', - num_rep=3, num_act=None, chs_decay=0.5, layers: LayerFn = None): + self, + in_chs, + out_chs, + kernel_size=3, + stride=4, + pool='maxpool', + num_rep=3, + num_act=None, + chs_decay=0.5, + layers: LayerFn = None, + ): super().__init__() assert stride in (2, 4) layers = layers or LayerFn() @@ -1319,7 +1407,14 @@ class Stem(nn.Sequential): assert curr_stride == stride -def create_byob_stem(in_chs, out_chs, stem_type='', pool_type='', feat_prefix='stem', layers: LayerFn = None): +def create_byob_stem( + in_chs, + out_chs, + stem_type='', + pool_type='', + feat_prefix='stem', + layers: LayerFn = None, +): layers = layers or LayerFn() assert stem_type in ('', 'quad', 'quad2', 'tiered', 'deep', 'rep', '7x7', '3x3') if 'quad' in stem_type: @@ -1407,10 +1502,14 @@ def update_block_kwargs(block_kwargs: Dict[str, Any], block_cfg: ByoBlockCfg, mo def create_byob_stages( - cfg: ByoModelCfg, drop_path_rate: float, output_stride: int, stem_feat: Dict[str, Any], + cfg: ByoModelCfg, + drop_path_rate: float, + output_stride: int, + stem_feat: Dict[str, Any], feat_size: Optional[int] = None, layers: Optional[LayerFn] = None, - block_kwargs_fn: Optional[Callable] = update_block_kwargs): + block_kwargs_fn: Optional[Callable] = update_block_kwargs, +): layers = layers or LayerFn() feature_info = [] @@ -1485,12 +1584,38 @@ class ByobNet(nn.Module): Current assumption is that both stem and blocks are in conv-bn-act order (w/ block ending in act). """ def __init__( - self, cfg: ByoModelCfg, num_classes=1000, in_chans=3, global_pool='avg', output_stride=32, - zero_init_last=True, img_size=None, drop_rate=0., drop_path_rate=0.): + self, + cfg: ByoModelCfg, + num_classes=1000, + in_chans=3, + global_pool='avg', + output_stride=32, + img_size=None, + drop_rate=0., + drop_path_rate=0., + zero_init_last=True, + **kwargs, + ): + """ + + Args: + cfg (ByoModelCfg): Model architecture configuration + num_classes (int): Number of classifier classes (default: 1000) + in_chans (int): Number of input channels (default: 3) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + img_size (Union[int, Tuple[int]]): Image size for fixed image size models (i.e.
self-attn) + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + zero_init_last (bool): Zero-init last weight of residual path + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate self.grad_checkpointing = False + + cfg = replace(cfg, **kwargs) # overlay kwargs onto cfg layers = get_layer_fns(cfg) if cfg.fixed_input_size: assert img_size is not None, 'img_size argument is required for fixed input size model' diff --git a/timm/models/convnext.py b/timm/models/convnext.py index eea5782a..05e29a73 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -1,25 +1,51 @@ """ ConvNeXt -Paper: `A ConvNet for the 2020s` - https://arxiv.org/pdf/2201.03545.pdf - -Original code and weights from https://github.com/facebookresearch/ConvNeXt, original copyright below - -Model defs atto, femto, pico, nano and _ols / _hnf variants are timm specific. +Papers: +* `A ConvNet for the 2020s` - https://arxiv.org/pdf/2201.03545.pdf +@Article{liu2022convnet, + author = {Zhuang Liu and Hanzi Mao and Chao-Yuan Wu and Christoph Feichtenhofer and Trevor Darrell and Saining Xie}, + title = {A ConvNet for the 2020s}, + journal = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2022}, +} + +* `ConvNeXt-V2 - Co-designing and Scaling ConvNets with Masked Autoencoders` - https://arxiv.org/abs/2301.00808 +@article{Woo2023ConvNeXtV2, + title={ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders}, + author={Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon and Saining Xie}, + year={2023}, + journal={arXiv preprint arXiv:2301.00808}, +} + +Original code and weights from: +* https://github.com/facebookresearch/ConvNeXt, original copyright below +* https://github.com/facebookresearch/ConvNeXt-V2, original copyright below + +Model defs atto, femto, pico, nano and _ols / _hnf variants are timm originals. Modifications and additions for timm hacked together by / Copyright 2022, Ross Wightman """ +# ConvNeXt # Copyright (c) Meta Platforms, Inc. and affiliates. # All rights reserved. # This source code is licensed under the MIT license + +# ConvNeXt-V2 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree (Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)) +# No code was used directly from ConvNeXt-V2, however the weights are CC BY-NC 4.0 so beware if using commercially. 
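The `cfg = replace(cfg, **kwargs)` overlay added to the ByobNet constructor above (and to CspNet further below) leans on `dataclasses.replace`. A minimal sketch of that pattern, using a hypothetical simplified config in place of the real `ByoModelCfg` / `CspModelCfg` dataclasses:

    from dataclasses import dataclass, replace

    @dataclass
    class DemoCfg:
        # hypothetical stand-in for ByoModelCfg / CspModelCfg (the real cfgs have many more fields)
        act_layer: str = 'relu'
        zero_init_last: bool = True

    def build_model(cfg: DemoCfg, **kwargs):
        # overlay kwargs onto cfg: replace() returns a new instance with the named fields swapped
        cfg = replace(cfg, **kwargs)
        return cfg

    print(build_model(DemoCfg(), act_layer='silu'))
    # DemoCfg(act_layer='silu', zero_init_last=True)

Since `replace` raises `TypeError` for names that are not fields of the dataclass, unsupported kwargs fail loudly rather than being silently dropped.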
+ from collections import OrderedDict from functools import partial import torch import torch.nn as nn -from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import trunc_normal_, SelectAdaptivePool2d, DropPath, ConvMlp, Mlp, LayerNorm2d, LayerNorm, \ - create_conv2d, get_act_layer, make_divisible, to_ntuple +from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, OPENAI_CLIP_MEAN, OPENAI_CLIP_STD +from timm.layers import trunc_normal_, SelectAdaptivePool2d, DropPath, Mlp, GlobalResponseNormMlp, \ + LayerNorm2d, LayerNorm, create_conv2d, get_act_layer, make_divisible, to_ntuple from ._builder import build_model_with_cfg from ._manipulate import named_apply, checkpoint_seq from ._pretrained import generate_default_cfgs @@ -54,6 +80,7 @@ class ConvNeXtBlock(nn.Module): mlp_ratio=4, conv_mlp=False, conv_bias=True, + use_grn=False, ls_init_value=1e-6, act_layer='gelu', norm_layer=None, @@ -64,14 +91,13 @@ class ConvNeXtBlock(nn.Module): act_layer = get_act_layer(act_layer) if not norm_layer: norm_layer = LayerNorm2d if conv_mlp else LayerNorm - mlp_layer = ConvMlp if conv_mlp else Mlp + mlp_layer = partial(GlobalResponseNormMlp if use_grn else Mlp, use_conv=conv_mlp) self.use_conv_mlp = conv_mlp - self.conv_dw = create_conv2d( in_chs, out_chs, kernel_size=kernel_size, stride=stride, dilation=dilation, depthwise=True, bias=conv_bias) self.norm = norm_layer(out_chs) self.mlp = mlp_layer(out_chs, int(mlp_ratio * out_chs), act_layer=act_layer) - self.gamma = nn.Parameter(ls_init_value * torch.ones(out_chs)) if ls_init_value > 0 else None + self.gamma = nn.Parameter(ls_init_value * torch.ones(out_chs)) if ls_init_value is not None else None self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() def forward(self, x): @@ -106,6 +132,7 @@ class ConvNeXtStage(nn.Module): ls_init_value=1.0, conv_mlp=False, conv_bias=True, + use_grn=False, act_layer='gelu', norm_layer=None, norm_layer_cl=None @@ -138,8 +165,9 @@ class ConvNeXtStage(nn.Module): ls_init_value=ls_init_value, conv_mlp=conv_mlp, conv_bias=conv_bias, + use_grn=use_grn, act_layer=act_layer, - norm_layer=norm_layer if conv_mlp else norm_layer_cl + norm_layer=norm_layer if conv_mlp else norm_layer_cl, )) in_chs = out_chs self.blocks = nn.Sequential(*stage_blocks) @@ -156,16 +184,6 @@ class ConvNeXtStage(nn.Module): class ConvNeXt(nn.Module): r""" ConvNeXt A PyTorch impl of : `A ConvNet for the 2020s` - https://arxiv.org/pdf/2201.03545.pdf - - Args: - in_chans (int): Number of input image channels. Default: 3 - num_classes (int): Number of classes for classification head. Default: 1000 - depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3] - dims (tuple(int)): Feature dimension at each stage. Default: [96, 192, 384, 768] - drop_rate (float): Head dropout rate - drop_path_rate (float): Stochastic depth rate. Default: 0. - ls_init_value (float): Init value for Layer Scale. Default: 1e-6. - head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1. 
""" def __init__( @@ -184,21 +202,50 @@ class ConvNeXt(nn.Module): head_norm_first=False, conv_mlp=False, conv_bias=True, + use_grn=False, act_layer='gelu', norm_layer=None, + norm_eps=None, drop_rate=0., drop_path_rate=0., ): + """ + Args: + in_chans (int): Number of input image channels (default: 3) + num_classes (int): Number of classes for classification head (default: 1000) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + depths (tuple(int)): Number of blocks at each stage. (default: [3, 3, 9, 3]) + dims (tuple(int)): Feature dimension at each stage. (default: [96, 192, 384, 768]) + kernel_sizes (Union[int, List[int]]: Depthwise convolution kernel-sizes for each stage (default: 7) + ls_init_value (float): Init value for Layer Scale (default: 1e-6) + stem_type (str): Type of stem (default: 'patch') + patch_size (int): Stem patch size for patch stem (default: 4) + head_init_scale (float): Init scaling value for classifier weights and biases (default: 1) + head_norm_first (bool): Apply normalization before global pool + head (default: False) + conv_mlp (bool): Use 1x1 conv in MLP, improves speed for small networks w/ chan last (default: False) + conv_bias (bool): Use bias layers w/ all convolutions (default: True) + use_grn (bool): Use Global Response Norm (ConvNeXt-V2) in MLP (default: False) + act_layer (Union[str, nn.Module]): Activation Layer + norm_layer (Union[str, nn.Module]): Normalization Layer + drop_rate (float): Head dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth rate (default: 0.) + """ super().__init__() assert output_stride in (8, 16, 32) kernel_sizes = to_ntuple(4)(kernel_sizes) if norm_layer is None: norm_layer = LayerNorm2d norm_layer_cl = norm_layer if conv_mlp else LayerNorm + if norm_eps is not None: + norm_layer = partial(norm_layer, eps=norm_eps) + norm_layer_cl = partial(norm_layer_cl, eps=norm_eps) else: assert conv_mlp,\ 'If a norm_layer is specified, conv MLP must be used so all norm expect rank-4, channels-first input' norm_layer_cl = norm_layer + if norm_eps is not None: + norm_layer_cl = partial(norm_layer_cl, eps=norm_eps) self.num_classes = num_classes self.drop_rate = drop_rate @@ -209,7 +256,7 @@ class ConvNeXt(nn.Module): # NOTE: this stem is a minimal form of ViT PatchEmbed, as used in SwinTransformer w/ patch_size = 4 self.stem = nn.Sequential( nn.Conv2d(in_chans, dims[0], kernel_size=patch_size, stride=patch_size, bias=conv_bias), - norm_layer(dims[0]) + norm_layer(dims[0]), ) stem_stride = patch_size else: @@ -247,9 +294,10 @@ class ConvNeXt(nn.Module): ls_init_value=ls_init_value, conv_mlp=conv_mlp, conv_bias=conv_bias, + use_grn=use_grn, act_layer=act_layer, norm_layer=norm_layer, - norm_layer_cl=norm_layer_cl + norm_layer_cl=norm_layer_cl, )) prev_chs = out_chs # NOTE feature_info use currently assumes stage 0 == stride 1, rest are stride 2 @@ -334,7 +382,15 @@ def checkpoint_filter_fn(state_dict, model): return state_dict # non-FB checkpoint if 'model' in state_dict: state_dict = state_dict['model'] + out_dict = {} + if 'visual.trunk.stem.0.weight' in state_dict: + out_dict = {k.replace('visual.trunk.', ''): v for k, v in state_dict.items() if k.startswith('visual.trunk.')} + if 'visual.head.proj.weight' in state_dict: + out_dict['head.fc.weight'] = state_dict['visual.head.proj.weight'] + out_dict['head.fc.bias'] = torch.zeros(state_dict['visual.head.proj.weight'].shape[0]) + return out_dict + import re for k, v in 
state_dict.items(): k = k.replace('downsample_layers.0.', 'stem.') @@ -342,6 +398,10 @@ def checkpoint_filter_fn(state_dict, model): k = re.sub(r'downsample_layers.([0-9]+).([0-9]+)', r'stages.\1.downsample.\2', k) k = k.replace('dwconv', 'conv_dw') k = k.replace('pwconv', 'mlp.fc') + if 'grn' in k: + k = k.replace('grn.beta', 'mlp.grn.bias') + k = k.replace('grn.gamma', 'mlp.grn.weight') + v = v.reshape(v.shape[-1]) k = k.replace('head.', 'head.fc.') if k.startswith('norm.'): k = k.replace('norm', 'head.norm') @@ -349,10 +409,16 @@ def checkpoint_filter_fn(state_dict, model): model_shape = model.state_dict()[k].shape v = v.reshape(model_shape) out_dict[k] = v + return out_dict def _create_convnext(variant, pretrained=False, **kwargs): + if kwargs.get('pretrained_cfg', '') == 'fcmae': + # NOTE fcmae pretrained weights have no classifier or final norm-layer (`head.norm`) + # This is workaround loading with num_classes=0 w/o removing norm-layer. + kwargs.setdefault('pretrained_strict', False) + model = build_model_with_cfg( ConvNeXt, variant, pretrained, pretrained_filter_fn=checkpoint_filter_fn, @@ -361,7 +427,6 @@ def _create_convnext(variant, pretrained=False, **kwargs): return model - def _cfg(url='', **kwargs): return { 'url': url, @@ -373,92 +438,295 @@ def _cfg(url='', **kwargs): } +def _cfgv2(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), + 'crop_pct': 0.875, 'interpolation': 'bicubic', + 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, + 'first_conv': 'stem.0', 'classifier': 'head.fc', + 'license': 'cc-by-nc-4.0', 'paper_ids': 'arXiv:2301.00808', + 'paper_name': 'ConvNeXt-V2: Co-designing and Scaling ConvNets with Masked Autoencoders', + 'origin_url': 'https://github.com/facebookresearch/ConvNeXt-V2', + **kwargs + } + + default_cfgs = generate_default_cfgs({ # timm specific variants - 'convnext_atto.timm_in1k': _cfg( + 'convnext_atto.d2_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_atto_d2-01bb0f51.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=0.95), - 'convnext_atto_ols.timm_in1k': _cfg( + 'convnext_atto_ols.a2_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_atto_ols_a2-78d1c8f3.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=0.95), - 'convnext_femto.timm_in1k': _cfg( + 'convnext_femto.d1_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_femto_d1-d71d5b4c.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=0.95), - 'convnext_femto_ols.timm_in1k': _cfg( + 'convnext_femto_ols.d1_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_femto_ols_d1-246bf2ed.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=0.95), - 'convnext_pico.timm_in1k': _cfg( + 'convnext_pico.d1_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_pico_d1-10ad7f0d.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=0.95), - 'convnext_pico_ols.timm_in1k': _cfg( + 'convnext_pico_ols.d1_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_pico_ols_d1-611f0ca7.pth', + hf_hub_id='timm/', + crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 
'convnext_nano.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), - 'convnext_nano.timm_in1k': _cfg( + 'convnext_nano.d1h_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_nano_d1h-7eb4bdea.pth', + hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), - 'convnext_nano_ols.timm_in1k': _cfg( + 'convnext_nano_ols.d1h_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_nano_ols_d1h-ae424a9a.pth', + hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), - 'convnext_tiny_hnf.timm_in1k': _cfg( + 'convnext_tiny_hnf.a2h_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-rsb-weights/convnext_tiny_hnf_a2h-ab7e9df2.pth', + hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnext_tiny.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnext_small.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + + 'convnext_nano.in12k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, num_classes=11821), + 'convnext_tiny.in12k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, num_classes=11821), + 'convnext_small.in12k': _cfg( + hf_hub_id='timm/', + crop_pct=0.95, num_classes=11821), 'convnext_tiny.fb_in1k': _cfg( url="https://dl.fbaipublicfiles.com/convnext/convnext_tiny_1k_224_ema.pth", + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_small.fb_in1k': _cfg( url="https://dl.fbaipublicfiles.com/convnext/convnext_small_1k_224_ema.pth", + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_base.fb_in1k': _cfg( url="https://dl.fbaipublicfiles.com/convnext/convnext_base_1k_224_ema.pth", + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_large.fb_in1k': _cfg( url="https://dl.fbaipublicfiles.com/convnext/convnext_large_1k_224_ema.pth", + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_xlarge.untrained': _cfg(), + 'convnext_xxlarge.untrained': _cfg(), 'convnext_tiny.fb_in22k_ft_in1k': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_1k_224.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_small.fb_in22k_ft_in1k': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_1k_224.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_base.fb_in22k_ft_in1k': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_1k_224.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_large.fb_in22k_ft_in1k': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_1k_224.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_xlarge.fb_in22k_ft_in1k': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_1k_224_ema.pth', + hf_hub_id='timm/', test_input_size=(3, 288, 288), test_crop_pct=1.0), 'convnext_tiny.fb_in22k_ft_in1k_384': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_1k_384.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'convnext_small..fb_in22k_ft_in1k_384': _cfg( + 
'convnext_small.fb_in22k_ft_in1k_384': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_1k_384.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'convnext_base.fb_in22k_ft_in1k_384': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_1k_384.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'convnext_large.fb_in22k_ft_in1k_384': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_1k_384.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'convnext_xlarge.fb_in22k_ft_in1k_384': _cfg( url='https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_1k_384_ema.pth', + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + + 'convnext_tiny.fb_in22k': _cfg( + url="https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_224.pth", + hf_hub_id='timm/', + num_classes=21841), + 'convnext_small.fb_in22k': _cfg( + url="https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_224.pth", + hf_hub_id='timm/', + num_classes=21841), + 'convnext_base.fb_in22k': _cfg( + url="https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_224.pth", + hf_hub_id='timm/', + num_classes=21841), + 'convnext_large.fb_in22k': _cfg( + url="https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_224.pth", + hf_hub_id='timm/', + num_classes=21841), + 'convnext_xlarge.fb_in22k': _cfg( + url="https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_224.pth", + hf_hub_id='timm/', + num_classes=21841), + + 'convnextv2_nano.fcmae_ft_in22k_in1k': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_nano_22k_224_ema.pt', + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_nano.fcmae_ft_in22k_in1k_384': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_nano_22k_384_ema.pt', + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnextv2_tiny.fcmae_ft_in22k_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_tiny_22k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_tiny.fcmae_ft_in22k_in1k_384': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_tiny_22k_384_ema.pt", + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnextv2_base.fcmae_ft_in22k_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_base_22k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_base.fcmae_ft_in22k_in1k_384': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_base_22k_384_ema.pt", + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnextv2_large.fcmae_ft_in22k_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_large_22k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_large.fcmae_ft_in22k_in1k_384': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_large_22k_384_ema.pt", + hf_hub_id='timm/', input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, 
crop_mode='squash'), + 'convnextv2_huge.fcmae_ft_in22k_in1k_384': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_huge_22k_384_ema.pt", + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnextv2_huge.fcmae_ft_in22k_in1k_512': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im22k/convnextv2_huge_22k_512_ema.pt", + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(15, 15), crop_pct=1.0, crop_mode='squash'), + + 'convnextv2_atto.fcmae_ft_in1k': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_atto_1k_224_ema.pt', + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=0.95), + 'convnextv2_femto.fcmae_ft_in1k': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_femto_1k_224_ema.pt', + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=0.95), + 'convnextv2_pico.fcmae_ft_in1k': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_pico_1k_224_ema.pt', + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=0.95), + 'convnextv2_nano.fcmae_ft_in1k': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_nano_1k_224_ema.pt', + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_tiny.fcmae_ft_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_tiny_1k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_base.fcmae_ft_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_base_1k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_large.fcmae_ft_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_large_1k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnextv2_huge.fcmae_ft_in1k': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/im1k/convnextv2_huge_1k_224_ema.pt", + hf_hub_id='timm/', + test_input_size=(3, 288, 288), test_crop_pct=1.0), - 'convnext_tiny_in22k.fb_in22k': _cfg( - url="https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_224.pth", num_classes=21841), - 'convnext_small_in22k.fb_in22k': _cfg( - url="https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_224.pth", num_classes=21841), - 'convnext_base_in22k.fb_in22k': _cfg( - url="https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_224.pth", num_classes=21841), - 'convnext_large_in22k.fb_in22k': _cfg( - url="https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_224.pth", num_classes=21841), - 'convnext_xlarge_in22k.fb_in22k': _cfg( - url="https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_224.pth", num_classes=21841), + 'convnextv2_atto.fcmae': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_atto_1k_224_fcmae.pt', + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_femto.fcmae': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_femto_1k_224_fcmae.pt', + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_pico.fcmae': _cfgv2( + url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_pico_1k_224_fcmae.pt', + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_nano.fcmae': _cfgv2( + 
url='https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_nano_1k_224_fcmae.pt', + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_tiny.fcmae': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_tiny_1k_224_fcmae.pt", + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_base.fcmae': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_base_1k_224_fcmae.pt", + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_large.fcmae': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_large_1k_224_fcmae.pt", + hf_hub_id='timm/', + num_classes=0), + 'convnextv2_huge.fcmae': _cfgv2( + url="https://dl.fbaipublicfiles.com/convnext/convnextv2/pt_only/convnextv2_huge_1k_224_fcmae.pt", + hf_hub_id='timm/', + num_classes=0), + + 'convnextv2_small.untrained': _cfg(), + + # CLIP based weights, original image tower weights and fine-tunes + 'convnext_base.clip_laion2b': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laion2b_augreg': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona_320': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), + 'convnext_base.clip_laiona_augreg_320': _cfg( + hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), }) @@ -576,3 +844,82 @@ def convnext_xlarge(pretrained=False, **kwargs): model_args = dict(depths=[3, 3, 27, 3], dims=[256, 512, 1024, 2048], **kwargs) model = _create_convnext('convnext_xlarge', pretrained=pretrained, **model_args) return model + + +@register_model +def convnext_xxlarge(pretrained=False, **kwargs): + model_args = dict(depths=[3, 4, 30, 3], dims=[384, 768, 1536, 3072], **kwargs) + model = _create_convnext('convnext_xxlarge', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_atto(pretrained=False, **kwargs): + # timm atto variant (NOTE: still tweaking depths, will vary between 3-4M param, current is 3.7M) + model_args = dict( + depths=(2, 2, 6, 2), dims=(40, 80, 160, 320), use_grn=True, ls_init_value=None, conv_mlp=True, **kwargs) + model = _create_convnext('convnextv2_atto', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_femto(pretrained=False, **kwargs): + # timm femto variant + model_args = dict( + depths=(2, 2, 6, 2), dims=(48, 96, 192, 384), use_grn=True, ls_init_value=None, conv_mlp=True, **kwargs) + model = _create_convnext('convnextv2_femto', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_pico(pretrained=False,
**kwargs): + # timm pico variant + model_args = dict( + depths=(2, 2, 6, 2), dims=(64, 128, 256, 512), use_grn=True, ls_init_value=None, conv_mlp=True, **kwargs) + model = _create_convnext('convnextv2_pico', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_nano(pretrained=False, **kwargs): + # timm nano variant with standard stem and head + model_args = dict( + depths=(2, 2, 8, 2), dims=(80, 160, 320, 640), use_grn=True, ls_init_value=None, conv_mlp=True, **kwargs) + model = _create_convnext('convnextv2_nano', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_tiny(pretrained=False, **kwargs): + model_args = dict( + depths=(3, 3, 9, 3), dims=(96, 192, 384, 768), use_grn=True, ls_init_value=None, **kwargs) + model = _create_convnext('convnextv2_tiny', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_small(pretrained=False, **kwargs): + model_args = dict(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768], use_grn=True, ls_init_value=None, **kwargs) + model = _create_convnext('convnextv2_small', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_base(pretrained=False, **kwargs): + model_args = dict(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], use_grn=True, ls_init_value=None, **kwargs) + model = _create_convnext('convnextv2_base', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_large(pretrained=False, **kwargs): + model_args = dict(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], use_grn=True, ls_init_value=None, **kwargs) + model = _create_convnext('convnextv2_large', pretrained=pretrained, **model_args) + return model + + +@register_model +def convnextv2_huge(pretrained=False, **kwargs): + model_args = dict(depths=[3, 3, 27, 3], dims=[352, 704, 1408, 2816], use_grn=True, ls_init_value=None, **kwargs) + model = _create_convnext('convnextv2_huge', pretrained=pretrained, **model_args) + return model \ No newline at end of file diff --git a/timm/models/cspnet.py b/timm/models/cspnet.py index 280f929e..26ec54d9 100644 --- a/timm/models/cspnet.py +++ b/timm/models/cspnet.py @@ -12,7 +12,7 @@ Reference impl via darknet cfg files at https://github.com/WongKinYiu/CrossStage Hacked together by / Copyright 2020 Ross Wightman """ -from dataclasses import dataclass, asdict +from dataclasses import dataclass, asdict, replace from functools import partial from typing import Any, Dict, Optional, Tuple, Union @@ -518,7 +518,7 @@ class CrossStage(nn.Module): cross_linear=False, block_dpr=None, block_fn=BottleneckBlock, - **block_kwargs + **block_kwargs, ): super(CrossStage, self).__init__() first_dilation = first_dilation or dilation @@ -558,7 +558,7 @@ class CrossStage(nn.Module): bottle_ratio=bottle_ratio, groups=groups, drop_path=block_dpr[i] if block_dpr is not None else 0., - **block_kwargs + **block_kwargs, )) prev_chs = block_out_chs @@ -597,7 +597,7 @@ class CrossStage3(nn.Module): cross_linear=False, block_dpr=None, block_fn=BottleneckBlock, - **block_kwargs + **block_kwargs, ): super(CrossStage3, self).__init__() first_dilation = first_dilation or dilation @@ -635,7 +635,7 @@ class CrossStage3(nn.Module): bottle_ratio=bottle_ratio, groups=groups, drop_path=block_dpr[i] if block_dpr is not None else 0., - **block_kwargs + **block_kwargs, )) prev_chs = block_out_chs @@ -668,7 +668,7 @@ class DarkStage(nn.Module): avg_down=False, block_fn=BottleneckBlock, block_dpr=None, - **block_kwargs + **block_kwargs, 
): super(DarkStage, self).__init__() first_dilation = first_dilation or dilation @@ -715,7 +715,7 @@ def create_csp_stem( padding='', act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, - aa_layer=None + aa_layer=None, ): stem = nn.Sequential() feature_info = [] @@ -738,7 +738,7 @@ def create_csp_stem( stride=conv_stride, padding=padding if i == 0 else '', act_layer=act_layer, - norm_layer=norm_layer + norm_layer=norm_layer, )) stem_stride *= conv_stride prev_chs = chs @@ -800,7 +800,7 @@ def create_csp_stages( cfg: CspModelCfg, drop_path_rate: float, output_stride: int, - stem_feat: Dict[str, Any] + stem_feat: Dict[str, Any], ): cfg_dict = asdict(cfg.stages) num_stages = len(cfg.stages.depth) @@ -868,12 +868,27 @@ class CspNet(nn.Module): global_pool='avg', drop_rate=0., drop_path_rate=0., - zero_init_last=True + zero_init_last=True, + **kwargs, ): + """ + Args: + cfg (CspModelCfg): Model architecture configuration + in_chans (int): Number of input channels (default: 3) + num_classes (int): Number of classifier classes (default: 1000) + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + global_pool (str): Global pooling type (default: 'avg') + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + zero_init_last (bool): Zero-init last weight of residual path + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate assert output_stride in (8, 16, 32) + + cfg = replace(cfg, **kwargs) # overlay kwargs onto cfg layer_args = dict( act_layer=cfg.act_layer, norm_layer=cfg.norm_layer, diff --git a/timm/models/davit.py b/timm/models/davit.py index 0ccd2ae0..f57cc5ae 100644 --- a/timm/models/davit.py +++ b/timm/models/davit.py @@ -12,6 +12,7 @@ DaViT model defs and weights adapted from https://github.com/dingmyu/davit, orig # All rights reserved. 
# This source code is licensed under the MIT license +from collections import OrderedDict import itertools import torch @@ -20,7 +21,7 @@ import torch.nn.functional as F from torch import Tensor from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import DropPath, to_2tuple, trunc_normal_, ClassifierHead, Mlp +from timm.layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, Mlp # ClassifierHead from ._builder import build_model_with_cfg from ._features import FeatureInfo from ._features_fx import register_notrace_function @@ -407,7 +408,11 @@ class DaViTStage(nn.Module): stage_blocks.append(nn.Sequential(*dual_attention_block)) self.blocks = nn.Sequential(*stage_blocks) - + + @torch.jit.ignore + def set_grad_checkpointing(self, enable=True): + self.grad_checkpointing = enable + def forward(self, x : Tensor): x = self.patch_embed(x) if self.grad_checkpointing and not torch.jit.is_scripting(): @@ -455,7 +460,8 @@ class DaViT(nn.Module): drop_rate=0., attn_drop_rate=0., num_classes=1000, - global_pool='avg' + global_pool='avg', + head_norm_first=False, ): super().__init__() @@ -503,11 +509,19 @@ class DaViT(nn.Module): stages.append(stage) self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.{stage_id}')] - self.stages = nn.Sequential(*stages) - self.norms = norm_layer(self.num_features) - self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate) + # if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets + # otherwise pool -> norm -> fc, the default DaViT order, similar to ConvNeXt + # FIXME generalize this structure to ClassifierHead + self.norm_pre = norm_layer(self.num_features) if head_norm_first else nn.Identity() + self.head = nn.Sequential(OrderedDict([ + ('global_pool', SelectAdaptivePool2d(pool_type=global_pool)), + ('norm', nn.Identity() if head_norm_first else norm_layer(self.num_features)), + ('flatten', nn.Flatten(1) if global_pool else nn.Identity()), + ('drop', nn.Dropout(self.drop_rate)), + ('fc', nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity())])) + self.apply(self._init_weights) def _init_weights(self, m): @@ -522,40 +536,44 @@ class DaViT(nn.Module): @torch.jit.ignore def set_grad_checkpointing(self, enable=True): self.grad_checkpointing = enable + for stage in self.stages: + stage.set_grad_checkpointing(enable=enable) @torch.jit.ignore def get_classifier(self): return self.head.fc def reset_classifier(self, num_classes, global_pool=None): - self.num_classes = num_classes - if global_pool is None: - global_pool = self.head.global_pool.pool_type - self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate) + if global_pool is not None: + self.head.global_pool = SelectAdaptivePool2d(pool_type=global_pool) + self.head.flatten = nn.Flatten(1) if global_pool else nn.Identity() + self.head.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity() def forward_features(self, x): x = self.patch_embed(x) - x = self.stages(x) - # take final feature and norm - x = self.norms(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) - #H, W = sizes[-1] - #x = x.view(-1, H, W, self.embed_dims[-1]).permute(0, 3, 1, 2).contiguous() + if self.grad_checkpointing and not torch.jit.is_scripting(): + x = checkpoint_seq(self.stages, x) + else: + x = self.stages(x) + x = self.norm_pre(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) return x def 
forward_head(self, x, pre_logits: bool = False): - return self.head(x, pre_logits=pre_logits) + x = self.head.global_pool(x) + x = self.head.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + x = self.head.flatten(x) + x = self.head.drop(x) + return x if pre_logits else self.head.fc(x) - def forward_classifier(self, x): + def forward(self, x): x = self.forward_features(x) x = self.forward_head(x) return x - - def forward(self, x): - return self.forward_classifier(x) + def checkpoint_filter_fn(state_dict, model): """ Remap MSFT checkpoints -> timm """ - if 'head.norm.weight' in state_dict: + if 'head' in state_dict: return state_dict # non-MSFT checkpoint if 'state_dict' in state_dict: @@ -569,6 +587,7 @@ def checkpoint_filter_fn(state_dict, model): k = re.sub(r'main_blocks.([0-9]+)', r'stages.\1.blocks', k) k = k.replace('stages.0.patch_embed', 'patch_embed') k = k.replace('head.', 'head.fc.') + k = k.replace('norms.', 'head.norm.') k = k.replace('cpe.0', 'cpe1') k = k.replace('cpe.1', 'cpe2') out_dict[k] = v @@ -577,8 +596,6 @@ def checkpoint_filter_fn(state_dict, model): def _create_davit(variant, pretrained=False, **kwargs): - - default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1)))) out_indices = kwargs.pop('out_indices', default_out_indices) @@ -594,11 +611,11 @@ def _create_davit(variant, pretrained=False, **kwargs): -def _cfg(url='', **kwargs): # not sure how this should be set up +def _cfg(url='', **kwargs): return { 'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), - 'crop_pct': 0.875, 'interpolation': 'bilinear', + 'crop_pct': 0.850, 'interpolation': 'bicubic', 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, 'first_conv': 'patch_embed.proj', 'classifier': 'head.fc', **kwargs diff --git a/timm/models/densenet.py b/timm/models/densenet.py index e731f7b0..ccbb491c 100644 --- a/timm/models/densenet.py +++ b/timm/models/densenet.py @@ -12,7 +12,7 @@ import torch.utils.checkpoint as cp from torch.jit.annotations import List from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import BatchNormAct2d, create_norm_act_layer, BlurPool2d, create_classifier +from timm.layers import BatchNormAct2d, get_norm_act_layer, BlurPool2d, create_classifier from ._builder import build_model_with_cfg from ._manipulate import MATCH_PREV_GROUP from ._registry import register_model @@ -115,8 +115,15 @@ class DenseBlock(nn.ModuleDict): _version = 2 def __init__( - self, num_layers, num_input_features, bn_size, growth_rate, norm_layer=BatchNormAct2d, - drop_rate=0., memory_efficient=False): + self, + num_layers, + num_input_features, + bn_size, + growth_rate, + norm_layer=BatchNormAct2d, + drop_rate=0., + memory_efficient=False, + ): super(DenseBlock, self).__init__() for i in range(num_layers): layer = DenseLayer( @@ -165,12 +172,25 @@ class DenseNet(nn.Module): """ def __init__( - self, growth_rate=32, block_config=(6, 12, 24, 16), num_classes=1000, in_chans=3, global_pool='avg', - bn_size=4, stem_type='', norm_layer=BatchNormAct2d, aa_layer=None, drop_rate=0, - memory_efficient=False, aa_stem_only=True): + self, + growth_rate=32, + block_config=(6, 12, 24, 16), + num_classes=1000, + in_chans=3, + global_pool='avg', + bn_size=4, + stem_type='', + act_layer='relu', + norm_layer='batchnorm2d', + aa_layer=None, + drop_rate=0, + memory_efficient=False, + aa_stem_only=True, + ): self.num_classes = num_classes self.drop_rate = drop_rate super(DenseNet, self).__init__() + norm_layer = 
get_norm_act_layer(norm_layer, act_layer=act_layer) # Stem deep_stem = 'deep' in stem_type # 3x3 deep stem @@ -226,8 +246,11 @@ class DenseNet(nn.Module): dict(num_chs=num_features, reduction=current_stride, module='features.' + module_name)] current_stride *= 2 trans = DenseTransition( - num_input_features=num_features, num_output_features=num_features // 2, - norm_layer=norm_layer, aa_layer=transition_aa_layer) + num_input_features=num_features, + num_output_features=num_features // 2, + norm_layer=norm_layer, + aa_layer=transition_aa_layer, + ) self.features.add_module(f'transition{i + 1}', trans) num_features = num_features // 2 @@ -322,8 +345,8 @@ def densenetblur121d(pretrained=False, **kwargs): `"Densely Connected Convolutional Networks" ` """ model = _create_densenet( - 'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, stem_type='deep', - aa_layer=BlurPool2d, **kwargs) + 'densenetblur121d', growth_rate=32, block_config=(6, 12, 24, 16), pretrained=pretrained, + stem_type='deep', aa_layer=BlurPool2d, **kwargs) return model @@ -382,11 +405,9 @@ def densenet264(pretrained=False, **kwargs): def densenet264d_iabn(pretrained=False, **kwargs): r"""Densenet-264 model with deep stem and Inplace-ABN """ - def norm_act_fn(num_features, **kwargs): - return create_norm_act_layer('iabn', num_features, act_layer='leaky_relu', **kwargs) model = _create_densenet( 'densenet264d_iabn', growth_rate=48, block_config=(6, 12, 64, 48), stem_type='deep', - norm_layer=norm_act_fn, pretrained=pretrained, **kwargs) + norm_layer='iabn', act_layer='leaky_relu', pretrained=pretrained, **kwargs) return model diff --git a/timm/models/dpn.py b/timm/models/dpn.py index 87bd918f..29a7a7e8 100644 --- a/timm/models/dpn.py +++ b/timm/models/dpn.py @@ -15,7 +15,7 @@ import torch.nn as nn import torch.nn.functional as F from timm.data import IMAGENET_DPN_MEAN, IMAGENET_DPN_STD, IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier +from timm.layers import BatchNormAct2d, ConvNormAct, create_conv2d, create_classifier, get_norm_act_layer from ._builder import build_model_with_cfg from ._registry import register_model @@ -33,6 +33,7 @@ def _cfg(url='', **kwargs): default_cfgs = { + 'dpn48b': _cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'dpn68': _cfg( url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68-66bebafa7.pth'), 'dpn68b': _cfg( @@ -82,7 +83,16 @@ class BnActConv2d(nn.Module): class DualPathBlock(nn.Module): def __init__( - self, in_chs, num_1x1_a, num_3x3_b, num_1x1_c, inc, groups, block_type='normal', b=False): + self, + in_chs, + num_1x1_a, + num_3x3_b, + num_1x1_c, + inc, + groups, + block_type='normal', + b=False, + ): super(DualPathBlock, self).__init__() self.num_1x1_c = num_1x1_c self.inc = inc @@ -167,16 +177,31 @@ class DualPathBlock(nn.Module): class DPN(nn.Module): def __init__( - self, small=False, num_init_features=64, k_r=96, groups=32, global_pool='avg', - b=False, k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), output_stride=32, - num_classes=1000, in_chans=3, drop_rate=0., fc_act_layer=nn.ELU): + self, + k_sec=(3, 4, 20, 3), + inc_sec=(16, 32, 24, 128), + k_r=96, + groups=32, + num_classes=1000, + in_chans=3, + output_stride=32, + global_pool='avg', + small=False, + num_init_features=64, + b=False, + drop_rate=0., + norm_layer='batchnorm2d', + act_layer='relu', + fc_act_layer='elu', + ): super(DPN, self).__init__() self.num_classes = 
num_classes self.drop_rate = drop_rate self.b = b assert output_stride == 32 # FIXME look into dilation support - norm_layer = partial(BatchNormAct2d, eps=.001) - fc_norm_layer = partial(BatchNormAct2d, eps=.001, act_layer=fc_act_layer, inplace=False) + + norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=act_layer), eps=.001) + fc_norm_layer = partial(get_norm_act_layer(norm_layer, act_layer=fc_act_layer), eps=.001, inplace=False) bw_factor = 1 if small else 4 blocks = OrderedDict() @@ -291,49 +316,57 @@ def _create_dpn(variant, pretrained=False, **kwargs): **kwargs) +@register_model +def dpn48b(pretrained=False, **kwargs): + model_kwargs = dict( + small=True, num_init_features=10, k_r=128, groups=32, + b=True, k_sec=(3, 4, 6, 3), inc_sec=(16, 32, 32, 64), act_layer='silu') + return _create_dpn('dpn48b', pretrained=pretrained, **dict(model_kwargs, **kwargs)) + + @register_model def dpn68(pretrained=False, **kwargs): model_kwargs = dict( small=True, num_init_features=10, k_r=128, groups=32, - k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs) - return _create_dpn('dpn68', pretrained=pretrained, **model_kwargs) + k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64)) + return _create_dpn('dpn68', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn68b(pretrained=False, **kwargs): model_kwargs = dict( small=True, num_init_features=10, k_r=128, groups=32, - b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64), **kwargs) - return _create_dpn('dpn68b', pretrained=pretrained, **model_kwargs) + b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64)) + return _create_dpn('dpn68b', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn92(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=64, k_r=96, groups=32, - k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128), **kwargs) - return _create_dpn('dpn92', pretrained=pretrained, **model_kwargs) + k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128)) + return _create_dpn('dpn92', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn98(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=96, k_r=160, groups=40, - k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128), **kwargs) - return _create_dpn('dpn98', pretrained=pretrained, **model_kwargs) + k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128)) + return _create_dpn('dpn98', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn131(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=128, k_r=160, groups=40, - k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128), **kwargs) - return _create_dpn('dpn131', pretrained=pretrained, **model_kwargs) + k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128)) + return _create_dpn('dpn131', pretrained=pretrained, **dict(model_kwargs, **kwargs)) @register_model def dpn107(pretrained=False, **kwargs): model_kwargs = dict( num_init_features=128, k_r=200, groups=50, - k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128), **kwargs) - return _create_dpn('dpn107', pretrained=pretrained, **model_kwargs) + k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128)) + return _create_dpn('dpn107', pretrained=pretrained, **dict(model_kwargs, **kwargs)) diff --git a/timm/models/efficientnet.py b/timm/models/efficientnet.py index a1324ae3..a3866fec 100644 --- a/timm/models/efficientnet.py +++ b/timm/models/efficientnet.py @@ -50,410 +50,12 @@ from ._efficientnet_builder import EfficientNetBuilder, decode_arch_def, efficie round_channels, resolve_bn_args, resolve_act_layer, 
BN_EPS_TF_DEFAULT from ._features import FeatureInfo, FeatureHooks from ._manipulate import checkpoint_seq +from ._pretrained import generate_default_cfgs from ._registry import register_model __all__ = ['EfficientNet', 'EfficientNetFeatures'] -def _cfg(url='', **kwargs): - return { - 'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), - 'crop_pct': 0.875, 'interpolation': 'bicubic', - 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, - 'first_conv': 'conv_stem', 'classifier': 'classifier', - **kwargs - } - - -default_cfgs = { - 'mnasnet_050': _cfg(url=''), - 'mnasnet_075': _cfg(url=''), - 'mnasnet_100': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_b1-74cb7081.pth'), - 'mnasnet_140': _cfg(url=''), - - 'semnasnet_050': _cfg(url=''), - 'semnasnet_075': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/semnasnet_075-18710866.pth'), - 'semnasnet_100': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_a1-d9418771.pth'), - 'semnasnet_140': _cfg(url=''), - 'mnasnet_small': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_small_lamb-aff75073.pth'), - - 'mobilenetv2_035': _cfg( - url=''), - 'mobilenetv2_050': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_050-3d30d450.pth', - interpolation='bicubic', - ), - 'mobilenetv2_075': _cfg( - url=''), - 'mobilenetv2_100': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_100_ra-b33bc2c4.pth'), - 'mobilenetv2_110d': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_110d_ra-77090ade.pth'), - 'mobilenetv2_120d': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_120d_ra-5987e2ed.pth'), - 'mobilenetv2_140': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_140_ra-21a4e913.pth'), - - 'fbnetc_100': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetc_100-c345b898.pth', - interpolation='bilinear'), - 'spnasnet_100': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/spnasnet_100-048bc3f4.pth', - interpolation='bilinear'), - - # NOTE experimenting with alternate attention - 'efficientnet_b0': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b0_ra-3dd342df.pth'), - 'efficientnet_b1': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b1-533bc792.pth', - test_input_size=(3, 256, 256), crop_pct=1.0), - 'efficientnet_b2': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth', - input_size=(3, 256, 256), pool_size=(8, 8), test_input_size=(3, 288, 288), crop_pct=1.0), - 'efficientnet_b3': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth', - input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), - 'efficientnet_b4': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b4_ra2_320-7eb33cd5.pth', - input_size=(3, 320, 
320), pool_size=(10, 10), test_input_size=(3, 384, 384), crop_pct=1.0), - 'efficientnet_b5': _cfg( - url='', input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), - 'efficientnet_b6': _cfg( - url='', input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), - 'efficientnet_b7': _cfg( - url='', input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), - 'efficientnet_b8': _cfg( - url='', input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), - 'efficientnet_l2': _cfg( - url='', input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.961), - - # FIXME experimental - 'efficientnet_b0_gn': _cfg( - url=''), - 'efficientnet_b0_g8_gn': _cfg( - url=''), - 'efficientnet_b0_g16_evos': _cfg( - url=''), - 'efficientnet_b3_gn': _cfg( - url='', - input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), - 'efficientnet_b3_g8_gn': _cfg( - url='', - input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), - - 'efficientnet_es': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth'), - 'efficientnet_em': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_em_ra2-66250f76.pth', - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'efficientnet_el': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el-3b455510.pth', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - - 'efficientnet_es_pruned': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_pruned75-1b7248cf.pth'), - 'efficientnet_el_pruned': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el_pruned70-ef2a2ccf.pth', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - - 'efficientnet_cc_b0_4e': _cfg(url=''), - 'efficientnet_cc_b0_8e': _cfg(url=''), - 'efficientnet_cc_b1_8e': _cfg(url='', input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - - 'efficientnet_lite0': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_lite0_ra-37913777.pth'), - 'efficientnet_lite1': _cfg( - url='', - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'efficientnet_lite2': _cfg( - url='', - input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), - 'efficientnet_lite3': _cfg( - url='', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - 'efficientnet_lite4': _cfg( - url='', input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), - - 'efficientnet_b1_pruned': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb1_pruned-bea43a3a.pth', - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), - 'efficientnet_b2_pruned': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb2_pruned-08c1b27c.pth', - input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), - 'efficientnet_b3_pruned': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb3_pruned-59ecf72d.pth', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), - - 
'efficientnetv2_rw_t': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_t_agc-3620981a.pth', - input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0), - 'gc_efficientnetv2_rw_t': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gc_efficientnetv2_rw_t_agc-927a0bde.pth', - input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0), - 'efficientnetv2_rw_s': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_v2s_ra2_288-a6477665.pth', - input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0), - 'efficientnetv2_rw_m': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_rw_m_agc-3d90cb1e.pth', - input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0), - - 'efficientnetv2_s': _cfg( - url='', - input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0), - 'efficientnetv2_m': _cfg( - url='', - input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0), - 'efficientnetv2_l': _cfg( - url='', - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0), - 'efficientnetv2_xl': _cfg( - url='', - input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0), - - 'tf_efficientnet_b0': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_aa-827b6e33.pth', - input_size=(3, 224, 224)), - 'tf_efficientnet_b1': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_aa-ea7a6ee0.pth', - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'tf_efficientnet_b2': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_aa-60c94f97.pth', - input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), - 'tf_efficientnet_b3': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_aa-84b4657e.pth', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - 'tf_efficientnet_b4': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_aa-818f208c.pth', - input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), - 'tf_efficientnet_b5': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ra-9a3e5369.pth', - input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), - 'tf_efficientnet_b6': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_aa-80ba17e4.pth', - input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), - 'tf_efficientnet_b7': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ra-6c08e654.pth', - input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), - 'tf_efficientnet_b8': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ra-572d5dd9.pth', - input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), - - 'tf_efficientnet_b0_ap': _cfg( - 
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ap-f262efe1.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, input_size=(3, 224, 224)), - 'tf_efficientnet_b1_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ap-44ef0a3d.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'tf_efficientnet_b2_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ap-2f8e7636.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), - 'tf_efficientnet_b3_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ap-aad25bdd.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - 'tf_efficientnet_b4_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ap-dedb23e6.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), - 'tf_efficientnet_b5_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ap-9e82fae8.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), - 'tf_efficientnet_b6_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ap-4ffb161f.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), - 'tf_efficientnet_b7_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ap-ddb28fec.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), - 'tf_efficientnet_b8_ap': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ap-00e169fa.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), - - 'tf_efficientnet_b0_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ns-c0e6a31c.pth', - input_size=(3, 224, 224)), - 'tf_efficientnet_b1_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ns-99dd0c41.pth', - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'tf_efficientnet_b2_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ns-00306e48.pth', - input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), - 'tf_efficientnet_b3_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ns-9d44bf68.pth', - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - 'tf_efficientnet_b4_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ns-d6313a46.pth', - input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), - 
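
[Editor's note] The `_ap` (AdvProp) and `_ns` (NoisyStudent) entries deleted in this region do not disappear: the training provenance moves out of the architecture name and into the pretrained tag (e.g. `tf_efficientnet_b0_ap` -> `tf_efficientnet_b0.ap_in1k`, `tf_efficientnet_b0_ns` -> `tf_efficientnet_b0.ns_jft_in1k`), and the matching `@register_model` functions are dropped in later hunks of this file. Assuming `create_model` resolves tagged names once this refactor lands, usage would look roughly like:

import timm

# pre-patch:  timm.create_model('tf_efficientnet_b0_ns', pretrained=True)
# post-patch: NoisyStudent weights are a pretrained tag on the base arch
model = timm.create_model('tf_efficientnet_b0.ns_jft_in1k', pretrained=True)

# With no tag, the first cfg registered for the architecture should be
# selected -- an assumption about tag resolution, not shown in this patch.
model = timm.create_model('tf_efficientnet_b0', pretrained=True)
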
'tf_efficientnet_b5_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ns-6f26d0cf.pth', - input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), - 'tf_efficientnet_b6_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ns-51548356.pth', - input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), - 'tf_efficientnet_b7_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ns-1dbc32de.pth', - input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), - 'tf_efficientnet_l2_ns_475': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns_475-bebbd00a.pth', - input_size=(3, 475, 475), pool_size=(15, 15), crop_pct=0.936), - 'tf_efficientnet_l2_ns': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns-df73bb44.pth', - input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.96), - - 'tf_efficientnet_es': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_es-ca1afbfe.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 224, 224), ), - 'tf_efficientnet_em': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_em-e78cfe58.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - 'tf_efficientnet_el': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_el-5143854e.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), - - 'tf_efficientnet_cc_b0_4e': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), - 'tf_efficientnet_cc_b0_8e': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), - 'tf_efficientnet_cc_b1_8e': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), - - 'tf_efficientnet_lite0': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite0-0aa007d2.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res - ), - 'tf_efficientnet_lite1': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite1-bde8b488.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882, - interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res - ), - 'tf_efficientnet_lite2': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite2-dcccb7df.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 260, 260), pool_size=(9, 9), 
crop_pct=0.890, - interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res - ), - 'tf_efficientnet_lite3': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite3-b733e338.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, interpolation='bilinear'), - 'tf_efficientnet_lite4': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite4-741542c3.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.920, interpolation='bilinear'), - - 'tf_efficientnetv2_s': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s-eb54923e.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), - 'tf_efficientnetv2_m': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m-cc09e0cd.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'tf_efficientnetv2_l': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l-d664b728.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - - 'tf_efficientnetv2_s_in21ft1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21ft1k-d7dafa41.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), - 'tf_efficientnetv2_m_in21ft1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21ft1k-bf41664a.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'tf_efficientnetv2_l_in21ft1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21ft1k-60127a9d.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'tf_efficientnetv2_xl_in21ft1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21ft1k-06c35c48.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), - input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - - 'tf_efficientnetv2_s_in21k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21k-6337ad01.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, - input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), - 'tf_efficientnetv2_m_in21k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21k-361418a2.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, - input_size=(3, 
384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'tf_efficientnetv2_l_in21k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21k-91a19ec9.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, - input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - 'tf_efficientnetv2_xl_in21k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21k-fd7e8abf.pth', - mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, - input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), - - 'tf_efficientnetv2_b0': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b0-c7cc451f.pth', - input_size=(3, 192, 192), test_input_size=(3, 224, 224), pool_size=(6, 6)), - 'tf_efficientnetv2_b1': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b1-be6e41b0.pth', - input_size=(3, 192, 192), test_input_size=(3, 240, 240), pool_size=(6, 6), crop_pct=0.882), - 'tf_efficientnetv2_b2': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b2-847de54e.pth', - input_size=(3, 208, 208), test_input_size=(3, 260, 260), pool_size=(7, 7), crop_pct=0.890), - 'tf_efficientnetv2_b3': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b3-57773f13.pth', - input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.904), - - 'mixnet_s': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_s-a907afbc.pth'), - 'mixnet_m': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_m-4647fc68.pth'), - 'mixnet_l': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_l-5a9a2ed8.pth'), - 'mixnet_xl': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_xl_ra-aac3c00c.pth'), - 'mixnet_xxl': _cfg(), - - 'tf_mixnet_s': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_s-89d3354b.pth'), - 'tf_mixnet_m': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth'), - 'tf_mixnet_l': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth'), - - "tinynet_a": _cfg( - input_size=(3, 192, 192), pool_size=(6, 6), # int(224 * 0.86) - url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_a.pth'), - "tinynet_b": _cfg( - input_size=(3, 188, 188), pool_size=(6, 6), # int(224 * 0.84) - url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_b.pth'), - "tinynet_c": _cfg( - input_size=(3, 184, 184), pool_size=(6, 6), # int(224 * 0.825) - url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_c.pth'), - "tinynet_d": _cfg( - input_size=(3, 152, 152), pool_size=(5, 5), # int(224 * 0.68) - url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_d.pth'), - "tinynet_e": _cfg( - input_size=(3, 106, 106), 
pool_size=(4, 4), # int(224 * 0.475) - url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_e.pth'), -} - - class EfficientNet(nn.Module): """ EfficientNet @@ -471,9 +73,23 @@ class EfficientNet(nn.Module): """ def __init__( - self, block_args, num_classes=1000, num_features=1280, in_chans=3, stem_size=32, fix_stem=False, - output_stride=32, pad_type='', round_chs_fn=round_channels, act_layer=None, norm_layer=None, - se_layer=None, drop_rate=0., drop_path_rate=0., global_pool='avg'): + self, + block_args, + num_classes=1000, + num_features=1280, + in_chans=3, + stem_size=32, + fix_stem=False, + output_stride=32, + pad_type='', + round_chs_fn=round_channels, + act_layer=None, + norm_layer=None, + se_layer=None, + drop_rate=0., + drop_path_rate=0., + global_pool='avg' + ): super(EfficientNet, self).__init__() act_layer = act_layer or nn.ReLU norm_layer = norm_layer or nn.BatchNorm2d @@ -492,8 +108,14 @@ class EfficientNet(nn.Module): # Middle stages (IR/ER/DS Blocks) builder = EfficientNetBuilder( - output_stride=output_stride, pad_type=pad_type, round_chs_fn=round_chs_fn, - act_layer=act_layer, norm_layer=norm_layer, se_layer=se_layer, drop_path_rate=drop_path_rate) + output_stride=output_stride, + pad_type=pad_type, + round_chs_fn=round_chs_fn, + act_layer=act_layer, + norm_layer=norm_layer, + se_layer=se_layer, + drop_path_rate=drop_path_rate, + ) self.blocks = nn.Sequential(*builder(stem_size, block_args)) self.feature_info = builder.features head_chs = builder.in_chs @@ -567,9 +189,22 @@ class EfficientNetFeatures(nn.Module): """ def __init__( - self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bottleneck', in_chans=3, - stem_size=32, fix_stem=False, output_stride=32, pad_type='', round_chs_fn=round_channels, - act_layer=None, norm_layer=None, se_layer=None, drop_rate=0., drop_path_rate=0.): + self, + block_args, + out_indices=(0, 1, 2, 3, 4), + feature_location='bottleneck', + in_chans=3, + stem_size=32, + fix_stem=False, + output_stride=32, + pad_type='', + round_chs_fn=round_channels, + act_layer=None, + norm_layer=None, + se_layer=None, + drop_rate=0., + drop_path_rate=0. + ): super(EfficientNetFeatures, self).__init__() act_layer = act_layer or nn.ReLU norm_layer = norm_layer or nn.BatchNorm2d @@ -585,9 +220,15 @@ class EfficientNetFeatures(nn.Module): # Middle stages (IR/ER/DS Blocks) builder = EfficientNetBuilder( - output_stride=output_stride, pad_type=pad_type, round_chs_fn=round_chs_fn, - act_layer=act_layer, norm_layer=norm_layer, se_layer=se_layer, drop_path_rate=drop_path_rate, - feature_location=feature_location) + output_stride=output_stride, + pad_type=pad_type, + round_chs_fn=round_chs_fn, + act_layer=act_layer, + norm_layer=norm_layer, + se_layer=se_layer, + drop_path_rate=drop_path_rate, + feature_location=feature_location, + ) self.blocks = nn.Sequential(*builder(stem_size, block_args)) self.feature_info = FeatureInfo(builder.features, out_indices) self._stage_out_idx = {v['stage']: i for i, v in enumerate(self.feature_info) if i in out_indices} @@ -1233,23 +874,518 @@ def _gen_tinynet( return model -@register_model -def mnasnet_050(pretrained=False, **kwargs): - """ MNASNet B1, depth multiplier of 0.5. 
""" - model = _gen_mnasnet_b1('mnasnet_050', 0.5, pretrained=pretrained, **kwargs) - return model - +def _cfg(url='', **kwargs): + return { + 'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), + 'crop_pct': 0.875, 'interpolation': 'bicubic', + 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, + 'first_conv': 'conv_stem', 'classifier': 'classifier', + **kwargs + } -@register_model -def mnasnet_075(pretrained=False, **kwargs): - """ MNASNet B1, depth multiplier of 0.75. """ - model = _gen_mnasnet_b1('mnasnet_075', 0.75, pretrained=pretrained, **kwargs) - return model +default_cfgs = generate_default_cfgs({ + 'mnasnet_050.untrained': _cfg(), + 'mnasnet_075.untrained': _cfg(), + 'mnasnet_100.rmsp_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_b1-74cb7081.pth', + hf_hub_id='timm/'), + 'mnasnet_140.untrained': _cfg(), + + 'semnasnet_050.untrained': _cfg(), + 'semnasnet_075.rmsp_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/semnasnet_075-18710866.pth', + hf_hub_id='timm/'), + 'semnasnet_100.rmsp_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_a1-d9418771.pth', + hf_hub_id='timm/'), + 'semnasnet_140.untrained': _cfg(), + 'mnasnet_small.lamb_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_small_lamb-aff75073.pth', + hf_hub_id='timm/'), + + 'mobilenetv2_035.untrained': _cfg(), + 'mobilenetv2_050.lamb_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_050-3d30d450.pth', + hf_hub_id='timm/', + interpolation='bicubic', + ), + 'mobilenetv2_075.untrained': _cfg(), + 'mobilenetv2_100.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_100_ra-b33bc2c4.pth', + hf_hub_id='timm/'), + 'mobilenetv2_110d.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_110d_ra-77090ade.pth', + hf_hub_id='timm/'), + 'mobilenetv2_120d.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_120d_ra-5987e2ed.pth', + hf_hub_id='timm/'), + 'mobilenetv2_140.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_140_ra-21a4e913.pth', + hf_hub_id='timm/'), + + 'fbnetc_100.rmsp_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetc_100-c345b898.pth', + hf_hub_id='timm/', + interpolation='bilinear'), + 'spnasnet_100.rmsp_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/spnasnet_100-048bc3f4.pth', + hf_hub_id='timm/', + interpolation='bilinear'), -@register_model -def mnasnet_100(pretrained=False, **kwargs): - """ MNASNet B1, depth multiplier of 1.0. 
""" + # NOTE experimenting with alternate attention + 'efficientnet_b0.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b0_ra-3dd342df.pth', + hf_hub_id='timm/'), + 'efficientnet_b1.ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b1-533bc792.pth', + hf_hub_id='timm/', + test_input_size=(3, 256, 256), crop_pct=1.0), + 'efficientnet_b2.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth', + hf_hub_id='timm/', + input_size=(3, 256, 256), pool_size=(8, 8), test_input_size=(3, 288, 288), crop_pct=1.0), + 'efficientnet_b3.ra2_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra2-cf984f9c.pth', + hf_hub_id='timm/', + input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), + 'efficientnet_b4.ra2_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b4_ra2_320-7eb33cd5.pth', + hf_hub_id='timm/', + input_size=(3, 320, 320), pool_size=(10, 10), test_input_size=(3, 384, 384), crop_pct=1.0), + 'efficientnet_b5.in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + input_size=(3, 448, 448), pool_size=(14, 14), crop_pct=1.0, crop_mode='squash'), + 'efficientnet_b5.in12k': _cfg( + hf_hub_id='timm/', + input_size=(3, 416, 416), pool_size=(13, 13), crop_pct=0.95, num_classes=11821), + 'efficientnet_b6.untrained': _cfg( + url='', input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), + 'efficientnet_b7.untrained': _cfg( + url='', input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), + 'efficientnet_b8.untrained': _cfg( + url='', input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), + 'efficientnet_l2.untrained': _cfg( + url='', input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.961), + + # FIXME experimental + 'efficientnet_b0_gn.untrained': _cfg(), + 'efficientnet_b0_g8_gn.untrained': _cfg(), + 'efficientnet_b0_g16_evos.untrained': _cfg(), + 'efficientnet_b3_gn.untrained': _cfg( + input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), + 'efficientnet_b3_g8_gn.untrained': _cfg( + input_size=(3, 288, 288), pool_size=(9, 9), test_input_size=(3, 320, 320), crop_pct=1.0), + + 'efficientnet_es.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth', + hf_hub_id='timm/'), + 'efficientnet_em.ra2_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_em_ra2-66250f76.pth', + hf_hub_id='timm/', + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'efficientnet_el.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el-3b455510.pth', + hf_hub_id='timm/', + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + + 'efficientnet_es_pruned.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_pruned75-1b7248cf.pth', + hf_hub_id='timm/'), + 'efficientnet_el_pruned.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_el_pruned70-ef2a2ccf.pth', + hf_hub_id='timm/', + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + + 
'efficientnet_cc_b0_4e.untrained': _cfg(), + 'efficientnet_cc_b0_8e.untrained': _cfg(), + 'efficientnet_cc_b1_8e.untrained': _cfg(input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + + 'efficientnet_lite0.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_lite0_ra-37913777.pth', + hf_hub_id='timm/'), + 'efficientnet_lite1.untrained': _cfg( + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'efficientnet_lite2.untrained': _cfg( + input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), + 'efficientnet_lite3.untrained': _cfg( + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + 'efficientnet_lite4.untrained': _cfg( + input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), + + 'efficientnet_b1_pruned.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb1_pruned-bea43a3a.pth', + hf_hub_id='timm/', + input_size=(3, 240, 240), pool_size=(8, 8), + crop_pct=0.882, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), + 'efficientnet_b2_pruned.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb2_pruned-08c1b27c.pth', + hf_hub_id='timm/', + input_size=(3, 260, 260), pool_size=(9, 9), + crop_pct=0.890, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), + 'efficientnet_b3_pruned.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/effnetb3_pruned-59ecf72d.pth', + hf_hub_id='timm/', + input_size=(3, 300, 300), pool_size=(10, 10), + crop_pct=0.904, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), + + 'efficientnetv2_rw_t.ra2_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_t_agc-3620981a.pth', + hf_hub_id='timm/', + input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0), + 'gc_efficientnetv2_rw_t.agc_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/gc_efficientnetv2_rw_t_agc-927a0bde.pth', + hf_hub_id='timm/', + input_size=(3, 224, 224), test_input_size=(3, 288, 288), pool_size=(7, 7), crop_pct=1.0), + 'efficientnetv2_rw_s.ra2_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_v2s_ra2_288-a6477665.pth', + hf_hub_id='timm/', + input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0), + 'efficientnetv2_rw_m.agc_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnetv2_rw_m_agc-3d90cb1e.pth', + hf_hub_id='timm/', + input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0), + + 'efficientnetv2_s.untrained': _cfg( + input_size=(3, 288, 288), test_input_size=(3, 384, 384), pool_size=(9, 9), crop_pct=1.0), + 'efficientnetv2_m.untrained': _cfg( + input_size=(3, 320, 320), test_input_size=(3, 416, 416), pool_size=(10, 10), crop_pct=1.0), + 'efficientnetv2_l.untrained': _cfg( + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0), + 'efficientnetv2_xl.untrained': _cfg( + input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0), + + 'tf_efficientnet_b0.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_aa-827b6e33.pth', + hf_hub_id='timm/', + 
input_size=(3, 224, 224)), + 'tf_efficientnet_b1.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_aa-ea7a6ee0.pth', + hf_hub_id='timm/', + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'tf_efficientnet_b2.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_aa-60c94f97.pth', + hf_hub_id='timm/', + input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), + 'tf_efficientnet_b3.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_aa-84b4657e.pth', + hf_hub_id='timm/', + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + 'tf_efficientnet_b4.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_aa-818f208c.pth', + hf_hub_id='timm/', + input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), + 'tf_efficientnet_b5.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ra-9a3e5369.pth', + hf_hub_id='timm/', + input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), + 'tf_efficientnet_b6.aa_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_aa-80ba17e4.pth', + hf_hub_id='timm/', + input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), + 'tf_efficientnet_b7.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ra-6c08e654.pth', + hf_hub_id='timm/', + input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), + 'tf_efficientnet_b8.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ra-572d5dd9.pth', + hf_hub_id='timm/', + input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), + + 'tf_efficientnet_b0.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ap-f262efe1.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, input_size=(3, 224, 224)), + 'tf_efficientnet_b1.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ap-44ef0a3d.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'tf_efficientnet_b2.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ap-2f8e7636.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), + 'tf_efficientnet_b3.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ap-aad25bdd.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + 'tf_efficientnet_b4.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ap-dedb23e6.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), + 'tf_efficientnet_b5.ap_in1k': 
_cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ap-9e82fae8.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), + 'tf_efficientnet_b6.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ap-4ffb161f.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), + 'tf_efficientnet_b7.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ap-ddb28fec.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), + 'tf_efficientnet_b8.ap_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ap-00e169fa.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954), + + 'tf_efficientnet_b0.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ns-c0e6a31c.pth', + hf_hub_id='timm/', + input_size=(3, 224, 224)), + 'tf_efficientnet_b1.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ns-99dd0c41.pth', + hf_hub_id='timm/', + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'tf_efficientnet_b2.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ns-00306e48.pth', + hf_hub_id='timm/', + input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890), + 'tf_efficientnet_b3.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ns-9d44bf68.pth', + hf_hub_id='timm/', + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + 'tf_efficientnet_b4.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ns-d6313a46.pth', + hf_hub_id='timm/', + input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922), + 'tf_efficientnet_b5.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ns-6f26d0cf.pth', + hf_hub_id='timm/', + input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934), + 'tf_efficientnet_b6.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ns-51548356.pth', + hf_hub_id='timm/', + input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942), + 'tf_efficientnet_b7.ns_jft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ns-1dbc32de.pth', + hf_hub_id='timm/', + input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949), + 'tf_efficientnet_l2.ns_jft_in1k_475': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns_475-bebbd00a.pth', + hf_hub_id='timm/', + input_size=(3, 475, 475), pool_size=(15, 15), crop_pct=0.936), + 'tf_efficientnet_l2.ns_jft_in1k': _cfg( + 
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns-df73bb44.pth', + hf_hub_id='timm/', + input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.96), + + 'tf_efficientnet_es.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_es-ca1afbfe.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 224, 224), ), + 'tf_efficientnet_em.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_em-e78cfe58.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + 'tf_efficientnet_el.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_el-5143854e.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904), + + 'tf_efficientnet_cc_b0_4e.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), + 'tf_efficientnet_cc_b0_8e.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD), + 'tf_efficientnet_cc_b1_8e.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth', + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882), + + 'tf_efficientnet_lite0.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite0-0aa007d2.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res + ), + 'tf_efficientnet_lite1.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite1-bde8b488.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882, + interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res + ), + 'tf_efficientnet_lite2.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite2-dcccb7df.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890, + interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res + ), + 'tf_efficientnet_lite3.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite3-b733e338.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, interpolation='bilinear'), + 'tf_efficientnet_lite4.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite4-741542c3.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 380, 
380), pool_size=(12, 12), crop_pct=0.920, interpolation='bilinear'), + + 'tf_efficientnetv2_s.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s-eb54923e.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), + 'tf_efficientnetv2_m.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m-cc09e0cd.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'tf_efficientnetv2_l.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l-d664b728.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + + 'tf_efficientnetv2_s.in21k_ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21ft1k-d7dafa41.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), + 'tf_efficientnetv2_m.in21k_ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21ft1k-bf41664a.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'tf_efficientnetv2_l.in21k_ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21ft1k-60127a9d.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'tf_efficientnetv2_xl.in21k_ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21ft1k-06c35c48.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), + input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + + 'tf_efficientnetv2_s.in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_s_21k-6337ad01.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, + input_size=(3, 300, 300), test_input_size=(3, 384, 384), pool_size=(10, 10), crop_pct=1.0), + 'tf_efficientnetv2_m.in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_m_21k-361418a2.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, + input_size=(3, 384, 384), test_input_size=(3, 480, 480), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'tf_efficientnetv2_l.in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_l_21k-91a19ec9.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, + input_size=(3, 384, 384), test_input_size=(3, 480, 480), 
pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'tf_efficientnetv2_xl.in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_xl_in21k-fd7e8abf.pth', + hf_hub_id='timm/', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), num_classes=21843, + input_size=(3, 384, 384), test_input_size=(3, 512, 512), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + + 'tf_efficientnetv2_b0.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b0-c7cc451f.pth', + hf_hub_id='timm/', + input_size=(3, 192, 192), test_input_size=(3, 224, 224), pool_size=(6, 6)), + 'tf_efficientnetv2_b1.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b1-be6e41b0.pth', + hf_hub_id='timm/', + input_size=(3, 192, 192), test_input_size=(3, 240, 240), pool_size=(6, 6), crop_pct=0.882), + 'tf_efficientnetv2_b2.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b2-847de54e.pth', + hf_hub_id='timm/', + input_size=(3, 208, 208), test_input_size=(3, 260, 260), pool_size=(7, 7), crop_pct=0.890), + 'tf_efficientnetv2_b3.in21k_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.9, crop_mode='squash'), + 'tf_efficientnetv2_b3.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-effv2-weights/tf_efficientnetv2_b3-57773f13.pth', + hf_hub_id='timm/', + input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.904), + 'tf_efficientnetv2_b3.in21k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, num_classes=21843, + input_size=(3, 240, 240), test_input_size=(3, 300, 300), pool_size=(8, 8), crop_pct=0.904), + + 'mixnet_s.ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_s-a907afbc.pth', + hf_hub_id='timm/'), + 'mixnet_m.ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_m-4647fc68.pth', + hf_hub_id='timm/'), + 'mixnet_l.ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_l-5a9a2ed8.pth', + hf_hub_id='timm/'), + 'mixnet_xl.ra_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_xl_ra-aac3c00c.pth', + hf_hub_id='timm/'), + 'mixnet_xxl.untrained': _cfg(), + + 'tf_mixnet_s.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_s-89d3354b.pth', + hf_hub_id='timm/'), + 'tf_mixnet_m.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth', + hf_hub_id='timm/'), + 'tf_mixnet_l.in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth', + hf_hub_id='timm/'), + + "tinynet_a.in1k": _cfg( + input_size=(3, 192, 192), pool_size=(6, 6), # int(224 * 0.86) + url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_a.pth', + hf_hub_id='timm/'), + "tinynet_b.in1k": _cfg( + input_size=(3, 188, 188), pool_size=(6, 6), # int(224 * 0.84) + 
url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_b.pth', + hf_hub_id='timm/'), + "tinynet_c.in1k": _cfg( + input_size=(3, 184, 184), pool_size=(6, 6), # int(224 * 0.825) + url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_c.pth', + hf_hub_id='timm/'), + "tinynet_d.in1k": _cfg( + input_size=(3, 152, 152), pool_size=(5, 5), # int(224 * 0.68) + url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_d.pth', + hf_hub_id='timm/'), + "tinynet_e.in1k": _cfg( + input_size=(3, 106, 106), pool_size=(4, 4), # int(224 * 0.475) + url='https://github.com/huawei-noah/CV-Backbones/releases/download/v1.2.0/tinynet_e.pth', + hf_hub_id='timm/'), +}) + + +@register_model +def mnasnet_050(pretrained=False, **kwargs): + """ MNASNet B1, depth multiplier of 0.5. """ + model = _gen_mnasnet_b1('mnasnet_050', 0.5, pretrained=pretrained, **kwargs) + return model + + +@register_model +def mnasnet_075(pretrained=False, **kwargs): + """ MNASNet B1, depth multiplier of 0.75. """ + model = _gen_mnasnet_b1('mnasnet_075', 0.75, pretrained=pretrained, **kwargs) + return model + + +@register_model +def mnasnet_100(pretrained=False, **kwargs): + """ MNASNet B1, depth multiplier of 1.0. """ model = _gen_mnasnet_b1('mnasnet_100', 1.0, pretrained=pretrained, **kwargs) return model @@ -1830,199 +1966,13 @@ def tf_efficientnet_b8(pretrained=False, **kwargs): @register_model -def tf_efficientnet_b0_ap(pretrained=False, **kwargs): - """ EfficientNet-B0 AdvProp. Tensorflow compatible variant """ - kwargs['bn_eps'] = BN_EPS_TF_DEFAULT - kwargs['pad_type'] = 'same' - model = _gen_efficientnet( - 'tf_efficientnet_b0_ap', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs) - return model - - -@register_model -def tf_efficientnet_b1_ap(pretrained=False, **kwargs): - """ EfficientNet-B1 AdvProp. Tensorflow compatible variant """ - kwargs['bn_eps'] = BN_EPS_TF_DEFAULT - kwargs['pad_type'] = 'same' - model = _gen_efficientnet( - 'tf_efficientnet_b1_ap', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs) - return model - - -@register_model -def tf_efficientnet_b2_ap(pretrained=False, **kwargs): - """ EfficientNet-B2 AdvProp. Tensorflow compatible variant """ - kwargs['bn_eps'] = BN_EPS_TF_DEFAULT - kwargs['pad_type'] = 'same' - model = _gen_efficientnet( - 'tf_efficientnet_b2_ap', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs) - return model - - -@register_model -def tf_efficientnet_b3_ap(pretrained=False, **kwargs): - """ EfficientNet-B3 AdvProp. Tensorflow compatible variant """ - kwargs['bn_eps'] = BN_EPS_TF_DEFAULT - kwargs['pad_type'] = 'same' - model = _gen_efficientnet( - 'tf_efficientnet_b3_ap', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs) - return model - - -@register_model -def tf_efficientnet_b4_ap(pretrained=False, **kwargs): - """ EfficientNet-B4 AdvProp. Tensorflow compatible variant """ - kwargs['bn_eps'] = BN_EPS_TF_DEFAULT - kwargs['pad_type'] = 'same' - model = _gen_efficientnet( - 'tf_efficientnet_b4_ap', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs) - return model - - -@register_model -def tf_efficientnet_b5_ap(pretrained=False, **kwargs): - """ EfficientNet-B5 AdvProp. 
Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b5_ap', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b6_ap(pretrained=False, **kwargs):
-    """ EfficientNet-B6 AdvProp. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b6_ap', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b7_ap(pretrained=False, **kwargs):
-    """ EfficientNet-B7 AdvProp. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b7_ap', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b8_ap(pretrained=False, **kwargs):
-    """ EfficientNet-B8 AdvProp. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b8_ap', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b0_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B0 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b0_ns', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b1_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B1 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b1_ns', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b2_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B2 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b2_ns', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b3_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B3 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b3_ns', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b4_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B4 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b4_ns', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b5_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B5 NoisyStudent. Tensorflow compatible variant """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b5_ns', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b6_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B6 NoisyStudent. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b6_ns', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_b7_ns(pretrained=False, **kwargs):
-    """ EfficientNet-B7 NoisyStudent. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_b7_ns', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_l2_ns_475(pretrained=False, **kwargs):
-    """ EfficientNet-L2 NoisyStudent @ 475x475. Tensorflow compatible variant """
-    # NOTE for train, drop_rate should be 0.5
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnet(
-        'tf_efficientnet_l2_ns_475', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnet_l2_ns(pretrained=False, **kwargs):
+def tf_efficientnet_l2(pretrained=False, **kwargs):
     """ EfficientNet-L2 NoisyStudent. Tensorflow compatible variant """
     # NOTE for train, drop_rate should be 0.5
     kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
     kwargs['pad_type'] = 'same'
     model = _gen_efficientnet(
-        'tf_efficientnet_l2_ns', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
+        'tf_efficientnet_l2', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
     return model
 
 
@@ -2146,7 +2096,6 @@ def tf_efficientnet_lite4(pretrained=False, **kwargs):
     return model
 
 
-
 @register_model
 def tf_efficientnetv2_s(pretrained=False, **kwargs):
     """ EfficientNet-V2 Small. Tensorflow compatible variant """
@@ -2175,82 +2124,12 @@ def tf_efficientnetv2_l(pretrained=False, **kwargs):
 
 
 @register_model
-def tf_efficientnetv2_s_in21ft1k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Small. Pretrained on ImageNet-21k, fine-tuned on 1k. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_s('tf_efficientnetv2_s_in21ft1k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_m_in21ft1k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Medium. Pretrained on ImageNet-21k, fine-tuned on 1k. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_m('tf_efficientnetv2_m_in21ft1k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_l_in21ft1k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Large. Pretrained on ImageNet-21k, fine-tuned on 1k. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_l('tf_efficientnetv2_l_in21ft1k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_xl_in21ft1k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Xtra-Large. Pretrained on ImageNet-21k, fine-tuned on 1k. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_xl('tf_efficientnetv2_xl_in21ft1k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_s_in21k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Small w/ ImageNet-21k pretrained weights. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_s('tf_efficientnetv2_s_in21k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_m_in21k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Medium w/ ImageNet-21k pretrained weights. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_m('tf_efficientnetv2_m_in21k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_l_in21k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Large w/ ImageNet-21k pretrained weights. Tensorflow compatible variant
-    """
-    kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
-    kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_l('tf_efficientnetv2_l_in21k', pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def tf_efficientnetv2_xl_in21k(pretrained=False, **kwargs):
-    """ EfficientNet-V2 Xtra-Large w/ ImageNet-21k pretrained weights. Tensorflow compatible variant
+def tf_efficientnetv2_xl(pretrained=False, **kwargs):
+    """ EfficientNet-V2 Xtra-Large. Tensorflow compatible variant
     """
     kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
     kwargs['pad_type'] = 'same'
-    model = _gen_efficientnetv2_xl('tf_efficientnetv2_xl_in21k', pretrained=pretrained, **kwargs)
+    model = _gen_efficientnetv2_xl('tf_efficientnetv2_xl', pretrained=pretrained, **kwargs)
     return model
diff --git a/timm/models/layers/__init__.py b/timm/models/layers/__init__.py
index 97e70563..dd5b27d9 100644
--- a/timm/models/layers/__init__.py
+++ b/timm/models/layers/__init__.py
@@ -2,6 +2,7 @@ from timm.layers.activations import *
 from timm.layers.adaptive_avgmax_pool import \
     adaptive_avgmax_pool2d, select_adaptive_pool2d, AdaptiveAvgMaxPool2d, SelectAdaptivePool2d
+from timm.layers.attention_pool2d import AttentionPool2d, RotAttentionPool2d, RotaryEmbedding
 from timm.layers.blur_pool import BlurPool2d
 from timm.layers.classifier import ClassifierHead, create_classifier
 from timm.layers.cond_conv2d import CondConv2d, get_condconv_initializer
diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py
index 1e2666e5..dd424078 100644
--- a/timm/models/maxxvit.py
+++ b/timm/models/maxxvit.py
@@ -47,16 +47,15 @@ import torch
 from torch import nn
 
 from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
-from timm.layers import Mlp, ConvMlp, DropPath, ClassifierHead, trunc_normal_tf_, LayerNorm
-from timm.layers import SelectAdaptivePool2d, create_pool2d
-from timm.layers import create_attn, get_act_layer, get_norm_layer, get_norm_act_layer, create_conv2d
-from timm.layers import to_2tuple, extend_tuple, make_divisible, _assert
+from timm.layers import Mlp, ConvMlp, DropPath, ClassifierHead, LayerNorm, SelectAdaptivePool2d
+from timm.layers import create_attn, get_act_layer, get_norm_layer, get_norm_act_layer, create_conv2d, create_pool2d
+from timm.layers import trunc_normal_tf_, to_2tuple, extend_tuple, make_divisible, _assert
+from timm.layers import RelPosMlp, RelPosBias, RelPosBiasTf
 from ._builder import build_model_with_cfg
 from ._features_fx import register_notrace_function
 from ._manipulate import named_apply, checkpoint_seq
 from ._pretrained import generate_default_cfgs
 from ._registry import register_model
-from .vision_transformer_relpos import RelPosMlp, RelPosBias  # FIXME move these to common location
 
 __all__ = ['MaxxVitCfg', 'MaxxVitConvCfg', 'MaxxVitTransformerCfg', 'MaxxVit']
@@ -1076,93 +1075,6 @@ def cfg_window_size(cfg: MaxxVitTransformerCfg, img_size: Tuple[int, int]):
     return cfg
 
 
-def generate_lookup_tensor(
-        length: int,
-        max_relative_position: Optional[int] = None,
-):
-    """Generate a one_hot lookup tensor to reindex embeddings along one dimension.
-    Args:
-        length: the length to reindex to.
-        max_relative_position: the maximum relative position to consider.
-            Relative position embeddings for distances above this threshold
-            are zeroed out.
-    Returns:
-        a lookup Tensor of size [length, length, vocab_size] that satisfies
-            ret[n,m,v] = 1{m - n + max_relative_position = v}.
-    """
-    if max_relative_position is None:
-        max_relative_position = length - 1
-    # Return the cached lookup tensor, otherwise compute it and cache it.
-    vocab_size = 2 * max_relative_position + 1
-    ret = torch.zeros(length, length, vocab_size)
-    for i in range(length):
-        for x in range(length):
-            v = x - i + max_relative_position
-            if abs(x - i) > max_relative_position:
-                continue
-            ret[i, x, v] = 1
-    return ret
-
-
-def reindex_2d_einsum_lookup(
-        relative_position_tensor,
-        height: int,
-        width: int,
-        height_lookup: torch.Tensor,
-        width_lookup: torch.Tensor,
-) -> torch.Tensor:
-    """Reindex 2d relative position bias with 2 independent einsum lookups.
-    Args:
-        relative_position_tensor: tensor of shape
-            [..., vocab_height, vocab_width, ...].
-        height: height to reindex to.
-        width: width to reindex to.
-        height_lookup: one-hot height lookup
-        width_lookup: one-hot width lookup
-    Returns:
-        reindexed_tensor: a Tensor of shape
-            [..., height * width, height * width, ...]
-    """
-    reindexed_tensor = torch.einsum('nhw,ixh->nixw', relative_position_tensor, height_lookup)
-    reindexed_tensor = torch.einsum('nixw,jyw->nijxy', reindexed_tensor, width_lookup)
-    area = height * width
-    return reindexed_tensor.reshape(relative_position_tensor.shape[0], area, area)
-
-
-class RelPosBiasTf(nn.Module):
-
-    def __init__(self, window_size, num_heads, prefix_tokens=0):
-        super().__init__()
-        assert prefix_tokens <= 1
-        self.window_size = window_size
-        self.window_area = window_size[0] * window_size[1]
-        self.num_heads = num_heads
-
-        vocab_height = 2 * window_size[0] - 1
-        vocab_width = 2 * window_size[1] - 1
-        self.bias_shape = (self.num_heads, vocab_height, vocab_width)
-        self.relative_position_bias_table = nn.Parameter(torch.zeros(self.bias_shape))
-        self.register_buffer('height_lookup', generate_lookup_tensor(window_size[0]), persistent=False)
-        self.register_buffer('width_lookup', generate_lookup_tensor(window_size[1]), persistent=False)
-        self.init_weights()
-
-    def init_weights(self):
-        nn.init.normal_(self.relative_position_bias_table, std=.02)
-
-    def get_bias(self) -> torch.Tensor:
-        # FIXME change to not use one-hot/einsum?
-        return reindex_2d_einsum_lookup(
-            self.relative_position_bias_table,
-            self.window_size[0],
-            self.window_size[1],
-            self.height_lookup,
-            self.width_lookup
-        )
-
-    def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None):
-        return attn + self.get_bias()
-
-
 class NormMlpHead(nn.Module):
 
     def __init__(
@@ -1204,6 +1116,26 @@ class NormMlpHead(nn.Module):
         return x
 
 
+def _overlay_kwargs(cfg: MaxxVitCfg, **kwargs):
+    transformer_kwargs = {}
+    conv_kwargs = {}
+    base_kwargs = {}
+    for k, v in kwargs.items():
+        if k.startswith('transformer_'):
+            transformer_kwargs[k.replace('transformer_', '')] = v
+        elif k.startswith('conv_'):
+            conv_kwargs[k.replace('conv_', '')] = v
+        else:
+            base_kwargs[k] = v
+    cfg = replace(
+        cfg,
+        transformer_cfg=replace(cfg.transformer_cfg, **transformer_kwargs),
+        conv_cfg=replace(cfg.conv_cfg, **conv_kwargs),
+        **base_kwargs
+    )
+    return cfg
+
+
 class MaxxVit(nn.Module):
     """ CoaTNet + MaxVit base model.
 
@@ -1218,10 +1150,13 @@ class MaxxVit(nn.Module):
             num_classes: int = 1000,
             global_pool: str = 'avg',
             drop_rate: float = 0.,
-            drop_path_rate: float = 0.
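For reference, the relocated relative-position machinery above can be exercised standalone. A minimal sketch, reusing generate_lookup_tensor and the two einsum lookups exactly as defined in the removed hunk; the window size and head count below are illustrative values, not from this patch:

import torch

def generate_lookup_tensor(length, max_relative_position=None):
    # one-hot lookup: ret[i, x, v] = 1 where v = x - i + max_relative_position
    if max_relative_position is None:
        max_relative_position = length - 1
    vocab_size = 2 * max_relative_position + 1
    ret = torch.zeros(length, length, vocab_size)
    for i in range(length):
        for x in range(length):
            if abs(x - i) <= max_relative_position:
                ret[i, x, x - i + max_relative_position] = 1
    return ret

window = (7, 7)  # illustrative window size
num_heads = 4
height_lookup = generate_lookup_tensor(window[0])  # [7, 7, 13]
width_lookup = generate_lookup_tensor(window[1])   # [7, 7, 13]
table = torch.randn(num_heads, 2 * window[0] - 1, 2 * window[1] - 1)

# reindex_2d_einsum_lookup from the hunk above, inlined:
t = torch.einsum('nhw,ixh->nixw', table, height_lookup)
t = torch.einsum('nixw,jyw->nijxy', t, width_lookup)
bias = t.reshape(num_heads, window[0] * window[1], window[0] * window[1])
print(bias.shape)  # torch.Size([4, 49, 49]) -> per-head bias over window tokens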
+            drop_path_rate: float = 0.,
+            **kwargs,
     ):
         super().__init__()
         img_size = to_2tuple(img_size)
+        if kwargs:
+            cfg = _overlay_kwargs(cfg, **kwargs)
         transformer_cfg = cfg_window_size(cfg.transformer_cfg, img_size)
         self.num_classes = num_classes
         self.global_pool = global_pool
@@ -1745,6 +1680,26 @@ model_cfgs = dict(
             init_values=1e-6,
         ),
     ),
+    maxvit_rmlp_base_rw_224=MaxxVitCfg(
+        embed_dim=(96, 192, 384, 768),
+        depths=(2, 6, 14, 2),
+        block_type=('M',) * 4,
+        stem_width=(32, 64),
+        head_hidden_size=768,
+        **_rw_max_cfg(
+            rel_pos_type='mlp',
+        ),
+    ),
+    maxvit_rmlp_base_rw_384=MaxxVitCfg(
+        embed_dim=(96, 192, 384, 768),
+        depths=(2, 6, 14, 2),
+        block_type=('M',) * 4,
+        stem_width=(32, 64),
+        head_hidden_size=768,
+        **_rw_max_cfg(
+            rel_pos_type='mlp',
+        ),
+    ),
 
     maxvit_tiny_pm_256=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
@@ -1927,6 +1882,12 @@ default_cfgs = generate_default_cfgs({
     'maxvit_rmlp_small_rw_256': _cfg(
         url='',
         input_size=(3, 256, 256), pool_size=(8, 8)),
+    'maxvit_rmlp_base_rw_224': _cfg(
+        url='',
+    ),
+    'maxvit_rmlp_base_rw_384': _cfg(
+        url='',
+        input_size=(3, 384, 384), pool_size=(12, 12)),
 
     'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
 
@@ -2156,6 +2117,16 @@ def maxvit_rmlp_small_rw_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_rmlp_small_rw_256', pretrained=pretrained, **kwargs)
 
 
+@register_model
+def maxvit_rmlp_base_rw_224(pretrained=False, **kwargs):
+    return _create_maxxvit('maxvit_rmlp_base_rw_224', pretrained=pretrained, **kwargs)
+
+
+@register_model
+def maxvit_rmlp_base_rw_384(pretrained=False, **kwargs):
+    return _create_maxxvit('maxvit_rmlp_base_rw_384', pretrained=pretrained, **kwargs)
+
+
 @register_model
 def maxvit_tiny_pm_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs)
diff --git a/timm/models/mobilenetv3.py b/timm/models/mobilenetv3.py
index cf4f268d..e1da91a2 100644
--- a/timm/models/mobilenetv3.py
+++ b/timm/models/mobilenetv3.py
@@ -21,93 +21,12 @@ from ._efficientnet_builder import EfficientNetBuilder, decode_arch_def, efficie
     round_channels, resolve_bn_args, resolve_act_layer, BN_EPS_TF_DEFAULT
 from ._features import FeatureInfo, FeatureHooks
 from ._manipulate import checkpoint_seq
+from ._pretrained import generate_default_cfgs
 from ._registry import register_model
 
 __all__ = ['MobileNetV3', 'MobileNetV3Features']
 
 
-def _cfg(url='', **kwargs):
-    return {
-        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
-        'crop_pct': 0.875, 'interpolation': 'bilinear',
-        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
-        'first_conv': 'conv_stem', 'classifier': 'classifier',
-        **kwargs
-    }
-
-
-default_cfgs = {
-    'mobilenetv3_large_075': _cfg(url=''),
-    'mobilenetv3_large_100': _cfg(
-        interpolation='bicubic',
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth'),
-    'mobilenetv3_large_100_miil': _cfg(
-        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.),
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_1k_miil_78_0-66471c13.pth'),
-    'mobilenetv3_large_100_miil_in21k': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_in21k_miil-d71cc17b.pth',
-        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.), num_classes=11221),
-
-    'mobilenetv3_small_050': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_050_lambc-4b7bbe87.pth',
-        interpolation='bicubic'),
-    'mobilenetv3_small_075': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_075_lambc-384766db.pth',
-        interpolation='bicubic'),
-    'mobilenetv3_small_100': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_100_lamb-266a294c.pth',
-        interpolation='bicubic'),
-
-    'mobilenetv3_rw': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_100-35495452.pth',
-        interpolation='bicubic'),
-
-    'tf_mobilenetv3_large_075': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_075-150ee8b0.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-    'tf_mobilenetv3_large_100': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_100-427764d5.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-    'tf_mobilenetv3_large_minimal_100': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_minimal_100-8596ae28.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-    'tf_mobilenetv3_small_075': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_075-da427f52.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-    'tf_mobilenetv3_small_100': _cfg(
-        url= 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_100-37f49e2b.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-    'tf_mobilenetv3_small_minimal_100': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_minimal_100-922a7843.pth',
-        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
-
-    'fbnetv3_b': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_b_224-ead5d2a1.pth',
-        test_input_size=(3, 256, 256), crop_pct=0.95),
-    'fbnetv3_d': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_d_224-c98bce42.pth',
-        test_input_size=(3, 256, 256), crop_pct=0.95),
-    'fbnetv3_g': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_g_240-0b1df83b.pth',
-        input_size=(3, 240, 240), test_input_size=(3, 288, 288), crop_pct=0.95, pool_size=(8, 8)),
-
-    "lcnet_035": _cfg(),
-    "lcnet_050": _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_050-f447553b.pth',
-        interpolation='bicubic',
-    ),
-    "lcnet_075": _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_075-318cad2c.pth',
-        interpolation='bicubic',
-    ),
-    "lcnet_100": _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_100-a929038c.pth',
-        interpolation='bicubic',
-    ),
-    "lcnet_150": _cfg(),
-}
-
-
 class MobileNetV3(nn.Module):
     """ MobiletNet-V3
 
@@ -124,9 +43,24 @@ class MobileNetV3(nn.Module):
     """
 
     def __init__(
-            self, block_args, num_classes=1000, in_chans=3, stem_size=16, fix_stem=False, num_features=1280,
-            head_bias=True, pad_type='', act_layer=None, norm_layer=None, se_layer=None, se_from_exp=True,
-            round_chs_fn=round_channels, drop_rate=0., drop_path_rate=0., global_pool='avg'):
+            self,
+            block_args,
+            num_classes=1000,
+            in_chans=3,
+            stem_size=16,
+            fix_stem=False,
+            num_features=1280,
+            head_bias=True,
+            pad_type='',
+            act_layer=None,
+            norm_layer=None,
+            se_layer=None,
+            se_from_exp=True,
+            round_chs_fn=round_channels,
+            drop_rate=0.,
+            drop_path_rate=0.,
+            global_pool='avg',
+    ):
         super(MobileNetV3, self).__init__()
         act_layer = act_layer or nn.ReLU
         norm_layer = norm_layer or nn.BatchNorm2d
@@ -145,8 +79,15 @@ class MobileNetV3(nn.Module):
 
         # Middle stages (IR/ER/DS Blocks)
         builder = EfficientNetBuilder(
-            output_stride=32, pad_type=pad_type, round_chs_fn=round_chs_fn, se_from_exp=se_from_exp,
-            act_layer=act_layer, norm_layer=norm_layer, se_layer=se_layer, drop_path_rate=drop_path_rate)
+            output_stride=32,
+            pad_type=pad_type,
+            round_chs_fn=round_chs_fn,
+            se_from_exp=se_from_exp,
+            act_layer=act_layer,
+            norm_layer=norm_layer,
+            se_layer=se_layer,
+            drop_path_rate=drop_path_rate,
+        )
         self.blocks = nn.Sequential(*builder(stem_size, block_args))
         self.feature_info = builder.features
         head_chs = builder.in_chs
@@ -225,9 +166,23 @@ class MobileNetV3Features(nn.Module):
     """
 
     def __init__(
-            self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bottleneck', in_chans=3,
-            stem_size=16, fix_stem=False, output_stride=32, pad_type='', round_chs_fn=round_channels,
-            se_from_exp=True, act_layer=None, norm_layer=None, se_layer=None, drop_rate=0., drop_path_rate=0.):
+            self,
+            block_args,
+            out_indices=(0, 1, 2, 3, 4),
+            feature_location='bottleneck',
+            in_chans=3,
+            stem_size=16,
+            fix_stem=False,
+            output_stride=32,
+            pad_type='',
+            round_chs_fn=round_channels,
+            se_from_exp=True,
+            act_layer=None,
+            norm_layer=None,
+            se_layer=None,
+            drop_rate=0.,
+            drop_path_rate=0.,
+    ):
         super(MobileNetV3Features, self).__init__()
         act_layer = act_layer or nn.ReLU
         norm_layer = norm_layer or nn.BatchNorm2d
@@ -243,9 +198,16 @@ class MobileNetV3Features(nn.Module):
 
         # Middle stages (IR/ER/DS Blocks)
         builder = EfficientNetBuilder(
-            output_stride=output_stride, pad_type=pad_type, round_chs_fn=round_chs_fn, se_from_exp=se_from_exp,
-            act_layer=act_layer, norm_layer=norm_layer, se_layer=se_layer,
-            drop_path_rate=drop_path_rate, feature_location=feature_location)
+            output_stride=output_stride,
+            pad_type=pad_type,
+            round_chs_fn=round_chs_fn,
+            se_from_exp=se_from_exp,
+            act_layer=act_layer,
+            norm_layer=norm_layer,
+            se_layer=se_layer,
+            drop_path_rate=drop_path_rate,
+            feature_location=feature_location,
+        )
         self.blocks = nn.Sequential(*builder(stem_size, block_args))
         self.feature_info = FeatureInfo(builder.features, out_indices)
         self._stage_out_idx = {v['stage']: i for i, v in enumerate(self.feature_info) if i in out_indices}
@@ -286,7 +248,9 @@ def _create_mnv3(variant, pretrained=False, **kwargs):
         kwargs_filter = ('num_classes', 'num_features', 'head_conv', 'head_bias', 'global_pool')
         model_cls = MobileNetV3Features
     model = build_model_with_cfg(
-        model_cls, variant, pretrained,
+        model_cls,
+        variant,
+        pretrained,
         pretrained_strict=not features_only,
         kwargs_filter=kwargs_filter,
         **kwargs)
@@ -567,6 +531,110 @@ def _gen_lcnet(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
     return model
 
 
+def _cfg(url='', **kwargs):
+    return {
+        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
+        'crop_pct': 0.875, 'interpolation': 'bilinear',
+        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
+        'first_conv': 'conv_stem', 'classifier': 'classifier',
+        **kwargs
+    }
+
+
+default_cfgs = generate_default_cfgs({
+    'mobilenetv3_large_075.untrained': _cfg(url=''),
+    'mobilenetv3_large_100.ra_in1k': _cfg(
+        interpolation='bicubic',
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_large_100_ra-f55367f5.pth',
+        hf_hub_id='timm/'),
+    'mobilenetv3_large_100.miil_in21k_ft_in1k': _cfg(
+        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.),
+        origin_url='https://github.com/Alibaba-MIIL/ImageNet21K',
+        paper_ids='arXiv:2104.10972v4',
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_1k_miil_78_0-66471c13.pth',
+        hf_hub_id='timm/'),
+    'mobilenetv3_large_100.miil_in21k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/mobilenetv3_large_100_in21k_miil-d71cc17b.pth',
+        hf_hub_id='timm/',
+        origin_url='https://github.com/Alibaba-MIIL/ImageNet21K',
+        paper_ids='arXiv:2104.10972v4',
+        interpolation='bilinear', mean=(0., 0., 0.), std=(1., 1., 1.), num_classes=11221),
+
+    'mobilenetv3_small_050.lamb_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_050_lambc-4b7bbe87.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic'),
+    'mobilenetv3_small_075.lamb_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_075_lambc-384766db.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic'),
+    'mobilenetv3_small_100.lamb_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_small_100_lamb-266a294c.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic'),
+
+    'mobilenetv3_rw.rmsp_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv3_100-35495452.pth',
+        interpolation='bicubic'),
+
+    'tf_mobilenetv3_large_075.in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_075-150ee8b0.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+    'tf_mobilenetv3_large_100.in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_100-427764d5.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+    'tf_mobilenetv3_large_minimal_100.in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_large_minimal_100-8596ae28.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+    'tf_mobilenetv3_small_075.in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_075-da427f52.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+    'tf_mobilenetv3_small_100.in1k': _cfg(
+        url= 'https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_100-37f49e2b.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+    'tf_mobilenetv3_small_minimal_100.in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mobilenetv3_small_minimal_100-922a7843.pth',
+        hf_hub_id='timm/',
+        mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
+
+    'fbnetv3_b.ra2_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_b_224-ead5d2a1.pth',
+        hf_hub_id='timm/',
+        test_input_size=(3, 256, 256), crop_pct=0.95),
+    'fbnetv3_d.ra2_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_d_224-c98bce42.pth',
+        hf_hub_id='timm/',
+        test_input_size=(3, 256, 256), crop_pct=0.95),
+    'fbnetv3_g.ra2_in1k': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetv3_g_240-0b1df83b.pth',
+        hf_hub_id='timm/',
+        input_size=(3, 240, 240), test_input_size=(3, 288, 288), crop_pct=0.95, pool_size=(8, 8)),
+
+    "lcnet_035.untrained": _cfg(),
+    "lcnet_050.ra2_in1k": _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_050-f447553b.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic',
+    ),
+    "lcnet_075.ra2_in1k": _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_075-318cad2c.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic',
+    ),
+    "lcnet_100.ra2_in1k": _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/lcnet_100-a929038c.pth',
+        hf_hub_id='timm/',
+        interpolation='bicubic',
+    ),
+    "lcnet_150.untrained": _cfg(),
+})
+
+
 @register_model
 def mobilenetv3_large_075(pretrained=False, **kwargs):
     """ MobileNet V3 """
@@ -581,24 +649,6 @@ def mobilenetv3_large_100(pretrained=False, **kwargs):
     return model
 
 
-@register_model
-def mobilenetv3_large_100_miil(pretrained=False, **kwargs):
-    """ MobileNet V3
-    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
-    """
-    model = _gen_mobilenet_v3('mobilenetv3_large_100_miil', 1.0, pretrained=pretrained, **kwargs)
-    return model
-
-
-@register_model
-def mobilenetv3_large_100_miil_in21k(pretrained=False, **kwargs):
-    """ MobileNet V3, 21k pretraining
-    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
-    """
-    model = _gen_mobilenet_v3('mobilenetv3_large_100_miil_in21k', 1.0, pretrained=pretrained, **kwargs)
-    return model
-
-
 @register_model
 def mobilenetv3_small_050(pretrained=False, **kwargs):
     """ MobileNet V3 """
diff --git a/timm/models/mobilevit.py b/timm/models/mobilevit.py
index 3d2ae84a..8e8f4428 100644
--- a/timm/models/mobilevit.py
+++ b/timm/models/mobilevit.py
@@ -266,9 +266,16 @@ class MobileVitBlock(nn.Module):
 
         self.transformer = nn.Sequential(*[
             TransformerBlock(
-                transformer_dim, mlp_ratio=mlp_ratio, num_heads=num_heads, qkv_bias=True,
-                attn_drop=attn_drop, drop=drop, drop_path=drop_path_rate,
-                act_layer=layers.act, norm_layer=transformer_norm_layer)
+                transformer_dim,
+                mlp_ratio=mlp_ratio,
+                num_heads=num_heads,
+                qkv_bias=True,
+                attn_drop=attn_drop,
+                drop=drop,
+                drop_path=drop_path_rate,
+                act_layer=layers.act,
+                norm_layer=transformer_norm_layer,
+            )
             for _ in range(transformer_depth)
         ])
         self.norm = transformer_norm_layer(transformer_dim)
diff --git a/timm/models/nfnet.py b/timm/models/nfnet.py
index 48f91b35..f9a90ab3 100644
--- a/timm/models/nfnet.py
+++ b/timm/models/nfnet.py
@@ -17,7 +17,7 @@ Status:
 Hacked together by / copyright Ross Wightman, 2021.
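The mobilenetv3 changes above fold the separate `_miil` entrypoints into pretrained-tag suffixes on the base architecture name, resolved via generate_default_cfgs. A usage sketch, assuming a timm build that includes this patch and its _pretrained support:

import timm

# before this patch:
#   timm.create_model('mobilenetv3_large_100_miil_in21k', pretrained=True)
# after: the '.tag' suffix selects a weight set for the same architecture
model = timm.create_model('mobilenetv3_large_100.miil_in21k', pretrained=True)

# with no tag given, the default (first) pretrained cfg for the arch is used
model = timm.create_model('mobilenetv3_large_100', pretrained=True)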
""" from collections import OrderedDict -from dataclasses import dataclass +from dataclasses import dataclass, replace from functools import partial from typing import Tuple, Optional @@ -159,11 +159,25 @@ class NfCfg: def _nfres_cfg( - depths, channels=(256, 512, 1024, 2048), group_size=None, act_layer='relu', attn_layer=None, attn_kwargs=None): + depths, + channels=(256, 512, 1024, 2048), + group_size=None, + act_layer='relu', + attn_layer=None, + attn_kwargs=None, +): attn_kwargs = attn_kwargs or {} cfg = NfCfg( - depths=depths, channels=channels, stem_type='7x7_pool', stem_chs=64, bottle_ratio=0.25, - group_size=group_size, act_layer=act_layer, attn_layer=attn_layer, attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='7x7_pool', + stem_chs=64, + bottle_ratio=0.25, + group_size=group_size, + act_layer=act_layer, + attn_layer=attn_layer, + attn_kwargs=attn_kwargs, + ) return cfg @@ -171,28 +185,70 @@ def _nfreg_cfg(depths, channels=(48, 104, 208, 440)): num_features = 1280 * channels[-1] // 440 attn_kwargs = dict(rd_ratio=0.5) cfg = NfCfg( - depths=depths, channels=channels, stem_type='3x3', group_size=8, width_factor=0.75, bottle_ratio=2.25, - num_features=num_features, reg=True, attn_layer='se', attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='3x3', + group_size=8, + width_factor=0.75, + bottle_ratio=2.25, + num_features=num_features, + reg=True, + attn_layer='se', + attn_kwargs=attn_kwargs, + ) return cfg def _nfnet_cfg( - depths, channels=(256, 512, 1536, 1536), group_size=128, bottle_ratio=0.5, feat_mult=2., - act_layer='gelu', attn_layer='se', attn_kwargs=None): + depths, + channels=(256, 512, 1536, 1536), + group_size=128, + bottle_ratio=0.5, + feat_mult=2., + act_layer='gelu', + attn_layer='se', + attn_kwargs=None, +): num_features = int(channels[-1] * feat_mult) attn_kwargs = attn_kwargs if attn_kwargs is not None else dict(rd_ratio=0.5) cfg = NfCfg( - depths=depths, channels=channels, stem_type='deep_quad', stem_chs=128, group_size=group_size, - bottle_ratio=bottle_ratio, extra_conv=True, num_features=num_features, act_layer=act_layer, - attn_layer=attn_layer, attn_kwargs=attn_kwargs) + depths=depths, + channels=channels, + stem_type='deep_quad', + stem_chs=128, + group_size=group_size, + bottle_ratio=bottle_ratio, + extra_conv=True, + num_features=num_features, + act_layer=act_layer, + attn_layer=attn_layer, + attn_kwargs=attn_kwargs, + ) return cfg -def _dm_nfnet_cfg(depths, channels=(256, 512, 1536, 1536), act_layer='gelu', skipinit=True): +def _dm_nfnet_cfg( + depths, + channels=(256, 512, 1536, 1536), + act_layer='gelu', + skipinit=True, +): cfg = NfCfg( - depths=depths, channels=channels, stem_type='deep_quad', stem_chs=128, group_size=128, - bottle_ratio=0.5, extra_conv=True, gamma_in_act=True, same_padding=True, skipinit=skipinit, - num_features=int(channels[-1] * 2.0), act_layer=act_layer, attn_layer='se', attn_kwargs=dict(rd_ratio=0.5)) + depths=depths, + channels=channels, + stem_type='deep_quad', + stem_chs=128, + group_size=128, + bottle_ratio=0.5, + extra_conv=True, + gamma_in_act=True, + same_padding=True, + skipinit=skipinit, + num_features=int(channels[-1] * 2.0), + act_layer=act_layer, + attn_layer='se', + attn_kwargs=dict(rd_ratio=0.5), + ) return cfg @@ -278,7 +334,14 @@ def act_with_gamma(act_type, gamma: float = 1.): class DownsampleAvg(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, conv_layer=ScaledStdConv2d): + self, + in_chs, + out_chs, + stride=1, + 
dilation=1, + first_dilation=None, + conv_layer=ScaledStdConv2d, + ): """ AvgPool Downsampling as in 'D' ResNet variants. Support for dilation.""" super(DownsampleAvg, self).__init__() avg_stride = stride if dilation == 1 else 1 @@ -299,9 +362,26 @@ class NormFreeBlock(nn.Module): """ def __init__( - self, in_chs, out_chs=None, stride=1, dilation=1, first_dilation=None, - alpha=1.0, beta=1.0, bottle_ratio=0.25, group_size=None, ch_div=1, reg=True, extra_conv=False, - skipinit=False, attn_layer=None, attn_gain=2.0, act_layer=None, conv_layer=None, drop_path_rate=0.): + self, + in_chs, + out_chs=None, + stride=1, + dilation=1, + first_dilation=None, + alpha=1.0, + beta=1.0, + bottle_ratio=0.25, + group_size=None, + ch_div=1, + reg=True, + extra_conv=False, + skipinit=False, + attn_layer=None, + attn_gain=2.0, + act_layer=None, + conv_layer=None, + drop_path_rate=0., + ): super().__init__() first_dilation = first_dilation or dilation out_chs = out_chs or in_chs @@ -316,7 +396,13 @@ class NormFreeBlock(nn.Module): if in_chs != out_chs or stride != 1 or dilation != first_dilation: self.downsample = DownsampleAvg( - in_chs, out_chs, stride=stride, dilation=dilation, first_dilation=first_dilation, conv_layer=conv_layer) + in_chs, + out_chs, + stride=stride, + dilation=dilation, + first_dilation=first_dilation, + conv_layer=conv_layer, + ) else: self.downsample = None @@ -452,14 +538,33 @@ class NormFreeNet(nn.Module): for what it is/does. Approx 8-10% throughput loss. """ def __init__( - self, cfg: NfCfg, num_classes=1000, in_chans=3, global_pool='avg', output_stride=32, - drop_rate=0., drop_path_rate=0. + self, + cfg: NfCfg, + num_classes=1000, + in_chans=3, + global_pool='avg', + output_stride=32, + drop_rate=0., + drop_path_rate=0., + **kwargs, ): + """ + Args: + cfg (NfCfg): Model architecture configuration + num_classes (int): Number of classifier classes (default: 1000) + in_chans (int): Number of input channels (default: 3) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + kwargs (dict): Extra kwargs overlayed onto cfg + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate self.grad_checkpointing = False + cfg = replace(cfg, **kwargs) assert cfg.act_layer in _nonlin_gamma, f"Please add non-linearity constants for activation ({cfg.act_layer})." 
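NormFreeNet (like MaxxVit's _overlay_kwargs earlier in this diff) now overlays extra constructor kwargs onto its dataclass cfg via dataclasses.replace, imported at the top of the file. A minimal sketch of the pattern; the DemoCfg dataclass is illustrative, not part of the patch:

from dataclasses import dataclass, replace

@dataclass
class DemoCfg:
    depths: tuple = (1, 2, 6, 3)
    act_layer: str = 'gelu'

cfg = DemoCfg()
# e.g. create_model('dm_nfnet_f0', act_layer='silu') would flow through **kwargs
cfg = replace(cfg, act_layer='silu')  # returns a new DemoCfg instance
print(cfg)                            # DemoCfg(depths=(1, 2, 6, 3), act_layer='silu')
# replace(cfg, bogus=1) raises TypeError, so mistyped kwargs fail loudly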
         conv_layer = ScaledStdConv2dSame if cfg.same_padding else ScaledStdConv2d
         if cfg.gamma_in_act:
@@ -472,7 +577,12 @@ class NormFreeNet(nn.Module):
 
         stem_chs = make_divisible((cfg.stem_chs or cfg.channels[0]) * cfg.width_factor, cfg.ch_div)
         self.stem, stem_stride, stem_feat = create_stem(
-            in_chans, stem_chs, cfg.stem_type, conv_layer=conv_layer, act_layer=act_layer)
+            in_chans,
+            stem_chs,
+            cfg.stem_type,
+            conv_layer=conv_layer,
+            act_layer=act_layer,
+        )
 
         self.feature_info = [stem_feat]
         drop_path_rates = [x.tolist() for x in torch.linspace(0, drop_path_rate, sum(cfg.depths)).split(cfg.depths)]
diff --git a/timm/models/regnet.py b/timm/models/regnet.py
index e1cc821b..9d2528f6 100644
--- a/timm/models/regnet.py
+++ b/timm/models/regnet.py
@@ -14,7 +14,7 @@ Weights from original impl have been modified
 Hacked together by / Copyright 2020 Ross Wightman
 """
 import math
-from dataclasses import dataclass
+from dataclasses import dataclass, replace
 from functools import partial
 from typing import Optional, Union, Callable
 
@@ -237,7 +237,15 @@ def downsample_avg(in_chs, out_chs, kernel_size=1, stride=1, dilation=1, norm_la
 
 
 def create_shortcut(
-        downsample_type, in_chs, out_chs, kernel_size, stride, dilation=(1, 1), norm_layer=None, preact=False):
+        downsample_type,
+        in_chs,
+        out_chs,
+        kernel_size,
+        stride,
+        dilation=(1, 1),
+        norm_layer=None,
+        preact=False,
+):
     assert downsample_type in ('avg', 'conv1x1', '', None)
     if in_chs != out_chs or stride != 1 or dilation[0] != dilation[1]:
         dargs = dict(stride=stride, dilation=dilation[0], norm_layer=norm_layer, preact=preact)
@@ -259,9 +267,21 @@ class Bottleneck(nn.Module):
     """
 
     def __init__(
-            self, in_chs, out_chs, stride=1, dilation=(1, 1), bottle_ratio=1, group_size=1, se_ratio=0.25,
-            downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d,
-            drop_block=None, drop_path_rate=0.):
+            self,
+            in_chs,
+            out_chs,
+            stride=1,
+            dilation=(1, 1),
+            bottle_ratio=1,
+            group_size=1,
+            se_ratio=0.25,
+            downsample='conv1x1',
+            linear_out=False,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            drop_block=None,
+            drop_path_rate=0.,
+    ):
         super(Bottleneck, self).__init__()
         act_layer = get_act_layer(act_layer)
         bottleneck_chs = int(round(out_chs * bottle_ratio))
@@ -307,9 +327,21 @@ class PreBottleneck(nn.Module):
     """
 
     def __init__(
-            self, in_chs, out_chs, stride=1, dilation=(1, 1), bottle_ratio=1, group_size=1, se_ratio=0.25,
-            downsample='conv1x1', linear_out=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d,
-            drop_block=None, drop_path_rate=0.):
+            self,
+            in_chs,
+            out_chs,
+            stride=1,
+            dilation=(1, 1),
+            bottle_ratio=1,
+            group_size=1,
+            se_ratio=0.25,
+            downsample='conv1x1',
+            linear_out=False,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            drop_block=None,
+            drop_path_rate=0.,
+    ):
         super(PreBottleneck, self).__init__()
         norm_act_layer = get_norm_act_layer(norm_layer, act_layer)
         bottleneck_chs = int(round(out_chs * bottle_ratio))
@@ -353,8 +385,16 @@ class RegStage(nn.Module):
     """Stage (sequence of blocks w/ the same output shape)."""
 
     def __init__(
-            self, depth, in_chs, out_chs, stride, dilation,
-            drop_path_rates=None, block_fn=Bottleneck, **block_kwargs):
+            self,
+            depth,
+            in_chs,
+            out_chs,
+            stride,
+            dilation,
+            drop_path_rates=None,
+            block_fn=Bottleneck,
+            **block_kwargs,
+    ):
         super(RegStage, self).__init__()
         self.grad_checkpointing = False
 
@@ -367,8 +407,13 @@ class RegStage(nn.Module):
             name = "b{}".format(i + 1)
             self.add_module(
                 name, block_fn(
-                    block_in_chs, out_chs, stride=block_stride, dilation=block_dilation,
-                    drop_path_rate=dpr, **block_kwargs)
+                    block_in_chs,
+                    out_chs,
+                    stride=block_stride,
+                    dilation=block_dilation,
+                    drop_path_rate=dpr,
+                    **block_kwargs,
+                )
             )
             first_dilation = dilation
 
@@ -389,12 +434,35 @@ class RegNet(nn.Module):
     """
 
    def __init__(
-            self, cfg: RegNetCfg, in_chans=3, num_classes=1000, output_stride=32, global_pool='avg',
-            drop_rate=0., drop_path_rate=0., zero_init_last=True):
+            self,
+            cfg: RegNetCfg,
+            in_chans=3,
+            num_classes=1000,
+            output_stride=32,
+            global_pool='avg',
+            drop_rate=0.,
+            drop_path_rate=0.,
+            zero_init_last=True,
+            **kwargs,
+    ):
+        """
+
+        Args:
+            cfg (RegNetCfg): Model architecture configuration
+            in_chans (int): Number of input channels (default: 3)
+            num_classes (int): Number of classifier classes (default: 1000)
+            output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32)
+            global_pool (str): Global pooling type (default: 'avg')
+            drop_rate (float): Dropout rate (default: 0.)
+            drop_path_rate (float): Stochastic depth drop-path rate (default: 0.)
+            zero_init_last (bool): Zero-init last weight of residual path
+            kwargs (dict): Extra kwargs overlayed onto cfg
+        """
         super().__init__()
         self.num_classes = num_classes
         self.drop_rate = drop_rate
         assert output_stride in (8, 16, 32)
+        cfg = replace(cfg, **kwargs)  # update cfg with extra passed kwargs
 
         # Construct the stem
         stem_width = cfg.stem_width
@@ -461,8 +529,12 @@ class RegNet(nn.Module):
             dict(zip(arg_names, params)) for params in
             zip(stage_widths, stage_strides, stage_dilations, stage_depths, stage_br, stage_gs, stage_dpr)]
         common_args = dict(
-            downsample=cfg.downsample, se_ratio=cfg.se_ratio, linear_out=cfg.linear_out,
-            act_layer=cfg.act_layer, norm_layer=cfg.norm_layer)
+            downsample=cfg.downsample,
+            se_ratio=cfg.se_ratio,
+            linear_out=cfg.linear_out,
+            act_layer=cfg.act_layer,
+            norm_layer=cfg.norm_layer,
+        )
         return per_stage_args, common_args
 
     @torch.jit.ignore
@@ -518,7 +590,6 @@ def _init_weights(module, name='', zero_init_last=False):
 
 
 def _filter_fn(state_dict):
-    """ convert patch embedding weight from manual patchify + linear proj to conv"""
     if 'classy_state_dict' in state_dict:
         import re
         state_dict = state_dict['classy_state_dict']['base_model']['model']
diff --git a/timm/models/res2net.py b/timm/models/res2net.py
index 4724df2a..29a49953 100644
--- a/timm/models/res2net.py
+++ b/timm/models/res2net.py
@@ -51,9 +51,21 @@ class Bottle2neck(nn.Module):
     expansion = 4
 
     def __init__(
-            self, inplanes, planes, stride=1, downsample=None,
-            cardinality=1, base_width=26, scale=4, dilation=1, first_dilation=None,
-            act_layer=nn.ReLU, norm_layer=None, attn_layer=None, **_):
+            self,
+            inplanes,
+            planes,
+            stride=1,
+            downsample=None,
+            cardinality=1,
+            base_width=26,
+            scale=4,
+            dilation=1,
+            first_dilation=None,
+            act_layer=nn.ReLU,
+            norm_layer=None,
+            attn_layer=None,
+            **_,
+    ):
         super(Bottle2neck, self).__init__()
         self.scale = scale
         self.is_first = stride > 1 or downsample is not None
@@ -89,7 +101,8 @@ class Bottle2neck(nn.Module):
         self.downsample = downsample
 
     def zero_init_last(self):
-        nn.init.zeros_(self.bn3.weight)
+        if getattr(self.bn3, 'weight', None) is not None:
+            nn.init.zeros_(self.bn3.weight)
 
     def forward(self, x):
         shortcut = x
@@ -143,8 +156,8 @@ def res2net50_26w_4s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4), **kwargs)
-    return _create_res2net('res2net50_26w_4s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=4))
+    return _create_res2net('res2net50_26w_4s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -154,8 +167,8 @@ def res2net101_26w_4s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4), **kwargs)
-    return _create_res2net('res2net101_26w_4s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 23, 3], base_width=26, block_args=dict(scale=4))
+    return _create_res2net('res2net101_26w_4s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -165,8 +178,8 @@ def res2net50_26w_6s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6), **kwargs)
-    return _create_res2net('res2net50_26w_6s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=6))
+    return _create_res2net('res2net50_26w_6s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -176,8 +189,8 @@ def res2net50_26w_8s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8), **kwargs)
-    return _create_res2net('res2net50_26w_8s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=26, block_args=dict(scale=8))
+    return _create_res2net('res2net50_26w_8s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -187,8 +200,8 @@ def res2net50_48w_2s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2), **kwargs)
-    return _create_res2net('res2net50_48w_2s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=48, block_args=dict(scale=2))
+    return _create_res2net('res2net50_48w_2s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -198,8 +211,8 @@ def res2net50_14w_8s(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8), **kwargs)
-    return _create_res2net('res2net50_14w_8s', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=14, block_args=dict(scale=8))
+    return _create_res2net('res2net50_14w_8s', pretrained, **dict(model_args, **kwargs))
 
 
 @register_model
@@ -209,5 +222,5 @@ def res2next50(pretrained=False, **kwargs):
        pretrained (bool): If True, returns a model pre-trained on ImageNet
     """
     model_args = dict(
-        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4), **kwargs)
-    return _create_res2net('res2next50', pretrained, **model_args)
+        block=Bottle2neck, layers=[3, 4, 6, 3], base_width=4, cardinality=8, block_args=dict(scale=4))
+    return _create_res2net('res2next50', pretrained, **dict(model_args, **kwargs))
diff --git a/timm/models/resnest.py b/timm/models/resnest.py
index 3b001c7b..38303f9c 100644
--- a/timm/models/resnest.py
+++ b/timm/models/resnest.py
@@ -57,10 +57,27 @@ class ResNestBottleneck(nn.Module):
     expansion = 4
 
     def __init__(
-            self, inplanes, planes, stride=1, downsample=None,
-            radix=1, cardinality=1, base_width=64, avd=False, avd_first=False, is_first=False,
-            reduce_first=1, dilation=1, first_dilation=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d,
-            attn_layer=None, aa_layer=None, drop_block=None, drop_path=None):
+            self,
+            inplanes,
+            planes,
+            stride=1,
+            downsample=None,
+            radix=1,
+            cardinality=1,
+            base_width=64,
+            avd=False,
+            avd_first=False,
+            is_first=False,
+            reduce_first=1,
+            dilation=1,
+            first_dilation=None,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            attn_layer=None,
+            aa_layer=None,
+            drop_block=None,
+            drop_path=None,
+    ):
         super(ResNestBottleneck, self).__init__()
         assert reduce_first == 1  # not supported
         assert attn_layer is None  # not supported
@@ -103,7 +120,8 @@ class ResNestBottleneck(nn.Module):
         self.downsample = downsample
 
     def zero_init_last(self):
-        nn.init.zeros_(self.bn3.weight)
+        if getattr(self.bn3, 'weight', None) is not None:
+            nn.init.zeros_(self.bn3.weight)
 
     def forward(self, x):
         shortcut = x
@@ -145,8 +163,8 @@ def resnest14d(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[1, 1, 1, 1],
         stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest14d', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest14d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -156,8 +174,8 @@ def resnest26d(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[2, 2, 2, 2],
         stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest26d', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest26d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -168,8 +186,8 @@ def resnest50d(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 4, 6, 3],
         stem_type='deep', stem_width=32, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest50d', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest50d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -180,8 +198,8 @@ def resnest101e(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 4, 23, 3],
         stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest101e', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest101e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -192,8 +210,8 @@ def resnest200e(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 24, 36, 3],
         stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest200e', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest200e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -204,8 +222,8 @@ def resnest269e(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 30, 48, 8],
         stem_type='deep', stem_width=64, avg_down=True, base_width=64, cardinality=1,
-        block_args=dict(radix=2, avd=True, avd_first=False), **kwargs)
-    return _create_resnest('resnest269e', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=2, avd=True, avd_first=False))
+    return _create_resnest('resnest269e', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -215,8 +233,8 @@ def resnest50d_4s2x40d(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 4, 6, 3],
         stem_type='deep', stem_width=32, avg_down=True, base_width=40, cardinality=2,
-        block_args=dict(radix=4, avd=True, avd_first=True), **kwargs)
-    return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=4, avd=True, avd_first=True))
+    return _create_resnest('resnest50d_4s2x40d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
 
 
 @register_model
@@ -226,5 +244,5 @@ def resnest50d_1s4x24d(pretrained=False, **kwargs):
     model_kwargs = dict(
         block=ResNestBottleneck, layers=[3, 4, 6, 3],
         stem_type='deep', stem_width=32, avg_down=True, base_width=24, cardinality=4,
-        block_args=dict(radix=1, avd=True, avd_first=True), **kwargs)
-    return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **model_kwargs)
+        block_args=dict(radix=1, avd=True, avd_first=True))
+    return _create_resnest('resnest50d_1s4x24d', pretrained=pretrained, **dict(model_kwargs, **kwargs))
diff --git a/timm/models/resnet.py b/timm/models/resnet.py
index 50849017..200280b3 100644
--- a/timm/models/resnet.py
+++ b/timm/models/resnet.py
@@ -16,7 +16,7 @@ import torch.nn.functional as F
 
 from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
 from timm.layers import DropBlock2d, DropPath, AvgPool2dSame, BlurPool2d, GroupNorm, create_attn, get_attn, \
-    create_classifier
+    get_act_layer, get_norm_layer, create_classifier
 from ._builder import build_model_with_cfg
 from ._manipulate import checkpoint_seq
 from ._registry import register_model, model_entrypoint
@@ -337,9 +337,23 @@ class BasicBlock(nn.Module):
     expansion = 1
 
     def __init__(
-            self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64,
-            reduce_first=1, dilation=1, first_dilation=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d,
-            attn_layer=None, aa_layer=None, drop_block=None, drop_path=None):
+            self,
+            inplanes,
+            planes,
+            stride=1,
+            downsample=None,
+            cardinality=1,
+            base_width=64,
+            reduce_first=1,
+            dilation=1,
+            first_dilation=None,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            attn_layer=None,
+            aa_layer=None,
+            drop_block=None,
+            drop_path=None,
+    ):
         super(BasicBlock, self).__init__()
 
         assert cardinality == 1, 'BasicBlock only supports cardinality of 1'
@@ -370,7 +384,8 @@ class BasicBlock(nn.Module):
         self.drop_path = drop_path
 
     def zero_init_last(self):
-        nn.init.zeros_(self.bn2.weight)
+        if getattr(self.bn2, 'weight', None) is not None:
+            nn.init.zeros_(self.bn2.weight)
 
     def forward(self, x):
         shortcut = x
@@ -402,9 +417,23 @@ class Bottleneck(nn.Module):
     expansion = 4
 
     def __init__(
-            self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64,
-            reduce_first=1, dilation=1, first_dilation=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d,
-            attn_layer=None, aa_layer=None, drop_block=None, drop_path=None):
+            self,
+            inplanes,
+            planes,
+            stride=1,
+            downsample=None,
+            cardinality=1,
+            base_width=64,
+            reduce_first=1,
+            dilation=1,
+            first_dilation=None,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            attn_layer=None,
+            aa_layer=None,
+            drop_block=None,
+            drop_path=None,
+    ):
         super(Bottleneck, self).__init__()
 
         width = int(math.floor(planes * (base_width / 64)) * cardinality)
@@ -437,7 +466,8 @@ class Bottleneck(nn.Module):
         self.drop_path = drop_path
 
     def zero_init_last(self):
-        nn.init.zeros_(self.bn3.weight)
+        if getattr(self.bn3, 'weight', None) is not None:
+            nn.init.zeros_(self.bn3.weight)
 
     def forward(self, x):
         shortcut = x
@@ -470,7 +500,14 @@ class Bottleneck(nn.Module):
 
 
 def downsample_conv(
-        in_channels, out_channels, kernel_size, stride=1, dilation=1, first_dilation=None, norm_layer=None):
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        dilation=1,
+        first_dilation=None,
+        norm_layer=None,
+):
     norm_layer = norm_layer or nn.BatchNorm2d
     kernel_size = 1 if stride == 1 and dilation == 1 else kernel_size
     first_dilation = (first_dilation or dilation) if kernel_size > 1 else 1
@@ -484,7 +521,14 @@ def downsample_conv(
 
 
 def downsample_avg(
-        in_channels, out_channels, kernel_size, stride=1, dilation=1, first_dilation=None, norm_layer=None):
+        in_channels,
+        out_channels,
+        kernel_size,
+        stride=1,
+        dilation=1,
+        first_dilation=None,
+        norm_layer=None,
+):
     norm_layer = norm_layer or nn.BatchNorm2d
     avg_stride = stride if dilation == 1 else 1
     if stride == 1 and dilation == 1:
@@ -508,8 +552,18 @@ def drop_blocks(drop_prob=0.):
 
 
 def make_blocks(
-        block_fn, channels, block_repeats, inplanes, reduce_first=1, output_stride=32,
-        down_kernel_size=1, avg_down=False, drop_block_rate=0., drop_path_rate=0., **kwargs):
+        block_fn,
+        channels,
+        block_repeats,
+        inplanes,
+        reduce_first=1,
+        output_stride=32,
+        down_kernel_size=1,
+        avg_down=False,
+        drop_block_rate=0.,
+        drop_path_rate=0.,
+        **kwargs,
+):
     stages = []
     feature_info = []
     net_num_blocks = sum(block_repeats)
@@ -528,8 +582,14 @@ def make_blocks(
             downsample = None
             if stride != 1 or inplanes != planes * block_fn.expansion:
                 down_kwargs = dict(
-                    in_channels=inplanes, out_channels=planes * block_fn.expansion, kernel_size=down_kernel_size,
-                    stride=stride, dilation=dilation, first_dilation=prev_dilation, norm_layer=kwargs.get('norm_layer'))
+                    in_channels=inplanes,
+                    out_channels=planes * block_fn.expansion,
+                    kernel_size=down_kernel_size,
+                    stride=stride,
+                    dilation=dilation,
+                    first_dilation=prev_dilation,
+                    norm_layer=kwargs.get('norm_layer'),
+                )
                 downsample = downsample_avg(**down_kwargs) if avg_down else downsample_conv(**down_kwargs)
 
             block_kwargs = dict(reduce_first=reduce_first, dilation=dilation, drop_block=db, **kwargs)
@@ -581,44 +641,72 @@ class ResNet(nn.Module):
     SENet-154 - 3 layer deep 3x3 stem (same as v1c-v1s), stem_width = 64, cardinality=64,
         reduction by 2 on width of first bottleneck convolution, 3x3 downsample convs after first block
-
-    Parameters
-    ----------
-    block : Block, class for the residual block. Options are BasicBlockGl, BottleneckGl.
-    layers : list of int, number of layers in each block
-    num_classes : int, default 1000, number of classification classes.
-    in_chans : int, default 3, number of input (color) channels.
-    output_stride : int, default 32, output stride of the network, 32, 16, or 8.
-    global_pool : str, Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax'
-    cardinality : int, default 1, number of convolution groups for 3x3 conv in Bottleneck.
-    base_width : int, default 64, factor determining bottleneck channels. `planes * base_width / 64 * cardinality`
-    stem_width : int, default 64, number of channels in stem convolutions
-    stem_type : str, default ''
-        The type of stem:
-          * '', default - a single 7x7 conv with a width of stem_width
-          * 'deep' - three 3x3 convolution layers of widths stem_width, stem_width, stem_width * 2
-          * 'deep_tiered' - three 3x3 conv layers of widths stem_width//4 * 3, stem_width, stem_width * 2
-    block_reduce_first : int, default 1
-        Reduction factor for first convolution output width of residual blocks, 1 for all archs except senets, where 2
-    down_kernel_size : int, default 1, kernel size of residual block downsample path, 1x1 for most, 3x3 for senets
-    avg_down : bool, default False, use average pooling for projection skip connection between stages/downsample.
-    act_layer : nn.Module, activation layer
-    norm_layer : nn.Module, normalization layer
-    aa_layer : nn.Module, anti-aliasing layer
-    drop_rate : float, default 0. Dropout probability before classifier, for training
     """
 
     def __init__(
-            self, block, layers, num_classes=1000, in_chans=3, output_stride=32, global_pool='avg',
-            cardinality=1, base_width=64, stem_width=64, stem_type='', replace_stem_pool=False, block_reduce_first=1,
-            down_kernel_size=1, avg_down=False, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None,
-            drop_rate=0.0, drop_path_rate=0., drop_block_rate=0., zero_init_last=True, block_args=None):
+            self,
+            block,
+            layers,
+            num_classes=1000,
+            in_chans=3,
+            output_stride=32,
+            global_pool='avg',
+            cardinality=1,
+            base_width=64,
+            stem_width=64,
+            stem_type='',
+            replace_stem_pool=False,
+            block_reduce_first=1,
+            down_kernel_size=1,
+            avg_down=False,
+            act_layer=nn.ReLU,
+            norm_layer=nn.BatchNorm2d,
+            aa_layer=None,
+            drop_rate=0.0,
+            drop_path_rate=0.,
+            drop_block_rate=0.,
+            zero_init_last=True,
+            block_args=None,
+    ):
+        """
+        Args:
+            block (nn.Module): class for the residual block. Options are BasicBlock, Bottleneck.
+            layers (List[int]): number of layers in each block
+            num_classes (int): number of classification classes (default 1000)
+            in_chans (int): number of input (color) channels. (default 3)
+            output_stride (int): output stride of the network, 32, 16, or 8. (default 32)
+            global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg')
+            cardinality (int): number of convolution groups for 3x3 conv in Bottleneck. (default 1)
+            base_width (int): bottleneck channels factor. `planes * base_width / 64 * cardinality` (default 64)
+            stem_width (int): number of channels in stem convolutions (default 64)
+            stem_type (str): The type of stem (default ''):
+                * '', default - a single 7x7 conv with a width of stem_width
+                * 'deep' - three 3x3 convolution layers of widths stem_width, stem_width, stem_width * 2
+                * 'deep_tiered' - three 3x3 conv layers of widths stem_width//4 * 3, stem_width, stem_width * 2
+            replace_stem_pool (bool): replace stem max-pooling layer with a 3x3 stride-2 convolution
+            block_reduce_first (int): Reduction factor for first convolution output width of residual blocks,
+                1 for all archs except senets, where 2 (default 1)
+            down_kernel_size (int): kernel size of residual block downsample path,
+                1x1 for most, 3x3 for senets (default: 1)
+            avg_down (bool): use avg pooling for projection skip connection between stages/downsample (default False)
+            act_layer (str, nn.Module): activation layer
+            norm_layer (str, nn.Module): normalization layer
+            aa_layer (nn.Module): anti-aliasing layer
+            drop_rate (float): Dropout probability before classifier, for training (default 0.)
+            drop_path_rate (float): Stochastic depth drop-path rate (default 0.)
+            drop_block_rate (float): Drop block rate (default 0.)
+            zero_init_last (bool): zero-init the last weight in residual path (usually last BN affine weight)
+            block_args (dict): Extra kwargs to pass through to block module
+        """
         super(ResNet, self).__init__()
         block_args = block_args or dict()
         assert output_stride in (8, 16, 32)
         self.num_classes = num_classes
         self.drop_rate = drop_rate
         self.grad_checkpointing = False
+
+        act_layer = get_act_layer(act_layer)
+        norm_layer = get_norm_layer(norm_layer)
 
         # Stem
         deep_stem = 'deep' in stem_type
@@ -663,10 +751,23 @@ class ResNet(nn.Module):
         # Feature Blocks
         channels = [64, 128, 256, 512]
         stage_modules, stage_feature_info = make_blocks(
-            block, channels, layers, inplanes, cardinality=cardinality, base_width=base_width,
-            output_stride=output_stride, reduce_first=block_reduce_first, avg_down=avg_down,
-            down_kernel_size=down_kernel_size, act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer,
-            drop_block_rate=drop_block_rate, drop_path_rate=drop_path_rate, **block_args)
+            block,
+            channels,
+            layers,
+            inplanes,
+            cardinality=cardinality,
+            base_width=base_width,
+            output_stride=output_stride,
+            reduce_first=block_reduce_first,
+            avg_down=avg_down,
+            down_kernel_size=down_kernel_size,
+            act_layer=act_layer,
+            norm_layer=norm_layer,
+            aa_layer=aa_layer,
+            drop_block_rate=drop_block_rate,
+            drop_path_rate=drop_path_rate,
+            **block_args,
+        )
         for stage in stage_modules:
             self.add_module(*stage)  # layer1, layer2, etc
         self.feature_info.extend(stage_feature_info)
@@ -687,9 +788,6 @@ class ResNet(nn.Module):
         for n, m in self.named_modules():
             if isinstance(m, nn.Conv2d):
                 nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
-            elif isinstance(m, nn.BatchNorm2d):
-                nn.init.ones_(m.weight)
-                nn.init.zeros_(m.bias)
         if zero_init_last:
             for m in self.modules():
                 if hasattr(m, 'zero_init_last'):
@@ -747,77 +845,72 @@ def _create_resnet(variant, pretrained=False, **kwargs):
 def resnet10t(pretrained=False, **kwargs):
     """Constructs a ResNet-10-T model.
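The entrypoint rewrites that follow (and the res2net/resnest ones above) all move **kwargs out of the model_args literal and merge at call time with **dict(model_args, **kwargs). The behavioral difference, as a standalone sketch with dummy values:

model_args = dict(layers=[3, 4, 6, 3], stem_width=32)

# old pattern: a duplicate key blows up while building the dict
try:
    dict(layers=[3, 4, 6, 3], stem_width=32, **{'stem_width': 64})
except TypeError as e:
    print(e)  # got multiple values for keyword argument 'stem_width'

# new pattern: user kwargs cleanly override the per-variant defaults
merged = dict(model_args, **{'stem_width': 64, 'num_classes': 10})
print(merged['stem_width'])   # 64 -> user value wins
print(merged['num_classes'])  # 10 -> extra keys pass through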
""" - model_args = dict( - block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet10t', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet10t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet14t(pretrained=False, **kwargs): """Constructs a ResNet-14-T model. """ - model_args = dict( - block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet14t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[1, 1, 1, 1], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet14t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet18(pretrained=False, **kwargs): """Constructs a ResNet-18 model. """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('resnet18', pretrained, **dict(model_args, **kwargs)) @register_model def resnet18d(pretrained=False, **kwargs): """Constructs a ResNet-18-D model. """ - model_args = dict( - block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet18d', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet18d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet34(pretrained=False, **kwargs): """Constructs a ResNet-34 model. """ - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('resnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3]) + return _create_resnet('resnet34', pretrained, **dict(model_args, **kwargs)) @register_model def resnet34d(pretrained=False, **kwargs): """Constructs a ResNet-34-D model. """ - model_args = dict( - block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet34d', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet34d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26(pretrained=False, **kwargs): """Constructs a ResNet-26 model. """ - model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('resnet26', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2]) + return _create_resnet('resnet26', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26t(pretrained=False, **kwargs): """Constructs a ResNet-26-T model. """ - model_args = dict( - block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet26t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet26t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet26d(pretrained=False, **kwargs): """Constructs a ResNet-26-D model. 
""" - model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet26d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet26d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -825,83 +918,79 @@ def resnet50(pretrained=False, **kwargs): """Constructs a ResNet-50 model. """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('resnet50', pretrained, **model_args) + return _create_resnet('resnet50', pretrained, **dict(model_args, **kwargs)) @register_model def resnet50d(pretrained=False, **kwargs) -> ResNet: """Constructs a ResNet-50-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet50d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet50d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet50t(pretrained=False, **kwargs): """Constructs a ResNet-50-T model. """ - model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True, **kwargs) - return _create_resnet('resnet50t', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True) + return _create_resnet('resnet50t', pretrained, **dict(model_args, **kwargs)) @register_model def resnet101(pretrained=False, **kwargs): """Constructs a ResNet-101 model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs) - return _create_resnet('resnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3]) + return _create_resnet('resnet101', pretrained, **dict(model_args, **kwargs)) @register_model def resnet101d(pretrained=False, **kwargs): """Constructs a ResNet-101-D model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet101d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet101d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet152(pretrained=False, **kwargs): """Constructs a ResNet-152 model. """ - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs) - return _create_resnet('resnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3]) + return _create_resnet('resnet152', pretrained, **dict(model_args, **kwargs)) @register_model def resnet152d(pretrained=False, **kwargs): """Constructs a ResNet-152-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet152d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet152d', pretrained, **dict(model_args, **kwargs)) @register_model def resnet200(pretrained=False, **kwargs): """Constructs a ResNet-200 model. 
""" - model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], **kwargs) - return _create_resnet('resnet200', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3]) + return _create_resnet('resnet200', pretrained, **dict(model_args, **kwargs)) @register_model def resnet200d(pretrained=False, **kwargs): """Constructs a ResNet-200-D model. """ - model_args = dict( - block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnet200d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnet200d', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet34(pretrained=False, **kwargs): """Constructs a ResNet-34 model with original Torchvision weights. """ - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('tv_resnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3]) + return _create_resnet('tv_resnet34', pretrained, **dict(model_args, **kwargs)) @register_model @@ -909,23 +998,23 @@ def tv_resnet50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with original Torchvision weights. """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('tv_resnet50', pretrained, **model_args) + return _create_resnet('tv_resnet50', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet101(pretrained=False, **kwargs): """Constructs a ResNet-101 model w/ Torchvision pretrained weights. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], **kwargs) - return _create_resnet('tv_resnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3]) + return _create_resnet('tv_resnet101', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnet152(pretrained=False, **kwargs): """Constructs a ResNet-152 model w/ Torchvision pretrained weights. """ - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], **kwargs) - return _create_resnet('tv_resnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3]) + return _create_resnet('tv_resnet152', pretrained, **dict(model_args, **kwargs)) @register_model @@ -936,8 +1025,8 @@ def wide_resnet50_2(pretrained=False, **kwargs): convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048 channels, and in Wide ResNet-50-2 has 2048-1024-2048. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128, **kwargs) - return _create_resnet('wide_resnet50_2', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], base_width=128) + return _create_resnet('wide_resnet50_2', pretrained, **dict(model_args, **kwargs)) @register_model @@ -947,8 +1036,8 @@ def wide_resnet101_2(pretrained=False, **kwargs): which is twice larger in every block. The number of channels in outer 1x1 convolutions is the same. 
""" - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128, **kwargs) - return _create_resnet('wide_resnet101_2', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], base_width=128) + return _create_resnet('wide_resnet101_2', pretrained, **dict(model_args, **kwargs)) @register_model @@ -963,8 +1052,8 @@ def resnet50_gn(pretrained=False, **kwargs): def resnext50_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt50-32x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -973,40 +1062,40 @@ def resnext50d_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnext50d_32x4d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnext50d_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt-101 32x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_32x8d(pretrained=False, **kwargs): """Constructs a ResNeXt-101 32x8d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model def resnext101_64x4d(pretrained=False, **kwargs): """Constructs a ResNeXt101-64x4d model. """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4, **kwargs) - return _create_resnet('resnext101_64x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=64, base_width=4) + return _create_resnet('resnext101_64x4d', pretrained, **dict(model_args, **kwargs)) @register_model def tv_resnext50_32x4d(pretrained=False, **kwargs): """Constructs a ResNeXt50-32x4d model with original Torchvision weights. 
""" - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('tv_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('tv_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1016,8 +1105,8 @@ def ig_resnext101_32x8d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('ig_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('ig_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1027,8 +1116,8 @@ def ig_resnext101_32x16d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('ig_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('ig_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1038,8 +1127,8 @@ def ig_resnext101_32x32d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32, **kwargs) - return _create_resnet('ig_resnext101_32x32d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=32) + return _create_resnet('ig_resnext101_32x32d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1049,8 +1138,8 @@ def ig_resnext101_32x48d(pretrained=False, **kwargs): `"Exploring the Limits of Weakly Supervised Pretraining" `_ Weights from https://pytorch.org/hub/facebookresearch_WSL-Images_resnext/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48, **kwargs) - return _create_resnet('ig_resnext101_32x48d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=48) + return _create_resnet('ig_resnext101_32x48d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1059,8 +1148,8 @@ def ssl_resnet18(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('ssl_resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('ssl_resnet18', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1070,7 +1159,7 @@ def ssl_resnet50(pretrained=False, **kwargs): Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('ssl_resnet50', pretrained, **model_args) + return _create_resnet('ssl_resnet50', 
pretrained, **dict(model_args, **kwargs)) @register_model @@ -1079,8 +1168,8 @@ def ssl_resnext50_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('ssl_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('ssl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1089,8 +1178,8 @@ def ssl_resnext101_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('ssl_resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('ssl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1099,8 +1188,8 @@ def ssl_resnext101_32x8d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('ssl_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('ssl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1109,8 +1198,8 @@ def ssl_resnext101_32x16d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('ssl_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('ssl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1120,8 +1209,8 @@ def swsl_resnet18(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], **kwargs) - return _create_resnet('swsl_resnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2]) + return _create_resnet('swsl_resnet18', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1132,7 +1221,7 @@ def swsl_resnet50(pretrained=False, **kwargs): Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], **kwargs) - return _create_resnet('swsl_resnet50', pretrained, **model_args) + return _create_resnet('swsl_resnet50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1142,8 +1231,8 @@ def swsl_resnext50_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from 
https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('swsl_resnext50_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4) + return _create_resnet('swsl_resnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1153,8 +1242,8 @@ def swsl_resnext101_32x4d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, **kwargs) - return _create_resnet('swsl_resnext101_32x4d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4) + return _create_resnet('swsl_resnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1164,8 +1253,8 @@ def swsl_resnext101_32x8d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, **kwargs) - return _create_resnet('swsl_resnext101_32x8d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8) + return _create_resnet('swsl_resnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1175,8 +1264,8 @@ def swsl_resnext101_32x16d(pretrained=False, **kwargs): `"Billion-scale Semi-Supervised Learning for Image Classification" `_ Weights from https://github.com/facebookresearch/semi-supervised-ImageNet1K-models/ """ - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16, **kwargs) - return _create_resnet('swsl_resnext101_32x16d', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=16) + return _create_resnet('swsl_resnext101_32x16d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1187,8 +1276,8 @@ def ecaresnet26t(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet26t', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet26t', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1197,8 +1286,8 @@ def ecaresnet50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet50d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1208,8 +1297,8 @@ def ecaresnet50d_pruned(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **model_args) + block_args=dict(attn_layer='eca')) + return 
_create_resnet('ecaresnet50d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs)) @register_model @@ -1219,8 +1308,8 @@ def ecaresnet50t(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet50t', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet50t', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1229,8 +1318,8 @@ def ecaresnetlight(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[1, 1, 11, 3], stem_width=32, avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnetlight', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnetlight', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1239,8 +1328,8 @@ def ecaresnet101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet101d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet101d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1250,8 +1339,8 @@ def ecaresnet101d_pruned(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **dict(model_args, **kwargs)) @register_model @@ -1260,8 +1349,8 @@ def ecaresnet200d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet200d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet200d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1270,8 +1359,8 @@ def ecaresnet269d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnet269d', pretrained, **model_args) + block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnet269d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1282,8 +1371,8 @@ def ecaresnext26t_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) - return _create_resnet('ecaresnext26t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnext26t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1294,54 +1383,54 @@ def ecaresnext50t_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca'), 
**kwargs) - return _create_resnet('ecaresnext50t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='eca')) + return _create_resnet('ecaresnext50t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet18(pretrained=False, **kwargs): - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet18', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet34(pretrained=False, **kwargs): - model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet34', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet34', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet50(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet50', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet50t(pretrained=False, **kwargs): model_args = dict( - block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet50t', pretrained, **model_args) + block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep_tiered', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet50t', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet101(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet101', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet101', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet152(pretrained=False, **kwargs): - model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet152', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 8, 36, 3], block_args=dict(attn_layer='se')) + return _create_resnet('seresnet152', pretrained, **dict(model_args, **kwargs)) @register_model def seresnet152d(pretrained=False, **kwargs): model_args = dict( - block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet152d', pretrained, **model_args) + block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet152d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1349,9 +1438,9 @@ def seresnet200d(pretrained=False, **kwargs): """Constructs a ResNet-200-D model with SE attn. 
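The se* entrypoints above differ from their plain counterparts chiefly by block_args=dict(attn_layer='se'), which every residual block resolves to an attention module. The same hook should also take an override at creation time, though pretrained weights for the plain variant would then no longer match (sketch):

    import timm

    # resnet50 topology with SE attention injected through block_args:
    model = timm.create_model('resnet50', pretrained=False, block_args=dict(attn_layer='se'))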
""" model_args = dict( - block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet200d', pretrained, **model_args) + block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet200d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1359,9 +1448,9 @@ def seresnet269d(pretrained=False, **kwargs): """Constructs a ResNet-269-D model with SE attn. """ model_args = dict( - block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnet269d', pretrained, **model_args) + block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', + avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnet269d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1372,8 +1461,8 @@ def seresnext26d_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext26d_32x4d', pretrained, **model_args) + stem_type='deep', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnext26d_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1384,8 +1473,8 @@ def seresnext26t_32x4d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[2, 2, 2, 2], cardinality=32, base_width=4, stem_width=32, - stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext26t_32x4d', pretrained, **model_args) + stem_type='deep_tiered', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnext26t_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1401,24 +1490,24 @@ def seresnext26tn_32x4d(pretrained=False, **kwargs): def seresnext50_32x4d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], cardinality=32, base_width=4, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext50_32x4d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext50_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnext101_32x4d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=4, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101_32x4d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101_32x4d', pretrained, **dict(model_args, **kwargs)) @register_model def seresnext101_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1426,32 +1515,32 @@ def seresnext101d_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, stem_width=32, stem_type='deep', avg_down=True, - 
block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnext101d_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnext101d_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model def senet154(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 8, 36, 3], cardinality=64, base_width=4, stem_type='deep', - down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('senet154', pretrained, **model_args) + down_kernel_size=3, block_reduce_first=2, block_args=dict(attn_layer='se')) + return _create_resnet('senet154', pretrained, **dict(model_args, **kwargs)) @register_model def resnetblur18(pretrained=False, **kwargs): """Constructs a ResNet-18 model with blur anti-aliasing """ - model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d, **kwargs) - return _create_resnet('resnetblur18', pretrained, **model_args) + model_args = dict(block=BasicBlock, layers=[2, 2, 2, 2], aa_layer=BlurPool2d) + return _create_resnet('resnetblur18', pretrained, **dict(model_args, **kwargs)) @register_model def resnetblur50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with blur anti-aliasing """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d, **kwargs) - return _create_resnet('resnetblur50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d) + return _create_resnet('resnetblur50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1460,8 +1549,8 @@ def resnetblur50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=BlurPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetblur50d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetblur50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1470,16 +1559,25 @@ def resnetblur101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=BlurPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetblur101d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetblur101d', pretrained, **dict(model_args, **kwargs)) + + +@register_model +def resnetaa34d(pretrained=False, **kwargs): + """Constructs a ResNet-34-D model w/ avgpool anti-aliasing + """ + model_args = dict( + block=BasicBlock, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetaa34d', pretrained, **dict(model_args, **kwargs)) @register_model def resnetaa50(pretrained=False, **kwargs): """Constructs a ResNet-50 model with avgpool anti-aliasing """ - model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, **kwargs) - return _create_resnet('resnetaa50', pretrained, **model_args) + model_args = dict(block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d) + return _create_resnet('resnetaa50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1488,8 +1586,8 @@ def resnetaa50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetaa50d', pretrained, **model_args) + stem_width=32, 
stem_type='deep', avg_down=True) + return _create_resnet('resnetaa50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1498,8 +1596,8 @@ def resnetaa101d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, **kwargs) - return _create_resnet('resnetaa101d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True) + return _create_resnet('resnetaa101d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1508,8 +1606,8 @@ def seresnetaa50d(pretrained=False, **kwargs): """ model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], aa_layer=nn.AvgPool2d, - stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnetaa50d', pretrained, **model_args) + stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='se')) + return _create_resnet('seresnetaa50d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1519,8 +1617,8 @@ def seresnextaa101d_32x8d(pretrained=False, **kwargs): model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], cardinality=32, base_width=8, stem_width=32, stem_type='deep', avg_down=True, aa_layer=nn.AvgPool2d, - block_args=dict(attn_layer='se'), **kwargs) - return _create_resnet('seresnextaa101d_32x8d', pretrained, **model_args) + block_args=dict(attn_layer='se')) + return _create_resnet('seresnextaa101d_32x8d', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1532,8 +1630,8 @@ def resnetrs50(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs50', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs50', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1545,8 +1643,8 @@ def resnetrs101(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs101', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs101', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1558,8 +1656,8 @@ def resnetrs152(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs152', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs152', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1571,8 +1669,8 @@ def resnetrs200(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs200', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return 
_create_resnet('resnetrs200', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1584,8 +1682,8 @@ def resnetrs270(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 29, 53, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs270', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs270', pretrained, **dict(model_args, **kwargs)) @@ -1598,8 +1696,8 @@ def resnetrs350(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 36, 72, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs350', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs350', pretrained, **dict(model_args, **kwargs)) @register_model @@ -1611,5 +1709,5 @@ def resnetrs420(pretrained=False, **kwargs): attn_layer = partial(get_attn('se'), rd_ratio=0.25) model_args = dict( block=Bottleneck, layers=[4, 44, 87, 4], stem_width=32, stem_type='deep', replace_stem_pool=True, - avg_down=True, block_args=dict(attn_layer=attn_layer), **kwargs) - return _create_resnet('resnetrs420', pretrained, **model_args) + avg_down=True, block_args=dict(attn_layer=attn_layer)) + return _create_resnet('resnetrs420', pretrained, **dict(model_args, **kwargs)) diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py index f8c4298b..41e29e12 100644 --- a/timm/models/resnetv2.py +++ b/timm/models/resnetv2.py @@ -37,7 +37,7 @@ import torch.nn as nn from timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD from timm.layers import GroupNormAct, BatchNormAct2d, EvoNorm2dB0, EvoNorm2dS0, FilterResponseNormTlu2d, \ - ClassifierHead, DropPath, AvgPool2dSame, create_pool2d, StdConv2d, create_conv2d + ClassifierHead, DropPath, AvgPool2dSame, create_pool2d, StdConv2d, create_conv2d, get_act_layer, get_norm_act_layer from ._builder import build_model_with_cfg from ._manipulate import checkpoint_seq, named_apply, adapt_input_conv from ._registry import register_model @@ -155,8 +155,20 @@ class PreActBottleneck(nn.Module): """ def __init__( - self, in_chs, out_chs=None, bottle_ratio=0.25, stride=1, dilation=1, first_dilation=None, groups=1, - act_layer=None, conv_layer=None, norm_layer=None, proj_layer=None, drop_path_rate=0.): + self, + in_chs, + out_chs=None, + bottle_ratio=0.25, + stride=1, + dilation=1, + first_dilation=None, + groups=1, + act_layer=None, + conv_layer=None, + norm_layer=None, + proj_layer=None, + drop_path_rate=0., + ): super().__init__() first_dilation = first_dilation or dilation conv_layer = conv_layer or StdConv2d @@ -202,8 +214,20 @@ class Bottleneck(nn.Module): """Non Pre-activation bottleneck block, equiv to V1.5/V1b Bottleneck. Used for ViT. 
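The PreActBottleneck / Bottleneck split in resnetv2.py comes down to operator ordering. A single-conv toy unit showing the pre-activation form (schematic only, not the full bottleneck):

    import torch.nn as nn

    class PreActUnit(nn.Module):
        """Pre-activation residual unit: norm -> act -> conv, shortcut from the raw input."""
        def __init__(self, chs: int):
            super().__init__()
            self.norm = nn.BatchNorm2d(chs)
            self.act = nn.ReLU(inplace=True)
            self.conv = nn.Conv2d(chs, chs, 3, padding=1)

        def forward(self, x):
            # V1-style blocks instead run conv -> norm -> act and apply the
            # final activation after the residual add.
            return x + self.conv(self.act(self.norm(x)))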
""" def __init__( - self, in_chs, out_chs=None, bottle_ratio=0.25, stride=1, dilation=1, first_dilation=None, groups=1, - act_layer=None, conv_layer=None, norm_layer=None, proj_layer=None, drop_path_rate=0.): + self, + in_chs, + out_chs=None, + bottle_ratio=0.25, + stride=1, + dilation=1, + first_dilation=None, + groups=1, + act_layer=None, + conv_layer=None, + norm_layer=None, + proj_layer=None, + drop_path_rate=0., + ): super().__init__() first_dilation = first_dilation or dilation act_layer = act_layer or nn.ReLU @@ -229,7 +253,8 @@ class Bottleneck(nn.Module): self.act3 = act_layer(inplace=True) def zero_init_last(self): - nn.init.zeros_(self.norm3.weight) + if getattr(self.norm3, 'weight', None) is not None: + nn.init.zeros_(self.norm3.weight) def forward(self, x): # shortcut branch @@ -251,8 +276,16 @@ class Bottleneck(nn.Module): class DownsampleConv(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, preact=True, - conv_layer=None, norm_layer=None): + self, + in_chs, + out_chs, + stride=1, + dilation=1, + first_dilation=None, + preact=True, + conv_layer=None, + norm_layer=None, + ): super(DownsampleConv, self).__init__() self.conv = conv_layer(in_chs, out_chs, 1, stride=stride) self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False) @@ -263,8 +296,16 @@ class DownsampleConv(nn.Module): class DownsampleAvg(nn.Module): def __init__( - self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, - preact=True, conv_layer=None, norm_layer=None): + self, + in_chs, + out_chs, + stride=1, + dilation=1, + first_dilation=None, + preact=True, + conv_layer=None, + norm_layer=None, + ): """ AvgPool Downsampling as in 'D' ResNet variants. This is not in RegNet space but I might experiment.""" super(DownsampleAvg, self).__init__() avg_stride = stride if dilation == 1 else 1 @@ -283,9 +324,22 @@ class DownsampleAvg(nn.Module): class ResNetStage(nn.Module): """ResNet Stage.""" def __init__( - self, in_chs, out_chs, stride, dilation, depth, bottle_ratio=0.25, groups=1, - avg_down=False, block_dpr=None, block_fn=PreActBottleneck, - act_layer=None, conv_layer=None, norm_layer=None, **block_kwargs): + self, + in_chs, + out_chs, + stride, + dilation, + depth, + bottle_ratio=0.25, + groups=1, + avg_down=False, + block_dpr=None, + block_fn=PreActBottleneck, + act_layer=None, + conv_layer=None, + norm_layer=None, + **block_kwargs, + ): super(ResNetStage, self).__init__() first_dilation = 1 if dilation in (1, 2) else 2 layer_kwargs = dict(act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer) @@ -296,9 +350,18 @@ class ResNetStage(nn.Module): drop_path_rate = block_dpr[block_idx] if block_dpr else 0. 
         stride = stride if block_idx == 0 else 1
         self.blocks.add_module(str(block_idx), block_fn(
-            prev_chs, out_chs, stride=stride, dilation=dilation, bottle_ratio=bottle_ratio, groups=groups,
-            first_dilation=first_dilation, proj_layer=proj_layer, drop_path_rate=drop_path_rate,
-            **layer_kwargs, **block_kwargs))
+            prev_chs,
+            out_chs,
+            stride=stride,
+            dilation=dilation,
+            bottle_ratio=bottle_ratio,
+            groups=groups,
+            first_dilation=first_dilation,
+            proj_layer=proj_layer,
+            drop_path_rate=drop_path_rate,
+            **layer_kwargs,
+            **block_kwargs,
+        ))
         prev_chs = out_chs
         first_dilation = dilation
         proj_layer = None
@@ -313,8 +376,13 @@ def is_stem_deep(stem_type):

 def create_resnetv2_stem(
-        in_chs, out_chs=64, stem_type='', preact=True,
-        conv_layer=StdConv2d, norm_layer=partial(GroupNormAct, num_groups=32)):
+        in_chs,
+        out_chs=64,
+        stem_type='',
+        preact=True,
+        conv_layer=StdConv2d,
+        norm_layer=partial(GroupNormAct, num_groups=32),
+):
     stem = OrderedDict()
     assert stem_type in ('', 'fixed', 'same', 'deep', 'deep_fixed', 'deep_same', 'tiered')
@@ -357,20 +425,62 @@ class ResNetV2(nn.Module):
     """

     def __init__(
-            self, layers, channels=(256, 512, 1024, 2048),
-            num_classes=1000, in_chans=3, global_pool='avg', output_stride=32,
-            width_factor=1, stem_chs=64, stem_type='', avg_down=False, preact=True,
-            act_layer=nn.ReLU, conv_layer=StdConv2d, norm_layer=partial(GroupNormAct, num_groups=32),
-            drop_rate=0., drop_path_rate=0., zero_init_last=False):
+            self,
+            layers,
+            channels=(256, 512, 1024, 2048),
+            num_classes=1000,
+            in_chans=3,
+            global_pool='avg',
+            output_stride=32,
+            width_factor=1,
+            stem_chs=64,
+            stem_type='',
+            avg_down=False,
+            preact=True,
+            act_layer=nn.ReLU,
+            norm_layer=partial(GroupNormAct, num_groups=32),
+            conv_layer=StdConv2d,
+            drop_rate=0.,
+            drop_path_rate=0.,
+            zero_init_last=False,
+    ):
+        """
+        Args:
+            layers (List[int]): number of layers in each block
+            channels (List[int]): number of channels in each block
+            num_classes (int): number of classification classes (default 1000)
+            in_chans (int): number of input (color) channels. (default 3)
+            global_pool (str): Global pooling type. One of 'avg', 'max', 'avgmax', 'catavgmax' (default 'avg')
+            output_stride (int): output stride of the network, 32, 16, or 8. (default 32)
+            width_factor (int): channel (width) multiplication factor
+            stem_chs (int): stem width (default: 64)
+            stem_type (str): stem type (default: '' == 7x7)
+            avg_down (bool): average pooling in residual downsampling (default: False)
+            preact (bool): use pre-activation blocks (default: True)
+            act_layer (Union[str, nn.Module]): activation layer
+            norm_layer (Union[str, nn.Module]): normalization layer
+            conv_layer (nn.Module): convolution module
+            drop_rate: classifier dropout rate (default: 0.)
+            drop_path_rate: stochastic depth rate (default: 0.)
+ zero_init_last: zero-init last weight in residual path (default: False) + """ super().__init__() self.num_classes = num_classes self.drop_rate = drop_rate wf = width_factor + norm_layer = get_norm_act_layer(norm_layer, act_layer=act_layer) + act_layer = get_act_layer(act_layer) self.feature_info = [] stem_chs = make_div(stem_chs * wf) self.stem = create_resnetv2_stem( - in_chans, stem_chs, stem_type, preact, conv_layer=conv_layer, norm_layer=norm_layer) + in_chans, + stem_chs, + stem_type, + preact, + conv_layer=conv_layer, + norm_layer=norm_layer, + ) stem_feat = ('stem.conv3' if is_stem_deep(stem_type) else 'stem.conv') if preact else 'stem.norm' self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module=stem_feat)) @@ -387,8 +497,18 @@ class ResNetV2(nn.Module): dilation *= stride stride = 1 stage = ResNetStage( - prev_chs, out_chs, stride=stride, dilation=dilation, depth=d, avg_down=avg_down, - act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer, block_dpr=bdpr, block_fn=block_fn) + prev_chs, + out_chs, + stride=stride, + dilation=dilation, + depth=d, + avg_down=avg_down, + act_layer=act_layer, + conv_layer=conv_layer, + norm_layer=norm_layer, + block_dpr=bdpr, + block_fn=block_fn, + ) prev_chs = out_chs curr_stride *= stride self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}')] @@ -626,86 +746,83 @@ def resnetv2_152x2_bit_teacher_384(pretrained=False, **kwargs): @register_model def resnetv2_50(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50', pretrained=pretrained, - layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_50', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50t(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50t', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='tiered', avg_down=True, **kwargs) + stem_type='tiered', avg_down=True) + return _create_resnetv2('resnetv2_50t', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_101(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101', pretrained=pretrained, - layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_101', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_101d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101d', pretrained=pretrained, + model_args = dict( layers=[3, 4, 23, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_101d', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_152(pretrained=False, **kwargs): - return 
_create_resnetv2( - 'resnetv2_152', pretrained=pretrained, - layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, **kwargs) + model_args = dict(layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d) + return _create_resnetv2('resnetv2_152', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_152d(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_152d', pretrained=pretrained, + model_args = dict( layers=[3, 8, 36, 3], conv_layer=create_conv2d, norm_layer=BatchNormAct2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_152d', pretrained=pretrained, **dict(model_args, **kwargs)) # Experimental configs (may change / be removed) @register_model def resnetv2_50d_gn(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_gn', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=GroupNormAct, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_gn', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d_evob(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_evob', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dB0, - stem_type='deep', avg_down=True, zero_init_last=True, **kwargs) + stem_type='deep', avg_down=True, zero_init_last=True) + return _create_resnetv2('resnetv2_50d_evob', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d_evos(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_evos', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=EvoNorm2dS0, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_evos', pretrained=pretrained, **dict(model_args, **kwargs)) @register_model def resnetv2_50d_frn(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50d_frn', pretrained=pretrained, + model_args = dict( layers=[3, 4, 6, 3], conv_layer=create_conv2d, norm_layer=FilterResponseNormTlu2d, - stem_type='deep', avg_down=True, **kwargs) + stem_type='deep', avg_down=True) + return _create_resnetv2('resnetv2_50d_frn', pretrained=pretrained, **dict(model_args, **kwargs)) diff --git a/timm/models/sknet.py b/timm/models/sknet.py index 5a29b9a4..425bd7c2 100644 --- a/timm/models/sknet.py +++ b/timm/models/sknet.py @@ -47,9 +47,24 @@ class SelectiveKernelBasic(nn.Module): expansion = 1 def __init__( - self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64, - sk_kwargs=None, reduce_first=1, dilation=1, first_dilation=None, act_layer=nn.ReLU, - norm_layer=nn.BatchNorm2d, attn_layer=None, aa_layer=None, drop_block=None, drop_path=None): + self, + inplanes, + planes, + stride=1, + downsample=None, + cardinality=1, + base_width=64, + sk_kwargs=None, + reduce_first=1, + dilation=1, + first_dilation=None, + act_layer=nn.ReLU, + norm_layer=nn.BatchNorm2d, + attn_layer=None, + aa_layer=None, + drop_block=None, + drop_path=None, + ): super(SelectiveKernelBasic, self).__init__() sk_kwargs = sk_kwargs or {} @@ -71,7 +86,8 @@ class SelectiveKernelBasic(nn.Module): self.drop_path = drop_path def zero_init_last(self): - nn.init.zeros_(self.conv2.bn.weight) + if getattr(self.conv2.bn, 'weight', None) is not None: + 
nn.init.zeros_(self.conv2.bn.weight) def forward(self, x): shortcut = x @@ -92,9 +108,24 @@ class SelectiveKernelBottleneck(nn.Module): expansion = 4 def __init__( - self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64, sk_kwargs=None, - reduce_first=1, dilation=1, first_dilation=None, act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, - attn_layer=None, aa_layer=None, drop_block=None, drop_path=None): + self, + inplanes, + planes, + stride=1, + downsample=None, + cardinality=1, + base_width=64, + sk_kwargs=None, + reduce_first=1, + dilation=1, + first_dilation=None, + act_layer=nn.ReLU, + norm_layer=nn.BatchNorm2d, + attn_layer=None, + aa_layer=None, + drop_block=None, + drop_path=None, + ): super(SelectiveKernelBottleneck, self).__init__() sk_kwargs = sk_kwargs or {} @@ -115,7 +146,8 @@ class SelectiveKernelBottleneck(nn.Module): self.drop_path = drop_path def zero_init_last(self): - nn.init.zeros_(self.conv3.bn.weight) + if getattr(self.conv3.bn, 'weight', None) is not None: + nn.init.zeros_(self.conv3.bn.weight) def forward(self, x): shortcut = x diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index 5b93628f..8ffb1200 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -8,14 +8,18 @@ A PyTorch implement of Vision Transformers as described in: `How to train your ViT? Data, Augmentation, and Regularization in Vision Transformers` - https://arxiv.org/abs/2106.10270 -The official jax code is released and available at https://github.com/google-research/vision_transformer +`FlexiViT: One Model for All Patch Sizes` + - https://arxiv.org/abs/2212.08013 + +The official jax code is released and available at + * https://github.com/google-research/vision_transformer + * https://github.com/google-research/big_vision Acknowledgments: -* The paper authors for releasing code and weights, thanks! -* I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch ... check it out -for some einops/einsum fun -* Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT -* Bert reference code checks against Huggingface Transformers and Tensorflow Bert + * The paper authors for releasing code and weights, thanks! 
+ * I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch + * Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT + * Bert reference code checks against Huggingface Transformers and Tensorflow Bert Hacked together by / Copyright 2020, Ross Wightman """ @@ -23,7 +27,7 @@ import logging import math from collections import OrderedDict from functools import partial -from typing import Optional +from typing import Optional, List import torch import torch.nn as nn @@ -32,7 +36,8 @@ import torch.utils.checkpoint from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD, IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD, \ OPENAI_CLIP_MEAN, OPENAI_CLIP_STD -from timm.layers import PatchEmbed, Mlp, DropPath, trunc_normal_, lecun_normal_ +from timm.layers import PatchEmbed, Mlp, DropPath, trunc_normal_, lecun_normal_, resample_patch_embed, \ + resample_abs_pos_embed from ._builder import build_model_with_cfg from ._manipulate import named_apply, checkpoint_seq, adapt_input_conv from ._pretrained import generate_default_cfgs @@ -449,6 +454,39 @@ def get_init_weights_vit(mode='jax', head_bias: float = 0.): return init_weights_vit_timm +def resize_pos_embed( + posemb, + posemb_new, + num_prefix_tokens=1, + gs_new=(), + interpolation='bicubic', + antialias=False, +): + """ Rescale the grid of position embeddings when loading from state_dict. + + *DEPRECATED* This function is being deprecated in favour of resample_abs_pos_embed + + Adapted from: + https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224 + """ + ntok_new = posemb_new.shape[1] + if num_prefix_tokens: + posemb_prefix, posemb_grid = posemb[:, :num_prefix_tokens], posemb[0, num_prefix_tokens:] + ntok_new -= num_prefix_tokens + else: + posemb_prefix, posemb_grid = posemb[:, :0], posemb[0] + gs_old = int(math.sqrt(len(posemb_grid))) + if not len(gs_new): # backwards compatibility + gs_new = [int(math.sqrt(ntok_new))] * 2 + assert len(gs_new) >= 2 + _logger.info(f'Resized position embedding: {posemb.shape} ({[gs_old, gs_old]}) to {posemb_new.shape} ({gs_new}).') + posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2) + posemb_grid = F.interpolate(posemb_grid, size=gs_new, mode=interpolation, antialias=antialias, align_corners=False) + posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new[0] * gs_new[1], -1) + posemb = torch.cat([posemb_prefix, posemb_grid], dim=1) + return posemb + + @torch.no_grad() def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = ''): """ Load weights from .npz checkpoints for official Google Brain Flax implementation @@ -468,8 +506,15 @@ def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = return torch.from_numpy(w) w = np.load(checkpoint_path) - if not prefix and 'opt/target/embedding/kernel' in w: - prefix = 'opt/target/' + interpolation = 'bilinear' + antialias = False + big_vision = False + if not prefix: + if 'opt/target/embedding/kernel' in w: + prefix = 'opt/target/' + elif 'params/embedding/kernel' in w: + prefix = 'params/' + big_vision = True if hasattr(model.patch_embed, 'backbone'): # hybrid @@ -495,17 +540,33 @@ def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = else: embed_conv_w = adapt_input_conv( model.patch_embed.proj.weight.shape[1], _n2p(w[f'{prefix}embedding/kernel'])) + if embed_conv_w.shape[-2:] != 
model.patch_embed.proj.weight.shape[-2:]: + embed_conv_w = resample_patch_embed( + embed_conv_w, + model.patch_embed.proj.weight.shape[-2:], + interpolation=interpolation, + antialias=antialias, + verbose=True, + ) + model.patch_embed.proj.weight.copy_(embed_conv_w) model.patch_embed.proj.bias.copy_(_n2p(w[f'{prefix}embedding/bias'])) if model.cls_token is not None: model.cls_token.copy_(_n2p(w[f'{prefix}cls'], t=False)) - pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) + if big_vision: + pos_embed_w = _n2p(w[f'{prefix}pos_embedding'], t=False) + else: + pos_embed_w = _n2p(w[f'{prefix}Transformer/posembed_input/pos_embedding'], t=False) if pos_embed_w.shape != model.pos_embed.shape: - pos_embed_w = resize_pos_embed( # resize pos embedding when different size from pretrained weights + old_shape = pos_embed_w.shape + num_prefix_tokens = 0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1) + pos_embed_w = resample_abs_pos_embed( # resize pos embedding when different size from pretrained weights pos_embed_w, - model.pos_embed, - getattr(model, 'num_prefix_tokens', 1), - model.patch_embed.grid_size + new_size=model.patch_embed.grid_size, + num_prefix_tokens=num_prefix_tokens, + interpolation=interpolation, + antialias=antialias, + verbose=True, ) model.pos_embed.copy_(pos_embed_w) model.norm.weight.copy_(_n2p(w[f'{prefix}Transformer/encoder_norm/scale'])) @@ -517,9 +578,10 @@ def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = # if isinstance(getattr(model.pre_logits, 'fc', None), nn.Linear) and f'{prefix}pre_logits/bias' in w: # model.pre_logits.fc.weight.copy_(_n2p(w[f'{prefix}pre_logits/kernel'])) # model.pre_logits.fc.bias.copy_(_n2p(w[f'{prefix}pre_logits/bias'])) + mha_sub, b_sub, ln1_sub = (0, 0, 1) if big_vision else (1, 3, 2) for i, block in enumerate(model.blocks.children()): block_prefix = f'{prefix}Transformer/encoderblock_{i}/' - mha_prefix = block_prefix + 'MultiHeadDotProductAttention_1/' + mha_prefix = block_prefix + f'MultiHeadDotProductAttention_{mha_sub}/' block.norm1.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/scale'])) block.norm1.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_0/bias'])) block.attn.qkv.weight.copy_(torch.cat([ @@ -529,32 +591,10 @@ def _load_weights(model: VisionTransformer, checkpoint_path: str, prefix: str = block.attn.proj.weight.copy_(_n2p(w[f'{mha_prefix}out/kernel']).flatten(1)) block.attn.proj.bias.copy_(_n2p(w[f'{mha_prefix}out/bias'])) for r in range(2): - getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/kernel'])) - getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_3/Dense_{r}/bias'])) - block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/scale'])) - block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_2/bias'])) - - -def resize_pos_embed(posemb, posemb_new, num_prefix_tokens=1, gs_new=()): - # Rescale the grid of position embeddings when loading from state_dict. 
Adapted from - # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224 - _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape) - ntok_new = posemb_new.shape[1] - if num_prefix_tokens: - posemb_prefix, posemb_grid = posemb[:, :num_prefix_tokens], posemb[0, num_prefix_tokens:] - ntok_new -= num_prefix_tokens - else: - posemb_prefix, posemb_grid = posemb[:, :0], posemb[0] - gs_old = int(math.sqrt(len(posemb_grid))) - if not len(gs_new): # backwards compatibility - gs_new = [int(math.sqrt(ntok_new))] * 2 - assert len(gs_new) >= 2 - _logger.info('Position embedding grid-size from %s to %s', [gs_old, gs_old], gs_new) - posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2) - posemb_grid = F.interpolate(posemb_grid, size=gs_new, mode='bicubic', align_corners=False) - posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new[0] * gs_new[1], -1) - posemb = torch.cat([posemb_prefix, posemb_grid], dim=1) - return posemb + getattr(block.mlp, f'fc{r + 1}').weight.copy_(_n2p(w[f'{block_prefix}MlpBlock_{b_sub}/Dense_{r}/kernel'])) + getattr(block.mlp, f'fc{r + 1}').bias.copy_(_n2p(w[f'{block_prefix}MlpBlock_{b_sub}/Dense_{r}/bias'])) + block.norm2.weight.copy_(_n2p(w[f'{block_prefix}LayerNorm_{ln1_sub}/scale'])) + block.norm2.bias.copy_(_n2p(w[f'{block_prefix}LayerNorm_{ln1_sub}/bias'])) def _convert_openai_clip(state_dict, model): @@ -591,7 +631,13 @@ def _convert_openai_clip(state_dict, model): return out_dict -def checkpoint_filter_fn(state_dict, model, adapt_layer_scale=False): +def checkpoint_filter_fn( + state_dict, + model, + adapt_layer_scale=False, + interpolation='bicubic', + antialias=True, +): """ convert patch embedding weight from manual patchify + linear proj to conv""" import re out_dict = {} @@ -603,17 +649,30 @@ def checkpoint_filter_fn(state_dict, model, adapt_layer_scale=False): return _convert_openai_clip(state_dict, model) for k, v in state_dict.items(): - if 'patch_embed.proj.weight' in k and len(v.shape) < 4: - # For old models that I trained prior to conv based patchification + if 'patch_embed.proj.weight' in k: O, I, H, W = model.patch_embed.proj.weight.shape - v = v.reshape(O, -1, H, W) + if len(v.shape) < 4: + # For old models that I trained prior to conv based patchification + O, I, H, W = model.patch_embed.proj.weight.shape + v = v.reshape(O, -1, H, W) + if v.shape[-1] != W or v.shape[-2] != H: + v = resample_patch_embed( + v, + (H, W), + interpolation=interpolation, + antialias=antialias, + verbose=True, + ) elif k == 'pos_embed' and v.shape[1] != model.pos_embed.shape[1]: # To resize pos embedding when using model at different size from pretrained weights - v = resize_pos_embed( + num_prefix_tokens = 0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1) + v = resample_abs_pos_embed( v, - model.pos_embed, - 0 if getattr(model, 'no_embed_class') else getattr(model, 'num_prefix_tokens', 1), - model.patch_embed.grid_size + new_size=model.patch_embed.grid_size, + num_prefix_tokens=num_prefix_tokens, + interpolation=interpolation, + antialias=antialias, + verbose=True, ) elif adapt_layer_scale and 'gamma_' in k: # remap layer-scale gamma into sub-module (deit3 models) @@ -638,70 +697,104 @@ def _cfg(url='', **kwargs): default_cfgs = generate_default_cfgs({ + # re-finetuned augreg 21k FT on in1k weights + 'vit_base_patch16_224.augreg2_in21k_ft_in1k': _cfg( + hf_hub_id='timm/'), + 
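# [Editorial aside - illustrative sketch, not part of the patch] The deprecated
# resize_pos_embed() above and its resample_abs_pos_embed replacement both reduce
# to the same flow: split off the class/prefix tokens, fold the remaining tokens
# back into their 2D grid, interpolate that grid to the new size, then flatten and
# re-attach the prefix. resample_patch_embed applies the analogous idea to the
# patch projection kernel when patch size changes (FlexiViT). A minimal standalone
# version of the pos-embed flow; the function name and the square-grid assumption
# are mine, not the timm API:
import math
import torch
import torch.nn.functional as F

def resample_pos_embed_sketch(posemb, new_hw, num_prefix_tokens=1, mode='bicubic'):
    # posemb: (1, num_prefix_tokens + H*W, C) absolute position embedding
    prefix, grid = posemb[:, :num_prefix_tokens], posemb[:, num_prefix_tokens:]
    gs_old = int(math.sqrt(grid.shape[1]))  # assumes a square token grid
    grid = grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2)  # -> (1, C, H, W)
    grid = F.interpolate(grid, size=new_hw, mode=mode, align_corners=False)
    grid = grid.permute(0, 2, 3, 1).reshape(1, new_hw[0] * new_hw[1], -1)
    return torch.cat([prefix, grid], dim=1)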
'vit_base_patch16_384.augreg2_in21k_ft_in1k': _cfg(), + 'vit_base_patch8_224.augreg2_in21k_ft_in1k': _cfg( + hf_hub_id='timm/'), + # How to train your ViT (augreg) weights, pretrained on 21k FT on in1k 'vit_tiny_patch16_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_tiny_patch16_384.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_small_patch32_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_small_patch32_384.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_small_patch16_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_small_patch16_384.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_base_patch32_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_base_patch32_384.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_light1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_base_patch16_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_base_patch16_384.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_base_patch8_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_large_patch16_224.augreg_in21k_ft_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_large_patch16_384.augreg_in21k_ft_in1k': _cfg( 
url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), - # re-finetuned augreg 21k FT on in1k weights - 'vit_base_patch16_224.augreg2_in21k_ft_in1k': _cfg( - file='b16_augreg-a-8.pth'), - 'vit_base_patch16_384.augreg2_in21k_ft_in1k': _cfg( - url=''), - 'vit_base_patch8_224.augreg2_in21k_ft_in1k': _cfg( - url=''), - # patch models (weights from official Google JAX impl) pretrained on in21k FT on in1k 'vit_base_patch16_224.orig_in21k_ft_in1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth'), + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth', + hf_hub_id='timm/'), 'vit_base_patch16_384.orig_in21k_ft_in1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_384-83fb41ba.pth'), + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_384-83fb41ba.pth', + hf_hub_id='timm/', + input_size=(3, 384, 384), crop_pct=1.0), 'vit_large_patch32_384.orig_in21k_ft_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth', + hf_hub_id='timm/', input_size=(3, 384, 384), crop_pct=1.0), - # How to train your ViT (augreg) weights trained on in1k + # How to train your ViT (augreg) weights trained on in1k only + 'vit_small_patch16_224.augreg_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_16-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', + custom_load=True), + 'vit_small_patch16_384.augreg_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/S_16-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', + custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), + 'vit_base_patch32_224.augreg_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_32-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', + custom_load=True), + 'vit_base_patch32_384.augreg_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/B_32-i1k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', + custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_base_patch16_224.augreg_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_16-i1k-300ep-lr_0.001-aug_strong2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', custom_load=True), 'vit_base_patch16_384.augreg_in1k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_16-i1k-300ep-lr_0.001-aug_strong2-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', custom_load=True, input_size=(3, 384, 384), crop_pct=1.0), 'vit_large_patch14_224.untrained': _cfg(url=''), @@ -709,229 +802,244 @@ default_cfgs = generate_default_cfgs({ 'vit_giant_patch14_224.untrained': _cfg(url=''), 'vit_gigantic_patch14_224.untrained': _cfg(url=''), - # patch models, imagenet21k (weights from official Google JAX impl) - 'vit_large_patch32_224.v1_in21k': _cfg( - 
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', - num_classes=21843), - 'vit_huge_patch14_224.v1_in21k': _cfg( + 'vit_large_patch32_224.orig_in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', + hf_hub_id='timm/', + num_classes=21843), + 'vit_huge_patch14_224.orig_in21k': _cfg( url='https://storage.googleapis.com/vit_models/imagenet21k/ViT-H_14.npz', - hf_hub_id='timm/vit_huge_patch14_224_in21k', + hf_hub_id='timm/', custom_load=True, num_classes=21843), # How to train your ViT (augreg) weights, pretrained on in21k 'vit_tiny_patch16_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_small_patch32_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_32-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_small_patch16_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/S_16-i21k-300ep-lr_0.001-aug_light1-wd_0.03-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_base_patch32_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.03-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_base_patch16_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_base_patch8_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/B_8-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), 'vit_large_patch16_224.augreg_in21k': _cfg( url='https://storage.googleapis.com/vit_models/augreg/L_16-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1.npz', + hf_hub_id='timm/', custom_load=True, num_classes=21843), # SAM trained models (https://arxiv.org/abs/2106.01548) 'vit_base_patch32_224.sam': _cfg( - url='https://storage.googleapis.com/vit_models/sam/ViT-B_32.npz', custom_load=True), + url='https://storage.googleapis.com/vit_models/sam/ViT-B_32.npz', custom_load=True, + hf_hub_id='timm/'), 'vit_base_patch16_224.sam': _cfg( - url='https://storage.googleapis.com/vit_models/sam/ViT-B_16.npz', custom_load=True), + url='https://storage.googleapis.com/vit_models/sam/ViT-B_16.npz', custom_load=True, + hf_hub_id='timm/'), # DINO pretrained - https://arxiv.org/abs/2104.14294 (no classifier head, for fine-tune only) 'vit_small_patch16_224.dino': _cfg( url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall16_pretrain/dino_deitsmall16_pretrain.pth', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), 'vit_small_patch8_224.dino': _cfg( url='https://dl.fbaipublicfiles.com/dino/dino_deitsmall8_pretrain/dino_deitsmall8_pretrain.pth', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), 'vit_base_patch16_224.dino': _cfg( url='https://dl.fbaipublicfiles.com/dino/dino_vitbase16_pretrain/dino_vitbase16_pretrain.pth', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), 'vit_base_patch8_224.dino': _cfg( 
url='https://dl.fbaipublicfiles.com/dino/dino_vitbase8_pretrain/dino_vitbase8_pretrain.pth', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, num_classes=0), - # ViT ImageNet-21K-P pretraining by MILL 'vit_base_patch16_224_miil.in21k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/vit_base_patch16_224_in21k_miil-887286df.pth', + hf_hub_id='timm/', mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear', num_classes=11221), 'vit_base_patch16_224_miil.in21k_ft_in1k': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tresnet/vit_base_patch16_224_1k_miil_84_4-2deb18e3.pth', + hf_hub_id='timm/', mean=(0., 0., 0.), std=(1., 1., 1.), crop_pct=0.875, interpolation='bilinear'), - # custom timm variants + # Custom timm variants 'vit_base_patch16_rpn_224.in1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth'), + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_base_patch16_rpn_224-sw-3b07e89d.pth', + hf_hub_id='timm/'), 'vit_medium_patch16_gap_240.in12k': _cfg( - hf_hub_id='timm/vit_medium_patch16_gap_240.in12k', + hf_hub_id='timm/', input_size=(3, 240, 240), crop_pct=0.95, num_classes=11821), 'vit_medium_patch16_gap_256.in12k_ft_in1k': _cfg( - hf_hub_id='timm/vit_medium_patch16_gap_256.in12k_ft_in1k', + hf_hub_id='timm/', input_size=(3, 256, 256), crop_pct=0.95), 'vit_medium_patch16_gap_384.in12k_ft_in1k': _cfg( - hf_hub_id='timm/vit_medium_patch16_gap_384.in12k_ft_in1k', + hf_hub_id='timm/', input_size=(3, 384, 384), crop_pct=0.95, crop_mode='squash'), 'vit_base_patch16_gap_224': _cfg(), # CLIP pretrained image tower and related fine-tuned weights - 'vit_base_patch32_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-B-32-laion2B-s34B-b79K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_base_patch16_clip_224.laion2b': _cfg( - #hf_hub_id='laion/CLIP-ViT-B-16-laion2B-s34B-b88K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512), - 'vit_large_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-L-14-laion2B-s32B-b82K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=768), - 'vit_huge_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-H-14-laion2B-s32B-b79K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), - 'vit_giant_patch14_clip_224.laion2b': _cfg( - hf_hub_id='laion/CLIP-ViT-g-14-laion2B-s12B-b42K', - hf_hub_filename='open_clip_pytorch_model.bin', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), - - 'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), - 'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_384.laion2b_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), - 
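# [Editorial aside - assumption, not spelled out in this patch] The recurring
# change in the cfg entries above and below collapses full repo names like
# 'timm/vit_base_patch16_clip_224.laion2b_ft_in1k' to the shorthand
# hf_hub_id='timm/'. The convention apparently relied on is that a trailing '/'
# names only the Hub org, with the full repo id derived from the model name
# (architecture + pretrained tag) when weights are resolved. A hypothetical helper
# to make the expansion concrete; the real logic lives in timm's hub utilities:
def resolve_hub_repo(hf_hub_id: str, model_name: str) -> str:
    # 'timm/' + 'vit_base_patch16_clip_224.laion2b_ft_in1k'
    #   -> 'timm/vit_base_patch16_clip_224.laion2b_ft_in1k'
    return hf_hub_id + model_name if hf_hub_id.endswith('/') else hf_hub_id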
'vit_large_patch14_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.laion2b_ft_in1k', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0), - 'vit_large_patch14_clip_336.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_336.laion2b_ft_in1k', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, - crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_huge_patch14_clip_224.laion2b_ft_in1k': _cfg( - hf_hub_id='timm/vit_huge_patch14_clip_224.laion2b_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_huge_patch14_clip_336.laion2b_ft_in1k': _cfg( - hf_hub_id='', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_base_patch32_clip_224.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), 'vit_base_patch32_clip_384.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_384.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)), 'vit_base_patch32_clip_448.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_448.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 448, 448)), 'vit_base_patch16_clip_224.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95), 'vit_base_patch16_clip_384.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_384.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), 'vit_large_patch14_clip_224.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0), 'vit_large_patch14_clip_336.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_336.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), 'vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_huge_patch14_clip_224.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), 'vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_huge_patch14_clip_336.laion2b_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), - 'vit_base_patch32_clip_224.laion2b_ft_in12k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), - 'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in12k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), - 'vit_large_patch14_clip_224.laion2b_ft_in12k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.laion2b_ft_in12k', - mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=11821), - 'vit_huge_patch14_clip_224.laion2b_ft_in12k': _cfg( - hf_hub_id='timm/vit_huge_patch14_clip_224.laion2b_ft_in12k', - 
mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), - - 'vit_base_patch32_clip_224.openai': _cfg( - hf_hub_id='timm/clip_vit_base_patch32_224.openai', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_base_patch16_clip_224.openai': _cfg( - hf_hub_id='timm/clip_vit_base_patch16_224.openai', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), - 'vit_large_patch14_clip_224.openai': _cfg( - hf_hub_id='timm/clip_vit_large_patch14_224.openai', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768), - - 'vit_base_patch32_clip_224.openai_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), - 'vit_base_patch16_clip_224.openai_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), - 'vit_base_patch16_clip_384.openai_ft_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_384.openai_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), - 'vit_large_patch14_clip_224.openai_ft_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.openai_ft_in1k', - mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), - 'vit_base_patch32_clip_224.openai_ft_in12k_in1k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k_in1k', + # hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k_in1k', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), 'vit_base_patch32_clip_384.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch32_clip_384.openai_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'), 'vit_base_patch16_clip_224.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95), 'vit_base_patch16_clip_384.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_384.openai_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=0.95, input_size=(3, 384, 384), crop_mode='squash'), 'vit_large_patch14_clip_224.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.openai_ft_in12k_in1k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), 'vit_large_patch14_clip_336.openai_ft_in12k_in1k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_336.openai_ft_in12k_in1k', + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), + + 'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), + 'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + 'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), + 'vit_large_patch14_clip_224.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0), + 'vit_large_patch14_clip_336.laion2b_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, + crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), + 'vit_huge_patch14_clip_224.laion2b_ft_in1k': _cfg( + 
hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + 'vit_huge_patch14_clip_336.laion2b_ft_in1k': _cfg( + hf_hub_id='', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'), + 'vit_base_patch32_clip_224.openai_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), + 'vit_base_patch16_clip_224.openai_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD), + 'vit_base_patch16_clip_384.openai_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, + crop_pct=1.0, input_size=(3, 384, 384), crop_mode='squash'), + 'vit_large_patch14_clip_224.openai_ft_in1k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0), + + 'vit_base_patch32_clip_224.laion2b_ft_in12k': _cfg( + #hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), + 'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), + 'vit_large_patch14_clip_224.laion2b_ft_in12k': _cfg( + hf_hub_id='timm/', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=11821), + 'vit_huge_patch14_clip_224.laion2b_ft_in12k': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), + 'vit_base_patch32_clip_224.openai_ft_in12k': _cfg( - #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k', + # hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), 'vit_base_patch16_clip_224.openai_ft_in12k': _cfg( - hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in12k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821), 'vit_large_patch14_clip_224.openai_ft_in12k': _cfg( - hf_hub_id='timm/vit_large_patch14_clip_224.openai_ft_in12k', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=11821), + 'vit_base_patch32_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-B-32-laion2B-s34B-b79K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_base_patch16_clip_224.laion2b': _cfg( + # hf_hub_id='laion/CLIP-ViT-B-16-laion2B-s34B-b88K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512), + 'vit_large_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-L-14-laion2B-s32B-b82K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, crop_pct=1.0, num_classes=768), + 'vit_huge_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-H-14-laion2B-s32B-b79K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), + 'vit_giant_patch14_clip_224.laion2b': _cfg( + hf_hub_id='laion/CLIP-ViT-g-14-laion2B-s12B-b42K', + hf_hub_filename='open_clip_pytorch_model.bin', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024), + + 'vit_base_patch32_clip_224.openai': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_base_patch16_clip_224.openai': _cfg( + hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512), + 'vit_large_patch14_clip_224.openai': _cfg( 
+ hf_hub_id='timm/', + mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=768), + # experimental (may be removed) 'vit_base_patch32_plus_256': _cfg(url='', input_size=(3, 256, 256), crop_pct=0.95), 'vit_base_patch16_plus_240': _cfg(url='', input_size=(3, 240, 240), crop_pct=0.95), @@ -942,21 +1050,81 @@ default_cfgs = generate_default_cfgs({ # EVA fine-tuned weights from MAE style MIM - EVA-CLIP target pretrain # https://github.com/baaivision/EVA/blob/7ecf2c0a370d97967e86d047d7af9188f78d2df3/eva/README.md#eva-l-learning-better-mim-representations-from-eva-clip 'eva_large_patch14_196.in22k_ft_in22k_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_21k_to_1k_ft_88p6.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_21k_to_1k_ft_88p6.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 196, 196), crop_pct=1.0), 'eva_large_patch14_336.in22k_ft_in22k_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_21k_to_1k_ft_89p2.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_21k_to_1k_ft_89p2.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'), 'eva_large_patch14_196.in22k_ft_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_1k_ft_88p0.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_196px_1k_ft_88p0.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 196, 196), crop_pct=1.0), 'eva_large_patch14_336.in22k_ft_in1k': _cfg( - hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_1k_ft_88p65.pt', + # hf_hub_id='BAAI/EVA', hf_hub_filename='eva_l_psz14_336px_1k_ft_88p65.pt', + hf_hub_id='timm/', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, input_size=(3, 336, 336), crop_pct=1.0, crop_mode='squash'), + + 'flexivit_small.1200ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_small.600ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k_600ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_small.300ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_s_i1k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + + 'flexivit_base.1200ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_base.600ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k_600ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_base.300ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i1k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_base.1000ep_in21k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i21k_1000ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843), + 'flexivit_base.300ep_in21k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_b_i21k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843), + 
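# [Editorial aside - simplified sketch] The FlexiViT entries added here all point
# at big_vision .npz checkpoints and set custom_load=True, which (as I read timm's
# loading helpers) bypasses the usual torch state_dict path in favour of the
# numpy-based _load_weights() defined earlier in this file. They also pick up the
# bilinear, non-antialiased resize filter that _create_vision_transformer selects
# for 'flexi' variants further below. Rough shape of the dispatch; everything
# except _load_weights is illustrative:
import torch

def load_pretrained_sketch(model, cfg, checkpoint_path):
    # checkpoint_path: local file already downloaded from cfg['url']
    if cfg.get('custom_load'):
        _load_weights(model, checkpoint_path)  # .npz Flax checkpoint, per-tensor copy
    else:
        model.load_state_dict(torch.load(checkpoint_path, map_location='cpu'))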
+ 'flexivit_large.1200ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_large.600ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k_600ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + 'flexivit_large.300ep_in1k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/flexivit_l_i1k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95), + + 'flexivit_base.patch16_in21k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/vit_b16_i21k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843), + 'flexivit_base.patch30_in21k': _cfg( + url='https://storage.googleapis.com/big_vision/flexivit/vit_b30_i21k_300ep.npz', custom_load=True, + hf_hub_id='timm/', + input_size=(3, 240, 240), crop_pct=0.95, num_classes=21843), }) @@ -964,9 +1132,16 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs): if kwargs.get('features_only', None): raise RuntimeError('features_only not implemented for Vision Transformer models.') + if 'flexi' in variant: + # FIXME Google FlexiViT pretrained models have a strong preference for bilinear patch / embed + # interpolation, other pretrained models resize better w/ anti-aliased bicubic interpolation. + _filter_fn = partial(checkpoint_filter_fn, interpolation='bilinear', antialias=False) + else: + _filter_fn = checkpoint_filter_fn + return build_model_with_cfg( VisionTransformer, variant, pretrained, - pretrained_filter_fn=checkpoint_filter_fn, + pretrained_filter_fn=_filter_fn, **kwargs, ) @@ -975,8 +1150,8 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs): def vit_tiny_patch16_224(pretrained=False, **kwargs): """ ViT-Tiny (Vit-Ti/16) """ - model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) - model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3) + model = _create_vision_transformer('vit_tiny_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -984,8 +1159,8 @@ def vit_tiny_patch16_224(pretrained=False, **kwargs): def vit_tiny_patch16_384(pretrained=False, **kwargs): """ ViT-Tiny (Vit-Ti/16) @ 384x384. 
""" - model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) - model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3) + model = _create_vision_transformer('vit_tiny_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -993,8 +1168,8 @@ def vit_tiny_patch16_384(pretrained=False, **kwargs): def vit_small_patch32_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/32) """ - model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1002,8 +1177,8 @@ def vit_small_patch32_224(pretrained=False, **kwargs): def vit_small_patch32_384(pretrained=False, **kwargs): """ ViT-Small (ViT-S/32) at 384x384. """ - model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1011,8 +1186,8 @@ def vit_small_patch32_384(pretrained=False, **kwargs): def vit_small_patch16_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/16) """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1020,8 +1195,8 @@ def vit_small_patch16_224(pretrained=False, **kwargs): def vit_small_patch16_384(pretrained=False, **kwargs): """ ViT-Small (ViT-S/16) """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1029,8 +1204,8 @@ def vit_small_patch16_384(pretrained=False, **kwargs): def vit_small_patch8_224(pretrained=False, **kwargs): """ ViT-Small (ViT-S/8) """ - model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6, **kwargs) - model = _create_vision_transformer('vit_small_patch8_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=8, embed_dim=384, depth=12, num_heads=6) + model = _create_vision_transformer('vit_small_patch8_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1039,8 +1214,8 @@ def vit_base_patch32_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k, source https://github.com/google-research/vision_transformer. 
""" - model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1049,8 +1224,8 @@ def vit_base_patch32_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1059,8 +1234,8 @@ def vit_base_patch16_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1069,8 +1244,8 @@ def vit_base_patch16_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1079,8 +1254,8 @@ def vit_base_patch8_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/8) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12, **kwargs) - model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12) + model = _create_vision_transformer('vit_base_patch8_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1088,8 +1263,8 @@ def vit_base_patch8_224(pretrained=False, **kwargs): def vit_large_patch32_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. 
""" - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1098,8 +1273,8 @@ def vit_large_patch32_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1108,8 +1283,8 @@ def vit_large_patch16_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1118,8 +1293,8 @@ def vit_large_patch16_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1127,8 +1302,8 @@ def vit_large_patch16_384(pretrained=False, **kwargs): def vit_large_patch14_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) """ - model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16) + model = _create_vision_transformer('vit_large_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1136,8 +1311,8 @@ def vit_large_patch14_224(pretrained=False, **kwargs): def vit_huge_patch14_224(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). 
""" - model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16) + model = _create_vision_transformer('vit_huge_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1145,8 +1320,8 @@ def vit_huge_patch14_224(pretrained=False, **kwargs): def vit_giant_patch14_224(pretrained=False, **kwargs): """ ViT-Giant (little-g) model (ViT-g/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 """ - model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16) + model = _create_vision_transformer('vit_giant_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1154,8 +1329,9 @@ def vit_giant_patch14_224(pretrained=False, **kwargs): def vit_gigantic_patch14_224(pretrained=False, **kwargs): """ ViT-Gigantic (big-G) model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560 """ - model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, **kwargs) - model = _create_vision_transformer('vit_gigantic_patch14_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16) + model = _create_vision_transformer( + 'vit_gigantic_patch14_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1164,8 +1340,9 @@ def vit_base_patch16_224_miil(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). 
Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, **kwargs) - model = _create_vision_transformer('vit_base_patch16_224_miil', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False) + model = _create_vision_transformer( + 'vit_base_patch16_224_miil', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1175,8 +1352,9 @@ def vit_medium_patch16_gap_240(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_240', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_240', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1186,8 +1364,9 @@ def vit_medium_patch16_gap_256(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_256', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_256', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1197,8 +1376,9 @@ def vit_medium_patch16_gap_384(pretrained=False, **kwargs): """ model_kwargs = dict( patch_size=16, embed_dim=512, depth=12, num_heads=8, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), qkv_bias=False, init_values=1e-6, fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_medium_patch16_gap_384', pretrained=pretrained, **model_kwargs) + global_pool='avg', qkv_bias=False, init_values=1e-6, fc_norm=False) + model = _create_vision_transformer( + 'vit_medium_patch16_gap_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1207,9 +1387,9 @@ def vit_base_patch16_gap_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) w/o class token, w/ avg-pool @ 256x256 """ model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=16, class_token=False, - global_pool=kwargs.get('global_pool', 'avg'), fc_norm=False, **kwargs) - model = _create_vision_transformer('vit_base_patch16_gap_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=768, depth=12, num_heads=16, class_token=False, global_pool='avg', fc_norm=False) + model = _create_vision_transformer( + 'vit_base_patch16_gap_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1218,8 +1398,9 @@ def vit_base_patch32_clip_224(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 224x224 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_224', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return 
model @@ -1228,8 +1409,9 @@ def vit_base_patch32_clip_384(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 384x384 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_384', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1238,8 +1420,9 @@ def vit_base_patch32_clip_448(pretrained=False, **kwargs): """ ViT-B/32 CLIP image tower @ 448x448 """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch32_clip_448', pretrained=pretrained, **model_kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch32_clip_448', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1247,9 +1430,9 @@ def vit_base_patch32_clip_448(pretrained=False, **kwargs): def vit_base_patch16_clip_224(pretrained=False, **kwargs): """ ViT-B/16 CLIP image tower """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch16_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch16_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1257,9 +1440,9 @@ def vit_base_patch16_clip_224(pretrained=False, **kwargs): def vit_base_patch16_clip_384(pretrained=False, **kwargs): """ ViT-B/16 CLIP image tower @ 384x384 """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_base_patch16_clip_384', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_base_patch16_clip_384', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1267,9 +1450,9 @@ def vit_base_patch16_clip_384(pretrained=False, **kwargs): def vit_large_patch14_clip_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) CLIP image tower """ - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_large_patch14_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_large_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1277,9 +1460,9 @@ def vit_large_patch14_clip_224(pretrained=False, **kwargs): def vit_large_patch14_clip_336(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/14) CLIP image tower @ 336x336 """ - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = 
_create_vision_transformer('vit_large_patch14_clip_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_large_patch14_clip_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1287,9 +1470,9 @@ def vit_large_patch14_clip_336(pretrained=False, **kwargs): def vit_huge_patch14_clip_224(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) CLIP image tower. """ - model_kwargs = dict( - patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_clip_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_huge_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1297,9 +1480,9 @@ def vit_huge_patch14_clip_224(pretrained=False, **kwargs): def vit_huge_patch14_clip_336(pretrained=False, **kwargs): """ ViT-Huge model (ViT-H/14) CLIP image tower @ 336x336 """ - model_kwargs = dict( - patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_huge_patch14_clip_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_huge_patch14_clip_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1309,9 +1492,9 @@ def vit_giant_patch14_clip_224(pretrained=False, **kwargs): Pretrained weights from CLIP image tower. 
""" model_kwargs = dict( - patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, - pre_norm=True, norm_layer=nn.LayerNorm, **kwargs) - model = _create_vision_transformer('vit_giant_patch14_clip_224', pretrained=pretrained, **model_kwargs) + patch_size=14, embed_dim=1408, mlp_ratio=48/11, depth=40, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm) + model = _create_vision_transformer( + 'vit_giant_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1321,8 +1504,9 @@ def vit_giant_patch14_clip_224(pretrained=False, **kwargs): def vit_base_patch32_plus_256(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32+) """ - model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_base_patch32_plus_256', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=32, embed_dim=896, depth=12, num_heads=14, init_values=1e-5) + model = _create_vision_transformer( + 'vit_base_patch32_plus_256', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1330,8 +1514,9 @@ def vit_base_patch32_plus_256(pretrained=False, **kwargs): def vit_base_patch16_plus_240(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16+) """ - model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_base_patch16_plus_240', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=896, depth=12, num_heads=14, init_values=1e-5) + model = _create_vision_transformer( + 'vit_base_patch16_plus_240', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1340,9 +1525,10 @@ def vit_base_patch16_rpn_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) w/ residual post-norm """ model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, class_token=False, - block_fn=ResPostBlock, global_pool=kwargs.pop('global_pool', 'avg'), **kwargs) - model = _create_vision_transformer('vit_base_patch16_rpn_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=768, depth=12, num_heads=12, qkv_bias=False, init_values=1e-5, + class_token=False, block_fn=ResPostBlock, global_pool='avg') + model = _create_vision_transformer( + 'vit_base_patch16_rpn_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1352,8 +1538,9 @@ def vit_small_patch16_36x1_224(pretrained=False, **kwargs): Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795 Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow. """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5, **kwargs) - model = _create_vision_transformer('vit_small_patch16_36x1_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=36, num_heads=6, init_values=1e-5) + model = _create_vision_transformer( + 'vit_small_patch16_36x1_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1364,8 +1551,9 @@ def vit_small_patch16_18x2_224(pretrained=False, **kwargs): Paper focuses on 24x2 + 48x1 for 'Small' width but those are extremely slow. 
""" model_kwargs = dict( - patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock, **kwargs) - model = _create_vision_transformer('vit_small_patch16_18x2_224', pretrained=pretrained, **model_kwargs) + patch_size=16, embed_dim=384, depth=18, num_heads=6, init_values=1e-5, block_fn=ParallelBlock) + model = _create_vision_transformer( + 'vit_small_patch16_18x2_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @@ -1374,25 +1562,51 @@ def vit_base_patch16_18x2_224(pretrained=False, **kwargs): """ ViT-Base w/ LayerScale + 18 x 2 (36 block parallel) config. Experimental, may remove. Based on `Three things everyone should know about Vision Transformers` - https://arxiv.org/abs/2203.09795 """ - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock, **kwargs) - model = _create_vision_transformer('vit_base_patch16_18x2_224', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=18, num_heads=12, init_values=1e-5, block_fn=ParallelBlock) + model = _create_vision_transformer( + 'vit_base_patch16_18x2_224', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @register_model def eva_large_patch14_196(pretrained=False, **kwargs): """ EVA-large model https://arxiv.org/abs/2211.07636 /via MAE MIM pretrain""" - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg', **kwargs) - model = _create_vision_transformer('eva_large_patch14_196', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg') + model = _create_vision_transformer( + 'eva_large_patch14_196', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model @register_model def eva_large_patch14_336(pretrained=False, **kwargs): """ EVA-large model https://arxiv.org/abs/2211.07636 via MAE MIM pretrain""" - model_kwargs = dict( - patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg', **kwargs) - model = _create_vision_transformer('eva_large_patch14_336', pretrained=pretrained, **model_kwargs) + model_kwargs = dict(patch_size=14, embed_dim=1024, depth=24, num_heads=16, global_pool='avg') + model = _create_vision_transformer('eva_large_patch14_336', pretrained=pretrained, **dict(model_kwargs, **kwargs)) + return model + + +@register_model +def flexivit_small(pretrained=False, **kwargs): + """ FlexiViT-Small + """ + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, no_embed_class=True) + model = _create_vision_transformer('flexivit_small', pretrained=pretrained, **dict(model_kwargs, **kwargs)) + return model + + +@register_model +def flexivit_base(pretrained=False, **kwargs): + """ FlexiViT-Base + """ + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, no_embed_class=True) + model = _create_vision_transformer('flexivit_base', pretrained=pretrained, **dict(model_kwargs, **kwargs)) + return model + + +@register_model +def flexivit_large(pretrained=False, **kwargs): + """ FlexiViT-Large + """ + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, no_embed_class=True) + model = _create_vision_transformer('flexivit_large', pretrained=pretrained, **dict(model_kwargs, **kwargs)) return model diff --git a/timm/models/vision_transformer_hybrid.py b/timm/models/vision_transformer_hybrid.py index cfdd0a0e..bec7989c 100644 --- a/timm/models/vision_transformer_hybrid.py +++ 
b/timm/models/vision_transformer_hybrid.py @@ -27,72 +27,6 @@ from .resnetv2 import ResNetV2, create_resnetv2_stem from .vision_transformer import _create_vision_transformer -def _cfg(url='', **kwargs): - return { - 'url': url, - 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, - 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True, - 'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5), - 'first_conv': 'patch_embed.backbone.stem.conv', 'classifier': 'head', - **kwargs - } - - -default_cfgs = generate_default_cfgs({ - # hybrid in-1k models (weights from official JAX impl where they exist) - 'vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', - custom_load=True, - first_conv='patch_embed.backbone.conv'), - 'vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', - first_conv='patch_embed.backbone.conv', input_size=(3, 384, 384), crop_pct=1.0, custom_load=True), - 'vit_small_r26_s32_224.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_light0-wd_0.03-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.03-res_224.npz', - custom_load=True, - ), - 'vit_small_r26_s32_384.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', - input_size=(3, 384, 384), crop_pct=1.0, custom_load=True), - 'vit_base_r26_s32_224.untrained': _cfg(), - 'vit_base_r50_s16_384.v1_in21k_ft_in1k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth', - input_size=(3, 384, 384), crop_pct=1.0), - 'vit_large_r50_s32_224.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz', - custom_load=True, - ), - 'vit_large_r50_s32_384.augreg_in21k_ft_in1k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', - input_size=(3, 384, 384), crop_pct=1.0, custom_load=True, - ), - - # hybrid in-21k models (weights from official Google JAX impl where they exist) - 'vit_tiny_r_s16_p8_224.augreg_in21k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz', - num_classes=21843, crop_pct=0.9, first_conv='patch_embed.backbone.conv', custom_load=True), - 'vit_small_r26_s32_224.augreg_in21k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0.npz', - num_classes=21843, crop_pct=0.9, custom_load=True), - 'vit_base_r50_s16_224.v1_in21k': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth', - num_classes=21843, crop_pct=0.9), - 'vit_large_r50_s32_224.augreg_in21k': _cfg( - url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0.npz', - num_classes=21843, crop_pct=0.9, custom_load=True), - - # hybrid models 
(using timm resnet backbones) - 'vit_small_resnet26d_224': _cfg( - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), - 'vit_small_resnet50d_s16_224': _cfg( - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), - 'vit_base_resnet26d_224': _cfg( - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), - 'vit_base_resnet50d_224': _cfg( - mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), -}) - - class HybridEmbed(nn.Module): """ CNN Feature Map Embedding Extract feature map from CNN, flatten, project to embedding dim. @@ -166,6 +100,83 @@ def _resnetv2(layers=(3, 4, 9), **kwargs): return backbone +def _cfg(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, + 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True, + 'mean': (0.5, 0.5, 0.5), 'std': (0.5, 0.5, 0.5), + 'first_conv': 'patch_embed.backbone.stem.conv', 'classifier': 'head', + **kwargs + } + + +default_cfgs = generate_default_cfgs({ + # hybrid in-1k models (weights from official JAX impl where they exist) + 'vit_tiny_r_s16_p8_224.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', + custom_load=True, + first_conv='patch_embed.backbone.conv'), + 'vit_tiny_r_s16_p8_384.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', + first_conv='patch_embed.backbone.conv', input_size=(3, 384, 384), crop_pct=1.0, custom_load=True), + 'vit_small_r26_s32_224.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_light0-wd_0.03-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.03-res_224.npz', + hf_hub_id='timm/', + custom_load=True, + ), + 'vit_small_r26_s32_384.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.03-res_384.npz', + hf_hub_id='timm/', + input_size=(3, 384, 384), crop_pct=1.0, custom_load=True), + 'vit_base_r26_s32_224.untrained': _cfg(), + 'vit_base_r50_s16_384.orig_in21k_ft_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth', + hf_hub_id='timm/', + input_size=(3, 384, 384), crop_pct=1.0), + 'vit_large_r50_s32_224.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium1-wd_0.1-do_0.1-sd_0.1--imagenet2012-steps_20k-lr_0.01-res_224.npz', + hf_hub_id='timm/', + custom_load=True, + ), + 'vit_large_r50_s32_384.augreg_in21k_ft_in1k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0--imagenet2012-steps_20k-lr_0.01-res_384.npz', + hf_hub_id='timm/', + input_size=(3, 384, 384), crop_pct=1.0, custom_load=True, + ), + + # hybrid in-21k models (weights from official Google JAX impl where they exist) + 'vit_tiny_r_s16_p8_224.augreg_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R_Ti_16-i21k-300ep-lr_0.001-aug_none-wd_0.03-do_0.0-sd_0.0.npz', + 
hf_hub_id='timm/', + num_classes=21843, crop_pct=0.9, first_conv='patch_embed.backbone.conv', custom_load=True), + 'vit_small_r26_s32_224.augreg_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R26_S_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.03-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', + num_classes=21843, crop_pct=0.9, custom_load=True), + 'vit_base_r50_s16_224.orig_in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth', + hf_hub_id='timm/', + num_classes=21843, crop_pct=0.9), + 'vit_large_r50_s32_224.augreg_in21k': _cfg( + url='https://storage.googleapis.com/vit_models/augreg/R50_L_32-i21k-300ep-lr_0.001-aug_medium2-wd_0.1-do_0.0-sd_0.0.npz', + hf_hub_id='timm/', + num_classes=21843, crop_pct=0.9, custom_load=True), + + # hybrid models (using timm resnet backbones) + 'vit_small_resnet26d_224.untrained': _cfg( + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), + 'vit_small_resnet50d_s16_224.untrained': _cfg( + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), + 'vit_base_resnet26d_224.untrained': _cfg( + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), + 'vit_base_resnet50d_224.untrained': _cfg( + mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, first_conv='patch_embed.backbone.conv1.0'), +}) + + @register_model def vit_tiny_r_s16_p8_224(pretrained=False, **kwargs): """ R+ViT-Ti/S16 w/ 8x8 patch hybrid @ 224 x 224. diff --git a/timm/models/vision_transformer_relpos.py b/timm/models/vision_transformer_relpos.py index 1a7c2f40..a7cf3e53 100644 --- a/timm/models/vision_transformer_relpos.py +++ b/timm/models/vision_transformer_relpos.py @@ -11,12 +11,12 @@ from typing import Optional, Tuple import torch import torch.nn as nn -import torch.nn.functional as F from torch.utils.checkpoint import checkpoint from timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD -from timm.layers import PatchEmbed, Mlp, DropPath, trunc_normal_ +from timm.layers import PatchEmbed, Mlp, DropPath, RelPosMlp, RelPosBias from ._builder import build_model_with_cfg +from ._pretrained import generate_default_cfgs from ._registry import register_model __all__ = ['VisionTransformerRelPos'] # model_registry will add each entrypoint fn to this @@ -24,216 +24,6 @@ __all__ = ['VisionTransformerRelPos'] # model_registry will add each entrypoint _logger = logging.getLogger(__name__) -def _cfg(url='', **kwargs): - return { - 'url': url, - 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, - 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True, - 'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD, - 'first_conv': 'patch_embed.proj', 'classifier': 'head', - **kwargs - } - - -default_cfgs = { - 'vit_relpos_base_patch32_plus_rpn_256': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_replos_base_patch32_plus_rpn_256-sw-dd486f51.pth', - input_size=(3, 256, 256)), - 'vit_relpos_base_patch16_plus_240': _cfg(url='', input_size=(3, 240, 240)), - - 'vit_relpos_small_patch16_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_small_patch16_224-sw-ec2778b4.pth'), - 'vit_relpos_medium_patch16_224': _cfg( - 
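Note: the rewritten cfg keys above follow the new 'model_name.tag' pretrained-tag scheme consumed by generate_default_cfgs, with hf_hub_id='timm/' marking weights mirrored on the Hugging Face Hub. A hedged usage sketch (tag name taken from the cfgs above; resolution behavior assumed from the tag scheme, not spelled out in this patch):

    import timm
    # an explicit tag selects the in21k weights instead of the default ft_in1k set
    model = timm.create_model('vit_small_r26_s32_224.augreg_in21k', pretrained=True)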
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_224-sw-11c174af.pth'), - 'vit_relpos_base_patch16_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_224-sw-49049aed.pth'), - - 'vit_srelpos_small_patch16_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_small_patch16_224-sw-6cdb8849.pth'), - 'vit_srelpos_medium_patch16_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_medium_patch16_224-sw-ad702b8c.pth'), - - 'vit_relpos_medium_patch16_cls_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_cls_224-sw-cfe8e259.pth'), - 'vit_relpos_base_patch16_cls_224': _cfg( - url=''), - 'vit_relpos_base_patch16_clsgap_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_gapcls_224-sw-1a341d6c.pth'), - - 'vit_relpos_small_patch16_rpn_224': _cfg(url=''), - 'vit_relpos_medium_patch16_rpn_224': _cfg( - url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_rpn_224-sw-5d2befd8.pth'), - 'vit_relpos_base_patch16_rpn_224': _cfg(url=''), -} - - -def gen_relative_position_index( - q_size: Tuple[int, int], - k_size: Tuple[int, int] = None, - class_token: bool = False) -> torch.Tensor: - # Adapted with significant modifications from Swin / BeiT codebases - # get pair-wise relative position index for each token inside the window - q_coords = torch.stack(torch.meshgrid([torch.arange(q_size[0]), torch.arange(q_size[1])])).flatten(1) # 2, Wh, Ww - if k_size is None: - k_coords = q_coords - k_size = q_size - else: - # different q vs k sizes is a WIP - k_coords = torch.stack(torch.meshgrid([torch.arange(k_size[0]), torch.arange(k_size[1])])).flatten(1) - relative_coords = q_coords[:, :, None] - k_coords[:, None, :] # 2, Wh*Ww, Wh*Ww - relative_coords = relative_coords.permute(1, 2, 0) # Wh*Ww, Wh*Ww, 2 - _, relative_position_index = torch.unique(relative_coords.view(-1, 2), return_inverse=True, dim=0) - - if class_token: - # handle cls to token & token 2 cls & cls to cls as per beit for rel pos bias - # NOTE not intended or tested with MLP log-coords - max_size = (max(q_size[0], k_size[0]), max(q_size[1], k_size[1])) - num_relative_distance = (2 * max_size[0] - 1) * (2 * max_size[1] - 1) + 3 - relative_position_index = F.pad(relative_position_index, [1, 0, 1, 0]) - relative_position_index[0, 0:] = num_relative_distance - 3 - relative_position_index[0:, 0] = num_relative_distance - 2 - relative_position_index[0, 0] = num_relative_distance - 1 - - return relative_position_index.contiguous() - - -def gen_relative_log_coords( - win_size: Tuple[int, int], - pretrained_win_size: Tuple[int, int] = (0, 0), - mode='swin', -): - assert mode in ('swin', 'cr', 'rw') - # as per official swin-v2 impl, supporting timm specific 'cr' and 'rw' log coords as well - relative_coords_h = torch.arange(-(win_size[0] - 1), win_size[0], dtype=torch.float32) - relative_coords_w = torch.arange(-(win_size[1] - 1), win_size[1], dtype=torch.float32) - relative_coords_table = torch.stack(torch.meshgrid([relative_coords_h, relative_coords_w])) - relative_coords_table = relative_coords_table.permute(1, 2, 0).contiguous() # 2*Wh-1, 2*Ww-1, 2 - if mode == 'swin': - if 
pretrained_win_size[0] > 0: - relative_coords_table[:, :, 0] /= (pretrained_win_size[0] - 1) - relative_coords_table[:, :, 1] /= (pretrained_win_size[1] - 1) - else: - relative_coords_table[:, :, 0] /= (win_size[0] - 1) - relative_coords_table[:, :, 1] /= (win_size[1] - 1) - relative_coords_table *= 8 # normalize to -8, 8 - relative_coords_table = torch.sign(relative_coords_table) * torch.log2( - 1.0 + relative_coords_table.abs()) / math.log2(8) - else: - if mode == 'rw': - # cr w/ window size normalization -> [-1,1] log coords - relative_coords_table[:, :, 0] /= (win_size[0] - 1) - relative_coords_table[:, :, 1] /= (win_size[1] - 1) - relative_coords_table *= 8 # scale to -8, 8 - relative_coords_table = torch.sign(relative_coords_table) * torch.log2( - 1.0 + relative_coords_table.abs()) - relative_coords_table /= math.log2(9) # -> [-1, 1] - else: - # mode == 'cr' - relative_coords_table = torch.sign(relative_coords_table) * torch.log( - 1.0 + relative_coords_table.abs()) - - return relative_coords_table - - -class RelPosMlp(nn.Module): - def __init__( - self, - window_size, - num_heads=8, - hidden_dim=128, - prefix_tokens=0, - mode='cr', - pretrained_window_size=(0, 0) - ): - super().__init__() - self.window_size = window_size - self.window_area = self.window_size[0] * self.window_size[1] - self.prefix_tokens = prefix_tokens - self.num_heads = num_heads - self.bias_shape = (self.window_area,) * 2 + (num_heads,) - if mode == 'swin': - self.bias_act = nn.Sigmoid() - self.bias_gain = 16 - mlp_bias = (True, False) - elif mode == 'rw': - self.bias_act = nn.Tanh() - self.bias_gain = 4 - mlp_bias = True - else: - self.bias_act = nn.Identity() - self.bias_gain = None - mlp_bias = True - - self.mlp = Mlp( - 2, # x, y - hidden_features=hidden_dim, - out_features=num_heads, - act_layer=nn.ReLU, - bias=mlp_bias, - drop=(0.125, 0.) 
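Note: a quick endpoint check of the three log-coordinate modes in gen_relative_log_coords above (editor's sketch, not part of the patch; x = 8.0 is the largest normalized relative coordinate after the `*= 8` scaling):

    import math

    x = 8.0
    swin = math.copysign(math.log2(1 + abs(x)) / math.log2(8), x)  # ~1.057, slightly outside [-1, 1]
    rw = math.copysign(math.log2(1 + abs(x)) / math.log2(9), x)    # exactly 1.0, hence the '-> [-1, 1]' comment
    cr = math.copysign(math.log(1 + abs(x)), x)                    # unbounded natural-log coords (~2.197)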
- ) - - self.register_buffer( - "relative_position_index", - gen_relative_position_index(window_size), - persistent=False) - - # get relative_coords_table - self.register_buffer( - "rel_coords_log", - gen_relative_log_coords(window_size, pretrained_window_size, mode=mode), - persistent=False) - - def get_bias(self) -> torch.Tensor: - relative_position_bias = self.mlp(self.rel_coords_log) - if self.relative_position_index is not None: - relative_position_bias = relative_position_bias.view(-1, self.num_heads)[ - self.relative_position_index.view(-1)] # Wh*Ww,Wh*Ww,nH - relative_position_bias = relative_position_bias.view(self.bias_shape) - relative_position_bias = relative_position_bias.permute(2, 0, 1) - relative_position_bias = self.bias_act(relative_position_bias) - if self.bias_gain is not None: - relative_position_bias = self.bias_gain * relative_position_bias - if self.prefix_tokens: - relative_position_bias = F.pad(relative_position_bias, [self.prefix_tokens, 0, self.prefix_tokens, 0]) - return relative_position_bias.unsqueeze(0).contiguous() - - def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None): - return attn + self.get_bias() - - -class RelPosBias(nn.Module): - - def __init__(self, window_size, num_heads, prefix_tokens=0): - super().__init__() - assert prefix_tokens <= 1 - self.window_size = window_size - self.window_area = window_size[0] * window_size[1] - self.bias_shape = (self.window_area + prefix_tokens,) * 2 + (num_heads,) - - num_relative_distance = (2 * window_size[0] - 1) * (2 * window_size[1] - 1) + 3 * prefix_tokens - self.relative_position_bias_table = nn.Parameter(torch.zeros(num_relative_distance, num_heads)) - self.register_buffer( - "relative_position_index", - gen_relative_position_index(self.window_size, class_token=prefix_tokens > 0), - persistent=False, - ) - - self.init_weights() - - def init_weights(self): - trunc_normal_(self.relative_position_bias_table, std=.02) - - def get_bias(self) -> torch.Tensor: - relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)] - # win_h * win_w, win_h * win_w, num_heads - relative_position_bias = relative_position_bias.view(self.bias_shape).permute(2, 0, 1) - return relative_position_bias.unsqueeze(0).contiguous() - - def forward(self, attn, shared_rel_pos: Optional[torch.Tensor] = None): - return attn + self.get_bias() - - class RelPosAttention(nn.Module): def __init__(self, dim, num_heads=8, qkv_bias=False, rel_pos_cls=None, attn_drop=0., proj_drop=0.): super().__init__() @@ -513,6 +303,57 @@ def _create_vision_transformer_relpos(variant, pretrained=False, **kwargs): return model +def _cfg(url='', **kwargs): + return { + 'url': url, + 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, + 'crop_pct': .9, 'interpolation': 'bicubic', 'fixed_input_size': True, + 'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD, + 'first_conv': 'patch_embed.proj', 'classifier': 'head', + **kwargs + } + + +default_cfgs = generate_default_cfgs({ + 'vit_relpos_base_patch32_plus_rpn_256.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_replos_base_patch32_plus_rpn_256-sw-dd486f51.pth', + hf_hub_id='timm/', + input_size=(3, 256, 256)), + 'vit_relpos_base_patch16_plus_240.untrained': _cfg(url='', input_size=(3, 240, 240)), + + 'vit_relpos_small_patch16_224.sw_in1k': _cfg( + 
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_small_patch16_224-sw-ec2778b4.pth', + hf_hub_id='timm/'), + 'vit_relpos_medium_patch16_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_224-sw-11c174af.pth', + hf_hub_id='timm/'), + 'vit_relpos_base_patch16_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_224-sw-49049aed.pth', + hf_hub_id='timm/'), + + 'vit_srelpos_small_patch16_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_small_patch16_224-sw-6cdb8849.pth', + hf_hub_id='timm/'), + 'vit_srelpos_medium_patch16_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_srelpos_medium_patch16_224-sw-ad702b8c.pth', + hf_hub_id='timm/'), + + 'vit_relpos_medium_patch16_cls_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_cls_224-sw-cfe8e259.pth', + hf_hub_id='timm/'), + 'vit_relpos_base_patch16_cls_224.untrained': _cfg(), + 'vit_relpos_base_patch16_clsgap_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_base_patch16_gapcls_224-sw-1a341d6c.pth', + hf_hub_id='timm/'), + + 'vit_relpos_small_patch16_rpn_224.untrained': _cfg(), + 'vit_relpos_medium_patch16_rpn_224.sw_in1k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-tpu-weights/vit_relpos_medium_patch16_rpn_224-sw-5d2befd8.pth', + hf_hub_id='timm/'), + 'vit_relpos_base_patch16_rpn_224.untrained': _cfg(), +}) + + @register_model def vit_relpos_base_patch32_plus_rpn_256(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32+) w/ relative log-coord position and residual post-norm, no class token diff --git a/timm/models/vovnet.py b/timm/models/vovnet.py index bf0e4f89..8aea5802 100644 --- a/timm/models/vovnet.py +++ b/timm/models/vovnet.py @@ -181,8 +181,18 @@ class SequentialAppendList(nn.Sequential): class OsaBlock(nn.Module): def __init__( - self, in_chs, mid_chs, out_chs, layer_per_block, residual=False, - depthwise=False, attn='', norm_layer=BatchNormAct2d, act_layer=nn.ReLU, drop_path=None): + self, + in_chs, + mid_chs, + out_chs, + layer_per_block, + residual=False, + depthwise=False, + attn='', + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_path=None, + ): super(OsaBlock, self).__init__() self.residual = residual @@ -232,9 +242,20 @@ class OsaBlock(nn.Module): class OsaStage(nn.Module): def __init__( - self, in_chs, mid_chs, out_chs, block_per_stage, layer_per_block, downsample=True, - residual=True, depthwise=False, attn='ese', norm_layer=BatchNormAct2d, act_layer=nn.ReLU, - drop_path_rates=None): + self, + in_chs, + mid_chs, + out_chs, + block_per_stage, + layer_per_block, + downsample=True, + residual=True, + depthwise=False, + attn='ese', + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_path_rates=None, + ): super(OsaStage, self).__init__() self.grad_checkpointing = False @@ -270,16 +291,38 @@ class OsaStage(nn.Module): class VovNet(nn.Module): def __init__( - self, cfg, in_chans=3, num_classes=1000, global_pool='avg', drop_rate=0., stem_stride=4, - output_stride=32, norm_layer=BatchNormAct2d, act_layer=nn.ReLU, drop_path_rate=0.): - """ VovNet (v2) + self, + cfg, + 
in_chans=3, + num_classes=1000, + global_pool='avg', + output_stride=32, + norm_layer=BatchNormAct2d, + act_layer=nn.ReLU, + drop_rate=0., + drop_path_rate=0., + **kwargs, + ): + """ + Args: + cfg (dict): Model architecture configuration + in_chans (int): Number of input channels (default: 3) + num_classes (int): Number of classifier classes (default: 1000) + global_pool (str): Global pooling type (default: 'avg') + output_stride (int): Output stride of network, one of (8, 16, 32) (default: 32) + norm_layer (Union[str, nn.Module]): normalization layer + act_layer (Union[str, nn.Module]): activation layer + drop_rate (float): Dropout rate (default: 0.) + drop_path_rate (float): Stochastic depth drop-path rate (default: 0.) + kwargs (dict): Extra kwargs overlayed onto cfg """ super(VovNet, self).__init__() self.num_classes = num_classes self.drop_rate = drop_rate - assert stem_stride in (4, 2) assert output_stride == 32 # FIXME support dilation + cfg = dict(cfg, **kwargs) + stem_stride = cfg.get("stem_stride", 4) stem_chs = cfg["stem_chs"] stage_conv_chs = cfg["stage_conv_chs"] stage_out_chs = cfg["stage_out_chs"] @@ -307,9 +350,15 @@ class VovNet(nn.Module): for i in range(4): # num_stages downsample = stem_stride == 2 or i > 0 # first stage has no stride/downsample if stem_stride is 4 stages += [OsaStage( - in_ch_list[i], stage_conv_chs[i], stage_out_chs[i], block_per_stage[i], layer_per_block, - downsample=downsample, drop_path_rates=stage_dpr[i], **stage_args) - ] + in_ch_list[i], + stage_conv_chs[i], + stage_out_chs[i], + block_per_stage[i], + layer_per_block, + downsample=downsample, + drop_path_rates=stage_dpr[i], + **stage_args, + )] self.num_features = stage_out_chs[i] current_stride *= 2 if downsample else 1 self.feature_info += [dict(num_chs=self.num_features, reduction=current_stride, module=f'stages.{i}')] @@ -324,7 +373,6 @@ class VovNet(nn.Module): elif isinstance(m, nn.Linear): nn.init.zeros_(m.bias) - @torch.jit.ignore def group_matcher(self, coarse=False): return dict( diff --git a/timm/utils/__init__.py b/timm/utils/__init__.py index a9ff0c78..7727adff 100644 --- a/timm/utils/__init__.py +++ b/timm/utils/__init__.py @@ -8,7 +8,7 @@ from .distributed import distribute_bn, reduce_tensor, init_distributed_device,\ from .jit import set_jit_legacy, set_jit_fuser from .log import setup_default_logging, FormatterNoInfo from .metrics import AverageMeter, accuracy -from .misc import natural_key, add_bool_arg +from .misc import natural_key, add_bool_arg, ParseKwargs from .model import unwrap_model, get_state_dict, freeze, unfreeze from .model_ema import ModelEma, ModelEmaV2 from .random import random_seed diff --git a/timm/utils/misc.py b/timm/utils/misc.py index 39c0097c..326a50f7 100644 --- a/timm/utils/misc.py +++ b/timm/utils/misc.py @@ -2,6 +2,8 @@ Hacked together by / Copyright 2020 Ross Wightman """ +import argparse +import ast import re @@ -16,3 +18,15 @@ def add_bool_arg(parser, name, default=False, help=''): group.add_argument('--' + name, dest=dest_name, action='store_true', help=help) group.add_argument('--no-' + name, dest=dest_name, action='store_false', help=help) parser.set_defaults(**{dest_name: default}) + + +class ParseKwargs(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + kw = {} + for value in values: + key, value = value.split('=') + try: + kw[key] = ast.literal_eval(value) + except ValueError: + kw[key] = str(value) # fallback to string (avoid need to escape on command line) + setattr(namespace, self.dest, kw) diff 
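Note: a short sketch of how the ParseKwargs action added above behaves; the argument name mirrors the --model-kwargs flag wired into train.py later in this patch, and the values are illustrative:

    import argparse
    from timm.utils import ParseKwargs

    parser = argparse.ArgumentParser()
    parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs)
    args = parser.parse_args(['--model-kwargs', 'depth=12', 'mlp_ratio=4.0', 'act_layer=gelu'])
    # ast.literal_eval parses numeric/literal values; bare strings fall back to str
    assert args.model_kwargs == {'depth': 12, 'mlp_ratio': 4.0, 'act_layer': 'gelu'}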
--git a/timm/utils/model.py b/timm/utils/model.py index b95c4539..d74ee5b7 100644 --- a/timm/utils/model.py +++ b/timm/utils/model.py @@ -7,6 +7,8 @@ import fnmatch import torch from torchvision.ops.misc import FrozenBatchNorm2d +from timm.layers import BatchNormAct2d, SyncBatchNormAct, FrozenBatchNormAct2d,\ + freeze_batch_norm_2d, unfreeze_batch_norm_2d from .model_ema import ModelEma @@ -100,70 +102,6 @@ def extract_spp_stats( return hook.stats -def freeze_batch_norm_2d(module): - """ - Converts all `BatchNorm2d` and `SyncBatchNorm` layers of provided module into `FrozenBatchNorm2d`. If `module` is - itself an instance of either `BatchNorm2d` or `SyncBatchNorm`, it is converted into `FrozenBatchNorm2d` and - returned. Otherwise, the module is walked recursively and submodules are converted in place. - - Args: - module (torch.nn.Module): Any PyTorch module. - - Returns: - torch.nn.Module: Resulting module - - Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 - """ - res = module - if isinstance(module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): - res = FrozenBatchNorm2d(module.num_features) - res.num_features = module.num_features - res.affine = module.affine - if module.affine: - res.weight.data = module.weight.data.clone().detach() - res.bias.data = module.bias.data.clone().detach() - res.running_mean.data = module.running_mean.data - res.running_var.data = module.running_var.data - res.eps = module.eps - else: - for name, child in module.named_children(): - new_child = freeze_batch_norm_2d(child) - if new_child is not child: - res.add_module(name, new_child) - return res - - -def unfreeze_batch_norm_2d(module): - """ - Converts all `FrozenBatchNorm2d` layers of provided module into `BatchNorm2d`. If `module` is itself and instance - of `FrozenBatchNorm2d`, it is converted into `BatchNorm2d` and returned. Otherwise, the module is walked - recursively and submodules are converted in place. - - Args: - module (torch.nn.Module): Any PyTorch module. - - Returns: - torch.nn.Module: Resulting module - - Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 - """ - res = module - if isinstance(module, FrozenBatchNorm2d): - res = torch.nn.BatchNorm2d(module.num_features) - if module.affine: - res.weight.data = module.weight.data.clone().detach() - res.bias.data = module.bias.data.clone().detach() - res.running_mean.data = module.running_mean.data - res.running_var.data = module.running_var.data - res.eps = module.eps - else: - for name, child in module.named_children(): - new_child = unfreeze_batch_norm_2d(child) - if new_child is not child: - res.add_module(name, new_child) - return res - - def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, mode='freeze'): """ Freeze or unfreeze parameters of the specified modules and those of all their hierarchical descendants. 
This is @@ -179,7 +117,12 @@ def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, """ assert mode in ["freeze", "unfreeze"], '`mode` must be one of "freeze" or "unfreeze"' - if isinstance(root_module, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + if isinstance(root_module, ( + torch.nn.modules.batchnorm.BatchNorm2d, + torch.nn.modules.batchnorm.SyncBatchNorm, + BatchNormAct2d, + SyncBatchNormAct, + )): # Raise assertion here because we can't convert it in place raise AssertionError( "You have provided a batch norm layer as the `root module`. Please use " @@ -213,13 +156,18 @@ def _freeze_unfreeze(root_module, submodules=[], include_bn_running_stats=True, # It's possible that `m` is a type of BatchNorm in itself, in which case `unfreeze_batch_norm_2d` won't # convert it in place, but will return the converted result. In this case `res` holds the converted # result and we may try to re-assign the named module - if isinstance(m, (torch.nn.modules.batchnorm.BatchNorm2d, torch.nn.modules.batchnorm.SyncBatchNorm)): + if isinstance(m, ( + torch.nn.modules.batchnorm.BatchNorm2d, + torch.nn.modules.batchnorm.SyncBatchNorm, + BatchNormAct2d, + SyncBatchNormAct, + )): _add_submodule(root_module, n, res) # Unfreeze batch norm else: res = unfreeze_batch_norm_2d(m) # Ditto. See note above in mode == 'freeze' branch - if isinstance(m, FrozenBatchNorm2d): + if isinstance(m, (FrozenBatchNorm2d, FrozenBatchNormAct2d)): _add_submodule(root_module, n, res) diff --git a/timm/version.py b/timm/version.py index 0716d38a..b285df69 100644 --- a/timm/version.py +++ b/timm/version.py @@ -1 +1 @@ -__version__ = '0.8.1dev0' +__version__ = '0.8.6dev0' diff --git a/train.py b/train.py index e51d7c90..9f450ab8 100755 --- a/train.py +++ b/train.py @@ -89,56 +89,58 @@ parser.add_argument('--data-dir', metavar='DIR', parser.add_argument('--dataset', metavar='NAME', default='', help='dataset type + name ("/") (default: ImageFolder or ImageTar if empty)') group.add_argument('--train-split', metavar='NAME', default='train', - help='dataset train split (default: train)') + help='dataset train split (default: train)') group.add_argument('--val-split', metavar='NAME', default='validation', - help='dataset validation split (default: validation)') + help='dataset validation split (default: validation)') group.add_argument('--dataset-download', action='store_true', default=False, - help='Allow download of dataset for torch/ and tfds/ datasets that support it.') + help='Allow download of dataset for torch/ and tfds/ datasets that support it.') group.add_argument('--class-map', default='', type=str, metavar='FILENAME', - help='path to class to idx mapping file (default: "")') + help='path to class to idx mapping file (default: "")') # Model parameters group = parser.add_argument_group('Model parameters') group.add_argument('--model', default='resnet50', type=str, metavar='MODEL', - help='Name of model to train (default: "resnet50")') + help='Name of model to train (default: "resnet50")') group.add_argument('--pretrained', action='store_true', default=False, - help='Start with pretrained version of specified network (if avail)') + help='Start with pretrained version of specified network (if avail)') group.add_argument('--initial-checkpoint', default='', type=str, metavar='PATH', - help='Initialize model from this checkpoint (default: none)') + help='Initialize model from this checkpoint (default: none)') group.add_argument('--resume', default='', type=str, 
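Note: with the widened isinstance checks above, the freeze/unfreeze path now appears to convert the norm+act variants as well, so a BatchNormAct2d layer freezes to FrozenBatchNormAct2d (keeping its activation) rather than a plain frozen BN. A hedged usage sketch (model and submodule names are illustrative):

    import timm
    from timm.utils import freeze, unfreeze

    model = timm.create_model('ese_vovnet39b')
    freeze(model, ['stem'])    # grads off; BN/BatchNormAct layers swapped to frozen variants
    unfreeze(model, ['stem'])  # frozen norms converted back, grads re-enabled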
metavar='PATH', - help='Resume full model and optimizer state from checkpoint (default: none)') + help='Resume full model and optimizer state from checkpoint (default: none)') group.add_argument('--no-resume-opt', action='store_true', default=False, - help='prevent resume of optimizer state when resuming model') + help='prevent resume of optimizer state when resuming model') group.add_argument('--num-classes', type=int, default=None, metavar='N', - help='number of label classes (Model default if None)') + help='number of label classes (Model default if None)') group.add_argument('--gp', default=None, type=str, metavar='POOL', - help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.') + help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.') group.add_argument('--img-size', type=int, default=None, metavar='N', - help='Image size (default: None => model default)') + help='Image size (default: None => model default)') group.add_argument('--in-chans', type=int, default=None, metavar='N', - help='Image input channels (default: None => 3)') + help='Image input channels (default: None => 3)') group.add_argument('--input-size', default=None, nargs=3, type=int, - metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') + metavar='N N N', + help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') group.add_argument('--crop-pct', default=None, type=float, - metavar='N', help='Input image center crop percent (for validation only)') + metavar='N', help='Input image center crop percent (for validation only)') group.add_argument('--mean', type=float, nargs='+', default=None, metavar='MEAN', - help='Override mean pixel value of dataset') + help='Override mean pixel value of dataset') group.add_argument('--std', type=float, nargs='+', default=None, metavar='STD', - help='Override std deviation of dataset') + help='Override std deviation of dataset') group.add_argument('--interpolation', default='', type=str, metavar='NAME', - help='Image resize interpolation type (overrides model)') + help='Image resize interpolation type (overrides model)') group.add_argument('-b', '--batch-size', type=int, default=128, metavar='N', - help='Input batch size for training (default: 128)') + help='Input batch size for training (default: 128)') group.add_argument('-vb', '--validation-batch-size', type=int, default=None, metavar='N', - help='Validation batch size override (default: None)') + help='Validation batch size override (default: None)') group.add_argument('--channels-last', action='store_true', default=False, - help='Use channels_last memory layout') + help='Use channels_last memory layout') group.add_argument('--fuser', default='', type=str, - help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") + help="Select jit fuser. 
One of ('', 'te', 'old', 'nvfuser')") group.add_argument('--grad-checkpointing', action='store_true', default=False, - help='Enable gradient checkpointing through model blocks/stages') + help='Enable gradient checkpointing through model blocks/stages') group.add_argument('--fast-norm', default=False, action='store_true', - help='enable experimental fast-norm') + help='enable experimental fast-norm') +group.add_argument('--model-kwargs', nargs='*', default={}, action=utils.ParseKwargs) scripting_group = group.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', dest='torchscript', action='store_true', @@ -151,199 +153,200 @@ scripting_group.add_argument('--aot-autograd', default=False, action='store_true # Optimizer parameters group = parser.add_argument_group('Optimizer parameters') group.add_argument('--opt', default='sgd', type=str, metavar='OPTIMIZER', - help='Optimizer (default: "sgd")') + help='Optimizer (default: "sgd")') group.add_argument('--opt-eps', default=None, type=float, metavar='EPSILON', - help='Optimizer Epsilon (default: None, use opt default)') + help='Optimizer Epsilon (default: None, use opt default)') group.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA', - help='Optimizer Betas (default: None, use opt default)') + help='Optimizer Betas (default: None, use opt default)') group.add_argument('--momentum', type=float, default=0.9, metavar='M', - help='Optimizer momentum (default: 0.9)') + help='Optimizer momentum (default: 0.9)') group.add_argument('--weight-decay', type=float, default=2e-5, - help='weight decay (default: 2e-5)') + help='weight decay (default: 2e-5)') group.add_argument('--clip-grad', type=float, default=None, metavar='NORM', - help='Clip gradient norm (default: None, no clipping)') + help='Clip gradient norm (default: None, no clipping)') group.add_argument('--clip-mode', type=str, default='norm', - help='Gradient clipping mode. One of ("norm", "value", "agc")') + help='Gradient clipping mode. 
One of ("norm", "value", "agc")') group.add_argument('--layer-decay', type=float, default=None, - help='layer-wise learning rate decay (default: None)') + help='layer-wise learning rate decay (default: None)') +group.add_argument('--opt-kwargs', nargs='*', default={}, action=utils.ParseKwargs) # Learning rate schedule parameters group = parser.add_argument_group('Learning rate schedule parameters') group.add_argument('--sched', type=str, default='cosine', metavar='SCHEDULER', - help='LR scheduler (default: "step"') + help='LR scheduler (default: "step"') group.add_argument('--sched-on-updates', action='store_true', default=False, - help='Apply LR scheduler step on update instead of epoch end.') + help='Apply LR scheduler step on update instead of epoch end.') group.add_argument('--lr', type=float, default=None, metavar='LR', - help='learning rate, overrides lr-base if set (default: None)') + help='learning rate, overrides lr-base if set (default: None)') group.add_argument('--lr-base', type=float, default=0.1, metavar='LR', - help='base learning rate: lr = lr_base * global_batch_size / base_size') + help='base learning rate: lr = lr_base * global_batch_size / base_size') group.add_argument('--lr-base-size', type=int, default=256, metavar='DIV', - help='base learning rate batch size (divisor, default: 256).') + help='base learning rate batch size (divisor, default: 256).') group.add_argument('--lr-base-scale', type=str, default='', metavar='SCALE', - help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') + help='base learning rate vs batch_size scaling ("linear", "sqrt", based on opt if empty)') group.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct', - help='learning rate noise on/off epoch percentages') + help='learning rate noise on/off epoch percentages') group.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT', - help='learning rate noise limit percent (default: 0.67)') + help='learning rate noise limit percent (default: 0.67)') group.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV', - help='learning rate noise std-dev (default: 1.0)') + help='learning rate noise std-dev (default: 1.0)') group.add_argument('--lr-cycle-mul', type=float, default=1.0, metavar='MULT', - help='learning rate cycle len multiplier (default: 1.0)') + help='learning rate cycle len multiplier (default: 1.0)') group.add_argument('--lr-cycle-decay', type=float, default=0.5, metavar='MULT', - help='amount to decay each learning rate cycle (default: 0.5)') + help='amount to decay each learning rate cycle (default: 0.5)') group.add_argument('--lr-cycle-limit', type=int, default=1, metavar='N', - help='learning rate cycle limit, cycles enabled if > 1') + help='learning rate cycle limit, cycles enabled if > 1') group.add_argument('--lr-k-decay', type=float, default=1.0, - help='learning rate k-decay for cosine/poly (default: 1.0)') + help='learning rate k-decay for cosine/poly (default: 1.0)') group.add_argument('--warmup-lr', type=float, default=1e-5, metavar='LR', - help='warmup learning rate (default: 1e-5)') + help='warmup learning rate (default: 1e-5)') group.add_argument('--min-lr', type=float, default=0, metavar='LR', - help='lower lr bound for cyclic schedulers that hit 0 (default: 0)') + help='lower lr bound for cyclic schedulers that hit 0 (default: 0)') group.add_argument('--epochs', type=int, default=300, metavar='N', - help='number of epochs to train (default: 300)') + help='number of epochs 
to train (default: 300)') group.add_argument('--epoch-repeats', type=float, default=0., metavar='N', - help='epoch repeat multiplier (number of times to repeat dataset epoch per train epoch).') + help='epoch repeat multiplier (number of times to repeat dataset epoch per train epoch).') group.add_argument('--start-epoch', default=None, type=int, metavar='N', - help='manual epoch number (useful on restarts)') + help='manual epoch number (useful on restarts)') group.add_argument('--decay-milestones', default=[90, 180, 270], type=int, nargs='+', metavar="MILESTONES", - help='list of decay epoch indices for multistep lr. must be increasing') + help='list of decay epoch indices for multistep lr. must be increasing') group.add_argument('--decay-epochs', type=float, default=90, metavar='N', - help='epoch interval to decay LR') + help='epoch interval to decay LR') group.add_argument('--warmup-epochs', type=int, default=5, metavar='N', - help='epochs to warmup LR, if scheduler supports') + help='epochs to warmup LR, if scheduler supports') group.add_argument('--warmup-prefix', action='store_true', default=False, - help='Exclude warmup period from decay schedule.'), + help='Exclude warmup period from decay schedule.'), group.add_argument('--cooldown-epochs', type=int, default=0, metavar='N', - help='epochs to cooldown LR at min_lr, after cyclic schedule ends') + help='epochs to cooldown LR at min_lr, after cyclic schedule ends') group.add_argument('--patience-epochs', type=int, default=10, metavar='N', - help='patience epochs for Plateau LR scheduler (default: 10)') + help='patience epochs for Plateau LR scheduler (default: 10)') group.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE', - help='LR decay rate (default: 0.1)') + help='LR decay rate (default: 0.1)') # Augmentation & regularization parameters group = parser.add_argument_group('Augmentation and regularization parameters') group.add_argument('--no-aug', action='store_true', default=False, - help='Disable all training augmentation, override other train aug args') + help='Disable all training augmentation, override other train aug args') group.add_argument('--scale', type=float, nargs='+', default=[0.08, 1.0], metavar='PCT', - help='Random resize scale (default: 0.08 1.0)') -group.add_argument('--ratio', type=float, nargs='+', default=[3./4., 4./3.], metavar='RATIO', - help='Random resize aspect ratio (default: 0.75 1.33)') + help='Random resize scale (default: 0.08 1.0)') +group.add_argument('--ratio', type=float, nargs='+', default=[3. / 4., 4. / 3.], metavar='RATIO', + help='Random resize aspect ratio (default: 0.75 1.33)') group.add_argument('--hflip', type=float, default=0.5, - help='Horizontal flip training aug probability') + help='Horizontal flip training aug probability') group.add_argument('--vflip', type=float, default=0., - help='Vertical flip training aug probability') + help='Vertical flip training aug probability') group.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT', - help='Color jitter factor (default: 0.4)') + help='Color jitter factor (default: 0.4)') group.add_argument('--aa', type=str, default=None, metavar='NAME', - help='Use AutoAugment policy. "v0" or "original". (default: None)'), + help='Use AutoAugment policy. "v0" or "original". 
(default: None)'), group.add_argument('--aug-repeats', type=float, default=0, - help='Number of augmentation repetitions (distributed training only) (default: 0)') + help='Number of augmentation repetitions (distributed training only) (default: 0)') group.add_argument('--aug-splits', type=int, default=0, - help='Number of augmentation splits (default: 0, valid: 0 or >=2)') + help='Number of augmentation splits (default: 0, valid: 0 or >=2)') group.add_argument('--jsd-loss', action='store_true', default=False, - help='Enable Jensen-Shannon Divergence + CE loss. Use with `--aug-splits`.') + help='Enable Jensen-Shannon Divergence + CE loss. Use with `--aug-splits`.') group.add_argument('--bce-loss', action='store_true', default=False, - help='Enable BCE loss w/ Mixup/CutMix use.') + help='Enable BCE loss w/ Mixup/CutMix use.') group.add_argument('--bce-target-thresh', type=float, default=None, - help='Threshold for binarizing softened BCE targets (default: None, disabled)') + help='Threshold for binarizing softened BCE targets (default: None, disabled)') group.add_argument('--reprob', type=float, default=0., metavar='PCT', - help='Random erase prob (default: 0.)') + help='Random erase prob (default: 0.)') group.add_argument('--remode', type=str, default='pixel', - help='Random erase mode (default: "pixel")') + help='Random erase mode (default: "pixel")') group.add_argument('--recount', type=int, default=1, - help='Random erase count (default: 1)') + help='Random erase count (default: 1)') group.add_argument('--resplit', action='store_true', default=False, - help='Do not random erase first (clean) augmentation split') + help='Do not random erase first (clean) augmentation split') group.add_argument('--mixup', type=float, default=0.0, - help='mixup alpha, mixup enabled if > 0. (default: 0.)') + help='mixup alpha, mixup enabled if > 0. (default: 0.)') group.add_argument('--cutmix', type=float, default=0.0, - help='cutmix alpha, cutmix enabled if > 0. (default: 0.)') + help='cutmix alpha, cutmix enabled if > 0. (default: 0.)') group.add_argument('--cutmix-minmax', type=float, nargs='+', default=None, - help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)') + help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)') group.add_argument('--mixup-prob', type=float, default=1.0, - help='Probability of performing mixup or cutmix when either/both is enabled') + help='Probability of performing mixup or cutmix when either/both is enabled') group.add_argument('--mixup-switch-prob', type=float, default=0.5, - help='Probability of switching to cutmix when both mixup and cutmix enabled') + help='Probability of switching to cutmix when both mixup and cutmix enabled') group.add_argument('--mixup-mode', type=str, default='batch', - help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"') + help='How to apply mixup/cutmix params. 
Per "batch", "pair", or "elem"') group.add_argument('--mixup-off-epoch', default=0, type=int, metavar='N', - help='Turn off mixup after this epoch, disabled if 0 (default: 0)') + help='Turn off mixup after this epoch, disabled if 0 (default: 0)') group.add_argument('--smoothing', type=float, default=0.1, - help='Label smoothing (default: 0.1)') + help='Label smoothing (default: 0.1)') group.add_argument('--train-interpolation', type=str, default='random', - help='Training interpolation (random, bilinear, bicubic default: "random")') + help='Training interpolation (random, bilinear, bicubic default: "random")') group.add_argument('--drop', type=float, default=0.0, metavar='PCT', - help='Dropout rate (default: 0.)') + help='Dropout rate (default: 0.)') group.add_argument('--drop-connect', type=float, default=None, metavar='PCT', - help='Drop connect rate, DEPRECATED, use drop-path (default: None)') + help='Drop connect rate, DEPRECATED, use drop-path (default: None)') group.add_argument('--drop-path', type=float, default=None, metavar='PCT', - help='Drop path rate (default: None)') + help='Drop path rate (default: None)') group.add_argument('--drop-block', type=float, default=None, metavar='PCT', - help='Drop block rate (default: None)') + help='Drop block rate (default: None)') # Batch norm parameters (only works with gen_efficientnet based models currently) group = parser.add_argument_group('Batch norm parameters', 'Only works with gen_efficientnet based models currently.') group.add_argument('--bn-momentum', type=float, default=None, - help='BatchNorm momentum override (if not None)') + help='BatchNorm momentum override (if not None)') group.add_argument('--bn-eps', type=float, default=None, - help='BatchNorm epsilon override (if not None)') + help='BatchNorm epsilon override (if not None)') group.add_argument('--sync-bn', action='store_true', - help='Enable NVIDIA Apex or Torch synchronized BatchNorm.') + help='Enable NVIDIA Apex or Torch synchronized BatchNorm.') group.add_argument('--dist-bn', type=str, default='reduce', - help='Distribute BatchNorm stats between nodes after each epoch ("broadcast", "reduce", or "")') + help='Distribute BatchNorm stats between nodes after each epoch ("broadcast", "reduce", or "")') group.add_argument('--split-bn', action='store_true', - help='Enable separate BN layers per augmentation split.') + help='Enable separate BN layers per augmentation split.') # Model Exponential Moving Average group = parser.add_argument_group('Model exponential moving average parameters') group.add_argument('--model-ema', action='store_true', default=False, - help='Enable tracking moving average of model weights') + help='Enable tracking moving average of model weights') group.add_argument('--model-ema-force-cpu', action='store_true', default=False, - help='Force ema to be tracked on CPU, rank=0 node only. Disables EMA validation.') + help='Force ema to be tracked on CPU, rank=0 node only. 
 # Misc
 group = parser.add_argument_group('Miscellaneous parameters')
 group.add_argument('--seed', type=int, default=42, metavar='S',
-                   help='random seed (default: 42)')
+                   help='random seed (default: 42)')
 group.add_argument('--worker-seeding', type=str, default='all',
-                   help='worker seed mode (default: all)')
+                   help='worker seed mode (default: all)')
 group.add_argument('--log-interval', type=int, default=50, metavar='N',
-                   help='how many batches to wait before logging training status')
+                   help='how many batches to wait before logging training status')
 group.add_argument('--recovery-interval', type=int, default=0, metavar='N',
-                   help='how many batches to wait before writing recovery checkpoint')
+                   help='how many batches to wait before writing recovery checkpoint')
 group.add_argument('--checkpoint-hist', type=int, default=10, metavar='N',
-                   help='number of checkpoints to keep (default: 10)')
+                   help='number of checkpoints to keep (default: 10)')
 group.add_argument('-j', '--workers', type=int, default=4, metavar='N',
-                   help='how many training processes to use (default: 4)')
+                   help='how many training processes to use (default: 4)')
 group.add_argument('--save-images', action='store_true', default=False,
-                   help='save images of input batches every log interval for debugging')
+                   help='save images of input batches every log interval for debugging')
 group.add_argument('--amp', action='store_true', default=False,
-                   help='use NVIDIA Apex AMP or Native AMP for mixed precision training')
+                   help='use NVIDIA Apex AMP or Native AMP for mixed precision training')
 group.add_argument('--amp-dtype', default='float16', type=str,
-                   help='lower precision AMP dtype (default: float16)')
+                   help='lower precision AMP dtype (default: float16)')
 group.add_argument('--amp-impl', default='native', type=str,
-                   help='AMP impl to use, "native" or "apex" (default: native)')
+                   help='AMP impl to use, "native" or "apex" (default: native)')
 group.add_argument('--no-ddp-bb', action='store_true', default=False,
-                   help='Force broadcast buffers for native DDP to off.')
+                   help='Force broadcast buffers for native DDP to off.')
 group.add_argument('--pin-mem', action='store_true', default=False,
-                   help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
+                   help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
 group.add_argument('--no-prefetcher', action='store_true', default=False,
-                   help='disable fast prefetcher')
+                   help='disable fast prefetcher')
 group.add_argument('--output', default='', type=str, metavar='PATH',
-                   help='path to output folder (default: none, current dir)')
+                   help='path to output folder (default: none, current dir)')
 group.add_argument('--experiment', default='', type=str, metavar='NAME',
-                   help='name of train experiment, name of sub-folder for output')
+                   help='name of train experiment, name of sub-folder for output')
 group.add_argument('--eval-metric', default='top1', type=str, metavar='EVAL_METRIC',
-                   help='Best metric (default: "top1")')
+                   help='Best metric (default: "top1")')
 group.add_argument('--tta', type=int, default=0, metavar='N',
-                   help='Test/inference time augmentation (oversampling) factor. 0=None (default: 0)')
+                   help='Test/inference time augmentation (oversampling) factor. 0=None (default: 0)')
 group.add_argument("--local_rank", default=0, type=int)
 group.add_argument('--use-multi-epochs-loader', action='store_true', default=False,
-                   help='use the multi-epochs-loader to save time at the beginning of every epoch')
+                   help='use the multi-epochs-loader to save time at the beginning of every epoch')
 group.add_argument('--log-wandb', action='store_true', default=False,
-                   help='log training and validation metrics to wandb')
+                   help='log training and validation metrics to wandb')


 def _parse_args():
@@ -371,8 +374,6 @@ def main():
         torch.backends.cuda.matmul.allow_tf32 = True
         torch.backends.cudnn.benchmark = True

-    if args.data and not args.data_dir:
-        args.data_dir = args.data
     args.prefetcher = not args.no_prefetcher
     device = utils.init_distributed_device(args)
     if args.distributed:
@@ -383,14 +384,6 @@ def main():
         _logger.info(f'Training with a single process on 1 device ({args.device}).')
     assert args.rank >= 0

-    if utils.is_primary(args) and args.log_wandb:
-        if has_wandb:
-            wandb.init(project=args.experiment, config=args)
-        else:
-            _logger.warning(
-                "You've requested to log metrics to wandb but package not found. "
-                "Metrics not being logged to wandb, try `pip install wandb`")
-
     # resolve AMP arguments based on PyTorch / Apex availability
     use_amp = None
     amp_dtype = torch.float16
@@ -432,6 +425,7 @@ def main():
         bn_eps=args.bn_eps,
         scriptable=args.torchscript,
         checkpoint_path=args.initial_checkpoint,
+        **args.model_kwargs,
     )
     if args.num_classes is None:
         assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'
@@ -504,7 +498,11 @@ def main():
                 f'Learning rate ({args.lr}) calculated from base learning rate ({args.lr_base}) '
                 f'and global batch size ({global_batch_size}) with {args.lr_base_scale} scaling.')

-    optimizer = create_optimizer_v2(model, **optimizer_kwargs(cfg=args))
+    optimizer = create_optimizer_v2(
+        model,
+        **optimizer_kwargs(cfg=args),
+        **args.opt_kwargs,
+    )

     # setup automatic mixed-precision (AMP) loss scaling and op casting
     amp_autocast = suppress  # do nothing
@@ -559,6 +557,8 @@ def main():
     # NOTE: EMA model does not need to be wrapped by DDP

     # create the train and eval datasets
+    if args.data and not args.data_dir:
+        args.data_dir = args.data
     dataset_train = create_dataset(
         args.dataset,
         root=args.data_dir,
@@ -712,6 +712,14 @@ def main():
         with open(os.path.join(output_dir, 'args.yaml'), 'w') as f:
             f.write(args_text)

+    if utils.is_primary(args) and args.log_wandb:
+        if has_wandb:
+            wandb.init(project=args.experiment, config=args)
+        else:
+            _logger.warning(
+                "You've requested to log metrics to wandb but package not found. "
+                "Metrics not being logged to wandb, try `pip install wandb`")
+
     # setup learning rate schedule and starting epoch
     updates_per_epoch = len(loader_train)
     lr_scheduler, num_epochs = create_scheduler_v2(
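The new `**args.opt_kwargs` expansion means extra key=value pairs from the command line reach the optimizer factory. A roughly equivalent direct call, as a sketch (valid extra kwargs depend on the chosen optimizer):

from timm.optim import create_optimizer_v2

# as if run with: --opt adamw --lr 5e-4 --weight-decay 0.05 --opt-kwargs foreach=True
optimizer = create_optimizer_v2(
    model,
    opt='adamw',
    lr=5e-4,
    weight_decay=0.05,
    foreach=True,  # extra kwarg passed through opt_kwargs
)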
" + "Metrics not being logged to wandb, try `pip install wandb`") + # setup learning rate schedule and starting epoch updates_per_epoch = len(loader_train) lr_scheduler, num_epochs = create_scheduler_v2( diff --git a/validate.py b/validate.py index 4669fbac..b606103d 100755 --- a/validate.py +++ b/validate.py @@ -26,7 +26,7 @@ from timm.data import create_dataset, create_loader, resolve_data_config, RealLa from timm.layers import apply_test_time_pool, set_fast_norm from timm.models import create_model, load_checkpoint, is_model, list_models from timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_fuser, \ - decay_batch_step, check_batch_size_retry + decay_batch_step, check_batch_size_retry, ParseKwargs try: from apex import amp @@ -71,6 +71,8 @@ parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N', help='mini-batch size (default: 256)') parser.add_argument('--img-size', default=None, type=int, metavar='N', help='Input image dimension, uses model default if empty') +parser.add_argument('--in-chans', type=int, default=None, metavar='N', + help='Image input channels (default: None => 3)') parser.add_argument('--input-size', default=None, nargs=3, type=int, metavar='N N N', help='Input all image dimensions (d h w, e.g. --input-size 3 224 224), uses model default if empty') parser.add_argument('--use-train-size', action='store_true', default=False, @@ -123,6 +125,8 @@ parser.add_argument('--fuser', default='', type=str, help="Select jit fuser. One of ('', 'te', 'old', 'nvfuser')") parser.add_argument('--fast-norm', default=False, action='store_true', help='enable experimental fast-norm') +parser.add_argument('--model-kwargs', nargs='*', default={}, action=ParseKwargs) + scripting_group = parser.add_mutually_exclusive_group() scripting_group.add_argument('--torchscript', default=False, action='store_true', @@ -181,13 +185,20 @@ def validate(args): set_fast_norm() # create model + in_chans = 3 + if args.in_chans is not None: + in_chans = args.in_chans + elif args.input_size is not None: + in_chans = args.input_size[0] + model = create_model( args.model, pretrained=args.pretrained, num_classes=args.num_classes, - in_chans=3, + in_chans=in_chans, global_pool=args.gp, scriptable=args.torchscript, + **args.model_kwargs, ) if args.num_classes is None: assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.' 
@@ -232,8 +243,9 @@ def validate(args): criterion = nn.CrossEntropyLoss().to(device) + root_dir = args.data or args.data_dir dataset = create_dataset( - root=args.data, + root=root_dir, name=args.dataset, split=args.split, download=args.dataset_download, @@ -389,7 +401,7 @@ def main(): if args.model == 'all': # validate all models in a list of names with pretrained checkpoints args.pretrained = True - model_names = list_models(pretrained=True, exclude_filters=['*_in21k', '*_in22k', '*_dino']) + model_names = list_models('convnext*', pretrained=True, exclude_filters=['*_in21k', '*_in22k', '*in12k', '*_dino', '*fcmae']) model_cfgs = [(n, '') for n in model_names] elif not is_model(args.model): # model name doesn't exist, try as wildcard filter From 8ab573cd2637a0a18e7f4cd799d1ff790e03989e Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 20 Jan 2023 14:40:16 -0800 Subject: [PATCH 23/34] Add convnext_tiny and convnext_small 384x384 fine-tunes of in12k weights, fix pool size for laion CLIP convnext weights --- timm/models/convnext.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/timm/models/convnext.py b/timm/models/convnext.py index 05e29a73..2bbe0b11 100644 --- a/timm/models/convnext.py +++ b/timm/models/convnext.py @@ -500,6 +500,13 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='timm/', crop_pct=0.95, test_input_size=(3, 288, 288), test_crop_pct=1.0), + 'convnext_tiny.in12k_ft_in1k_384': _cfg( + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnext_small.in12k_ft_in1k_384': _cfg( + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'convnext_nano.in12k': _cfg( hf_hub_id='timm/', crop_pct=0.95, num_classes=11821), @@ -706,27 +713,27 @@ default_cfgs = generate_default_cfgs({ hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laion2b_augreg': _cfg( hf_hub_id='laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg', hf_hub_filename='open_clip_pytorch_model.bin', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona': _cfg( hf_hub_id='laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - input_size=(3, 256, 256), crop_pct=1.0, num_classes=640), + input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona_320': _cfg( hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K', hf_hub_filename='open_clip_pytorch_model.bin', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), + input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=640), 'convnext_base.clip_laiona_augreg_320': _cfg( hf_hub_id='laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg', hf_hub_filename='open_clip_pytorch_model.bin', mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, - input_size=(3, 320, 320), crop_pct=1.0, num_classes=640), + input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0, num_classes=640), }) From 
ca38e1e73fe4f318e153f2cb66efe41de4b7cf5f Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 20 Jan 2023 14:44:05 -0800
Subject: [PATCH 24/34] Update ClassifierHead module, add reset() method,
 update in_chs -> in_features for consistency

---
 timm/layers/classifier.py       | 15 +++++++++++++--
 timm/models/cspnet.py           |  2 +-
 timm/models/regnet.py           |  2 +-
 timm/models/xception_aligned.py |  2 +-
 4 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/timm/layers/classifier.py b/timm/layers/classifier.py
index 3ac33387..e885084c 100644
--- a/timm/layers/classifier.py
+++ b/timm/layers/classifier.py
@@ -38,13 +38,24 @@ def create_classifier(num_features, num_classes, pool_type='avg', use_conv=False

 class ClassifierHead(nn.Module):
     """Classifier head w/ configurable global pooling and dropout."""

-    def __init__(self, in_chs, num_classes, pool_type='avg', drop_rate=0., use_conv=False):
+    def __init__(self, in_features, num_classes, pool_type='avg', drop_rate=0., use_conv=False):
         super(ClassifierHead, self).__init__()
         self.drop_rate = drop_rate
-        self.global_pool, num_pooled_features = _create_pool(in_chs, num_classes, pool_type, use_conv=use_conv)
+        self.in_features = in_features
+        self.use_conv = use_conv
+
+        self.global_pool, num_pooled_features = _create_pool(in_features, num_classes, pool_type, use_conv=use_conv)
         self.fc = _create_fc(num_pooled_features, num_classes, use_conv=use_conv)
         self.flatten = nn.Flatten(1) if use_conv and pool_type else nn.Identity()

+    def reset(self, num_classes, global_pool=None):
+        if global_pool is not None:
+            if global_pool != self.global_pool.pool_type:
+                self.global_pool, _ = _create_pool(self.in_features, num_classes, global_pool, use_conv=self.use_conv)
+            self.flatten = nn.Flatten(1) if self.use_conv and global_pool else nn.Identity()
+        num_pooled_features = self.in_features * self.global_pool.feat_mult()
+        self.fc = _create_fc(num_pooled_features, num_classes, use_conv=self.use_conv)
+
     def forward(self, x, pre_logits: bool = False):
         x = self.global_pool(x)
         if self.drop_rate:
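The new reset() lets an existing head be re-targeted without rebuilding the model. A usage sketch (the feature dim and pooling choices here are illustrative):

head = ClassifierHead(in_features=2048, num_classes=1000, pool_type='avg')

# later, adapt the same module to a 10-class task with max pooling
head.reset(10, global_pool='max')

# passing num_classes <= 0 should install an identity fc via _create_fc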
diff --git a/timm/models/cspnet.py b/timm/models/cspnet.py
index 26ec54d9..da9d1ae0 100644
--- a/timm/models/cspnet.py
+++ b/timm/models/cspnet.py
@@ -913,7 +913,7 @@ class CspNet(nn.Module):
         # Construct the head
         self.num_features = prev_chs
         self.head = ClassifierHead(
-            in_chs=prev_chs, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)
+            in_features=prev_chs, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)

         named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)

diff --git a/timm/models/regnet.py b/timm/models/regnet.py
index 9d2528f6..63c9b57f 100644
--- a/timm/models/regnet.py
+++ b/timm/models/regnet.py
@@ -496,7 +496,7 @@ class RegNet(nn.Module):
         self.final_conv = get_act_layer(cfg.act_layer)() if final_act else nn.Identity()
         self.num_features = prev_width
         self.head = ClassifierHead(
-            in_chs=self.num_features, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)
+            in_features=self.num_features, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)

         named_apply(partial(_init_weights, zero_init_last=zero_init_last), self)

diff --git a/timm/models/xception_aligned.py b/timm/models/xception_aligned.py
index e3348e64..6bb7085f 100644
--- a/timm/models/xception_aligned.py
+++ b/timm/models/xception_aligned.py
@@ -216,7 +216,7 @@ class XceptionAligned(nn.Module):
             num_chs=self.num_features, reduction=curr_stride, module='blocks.' + str(len(self.blocks) - 1))]
         self.act = act_layer(inplace=True) if preact else nn.Identity()
         self.head = ClassifierHead(
-            in_chs=self.num_features, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)
+            in_features=self.num_features, num_classes=num_classes, pool_type=global_pool, drop_rate=drop_rate)

     @torch.jit.ignore
     def group_matcher(self, coarse=False):

From bed350f5e584241a753d22b94ab36146ad824c2e Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 20 Jan 2023 14:45:25 -0800
Subject: [PATCH 25/34] Push all MaxxViT weights to HF hub, cleanup impl, add
 feature map extraction support and promote to 'std' architecture. Fix norm
 head for proper embedding / feat map output. Add new in12k + ft 1k weights.

---
 tests/test_models.py   |   5 +-
 timm/models/maxxvit.py | 453 +++++++++++++++++++++++++----------------
 2 files changed, 286 insertions(+), 172 deletions(-)

diff --git a/tests/test_models.py b/tests/test_models.py
index 3e91d9a8..4ad18477 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -27,8 +27,9 @@ NON_STD_FILTERS = [
     'vit_*', 'tnt_*', 'pit_*', 'swin_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*',
     'convit_*', 'levit*', 'visformer*', 'deit*', 'jx_nest_*', 'nest_*', 'xcit_*', 'crossvit_*', 'beit*',
     'poolformer_*', 'volo_*', 'sequencer2d_*', 'swinv2_*', 'pvt_v2*', 'mvitv2*', 'gcvit*', 'efficientformer*',
-    'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', 'eva_*', 'flexivit*'
+    'eva_*', 'flexivit*'
 ]
+#'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', '
 NUM_NON_STD = len(NON_STD_FILTERS)

 # exclude models that cause specific test failures
@@ -53,7 +54,7 @@ MAX_JIT_SIZE = 320
 TARGET_FFEAT_SIZE = 96
 MAX_FFEAT_SIZE = 256
 TARGET_FWD_FX_SIZE = 128
-MAX_FWD_FX_SIZE = 224
+MAX_FWD_FX_SIZE = 256
 TARGET_BWD_FX_SIZE = 128
 MAX_BWD_FX_SIZE = 224

diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py
index dd424078..e730fa30 100644
--- a/timm/models/maxxvit.py
+++ b/timm/models/maxxvit.py
@@ -12,9 +12,6 @@ These configs work well and appear to be a bit faster / lower resource than the
 The models without extra prefix / suffix' (coatnet_0_224, maxvit_tiny_224, etc), are intended to match
 paper, BUT, without any official pretrained weights it's difficult to confirm a 100% match.

-# FIXME / WARNING
-This impl remains a WIP, some configs and models may vanish or change...
-
 Papers:

 MaxViT: Multi-Axis Vision Transformer - https://arxiv.org/abs/2204.01697
@@ -76,6 +73,8 @@ class MaxxVitTransformerCfg:
     partition_ratio: int = 32
     window_size: Optional[Tuple[int, int]] = None
     grid_size: Optional[Tuple[int, int]] = None
+    no_block_attn: bool = False  # disable window block attention for maxvit (ie only grid)
+    use_nchw_attn: bool = False  # for MaxViT variants (not used for CoAt), keep tensors in NCHW order
     init_values: Optional[float] = None
     act_layer: str = 'gelu'
     norm_layer: str = 'layernorm2d'
@@ -889,19 +888,17 @@ class MaxxVitBlock(nn.Module):
             stride: int = 1,
             conv_cfg: MaxxVitConvCfg = MaxxVitConvCfg(),
             transformer_cfg: MaxxVitTransformerCfg = MaxxVitTransformerCfg(),
-            use_nchw_attn: bool = False,  # FIXME move to cfg? 
True is ~20-30% faster on TPU, 5-10% slower on GPU - use_block_attn: bool = True, # FIXME for testing ConvNeXt conv w/o block attention drop_path: float = 0., ): super().__init__() + self.nchw_attn = transformer_cfg.use_nchw_attn conv_cls = ConvNeXtBlock if conv_cfg.block_type == 'convnext' else MbConvBlock self.conv = conv_cls(dim, dim_out, stride=stride, cfg=conv_cfg, drop_path=drop_path) attn_kwargs = dict(dim=dim_out, cfg=transformer_cfg, drop_path=drop_path) - partition_layer = PartitionAttention2d if use_nchw_attn else PartitionAttentionCl - self.nchw_attn = use_nchw_attn - self.attn_block = partition_layer(**attn_kwargs) if use_block_attn else None + partition_layer = PartitionAttention2d if self.nchw_attn else PartitionAttentionCl + self.attn_block = None if transformer_cfg.no_block_attn else partition_layer(**attn_kwargs) self.attn_grid = partition_layer(partition_type='grid', **attn_kwargs) def init_weights(self, scheme=''): @@ -1084,26 +1081,48 @@ class NormMlpHead(nn.Module): hidden_size=None, pool_type='avg', drop_rate=0., - norm_layer=nn.LayerNorm, - act_layer=nn.Tanh, + norm_layer='layernorm2d', + act_layer='tanh', ): super().__init__() self.drop_rate = drop_rate + self.in_features = in_features + self.hidden_size = hidden_size self.num_features = in_features + self.use_conv = not pool_type + norm_layer = get_norm_layer(norm_layer) + act_layer = get_act_layer(act_layer) + linear_layer = partial(nn.Conv2d, kernel_size=1) if self.use_conv else nn.Linear self.global_pool = SelectAdaptivePool2d(pool_type=pool_type) self.norm = norm_layer(in_features) self.flatten = nn.Flatten(1) if pool_type else nn.Identity() if hidden_size: self.pre_logits = nn.Sequential(OrderedDict([ - ('fc', nn.Linear(in_features, hidden_size)), + ('fc', linear_layer(in_features, hidden_size)), ('act', act_layer()), ])) self.num_features = hidden_size else: self.pre_logits = nn.Identity() self.drop = nn.Dropout(self.drop_rate) - self.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity() + self.fc = linear_layer(self.num_features, num_classes) if num_classes > 0 else nn.Identity() + + def reset(self, num_classes, global_pool=None): + if global_pool is not None: + self.global_pool = SelectAdaptivePool2d(pool_type=global_pool) + self.flatten = nn.Flatten(1) if global_pool else nn.Identity() + self.use_conv = self.global_pool.is_identity() + linear_layer = partial(nn.Conv2d, kernel_size=1) if self.use_conv else nn.Linear + if self.hidden_size: + if ((isinstance(self.pre_logits.fc, nn.Conv2d) and not self.use_conv) or + (isinstance(self.pre_logits.fc, nn.Linear) and self.use_conv)): + with torch.no_grad(): + new_fc = linear_layer(self.in_features, self.hidden_size) + new_fc.weight.copy_(self.pre_logits.fc.weight.reshape(new_fc.weight.shape)) + new_fc.bias.copy_(self.pre_logits.fc.bias) + self.pre_logits.fc = new_fc + self.fc = linear_layer(self.num_features, num_classes) if num_classes > 0 else nn.Identity() def forward(self, x, pre_logits: bool = False): x = self.global_pool(x) @@ -1163,6 +1182,7 @@ class MaxxVit(nn.Module): self.num_features = self.embed_dim = cfg.embed_dim[-1] self.drop_rate = drop_rate self.grad_checkpointing = False + self.feature_info = [] self.stem = Stem( in_chs=in_chans, @@ -1173,8 +1193,8 @@ class MaxxVit(nn.Module): norm_layer=cfg.conv_cfg.norm_layer, norm_eps=cfg.conv_cfg.norm_eps, ) - stride = self.stem.stride + self.feature_info += [dict(num_chs=self.stem.out_chs, reduction=2, module='stem')] feat_size = tuple([i // s for i, s in zip(img_size, 
to_2tuple(stride))]) num_stages = len(cfg.embed_dim) @@ -1198,15 +1218,17 @@ class MaxxVit(nn.Module): )] stride *= stage_stride in_chs = out_chs + self.feature_info += [dict(num_chs=out_chs, reduction=stride, module=f'stages.{i}')] self.stages = nn.Sequential(*stages) final_norm_layer = partial(get_norm_layer(cfg.transformer_cfg.norm_layer), eps=cfg.transformer_cfg.norm_eps) - if cfg.head_hidden_size: + self.head_hidden_size = cfg.head_hidden_size + if self.head_hidden_size: self.norm = nn.Identity() self.head = NormMlpHead( self.num_features, num_classes, - hidden_size=cfg.head_hidden_size, + hidden_size=self.head_hidden_size, pool_type=global_pool, drop_rate=drop_rate, norm_layer=final_norm_layer, @@ -1253,9 +1275,7 @@ class MaxxVit(nn.Module): def reset_classifier(self, num_classes, global_pool=None): self.num_classes = num_classes - if global_pool is None: - global_pool = self.head.global_pool.pool_type - self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate) + self.head.reset(num_classes, global_pool) def forward_features(self, x): x = self.stem(x) @@ -1376,6 +1396,7 @@ def _next_cfg( transformer_norm_layer='layernorm2d', transformer_norm_layer_cl='layernorm', window_size=None, + no_block_attn=False, init_values=1e-6, rel_pos_type='mlp', # MLP by default for maxxvit rel_pos_dim=512, @@ -1396,6 +1417,7 @@ def _next_cfg( expand_first=False, pool_type=pool_type, window_size=window_size, + no_block_attn=no_block_attn, # enabled for MaxxViT-V2 init_values=init_values[1], norm_layer=transformer_norm_layer, norm_layer_cl=transformer_norm_layer_cl, @@ -1422,8 +1444,8 @@ def _tf_cfg(): model_cfgs = dict( - # Fiddling with configs / defaults / still pretraining - coatnet_pico_rw_224=MaxxVitCfg( + # timm specific CoAtNet configs + coatnet_pico_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(2, 3, 5, 2), stem_width=(32, 64), @@ -1432,7 +1454,7 @@ model_cfgs = dict( conv_attn_ratio=0.25, ), ), - coatnet_nano_rw_224=MaxxVitCfg( + coatnet_nano_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(3, 4, 6, 3), stem_width=(32, 64), @@ -1442,7 +1464,7 @@ model_cfgs = dict( conv_attn_ratio=0.25, ), ), - coatnet_0_rw_224=MaxxVitCfg( + coatnet_0_rw=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 3, 7, 2), # deeper than paper '0' model stem_width=(32, 64), @@ -1451,7 +1473,7 @@ model_cfgs = dict( transformer_shortcut_bias=False, ), ), - coatnet_1_rw_224=MaxxVitCfg( + coatnet_1_rw=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 6, 14, 2), stem_width=(32, 64), @@ -1461,7 +1483,7 @@ model_cfgs = dict( transformer_shortcut_bias=False, ) ), - coatnet_2_rw_224=MaxxVitCfg( + coatnet_2_rw=MaxxVitCfg( embed_dim=(128, 256, 512, 1024), depths=(2, 6, 14, 2), stem_width=(64, 128), @@ -1471,7 +1493,7 @@ model_cfgs = dict( #init_values=1e-6, ), ), - coatnet_3_rw_224=MaxxVitCfg( + coatnet_3_rw=MaxxVitCfg( embed_dim=(192, 384, 768, 1536), depths=(2, 6, 14, 2), stem_width=(96, 192), @@ -1482,8 +1504,8 @@ model_cfgs = dict( ), ), - # Highly experimental configs - coatnet_bn_0_rw_224=MaxxVitCfg( + # Experimental CoAtNet configs w/ ImageNet-1k train (different norm layers, MLP rel-pos) + coatnet_bn_0_rw=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 3, 7, 2), # deeper than paper '0' model stem_width=(32, 64), @@ -1494,7 +1516,7 @@ model_cfgs = dict( transformer_norm_layer='batchnorm2d', ) ), - coatnet_rmlp_nano_rw_224=MaxxVitCfg( + coatnet_rmlp_nano_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(3, 4, 6, 3), stem_width=(32, 64), @@ -1505,7 
+1527,7 @@ model_cfgs = dict( rel_pos_dim=384, ), ), - coatnet_rmlp_0_rw_224=MaxxVitCfg( + coatnet_rmlp_0_rw=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 3, 7, 2), # deeper than paper '0' model stem_width=(32, 64), @@ -1514,7 +1536,7 @@ model_cfgs = dict( rel_pos_type='mlp', ), ), - coatnet_rmlp_1_rw_224=MaxxVitCfg( + coatnet_rmlp_1_rw=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 6, 14, 2), stem_width=(32, 64), @@ -1526,7 +1548,7 @@ model_cfgs = dict( rel_pos_dim=384, # was supposed to be 512, woops ), ), - coatnet_rmlp_1_rw2_224=MaxxVitCfg( + coatnet_rmlp_1_rw2=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 6, 14, 2), stem_width=(32, 64), @@ -1536,7 +1558,7 @@ model_cfgs = dict( rel_pos_dim=512, # was supposed to be 512, woops ), ), - coatnet_rmlp_2_rw_224=MaxxVitCfg( + coatnet_rmlp_2_rw=MaxxVitCfg( embed_dim=(128, 256, 512, 1024), depths=(2, 6, 14, 2), stem_width=(64, 128), @@ -1547,7 +1569,7 @@ model_cfgs = dict( rel_pos_type='mlp' ), ), - coatnet_rmlp_3_rw_224=MaxxVitCfg( + coatnet_rmlp_3_rw=MaxxVitCfg( embed_dim=(192, 384, 768, 1536), depths=(2, 6, 14, 2), stem_width=(96, 192), @@ -1559,14 +1581,14 @@ model_cfgs = dict( ), ), - coatnet_nano_cc_224=MaxxVitCfg( + coatnet_nano_cc=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(3, 4, 6, 3), stem_width=(32, 64), block_type=('C', 'C', ('C', 'T'), ('C', 'T')), **_rw_coat_cfg(), ), - coatnext_nano_rw_224=MaxxVitCfg( + coatnext_nano_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(3, 4, 6, 3), stem_width=(32, 64), @@ -1578,89 +1600,95 @@ model_cfgs = dict( ), # Trying to be like the CoAtNet paper configs - coatnet_0_224=MaxxVitCfg( + coatnet_0=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 3, 5, 2), stem_width=64, + head_hidden_size=768, ), - coatnet_1_224=MaxxVitCfg( + coatnet_1=MaxxVitCfg( embed_dim=(96, 192, 384, 768), depths=(2, 6, 14, 2), stem_width=64, + head_hidden_size=768, ), - coatnet_2_224=MaxxVitCfg( + coatnet_2=MaxxVitCfg( embed_dim=(128, 256, 512, 1024), depths=(2, 6, 14, 2), stem_width=128, + head_hidden_size=1024, ), - coatnet_3_224=MaxxVitCfg( + coatnet_3=MaxxVitCfg( embed_dim=(192, 384, 768, 1536), depths=(2, 6, 14, 2), stem_width=192, + head_hidden_size=1536, ), - coatnet_4_224=MaxxVitCfg( + coatnet_4=MaxxVitCfg( embed_dim=(192, 384, 768, 1536), depths=(2, 12, 28, 2), stem_width=192, + head_hidden_size=1536, ), - coatnet_5_224=MaxxVitCfg( + coatnet_5=MaxxVitCfg( embed_dim=(256, 512, 1280, 2048), depths=(2, 12, 28, 2), stem_width=192, + head_hidden_size=2048, ), # Experimental MaxVit configs - maxvit_pico_rw_256=MaxxVitCfg( + maxvit_pico_rw=MaxxVitCfg( embed_dim=(32, 64, 128, 256), depths=(2, 2, 5, 2), block_type=('M',) * 4, stem_width=(24, 32), **_rw_max_cfg(), ), - maxvit_nano_rw_256=MaxxVitCfg( + maxvit_nano_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(1, 2, 3, 1), block_type=('M',) * 4, stem_width=(32, 64), **_rw_max_cfg(), ), - maxvit_tiny_rw_224=MaxxVitCfg( + maxvit_tiny_rw=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(2, 2, 5, 2), block_type=('M',) * 4, stem_width=(32, 64), **_rw_max_cfg(), ), - maxvit_tiny_rw_256=MaxxVitCfg( + maxvit_tiny_pm=MaxxVitCfg( embed_dim=(64, 128, 256, 512), depths=(2, 2, 5, 2), - block_type=('M',) * 4, + block_type=('PM',) * 4, stem_width=(32, 64), **_rw_max_cfg(), ), - maxvit_rmlp_pico_rw_256=MaxxVitCfg( + maxvit_rmlp_pico_rw=MaxxVitCfg( embed_dim=(32, 64, 128, 256), depths=(2, 2, 5, 2), block_type=('M',) * 4, stem_width=(24, 32), **_rw_max_cfg(rel_pos_type='mlp'), ), - maxvit_rmlp_nano_rw_256=MaxxVitCfg( + 
maxvit_rmlp_nano_rw=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(1, 2, 3, 1),
         block_type=('M',) * 4,
         stem_width=(32, 64),
         **_rw_max_cfg(rel_pos_type='mlp'),
     ),
-    maxvit_rmlp_tiny_rw_256=MaxxVitCfg(
+    maxvit_rmlp_tiny_rw=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(2, 2, 5, 2),
         block_type=('M',) * 4,
         stem_width=(32, 64),
         **_rw_max_cfg(rel_pos_type='mlp'),
     ),
-    maxvit_rmlp_small_rw_224=MaxxVitCfg(
+    maxvit_rmlp_small_rw=MaxxVitCfg(
         embed_dim=(96, 192, 384, 768),
         depths=(2, 2, 5, 2),
         block_type=('M',) * 4,
@@ -1670,27 +1698,7 @@ model_cfgs = dict(
             init_values=1e-6,
         ),
     ),
-    maxvit_rmlp_small_rw_256=MaxxVitCfg(
-        embed_dim=(96, 192, 384, 768),
-        depths=(2, 2, 5, 2),
-        block_type=('M',) * 4,
-        stem_width=(32, 64),
-        **_rw_max_cfg(
-            rel_pos_type='mlp',
-            init_values=1e-6,
-        ),
-    ),
-    maxvit_rmlp_base_rw_224=MaxxVitCfg(
-        embed_dim=(96, 192, 384, 768),
-        depths=(2, 6, 14, 2),
-        block_type=('M',) * 4,
-        stem_width=(32, 64),
-        head_hidden_size=768,
-        **_rw_max_cfg(
-            rel_pos_type='mlp',
-        ),
-    ),
-    maxvit_rmlp_base_rw_384=MaxxVitCfg(
+    maxvit_rmlp_base_rw=MaxxVitCfg(
         embed_dim=(96, 192, 384, 768),
         depths=(2, 6, 14, 2),
         block_type=('M',) * 4,
@@ -1701,15 +1709,7 @@ model_cfgs = dict(
         ),
     ),

-    maxvit_tiny_pm_256=MaxxVitCfg(
-        embed_dim=(64, 128, 256, 512),
-        depths=(2, 2, 5, 2),
-        block_type=('PM',) * 4,
-        stem_width=(32, 64),
-        **_rw_max_cfg(),
-    ),
-
-    maxxvit_rmlp_nano_rw_256=MaxxVitCfg(
+    maxxvit_rmlp_nano_rw=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(1, 2, 3, 1),
         block_type=('M',) * 4,
@@ -1717,33 +1717,50 @@ model_cfgs = dict(
         weight_init='normal',
         **_next_cfg(),
     ),
-    maxxvit_rmlp_tiny_rw_256=MaxxVitCfg(
+    maxxvit_rmlp_tiny_rw=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(2, 2, 5, 2),
         block_type=('M',) * 4,
         stem_width=(32, 64),
         **_next_cfg(),
     ),
-    maxxvit_rmlp_small_rw_256=MaxxVitCfg(
+    maxxvit_rmlp_small_rw=MaxxVitCfg(
         embed_dim=(96, 192, 384, 768),
         depths=(2, 2, 5, 2),
         block_type=('M',) * 4,
         stem_width=(48, 96),
         **_next_cfg(),
     ),
-    maxxvit_rmlp_base_rw_224=MaxxVitCfg(
+
+    maxxvitv2_nano_rw=MaxxVitCfg(
         embed_dim=(96, 192, 384, 768),
-        depths=(2, 6, 14, 2),
+        depths=(1, 2, 3, 1),
         block_type=('M',) * 4,
         stem_width=(48, 96),
-        **_next_cfg(),
+        weight_init='normal',
+        **_next_cfg(
+            no_block_attn=True,
+            rel_pos_type='bias',
+        ),
     ),
-    maxxvit_rmlp_large_rw_224=MaxxVitCfg(
+    maxxvitv2_rmlp_base_rw=MaxxVitCfg(
         embed_dim=(128, 256, 512, 1024),
         depths=(2, 6, 12, 2),
         block_type=('M',) * 4,
         stem_width=(64, 128),
-        **_next_cfg(),
+        **_next_cfg(
+            no_block_attn=True,
+        ),
+    ),
+    maxxvitv2_rmlp_large_rw=MaxxVitCfg(
+        embed_dim=(160, 320, 640, 1280),
+        depths=(2, 6, 16, 2),
+        block_type=('M',) * 4,
+        stem_width=(80, 160),
+        head_hidden_size=1280,
+        **_next_cfg(
+            no_block_attn=True,
+        ),
     ),

     # Trying to be like the MaxViT paper configs
@@ -1795,11 +1812,29 @@ model_cfgs = dict(
 )


+def checkpoint_filter_fn(state_dict, model: nn.Module):
+    model_state_dict = model.state_dict()
+    out_dict = {}
+    for k, v in state_dict.items():
+        if k in model_state_dict and v.ndim != model_state_dict[k].ndim and v.numel() == model_state_dict[k].numel():
+            # adapt between conv2d / linear layers
+            assert v.ndim in (2, 4)
+            v = v.reshape(model_state_dict[k].shape)
+        out_dict[k] = v
+    return out_dict
+
+
 def _create_maxxvit(variant, cfg_variant=None, pretrained=False, **kwargs):
+    if cfg_variant is None:
+        if variant in model_cfgs:
+            cfg_variant = variant
+        else:
+            cfg_variant = '_'.join(variant.split('_')[:-1])
     return build_model_with_cfg(
         MaxxVit, variant, pretrained,
-        model_cfg=model_cfgs[variant] if not cfg_variant else model_cfgs[cfg_variant],
+        model_cfg=model_cfgs[cfg_variant],
         feature_cfg=dict(flatten_sequential=True),
+        pretrained_filter_fn=checkpoint_filter_fn,
         **kwargs)
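checkpoint_filter_fn above only reshapes tensors whose rank changed while the parameter count did not, e.g. a Linear head checkpoint loading into a 1x1 Conv2d head. The equivalence it relies on, in isolation (a standalone sketch, not timm code):

import torch
import torch.nn as nn

linear = nn.Linear(768, 1000)
conv = nn.Conv2d(768, 1000, kernel_size=1)

# same numel, different ndim: (1000, 768) vs (1000, 768, 1, 1)
with torch.no_grad():
    conv.weight.copy_(linear.weight.reshape(conv.weight.shape))
    conv.bias.copy_(linear.bias)

x = torch.randn(2, 768)
print(torch.allclose(linear(x), conv(x[:, :, None, None]).flatten(1), atol=1e-5))  # True

Note also that _create_maxxvit now derives the config name by stripping the trailing size tag, so a variant such as 'maxvit_rmlp_base_rw_384' falls back to the 'maxvit_rmlp_base_rw' entry in model_cfgs.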
@@ -1815,155 +1850,218 @@ def _cfg(url='', **kwargs):

 default_cfgs = generate_default_cfgs({
-    # Fiddling with configs / defaults / still pretraining
-    'coatnet_pico_rw_224': _cfg(url=''),
-    'coatnet_nano_rw_224': _cfg(
+    # timm specific CoAtNet configs, ImageNet-1k pretrain, fixed rel-pos
+    'coatnet_pico_rw_224.untrained': _cfg(url=''),
+    'coatnet_nano_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_nano_rw_224_sw-f53093b4.pth',
         crop_pct=0.9),
-    'coatnet_0_rw_224': _cfg(
+    'coatnet_0_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_0_rw_224_sw-a6439706.pth'),
-    'coatnet_1_rw_224': _cfg(
+    'coatnet_1_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_1_rw_224_sw-5cae1ea8.pth'
     ),
-    'coatnet_2_rw_224': _cfg(url=''),
-    'coatnet_3_rw_224': _cfg(url=''),

-    # Highly experimental configs
-    'coatnet_bn_0_rw_224': _cfg(
+    # timm specific CoAtNet configs, ImageNet-12k pretrain w/ 1k fine-tune, fixed rel-pos
+    'coatnet_2_rw_224.sw_in12k_ft_in1k': _cfg(
+        hf_hub_id='timm/'),
+    #'coatnet_3_rw_224.untrained': _cfg(url=''),
+
+    # Experimental CoAtNet configs w/ ImageNet-12k pretrain -> 1k fine-tune (different norm layers, MLP rel-pos)
+    'coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k': _cfg(
+        hf_hub_id='timm/'),
+    'coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k': _cfg(
+        hf_hub_id='timm/'),
+    'coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k': _cfg(
+        hf_hub_id='timm/',
+        input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'),
+
+    # Experimental CoAtNet configs w/ ImageNet-1k train (different norm layers, MLP rel-pos)
+    'coatnet_bn_0_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_bn_0_rw_224_sw-c228e218.pth',
         mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD,
         crop_pct=0.95),
-    'coatnet_rmlp_nano_rw_224': _cfg(
+    'coatnet_rmlp_nano_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_nano_rw_224_sw-bd1d51b3.pth',
         crop_pct=0.9),
-    'coatnet_rmlp_0_rw_224': _cfg(url=''),
-    'coatnet_rmlp_1_rw_224': _cfg(
+    'coatnet_rmlp_0_rw_224.untrained': _cfg(url=''),
+    'coatnet_rmlp_1_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_1_rw_224_sw-9051e6c3.pth'),
-    'coatnet_rmlp_1_rw2_224': _cfg(url=''),
-    'coatnet_rmlp_2_rw_224': _cfg(
+    'coatnet_rmlp_2_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnet_rmlp_2_rw_224_sw-5ccfac55.pth'),
-    'coatnet_rmlp_3_rw_224': _cfg(url=''),
-    'coatnet_nano_cc_224': _cfg(url=''),
-    'coatnext_nano_rw_224': _cfg(
+    'coatnet_rmlp_3_rw_224.untrained': _cfg(url=''),
+    'coatnet_nano_cc_224.untrained': _cfg(url=''),
+    'coatnext_nano_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/coatnext_nano_rw_224_ad-22cb71c2.pth',
         crop_pct=0.9),

-    # Trying to be like the CoAtNet paper configs
-    'coatnet_0_224': _cfg(url=''),
-    'coatnet_1_224': _cfg(url=''),
-    'coatnet_2_224': _cfg(url=''),
-    'coatnet_3_224': _cfg(url=''),
-    'coatnet_4_224': _cfg(url=''),
-    'coatnet_5_224': _cfg(url=''),
-
-    # Experimental configs
-    'maxvit_pico_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_nano_rw_256': _cfg(
+    # ImageNet-12k pretrain CoAtNet
+    'coatnet_2_rw_224.sw_in12k': _cfg(
+        hf_hub_id='timm/',
+        num_classes=11821),
+    'coatnet_3_rw_224.sw_in12k': _cfg(
+        hf_hub_id='timm/',
+        num_classes=11821),
+    'coatnet_rmlp_1_rw2_224.sw_in12k': _cfg(
+        hf_hub_id='timm/',
+        num_classes=11821),
+    'coatnet_rmlp_2_rw_224.sw_in12k': _cfg(
+        hf_hub_id='timm/',
+        num_classes=11821),
+
+    # Trying to be like the CoAtNet paper configs (will adapt if 'tf' weights are ever released)
+    'coatnet_0_224.untrained': _cfg(url=''),
+    'coatnet_1_224.untrained': _cfg(url=''),
+    'coatnet_2_224.untrained': _cfg(url=''),
+    'coatnet_3_224.untrained': _cfg(url=''),
+    'coatnet_4_224.untrained': _cfg(url=''),
+    'coatnet_5_224.untrained': _cfg(url=''),
+
+    # timm specific MaxVit configs, ImageNet-1k pretrain or untrained
+    'maxvit_pico_rw_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
+    'maxvit_nano_rw_256.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_nano_rw_256_sw-fb127241.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_tiny_rw_224': _cfg(
+    'maxvit_tiny_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_tiny_rw_224_sw-7d0dffeb.pth'),
-    'maxvit_tiny_rw_256': _cfg(
+    'maxvit_tiny_rw_256.untrained': _cfg(
         url='',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_rmlp_pico_rw_256': _cfg(
+    'maxvit_tiny_pm_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
+
+    # timm specific MaxVit w/ MLP rel-pos, ImageNet-1k pretrain
+    'maxvit_rmlp_pico_rw_256.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_pico_rw_256_sw-8d82f2c6.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_rmlp_nano_rw_256': _cfg(
+    'maxvit_rmlp_nano_rw_256.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_nano_rw_256_sw-c17bb0d6.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_rmlp_tiny_rw_256': _cfg(
+    'maxvit_rmlp_tiny_rw_256.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_tiny_rw_256_sw-bbef0ff5.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_rmlp_small_rw_224': _cfg(
+    'maxvit_rmlp_small_rw_224.sw_in1k': _cfg(
+        hf_hub_id='timm/',
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_small_rw_224_sw-6ef0ae4f.pth',
         crop_pct=0.9,
     ),
-    'maxvit_rmlp_small_rw_256': _cfg(
+    'maxvit_rmlp_small_rw_256.untrained': _cfg(
         url='',
         input_size=(3, 256, 256), pool_size=(8, 8)),
-    'maxvit_rmlp_base_rw_224': _cfg(
-        url='',
-    ),
-    'maxvit_rmlp_base_rw_384': _cfg(
-        url='',
-        input_size=(3, 384, 384), pool_size=(12, 12)),
-    'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),

+    # timm specific MaxVit w/ ImageNet-12k pretrain and 1k fine-tune
+    'maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k': _cfg(
+        hf_hub_id='timm/',
+    ),
+
'maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + + # timm specific MaxVit w/ ImageNet-12k pretrain + 'maxvit_rmlp_base_rw_224.sw_in12k': _cfg( + hf_hub_id='timm/', + num_classes=11821, + ), - 'maxxvit_rmlp_nano_rw_256': _cfg( + # timm MaxxViT configs (ConvNeXt conv blocks mixed with MaxVit transformer blocks) + 'maxxvit_rmlp_nano_rw_256.sw_in1k': _cfg( + hf_hub_id='timm/', url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxxvit_rmlp_nano_rw_256_sw-0325d459.pth', input_size=(3, 256, 256), pool_size=(8, 8)), - 'maxxvit_rmlp_tiny_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)), - 'maxxvit_rmlp_small_rw_256': _cfg( + 'maxxvit_rmlp_tiny_rw_256.untrained': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)), + 'maxxvit_rmlp_small_rw_256.sw_in1k': _cfg( + hf_hub_id='timm/', url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxxvit_rmlp_small_rw_256_sw-37e217ff.pth', input_size=(3, 256, 256), pool_size=(8, 8)), - 'maxxvit_rmlp_base_rw_224': _cfg(url=''), - 'maxxvit_rmlp_large_rw_224': _cfg(url=''), + # timm MaxxViT-V2 configs (ConvNeXt conv blocks mixed with MaxVit transformer blocks, more width, no block attn) + 'maxxvitv2_nano_rw_256.sw_in1k': _cfg( + hf_hub_id='timm/', + input_size=(3, 256, 256), pool_size=(8, 8)), + 'maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k': _cfg( + hf_hub_id='timm/'), + 'maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k': _cfg( + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), + 'maxxvitv2_rmlp_large_rw_224.untrained': _cfg(url=''), + + 'maxxvitv2_rmlp_base_rw_224.sw_in12k': _cfg( + hf_hub_id='timm/', + num_classes=11821), # MaxViT models ported from official Tensorflow impl 'maxvit_tiny_tf_224.in1k': _cfg( - hf_hub_id='timm/maxvit_tiny_tf_224.in1k', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'maxvit_tiny_tf_384.in1k': _cfg( - hf_hub_id='timm/maxvit_tiny_tf_384.in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_tiny_tf_512.in1k': _cfg( - hf_hub_id='timm/maxvit_tiny_tf_512.in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), 'maxvit_small_tf_224.in1k': _cfg( - hf_hub_id='timm/maxvit_small_tf_224.in1k', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'maxvit_small_tf_384.in1k': _cfg( - hf_hub_id='timm/maxvit_small_tf_384.in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_small_tf_512.in1k': _cfg( - hf_hub_id='timm/maxvit_small_tf_512.in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), 'maxvit_base_tf_224.in1k': _cfg( - hf_hub_id='timm/maxvit_base_tf_224.in1k', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'maxvit_base_tf_384.in1k': _cfg( - hf_hub_id='timm/maxvit_base_tf_384.in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, 
crop_mode='squash'), 'maxvit_base_tf_512.in1k': _cfg( - hf_hub_id='timm/maxvit_base_tf_512.in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), 'maxvit_large_tf_224.in1k': _cfg( - hf_hub_id='timm/maxvit_large_tf_224.in1k', + hf_hub_id='timm/', mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 'maxvit_large_tf_384.in1k': _cfg( - hf_hub_id='timm/maxvit_large_tf_384.in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_large_tf_512.in1k': _cfg( - hf_hub_id='timm/maxvit_large_tf_512.in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), 'maxvit_base_tf_224.in21k': _cfg( url=''), 'maxvit_base_tf_384.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_base_tf_384.in21k_ft_in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_base_tf_512.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_base_tf_512.in21k_ft_in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), 'maxvit_large_tf_224.in21k': _cfg( url=''), 'maxvit_large_tf_384.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_large_tf_384.in21k_ft_in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_large_tf_512.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_large_tf_512.in21k_ft_in1k', + hf_hub_id='timm/', input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), 'maxvit_xlarge_tf_224.in21k': _cfg( url=''), 'maxvit_xlarge_tf_384.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_xlarge_tf_384.in21k_ft_in1k', - input_size=(3, 384, 384), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 384, 384), pool_size=(12, 12), crop_pct=1.0, crop_mode='squash'), 'maxvit_xlarge_tf_512.in21k_ft_in1k': _cfg( - hf_hub_id='timm/maxvit_xlarge_tf_512.in21k_ft_in1k', - input_size=(3, 512, 512), crop_pct=1.0, crop_mode='squash'), + hf_hub_id='timm/', + input_size=(3, 512, 512), pool_size=(16, 16), crop_pct=1.0, crop_mode='squash'), }) @@ -2027,6 +2125,11 @@ def coatnet_rmlp_2_rw_224(pretrained=False, **kwargs): return _create_maxxvit('coatnet_rmlp_2_rw_224', pretrained=pretrained, **kwargs) +@register_model +def coatnet_rmlp_2_rw_384(pretrained=False, **kwargs): + return _create_maxxvit('coatnet_rmlp_2_rw_384', pretrained=pretrained, **kwargs) + + @register_model def coatnet_rmlp_3_rw_224(pretrained=False, **kwargs): return _create_maxxvit('coatnet_rmlp_3_rw_224', pretrained=pretrained, **kwargs) @@ -2148,13 +2251,23 @@ def maxxvit_rmlp_small_rw_256(pretrained=False, **kwargs): @register_model -def maxxvit_rmlp_base_rw_224(pretrained=False, **kwargs): - return _create_maxxvit('maxxvit_rmlp_base_rw_224', pretrained=pretrained, **kwargs) +def maxxvitv2_nano_rw_256(pretrained=False, **kwargs): + return _create_maxxvit('maxxvitv2_nano_rw_256', pretrained=pretrained, **kwargs) + + +@register_model +def maxxvitv2_rmlp_base_rw_224(pretrained=False, **kwargs): + return _create_maxxvit('maxxvitv2_rmlp_base_rw_224', 
pretrained=pretrained, **kwargs) + + +@register_model +def maxxvitv2_rmlp_base_rw_384(pretrained=False, **kwargs): + return _create_maxxvit('maxxvitv2_rmlp_base_rw_384', pretrained=pretrained, **kwargs) @register_model -def maxxvit_rmlp_large_rw_224(pretrained=False, **kwargs): - return _create_maxxvit('maxxvit_rmlp_large_rw_224', pretrained=pretrained, **kwargs) +def maxxvitv2_rmlp_large_rw_224(pretrained=False, **kwargs): + return _create_maxxvit('maxxvitv2_rmlp_large_rw_224', pretrained=pretrained, **kwargs) @register_model From e9f1376cdee610daa67fb954aad170c8a6bf5695 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 20 Jan 2023 14:47:55 -0800 Subject: [PATCH 26/34] Cleanup resolve data config fns, add 'model' variant that takes model as first arg, make 'args' arg optional in original fn --- timm/data/__init__.py | 2 +- timm/data/config.py | 97 ++++++++++++++++++++++++++++--------------- 2 files changed, 65 insertions(+), 34 deletions(-) diff --git a/timm/data/__init__.py b/timm/data/__init__.py index 7cc7b0b0..9f62a7d5 100644 --- a/timm/data/__init__.py +++ b/timm/data/__init__.py @@ -1,6 +1,6 @@ from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\ rand_augment_transform, auto_augment_transform -from .config import resolve_data_config +from .config import resolve_data_config, resolve_model_data_config from .constants import * from .dataset import ImageDataset, IterableImageDataset, AugMixDataset from .dataset_factory import create_dataset diff --git a/timm/data/config.py b/timm/data/config.py index a65695d0..a6c2298c 100644 --- a/timm/data/config.py +++ b/timm/data/config.py @@ -6,16 +6,18 @@ _logger = logging.getLogger(__name__) def resolve_data_config( - args, - default_cfg=None, + args=None, + pretrained_cfg=None, model=None, use_test_size=False, verbose=False ): - new_config = {} - default_cfg = default_cfg or {} - if not default_cfg and model is not None and hasattr(model, 'default_cfg'): - default_cfg = model.default_cfg + assert model or args or pretrained_cfg, "At least one of model, args, or pretrained_cfg required for data config." 
+    args = args or {}
+    pretrained_cfg = pretrained_cfg or {}
+    if not pretrained_cfg and model is not None and hasattr(model, 'pretrained_cfg'):
+        pretrained_cfg = model.pretrained_cfg
+    data_config = {}

     # Resolve input/image size
     in_chans = 3
@@ -32,65 +34,94 @@
         assert isinstance(args['img_size'], int)
         input_size = (in_chans, args['img_size'], args['img_size'])
     else:
-        if use_test_size and default_cfg.get('test_input_size', None) is not None:
-            input_size = default_cfg['test_input_size']
-        elif default_cfg.get('input_size', None) is not None:
-            input_size = default_cfg['input_size']
-    new_config['input_size'] = input_size
+        if use_test_size and pretrained_cfg.get('test_input_size', None) is not None:
+            input_size = pretrained_cfg['test_input_size']
+        elif pretrained_cfg.get('input_size', None) is not None:
+            input_size = pretrained_cfg['input_size']
+    data_config['input_size'] = input_size

     # resolve interpolation method
-    new_config['interpolation'] = 'bicubic'
+    data_config['interpolation'] = 'bicubic'
     if args.get('interpolation', None):
-        new_config['interpolation'] = args['interpolation']
-    elif default_cfg.get('interpolation', None):
-        new_config['interpolation'] = default_cfg['interpolation']
+        data_config['interpolation'] = args['interpolation']
+    elif pretrained_cfg.get('interpolation', None):
+        data_config['interpolation'] = pretrained_cfg['interpolation']

     # resolve dataset + model mean for normalization
-    new_config['mean'] = IMAGENET_DEFAULT_MEAN
+    data_config['mean'] = IMAGENET_DEFAULT_MEAN
     if args.get('mean', None) is not None:
         mean = tuple(args['mean'])
         if len(mean) == 1:
             mean = tuple(list(mean) * in_chans)
         else:
             assert len(mean) == in_chans
-        new_config['mean'] = mean
-    elif default_cfg.get('mean', None):
-        new_config['mean'] = default_cfg['mean']
+        data_config['mean'] = mean
+    elif pretrained_cfg.get('mean', None):
+        data_config['mean'] = pretrained_cfg['mean']

     # resolve dataset + model std deviation for normalization
-    new_config['std'] = IMAGENET_DEFAULT_STD
+    data_config['std'] = IMAGENET_DEFAULT_STD
     if args.get('std', None) is not None:
         std = tuple(args['std'])
         if len(std) == 1:
             std = tuple(list(std) * in_chans)
         else:
             assert len(std) == in_chans
-        new_config['std'] = std
-    elif default_cfg.get('std', None):
-        new_config['std'] = default_cfg['std']
+        data_config['std'] = std
+    elif pretrained_cfg.get('std', None):
+        data_config['std'] = pretrained_cfg['std']

     # resolve default inference crop
     crop_pct = DEFAULT_CROP_PCT
     if args.get('crop_pct', None):
         crop_pct = args['crop_pct']
     else:
-        if use_test_size and default_cfg.get('test_crop_pct', None):
-            crop_pct = default_cfg['test_crop_pct']
-        elif default_cfg.get('crop_pct', None):
-            crop_pct = default_cfg['crop_pct']
-    new_config['crop_pct'] = crop_pct
+        if use_test_size and pretrained_cfg.get('test_crop_pct', None):
+            crop_pct = pretrained_cfg['test_crop_pct']
+        elif pretrained_cfg.get('crop_pct', None):
+            crop_pct = pretrained_cfg['crop_pct']
+    data_config['crop_pct'] = crop_pct

     # resolve default crop percentage
     crop_mode = DEFAULT_CROP_MODE
     if args.get('crop_mode', None):
         crop_mode = args['crop_mode']
-    elif default_cfg.get('crop_mode', None):
-        crop_mode = default_cfg['crop_mode']
-    new_config['crop_mode'] = crop_mode
+    elif pretrained_cfg.get('crop_mode', None):
+        crop_mode = pretrained_cfg['crop_mode']
+    data_config['crop_mode'] = crop_mode

     if verbose:
         _logger.info('Data processing configuration for current model + dataset:')
-        for n, v in new_config.items():
+        for n, v in data_config.items():
             _logger.info('\t%s: %s' % (n, str(v)))

-    return new_config
+    return data_config
+
+
+def resolve_model_data_config(
+        model,
+        args=None,
+        pretrained_cfg=None,
+        use_test_size=False,
+        verbose=False,
+):
+    """ Resolve Model Data Config
+    This is equivalent to resolve_data_config() but with arguments re-ordered to put model first.
+
+    Args:
+        model (nn.Module): the model instance
+        args (dict): command line arguments / configuration in dict form (overrides pretrained_cfg)
+        pretrained_cfg (dict): pretrained model config (overrides pretrained_cfg attached to model)
+        use_test_size (bool): use the test time input resolution (if one exists) instead of default train resolution
+        verbose (bool): enable extra logging of resolved values
+
+    Returns:
+        dictionary of config
+    """
+    return resolve_data_config(
+        args=args,
+        pretrained_cfg=pretrained_cfg,
+        model=model,
+        use_test_size=use_test_size,
+        verbose=verbose,
+    )
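With the config now resolvable from a model instance, typical inference setup becomes a two-liner. A usage sketch, assuming the installed timm accepts all resolved config keys in create_transform:

import timm
from timm.data import resolve_model_data_config, create_transform

model = timm.create_model('convnext_tiny', pretrained=False)
data_cfg = resolve_model_data_config(model, use_test_size=True)
transform = create_transform(**data_cfg, is_training=False)
print(data_cfg['input_size'], data_cfg['crop_pct'])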
From 32f252381d476be94a0230655c439cf20730e214 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 20 Jan 2023 14:47:55 -0800
Subject: [PATCH 27/34] Change order of checkpoint filtering fn application in
 builder, try dict, model variant first

---
 timm/models/_builder.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/timm/models/_builder.py b/timm/models/_builder.py
index 901d7d44..32a35304 100644
--- a/timm/models/_builder.py
+++ b/timm/models/_builder.py
@@ -179,11 +179,11 @@ def load_pretrained(
             return

     if filter_fn is not None:
-        # for backwards compat with filter fn that take one arg, try one first, the two
         try:
-            state_dict = filter_fn(state_dict)
-        except TypeError:
             state_dict = filter_fn(state_dict, model)
+        except TypeError as e:
+            # for backwards compat with filter fn that take one arg
+            state_dict = filter_fn(state_dict)

     input_convs = pretrained_cfg.get('first_conv', None)
     if input_convs is not None and in_chans != 3:
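After the reorder, the two-argument filter signature is tried first and a TypeError falls back to the legacy one-argument form, so both shapes remain valid:

def filter_fn_v2(state_dict, model):
    # preferred: may inspect the model, e.g. to reshape mismatched tensors
    return state_dict

def filter_fn_v1(state_dict):
    # legacy signature, still handled via the TypeError fallback
    return state_dict

One caveat of this dispatch style: a genuine TypeError raised inside a two-argument filter would also trigger the fallback.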
From 36989cfae40b773e6193680d05a154dc9406df0c Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 20 Jan 2023 14:48:54 -0800
Subject: [PATCH 28/34] Factor out readme generation in hub helper, add more
 readme fields

---
 timm/models/_hub.py | 63 +++++++++++++++++++++++++++++++++++----------
 1 file changed, 49 insertions(+), 14 deletions(-)

diff --git a/timm/models/_hub.py b/timm/models/_hub.py
index df1a1ef7..378d646c 100644
--- a/timm/models/_hub.py
+++ b/timm/models/_hub.py
@@ -236,20 +236,7 @@ def push_to_hf_hub(
         model_card = model_card or {}
         model_name = repo_id.split('/')[-1]
         readme_path = Path(tmpdir) / "README.md"
-        readme_text = "---\n"
-        readme_text += "tags:\n- image-classification\n- timm\n"
-        readme_text += "library_tag: timm\n"
-        readme_text += f"license: {model_card.get('license', 'apache-2.0')}\n"
-        readme_text += "---\n"
-        readme_text += f"# Model card for {model_name}\n"
-        if 'description' in model_card:
-            readme_text += f"\n{model_card['description']}\n"
-        if 'details' in model_card:
-            readme_text += f"\n## Model Details\n"
-            for k, v in model_card['details'].items():
-                readme_text += f"- **{k}:** {v}\n"
-        if 'citation' in model_card:
-            readme_text += f"\n## Citation\n```\n{model_card['citation']}```\n"
+        readme_text = generate_readme(model_card, model_name)
         readme_path.write_text(readme_text)

         # Upload model and return
@@ -260,3 +247,51 @@ def push_to_hf_hub(
             create_pr=create_pr,
             commit_message=commit_message,
         )
+
+
+def generate_readme(model_card, model_name):
+    readme_text = "---\n"
+    readme_text += "tags:\n- image-classification\n- timm\n"
+    readme_text += "library_tag: timm\n"
+    readme_text += f"license: {model_card.get('license', 'apache-2.0')}\n"
+    if 'details' in model_card and 'Dataset' in model_card['details']:
+        readme_text += 'datasets:\n'
+        readme_text += f"- {model_card['details']['Dataset'].lower()}\n"
+        if 'Pretrain Dataset' in model_card['details']:
+            readme_text += f"- {model_card['details']['Pretrain Dataset'].lower()}\n"
+    readme_text += "---\n"
+    readme_text += f"# Model card for {model_name}\n"
+    if 'description' in model_card:
+        readme_text += f"\n{model_card['description']}\n"
+    if 'details' in model_card:
+        readme_text += f"\n## Model Details\n"
+        for k, v in model_card['details'].items():
+            if isinstance(v, (list, tuple)):
+                readme_text += f"- **{k}:**\n"
+                for vi in v:
+                    readme_text += f"  - {vi}\n"
+            elif isinstance(v, dict):
+                readme_text += f"- **{k}:**\n"
+                for ki, vi in v.items():
+                    readme_text += f"  - {ki}: {vi}\n"
+            else:
+                readme_text += f"- **{k}:** {v}\n"
+    if 'usage' in model_card:
+        readme_text += f"\n## Model Usage\n"
+        readme_text += model_card['usage']
+        readme_text += '\n'
+
+    if 'comparison' in model_card:
+        readme_text += f"\n## Model Comparison\n"
+        readme_text += model_card['comparison']
+        readme_text += '\n'
+
+    if 'citation' in model_card:
+        readme_text += f"\n## Citation\n"
+        if not isinstance(model_card['citation'], (list, tuple)):
+            citations = [model_card['citation']]
+        else:
+            citations = model_card['citation']
+        for c in citations:
+            readme_text += f"```bibtex\n{c}\n```\n"
+    return readme_text
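For reference, the model_card dict shape that generate_readme consumes, with field names as handled above and values purely illustrative:

model_card = {
    'description': 'A MaxViT image classification model...',
    'details': {
        'Dataset': 'ImageNet-1k',
        'Pretrain Dataset': 'ImageNet-12k',
        'Papers': ['MaxViT: Multi-Axis Vision Transformer: https://arxiv.org/abs/2204.01697'],
    },
    'usage': '...markdown usage snippet...',
    'comparison': '...markdown comparison table...',
    'citation': '@article{tu2022maxvit, ...}',
}
readme_text = generate_readme(model_card, 'maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k')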
+### Jan 20, 2023 +* Add two convnext 12k -> 1k fine-tunes at 384x384 + * `convnext_tiny.in12k_ft_in1k_384` - 85.1 @ 384 + * `convnext_small.in12k_ft_in1k_384` - 86.2 @ 384 + +* Push all MaxxViT weights to HF hub, and add new ImageNet-12k -> 1k fine-tunes for `rw` base MaxViT and CoAtNet 1/2 models + +|model |top1 |top5 |samples / sec |Params (M) |GMAC |Act (M)| +|------------------------------------------------------------------------------------------------------------------------|----:|----:|--------------:|--------------:|-----:|------:| +|[maxvit_xlarge_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_xlarge_tf_512.in21k_ft_in1k) |88.53|98.64| 21.76| 475.77|534.14|1413.22| +|[maxvit_xlarge_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_xlarge_tf_384.in21k_ft_in1k) |88.32|98.54| 42.53| 475.32|292.78| 668.76| +|[maxvit_base_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_base_tf_512.in21k_ft_in1k) |88.20|98.53| 50.87| 119.88|138.02| 703.99| +|[maxvit_large_tf_512.in21k_ft_in1k](https://huggingface.co/timm/maxvit_large_tf_512.in21k_ft_in1k) |88.04|98.40| 36.42| 212.33|244.75| 942.15| +|[maxvit_large_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_large_tf_384.in21k_ft_in1k) |87.98|98.56| 71.75| 212.03|132.55| 445.84| +|[maxvit_base_tf_384.in21k_ft_in1k](https://huggingface.co/timm/maxvit_base_tf_384.in21k_ft_in1k) |87.92|98.54| 104.71| 119.65| 73.80| 332.90| +|[maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/maxvit_rmlp_base_rw_384.sw_in12k_ft_in1k) |87.81|98.37| 106.55| 116.14| 70.97| 318.95| +|[maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/maxxvitv2_rmlp_base_rw_384.sw_in12k_ft_in1k) |87.47|98.37| 149.49| 116.09| 72.98| 213.74| +|[coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_384.sw_in12k_ft_in1k) |87.39|98.31| 160.80| 73.88| 47.69| 209.43| +|[maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/maxvit_rmlp_base_rw_224.sw_in12k_ft_in1k) |86.89|98.02| 375.86| 116.14| 23.15| 92.64| +|[maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/maxxvitv2_rmlp_base_rw_224.sw_in12k_ft_in1k) |86.64|98.02| 501.03| 116.09| 24.20| 62.77| +|[maxvit_base_tf_512.in1k](https://huggingface.co/timm/maxvit_base_tf_512.in1k) |86.60|97.92| 50.75| 119.88|138.02| 703.99| +|[coatnet_2_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_2_rw_224.sw_in12k_ft_in1k) |86.57|97.89| 631.88| 73.87| 15.09| 49.22| +|[maxvit_large_tf_512.in1k](https://huggingface.co/timm/maxvit_large_tf_512.in1k) |86.52|97.88| 36.04| 212.33|244.75| 942.15| +|[coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_224.sw_in12k_ft_in1k) |86.49|97.90| 620.58| 73.88| 15.18| 54.78| +|[maxvit_base_tf_384.in1k](https://huggingface.co/timm/maxvit_base_tf_384.in1k) |86.29|97.80| 101.09| 119.65| 73.80| 332.90| +|[maxvit_large_tf_384.in1k](https://huggingface.co/timm/maxvit_large_tf_384.in1k) |86.23|97.69| 70.56| 212.03|132.55| 445.84| +|[maxvit_small_tf_512.in1k](https://huggingface.co/timm/maxvit_small_tf_512.in1k) |86.10|97.76| 88.63| 69.13| 67.26| 383.77| +|[maxvit_tiny_tf_512.in1k](https://huggingface.co/timm/maxvit_tiny_tf_512.in1k) |85.67|97.58| 144.25| 31.05| 33.49| 257.59| +|[maxvit_small_tf_384.in1k](https://huggingface.co/timm/maxvit_small_tf_384.in1k) |85.54|97.46| 188.35| 69.02| 35.87| 183.65| +|[maxvit_tiny_tf_384.in1k](https://huggingface.co/timm/maxvit_tiny_tf_384.in1k) |85.11|97.38| 293.46| 30.98| 17.53| 123.42| 
+|[maxvit_large_tf_224.in1k](https://huggingface.co/timm/maxvit_large_tf_224.in1k) |84.93|96.97| 247.71| 211.79| 43.68| 127.35| +|[coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k](https://huggingface.co/timm/coatnet_rmlp_1_rw2_224.sw_in12k_ft_in1k) |84.90|96.96| 1025.45| 41.72| 8.11| 40.13| +|[maxvit_base_tf_224.in1k](https://huggingface.co/timm/maxvit_base_tf_224.in1k) |84.85|96.99| 358.25| 119.47| 24.04| 95.01| +|[maxxvit_rmlp_small_rw_256.sw_in1k](https://huggingface.co/timm/maxxvit_rmlp_small_rw_256.sw_in1k) |84.63|97.06| 575.53| 66.01| 14.67| 58.38| +|[coatnet_rmlp_2_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_2_rw_224.sw_in1k) |84.61|96.74| 625.81| 73.88| 15.18| 54.78| +|[maxvit_rmlp_small_rw_224.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_small_rw_224.sw_in1k) |84.49|96.76| 693.82| 64.90| 10.75| 49.30| +|[maxvit_small_tf_224.in1k](https://huggingface.co/timm/maxvit_small_tf_224.in1k) |84.43|96.83| 647.96| 68.93| 11.66| 53.17| +|[maxvit_rmlp_tiny_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_tiny_rw_256.sw_in1k) |84.23|96.78| 807.21| 29.15| 6.77| 46.92| +|[coatnet_1_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_1_rw_224.sw_in1k) |83.62|96.38| 989.59| 41.72| 8.04| 34.60| +|[maxvit_tiny_rw_224.sw_in1k](https://huggingface.co/timm/maxvit_tiny_rw_224.sw_in1k) |83.50|96.50| 1100.53| 29.06| 5.11| 33.11| +|[maxvit_tiny_tf_224.in1k](https://huggingface.co/timm/maxvit_tiny_tf_224.in1k) |83.41|96.59| 1004.94| 30.92| 5.60| 35.78| +|[coatnet_rmlp_1_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_1_rw_224.sw_in1k) |83.36|96.45| 1093.03| 41.69| 7.85| 35.47| +|[maxxvitv2_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxxvitv2_nano_rw_256.sw_in1k) |83.11|96.33| 1276.88| 23.70| 6.26| 23.05| +|[maxxvit_rmlp_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxxvit_rmlp_nano_rw_256.sw_in1k) |83.03|96.34| 1341.24| 16.78| 4.37| 26.05| +|[maxvit_rmlp_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_nano_rw_256.sw_in1k) |82.96|96.26| 1283.24| 15.50| 4.47| 31.92| +|[maxvit_nano_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_nano_rw_256.sw_in1k) |82.93|96.23| 1218.17| 15.45| 4.46| 30.28| +|[coatnet_bn_0_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_bn_0_rw_224.sw_in1k) |82.39|96.19| 1600.14| 27.44| 4.67| 22.04| +|[coatnet_0_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_0_rw_224.sw_in1k) |82.39|95.84| 1831.21| 27.44| 4.43| 18.73| +|[coatnet_rmlp_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_rmlp_nano_rw_224.sw_in1k) |82.05|95.87| 2109.09| 15.15| 2.62| 20.34| +|[coatnext_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnext_nano_rw_224.sw_in1k) |81.95|95.92| 2525.52| 14.70| 2.47| 12.80| +|[coatnet_nano_rw_224.sw_in1k](https://huggingface.co/timm/coatnet_nano_rw_224.sw_in1k) |81.70|95.64| 2344.52| 15.14| 2.41| 15.41| +|[maxvit_rmlp_pico_rw_256.sw_in1k](https://huggingface.co/timm/maxvit_rmlp_pico_rw_256.sw_in1k) |80.53|95.21| 1594.71| 7.52| 1.85| 24.86| + ### Jan 11, 2023 * Update ConvNeXt ImageNet-12k pretrain series w/ two new fine-tuned weights (and pre FT `.in12k` tags) * `convnext_nano.in12k_ft_in1k` - 82.3 @ 224, 82.9 @ 288 (previously released) From c2822568ecbc18031daccd20b94ef5fb0a5fd657 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 20 Jan 2023 15:01:10 -0800 Subject: [PATCH 30/34] Update version to 0.8.7dev0 --- timm/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/timm/version.py b/timm/version.py index b285df69..7a485ace 100644 --- a/timm/version.py +++ b/timm/version.py @@ -1 +1 @@ 
-__version__ = '0.8.6dev0'
+__version__ = '0.8.7dev0'

From 9983ed772198f00818d61841d74d30f4c8b91445 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 20 Jan 2023 16:16:20 -0800
Subject: [PATCH 31/34] xlarge maxvit killing the tests

---
 tests/test_models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_models.py b/tests/test_models.py
index 4ad18477..d30be744 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -39,7 +39,7 @@ if 'GITHUB_ACTIONS' in os.environ:
         '*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm', '*101x3_bitm', '*50x3_bitm',
         '*nfnet_f3*', '*nfnet_f4*', '*nfnet_f5*', '*nfnet_f6*', '*nfnet_f7*', '*efficientnetv2_xl*',
         '*resnetrs350*', '*resnetrs420*', 'xcit_large_24_p8*', 'vit_huge*', 'vit_gi*', 'swin*huge*',
-        'swin*giant*', 'convnextv2_huge*']
+        'swin*giant*', 'convnextv2_huge*', 'maxvit_xlarge*']
     NON_STD_EXCLUDE_FILTERS = ['vit_huge*', 'vit_gi*', 'swin*giant*', 'eva_giant*']
 else:
     EXCLUDE_FILTERS = []

From 64667bfa0ee1470f6389141321f5dfb51c945e3b Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 25 Jan 2023 18:02:10 -0800
Subject: [PATCH 32/34] Add 'gigantic' vit clip variant for feature extraction and future fine-tuning

---
 timm/models/vision_transformer.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index 8ffb1200..d32f9dea 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -1029,6 +1029,10 @@ default_cfgs = generate_default_cfgs({
         hf_hub_id='laion/CLIP-ViT-g-14-laion2B-s12B-b42K',
         hf_hub_filename='open_clip_pytorch_model.bin',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1024),
+    'vit_gigantic_patch14_clip_224.laion2b': _cfg(
+        hf_hub_id='laion/CLIP-ViT-bigG-14-laion2B-39B-b160k',
+        hf_hub_filename='open_clip_pytorch_model.bin',
+        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=1280),

     'vit_base_patch32_clip_224.openai': _cfg(
         hf_hub_id='timm/',
@@ -1498,6 +1502,17 @@ def vit_giant_patch14_clip_224(pretrained=False, **kwargs):
     return model


+@register_model
+def vit_gigantic_patch14_clip_224(pretrained=False, **kwargs):
+    """ ViT-bigG model (ViT-G/14) from `Scaling Vision Transformers` - https://arxiv.org/abs/2106.04560
+    Pretrained weights from CLIP image tower.
+    """
+    model_kwargs = dict(
+        patch_size=14, embed_dim=1664, mlp_ratio=64/13, depth=48, num_heads=16, pre_norm=True, norm_layer=nn.LayerNorm)
+    model = _create_vision_transformer(
+        'vit_gigantic_patch14_clip_224', pretrained=pretrained, **dict(model_kwargs, **kwargs))
+    return model
+

 # Experimental models below

 @register_model

From 2bbc26dd82d1090b26e74d69c97d6a1aaa3757eb Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 25 Jan 2023 18:02:48 -0800
Subject: [PATCH 33/34] version 0.8.8dev0

---
 timm/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/version.py b/timm/version.py
index 7a485ace..e2ac9a76 100644
--- a/timm/version.py
+++ b/timm/version.py
@@ -1 +1 @@
-__version__ = '0.8.7dev0'
+__version__ = '0.8.8dev0'

From 9a53c3f727c37a7e45a5394fc2db5ea851c2cdf8 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 27 Jan 2023 13:54:04 -0800
Subject: [PATCH 34/34] Finalize DaViT, some formatting and modelling simplifications (separate PatchEmbed into Stem + Downsample, weights on HF hub).
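
The new Stem and Downsample modules keep the size-agnostic behaviour of the old
PatchEmbed by padding the right/bottom edges before each strided conv. A minimal
sketch of that padding rule (it mirrors Stem.forward / Downsample.forward in the
diff below; the input shape here is illustrative only):

    import torch
    import torch.nn.functional as F

    stride = (4, 4)
    x = torch.randn(1, 3, 225, 230)  # H, W deliberately not divisible by the stride
    B, C, H, W = x.shape
    x = F.pad(x, (0, (stride[1] - W % stride[1]) % stride[1]))        # pad right edge
    x = F.pad(x, (0, 0, 0, (stride[0] - H % stride[0]) % stride[0]))  # pad bottom edge
    print(x.shape)  # torch.Size([1, 3, 228, 232]); the stride-4 conv then yields 57 x 58 tokens
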
--- tests/test_models.py | 1 - timm/models/davit.py | 555 ++++++++++++++++++++++--------------------- 2 files changed, 279 insertions(+), 277 deletions(-) diff --git a/tests/test_models.py b/tests/test_models.py index eb470d5f..fdededc7 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -29,7 +29,6 @@ NON_STD_FILTERS = [ 'poolformer_*', 'volo_*', 'sequencer2d_*', 'swinv2_*', 'pvt_v2*', 'mvitv2*', 'gcvit*', 'efficientformer*', 'eva_*', 'flexivit*' ] -#'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*', ' NUM_NON_STD = len(NON_STD_FILTERS) # exclude models that cause specific test failures diff --git a/timm/models/davit.py b/timm/models/davit.py index f57cc5ae..8b9e67b4 100644 --- a/timm/models/davit.py +++ b/timm/models/davit.py @@ -11,9 +11,10 @@ DaViT model defs and weights adapted from https://github.com/dingmyu/davit, orig # Copyright (c) 2022 Mingyu Ding # All rights reserved. # This source code is licensed under the MIT license - -from collections import OrderedDict import itertools +from collections import OrderedDict +from functools import partial +from typing import Tuple import torch import torch.nn as nn @@ -21,9 +22,8 @@ import torch.nn.functional as F from torch import Tensor from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD -from timm.layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, Mlp # ClassifierHead +from timm.layers import DropPath, to_2tuple, trunc_normal_, SelectAdaptivePool2d, Mlp, LayerNorm2d, get_norm_layer from ._builder import build_model_with_cfg -from ._features import FeatureInfo from ._features_fx import register_notrace_function from ._manipulate import checkpoint_seq from ._pretrained import generate_default_cfgs @@ -33,89 +33,83 @@ __all__ = ['DaViT'] class ConvPosEnc(nn.Module): - def __init__(self, dim : int, k : int=3, act : bool=False, normtype : str='none'): - + def __init__(self, dim: int, k: int = 3, act: bool = False): super(ConvPosEnc, self).__init__() - self.proj = nn.Conv2d(dim, - dim, - to_2tuple(k), - to_2tuple(1), - to_2tuple(k // 2), - groups=dim) - self.normtype = normtype - self.norm = nn.Identity() - if self.normtype == 'batch': - self.norm = nn.BatchNorm2d(dim) - elif self.normtype == 'layer': - self.norm = nn.LayerNorm(dim) - self.activation = nn.GELU() if act else nn.Identity() - - def forward(self, x : Tensor): - B, C, H, W = x.shape - #feat = x.transpose(1, 2).view(B, C, H, W) + self.proj = nn.Conv2d(dim, dim, k, 1, k // 2, groups=dim) + self.act = nn.GELU() if act else nn.Identity() + + def forward(self, x: Tensor): feat = self.proj(x) - if self.normtype == 'batch': - feat = self.norm(feat).flatten(2).transpose(1, 2) - elif self.normtype == 'layer': - feat = self.norm(feat.flatten(2).transpose(1, 2)) - else: - feat = feat.flatten(2).transpose(1, 2) - x = x + self.activation(feat).transpose(1, 2).view(B, C, H, W) + x = x + self.act(feat) return x -class PatchEmbed(nn.Module): +class Stem(nn.Module): """ Size-agnostic implementation of 2D image to patch embedding, allowing input size to be adjusted during model forward operation """ def __init__( self, - patch_size=4, - in_chans=3, - embed_dim=96, - overlapped=False): + in_chs=3, + out_chs=96, + stride=4, + norm_layer=LayerNorm2d, + ): super().__init__() - patch_size = to_2tuple(patch_size) - self.patch_size = patch_size - self.in_chans = in_chans - self.embed_dim = embed_dim - - if patch_size[0] == 4: - self.proj = nn.Conv2d( - in_chans, - embed_dim, - kernel_size=(7, 7), - stride=patch_size, - padding=(3, 3)) - self.norm = 
nn.LayerNorm(embed_dim) - if patch_size[0] == 2: - kernel = 3 if overlapped else 2 - pad = 1 if overlapped else 0 - self.proj = nn.Conv2d( - in_chans, - embed_dim, - kernel_size=to_2tuple(kernel), - stride=patch_size, - padding=to_2tuple(pad)) - self.norm = nn.LayerNorm(in_chans) - - - def forward(self, x : Tensor): + stride = to_2tuple(stride) + self.stride = stride + self.in_chs = in_chs + self.out_chs = out_chs + assert stride[0] == 4 # only setup for stride==4 + self.conv = nn.Conv2d( + in_chs, + out_chs, + kernel_size=7, + stride=stride, + padding=3, + ) + self.norm = norm_layer(out_chs) + + def forward(self, x: Tensor): B, C, H, W = x.shape - if self.norm.normalized_shape[0] == self.in_chans: - x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) - - x = F.pad(x, (0, (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1])) - x = F.pad(x, (0, 0, 0, (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0])) + x = F.pad(x, (0, (self.stride[1] - W % self.stride[1]) % self.stride[1])) + x = F.pad(x, (0, 0, 0, (self.stride[0] - H % self.stride[0]) % self.stride[0])) + x = self.conv(x) + x = self.norm(x) + return x - x = self.proj(x) - if self.norm.normalized_shape[0] == self.embed_dim: - x = self.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) +class Downsample(nn.Module): + def __init__( + self, + in_chs, + out_chs, + norm_layer=LayerNorm2d, + ): + super().__init__() + self.in_chs = in_chs + self.out_chs = out_chs + + self.norm = norm_layer(in_chs) + self.conv = nn.Conv2d( + in_chs, + out_chs, + kernel_size=2, + stride=2, + padding=0, + ) + + def forward(self, x: Tensor): + B, C, H, W = x.shape + x = self.norm(x) + x = F.pad(x, (0, (2 - W % 2) % 2)) + x = F.pad(x, (0, 0, 0, (2 - H % 2) % 2)) + x = self.conv(x) return x - + + class ChannelAttention(nn.Module): def __init__(self, dim, num_heads=8, qkv_bias=False): @@ -127,11 +121,11 @@ class ChannelAttention(nn.Module): self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) self.proj = nn.Linear(dim, dim) - def forward(self, x : Tensor): + def forward(self, x: Tensor): B, N, C = x.shape qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) - q, k, v = qkv[0], qkv[1], qkv[2] + q, k, v = qkv.unbind(0) k = k * self.scale attention = k.transpose(-1, -2) @ v @@ -140,50 +134,64 @@ class ChannelAttention(nn.Module): x = x.transpose(1, 2).reshape(B, N, C) x = self.proj(x) return x - + class ChannelBlock(nn.Module): - def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, - drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, - ffn=True, cpe_act=False): + def __init__( + self, + dim, + num_heads, + mlp_ratio=4., + qkv_bias=False, + drop_path=0., + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + ffn=True, + cpe_act=False, + ): super().__init__() self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act) self.ffn = ffn self.norm1 = norm_layer(dim) self.attn = ChannelAttention(dim, num_heads=num_heads, qkv_bias=qkv_bias) - self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity() self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act) - + if self.ffn: self.norm2 = norm_layer(dim) - mlp_hidden_dim = int(dim * mlp_ratio) self.mlp = Mlp( in_features=dim, - hidden_features=mlp_hidden_dim, - act_layer=act_layer) - - - def forward(self, x : Tensor): + hidden_features=int(dim * mlp_ratio), + act_layer=act_layer, + ) + self.drop_path2 = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + else: + self.norm2 = None + self.mlp = None + self.drop_path2 = None + def forward(self, x: Tensor): B, C, H, W = x.shape x = self.cpe1(x).flatten(2).transpose(1, 2) - + cur = self.norm1(x) cur = self.attn(cur) - x = x + self.drop_path(cur) + x = x + self.drop_path1(cur) + + x = self.cpe2(x.transpose(1, 2).view(B, C, H, W)) + + if self.mlp is not None: + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path2(self.mlp(self.norm2(x))) + x = x.transpose(1, 2).view(B, C, H, W) - x = self.cpe2(x.transpose(1, 2).view(B, C, H, W)).flatten(2).transpose(1, 2) - if self.ffn: - x = x + self.drop_path(self.mlp(self.norm2(x))) - - x = x.transpose(1, 2).view(B, C, H, W) - return x -def window_partition(x : Tensor, window_size: int): + +def window_partition(x: Tensor, window_size: Tuple[int, int]): """ Args: x: (B, H, W, C) @@ -192,12 +200,13 @@ def window_partition(x : Tensor, window_size: int): windows: (num_windows*B, window_size, window_size, C) """ B, H, W, C = x.shape - x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) - windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + x = x.view(B, H // window_size[0], window_size[0], W // window_size[1], window_size[1], C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size[0], window_size[1], C) return windows -@register_notrace_function # reason: int argument is a Proxy -def window_reverse(windows : Tensor, window_size: int, H: int, W: int): + +@register_notrace_function # reason: int argument is a Proxy +def window_reverse(windows: Tensor, window_size: Tuple[int, int], H: int, W: int): """ Args: windows: (num_windows*B, window_size, window_size, C) @@ -207,9 +216,8 @@ def window_reverse(windows : Tensor, window_size: int, H: int, W: int): Returns: x: (B, H, W, C) """ - - B = int(windows.shape[0] / (H * W / window_size / window_size)) - x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) + B = int(windows.shape[0] / (H * W / window_size[0] / window_size[1])) + x = windows.view(B, H // window_size[0], W // window_size[1], window_size[0], window_size[1], -1) x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) return x @@ -225,7 +233,6 @@ class WindowAttention(nn.Module): """ def __init__(self, dim, window_size, num_heads, qkv_bias=True): - super().__init__() self.dim = dim self.window_size = window_size @@ -238,11 +245,11 @@ class WindowAttention(nn.Module): self.softmax = nn.Softmax(dim=-1) - def forward(self, x : Tensor): + def forward(self, x: Tensor): B_, N, C = x.shape - + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) - q, k, v = qkv[0], qkv[1], qkv[2] + q, k, v = qkv.unbind(0) q = q * self.scale attn = (q @ k.transpose(-2, -1)) @@ -266,108 +273,119 @@ class SpatialBlock(nn.Module): norm_layer (nn.Module, optional): Normalization layer. 
Default: nn.LayerNorm
     """

-    def __init__(self, dim, num_heads, window_size=7,
-                 mlp_ratio=4., qkv_bias=True, drop_path=0.,
-                 act_layer=nn.GELU, norm_layer=nn.LayerNorm,
-                 ffn=True, cpe_act=False):
+    def __init__(
+            self,
+            dim,
+            num_heads,
+            window_size=7,
+            mlp_ratio=4.,
+            qkv_bias=True,
+            drop_path=0.,
+            act_layer=nn.GELU,
+            norm_layer=nn.LayerNorm,
+            ffn=True,
+            cpe_act=False,
+    ):
         super().__init__()
         self.dim = dim
         self.ffn = ffn
         self.num_heads = num_heads
-        self.window_size = window_size
+        self.window_size = to_2tuple(window_size)
         self.mlp_ratio = mlp_ratio
-
+
         self.cpe1 = ConvPosEnc(dim=dim, k=3, act=cpe_act)
         self.norm1 = norm_layer(dim)
         self.attn = WindowAttention(
             dim,
-            window_size=to_2tuple(self.window_size),
+            self.window_size,
             num_heads=num_heads,
-            qkv_bias=qkv_bias)
+            qkv_bias=qkv_bias,
+        )
+        self.drop_path1 = DropPath(drop_path) if drop_path > 0. else nn.Identity()

-        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
         self.cpe2 = ConvPosEnc(dim=dim, k=3, act=cpe_act)
-
         if self.ffn:
             self.norm2 = norm_layer(dim)
             mlp_hidden_dim = int(dim * mlp_ratio)
             self.mlp = Mlp(
                 in_features=dim,
                 hidden_features=mlp_hidden_dim,
-                act_layer=act_layer)
-
+                act_layer=act_layer,
+            )
+            self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()
+        else:
+            self.norm2 = None
+            self.mlp = None
+            self.drop_path2 = None

-    def forward(self, x : Tensor):
+    def forward(self, x: Tensor):
         B, C, H, W = x.shape
-
         shortcut = self.cpe1(x).flatten(2).transpose(1, 2)
+
         x = self.norm1(shortcut)
         x = x.view(B, H, W, C)

         pad_l = pad_t = 0
-        pad_r = (self.window_size - W % self.window_size) % self.window_size
-        pad_b = (self.window_size - H % self.window_size) % self.window_size
+        pad_r = (self.window_size[1] - W % self.window_size[1]) % self.window_size[1]
+        pad_b = (self.window_size[0] - H % self.window_size[0]) % self.window_size[0]
         x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))
         _, Hp, Wp, _ = x.shape

         x_windows = window_partition(x, self.window_size)
-        x_windows = x_windows.view(-1, self.window_size * self.window_size, C)
+        x_windows = x_windows.view(-1, self.window_size[0] * self.window_size[1], C)

         # W-MSA/SW-MSA
         attn_windows = self.attn(x_windows)

         # merge windows
-        attn_windows = attn_windows.view(-1,
-                                         self.window_size,
-                                         self.window_size,
-                                         C)
+        attn_windows = attn_windows.view(-1, self.window_size[0], self.window_size[1], C)
         x = window_reverse(attn_windows, self.window_size, Hp, Wp)

-        #if pad_r > 0 or pad_b > 0:
+        # if pad_r > 0 or pad_b > 0:
         x = x[:, :H, :W, :].contiguous()

         x = x.view(B, H * W, C)
-        x = shortcut + self.drop_path(x)
+        x = shortcut + self.drop_path1(x)
+
+        x = self.cpe2(x.transpose(1, 2).view(B, C, H, W))
+
+        if self.mlp is not None:
+            x = x.flatten(2).transpose(1, 2)
+            x = x + self.drop_path2(self.mlp(self.norm2(x)))
+            x = x.transpose(1, 2).view(B, C, H, W)

-        x = self.cpe2(x.transpose(1, 2).view(B, C, H, W)).flatten(2).transpose(1, 2)
-        if self.ffn:
-            x = x + self.drop_path(self.mlp(self.norm2(x)))
-
-        x = x.transpose(1, 2).view(B, C, H, W)
-
         return x
-
+

 class DaViTStage(nn.Module):
     def __init__(
-            self,
-            in_chs,
-            out_chs,
-            depth = 1,
-            patch_size = 4,
-            overlapped_patch = False,
-            attention_types = ('spatial', 'channel'),
-            num_heads = 3,
-            window_size = 7,
-            mlp_ratio = 4,
-            qkv_bias = True,
-            drop_path_rates = (0, 0),
-            norm_layer = nn.LayerNorm,
-            ffn = True,
-            cpe_act = False
+            self,
+            in_chs,
+            out_chs,
+            depth=1,
+            downsample=True,
+            attn_types=('spatial', 'channel'),
+            num_heads=3,
+            window_size=7,
+            mlp_ratio=4,
+            qkv_bias=True,
+            
drop_path_rates=(0, 0), + norm_layer=LayerNorm2d, + norm_layer_cl=nn.LayerNorm, + ffn=True, + cpe_act=False ): super().__init__() self.grad_checkpointing = False - - # patch embedding layer at the beginning of each stage - self.patch_embed = PatchEmbed( - patch_size=patch_size, - in_chans=in_chs, - embed_dim=out_chs, - overlapped=overlapped_patch - ) + + # downsample embedding layer at the beginning of each stage + if downsample: + self.downsample = Downsample(in_chs, out_chs, norm_layer=norm_layer) + else: + self.downsample = nn.Identity() + ''' repeating alternating attention blocks in each stage default: (spatial -> channel) x depth @@ -377,44 +395,40 @@ class DaViTStage(nn.Module): ''' stage_blocks = [] for block_idx in range(depth): - dual_attention_block = [] - - for attention_id, attention_type in enumerate(attention_types): - if attention_type == 'spatial': + for attn_idx, attn_type in enumerate(attn_types): + if attn_type == 'spatial': dual_attention_block.append(SpatialBlock( dim=out_chs, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, - drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], - norm_layer=norm_layer, + drop_path=drop_path_rates[block_idx], + norm_layer=norm_layer_cl, ffn=ffn, cpe_act=cpe_act, window_size=window_size, )) - elif attention_type == 'channel': + elif attn_type == 'channel': dual_attention_block.append(ChannelBlock( dim=out_chs, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, - drop_path=drop_path_rates[len(attention_types) * block_idx + attention_id], - norm_layer=norm_layer, + drop_path=drop_path_rates[block_idx], + norm_layer=norm_layer_cl, ffn=ffn, cpe_act=cpe_act )) - stage_blocks.append(nn.Sequential(*dual_attention_block)) - self.blocks = nn.Sequential(*stage_blocks) - + @torch.jit.ignore def set_grad_checkpointing(self, enable=True): self.grad_checkpointing = enable - - def forward(self, x : Tensor): - x = self.patch_embed(x) + + def forward(self, x: Tensor): + x = self.downsample(x) if self.grad_checkpointing and not torch.jit.is_scripting(): x = checkpoint_seq(self.blocks, x) else: @@ -431,7 +445,6 @@ class DaViT(nn.Module): in_chans (int): Number of input image channels. Default: 3 num_classes (int): Number of classes for classification head. Default: 1000 depths (tuple(int)): Number of blocks in each stage. Default: (1, 1, 3, 1) - patch_size (int | tuple(int)): Patch size. Default: 4 embed_dims (tuple(int)): Patch embedding dimension. Default: (96, 192, 384, 768) num_heads (tuple(int)): Number of attention heads in different layers. Default: (3, 6, 12, 24) window_size (int): Window size. 
Default: 7 @@ -442,75 +455,67 @@ class DaViT(nn.Module): """ def __init__( - self, - in_chans=3, - depths=(1, 1, 3, 1), - patch_size=4, - embed_dims=(96, 192, 384, 768), - num_heads=(3, 6, 12, 24), - window_size=7, - mlp_ratio=4., - qkv_bias=True, - drop_path_rate=0.1, - norm_layer=nn.LayerNorm, - attention_types=('spatial', 'channel'), - ffn=True, - overlapped_patch=False, - cpe_act=False, - drop_rate=0., - attn_drop_rate=0., - num_classes=1000, - global_pool='avg', - head_norm_first=False, + self, + in_chans=3, + depths=(1, 1, 3, 1), + embed_dims=(96, 192, 384, 768), + num_heads=(3, 6, 12, 24), + window_size=7, + mlp_ratio=4, + qkv_bias=True, + norm_layer='layernorm2d', + norm_layer_cl='layernorm', + norm_eps=1e-5, + attn_types=('spatial', 'channel'), + ffn=True, + cpe_act=False, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_classes=1000, + global_pool='avg', + head_norm_first=False, ): super().__init__() - - architecture = [[index] * item for index, item in enumerate(depths)] - self.architecture = architecture - self.embed_dims = embed_dims - self.num_heads = num_heads - self.num_stages = len(self.embed_dims) - dpr = [x.item() for x in torch.linspace(0, drop_path_rate, len(attention_types) * len(list(itertools.chain(*self.architecture))))] - assert self.num_stages == len(self.num_heads) == (sorted(list(itertools.chain(*self.architecture)))[-1] + 1) - + num_stages = len(embed_dims) + assert num_stages == len(num_heads) == len(depths) + norm_layer = partial(get_norm_layer(norm_layer), eps=norm_eps) + norm_layer_cl = partial(get_norm_layer(norm_layer_cl), eps=norm_eps) self.num_classes = num_classes self.num_features = embed_dims[-1] - self.drop_rate=drop_rate + self.drop_rate = drop_rate self.grad_checkpointing = False self.feature_info = [] - - self.patch_embed = None - stages = [] - - for stage_id in range(self.num_stages): - stage_drop_rates = dpr[len(attention_types) * sum(depths[:stage_id]):len(attention_types) * sum(depths[:stage_id + 1])] + self.stem = Stem(in_chans, embed_dims[0], norm_layer=norm_layer) + in_chs = embed_dims[0] + + dpr = [x.tolist() for x in torch.linspace(0, drop_path_rate, sum(depths)).split(depths)] + stages = [] + for stage_idx in range(num_stages): + out_chs = embed_dims[stage_idx] stage = DaViTStage( - in_chans if stage_id == 0 else embed_dims[stage_id - 1], - embed_dims[stage_id], - depth = depths[stage_id], - patch_size = patch_size if stage_id == 0 else 2, - overlapped_patch = overlapped_patch, - attention_types = attention_types, - num_heads = num_heads[stage_id], - window_size = window_size, - mlp_ratio = mlp_ratio, - qkv_bias = qkv_bias, - drop_path_rates = stage_drop_rates, - norm_layer = nn.LayerNorm, - ffn = ffn, - cpe_act = cpe_act + in_chs, + out_chs, + depth=depths[stage_idx], + downsample=stage_idx > 0, + attn_types=attn_types, + num_heads=num_heads[stage_idx], + window_size=window_size, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path_rates=dpr[stage_idx], + norm_layer=norm_layer, + norm_layer_cl=norm_layer_cl, + ffn=ffn, + cpe_act=cpe_act, ) - - if stage_id == 0: - self.patch_embed = stage.patch_embed - stage.patch_embed = nn.Identity() - + in_chs = out_chs stages.append(stage) - self.feature_info += [dict(num_chs=self.embed_dims[stage_id], reduction=2, module=f'stages.{stage_id}')] - + self.feature_info += [dict(num_chs=out_chs, reduction=2, module=f'stages.{stage_idx}')] + self.stages = nn.Sequential(*stages) - + # if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets # otherwise pool 
-> norm -> fc, the default DaViT order, similar to ConvNeXt # FIXME generalize this structure to ClassifierHead @@ -521,28 +526,25 @@ class DaViT(nn.Module): ('flatten', nn.Flatten(1) if global_pool else nn.Identity()), ('drop', nn.Dropout(self.drop_rate)), ('fc', nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity())])) - + self.apply(self._init_weights) - + def _init_weights(self, m): if isinstance(m, nn.Linear): trunc_normal_(m.weight, std=.02) if isinstance(m, nn.Linear) and m.bias is not None: nn.init.constant_(m.bias, 0) - elif isinstance(m, nn.LayerNorm): - nn.init.constant_(m.bias, 0) - nn.init.constant_(m.weight, 1.0) - + @torch.jit.ignore def set_grad_checkpointing(self, enable=True): self.grad_checkpointing = enable for stage in self.stages: stage.set_grad_checkpointing(enable=enable) - + @torch.jit.ignore def get_classifier(self): return self.head.fc - + def reset_classifier(self, num_classes, global_pool=None): if global_pool is not None: self.head.global_pool = SelectAdaptivePool2d(pool_type=global_pool) @@ -550,21 +552,21 @@ class DaViT(nn.Module): self.head.fc = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity() def forward_features(self, x): - x = self.patch_embed(x) + x = self.stem(x) if self.grad_checkpointing and not torch.jit.is_scripting(): x = checkpoint_seq(self.stages, x) else: x = self.stages(x) - x = self.norm_pre(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + x = self.norm_pre(x) return x - + def forward_head(self, x, pre_logits: bool = False): x = self.head.global_pool(x) - x = self.head.norm(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2) + x = self.head.norm(x) x = self.head.flatten(x) x = self.head.drop(x) return x if pre_logits else self.head.fc(x) - + def forward(self, x): x = self.forward_features(x) x = self.forward_head(x) @@ -573,29 +575,28 @@ class DaViT(nn.Module): def checkpoint_filter_fn(state_dict, model): """ Remap MSFT checkpoints -> timm """ - if 'head' in state_dict: + if 'head.fc.weight' in state_dict: return state_dict # non-MSFT checkpoint - + if 'state_dict' in state_dict: state_dict = state_dict['state_dict'] import re out_dict = {} for k, v in state_dict.items(): - - k = re.sub(r'patch_embeds.([0-9]+)', r'stages.\1.patch_embed', k) + k = re.sub(r'patch_embeds.([0-9]+)', r'stages.\1.downsample', k) k = re.sub(r'main_blocks.([0-9]+)', r'stages.\1.blocks', k) - k = k.replace('stages.0.patch_embed', 'patch_embed') + k = k.replace('downsample.proj', 'downsample.conv') + k = k.replace('stages.0.downsample', 'stem') k = k.replace('head.', 'head.fc.') k = k.replace('norms.', 'head.norm.') k = k.replace('cpe.0', 'cpe1') k = k.replace('cpe.1', 'cpe2') out_dict[k] = v return out_dict - -def _create_davit(variant, pretrained=False, **kwargs): +def _create_davit(variant, pretrained=False, **kwargs): default_out_indices = tuple(i for i, _ in enumerate(kwargs.get('depths', (1, 1, 3, 1)))) out_indices = kwargs.pop('out_indices', default_out_indices) @@ -608,69 +609,71 @@ def _create_davit(variant, pretrained=False, **kwargs): **kwargs) return model - - + def _cfg(url='', **kwargs): return { 'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), - 'crop_pct': 0.850, 'interpolation': 'bicubic', + 'crop_pct': 0.95, 'interpolation': 'bicubic', 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, - 'first_conv': 'patch_embed.proj', 'classifier': 'head.fc', + 'first_conv': 'stem.conv', 'classifier': 'head.fc', **kwargs } - # TODO contact authors to get larger pretrained models 
default_cfgs = generate_default_cfgs({ # official microsoft weights from https://github.com/dingmyu/davit 'davit_tiny.msft_in1k': _cfg( - url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_tiny_ed28dd55.pth.tar"), + hf_hub_id='timm/'), 'davit_small.msft_in1k': _cfg( - url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_small_d1ecf281.pth.tar"), + hf_hub_id='timm/'), 'davit_base.msft_in1k': _cfg( - url="https://github.com/fffffgggg54/pytorch-image-models/releases/download/checkpoint/davit_base_67d9ac26.pth.tar"), + hf_hub_id='timm/'), 'davit_large': _cfg(), 'davit_huge': _cfg(), 'davit_giant': _cfg(), }) - @register_model def davit_tiny(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 3, 1), embed_dims=(96, 192, 384, 768), - num_heads=(3, 6, 12, 24), **kwargs) + model_kwargs = dict( + depths=(1, 1, 3, 1), embed_dims=(96, 192, 384, 768), num_heads=(3, 6, 12, 24), **kwargs) return _create_davit('davit_tiny', pretrained=pretrained, **model_kwargs) - + + @register_model def davit_small(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(96, 192, 384, 768), - num_heads=(3, 6, 12, 24), **kwargs) + model_kwargs = dict( + depths=(1, 1, 9, 1), embed_dims=(96, 192, 384, 768), num_heads=(3, 6, 12, 24), **kwargs) return _create_davit('davit_small', pretrained=pretrained, **model_kwargs) - + + @register_model def davit_base(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(128, 256, 512, 1024), - num_heads=(4, 8, 16, 32), **kwargs) + model_kwargs = dict( + depths=(1, 1, 9, 1), embed_dims=(128, 256, 512, 1024), num_heads=(4, 8, 16, 32), **kwargs) return _create_davit('davit_base', pretrained=pretrained, **model_kwargs) + @register_model def davit_large(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(192, 384, 768, 1536), - num_heads=(6, 12, 24, 48), **kwargs) + model_kwargs = dict( + depths=(1, 1, 9, 1), embed_dims=(192, 384, 768, 1536), num_heads=(6, 12, 24, 48), **kwargs) return _create_davit('davit_large', pretrained=pretrained, **model_kwargs) - + + @register_model def davit_huge(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 9, 1), embed_dims=(256, 512, 1024, 2048), - num_heads=(8, 16, 32, 64), **kwargs) + model_kwargs = dict( + depths=(1, 1, 9, 1), embed_dims=(256, 512, 1024, 2048), num_heads=(8, 16, 32, 64), **kwargs) return _create_davit('davit_huge', pretrained=pretrained, **model_kwargs) - + + @register_model def davit_giant(pretrained=False, **kwargs): - model_kwargs = dict(depths=(1, 1, 12, 3), embed_dims=(384, 768, 1536, 3072), - num_heads=(12, 24, 48, 96), **kwargs) + model_kwargs = dict( + depths=(1, 1, 12, 3), embed_dims=(384, 768, 1536, 3072), num_heads=(12, 24, 48, 96), **kwargs) return _create_davit('davit_giant', pretrained=pretrained, **model_kwargs)
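
A minimal smoke-test sketch for the finalized DaViT models (assumes this patch series
is applied; pretrained=True additionally assumes the 'timm/davit_*.msft_in1k' hub
weights referenced above are live):

    import torch
    import timm

    model = timm.create_model('davit_tiny', pretrained=False).eval()
    x = torch.randn(2, 3, 224, 224)
    with torch.no_grad():
        logits = model(x)              # (2, 1000) classification logits
    print(logits.shape)

    # the per-stage feature_info entries should also enable features_only extraction
    fmodel = timm.create_model('davit_tiny', pretrained=False, features_only=True)
    with torch.no_grad():
        feats = fmodel(x)
    print([f.shape for f in feats])    # four NCHW feature maps, one per stage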