From c1b3cea19df84beb9d5e141272b383e9ba9a0980 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Wed, 7 Sep 2022 10:27:11 -0700
Subject: [PATCH] Add maxvit_rmlp_tiny_rw_256 model def and weights w/ 84.2 top-1 @ 256, 84.8 @ 320

---
 timm/models/maxxvit.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/timm/models/maxxvit.py b/timm/models/maxxvit.py
index f1df148b..f10e9f59 100644
--- a/timm/models/maxxvit.py
+++ b/timm/models/maxxvit.py
@@ -121,6 +121,9 @@ default_cfgs = {
     'maxvit_rmlp_nano_rw_256': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_nano_rw_256_sw-c17bb0d6.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
+    'maxvit_rmlp_tiny_rw_256': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_tiny_rw_256_sw-2da819a5.pth',
+        input_size=(3, 256, 256), pool_size=(8, 8)),
     'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
 
     'maxxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
@@ -515,6 +518,13 @@ model_cfgs = dict(
         stem_width=(32, 64),
         **_rw_max_cfg(rel_pos_type='mlp'),
     ),
+    maxvit_rmlp_tiny_rw_256=MaxxVitCfg(
+        embed_dim=(64, 128, 256, 512),
+        depths=(2, 2, 5, 2),
+        block_type=('M',) * 4,
+        stem_width=(32, 64),
+        **_rw_max_cfg(rel_pos_type='mlp'),
+    ),
     maxvit_tiny_pm_256=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(2, 2, 5, 2),
@@ -1721,6 +1731,11 @@ def maxvit_rmlp_nano_rw_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_rmlp_nano_rw_256', pretrained=pretrained, **kwargs)
 
 
+@register_model
+def maxvit_rmlp_tiny_rw_256(pretrained=False, **kwargs):
+    return _create_maxxvit('maxvit_rmlp_tiny_rw_256', pretrained=pretrained, **kwargs)
+
+
 @register_model
 def maxvit_tiny_pm_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs)
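
With this patch applied, the new model is reachable through timm's standard factory. The snippet below is a minimal usage sketch, not part of the patch; it assumes timm (with this change) and torch are installed and that the weight URL in default_cfgs is reachable.

    import timm
    import torch

    # Instantiate the newly registered model; pretrained=True downloads the
    # maxvit_rmlp_tiny_rw_256_sw-2da819a5.pth weights from the URL added to default_cfgs.
    model = timm.create_model('maxvit_rmlp_tiny_rw_256', pretrained=True)
    model.eval()

    # The default_cfg declares a 256x256 input (input_size=(3, 256, 256)).
    x = torch.randn(1, 3, 256, 256)
    with torch.no_grad():
        logits = model(x)
    print(logits.shape)  # expected: torch.Size([1, 1000])

The 84.8 top-1 figure in the subject presumably comes from evaluating the same weights at 320x320; MaxxVit accepts an img_size override, so passing img_size=320 to create_model should build the model at that resolution, though that is an assumption about evaluation setup rather than something stated in the patch.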