From 899cf84ccc7e371f83f6dc55076356d91ab6c0a2 Mon Sep 17 00:00:00 2001
From: Alexander Soare
Date: Sun, 11 Jul 2021 12:41:51 +0100
Subject: [PATCH] bug fix - missing _dist postfix for many of the 224_dist models

---
 timm/models/xcit.py | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/timm/models/xcit.py b/timm/models/xcit.py
index 6608d1a6..be357ddb 100644
--- a/timm/models/xcit.py
+++ b/timm/models/xcit.py
@@ -532,7 +532,7 @@ def xcit_small_12_p16_224(pretrained=False, **kwargs):
 def xcit_small_12_p16_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=16, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_small_12_p16_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_small_12_p16_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -556,7 +556,7 @@ def xcit_tiny_24_p16_224(pretrained=False, **kwargs):
 def xcit_tiny_24_p16_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=16, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_tiny_24_p16_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_tiny_24_p16_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -580,7 +580,7 @@ def xcit_small_24_p16_224(pretrained=False, **kwargs):
 def xcit_small_24_p16_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=16, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_small_24_p16_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_small_24_p16_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -604,7 +604,7 @@ def xcit_medium_24_p16_224(pretrained=False, **kwargs):
 def xcit_medium_24_p16_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=16, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_medium_24_p16_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_medium_24_p16_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -628,7 +628,7 @@ def xcit_large_24_p16_224(pretrained=False, **kwargs):
 def xcit_large_24_p16_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=16, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_large_24_p16_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_large_24_p16_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -653,7 +653,7 @@ def xcit_nano_12_p8_224(pretrained=False, **kwargs):
 def xcit_nano_12_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=128, depth=12, num_heads=4, eta=1.0, tokens_norm=False, **kwargs)
-    model = _create_xcit('xcit_nano_12_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_nano_12_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -677,7 +677,7 @@ def xcit_tiny_12_p8_224(pretrained=False, **kwargs):
 def xcit_tiny_12_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=192, depth=12, num_heads=4, eta=1.0, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_tiny_12_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_tiny_12_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -701,7 +701,7 @@ def xcit_small_12_p8_224(pretrained=False, **kwargs):
 def xcit_small_12_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=384, depth=12, num_heads=8, eta=1.0, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_small_12_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_small_12_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -725,7 +725,7 @@ def xcit_tiny_24_p8_224(pretrained=False, **kwargs):
 def xcit_tiny_24_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=192, depth=24, num_heads=4, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_tiny_24_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_tiny_24_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -749,7 +749,7 @@ def xcit_small_24_p8_224(pretrained=False, **kwargs):
 def xcit_small_24_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=384, depth=24, num_heads=8, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_small_24_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_small_24_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -773,7 +773,7 @@ def xcit_medium_24_p8_224(pretrained=False, **kwargs):
 def xcit_medium_24_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=512, depth=24, num_heads=8, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_medium_24_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_medium_24_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
@@ -797,7 +797,7 @@ def xcit_large_24_p8_224(pretrained=False, **kwargs):
 def xcit_large_24_p8_224_dist(pretrained=False, **kwargs):
     model_kwargs = dict(
         patch_size=8, embed_dim=768, depth=24, num_heads=16, eta=1e-5, tokens_norm=True, **kwargs)
-    model = _create_xcit('xcit_large_24_p8_224', pretrained=pretrained, **model_kwargs)
+    model = _create_xcit('xcit_large_24_p8_224_dist', pretrained=pretrained, **model_kwargs)
     return model
 
 
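A quick way to sanity-check the fix (a minimal sketch, not part of the patch; it assumes timm's usual create_model() entrypoint and the default_cfg attribute that registered models carry):

    import timm

    # Before this patch, every *_dist entrypoint above passed the undistilled
    # variant name to _create_xcit, so pretrained=True silently loaded the
    # undistilled checkpoint. After the fix, the distilled entrypoint resolves
    # its own default_cfg.
    model = timm.create_model('xcit_small_12_p16_224_dist', pretrained=False)
    print(model.default_cfg['url'])  # expect a URL naming the distilled weights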