From 5bcf686cb0aad39b7c9114931db2e7fc2bc4f24c Mon Sep 17 00:00:00 2001 From: talrid Date: Wed, 19 May 2021 20:51:10 +0300 Subject: [PATCH 1/3] mixer_b16_224_miil --- timm/models/mlp_mixer.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/timm/models/mlp_mixer.py b/timm/models/mlp_mixer.py index 248568fc..87edbfd6 100644 --- a/timm/models/mlp_mixer.py +++ b/timm/models/mlp_mixer.py @@ -60,6 +60,15 @@ default_cfgs = dict( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth', num_classes=21843 ), + # Mixer ImageNet-21K-P pretraining + mixer_b16_224_miil_in21k=_cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil_in21k.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221, + ), + mixer_b16_224_miil=_cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', + ), ) @@ -255,3 +264,21 @@ def mixer_l16_224_in21k(pretrained=False, **kwargs): model_args = dict(patch_size=16, num_blocks=24, hidden_dim=1024, tokens_dim=512, channels_dim=4096, **kwargs) model = _create_mixer('mixer_l16_224_in21k', pretrained=pretrained, **model_args) return model + +@register_model +def mixer_b16_224_miil(pretrained=False, **kwargs): + """ Mixer-B/16 224x224. ImageNet-21k pretrained weights. + Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K + """ + model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs) + model = _create_mixer('mixer_b16_224_miil', pretrained=pretrained, **model_args) + return model + +@register_model +def mixer_b16_224_miil_in21k(pretrained=False, **kwargs): + """ Mixer-B/16 224x224. ImageNet-1k pretrained weights. 
+ Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K + """ + model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs) + model = _create_mixer('mixer_b16_224_miil_in21k', pretrained=pretrained, **model_args) + return model \ No newline at end of file From 240e6677468392283835c372fe2addc72514cff9 Mon Sep 17 00:00:00 2001 From: talrid Date: Thu, 20 May 2021 10:23:07 +0300 Subject: [PATCH 2/3] Revert "mixer_b16_224_miil" --- timm/models/mlp_mixer.py | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/timm/models/mlp_mixer.py b/timm/models/mlp_mixer.py index 87edbfd6..248568fc 100644 --- a/timm/models/mlp_mixer.py +++ b/timm/models/mlp_mixer.py @@ -60,15 +60,6 @@ default_cfgs = dict( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth', num_classes=21843 ), - # Mixer ImageNet-21K-P pretraining - mixer_b16_224_miil_in21k=_cfg( - url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil_in21k.pth', - mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221, - ), - mixer_b16_224_miil=_cfg( - url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil.pth', - mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', - ), ) @@ -264,21 +255,3 @@ def mixer_l16_224_in21k(pretrained=False, **kwargs): model_args = dict(patch_size=16, num_blocks=24, hidden_dim=1024, tokens_dim=512, channels_dim=4096, **kwargs) model = _create_mixer('mixer_l16_224_in21k', pretrained=pretrained, **model_args) return model - -@register_model -def mixer_b16_224_miil(pretrained=False, **kwargs): - """ Mixer-B/16 224x224. ImageNet-21k pretrained weights. 
- Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K - """ - model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs) - model = _create_mixer('mixer_b16_224_miil', pretrained=pretrained, **model_args) - return model - -@register_model -def mixer_b16_224_miil_in21k(pretrained=False, **kwargs): - """ Mixer-B/16 224x224. ImageNet-1k pretrained weights. - Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K - """ - model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs) - model = _create_mixer('mixer_b16_224_miil_in21k', pretrained=pretrained, **model_args) - return model \ No newline at end of file From dc1a4efd28b335ebd85e13d64edd78404f75aeb7 Mon Sep 17 00:00:00 2001 From: talrid Date: Thu, 20 May 2021 10:35:50 +0300 Subject: [PATCH 3/3] mixer_b16_224_miil, mixer_b16_224_miil_in21k models --- timm/models/mlp_mixer.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/timm/models/mlp_mixer.py b/timm/models/mlp_mixer.py index 2241fe43..92ca115b 100644 --- a/timm/models/mlp_mixer.py +++ b/timm/models/mlp_mixer.py @@ -80,6 +80,15 @@ default_cfgs = dict( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth', num_classes=21843 ), + # Mixer ImageNet-21K-P pretraining + mixer_b16_224_miil_in21k=_cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil_in21k.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221, + ), + mixer_b16_224_miil=_cfg( + url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil.pth', + mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', + ), gmixer_12_224=_cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), 
gmixer_24_224=_cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD), @@ -365,6 +374,23 @@ def mixer_l16_224_in21k(pretrained=False, **kwargs): model = _create_mixer('mixer_l16_224_in21k', pretrained=pretrained, **model_args) return model +@register_model +def mixer_b16_224_miil(pretrained=False, **kwargs): + """ Mixer-B/16 224x224. ImageNet-1k pretrained weights (fine-tuned from ImageNet-21k-P). + Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K + """ + model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, **kwargs) + model = _create_mixer('mixer_b16_224_miil', pretrained=pretrained, **model_args) + return model + +@register_model +def mixer_b16_224_miil_in21k(pretrained=False, **kwargs): + """ Mixer-B/16 224x224. ImageNet-21k-P pretrained weights. + Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K + """ + model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, **kwargs) + model = _create_mixer('mixer_b16_224_miil_in21k', pretrained=pretrained, **model_args) + return model @register_model def gmixer_12_224(pretrained=False, **kwargs):