From 0b641175929d77634675b22e576ecd68d2ec1480 Mon Sep 17 00:00:00 2001
From: Ceshine Lee
Date: Sun, 24 Jul 2022 19:11:45 +0800
Subject: [PATCH] Take `no_embed_class` into account when calling `resize_pos_embed`

---
 timm/models/vision_transformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index 9066a9de..9602355b 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -644,7 +644,7 @@ def checkpoint_filter_fn(state_dict, model, adapt_layer_scale=False):
             v = resize_pos_embed(
                 v,
                 model.pos_embed,
-                getattr(model, 'num_prefix_tokens', 1),
+                0 if getattr(model, 'no_embed_class', False) else getattr(model, 'num_prefix_tokens', 1),
                 model.patch_embed.grid_size
             )
         elif adapt_layer_scale and 'gamma_' in k:
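
Context for the change: in timm's VisionTransformer, a model built with no_embed_class=True allocates pos_embed for the patch positions only (the class/prefix tokens are concatenated after the position embedding is added), so resize_pos_embed must not peel off a prefix token before interpolating the grid. Below is a minimal sketch, not part of the patch, illustrating the difference; it assumes timm >= 0.6, where resize_pos_embed is importable from timm.models.vision_transformer, and uses made-up tensor shapes.

import torch
from timm.models.vision_transformer import resize_pos_embed

# With no_embed_class=True, pos_embed holds patch positions only:
# a 14x14 grid -> 196 entries, with no slot for the class token.
posemb_old = torch.randn(1, 14 * 14, 768)
# Target model uses a 16x16 grid -> 256 entries.
posemb_new = torch.randn(1, 16 * 16, 768)

# Correct (post-patch): every position belongs to the patch grid,
# so pass 0 prefix tokens and interpolate the full 14x14 -> 16x16 grid.
resized = resize_pos_embed(posemb_old, posemb_new, 0, (16, 16))
assert resized.shape == (1, 256, 768)

# Pre-patch behavior: num_prefix_tokens=1 would carry position 0 over as a
# "class token" embedding and try to interpolate the remaining 195 positions,
# which no longer form a square grid, so the resize is wrong.

Passing 0 here mirrors what the model does at runtime: with no_embed_class, pos_embed is added before the prefix tokens are concatenated, so those tokens never consume positional slots.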