From 883fa2eeaa22c4b94e1937dab7f85b8b4b8bd8d0 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Tue, 8 Nov 2022 15:44:40 -0800
Subject: [PATCH] Add fine-tuned B/16 224x224 in1k clip models

---
 timm/models/vision_transformer.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index 6b79cd28..dc2ec97a 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -761,7 +761,7 @@ default_cfgs = generate_defaults({
     'vit_base_patch16_clip_224.laion2b': _cfg(
         #hf_hub_id='laion/CLIP-ViT-B-16-laion2B-s34B-b88K',
         hf_hub_filename='open_clip_pytorch_model.bin',
-        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=512),
+        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, num_classes=512),
     'vit_large_patch14_clip_224.laion2b': _cfg(
         hf_hub_id='laion/CLIP-ViT-L-14-laion2B-s32B-b82K',
         hf_hub_filename='open_clip_pytorch_model.bin',
@@ -782,8 +782,8 @@ default_cfgs = generate_defaults({
         hf_hub_id='timm/vit_base_patch32_clip_384.laion2b_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)),
     'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg(
-        #hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in1k',
-        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),
+        hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in1k',
+        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),
     'vit_base_patch16_clip_384.laion2b_ft_in1k': _cfg(
         #hf_hub_id='timm/vit_base_patch16_clip_384.laion2b_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)),
@@ -814,7 +814,7 @@ default_cfgs = generate_defaults({
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 448, 448)),
     'vit_base_patch16_clip_224.laion2b_ft_in12k_in1k': _cfg(
         #hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in12k_in1k',
-        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),
+        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),
     'vit_base_patch16_clip_384.laion2b_ft_in12k_in1k': _cfg(
         #hf_hub_id='timm/vit_base_patch16_clip_384.laion2b_ft_in12k_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)),
@@ -858,7 +858,7 @@ default_cfgs = generate_defaults({
         #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),
     'vit_base_patch16_clip_224.openai_ft_in1k': _cfg(
-        #hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in1k',
+        hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),
     'vit_large_patch14_clip_224.openai_ft_in1k': _cfg(
         hf_hub_id='timm/vit_large_patch14_clip_224.openai_ft_in1k',
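
Usage note (not part of the patch): with the hf_hub_id entries enabled above, the
224x224 fine-tuned B/16 weights resolve from the Hugging Face Hub by model name.
Below is a minimal sketch of loading one of them, assuming a timm version with HF
hub support (plus the huggingface_hub package) and network access to fetch the
weights; resolve_data_config/create_transform is the standard timm eval-transform
pattern, used here to pick up the mean/std and crop_pct set in the cfgs above.

    import torch
    import timm
    from timm.data import resolve_data_config
    from timm.data.transforms_factory import create_transform

    # Model name matches the hf_hub_id enabled in this patch.
    model = timm.create_model('vit_base_patch16_clip_224.laion2b_ft_in1k', pretrained=True)
    model.eval()

    # Build the eval transform from the model's pretrained cfg
    # (OPENAI_CLIP_MEAN/STD normalization, crop_pct=1.0, 224x224 input).
    config = resolve_data_config({}, model=model)
    transform = create_transform(**config)

    # Dummy forward pass; a real input would be transform(pil_image).unsqueeze(0).
    x = torch.randn(1, 3, 224, 224)
    with torch.no_grad():
        logits = model(x)  # shape (1, 1000): ImageNet-1k class logits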