From 3785c234d7e67b881cf68f1f8e1ceb0c53c4da53 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 2 Dec 2022 20:26:51 -0800
Subject: [PATCH] Remove clip vit models that won't be ft and comment two that aren't uploaded yet

---
 timm/models/vision_transformer.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index eea7ebb8..4effbed6 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -805,12 +805,6 @@ default_cfgs = generate_default_cfgs({
     'vit_base_patch32_clip_224.laion2b_ft_in1k': _cfg(
         hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD),
-    'vit_base_patch32_clip_384.laion2b_ft_in1k': _cfg(
-        hf_hub_id='timm/vit_base_patch32_clip_384.laion2b_ft_in1k',
-        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 384, 384)),
-    'vit_base_patch32_clip_448.laion2b_ft_in1k': _cfg(
-        hf_hub_id='timm/vit_base_patch32_clip_448.laion2b_ft_in1k',
-        mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0, input_size=(3, 448, 448)),
     'vit_base_patch16_clip_224.laion2b_ft_in1k': _cfg(
         hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in1k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, crop_pct=1.0),
@@ -865,7 +859,7 @@ default_cfgs = generate_default_cfgs({
         crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),
 
     'vit_base_patch32_clip_224.laion2b_ft_in12k': _cfg(
-        hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k',
+        #hf_hub_id='timm/vit_base_patch32_clip_224.laion2b_ft_in12k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821),
     'vit_base_patch16_clip_224.laion2b_ft_in12k': _cfg(
         hf_hub_id='timm/vit_base_patch16_clip_224.laion2b_ft_in12k',
@@ -924,7 +918,7 @@ default_cfgs = generate_default_cfgs({
         crop_pct=1.0, input_size=(3, 336, 336), crop_mode='squash'),
 
     'vit_base_patch32_clip_224.openai_ft_in12k': _cfg(
-        hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k',
+        #hf_hub_id='timm/vit_base_patch32_clip_224.openai_ft_in12k',
         mean=OPENAI_CLIP_MEAN, std=OPENAI_CLIP_STD, num_classes=11821),
     'vit_base_patch16_clip_224.openai_ft_in12k': _cfg(
         hf_hub_id='timm/vit_base_patch16_clip_224.openai_ft_in12k',
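
For reference, a minimal usage sketch (not part of the patch): it instantiates one of the entries above that keeps its hf_hub_id, via the standard timm.create_model entry point. The model name is taken from the first hunk; availability of the published weights on the Hugging Face Hub is assumed.

# Minimal sketch, assuming `timm` is installed and the
# 'vit_base_patch32_clip_224.laion2b_ft_in1k' weights are published on the Hub.
import timm
import torch

model = timm.create_model('vit_base_patch32_clip_224.laion2b_ft_in1k', pretrained=True)
model.eval()

with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))  # dummy batch at the config's 224x224 input size
print(logits.shape)  # torch.Size([1, 1000]) -- ImageNet-1k fine-tuned head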