From a85df349936ae21c2a33c54a1b4a6522f0b0c9d0 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 8 Oct 2021 17:44:13 -0700
Subject: [PATCH] Update lambda_resnet26rpt weights to 78.9, add better
 halonet26t weights at 79.1 with tweak to attention dim

---
 timm/models/byoanet.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/timm/models/byoanet.py b/timm/models/byoanet.py
index 8b629dc4..54c7081d 100644
--- a/timm/models/byoanet.py
+++ b/timm/models/byoanet.py
@@ -45,8 +45,8 @@ default_cfgs = {
 
     'halonet_h1': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'halonet26t': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_256-9b4bf0b3.pth',
-        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_a1h_256-3083328c.pth',
+        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'sehalonet33ts': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sehalonet33ts_256-87e053f9.pth',
         input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
@@ -64,8 +64,8 @@ default_cfgs = {
     'lambda_resnet50ts': _cfg(
         url='', min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8)),
     'lambda_resnet26rpt_256': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_a2h_256-482adad8.pth',
-        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_c_256-ab00292d.pth',
+        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),
 
     'haloregnetz_b': _cfg(
         url='',
@@ -149,7 +149,7 @@ model_cfgs = dict(
         stem_type='tiered',
         stem_pool='maxpool',
         self_attn_layer='halo',
-        self_attn_kwargs=dict(block_size=8, halo_size=2, dim_head=16)
+        self_attn_kwargs=dict(block_size=8, halo_size=2)
     ),
     sehalonet33ts=ByoModelCfg(
         blocks=(
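
Note: a minimal sketch of how the patched configs surface to users, assuming a timm
build that contains this commit so that pretrained=True resolves to the new
halonet26t_a1h and lambda_resnet26rpt_c checkpoints. The weight filenames come from
the diff above; everything else uses only the public timm.create_model API and the
per-model default_cfg dict.

import timm
import torch

# pretrained=True downloads the updated a1h halonet26t weights (79.1 top-1
# per the commit message) from the v0.1-attn-weights release.
model = timm.create_model('halonet26t', pretrained=True)
model.eval()

# The patched default cfg is visible on the model: lambda_resnet26rpt_256
# now carries crop_pct=0.94 for its fixed 256x256 eval protocol, while
# halonet26t dropped its explicit crop_pct override.
lam = timm.create_model('lambda_resnet26rpt_256', pretrained=True)
print(lam.default_cfg['crop_pct'], lam.default_cfg['input_size'])

# Forward pass at the 256x256 input size declared in default_cfgs.
with torch.no_grad():
    out = model(torch.randn(1, 3, 256, 256))
print(out.shape)  # torch.Size([1, 1000])

The dim_head=16 removal in the third hunk means halonet26t's halo attention falls
back to the layer's default head-dimension computation rather than a fixed 16,
which is the "tweak to attention dim" the subject line refers to.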