Update lambda_resnet26rpt weights to 78.9, add better halonet26t weights at 79.1 with tweak to attention dim

Branch: pull/910/head
Author: Ross Wightman, 3 years ago
Parent: 38804c721b
Commit: a85df34993

@@ -45,8 +45,8 @@ default_cfgs = {
     'halonet_h1': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'halonet26t': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_256-9b4bf0b3.pth',
-        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_a1h_256-3083328c.pth',
+        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'sehalonet33ts': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sehalonet33ts_256-87e053f9.pth',
         input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
@@ -64,8 +64,8 @@ default_cfgs = {
         url='',
         min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8)),
     'lambda_resnet26rpt_256': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_a2h_256-482adad8.pth',
-        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_c_256-ab00292d.pth',
+        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),
     'haloregnetz_b': _cfg(
         url='',
@@ -149,7 +149,7 @@ model_cfgs = dict(
         stem_type='tiered',
         stem_pool='maxpool',
         self_attn_layer='halo',
-        self_attn_kwargs=dict(block_size=8, halo_size=2, dim_head=16)
+        self_attn_kwargs=dict(block_size=8, halo_size=2)
     ),
     sehalonet33ts=ByoModelCfg(
         blocks=(
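
For reference, a minimal usage sketch (not part of this commit) showing how the updated checkpoints could be loaded with timm; the model names come from the configs above, and resolve_data_config is assumed to pick up the per-model crop_pct and input_size from the pretrained cfg:

import timm
from timm.data import resolve_data_config

# Create the retrained halonet26t (a1h weights, ~79.1 top-1 per the commit message)
model = timm.create_model('halonet26t', pretrained=True).eval()

# Resolve eval-time preprocessing from the model's pretrained cfg
# (picks up input_size=(3, 256, 256); lambda_resnet26rpt_256 now also sets crop_pct=0.94)
cfg = resolve_data_config({}, model=model)
print(cfg)

# The updated lambda_resnet26rpt_256 weights (~78.9 top-1) load the same way
lambda_model = timm.create_model('lambda_resnet26rpt_256', pretrained=True).eval()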
