More adjustments to ByoaNet models for further experiments.

pull/625/head
Ross Wightman 4 years ago
parent f480734ad5
commit 3ba6b55cb2

@@ -46,18 +46,18 @@ def _cfg(url='', **kwargs):
default_cfgs = {
# GPU-Efficient (ResNet) weights
'botnet26t_256': _cfg(url='', fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
'botnet50t_224': _cfg(url='', fixed_input_size=True),
'botnet50t_c4c5_224': _cfg(url='', fixed_input_size=True),
'botnet50ts_256': _cfg(url='', fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
'halonet_h1': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'halonet_h1_c4c5': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'halonet26t': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'halonet50t': _cfg(url='', min_input_size=(3, 224, 224)),
'halonet50ts': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'lambda_resnet26t': _cfg(url='', min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8)),
'lambda_resnet50t': _cfg(url='', min_input_size=(3, 128, 128)),
'swinnet26t_256': _cfg(url='', fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
'swinnet50ts_256': _cfg(url='', fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
}
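
For context (not part of the diff): each entry above is produced by the _cfg helper named in the hunk header, which fills in timm's standard pretrained-config fields and lets the per-model kwargs override them. A minimal sketch of that pattern, with default values that are typical timm conventions rather than copied from this file:

from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD

def _cfg_sketch(url='', **kwargs):
    # representative defaults; the real _cfg in byoanet.py may differ in details
    return {
        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'crop_pct': 0.875, 'interpolation': 'bicubic',
        'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
        **kwargs,  # per-model overrides, e.g. fixed_input_size, input_size, min_input_size
    }

# e.g. the botnet26t_256 entry above expands to a dict with input_size=(3, 256, 256), pool_size=(8, 8)
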
@@ -110,38 +110,18 @@ model_cfgs = dict(
self_attn_fixed_size=True,
self_attn_kwargs=dict()
),
botnet50t=ByoaCfg(
botnet50ts=ByoaCfg(
blocks=(
ByoaBlocksCfg(type='bottle', d=3, c=256, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=6, c=1024, s=2, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=6, c=1024, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='self_attn', d=3, c=2048, s=1, gs=0, br=0.25),
),
stem_chs=64,
stem_type='tiered',
stem_pool='',
num_features=0,
self_attn_layer='bottleneck',
self_attn_fixed_size=True,
self_attn_kwargs=dict()
),
botnet50t_c4c5=ByoaCfg(
blocks=(
ByoaBlocksCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
(
ByoaBlocksCfg(type='self_attn', d=1, c=1024, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=5, c=1024, s=1, gs=0, br=0.25),
),
(
ByoaBlocksCfg(type='self_attn', d=1, c=2048, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=2, c=2048, s=1, gs=0, br=0.25),
)
),
stem_chs=64,
stem_type='tiered',
stem_pool='maxpool',
num_features=0,
act_layer='silu',
self_attn_layer='bottleneck',
self_attn_fixed_size=True,
self_attn_kwargs=dict()
@@ -189,19 +169,20 @@ model_cfgs = dict(
self_attn_layer='halo',
self_attn_kwargs=dict(block_size=8, halo_size=2) # intended for 256x256 res
),
halonet50t=ByoaCfg(
halonet50ts=ByoaCfg(
blocks=(
ByoaBlocksCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=6, c=1024, s=2, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=6, c=1024, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='self_attn', d=3, c=2048, s=2, gs=0, br=0.25),
),
stem_chs=64,
stem_type='tiered',
stem_pool='maxpool',
num_features=0,
act_layer='silu',
self_attn_layer='halo',
self_attn_kwargs=dict(block_size=7, halo_size=2)
self_attn_kwargs=dict(block_size=8, halo_size=2)
),
lambda_resnet26t=ByoaCfg(
@@ -234,9 +215,24 @@ model_cfgs = dict(
),
swinnet26t=ByoaCfg(
blocks=(
ByoaBlocksCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=2, c=512, s=2, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=2, c=1024, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='self_attn', d=2, c=2048, s=2, gs=0, br=0.25),
),
stem_chs=64,
stem_type='tiered',
stem_pool='maxpool',
num_features=0,
self_attn_layer='swin',
self_attn_fixed_size=True,
self_attn_kwargs=dict(win_size=8)
),
swinnet50ts=ByoaCfg(
blocks=(
ByoaBlocksCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),
ByoaBlocksCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=4, c=512, s=2, gs=0, br=0.25),
interleave_attn(types=('bottle', 'self_attn'), every=1, d=2, c=1024, s=2, gs=0, br=0.25),
ByoaBlocksCfg(type='self_attn', d=3, c=2048, s=2, gs=0, br=0.25),
),
@@ -244,6 +240,7 @@ model_cfgs = dict(
stem_type='tiered',
stem_pool='maxpool',
num_features=0,
act_layer='silu',
self_attn_layer='swin',
self_attn_fixed_size=True,
self_attn_kwargs=dict(win_size=8)
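
For context (not part of the diff): interleave_attn, defined earlier in byoanet.py, expands into a tuple of ByoaBlocksCfg entries so that conv ('bottle') and self-attention blocks alternate within a stage. A rough, self-contained sketch of the idea, using a stand-in dataclass and assumed argument handling rather than the file's exact helper:

from dataclasses import dataclass

@dataclass
class BlocksCfgSketch:  # stand-in for ByoaBlocksCfg, illustration only
    type: str
    d: int = 1
    c: int = 256
    s: int = 1
    gs: int = 0
    br: float = 1.0

def interleave_attn_sketch(types, every, d, **kwargs):
    # place the self-attn block type at every (every+1)-th position, conv blocks elsewhere
    conv_type, attn_type = types
    blocks = []
    for i in range(d):
        block_type = attn_type if (i + 1) % (every + 1) == 0 else conv_type
        blocks.append(BlocksCfgSketch(type=block_type, d=1, **kwargs))
    return tuple(blocks)

# interleave_attn_sketch(('bottle', 'self_attn'), every=1, d=6, c=1024, s=2, gs=0, br=0.25)
# -> bottle, self_attn, bottle, self_attn, bottle, self_attn (six blocks for that stage)
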
@@ -415,19 +412,11 @@ def botnet26t_256(pretrained=False, **kwargs):
@register_model
def botnet50t_224(pretrained=False, **kwargs):
def botnet50ts_256(pretrained=False, **kwargs):
""" Bottleneck Transformer w/ ResNet50-T backbone. Bottleneck attn in final stage.
"""
kwargs.setdefault('img_size', 224)
return _create_byoanet('botnet50t_224', 'botnet50t', pretrained=pretrained, **kwargs)
@register_model
def botnet50t_c4c5_224(pretrained=False, **kwargs):
""" Bottleneck Transformer w/ ResNet50-T backbone. Bottleneck attn in last two stages.
"""
kwargs.setdefault('img_size', 224)
return _create_byoanet('botnet50t_c4c5_224', 'botnet50t_c4c5', pretrained=pretrained, **kwargs)
kwargs.setdefault('img_size', 256)
return _create_byoanet('botnet50ts_256', 'botnet50ts', pretrained=pretrained, **kwargs)
@register_model
@@ -454,10 +443,10 @@ def halonet26t(pretrained=False, **kwargs):
@register_model
def halonet50t(pretrained=False, **kwargs):
def halonet50ts(pretrained=False, **kwargs):
""" HaloNet w/ a ResNet50-t backbone, Hallo attention in final stage
"""
return _create_byoanet('halonet50t', pretrained=pretrained, **kwargs)
return _create_byoanet('halonet50ts', pretrained=pretrained, **kwargs)
@register_model
@@ -480,3 +469,11 @@ def swinnet26t_256(pretrained=False, **kwargs):
"""
kwargs.setdefault('img_size', 256)
return _create_byoanet('swinnet26t_256', 'swinnet26t', pretrained=pretrained, **kwargs)
@register_model
def swinnet50ts_256(pretrained=False, **kwargs):
"""
"""
kwargs.setdefault('img_size', 256)
return _create_byoanet('swinnet50ts_256', 'swinnet50ts', pretrained=pretrained, **kwargs)
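
For reference, once a timm install includes this commit the renamed entry points behave like any other registered model; the urls in default_cfgs above are still empty, so pretrained weights are not available and pretrained must stay False. A quick smoke-test sketch:

import torch
import timm

# names registered above; no pretrained weights yet (cfg urls are empty)
model = timm.create_model('botnet50ts_256', pretrained=False)
model.eval()

with torch.no_grad():
    out = model(torch.randn(1, 3, 256, 256))  # the *_256 configs expect 256x256 input
print(out.shape)  # torch.Size([1, 1000])
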
