Update davit.py

pull/1630/head
Fredo Guan 3 years ago
parent f47f6fce31
commit dbef70fc31

@@ -411,32 +411,35 @@ class DaViT(nn.Module):
         for stage_id, stage_param in enumerate(self.architecture):
             layer_offset_id = len(list(itertools.chain(*self.architecture[:stage_id])))
-            stage = nn.Sequential([
-                nn.Sequential([
-                    ChannelBlock(
-                        dim=self.embed_dims[item],
-                        num_heads=self.num_heads[item],
-                        mlp_ratio=mlp_ratio,
-                        qkv_bias=qkv_bias,
-                        drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id],
-                        norm_layer=nn.LayerNorm,
-                        ffn=ffn,
-                        cpe_act=cpe_act
-                    ) if attention_type == 'channel' else
-                    SpatialBlock(
-                        dim=self.embed_dims[item],
-                        num_heads=self.num_heads[item],
-                        mlp_ratio=mlp_ratio,
-                        qkv_bias=qkv_bias,
-                        drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id],
-                        norm_layer=nn.LayerNorm,
-                        ffn=ffn,
-                        cpe_act=cpe_act,
-                        window_size=window_size,
-                    ) if attention_type == 'spatial' else None
-                    for attention_id, attention_type in enumerate(attention_types)]
-                ) for layer_id, item in enumerate(stage_param)
-            ])
+            stage = nn.Sequential(
+                nn.ModuleList([
+                    nn.Sequential(
+                        nn.ModuleList([
+                            ChannelBlock(
+                                dim=self.embed_dims[item],
+                                num_heads=self.num_heads[item],
+                                mlp_ratio=mlp_ratio,
+                                qkv_bias=qkv_bias,
+                                drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id],
+                                norm_layer=nn.LayerNorm,
+                                ffn=ffn,
+                                cpe_act=cpe_act
+                            ) if attention_type == 'channel' else
+                            SpatialBlock(
+                                dim=self.embed_dims[item],
+                                num_heads=self.num_heads[item],
+                                mlp_ratio=mlp_ratio,
+                                qkv_bias=qkv_bias,
+                                drop_path=dpr[2 * (layer_id + layer_offset_id) + attention_id],
+                                norm_layer=nn.LayerNorm,
+                                ffn=ffn,
+                                cpe_act=cpe_act,
+                                window_size=window_size,
+                            ) if attention_type == 'spatial' else None
+                            for attention_id, attention_type in enumerate(attention_types)])
+                    ) for layer_id, item in enumerate(stage_param)
+                ])
+            )
             self.stages.add_module(f'stage_{stage_id}', stage)
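The change wraps each block list comprehension in nn.ModuleList before handing it to nn.Sequential. As a minimal sketch of why that matters (plain PyTorch, with nn.Linear standing in for the ChannelBlock/SpatialBlock modules built above, not the DaViT code itself): nn.Sequential accepts modules as positional arguments or an OrderedDict, so passing a bare Python list raises a TypeError, while nn.ModuleList takes a list directly and registers every element as a submodule so its parameters are tracked.

    # Minimal sketch, assuming plain PyTorch; nn.Linear is a hypothetical
    # stand-in for the attention blocks in the comprehension above.
    import torch.nn as nn

    blocks = [nn.Linear(4, 4) for _ in range(3)]

    # Old pattern: nn.Sequential([...]) receives a bare list, which
    # add_module rejects because a list is not an nn.Module.
    try:
        nn.Sequential(blocks)
    except TypeError as err:
        print('nn.Sequential(list):', err)

    # New pattern: nn.ModuleList accepts the list and registers each
    # element, so the parameters are visible to the optimizer.
    holder = nn.ModuleList(blocks)
    print(sum(p.numel() for p in holder.parameters()))  # 60: 3 x (4*4 weight + 4 bias)

Note that nn.ModuleList only registers the blocks; it has no forward of its own, so iteration over the blocks still has to happen explicitly in the model's forward pass.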
