@@ -100,7 +100,7 @@ class PatchEmbed(nn.Module):
            padding=to_2tuple(pad))
        self.norm = nn.LayerNorm(in_chans)

    @register_notrace_function # reason: dim in control sequence
    def forward(self, x : Tensor, size: Tuple[int, int]):
        H, W = size
        dim = len(x.shape)
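For context on the decorator in the hunk above: `register_notrace_function` is timm's hook for telling its FX feature-extraction utilities to treat a callable as a leaf, so the tracer records a single call instead of stepping through Python control flow that depends on tensor properties (here, `dim = len(x.shape)` feeding an `if`). Below is a minimal sketch of the usual module-level usage pattern, not this PR's code: the import path and the helper name `reshape_tokens_to_map` are assumptions for illustration only.

```python
from typing import Tuple

import torch
from torch import Tensor

# Import path in recent timm releases; older versions exposed this from
# timm.models.fx_features instead (assumption - check your timm version).
from timm.models._features_fx import register_notrace_function


@register_notrace_function  # keep FX tracing out of the dim-dependent branch below
def reshape_tokens_to_map(x: Tensor, size: Tuple[int, int]) -> Tensor:
    """Hypothetical helper: accept (B, H*W, C) tokens or (B, C, H, W) maps."""
    H, W = size
    if len(x.shape) == 3:  # token-sequence layout, as checked in PatchEmbed.forward above
        B, _, C = x.shape
        x = x.transpose(1, 2).reshape(B, C, H, W)
    return x


# The registered function appears as a single node when timm builds an FX
# feature extractor, so the `if` above never has to be traced symbolically.
print(reshape_tokens_to_map(torch.randn(2, 7 * 7, 96), (7, 7)).shape)  # torch.Size([2, 96, 7, 7])
```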