@@ -100,7 +100,7 @@ class PatchEmbed(nn.Module):
            padding=to_2tuple(pad))
        self.norm = nn.LayerNorm(in_chans)

    @register_notrace_function  # reason: dim in control flow
    def forward(self, x: Tensor, size: Tuple[int, int]):
        H, W = size
        dim = len(x.shape)
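
For context on the decorator: `dim = len(x.shape)` is a plain Python int that presumably feeds a branch later in the full method body, and torch.fx symbolic tracing cannot follow data-dependent control flow, so the function has to be registered as an untraced leaf. Below is a minimal sketch of the same pattern using stock `torch.fx.wrap` in place of timm's `register_notrace_function`; the helper name `ensure_spatial` and its body are illustrative assumptions, not the PR's code.

import torch
from torch import Tensor

def ensure_spatial(x: Tensor, H: int, W: int) -> Tensor:
    # If fx tried to trace through this function, x would be a Proxy and
    # the `if` below would raise a TraceError; keeping the function as an
    # untraced leaf sidesteps that.
    if len(x.shape) == 3:                    # (B, H*W, C) token layout
        B, L, C = x.shape
        x = x.transpose(1, 2).reshape(B, C, H, W)
    return x                                 # (B, C, H, W) spatial layout

# Mark the function as a leaf for symbolic tracing; timm's
# register_notrace_function plays the analogous role in timm's
# FX feature-extraction tracer.
torch.fx.wrap('ensure_spatial')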