From faae93e62d8c093e85da725e480cea99c1b0a310 Mon Sep 17 00:00:00 2001
From: Christoph Reich <34400551+ChristophReich1996@users.noreply.github.com>
Date: Thu, 28 Jul 2022 19:08:08 -0400
Subject: [PATCH] Fix typo in PositionalEncodingFourier

---
 timm/models/xcit.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/timm/models/xcit.py b/timm/models/xcit.py
index d70500ce..6802fc84 100644
--- a/timm/models/xcit.py
+++ b/timm/models/xcit.py
@@ -104,7 +104,7 @@ default_cfgs = {
 @register_notrace_module  # reason: FX can't symbolically trace torch.arange in forward method
 class PositionalEncodingFourier(nn.Module):
     """
-    Positional encoding relying on a fourier kernel matching the one used in the "Attention is all of Need" paper.
+    Positional encoding relying on a fourier kernel matching the one used in the "Attention is all you Need" paper.
     Based on the official XCiT code
         - https://github.com/facebookresearch/xcit/blob/master/xcit.py
     """