diff --git a/timm/models/layers/padding.py b/timm/models/layers/padding.py
index 3b035c58..1f8e1c8e 100644
--- a/timm/models/layers/padding.py
+++ b/timm/models/layers/padding.py
@@ -17,7 +17,7 @@ def get_padding(kernel_size: int, stride: int = 1, dilation: int = 1, **_) -> in
 
 # Calculate asymmetric TensorFlow-like 'SAME' padding for a convolution
 def get_same_padding(x: int, k: int, s: int, d: int):
-    return max((math.ceil(torch.true_divide(x / s)) - 1) * s + (k - 1) * d + 1 - x, 0)
+    return max((math.ceil(torch.true_divide(x, s)) - 1) * s + (k - 1) * d + 1 - x, 0)
 
 
 # Can SAME padding for given args be done statically?
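
For reference, a minimal standalone sketch of the 'SAME' padding formula the patched function computes. The helper name `same_padding_sketch` and the plain-Python `math.ceil(x / s)` form are illustrative assumptions for clarity; they are not the library's exact code, which divides via `torch.true_divide` as shown in the diff.

```python
import math

def same_padding_sketch(x: int, k: int, s: int = 1, d: int = 1) -> int:
    # Total padding along one spatial dimension so the output size equals
    # ceil(x / s), mirroring TensorFlow 'SAME' behaviour:
    #   pad = max((ceil(x / s) - 1) * s + (k - 1) * d + 1 - x, 0)
    return max((math.ceil(x / s) - 1) * s + (k - 1) * d + 1 - x, 0)

# Example: a 3x3 kernel with stride 2 on a 224-wide input needs 1 pixel of
# total padding, which the caller splits asymmetrically (0 left / 1 right).
print(same_padding_sketch(224, 3, 2, 1))  # -> 1
```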