From f28170df3f461ed3a8dfb89246c8bf1e6280feb1 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 1 Jun 2020 17:26:42 -0700 Subject: [PATCH] Fix an untested change, remove a debug print --- timm/models/efficientnet_blocks.py | 2 +- timm/models/layers/norm_act.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/timm/models/efficientnet_blocks.py b/timm/models/efficientnet_blocks.py index 5f64dc37..236623ff 100644 --- a/timm/models/efficientnet_blocks.py +++ b/timm/models/efficientnet_blocks.py @@ -220,7 +220,7 @@ class InvertedResidual(nn.Module): has_se = se_ratio is not None and se_ratio > 0. self.has_residual = (in_chs == out_chs and stride == 1) and not noskip self.drop_path_rate = drop_path_rate - print(act_layer) + # Point-wise expansion self.conv_pw = create_conv2d(in_chs, mid_chs, exp_kernel_size, padding=pad_type, **conv_kwargs) self.bn1 = norm_layer(mid_chs, **norm_kwargs) diff --git a/timm/models/layers/norm_act.py b/timm/models/layers/norm_act.py index 48c4d6da..ad1b1eca 100644 --- a/timm/models/layers/norm_act.py +++ b/timm/models/layers/norm_act.py @@ -60,7 +60,7 @@ class BatchNormAct2d(nn.BatchNorm2d): if torch.jit.is_scripting(): x = self._forward_jit(x) else: - self._forward_python(x) + x = self._forward_python(x) if self.act is not None: x = self.act(x) return x