diff --git a/timm/models/gen_efficientnet.py b/timm/models/gen_efficientnet.py
index 9a0a6dd4..2541bc6b 100644
--- a/timm/models/gen_efficientnet.py
+++ b/timm/models/gen_efficientnet.py
@@ -1430,7 +1430,7 @@ def efficientnet_b1(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
     """ EfficientNet-B1 """
     default_cfg = default_cfgs['efficientnet_b1']
     # NOTE for train, drop_rate should be 0.2
-    kwargs['drop_connect_rate'] = 0.2  # set when training, TODO add as cmd arg
+    #kwargs['drop_connect_rate'] = 0.2  # set when training, TODO add as cmd arg
     model = _gen_efficientnet(
         channel_multiplier=1.0, depth_multiplier=1.1,
         num_classes=num_classes, in_chans=in_chans, **kwargs)
@@ -1445,7 +1445,7 @@ def efficientnet_b2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
     """ EfficientNet-B2 """
     default_cfg = default_cfgs['efficientnet_b2']
     # NOTE for train, drop_rate should be 0.3
-    kwargs['drop_connect_rate'] = 0.2  # set when training, TODO add as cmd arg
+    #kwargs['drop_connect_rate'] = 0.2  # set when training, TODO add as cmd arg
     model = _gen_efficientnet(
         channel_multiplier=1.1, depth_multiplier=1.2,
         num_classes=num_classes, in_chans=in_chans, **kwargs)
diff --git a/timm/models/resnet.py b/timm/models/resnet.py
index 9a4b22cd..32ff3acf 100644
--- a/timm/models/resnet.py
+++ b/timm/models/resnet.py
@@ -334,7 +334,7 @@ def wide_resnet50_2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
 
 @register_model
 def wide_resnet101_2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
-    """Constructs a Wide ResNet-100-2 model.
+    """Constructs a Wide ResNet-101-2 model.
     The model is the same as ResNet except for the bottleneck number of channels
     which is twice larger in every block. The number of channels in outer 1x1
     convolutions is the same.
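
With the hard-coded assignment commented out, the B1/B2 factories no longer force drop_connect_rate=0.2, so a caller that wants drop connect during training has to pass it in (the cmd-arg wiring is still a TODO in the source). A minimal sketch, assuming drop_rate and drop_connect_rate are both simply forwarded through the factory's **kwargs to _gen_efficientnet as the hunks above show:

from timm.models.gen_efficientnet import efficientnet_b1

# Sketch only, not part of the patch: the 0.2 values mirror the NOTE and the
# commented-out default shown in the diff above.
model = efficientnet_b1(
    pretrained=False,
    num_classes=1000,
    drop_rate=0.2,           # per the NOTE: drop_rate 0.2 for B1 training
    drop_connect_rate=0.2,   # formerly set unconditionally inside the factory
)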