Consistency in model entrypoints

* move pretrained entrypoint arg to first position, to be closer to torchvision/hub conventions
* change DPN weight URLs to my GitHub release location
Ross Wightman 5 years ago
parent b20bb58284
commit 6cc214bd7a
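
With pretrained now the first argument, these entrypoints can be called positionally in the same way as torchvision/hub constructors. A minimal usage sketch (not part of the commit; the import path assumes the models package is importable as timm.models at this point in the repo's history — adjust if the layout differs):

# Usage sketch only; the import path is an assumption about the package layout.
from timm.models import resnet50, densenet121

model = resnet50(True)                                            # positional pretrained, torchvision-style
model = densenet121(pretrained=True, num_classes=10, in_chans=1)  # keyword use unchanged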

@ -43,7 +43,7 @@ def _filter_pretrained(state_dict):
return state_dict
def densenet121(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def densenet121(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
r"""Densenet-121 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
@ -56,7 +56,7 @@ def densenet121(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def densenet169(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def densenet169(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
r"""Densenet-169 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
@ -69,7 +69,7 @@ def densenet169(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def densenet201(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def densenet201(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
r"""Densenet-201 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""
@ -82,7 +82,7 @@ def densenet201(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def densenet161(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def densenet161(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
r"""Densenet-201 model from
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
"""

@ -11,6 +11,7 @@ from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
from .helpers import load_pretrained
@ -31,81 +32,87 @@ def _cfg(url=''):
default_cfgs = {
'dpn68': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn68-66bebafa7.pth'),
'dpn68b_extra': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn68b_extra-84854c156.pth'),
'dpn92_extra': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn92_extra-b040e4a9b.pth'),
'dpn98': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn98-5b90dec4d.pth'),
'dpn131': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn131-71dfe43e0.pth'),
'dpn107_extra': _cfg(url='http://data.lip6.fr/cadene/pretrainedmodels/dpn107_extra-1ac7121e2.pth')
'dpn68': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68-66bebafa7.pth'),
'dpn68b_extra': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn68b_extra-84854c156.pth'),
'dpn92_extra': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn92_extra-b040e4a9b.pth'),
'dpn98': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn98-5b90dec4d.pth'),
'dpn131': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn131-71dfe43e0.pth'),
'dpn107_extra': _cfg(
url='https://github.com/rwightman/pytorch-dpn-pretrained/releases/download/v0.1/dpn107_extra-1ac7121e2.pth')
}
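
The only change to these configs is the weight URL host (data.lip6.fr moved to a GitHub release). A rough sketch of how a config of this shape is typically consumed — not the repo's actual _cfg/load_pretrained implementation, whose field set and behavior may differ:

import torch.utils.model_zoo as model_zoo

def _cfg_sketch(url=''):
    # Stand-in for dpn.py's _cfg helper; the real config carries more fields.
    return {'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224)}

def load_pretrained_sketch(model, cfg):
    # Hypothetical loader: fetch the checkpoint from cfg['url'] and apply it.
    state_dict = model_zoo.load_url(cfg['url'])
    model.load_state_dict(state_dict)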
def dpn68(num_classes=1000, in_chans=3, pretrained=False):
def dpn68(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn68']
model = DPN(
small=True, num_init_features=10, k_r=128, groups=32,
k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
return model
def dpn68b(num_classes=1000, in_chans=3, pretrained=False):
def dpn68b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn68b_extra']
model = DPN(
small=True, num_init_features=10, k_r=128, groups=32,
b=True, k_sec=(3, 4, 12, 3), inc_sec=(16, 32, 32, 64),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
return model
def dpn92(num_classes=1000, in_chans=3, pretrained=False):
def dpn92(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn92_extra']
model = DPN(
num_init_features=64, k_r=96, groups=32,
k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
return model
def dpn98(num_classes=1000, in_chans=3, pretrained=False):
def dpn98(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn98']
model = DPN(
num_init_features=96, k_r=160, groups=40,
k_sec=(3, 6, 20, 3), inc_sec=(16, 32, 32, 128),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
return model
def dpn131(num_classes=1000, in_chans=3, pretrained=False):
def dpn131(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn131']
model = DPN(
num_init_features=128, k_r=160, groups=40,
k_sec=(4, 8, 28, 3), inc_sec=(16, 32, 32, 128),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
return model
def dpn107(num_classes=1000, in_chans=3, pretrained=False):
def dpn107(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['dpn107_extra']
model = DPN(
num_init_features=128, k_r=200, groups=50,
k_sec=(4, 8, 20, 3), inc_sec=(20, 64, 64, 128),
num_classes=num_classes, in_chans=in_chans)
num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)
@ -220,9 +227,11 @@ class DualPathBlock(nn.Module):
class DPN(nn.Module):
def __init__(self, small=False, num_init_features=64, k_r=96, groups=32,
b=False, k_sec=(3, 4, 20, 3), inc_sec=(16, 32, 24, 128),
num_classes=1000, in_chans=3, fc_act=nn.ELU(inplace=True)):
num_classes=1000, in_chans=3, drop_rate=0., global_pool='avg', fc_act=nn.ELU()):
super(DPN, self).__init__()
self.num_classes = num_classes
self.drop_rate = drop_rate
self.global_pool = global_pool
self.b = b
bw_factor = 1 if small else 4
@ -285,8 +294,9 @@ class DPN(nn.Module):
def get_classifier(self):
return self.classifier
def reset_classifier(self, num_classes):
def reset_classifier(self, num_classes, global_pool='avg'):
self.num_classes = num_classes
self.global_pool = global_pool
del self.classifier
if num_classes:
self.classifier = nn.Conv2d(self.num_features, num_classes, kernel_size=1, bias=True)
@ -296,11 +306,13 @@ class DPN(nn.Module):
def forward_features(self, x, pool=True):
x = self.features(x)
if pool:
x = select_adaptive_pool2d(x, pool_type='avg')
x = select_adaptive_pool2d(x, pool_type=self.global_pool)
return x
def forward(self, x):
x = self.forward_features(x)
if self.drop_rate > 0.:
x = F.dropout(x, p=self.drop_rate, training=self.training)
out = self.classifier(x)
return out.view(out.size(0), -1)
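
Since the entrypoints now forward **kwargs into DPN, the new drop_rate and global_pool constructor args can be set at creation time, and reset_classifier accepts the pooling type as well. A hedged usage sketch (import path assumed, not shown in the diff):

from timm.models import dpn68  # assumed import path

model = dpn68(pretrained=False, num_classes=100, drop_rate=0.2, global_pool='avg')

# Swap the classifier head later; the pooling type is now part of the reset.
model.reset_classifier(num_classes=10, global_pool='avg')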

@ -1157,7 +1157,7 @@ def _gen_efficientnet(channel_multiplier=1.0, depth_multiplier=1.0, num_classes=
return model
def mnasnet_050(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def mnasnet_050(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 0.5. """
default_cfg = default_cfgs['mnasnet_050']
model = _gen_mnasnet_b1(0.5, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1167,7 +1167,7 @@ def mnasnet_050(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_075(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_075(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 0.75. """
default_cfg = default_cfgs['mnasnet_075']
model = _gen_mnasnet_b1(0.75, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1177,7 +1177,7 @@ def mnasnet_075(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 1.0. """
default_cfg = default_cfgs['mnasnet_100']
model = _gen_mnasnet_b1(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1187,12 +1187,12 @@ def mnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_b1(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_b1(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 1.0. """
return mnasnet_100(num_classes, in_chans, pretrained, **kwargs)
def tflite_mnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def tflite_mnasnet_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 1.0. """
default_cfg = default_cfgs['tflite_mnasnet_100']
# these two args are for compat with tflite pretrained weights
@ -1205,7 +1205,7 @@ def tflite_mnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_140(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_140(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet B1, depth multiplier of 1.4 """
default_cfg = default_cfgs['mnasnet_140']
model = _gen_mnasnet_b1(1.4, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1215,7 +1215,7 @@ def mnasnet_140(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def semnasnet_050(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def semnasnet_050(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 0.5 """
default_cfg = default_cfgs['semnasnet_050']
model = _gen_mnasnet_a1(0.5, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1225,7 +1225,7 @@ def semnasnet_050(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def semnasnet_075(num_classes, in_chans=3, pretrained=False, **kwargs):
def semnasnet_075(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 0.75. """
default_cfg = default_cfgs['semnasnet_075']
model = _gen_mnasnet_a1(0.75, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1235,7 +1235,7 @@ def semnasnet_075(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def semnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def semnasnet_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.0. """
default_cfg = default_cfgs['semnasnet_100']
model = _gen_mnasnet_a1(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1245,12 +1245,12 @@ def semnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_a1(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_a1(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.0. """
return semnasnet_100(num_classes, in_chans, pretrained, **kwargs)
def tflite_semnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def tflite_semnasnet_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1, depth multiplier of 1.0. """
default_cfg = default_cfgs['tflite_semnasnet_100']
# these two args are for compat with tflite pretrained weights
@ -1263,7 +1263,7 @@ def tflite_semnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def semnasnet_140(num_classes, in_chans=3, pretrained=False, **kwargs):
def semnasnet_140(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.4. """
default_cfg = default_cfgs['semnasnet_140']
model = _gen_mnasnet_a1(1.4, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1273,7 +1273,7 @@ def semnasnet_140(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mnasnet_small(num_classes, in_chans=3, pretrained=False, **kwargs):
def mnasnet_small(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MNASNet Small, depth multiplier of 1.0. """
default_cfg = default_cfgs['mnasnet_small']
model = _gen_mnasnet_small(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1283,7 +1283,7 @@ def mnasnet_small(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mobilenetv1_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def mobilenetv1_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MobileNet V1 """
default_cfg = default_cfgs['mobilenetv1_100']
model = _gen_mobilenet_v1(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1293,7 +1293,7 @@ def mobilenetv1_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mobilenetv2_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def mobilenetv2_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MobileNet V2 """
default_cfg = default_cfgs['mobilenetv2_100']
model = _gen_mobilenet_v2(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1303,7 +1303,7 @@ def mobilenetv2_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mobilenetv3_050(num_classes, in_chans=3, pretrained=False, **kwargs):
def mobilenetv3_050(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MobileNet V3 """
default_cfg = default_cfgs['mobilenetv3_050']
model = _gen_mobilenet_v3(0.5, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1313,7 +1313,7 @@ def mobilenetv3_050(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mobilenetv3_075(num_classes, in_chans=3, pretrained=False, **kwargs):
def mobilenetv3_075(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MobileNet V3 """
default_cfg = default_cfgs['mobilenetv3_075']
model = _gen_mobilenet_v3(0.75, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1323,7 +1323,7 @@ def mobilenetv3_075(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def mobilenetv3_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def mobilenetv3_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" MobileNet V3 """
default_cfg = default_cfgs['mobilenetv3_100']
if pretrained:
@ -1336,7 +1336,7 @@ def mobilenetv3_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def fbnetc_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def fbnetc_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" FBNet-C """
default_cfg = default_cfgs['fbnetc_100']
if pretrained:
@ -1349,7 +1349,7 @@ def fbnetc_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def chamnetv1_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def chamnetv1_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" ChamNet """
default_cfg = default_cfgs['chamnetv1_100']
model = _gen_chamnet_v1(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1359,7 +1359,7 @@ def chamnetv1_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def chamnetv2_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def chamnetv2_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" ChamNet """
default_cfg = default_cfgs['chamnetv2_100']
model = _gen_chamnet_v2(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1369,7 +1369,7 @@ def chamnetv2_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def spnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
def spnasnet_100(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" Single-Path NAS Pixel1"""
default_cfg = default_cfgs['spnasnet_100']
model = _gen_spnasnet(1.0, num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -1379,7 +1379,7 @@ def spnasnet_100(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b0(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b0(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B0 """
default_cfg = default_cfgs['efficientnet_b0']
# NOTE for train, drop_rate should be 0.2
@ -1392,7 +1392,7 @@ def efficientnet_b0(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b1(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b1(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B1 """
default_cfg = default_cfgs['efficientnet_b1']
# NOTE for train, drop_rate should be 0.2
@ -1405,7 +1405,7 @@ def efficientnet_b1(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b2(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B2 """
default_cfg = default_cfgs['efficientnet_b2']
# NOTE for train, drop_rate should be 0.3
@ -1418,7 +1418,7 @@ def efficientnet_b2(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b3(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B3 """
default_cfg = default_cfgs['efficientnet_b3']
# NOTE for train, drop_rate should be 0.3
@ -1431,7 +1431,7 @@ def efficientnet_b3(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b4(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b4(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B4 """
default_cfg = default_cfgs['efficientnet_b4']
# NOTE for train, drop_rate should be 0.4
@ -1444,7 +1444,7 @@ def efficientnet_b4(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def efficientnet_b5(num_classes, in_chans=3, pretrained=False, **kwargs):
def efficientnet_b5(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B5 """
# NOTE for train, drop_rate should be 0.4
default_cfg = default_cfgs['efficientnet_b5']
@ -1457,7 +1457,7 @@ def efficientnet_b5(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b0(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b0(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B0. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b0']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT
@ -1471,7 +1471,7 @@ def tf_efficientnet_b0(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b1(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b1(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B1. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b1']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT
@ -1485,7 +1485,7 @@ def tf_efficientnet_b1(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b2(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B2. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b2']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT
@ -1499,7 +1499,7 @@ def tf_efficientnet_b2(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b3(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B3. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b3']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT
@ -1513,7 +1513,7 @@ def tf_efficientnet_b3(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b4(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b4(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B4. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b4']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT
@ -1527,7 +1527,7 @@ def tf_efficientnet_b4(num_classes, in_chans=3, pretrained=False, **kwargs):
return model
def tf_efficientnet_b5(num_classes, in_chans=3, pretrained=False, **kwargs):
def tf_efficientnet_b5(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
""" EfficientNet-B5. Tensorflow compatible variant """
default_cfg = default_cfgs['tf_efficientnet_b5']
kwargs['bn_eps'] = _BN_EPS_TF_DEFAULT

@ -361,7 +361,7 @@ class GluonResNet(nn.Module):
return x
def gluon_resnet18_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet18_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-18 model.
"""
default_cfg = default_cfgs['gluon_resnet18_v1b']
@ -372,7 +372,7 @@ def gluon_resnet18_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet34_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet34_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-34 model.
"""
default_cfg = default_cfgs['gluon_resnet34_v1b']
@ -383,7 +383,7 @@ def gluon_resnet34_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet50_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet50_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50 model.
"""
default_cfg = default_cfgs['gluon_resnet50_v1b']
@ -394,7 +394,7 @@ def gluon_resnet50_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet101_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet101_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['gluon_resnet101_v1b']
@ -405,7 +405,7 @@ def gluon_resnet101_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet152_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet152_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['gluon_resnet152_v1b']
@ -416,7 +416,7 @@ def gluon_resnet152_v1b(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet50_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet50_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50 model.
"""
default_cfg = default_cfgs['gluon_resnet50_v1c']
@ -428,7 +428,7 @@ def gluon_resnet50_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet101_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet101_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['gluon_resnet101_v1c']
@ -440,7 +440,7 @@ def gluon_resnet101_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet152_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet152_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['gluon_resnet152_v1c']
@ -452,7 +452,7 @@ def gluon_resnet152_v1c(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet50_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet50_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50 model.
"""
default_cfg = default_cfgs['gluon_resnet50_v1d']
@ -464,7 +464,7 @@ def gluon_resnet50_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet101_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet101_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['gluon_resnet101_v1d']
@ -476,7 +476,7 @@ def gluon_resnet101_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet152_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet152_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['gluon_resnet152_v1d']
@ -488,7 +488,7 @@ def gluon_resnet152_v1d(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet50_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet50_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50-V1e model. No pretrained weights for any 'e' variants
"""
default_cfg = default_cfgs['gluon_resnet50_v1e']
@ -500,7 +500,7 @@ def gluon_resnet50_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet101_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet101_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['gluon_resnet101_v1e']
@ -512,7 +512,7 @@ def gluon_resnet101_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet152_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet152_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['gluon_resnet152_v1e']
@ -524,7 +524,7 @@ def gluon_resnet152_v1e(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet50_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet50_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50 model.
"""
default_cfg = default_cfgs['gluon_resnet50_v1s']
@ -536,7 +536,7 @@ def gluon_resnet50_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs)
return model
def gluon_resnet101_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet101_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['gluon_resnet101_v1s']
@ -548,7 +548,7 @@ def gluon_resnet101_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnet152_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnet152_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['gluon_resnet152_v1s']
@ -560,7 +560,7 @@ def gluon_resnet152_v1s(num_classes=1000, in_chans=3, pretrained=False, **kwargs
return model
def gluon_resnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt50-32x4d model.
"""
default_cfg = default_cfgs['gluon_resnext50_32x4d']
@ -573,7 +573,7 @@ def gluon_resnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwar
return model
def gluon_resnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt-101 model.
"""
default_cfg = default_cfgs['gluon_resnext101_32x4d']
@ -586,7 +586,7 @@ def gluon_resnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwa
return model
def gluon_resnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnext101_64x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt-101 model.
"""
default_cfg = default_cfgs['gluon_resnext101_64x4d']
@ -599,7 +599,7 @@ def gluon_resnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **kwa
return model
def gluon_resnext152_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_resnext152_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt152-32x4d model.
"""
default_cfg = default_cfgs['gluon_resnext152_32x4d']
@ -612,7 +612,7 @@ def gluon_resnext152_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwa
return model
def gluon_seresnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_seresnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a SEResNeXt50-32x4d model.
"""
default_cfg = default_cfgs['gluon_seresnext50_32x4d']
@ -625,7 +625,7 @@ def gluon_seresnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kw
return model
def gluon_seresnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_seresnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a SEResNeXt-101-32x4d model.
"""
default_cfg = default_cfgs['gluon_seresnext101_32x4d']
@ -638,7 +638,7 @@ def gluon_seresnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **k
return model
def gluon_seresnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_seresnext101_64x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a SEResNeXt-101-64x4d model.
"""
default_cfg = default_cfgs['gluon_seresnext101_64x4d']
@ -651,7 +651,7 @@ def gluon_seresnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **k
return model
def gluon_seresnext152_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_seresnext152_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a SEResNeXt152-32x4d model.
"""
default_cfg = default_cfgs['gluon_seresnext152_32x4d']
@ -664,7 +664,7 @@ def gluon_seresnext152_32x4d(num_classes=1000, in_chans=3, pretrained=False, **k
return model
def gluon_senet154(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_senet154(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs an SENet-154 model.
"""
default_cfg = default_cfgs['gluon_senet154']

@ -318,7 +318,7 @@ class InceptionResnetV2(nn.Module):
return x
def inception_resnet_v2(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def inception_resnet_v2(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
r"""InceptionResnetV2 model architecture from the
`"InceptionV4, Inception-ResNet..." <https://arxiv.org/abs/1602.07261>`_ paper.
"""

@ -66,7 +66,7 @@ def _assert_default_kwargs(kwargs):
assert kwargs.pop('drop_rate', 0.) == 0.
def inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def inception_v3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
# original PyTorch weights, ported from Tensorflow but modified
default_cfg = default_cfgs['inception_v3']
assert in_chans == 3
@ -78,7 +78,7 @@ def inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def tf_inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def tf_inception_v3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
# my port of Tensorflow SLIM weights (http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz)
default_cfg = default_cfgs['tf_inception_v3']
assert in_chans == 3
@ -90,7 +90,7 @@ def tf_inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def adv_inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def adv_inception_v3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
# my port of Tensorflow adversarially trained Inception V3 from
# http://download.tensorflow.org/models/adv_inception_v3_2017_08_18.tar.gz
default_cfg = default_cfgs['adv_inception_v3']
@ -103,7 +103,7 @@ def adv_inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def gluon_inception_v3(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def gluon_inception_v3(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
# from gluon pretrained models, best performing in terms of accuracy/loss metrics
# https://gluon-cv.mxnet.io/model_zoo/classification.html
default_cfg = default_cfgs['gluon_inception_v3']

@ -293,7 +293,7 @@ class InceptionV4(nn.Module):
return x
def inception_v4(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def inception_v4(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['inception_v4']
model = InceptionV4(num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg

@ -21,7 +21,7 @@ def create_model(
checkpoint_path='',
**kwargs):
margs = dict(num_classes=num_classes, in_chans=in_chans, pretrained=pretrained)
margs = dict(pretrained=pretrained, num_classes=num_classes, in_chans=in_chans)
# Not all models have support for batchnorm params passed as args, only gen_efficientnet variants
supports_bn_params = model_name in gen_efficientnet_model_names()
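
For context, the reordered margs dict now mirrors the entrypoint signatures exactly. The dispatch presumably looks roughly like the sketch below — an assumption about create_model beyond the lines shown; the real function resolves the entrypoint and filters kwargs differently:

def create_model_sketch(model_name, pretrained=False, num_classes=1000, in_chans=3, **kwargs):
    # Pack the common args in the same order as the entrypoint signatures, then dispatch.
    margs = dict(pretrained=pretrained, num_classes=num_classes, in_chans=in_chans)
    entrypoint = globals()[model_name]   # hypothetical name lookup
    return entrypoint(**margs, **kwargs)

# e.g. create_model_sketch('resnet50', pretrained=True, drop_rate=0.1)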

@ -385,13 +385,13 @@ class PNASNet5Large(nn.Module):
return x
def pnasnet5large(num_classes=1000, in_chans=3, pretrained='imagenet'):
def pnasnet5large(pretrained='imagenet', num_classes=1000, in_chans=3, **kwargs):
r"""PNASNet-5 model architecture from the
`"Progressive Neural Architecture Search"
<https://arxiv.org/abs/1712.00559>`_ paper.
"""
default_cfg = default_cfgs['pnasnet5large']
model = PNASNet5Large(num_classes=1000, in_chans=in_chans)
model = PNASNet5Large(num_classes=1000, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(model, default_cfg, num_classes, in_chans)

@ -219,7 +219,7 @@ class ResNet(nn.Module):
return x
def resnet18(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnet18(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-18 model.
"""
default_cfg = default_cfgs['resnet18']
@ -230,7 +230,7 @@ def resnet18(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnet34(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnet34(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-34 model.
"""
default_cfg = default_cfgs['resnet34']
@ -241,7 +241,7 @@ def resnet34(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnet50(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnet50(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-50 model.
"""
default_cfg = default_cfgs['resnet50']
@ -252,7 +252,7 @@ def resnet50(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnet101(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnet101(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-101 model.
"""
default_cfg = default_cfgs['resnet101']
@ -263,7 +263,7 @@ def resnet101(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnet152(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnet152(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNet-152 model.
"""
default_cfg = default_cfgs['resnet152']
@ -274,7 +274,7 @@ def resnet152(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt50-32x4d model.
"""
default_cfg = default_cfgs['resnext50_32x4d']
@ -287,7 +287,7 @@ def resnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt-101 model.
"""
default_cfg = default_cfgs['resnext101_32x4d']
@ -300,7 +300,7 @@ def resnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnext101_64x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt101-64x4d model.
"""
default_cfg = default_cfgs['resnext101_32x4d']
@ -313,7 +313,7 @@ def resnext101_64x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def resnext152_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def resnext152_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
"""Constructs a ResNeXt152-32x4d model.
"""
default_cfg = default_cfgs['resnext152_32x4d']

@ -400,7 +400,7 @@ class SENet(nn.Module):
return x
def seresnet18(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnet18(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnet18']
model = SENet(SEResNetBlock, [2, 2, 2, 2], groups=1, reduction=16,
inplanes=64, input_3x3=False,
@ -412,7 +412,7 @@ def seresnet18(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnet34(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnet34(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnet34']
model = SENet(SEResNetBlock, [3, 4, 6, 3], groups=1, reduction=16,
inplanes=64, input_3x3=False,
@ -424,7 +424,7 @@ def seresnet34(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnet50(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnet50(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnet50']
model = SENet(SEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
inplanes=64, input_3x3=False,
@ -436,7 +436,7 @@ def seresnet50(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnet101(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnet101(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnet101']
model = SENet(SEResNetBottleneck, [3, 4, 23, 3], groups=1, reduction=16,
inplanes=64, input_3x3=False,
@ -448,7 +448,7 @@ def seresnet101(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnet152(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnet152(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnet152']
model = SENet(SEResNetBottleneck, [3, 8, 36, 3], groups=1, reduction=16,
inplanes=64, input_3x3=False,
@ -460,7 +460,7 @@ def seresnet152(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def senet154(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def senet154(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['senet154']
model = SENet(SEBottleneck, [3, 8, 36, 3], groups=64, reduction=16,
num_classes=num_classes, in_chans=in_chans, **kwargs)
@ -470,7 +470,7 @@ def senet154(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnext26_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnext26_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnext26_32x4d']
model = SENet(SEResNeXtBottleneck, [2, 2, 2, 2], groups=32, reduction=16,
inplanes=64, input_3x3=False,
@ -482,7 +482,7 @@ def seresnext26_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnext50_32x4d']
model = SENet(SEResNeXtBottleneck, [3, 4, 6, 3], groups=32, reduction=16,
inplanes=64, input_3x3=False,
@ -494,7 +494,7 @@ def seresnext50_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
return model
def seresnext101_32x4d(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def seresnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['seresnext101_32x4d']
model = SENet(SEResNeXtBottleneck, [3, 4, 23, 3], groups=32, reduction=16,
inplanes=64, input_3x3=False,

@ -228,7 +228,7 @@ class Xception(nn.Module):
return x
def xception(num_classes=1000, in_chans=3, pretrained=False, **kwargs):
def xception(pretrained=False, num_classes=1000, in_chans=3, **kwargs):
default_cfg = default_cfgs['xception']
model = Xception(num_classes=num_classes, in_chans=in_chans, **kwargs)
model.default_cfg = default_cfg
