Support bits checkpoints in avg/load

pull/880/head
Ross Wightman 3 years ago
parent 007bc39323
commit b2094f4ee8

@@ -41,6 +41,10 @@ def checkpoint_metric(checkpoint_path):
metric = None
if 'metric' in checkpoint:
metric = checkpoint['metric']
elif 'metrics' in checkpoint and 'metric_name' in checkpoint:
metrics = checkpoint['metrics']
print(metrics)
metric = metrics[checkpoint['metric_name']]
return metric

@@ -24,13 +24,20 @@ _logger = logging.getLogger(__name__)
def load_state_dict(checkpoint_path, use_ema=False):
if checkpoint_path and os.path.isfile(checkpoint_path):
checkpoint = torch.load(checkpoint_path, map_location='cpu')
state_dict_key = 'state_dict'
state_dict_key = ''
if isinstance(checkpoint, dict):
if use_ema and 'state_dict_ema' in checkpoint:
if use_ema and checkpoint.get('state_dict_ema', None) is not None:
state_dict_key = 'state_dict_ema'
if state_dict_key and state_dict_key in checkpoint:
elif use_ema and checkpoint.get('model_ema', None) is not None:
state_dict_key = 'model_ema'
elif 'state_dict' in checkpoint:
state_dict_key = 'state_dict'
elif 'model' in checkpoint:
state_dict_key = 'model'
if state_dict_key:
state_dict = checkpoint[state_dict_key]
new_state_dict = OrderedDict()
for k, v in checkpoint[state_dict_key].items():
for k, v in state_dict.items():
# strip `module.` prefix
name = k[7:] if k.startswith('module') else k
new_state_dict[name] = v

Loading…
Cancel
Save