From de6046e213087e9c282e557d5286a060bee6e594 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Tue, 8 Dec 2020 17:03:00 -0800 Subject: [PATCH 01/20] Initial commit for dataset / parser reorg to support additional datasets / types --- inference.py | 4 +- timm/data/__init__.py | 2 +- timm/data/dataset.py | 170 +++-------------------- timm/data/parsers/__init__.py | 4 + timm/data/parsers/class_map.py | 15 ++ timm/data/parsers/constants.py | 3 + timm/data/parsers/parser.py | 17 +++ timm/data/parsers/parser_image_folder.py | 69 +++++++++ timm/data/parsers/parser_image_tar.py | 66 +++++++++ timm/data/parsers/parser_in21k_tar.py | 104 ++++++++++++++ train.py | 9 +- validate.py | 7 +- 12 files changed, 309 insertions(+), 161 deletions(-) create mode 100644 timm/data/parsers/__init__.py create mode 100644 timm/data/parsers/class_map.py create mode 100644 timm/data/parsers/constants.py create mode 100644 timm/data/parsers/parser.py create mode 100644 timm/data/parsers/parser_image_folder.py create mode 100644 timm/data/parsers/parser_image_tar.py create mode 100644 timm/data/parsers/parser_in21k_tar.py diff --git a/inference.py b/inference.py index 16d19944..f7ee6d3e 100755 --- a/inference.py +++ b/inference.py @@ -13,7 +13,7 @@ import numpy as np import torch from timm.models import create_model, apply_test_time_pool -from timm.data import Dataset, create_loader, resolve_data_config +from timm.data import ImageDataset, create_loader, resolve_data_config from timm.utils import AverageMeter, setup_default_logging torch.backends.cudnn.benchmark = True @@ -81,7 +81,7 @@ def main(): model = model.cuda() loader = create_loader( - Dataset(args.data), + ImageDataset(args.data), input_size=config['input_size'], batch_size=args.batch_size, use_prefetcher=True, diff --git a/timm/data/__init__.py b/timm/data/__init__.py index 15617859..1dd8ac57 100644 --- a/timm/data/__init__.py +++ b/timm/data/__init__.py @@ -1,6 +1,6 @@ from .constants import * from .config import resolve_data_config -from .dataset import Dataset, DatasetTar, AugMixDataset +from .dataset import ImageDataset, AugMixDataset from .transforms import * from .loader import create_loader from .transforms_factory import create_transform diff --git a/timm/data/dataset.py b/timm/data/dataset.py index 99d99917..8013c846 100644 --- a/timm/data/dataset.py +++ b/timm/data/dataset.py @@ -2,177 +2,49 @@ Hacked together by / Copyright 2020 Ross Wightman """ -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - import torch.utils.data as data - import os -import re import torch -import tarfile -from PIL import Image - - -IMG_EXTENSIONS = ['.png', '.jpg', '.jpeg'] - - -def natural_key(string_): - """See http://www.codinghorror.com/blog/archives/001018.html""" - return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string_.lower())] - - -def find_images_and_targets(folder, types=IMG_EXTENSIONS, class_to_idx=None, leaf_name_only=True, sort=True): - labels = [] - filenames = [] - for root, subdirs, files in os.walk(folder, topdown=False): - rel_path = os.path.relpath(root, folder) if (root != folder) else '' - label = os.path.basename(rel_path) if leaf_name_only else rel_path.replace(os.path.sep, '_') - for f in files: - base, ext = os.path.splitext(f) - if ext.lower() in types: - filenames.append(os.path.join(root, f)) - labels.append(label) - if class_to_idx is None: - # building class index - unique_labels = set(labels) - sorted_labels = list(sorted(unique_labels, key=natural_key)) - 
class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)} - images_and_targets = [(f, class_to_idx[l]) for f, l in zip(filenames, labels) if l in class_to_idx] - if sort: - images_and_targets = sorted(images_and_targets, key=lambda k: natural_key(k[0])) - return images_and_targets, class_to_idx - - -def load_class_map(filename, root=''): - class_map_path = filename - if not os.path.exists(class_map_path): - class_map_path = os.path.join(root, filename) - assert os.path.exists(class_map_path), 'Cannot locate specified class map file (%s)' % filename - class_map_ext = os.path.splitext(filename)[-1].lower() - if class_map_ext == '.txt': - with open(class_map_path) as f: - class_to_idx = {v.strip(): k for k, v in enumerate(f)} - else: - assert False, 'Unsupported class map extension' - return class_to_idx - - -class Dataset(data.Dataset): + +from .parsers import ParserImageFolder, ParserImageTar + + +class ImageDataset(data.Dataset): def __init__( self, - root, + img_root, + parser=None, + class_map='', load_bytes=False, transform=None, - class_map=''): - - class_to_idx = None - if class_map: - class_to_idx = load_class_map(class_map, root) - images, class_to_idx = find_images_and_targets(root, class_to_idx=class_to_idx) - if len(images) == 0: - raise RuntimeError(f'Found 0 images in subfolders of {root}. ' - f'Supported image extensions are {", ".join(IMG_EXTENSIONS)}') - self.root = root - self.samples = images - self.imgs = self.samples # torchvision ImageFolder compat - self.class_to_idx = class_to_idx + ): + self.img_root = img_root + if parser is None: + if os.path.isfile(img_root) and os.path.splitext(img_root)[1] == '.tar': + parser = ParserImageTar(img_root, load_bytes=load_bytes, class_map=class_map) + else: + parser = ParserImageFolder(img_root, load_bytes=load_bytes, class_map=class_map) + self.parser = parser self.load_bytes = load_bytes self.transform = transform def __getitem__(self, index): - path, target = self.samples[index] - img = open(path, 'rb').read() if self.load_bytes else Image.open(path).convert('RGB') + img, target = self.parser[index] if self.transform is not None: img = self.transform(img) if target is None: - target = torch.zeros(1).long() + target = torch.tensor(-1, dtype=torch.long) return img, target def __len__(self): - return len(self.samples) + return len(self.parser) def filename(self, index, basename=False, absolute=False): - filename = self.samples[index][0] - if basename: - filename = os.path.basename(filename) - elif not absolute: - filename = os.path.relpath(filename, self.root) - return filename + return self.parser.filename(index, basename, absolute) def filenames(self, basename=False, absolute=False): - fn = lambda x: x - if basename: - fn = os.path.basename - elif not absolute: - fn = lambda x: os.path.relpath(x, self.root) - return [fn(x[0]) for x in self.samples] - - -def _extract_tar_info(tarfile, class_to_idx=None, sort=True): - files = [] - labels = [] - for ti in tarfile.getmembers(): - if not ti.isfile(): - continue - dirname, basename = os.path.split(ti.path) - label = os.path.basename(dirname) - ext = os.path.splitext(basename)[1] - if ext.lower() in IMG_EXTENSIONS: - files.append(ti) - labels.append(label) - if class_to_idx is None: - unique_labels = set(labels) - sorted_labels = list(sorted(unique_labels, key=natural_key)) - class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)} - tarinfo_and_targets = [(f, class_to_idx[l]) for f, l in zip(files, labels) if l in class_to_idx] - if sort: - tarinfo_and_targets = 
sorted(tarinfo_and_targets, key=lambda k: natural_key(k[0].path))
-    return tarinfo_and_targets, class_to_idx
-
-
-class DatasetTar(data.Dataset):
-
-    def __init__(self, root, load_bytes=False, transform=None, class_map=''):
-
-        class_to_idx = None
-        if class_map:
-            class_to_idx = load_class_map(class_map, root)
-        assert os.path.isfile(root)
-        self.root = root
-        with tarfile.open(root) as tf:  # cannot keep this open across processes, reopen later
-            self.samples, self.class_to_idx = _extract_tar_info(tf, class_to_idx)
-        self.imgs = self.samples
-        self.tarfile = None  # lazy init in __getitem__
-        self.load_bytes = load_bytes
-        self.transform = transform
-
-    def __getitem__(self, index):
-        if self.tarfile is None:
-            self.tarfile = tarfile.open(self.root)
-        tarinfo, target = self.samples[index]
-        iob = self.tarfile.extractfile(tarinfo)
-        img = iob.read() if self.load_bytes else Image.open(iob).convert('RGB')
-        if self.transform is not None:
-            img = self.transform(img)
-        if target is None:
-            target = torch.zeros(1).long()
-        return img, target
-
-    def __len__(self):
-        return len(self.samples)
-
-    def filename(self, index, basename=False):
-        filename = self.samples[index][0].name
-        if basename:
-            filename = os.path.basename(filename)
-        return filename
-
-    def filenames(self, basename=False):
-        fn = os.path.basename if basename else lambda x: x
-        return [fn(x[0].name) for x in self.samples]
+        return self.parser.filenames(basename, absolute)

 class AugMixDataset(torch.utils.data.Dataset):
diff --git a/timm/data/parsers/__init__.py b/timm/data/parsers/__init__.py
new file mode 100644
index 00000000..c502eec8
--- /dev/null
+++ b/timm/data/parsers/__init__.py
@@ -0,0 +1,4 @@
+from .parser import Parser
+from .parser_image_folder import ParserImageFolder
+from .parser_image_tar import ParserImageTar
+from .parser_in21k_tar import ParserIn21kTar
\ No newline at end of file
diff --git a/timm/data/parsers/class_map.py b/timm/data/parsers/class_map.py
new file mode 100644
index 00000000..f5fa7e2a
--- /dev/null
+++ b/timm/data/parsers/class_map.py
@@ -0,0 +1,16 @@
+import os
+
+
+def load_class_map(filename, root=''):
+    class_map_path = filename
+    if not os.path.exists(class_map_path):
+        class_map_path = os.path.join(root, filename)
+    assert os.path.exists(class_map_path), 'Cannot locate specified class map file (%s)' % filename
+    class_map_ext = os.path.splitext(filename)[-1].lower()
+    if class_map_ext == '.txt':
+        with open(class_map_path) as f:
+            class_to_idx = {v.strip(): k for k, v in enumerate(f)}
+    else:
+        assert False, 'Unsupported class map extension'
+    return class_to_idx
+
diff --git a/timm/data/parsers/constants.py b/timm/data/parsers/constants.py
new file mode 100644
index 00000000..6e3be34b
--- /dev/null
+++ b/timm/data/parsers/constants.py
@@ -0,0 +1,3 @@
+IMG_EXTENSIONS = ('.png', '.jpg', '.jpeg')
+
+
diff --git a/timm/data/parsers/parser.py b/timm/data/parsers/parser.py
new file mode 100644
index 00000000..76ab6d18
--- /dev/null
+++ b/timm/data/parsers/parser.py
@@ -0,0 +1,17 @@
+from abc import abstractmethod
+
+
+class Parser:
+    def __init__(self):
+        pass
+
+    @abstractmethod
+    def _filename(self, index, basename=False, absolute=False):
+        pass
+
+    def filename(self, index, basename=False, absolute=False):
+        return self._filename(index, basename=basename, absolute=absolute)
+
+    def filenames(self, basename=False, absolute=False):
+        return [self._filename(index, basename=basename, absolute=absolute) for index in range(len(self))]
+
diff --git a/timm/data/parsers/parser_image_folder.py
b/timm/data/parsers/parser_image_folder.py new file mode 100644 index 00000000..8a61007f --- /dev/null +++ b/timm/data/parsers/parser_image_folder.py @@ -0,0 +1,69 @@ +import os +import io +import torch + +from PIL import Image +from timm.utils.misc import natural_key + +from .parser import Parser +from .class_map import load_class_map +from .constants import IMG_EXTENSIONS + + +def find_images_and_targets(folder, types=IMG_EXTENSIONS, class_to_idx=None, leaf_name_only=True, sort=True): + labels = [] + filenames = [] + for root, subdirs, files in os.walk(folder, topdown=False): + rel_path = os.path.relpath(root, folder) if (root != folder) else '' + label = os.path.basename(rel_path) if leaf_name_only else rel_path.replace(os.path.sep, '_') + for f in files: + base, ext = os.path.splitext(f) + if ext.lower() in types: + filenames.append(os.path.join(root, f)) + labels.append(label) + if class_to_idx is None: + # building class index + unique_labels = set(labels) + sorted_labels = list(sorted(unique_labels, key=natural_key)) + class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)} + images_and_targets = [(f, class_to_idx[l]) for f, l in zip(filenames, labels) if l in class_to_idx] + if sort: + images_and_targets = sorted(images_and_targets, key=lambda k: natural_key(k[0])) + return images_and_targets, class_to_idx + + +class ParserImageFolder(Parser): + + def __init__( + self, + root, + load_bytes=False, + class_map=''): + super().__init__() + + self.root = root + self.load_bytes = load_bytes + + class_to_idx = None + if class_map: + class_to_idx = load_class_map(class_map, root) + self.samples, self.class_to_idx = find_images_and_targets(root, class_to_idx=class_to_idx) + if len(self.samples) == 0: + raise RuntimeError(f'Found 0 images in subfolders of {root}. 
' + f'Supported image extensions are {", ".join(IMG_EXTENSIONS)}') + + def __getitem__(self, index): + path, target = self.samples[index] + img = open(path, 'rb').read() if self.load_bytes else Image.open(path).convert('RGB') + return img, target + + def __len__(self): + return len(self.samples) + + def _filename(self, index, basename=False, absolute=False): + filename = self.samples[index][0] + if basename: + filename = os.path.basename(filename) + elif not absolute: + filename = os.path.relpath(filename, self.root) + return filename diff --git a/timm/data/parsers/parser_image_tar.py b/timm/data/parsers/parser_image_tar.py new file mode 100644 index 00000000..504e71e8 --- /dev/null +++ b/timm/data/parsers/parser_image_tar.py @@ -0,0 +1,66 @@ +import os +import io +import torch +import tarfile + +from .parser import Parser +from .class_map import load_class_map +from .constants import IMG_EXTENSIONS +from PIL import Image +from timm.utils.misc import natural_key + + +def extract_tar_info(tarfile, class_to_idx=None, sort=True): + files = [] + labels = [] + for ti in tarfile.getmembers(): + if not ti.isfile(): + continue + dirname, basename = os.path.split(ti.path) + label = os.path.basename(dirname) + ext = os.path.splitext(basename)[1] + if ext.lower() in IMG_EXTENSIONS: + files.append(ti) + labels.append(label) + if class_to_idx is None: + unique_labels = set(labels) + sorted_labels = list(sorted(unique_labels, key=natural_key)) + class_to_idx = {c: idx for idx, c in enumerate(sorted_labels)} + tarinfo_and_targets = [(f, class_to_idx[l]) for f, l in zip(files, labels) if l in class_to_idx] + if sort: + tarinfo_and_targets = sorted(tarinfo_and_targets, key=lambda k: natural_key(k[0].path)) + return tarinfo_and_targets, class_to_idx + + +class ParserImageTar(Parser): + + def __init__(self, root, load_bytes=False, class_map=''): + super().__init__() + + class_to_idx = None + if class_map: + class_to_idx = load_class_map(class_map, root) + assert os.path.isfile(root) + self.root = root + with tarfile.open(root) as tf: # cannot keep this open across processes, reopen later + self.samples, self.class_to_idx = extract_tar_info(tf, class_to_idx) + self.imgs = self.samples + self.tarfile = None # lazy init in __getitem__ + self.load_bytes = load_bytes + + def __getitem__(self, index): + if self.tarfile is None: + self.tarfile = tarfile.open(self.root) + tarinfo, target = self.samples[index] + iob = self.tarfile.extractfile(tarinfo) + img = iob.read() if self.load_bytes else Image.open(iob).convert('RGB') + return img, target + + def __len__(self): + return len(self.samples) + + def _filename(self, index, basename=False, absolute=False): + filename = self.samples[index][0].name + if basename: + filename = os.path.basename(filename) + return filename diff --git a/timm/data/parsers/parser_in21k_tar.py b/timm/data/parsers/parser_in21k_tar.py new file mode 100644 index 00000000..da7e9d26 --- /dev/null +++ b/timm/data/parsers/parser_in21k_tar.py @@ -0,0 +1,104 @@ +import os +import io +import re +import torch +import tarfile +import pickle +from glob import glob +import numpy as np + +import torch.utils.data as data + +from timm.utils.misc import natural_key + +from .constants import IMG_EXTENSIONS + + +def load_class_map(filename, root=''): + class_map_path = filename + if not os.path.exists(class_map_path): + class_map_path = os.path.join(root, filename) + assert os.path.exists(class_map_path), 'Cannot locate specified class map file (%s)' % filename + class_map_ext = 
os.path.splitext(filename)[-1].lower()
+    if class_map_ext == '.txt':
+        with open(class_map_path) as f:
+            class_to_idx = {v.strip(): k for k, v in enumerate(f)}
+    else:
+        assert False, 'Unsupported class map extension'
+    return class_to_idx
+
+
+class ParserIn21kTar(data.Dataset):
+
+    CACHE_FILENAME = 'class_info.pickle'
+
+    def __init__(self, root, class_map=''):
+
+        class_to_idx = None
+        if class_map:
+            class_to_idx = load_class_map(class_map, root)
+        assert os.path.isdir(root)
+        self.root = root
+        tar_filenames = glob(os.path.join(self.root, '*.tar'), recursive=True)
+        assert len(tar_filenames)
+        num_tars = len(tar_filenames)
+
+        if os.path.exists(self.CACHE_FILENAME):
+            with open(self.CACHE_FILENAME, 'rb') as pf:
+                class_info = pickle.load(pf)
+        else:
+            class_info = {}
+            for fi, fn in enumerate(tar_filenames):
+                if fi % 1000 == 0:
+                    print(f'DEBUG: tar {fi}/{num_tars}')
+                # cannot keep this open across processes, reopen later
+                name = os.path.splitext(os.path.basename(fn))[0]
+                img_tarinfos = []
+                with tarfile.open(fn) as tf:
+                    img_tarinfos.extend(tf.getmembers())
+                class_info[name] = dict(img_tarinfos=img_tarinfos)
+                print(f'DEBUG: {len(img_tarinfos)} images for synset {name}')
+            class_info = {k: v for k, v in sorted(class_info.items())}
+
+            with open(self.CACHE_FILENAME, 'wb') as pf:
+                pickle.dump(class_info, pf, protocol=pickle.HIGHEST_PROTOCOL)
+
+        if class_to_idx is not None:
+            out_dict = {}
+            for k, v in class_info.items():
+                if k in class_to_idx:
+                    class_idx = class_to_idx[k]
+                    v['class_idx'] = class_idx
+                    out_dict[k] = v
+            class_info = {k: v for k, v in sorted(out_dict.items(), key=lambda x: x[1]['class_idx'])}
+        else:
+            for i, (k, v) in enumerate(class_info.items()):
+                v['class_idx'] = i
+
+        self.img_infos = []
+        self.targets = []
+        self.tarnames = []
+        for k, v in class_info.items():
+            num_samples = len(v['img_tarinfos'])
+            self.img_infos.extend(v['img_tarinfos'])
+            self.targets.extend([v['class_idx']] * num_samples)
+            self.tarnames.extend([k] * num_samples)
+        self.targets = np.array(self.targets)  # separate, uniform np arrays are more memory efficient
+        self.tarnames = np.array(self.tarnames)
+
+        self.tarfiles = {}  # to open lazily
+        del class_info
+
+    def __len__(self):
+        return len(self.img_infos)
+
+    def __getitem__(self, idx):
+        img_tarinfo = self.img_infos[idx]
+        name = self.tarnames[idx]
+        tf = self.tarfiles.setdefault(name, tarfile.open(os.path.join(self.root, name + '.tar')))
+        img_bytes = tf.extractfile(img_tarinfo)
+        if len(self.targets):  # NOTE: length check, plain truthiness on a non-empty np.ndarray raises ValueError
+            target = self.targets[idx]
+        else:
+            target = None
+        return img_bytes, target
diff --git a/train.py b/train.py
index 7a93a1b6..ca406655 100755
--- a/train.py
+++ b/train.py
@@ -28,7 +28,7 @@ import torch.nn as nn
 import torchvision.utils
 from torch.nn.parallel import DistributedDataParallel as NativeDDP

-from timm.data import Dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset
+from timm.data import ImageDataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset
 from timm.models import create_model, resume_checkpoint, load_checkpoint, convert_splitbn_model
 from timm.utils import *
 from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy
@@ -275,7 +275,7 @@ def _parse_args():

 def main():
-    setup_default_logging()
+    setup_default_logging(log_path='./train.log')
     args, args_text = _parse_args()

     args.prefetcher = not args.no_prefetcher
@@ -330,6 +330,7 @@ def main():
         scriptable=args.torchscript,
         checkpoint_path=args.initial_checkpoint)

+    print(model)
     if args.local_rank == 0:
         _logger.info('Model %s created, param count: %d' %
                      (args.model, sum([m.numel() for m in model.parameters()])))
@@ -439,7 +440,7 @@ def main():
     if not os.path.exists(train_dir):
         _logger.error('Training folder does not exist at: {}'.format(train_dir))
         exit(1)
-    dataset_train = Dataset(train_dir)
+    dataset_train = ImageDataset(train_dir)

     eval_dir = os.path.join(args.data, 'val')
     if not os.path.isdir(eval_dir):
@@ -447,7 +448,7 @@ def main():
         if not os.path.isdir(eval_dir):
             _logger.error('Validation folder does not exist at: {}'.format(eval_dir))
             exit(1)
-    dataset_eval = Dataset(eval_dir)
+    dataset_eval = ImageDataset(eval_dir)

     # setup mixup / cutmix
     collate_fn = None
diff --git a/validate.py b/validate.py
index 5a0d388c..645dfd1d 100755
--- a/validate.py
+++ b/validate.py
@@ -20,7 +20,7 @@ from collections import OrderedDict
 from contextlib import suppress

 from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models
-from timm.data import Dataset, DatasetTar, create_loader, resolve_data_config, RealLabelsImagenet
+from timm.data import ImageDataset, create_loader, resolve_data_config, RealLabelsImagenet
 from timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_legacy

 has_apex = False
@@ -157,10 +157,7 @@ def validate(args):

     criterion = nn.CrossEntropyLoss().cuda()

-    if os.path.splitext(args.data)[1] == '.tar' and os.path.isfile(args.data):
-        dataset = DatasetTar(args.data, load_bytes=args.tf_preprocessing, class_map=args.class_map)
-    else:
-        dataset = Dataset(args.data, load_bytes=args.tf_preprocessing, class_map=args.class_map)
+    dataset = ImageDataset(args.data, load_bytes=args.tf_preprocessing, class_map=args.class_map)

     if args.valid_labels:
         with open(args.valid_labels, 'r') as f:

From 231d04e91adca07f94ac32df8b5bdb8492281b0c Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Thu, 24 Dec 2020 12:03:31 -0800
Subject: [PATCH 02/20] ResNetV2 pre-act and non-preact model, w/ BiT pretrained
 weights and support for ViT R50 model. Tweaks for in21k num_classes passing.
 More to do... tests failing.
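Intended usage for the new models (a quick sketch only, using the registry names
added in this patch):

    import timm
    # pretrained=True routes through the new load_custom_pretrained() helper,
    # which downloads the original BiT .npz checkpoint and converts it in-place
    model = timm.create_model('resnetv2_50x1_bitm', pretrained=True).eval()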
---
 timm/models/__init__.py           |   1 +
 timm/models/factory.py            |   6 +-
 timm/models/helpers.py            |  75 +++-
 timm/models/layers/__init__.py    |   6 +-
 timm/models/layers/classifier.py  |  28 +-
 timm/models/layers/norm_act.py    |   4 +-
 timm/models/nasnet.py             |   2 +-
 timm/models/resnetv2.py           | 578 ++++++++++++++++++++++++++++++
 timm/models/vision_transformer.py | 161 +++++++--
 train.py                          |   4 +-
 10 files changed, 815 insertions(+), 50 deletions(-)
 create mode 100644 timm/models/resnetv2.py

diff --git a/timm/models/__init__.py b/timm/models/__init__.py
index 53765fc8..fba6d1b8 100644
--- a/timm/models/__init__.py
+++ b/timm/models/__init__.py
@@ -16,6 +16,7 @@ from .regnet import *
 from .res2net import *
 from .resnest import *
 from .resnet import *
+from .resnetv2 import *
 from .rexnet import *
 from .selecsls import *
 from .senet import *
diff --git a/timm/models/factory.py b/timm/models/factory.py
index 70209c96..a7b6c90e 100644
--- a/timm/models/factory.py
+++ b/timm/models/factory.py
@@ -6,8 +6,6 @@ from .layers import set_layer_config
 def create_model(
         model_name,
         pretrained=False,
-        num_classes=1000,
-        in_chans=3,
         checkpoint_path='',
         scriptable=None,
         exportable=None,
@@ -18,8 +16,6 @@ def create_model(
     Args:
         model_name (str): name of model to instantiate
         pretrained (bool): load pretrained ImageNet-1k weights if true
-        num_classes (int): number of classes for final fully connected layer (default: 1000)
-        in_chans (int): number of input channels / colors (default: 3)
         checkpoint_path (str): path of checkpoint to load after model is initialized
         scriptable (bool): set layer config so that model is jit scriptable (not working for all models yet)
         exportable (bool): set layer config so that model is traceable / ONNX exportable (not fully impl/obeyed yet)
@@ -30,7 +26,7 @@ def create_model(
         global_pool (str): global pool type (default: 'avg')
         **: other kwargs are model specific
     """
-    model_args = dict(pretrained=pretrained, num_classes=num_classes, in_chans=in_chans)
+    model_args = dict(pretrained=pretrained)

     # Only EfficientNet and MobileNetV3 models have support for batchnorm params or drop_connect_rate passed as args
     is_efficientnet = is_model_in_modules(model_name, ['efficientnet', 'mobilenetv3'])
diff --git a/timm/models/helpers.py b/timm/models/helpers.py
index 77b98dc6..2a15e528 100644
--- a/timm/models/helpers.py
+++ b/timm/models/helpers.py
@@ -11,7 +11,7 @@ from typing import Callable

 import torch
 import torch.nn as nn
-import torch.utils.model_zoo as model_zoo
+from torch.hub import get_dir, load_state_dict_from_url, download_url_to_file, urlparse, HASH_REGEX

 from .features import FeatureListNet, FeatureDictNet, FeatureHookNet
 from .layers import Conv2dSame, Linear
@@ -88,15 +88,70 @@ def resume_checkpoint(model, checkpoint_path, optimizer=None, loss_scaler=None,
         raise FileNotFoundError()


-def load_pretrained(model, cfg=None, num_classes=1000, in_chans=3, filter_fn=None, strict=True):
+def load_custom_pretrained(model, cfg=None, load_fn=None, progress=False, check_hash=False):
+    r"""Loads a custom (read: non .pth) weight file
+
+    Downloads checkpoint file into cache-dir like torch.hub based loaders, but calls
+    a passed-in custom load fn, or the model's `load_pretrained` member fn.
+
+    If the file is already present in `model_dir`, it is reused.
+    The default value of `model_dir` is ``<hub_dir>/checkpoints`` where
+    `hub_dir` is the directory returned by :func:`~torch.hub.get_dir`.
+
+    Args:
+        model: The instantiated model to load weights into
+        cfg (dict): Default pretrained model cfg
+        load_fn: An external standalone fn that loads weights into provided model, otherwise a fn named
+            'load_pretrained' on the model will be called if it exists
+        progress (bool, optional): whether or not to display a progress bar to stderr. Default: False
+        check_hash (bool, optional): If True, the filename part of the URL should follow the naming convention
+            ``filename-<sha256>.ext`` where ``<sha256>`` is the first eight or more
+            digits of the SHA256 hash of the contents of the file. The hash is used to
+            ensure unique names and to verify the contents of the file. Default: False
+    """
     if cfg is None:
         cfg = getattr(model, 'default_cfg')
     if cfg is None or 'url' not in cfg or not cfg['url']:
-        _logger.warning("Pretrained model URL is invalid, using random initialization.")
+        _logger.warning("Pretrained model URL does not exist, using random initialization.")
         return
+    url = cfg['url']
+
+    # Issue warning to move data if old env is set
+    if os.getenv('TORCH_MODEL_ZOO'):
+        _logger.warning('TORCH_MODEL_ZOO is deprecated, please use env TORCH_HOME instead')
+
+    hub_dir = get_dir()
+    model_dir = os.path.join(hub_dir, 'checkpoints')
+
+    os.makedirs(model_dir, exist_ok=True)
+
+    parts = urlparse(url)
+    filename = os.path.basename(parts.path)
+    cached_file = os.path.join(model_dir, filename)
+    if not os.path.exists(cached_file):
+        _logger.info('Downloading: "{}" to {}\n'.format(url, cached_file))
+        hash_prefix = None
+        if check_hash:
+            r = HASH_REGEX.search(filename)  # r is Optional[Match[str]]
+            hash_prefix = r.group(1) if r else None
+        download_url_to_file(url, cached_file, hash_prefix, progress=progress)
+
+    if load_fn is not None:
+        load_fn(model, cached_file)
+    elif hasattr(model, 'load_pretrained'):
+        model.load_pretrained(cached_file)
+    else:
+        _logger.warning("Valid function to load pretrained weights is not available, using random initialization.")
+

-    state_dict = model_zoo.load_url(cfg['url'], progress=False, map_location='cpu')
+def load_pretrained(model, cfg=None, num_classes=1000, in_chans=3, filter_fn=None, strict=True, progress=False):
+    if cfg is None:
+        cfg = getattr(model, 'default_cfg')
+    if cfg is None or 'url' not in cfg or not cfg['url']:
+        _logger.warning("Pretrained model URL does not exist, using random initialization.")
+        return
+    state_dict = load_state_dict_from_url(cfg['url'], progress=progress, map_location='cpu')

     if filter_fn is not None:
         state_dict = filter_fn(state_dict)
@@ -269,6 +324,7 @@ def build_model_with_cfg(
         feature_cfg: dict = None,
         pretrained_strict: bool = True,
         pretrained_filter_fn: Callable = None,
+        pretrained_custom_load: bool = False,
         **kwargs):
     pruned = kwargs.pop('pruned', False)
     features = False
@@ -289,10 +345,13 @@ def build_model_with_cfg(
     # for classification models, check class attr, then kwargs, then default to 1k, otherwise 0 for feats
     num_classes_pretrained = 0 if features else getattr(model, 'num_classes', kwargs.get('num_classes', 1000))
     if pretrained:
-        load_pretrained(
-            model,
-            num_classes=num_classes_pretrained, in_chans=kwargs.get('in_chans', 3),
-            filter_fn=pretrained_filter_fn, strict=pretrained_strict)
+        if pretrained_custom_load:
+            load_custom_pretrained(model)
+        else:
+            load_pretrained(
+                model,
+                num_classes=num_classes_pretrained, in_chans=kwargs.get('in_chans', 3),
+                filter_fn=pretrained_filter_fn, strict=pretrained_strict)

     if features:
         feature_cls = FeatureListNet
diff --git a/timm/models/layers/__init__.py
b/timm/models/layers/__init__.py index dac1beb8..142377a9 100644 --- a/timm/models/layers/__init__.py +++ b/timm/models/layers/__init__.py @@ -7,7 +7,7 @@ from .classifier import ClassifierHead, create_classifier from .cond_conv2d import CondConv2d, get_condconv_initializer from .config import is_exportable, is_scriptable, is_no_jit, set_exportable, set_scriptable, set_no_jit,\ set_layer_config -from .conv2d_same import Conv2dSame +from .conv2d_same import Conv2dSame, conv2d_same from .conv_bn_act import ConvBnAct from .create_act import create_act_layer, get_act_layer, get_act_fn from .create_attn import create_attn @@ -20,8 +20,8 @@ from .helpers import to_ntuple, to_2tuple, to_3tuple, to_4tuple from .inplace_abn import InplaceAbn from .linear import Linear from .mixed_conv2d import MixedConv2d -from .norm_act import BatchNormAct2d -from .padding import get_padding +from .norm_act import BatchNormAct2d, GroupNormAct +from .padding import get_padding, get_same_padding, pad_same from .pool2d_same import AvgPool2dSame, create_pool2d from .se import SEModule from .selective_kernel import SelectiveKernelConv diff --git a/timm/models/layers/classifier.py b/timm/models/layers/classifier.py index 89fe5458..516cc6c9 100644 --- a/timm/models/layers/classifier.py +++ b/timm/models/layers/classifier.py @@ -9,31 +9,43 @@ from .adaptive_avgmax_pool import SelectAdaptivePool2d from .linear import Linear -def create_classifier(num_features, num_classes, pool_type='avg', use_conv=False): - flatten = not use_conv # flatten when we use a Linear layer after pooling +def _create_pool(num_features, num_classes, pool_type='avg', use_conv=False): + flatten_in_pool = not use_conv # flatten when we use a Linear layer after pooling if not pool_type: assert num_classes == 0 or use_conv,\ 'Pooling can only be disabled if classifier is also removed or conv classifier is used' - flatten = False # disable flattening if pooling is pass-through (no pooling) - global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=flatten) + flatten_in_pool = False # disable flattening if pooling is pass-through (no pooling) + global_pool = SelectAdaptivePool2d(pool_type=pool_type, flatten=flatten_in_pool) num_pooled_features = num_features * global_pool.feat_mult() + return global_pool, num_pooled_features + + +def _create_fc(num_features, num_classes, pool_type='avg', use_conv=False): if num_classes <= 0: fc = nn.Identity() # pass-through (no classifier) elif use_conv: - fc = nn.Conv2d(num_pooled_features, num_classes, 1, bias=True) + fc = nn.Conv2d(num_features, num_classes, 1, bias=True) else: # NOTE: using my Linear wrapper that fixes AMP + torchscript casting issue - fc = Linear(num_pooled_features, num_classes, bias=True) + fc = Linear(num_features, num_classes, bias=True) + return fc + + +def create_classifier(num_features, num_classes, pool_type='avg', use_conv=False): + global_pool, num_pooled_features = _create_pool(num_features, num_classes, pool_type, use_conv=use_conv) + fc = _create_fc(num_pooled_features, num_classes, use_conv=use_conv) return global_pool, fc class ClassifierHead(nn.Module): """Classifier head w/ configurable global pooling and dropout.""" - def __init__(self, in_chs, num_classes, pool_type='avg', drop_rate=0.): + def __init__(self, in_chs, num_classes, pool_type='avg', drop_rate=0., use_conv=False): super(ClassifierHead, self).__init__() self.drop_rate = drop_rate - self.global_pool, self.fc = create_classifier(in_chs, num_classes, pool_type=pool_type) + self.global_pool, num_pooled_features = 
_create_pool(in_chs, num_classes, pool_type, use_conv=use_conv)
+        self.fc = _create_fc(num_pooled_features, num_classes, use_conv=use_conv)
+        self.flatten_after_fc = use_conv and pool_type

     def forward(self, x):
         x = self.global_pool(x)
diff --git a/timm/models/layers/norm_act.py b/timm/models/layers/norm_act.py
index bddf9b26..e3fe3940 100644
--- a/timm/models/layers/norm_act.py
+++ b/timm/models/layers/norm_act.py
@@ -68,8 +68,8 @@ class BatchNormAct2d(nn.BatchNorm2d):

 class GroupNormAct(nn.GroupNorm):
-
-    def __init__(self, num_groups, num_channels, eps=1e-5, affine=True,
+    # NOTE num_channels and num_groups order flipped for easier layer swaps / binding of fixed args
+    def __init__(self, num_channels, num_groups, eps=1e-5, affine=True,
                  apply_act=True, act_layer=nn.ReLU, inplace=True, drop_block=None):
         super(GroupNormAct, self).__init__(num_groups, num_channels, eps=eps, affine=affine)
         if isinstance(act_layer, str):
diff --git a/timm/models/nasnet.py b/timm/models/nasnet.py
index 18b3725f..60e1a276 100644
--- a/timm/models/nasnet.py
+++ b/timm/models/nasnet.py
@@ -403,7 +403,7 @@ class ReductionCell1(nn.Module):
 class NASNetALarge(nn.Module):
     """NASNetALarge (6 @ 4032) """

-    def __init__(self, num_classes=1000, in_chans=1, stem_size=96, channel_multiplier=2,
+    def __init__(self, num_classes=1000, in_chans=3, stem_size=96, channel_multiplier=2,
                  num_features=4032, output_stride=32, drop_rate=0., global_pool='avg', pad_type='same'):
         super(NASNetALarge, self).__init__()
         self.num_classes = num_classes
diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py
new file mode 100644
index 00000000..6611ae49
--- /dev/null
+++ b/timm/models/resnetv2.py
@@ -0,0 +1,578 @@
+"""Pre-Activation ResNet v2 with GroupNorm and Weight Standardization.
+
+A PyTorch implementation of ResNetV2 adapted from the Google Big-Transfer (BiT) source code
+at https://github.com/google-research/big_transfer to match timm interfaces. The BiT weights have
+been included here as pretrained models from their original .NPZ checkpoints.
+
+Additionally, supports non pre-activation bottleneck for use as a backbone for Vision Transformers (ViT) and
+extra padding support to allow porting of official Hybrid ResNet pretrained weights from
+https://github.com/google-research/vision_transformer
+
+Thanks to the Google team for the above two repositories and associated papers.
+
+Original copyright of Google code below, modifications by Ross Wightman, Copyright 2020.
+"""
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict  # pylint: disable=g-importing-member
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from functools import partial
+
+from timm.data import IMAGENET_INCEPTION_MEAN, IMAGENET_INCEPTION_STD
+from .helpers import build_model_with_cfg
+from .registry import register_model
+from .layers import get_padding, GroupNormAct, ClassifierHead, DropPath, AvgPool2dSame, create_pool2d, conv2d_same
+
+
+def _cfg(url='', **kwargs):
+    return {
+        'url': url,
+        'num_classes': 1000, 'input_size': (3, 480, 480), 'pool_size': (7, 7),
+        'crop_pct': 1.0, 'interpolation': 'bilinear',
+        'mean': IMAGENET_INCEPTION_MEAN, 'std': IMAGENET_INCEPTION_STD,
+        'first_conv': 'stem.conv', 'classifier': 'head.fc',
+        **kwargs
+    }
+
+
+default_cfgs = {
+    # pretrained on imagenet21k, finetuned on imagenet1k
+    'resnetv2_50x1_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R50x1-ILSVRC2012.npz'),
+    'resnetv2_50x3_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R50x3-ILSVRC2012.npz'),
+    'resnetv2_101x1_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R101x1-ILSVRC2012.npz'),
+    'resnetv2_101x3_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R101x3-ILSVRC2012.npz'),
+    'resnetv2_152x2_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R152x2-ILSVRC2012.npz'),
+    'resnetv2_152x4_bitm': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R152x4-ILSVRC2012.npz'),
+
+    # trained on imagenet-21k
+    'resnetv2_50x1_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R50x1.npz',
+        num_classes=21843),
+    'resnetv2_50x3_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R50x3.npz',
+        num_classes=21843),
+    'resnetv2_101x1_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R101x1.npz',
+        num_classes=21843),
+    'resnetv2_101x3_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R101x3.npz',
+        num_classes=21843),
+    'resnetv2_152x2_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R152x2.npz',
+        num_classes=21843),
+    'resnetv2_152x4_bitm_in21k': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-M-R152x4.npz',
+        num_classes=21843),
+
+
+    # trained on imagenet-1k
+    'resnetv2_50x1_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R50x1-ILSVRC2012.npz'),
+    'resnetv2_50x3_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R50x3-ILSVRC2012.npz'),
+    'resnetv2_101x1_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R101x1-ILSVRC2012.npz'),
+    'resnetv2_101x3_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R101x3-ILSVRC2012.npz'),
+    'resnetv2_152x2_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R152x2-ILSVRC2012.npz'),
+    'resnetv2_152x4_bits': _cfg(
+        url='https://storage.googleapis.com/bit_models/BiT-S-R152x4-ILSVRC2012.npz'),
+}
+
+
+def make_div(v, divisor=8):
+    min_value = divisor
+    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
+    if new_v < 0.9 * v:
+        new_v += divisor
+    return new_v
+
+
+class StdConv2d(nn.Conv2d):
+
+    def __init__(
+            self, in_channel, out_channels, kernel_size, stride=1, dilation=1, bias=False, groups=1, eps=1e-5):
+        padding = get_padding(kernel_size, stride, dilation)
+        super().__init__(
+            in_channel, out_channels, kernel_size, stride=stride,
+            padding=padding, dilation=dilation, bias=bias, groups=groups)
+        self.eps = eps
+
+    def forward(self,
x): + w = self.weight + v, m = torch.var_mean(w, dim=[1, 2, 3], keepdim=True, unbiased=False) + w = (w - m) / (torch.sqrt(v) + self.eps) + x = F.conv2d(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups) + return x + + +class StdConv2dSame(nn.Conv2d): + """StdConv2d w/ TF compatible SAME padding. Used for ViT Hybrid model. + """ + def __init__( + self, in_channel, out_channels, kernel_size, stride=1, dilation=1, bias=False, groups=1, eps=1e-5): + padding = get_padding(kernel_size, stride, dilation) + super().__init__( + in_channel, out_channels, kernel_size, stride=stride, + padding=padding, dilation=dilation, bias=bias, groups=groups) + self.eps = eps + + def forward(self, x): + w = self.weight + v, m = torch.var_mean(w, dim=[1, 2, 3], keepdim=True, unbiased=False) + w = (w - m) / (torch.sqrt(v) + self.eps) + x = conv2d_same(x, w, self.bias, self.stride, self.padding, self.dilation, self.groups) + return x + + +def tf2th(conv_weights): + """Possibly convert HWIO to OIHW.""" + if conv_weights.ndim == 4: + conv_weights = conv_weights.transpose([3, 2, 0, 1]) + return torch.from_numpy(conv_weights) + + +class PreActBottleneck(nn.Module): + """Pre-activation (v2) bottleneck block. + + Follows the implementation of "Identity Mappings in Deep Residual Networks": + https://github.com/KaimingHe/resnet-1k-layers/blob/master/resnet-pre-act.lua + + Except it puts the stride on 3x3 conv when available. + """ + + def __init__( + self, in_chs, out_chs=None, bottle_ratio=0.25, stride=1, dilation=1, first_dilation=None, groups=1, + act_layer=None, conv_layer=None, norm_layer=None, proj_layer=None, drop_path_rate=0.): + super().__init__() + first_dilation = first_dilation or dilation + conv_layer = conv_layer or StdConv2d + norm_layer = norm_layer or partial(GroupNormAct, num_groups=32) + out_chs = out_chs or in_chs + mid_chs = make_div(out_chs * bottle_ratio) + + if proj_layer is not None: + self.downsample = proj_layer( + in_chs, out_chs, stride=stride, dilation=dilation, first_dilation=first_dilation, preact=True, + conv_layer=conv_layer, norm_layer=norm_layer) + else: + self.downsample = None + + self.norm1 = norm_layer(in_chs) + self.conv1 = conv_layer(in_chs, mid_chs, 1) + self.norm2 = norm_layer(mid_chs) + self.conv2 = conv_layer(mid_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups) + self.norm3 = norm_layer(mid_chs) + self.conv3 = conv_layer(mid_chs, out_chs, 1) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity() + + def forward(self, x): + x_preact = self.norm1(x) + + # shortcut branch + shortcut = x + if self.downsample is not None: + shortcut = self.downsample(x_preact) + + # residual branch + x = self.conv1(x_preact) + x = self.conv2(self.norm2(x)) + x = self.conv3(self.norm3(x)) + x = self.drop_path(x) + return x + shortcut + + +class Bottleneck(nn.Module): + """Non Pre-activation bottleneck block, equiv to V1.5/V1b Bottleneck. Used for ViT. 
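+    Layout is 1x1 -> 3x3 (w/ stride) -> 1x1 convs, norm after each conv, and the
+    activation applied after the residual add.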
+ """ + def __init__( + self, in_chs, out_chs=None, bottle_ratio=0.25, stride=1, dilation=1, first_dilation=None, groups=1, + act_layer=None, conv_layer=None, norm_layer=None, proj_layer=None, drop_path_rate=0.): + super().__init__() + first_dilation = first_dilation or dilation + act_layer = act_layer or nn.ReLU + conv_layer = conv_layer or StdConv2d + norm_layer = norm_layer or partial(GroupNormAct, num_groups=32) + out_chs = out_chs or in_chs + mid_chs = make_div(out_chs * bottle_ratio) + + if proj_layer is not None: + self.downsample = proj_layer( + in_chs, out_chs, stride=stride, dilation=dilation, preact=False, + conv_layer=conv_layer, norm_layer=norm_layer) + else: + self.downsample = None + + self.conv1 = conv_layer(in_chs, mid_chs, 1) + self.norm1 = norm_layer(mid_chs) + self.conv2 = conv_layer(mid_chs, mid_chs, 3, stride=stride, dilation=first_dilation, groups=groups) + self.norm2 = norm_layer(mid_chs) + self.conv3 = conv_layer(mid_chs, out_chs, 1) + self.norm3 = norm_layer(out_chs, apply_act=False) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity() + self.act3 = act_layer(inplace=True) + + def forward(self, x): + # shortcut branch + shortcut = x + if self.downsample is not None: + shortcut = self.downsample(x) + + # residual + x = self.conv1(x) + x = self.norm1(x) + x = self.conv2(x) + x = self.norm2(x) + x = self.conv3(x) + x = self.norm3(x) + x = self.act3(x + shortcut) + return x + + +class DownsampleConv(nn.Module): + def __init__( + self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, preact=True, + conv_layer=None, norm_layer=None): + super(DownsampleConv, self).__init__() + self.conv = conv_layer(in_chs, out_chs, 1, stride=stride) + self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False) + + def forward(self, x): + return self.norm(self.conv(x)) + + +class DownsampleAvg(nn.Module): + def __init__( + self, in_chs, out_chs, stride=1, dilation=1, first_dilation=None, + preact=True, conv_layer=None, norm_layer=None): + """ AvgPool Downsampling as in 'D' ResNet variants. This is not in RegNet space but I might experiment.""" + super(DownsampleAvg, self).__init__() + avg_stride = stride if dilation == 1 else 1 + if stride > 1 or dilation > 1: + avg_pool_fn = AvgPool2dSame if avg_stride == 1 and dilation > 1 else nn.AvgPool2d + self.pool = avg_pool_fn(2, avg_stride, ceil_mode=True, count_include_pad=False) + else: + self.pool = nn.Identity() + self.conv = conv_layer(in_chs, out_chs, 1, stride=1) + self.norm = nn.Identity() if preact else norm_layer(out_chs, apply_act=False) + + def forward(self, x): + return self.norm(self.conv(self.pool(x))) + + +class ResNetStage(nn.Module): + """ResNet Stage.""" + def __init__(self, in_chs, out_chs, stride, dilation, depth, bottle_ratio=0.25, groups=1, + avg_down=False, block_dpr=None, block_fn=PreActBottleneck, + act_layer=None, conv_layer=None, norm_layer=None, **block_kwargs): + super(ResNetStage, self).__init__() + first_dilation = 1 if dilation in (1, 2) else 2 + layer_kwargs = dict(act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer) + proj_layer = DownsampleAvg if avg_down else DownsampleConv + prev_chs = in_chs + self.blocks = nn.Sequential() + for block_idx in range(depth): + drop_path_rate = block_dpr[block_idx] if block_dpr else 0. 
+            stride = stride if block_idx == 0 else 1
+            self.blocks.add_module(str(block_idx), block_fn(
+                prev_chs, out_chs, stride=stride, dilation=dilation, bottle_ratio=bottle_ratio, groups=groups,
+                first_dilation=first_dilation, proj_layer=proj_layer, drop_path_rate=drop_path_rate,
+                **layer_kwargs, **block_kwargs))
+            prev_chs = out_chs
+            first_dilation = dilation
+            proj_layer = None
+
+    def forward(self, x):
+        x = self.blocks(x)
+        return x
+
+
+def create_stem(in_chs, out_chs, stem_type='', preact=True, conv_layer=None, norm_layer=None):
+    stem = OrderedDict()
+    assert stem_type in ('', 'fixed', 'same', 'deep', 'deep_fixed', 'deep_same')
+
+    # NOTE conv padding mode can be changed by overriding the conv_layer def
+    if 'deep' in stem_type:
+        # A 3 deep 3x3 conv stack as in ResNet V1D models
+        mid_chs = out_chs // 2
+        stem['conv1'] = conv_layer(in_chs, mid_chs, kernel_size=3, stride=2)
+        stem['conv2'] = conv_layer(mid_chs, mid_chs, kernel_size=3, stride=1)
+        stem['conv3'] = conv_layer(mid_chs, out_chs, kernel_size=3, stride=1)
+    else:
+        # The usual 7x7 stem conv
+        stem['conv'] = conv_layer(in_chs, out_chs, kernel_size=7, stride=2)
+
+    if not preact:
+        stem['norm'] = norm_layer(out_chs)
+
+    if 'fixed' in stem_type:
+        # 'fixed' SAME padding approximation that is used in BiT models
+        stem['pad'] = nn.ConstantPad2d(1, 0)
+        stem['pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=0)
+    elif 'same' in stem_type:
+        # full, input size based 'SAME' padding, used in ViT Hybrid model
+        stem['pool'] = create_pool2d('max', kernel_size=3, stride=2, padding='same')
+    else:
+        # the usual PyTorch symmetric padding
+        stem['pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+
+    return nn.Sequential(stem)
+
+
+class ResNetV2(nn.Module):
+    """Implementation of Pre-activation (v2) ResNet model.
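+    Follows the BiT (Big Transfer) reference implementation; preact=False swaps in the
+    v1.5-style Bottleneck blocks used for the ViT hybrid backbone.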
+ """ + + def __init__(self, layers, channels=(256, 512, 1024, 2048), + num_classes=1000, in_chans=3, global_pool='avg', output_stride=32, + width_factor=1, stem_chs=64, stem_type='', avg_down=False, preact=True, + act_layer=nn.ReLU, conv_layer=StdConv2d, norm_layer=partial(GroupNormAct, num_groups=32), + drop_rate=0., drop_path_rate=0.): + super().__init__() + self.num_classes = num_classes + self.drop_rate = drop_rate + wf = width_factor + + self.feature_info = [] + stem_chs = make_div(stem_chs * wf) + self.stem = create_stem(in_chans, stem_chs, stem_type, preact, conv_layer=conv_layer, norm_layer=norm_layer) + if not preact: + self.feature_info.append(dict(num_chs=stem_chs, reduction=4, module='stem')) + + prev_chs = stem_chs + curr_stride = 4 + dilation = 1 + block_fn = PreActBottleneck if preact else Bottleneck + self.stages = nn.Sequential() + for stage_idx, (d, c) in enumerate(zip(layers, channels)): + out_chs = make_div(c * wf) + stride = 1 if stage_idx == 0 else 2 + if curr_stride >= output_stride: + dilation *= stride + stride = 1 + if preact: + self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}.norm1')] + stage = ResNetStage( + prev_chs, out_chs, stride=stride, dilation=dilation, depth=d, avg_down=avg_down, + act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer, block_fn=block_fn) + prev_chs = out_chs + curr_stride *= stride + if not preact: + self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}')] + self.stages.add_module(str(stage_idx), stage) + + self.num_features = prev_chs + self.norm = norm_layer(self.num_features) if preact else nn.Identity() + if preact: + self.feature_info += [dict(num_chs=self.num_features, reduction=curr_stride, module=f'norm')] + self.head = ClassifierHead( + self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate, use_conv=True) + + for n, m in self.named_modules(): + if isinstance(m, nn.Linear) or ('.fc' in n and isinstance(m, nn.Conv2d)): + nn.init.normal_(m.weight, mean=0.0, std=0.01) + nn.init.zeros_(m.bias) + elif isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + + def get_classifier(self): + return self.head.fc + + def reset_classifier(self, num_classes, global_pool='avg'): + self.head = ClassifierHead( + self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate, use_conv=True) + + def forward_features(self, x): + x = self.stem(x) + x = self.stages(x) + x = self.norm(x) + return x + + def forward(self, x): + x = self.forward_features(x) + x = self.head(x) + if not self.head.global_pool.is_identity(): + x = x.flatten(1) # conv classifier, flatten if pooling isn't pass-through (disabled) + return x + + def load_pretrained(self, checkpoint_path, prefix='resnet/'): + import numpy as np + weights = np.load(checkpoint_path) + with torch.no_grad(): + self.stem.conv.weight.copy_(tf2th(weights[f'{prefix}root_block/standardized_conv2d/kernel'])) + self.norm.weight.copy_(tf2th(weights[f'{prefix}group_norm/gamma'])) + self.norm.bias.copy_(tf2th(weights[f'{prefix}group_norm/beta'])) + self.head.fc.weight.copy_(tf2th(weights[f'{prefix}head/conv2d/kernel'])) + self.head.fc.bias.copy_(tf2th(weights[f'{prefix}head/conv2d/bias'])) + for i, (sname, stage) in enumerate(self.stages.named_children()): + for j, (bname, block) in enumerate(stage.blocks.named_children()): + convname = 'standardized_conv2d' + block_prefix = f'{prefix}block{i + 1}/unit{j + 1:02d}/' + 
block.conv1.weight.copy_(tf2th(weights[f'{block_prefix}a/{convname}/kernel'])) + block.conv2.weight.copy_(tf2th(weights[f'{block_prefix}b/{convname}/kernel'])) + block.conv3.weight.copy_(tf2th(weights[f'{block_prefix}c/{convname}/kernel'])) + block.norm1.weight.copy_(tf2th(weights[f'{block_prefix}a/group_norm/gamma'])) + block.norm2.weight.copy_(tf2th(weights[f'{block_prefix}b/group_norm/gamma'])) + block.norm3.weight.copy_(tf2th(weights[f'{block_prefix}c/group_norm/gamma'])) + block.norm1.bias.copy_(tf2th(weights[f'{block_prefix}a/group_norm/beta'])) + block.norm2.bias.copy_(tf2th(weights[f'{block_prefix}b/group_norm/beta'])) + block.norm3.bias.copy_(tf2th(weights[f'{block_prefix}c/group_norm/beta'])) + if block.downsample is not None: + w = weights[f'{block_prefix}a/proj/{convname}/kernel'] + block.downsample.conv.weight.copy_(tf2th(w)) + + +def _create_resnetv2(variant, pretrained=False, **kwargs): + # FIXME feature map extraction is not setup properly for pre-activation mode right now + return build_model_with_cfg( + ResNetV2, variant, pretrained, default_cfg=default_cfgs[variant], pretrained_custom_load=True, + feature_cfg=dict(flatten_sequential=True), **kwargs) + + +@register_model +def resnetv2_50x1_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x1_bitm', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_50x3_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x3_bitm', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x1_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x1_bitm', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x3_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x3_bitm', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x2_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x2_bitm', pretrained=pretrained, + layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x4_bitm(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x4_bitm', pretrained=pretrained, + layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_50x1_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x1_bitm', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_50x3_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x3_bitm', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x1_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x1_bitm', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x3_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x3_bitm', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x2_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x2_bitm', pretrained=pretrained, + 
layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x4_bitm_in21k(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x4_bitm', pretrained=pretrained, + layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_50x1_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x1_bits', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_50x3_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_50x3_bits', pretrained=pretrained, + layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x1_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x1_bits', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_101x3_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_101x3_bits', pretrained=pretrained, + layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x2_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x2_bits', pretrained=pretrained, + layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) + + +@register_model +def resnetv2_152x4_bits(pretrained=False, **kwargs): + return _create_resnetv2( + 'resnetv2_152x4_bits', pretrained=pretrained, + layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) + diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index 72f3a61a..9b96e04e 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -23,11 +23,13 @@ Hacked together by / Copyright 2020 Ross Wightman import torch import torch.nn as nn from functools import partial +from collections import OrderedDict from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD from .helpers import load_pretrained from .layers import DropPath, to_2tuple, trunc_normal_ from .resnet import resnet26d, resnet50d +from .resnetv2 import ResNetV2, StdConv2dSame from .registry import register_model @@ -43,14 +45,19 @@ def _cfg(url='', **kwargs): default_cfgs = { - # patch models + # patch models (my experiments) 'vit_small_patch16_224': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/vit_small_p16_224-15ec54c9.pth', ), + + # patch models (weights ported from official JAX impl) 'vit_base_patch16_224': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), ), + 'vit_base_patch32_224': _cfg( + url='', # no official model weights for this combo, only for in21k + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), 'vit_base_patch16_384': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_384-83fb41ba.pth', input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0), @@ -60,15 +67,38 @@ default_cfgs = { 'vit_large_patch16_224': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p16_224-4ee7a4dc.pth', mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + 'vit_large_patch32_224': _cfg( + url='', # no official model weights for this combo, only for in21k + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), 
'vit_large_patch16_384': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p16_384-b3be5167.pth', input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0), 'vit_large_patch32_384': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth', input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0), - 'vit_huge_patch16_224': _cfg(), - 'vit_huge_patch32_384': _cfg(input_size=(3, 384, 384)), - # hybrid models + + # patch models, imagenet21k (weights ported from official JAX impl) + 'vit_base_patch16_224_in21k': _cfg( + url='', + num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + 'vit_base_patch32_224_in21k': _cfg( + url='', + num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + 'vit_large_patch16_224_in21k': _cfg( + url='', + num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + 'vit_large_patch32_224_in21k': _cfg( + url='', + num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + 'vit_huge_patch14_224_in21k': _cfg( + url='', + num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), + + # hybrid models (weights ported from official JAX impl) + 'vit_base_resnet50_384': _cfg( + input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0), + + # hybrid models (my experiments) 'vit_small_resnet26d_224': _cfg(), 'vit_small_resnet50d_s3_224': _cfg(), 'vit_base_resnet26d_224': _cfg(), @@ -184,20 +214,26 @@ class HybridEmbed(nn.Module): training = backbone.training if training: backbone.eval() - o = self.backbone(torch.zeros(1, in_chans, img_size[0], img_size[1]))[-1] + o = self.backbone(torch.zeros(1, in_chans, img_size[0], img_size[1])) + if isinstance(o, (list, tuple)): + o = o[-1] # last feature if backbone outputs list/tuple of features feature_size = o.shape[-2:] feature_dim = o.shape[1] backbone.train(training) else: feature_size = to_2tuple(feature_size) - feature_dim = self.backbone.feature_info.channels()[-1] + if hasattr(self.backbone, 'feature_info'): + feature_dim = self.backbone.feature_info.channels()[-1] + else: + feature_dim = self.backbone.num_features self.num_patches = feature_size[0] * feature_size[1] - self.proj = nn.Linear(feature_dim, embed_dim) + self.proj = nn.Conv2d(feature_dim, embed_dim, 1) def forward(self, x): - x = self.backbone(x)[-1] - x = x.flatten(2).transpose(1, 2) - x = self.proj(x) + x = self.backbone(x) + if isinstance(x, (list, tuple)): + x = x[-1] # last feature if backbone outputs list/tuple of features + x = self.proj(x).flatten(2).transpose(1, 2) return x @@ -205,8 +241,8 @@ class VisionTransformer(nn.Module): """ Vision Transformer with support for patch or hybrid CNN input stage """ def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12, - num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop_rate=0., attn_drop_rate=0., - drop_path_rate=0., hybrid_backbone=None, norm_layer=nn.LayerNorm): + num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, representation_size=None, + drop_rate=0., attn_drop_rate=0., drop_path_rate=0., hybrid_backbone=None, norm_layer=nn.LayerNorm): super().__init__() self.num_classes = num_classes self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models @@ -231,9 +267,14 @@ class VisionTransformer(nn.Module): for i in range(depth)]) self.norm = norm_layer(embed_dim) - # NOTE as per official 
impl, we could have a pre-logits representation dense layer + tanh here - #self.repr = nn.Linear(embed_dim, representation_size) - #self.repr_act = nn.Tanh() + # Representation layer + if representation_size: + self.pre_logits = nn.Sequential(OrderedDict([ + ('fc', nn.Linear(embed_dim, representation_size)), + ('act', nn.Tanh()) + ])) + else: + self.pre_logits = nn.Identity() # Classifier head self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity() @@ -279,6 +320,7 @@ class VisionTransformer(nn.Module): def forward(self, x): x = self.forward_features(x) + x = self.pre_logits(x) x = self.head(x) return x @@ -318,6 +360,17 @@ def vit_base_patch16_224(pretrained=False, **kwargs): return model +@register_model +def vit_base_patch32_224(pretrained=False, **kwargs): + model = VisionTransformer( + img_size=224, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_base_patch32_224'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + @register_model def vit_base_patch16_384(pretrained=False, **kwargs): model = VisionTransformer( @@ -351,6 +404,17 @@ def vit_large_patch16_224(pretrained=False, **kwargs): return model +@register_model +def vit_large_patch32_224(pretrained=False, **kwargs): + model = VisionTransformer( + img_size=224, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_large_patch32_224'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + @register_model def vit_large_patch16_384(pretrained=False, **kwargs): model = VisionTransformer( @@ -374,17 +438,72 @@ def vit_large_patch32_384(pretrained=False, **kwargs): @register_model -def vit_huge_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer(patch_size=16, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, **kwargs) - model.default_cfg = default_cfgs['vit_huge_patch16_224'] +def vit_base_patch16_224_in21k(pretrained=False, **kwargs): + model = VisionTransformer( + patch_size=16, num_classes=21843, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_base_patch16_224_in21k'] + if pretrained: + load_pretrained( + model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter) + return model + + +@register_model +def vit_base_patch32_224_in21k(pretrained=False, **kwargs): + model = VisionTransformer( + img_size=224, num_classes=21843, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, + qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_base_patch32_224_in21k'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + +@register_model +def vit_large_patch16_224_in21k(pretrained=False, **kwargs): + model = VisionTransformer( + patch_size=16, num_classes=21843, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_large_patch16_224_in21k'] + if pretrained: + load_pretrained(model, 
num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) return model @register_model -def vit_huge_patch32_384(pretrained=False, **kwargs): +def vit_large_patch32_224_in21k(pretrained=False, **kwargs): model = VisionTransformer( - img_size=384, patch_size=32, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, **kwargs) - model.default_cfg = default_cfgs['vit_huge_patch32_384'] + img_size=224, num_classes=21843, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_large_patch32_224_in21k'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + +@register_model +def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): + model = VisionTransformer( + img_size=224, patch_size=14, num_classes=21843, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, + qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_huge_patch14_224_in21k'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + +@register_model +def vit_base_resnet50_384(pretrained=False, **kwargs): + # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head + backbone = ResNetV2( + layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') + model = VisionTransformer( + img_size=384, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, + qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_base_resnet50_384'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) return model diff --git a/train.py b/train.py index ca406655..98e4ddd4 100755 --- a/train.py +++ b/train.py @@ -76,8 +76,8 @@ parser.add_argument('--resume', default='', type=str, metavar='PATH', help='Resume full model and optimizer state from checkpoint (default: none)') parser.add_argument('--no-resume-opt', action='store_true', default=False, help='prevent resume of optimizer state when resuming model') -parser.add_argument('--num-classes', type=int, default=1000, metavar='N', - help='number of label classes (default: 1000)') +parser.add_argument('--num-classes', type=int, default=None, metavar='N', + help='number of label classes (Model default if None)') parser.add_argument('--gp', default=None, type=str, metavar='POOL', help='Global pool type, one of (fast, avg, max, avgmax, avgmaxc). Model default if None.') parser.add_argument('--img-size', type=int, default=None, metavar='N', From ce69de70d3354571bc9e39726e92a0a2b28e2a1f Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Dec 2020 14:37:06 -0800 Subject: [PATCH 03/20] Add 21k weight urls to vision_transformer. 
Cleanup feature_info for preact ResNetV2 (BiT) models --- timm/models/resnetv2.py | 146 ++++++++++++++++-------------- timm/models/vision_transformer.py | 76 +++++++++------- validate.py | 2 +- 3 files changed, 123 insertions(+), 101 deletions(-) diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py index 6611ae49..3ce0605a 100644 --- a/timm/models/resnetv2.py +++ b/timm/models/resnetv2.py @@ -8,7 +8,9 @@ Additionally, supports non pre-activation bottleneck for use as a backbone for V extra padding support to allow porting of official Hybrid ResNet pretrained weights from https://github.com/google-research/vision_transformer -Thanks to the Google team for the above two repositories and associated papers. +Thanks to the Google team for the above two repositories and associated papers: +* Big Transfer (BiT): General Visual Representation Learning - https://arxiv.org/abs/1912.11370 +* An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale - https://arxiv.org/abs/2010.11929 Original copyright of Google code below, modifications by Ross Wightman, Copyright 2020. """ @@ -86,19 +88,19 @@ default_cfgs = { num_classes=21843), - # trained on imagenet-1k - 'resnetv2_50x1_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R50x1-ILSVRC2012.npz'), - 'resnetv2_50x3_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R50x3-ILSVRC2012.npz'), - 'resnetv2_101x1_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R101x3-ILSVRC2012.npz'), - 'resnetv2_101x3_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R101x3-ILSVRC2012.npz'), - 'resnetv2_152x2_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R152x2-ILSVRC2012.npz'), - 'resnetv2_152x4_bits': _cfg( - url='https://storage.googleapis.com/bit_models/BiT-S-R152x4-ILSVRC2012.npz'), + # trained on imagenet-1k, NOTE not overly interesting set of weights, leaving disabled for now + # 'resnetv2_50x1_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R50x1.npz'), + # 'resnetv2_50x3_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R50x3.npz'), + # 'resnetv2_101x1_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R101x3.npz'), + # 'resnetv2_101x3_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R101x3.npz'), + # 'resnetv2_152x2_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R152x2.npz'), + # 'resnetv2_152x4_bits': _cfg( + # url='https://storage.googleapis.com/bit_models/BiT-S-R152x4.npz'), } @@ -358,8 +360,8 @@ class ResNetV2(nn.Module): self.feature_info = [] stem_chs = make_div(stem_chs * wf) self.stem = create_stem(in_chans, stem_chs, stem_type, preact, conv_layer=conv_layer, norm_layer=norm_layer) - if not preact: - self.feature_info.append(dict(num_chs=stem_chs, reduction=4, module='stem')) + # NOTE no, reduction 2 feature if preact + self.feature_info.append(dict(num_chs=stem_chs, reduction=2, module='' if preact else 'stem.norm')) prev_chs = stem_chs curr_stride = 4 @@ -372,21 +374,19 @@ class ResNetV2(nn.Module): if curr_stride >= output_stride: dilation *= stride stride = 1 - if preact: - self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}.norm1')] stage = ResNetStage( prev_chs, out_chs, stride=stride, dilation=dilation, depth=d, avg_down=avg_down, act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer, block_fn=block_fn) prev_chs = out_chs curr_stride *= stride - if not preact: - 
self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=f'stages.{stage_idx}')] + feat_name = f'stages.{stage_idx}' + if preact: + feat_name = f'stages.{stage_idx + 1}.blocks.0.norm1' if (stage_idx + 1) != len(channels) else 'norm' + self.feature_info += [dict(num_chs=prev_chs, reduction=curr_stride, module=feat_name)] self.stages.add_module(str(stage_idx), stage) self.num_features = prev_chs self.norm = norm_layer(self.num_features) if preact else nn.Identity() - if preact: - self.feature_info += [dict(num_chs=self.num_features, reduction=curr_stride, module=f'norm')] self.head = ClassifierHead( self.num_features, num_classes, pool_type=global_pool, drop_rate=self.drop_rate, use_conv=True) @@ -446,9 +446,15 @@ class ResNetV2(nn.Module): def _create_resnetv2(variant, pretrained=False, **kwargs): # FIXME feature map extraction is not setup properly for pre-activation mode right now + preact = kwargs.get('preact', True) + feature_cfg = dict(flatten_sequential=True) + if preact: + feature_cfg['feature_cls'] = 'hook' + feature_cfg['out_indices'] = (1, 2, 3, 4) # no stride 2, 0 level feat for preact + return build_model_with_cfg( ResNetV2, variant, pretrained, default_cfg=default_cfgs[variant], pretrained_custom_load=True, - feature_cfg=dict(flatten_sequential=True), **kwargs) + feature_cfg=feature_cfg, **kwargs) @register_model @@ -496,83 +502,85 @@ def resnetv2_152x4_bitm(pretrained=False, **kwargs): @register_model def resnetv2_50x1_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_50x1_bitm', pretrained=pretrained, + 'resnetv2_50x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) @register_model def resnetv2_50x3_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_50x3_bitm', pretrained=pretrained, + 'resnetv2_50x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) @register_model def resnetv2_101x1_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_101x1_bitm', pretrained=pretrained, + 'resnetv2_101x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) @register_model def resnetv2_101x3_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_101x3_bitm', pretrained=pretrained, + 'resnetv2_101x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) @register_model def resnetv2_152x2_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_152x2_bitm', pretrained=pretrained, + 'resnetv2_152x2_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) @register_model def resnetv2_152x4_bitm_in21k(pretrained=False, **kwargs): return _create_resnetv2( - 'resnetv2_152x4_bitm', pretrained=pretrained, + 'resnetv2_152x4_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843), layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) -@register_model -def resnetv2_50x1_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50x1_bits', pretrained=pretrained, - layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) - - -@register_model 
-def resnetv2_50x3_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_50x3_bits', pretrained=pretrained, - layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) - - -@register_model -def resnetv2_101x1_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101x1_bits', pretrained=pretrained, - layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) - - -@register_model -def resnetv2_101x3_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_101x3_bits', pretrained=pretrained, - layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) - - -@register_model -def resnetv2_152x2_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_152x2_bits', pretrained=pretrained, - layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) - - -@register_model -def resnetv2_152x4_bits(pretrained=False, **kwargs): - return _create_resnetv2( - 'resnetv2_152x4_bits', pretrained=pretrained, - layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) +# NOTE the 'S' versions of the model weights arent as interesting as original 21k or transfer to 1K M. +# @register_model +# def resnetv2_50x1_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_50x1_bits', pretrained=pretrained, +# layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs) +# +# +# @register_model +# def resnetv2_50x3_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_50x3_bits', pretrained=pretrained, +# layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs) +# +# +# @register_model +# def resnetv2_101x1_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_101x1_bits', pretrained=pretrained, +# layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs) +# +# +# @register_model +# def resnetv2_101x3_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_101x3_bits', pretrained=pretrained, +# layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs) +# +# +# @register_model +# def resnetv2_152x2_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_152x2_bits', pretrained=pretrained, +# layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs) +# +# +# @register_model +# def resnetv2_152x4_bits(pretrained=False, **kwargs): +# return _create_resnetv2( +# 'resnetv2_152x4_bits', pretrained=pretrained, +# layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs) +# diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index 9b96e04e..ff5bd676 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -79,23 +79,27 @@ default_cfgs = { # patch models, imagenet21k (weights ported from official JAX impl) 'vit_base_patch16_224_in21k': _cfg( - url='', + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch16_224_in21k-e5005f0a.pth', num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), 'vit_base_patch32_224_in21k': _cfg( - url='', + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch32_224_in21k-8db57226.pth', num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), 'vit_large_patch16_224_in21k': _cfg( - url='', + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch16_224_in21k-606da67d.pth', num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 
0.5)), 'vit_large_patch32_224_in21k': _cfg( - url='', + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_patch32_224_in21k-9046d2e7.pth', num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), 'vit_huge_patch14_224_in21k': _cfg( - url='', + url='', # FIXME I have weights for this but > 2GB limit for github release binaries num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)), # hybrid models (weights ported from official JAX impl) + 'vit_base_resnet50_224_in21k': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth', + mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9), 'vit_base_resnet50_384': _cfg( + url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth', input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0), # hybrid models (my experiments) @@ -269,6 +273,7 @@ class VisionTransformer(nn.Module): # Representation layer if representation_size: + self.num_features = representation_size self.pre_logits = nn.Sequential(OrderedDict([ ('fc', nn.Linear(embed_dim, representation_size)), ('act', nn.Tanh()) @@ -315,12 +320,12 @@ class VisionTransformer(nn.Module): for blk in self.blocks: x = blk(x) - x = self.norm(x) - return x[:, 0] + x = self.norm(x)[:, 0] + x = self.pre_logits(x) + return x def forward(self, x): x = self.forward_features(x) - x = self.pre_logits(x) x = self.head(x) return x @@ -407,7 +412,7 @@ def vit_large_patch16_224(pretrained=False, **kwargs): @register_model def vit_large_patch32_224(pretrained=False, **kwargs): model = VisionTransformer( - img_size=224, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + img_size=224, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_large_patch32_224'] if pretrained: @@ -418,7 +423,7 @@ def vit_large_patch32_224(pretrained=False, **kwargs): @register_model def vit_large_patch16_384(pretrained=False, **kwargs): model = VisionTransformer( - img_size=384, patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + img_size=384, patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_large_patch16_384'] if pretrained: @@ -426,22 +431,12 @@ def vit_large_patch16_384(pretrained=False, **kwargs): return model -@register_model -def vit_large_patch32_384(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=384, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch32_384'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) - return model - - @register_model def vit_base_patch16_224_in21k(pretrained=False, **kwargs): + num_classes = kwargs.get('num_classes', 21843) model = VisionTransformer( - patch_size=16, num_classes=21843, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + patch_size=16, num_classes=num_classes, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + representation_size=768, 
norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_base_patch16_224_in21k'] if pretrained: load_pretrained( @@ -451,9 +446,10 @@ def vit_base_patch16_224_in21k(pretrained=False, **kwargs): @register_model def vit_base_patch32_224_in21k(pretrained=False, **kwargs): + num_classes = kwargs.get('num_classes', 21843) model = VisionTransformer( - img_size=224, num_classes=21843, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, - qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + img_size=224, num_classes=num_classes, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, + qkv_bias=True, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_base_patch32_224_in21k'] if pretrained: load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) @@ -462,9 +458,10 @@ def vit_base_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_large_patch16_224_in21k(pretrained=False, **kwargs): + num_classes = kwargs.get('num_classes', 21843) model = VisionTransformer( - patch_size=16, num_classes=21843, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + patch_size=16, num_classes=num_classes, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, + representation_size=1024, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_large_patch16_224_in21k'] if pretrained: load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) @@ -473,9 +470,10 @@ def vit_large_patch16_224_in21k(pretrained=False, **kwargs): @register_model def vit_large_patch32_224_in21k(pretrained=False, **kwargs): + num_classes = kwargs.get('num_classes', 21843) model = VisionTransformer( - img_size=224, num_classes=21843, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + img_size=224, num_classes=num_classes, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, + qkv_bias=True, representation_size=1024, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_large_patch32_224_in21k'] if pretrained: load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) @@ -484,15 +482,31 @@ def vit_large_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): + num_classes = kwargs.get('num_classes', 21843) model = VisionTransformer( - img_size=224, patch_size=14, num_classes=21843, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, - qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + img_size=224, patch_size=14, num_classes=num_classes, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, + qkv_bias=True, representation_size=1280, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) model.default_cfg = default_cfgs['vit_huge_patch14_224_in21k'] if pretrained: load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) return model +@register_model +def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): + # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head + num_classes = kwargs.get('num_classes', 21843) + backbone = ResNetV2( + layers=(3, 4, 9), preact=False, 
stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') + model = VisionTransformer( + img_size=224, num_classes=num_classes, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + hybrid_backbone=backbone, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['vit_base_resnet50_224_in21k'] + if pretrained: + load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + return model + + @register_model def vit_base_resnet50_384(pretrained=False, **kwargs): # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head diff --git a/validate.py b/validate.py index 645dfd1d..4eedd6fb 100755 --- a/validate.py +++ b/validate.py @@ -60,7 +60,7 @@ parser.add_argument('--std', type=float, nargs='+', default=None, metavar='STD' help='Override std deviation of of dataset') parser.add_argument('--interpolation', default='', type=str, metavar='NAME', help='Image resize interpolation type (overrides model)') -parser.add_argument('--num-classes', type=int, default=1000, +parser.add_argument('--num-classes', type=int, default=None, help='Number classes in dataset') parser.add_argument('--class-map', default='', type=str, metavar='FILENAME', help='path to class to idx mapping file (default: "")') From e35e9760a64ee687960b8ae14c6b77c223d598aa Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Dec 2020 14:39:05 -0800 Subject: [PATCH 04/20] More work on dataset / parser split and imagenet21k (tar) support --- timm/data/dataset.py | 30 ++++- timm/data/parsers/__init__.py | 2 +- timm/data/parsers/class_map.py | 1 + timm/data/parsers/constants.py | 2 - .../data/parsers/parser_image_class_in_tar.py | 107 ++++++++++++++++++ timm/data/parsers/parser_image_folder.py | 11 +- timm/data/parsers/parser_image_tar.py | 19 ++-- timm/data/parsers/parser_in21k_tar.py | 104 ----------------- 8 files changed, 144 insertions(+), 132 deletions(-) create mode 100644 timm/data/parsers/parser_image_class_in_tar.py delete mode 100644 timm/data/parsers/parser_in21k_tar.py diff --git a/timm/data/dataset.py b/timm/data/dataset.py index 8013c846..42a46eef 100644 --- a/timm/data/dataset.py +++ b/timm/data/dataset.py @@ -5,32 +5,50 @@ Hacked together by / Copyright 2020 Ross Wightman import torch.utils.data as data import os import torch +import logging -from .parsers import ParserImageFolder, ParserImageTar +from PIL import Image + +from .parsers import ParserImageFolder, ParserImageTar, ParserImageClassInTar + +_logger = logging.getLogger(__name__) + + +_ERROR_RETRY = 50 class ImageDataset(data.Dataset): def __init__( self, - img_root, + root, parser=None, class_map='', load_bytes=False, transform=None, ): - self.img_root = img_root if parser is None: - if os.path.isfile(img_root) and os.path.splitext(img_root)[1] == '.tar': - parser = ParserImageTar(img_root, load_bytes=load_bytes, class_map=class_map) + if os.path.isfile(root) and os.path.splitext(root)[1] == '.tar': + parser = ParserImageTar(root, class_map=class_map) else: - parser = ParserImageFolder(img_root, load_bytes=load_bytes, class_map=class_map) + parser = ParserImageFolder(root, class_map=class_map) self.parser = parser self.load_bytes = load_bytes self.transform = transform + self._consecutive_errors = 0 def __getitem__(self, index): img, target = self.parser[index] + try: + img = img.read() if self.load_bytes else Image.open(img).convert('RGB') + except Exception as e: + _logger.warning(f'Skipped sample 
(index {index}, file {self.parser.filename(index)}). {str(e)}') + self._consecutive_errors += 1 + if self._consecutive_errors < _ERROR_RETRY: + return self.__getitem__((index + 1) % len(self.parser)) + else: + raise e + self._consecutive_errors = 0 if self.transform is not None: img = self.transform(img) if target is None: diff --git a/timm/data/parsers/__init__.py b/timm/data/parsers/__init__.py index c502eec8..4ecb3a22 100644 --- a/timm/data/parsers/__init__.py +++ b/timm/data/parsers/__init__.py @@ -1,4 +1,4 @@ from .parser import Parser from .parser_image_folder import ParserImageFolder from .parser_image_tar import ParserImageTar -from .parser_in21k_tar import ParserIn21kTar \ No newline at end of file +from .parser_image_class_in_tar import ParserImageClassInTar \ No newline at end of file diff --git a/timm/data/parsers/class_map.py b/timm/data/parsers/class_map.py index f5fa7e2a..9ef4d1fa 100644 --- a/timm/data/parsers/class_map.py +++ b/timm/data/parsers/class_map.py @@ -1,3 +1,4 @@ +import os def load_class_map(filename, root=''): diff --git a/timm/data/parsers/constants.py b/timm/data/parsers/constants.py index 6e3be34b..e7ba484e 100644 --- a/timm/data/parsers/constants.py +++ b/timm/data/parsers/constants.py @@ -1,3 +1 @@ IMG_EXTENSIONS = ('.png', '.jpg', '.jpeg') - - diff --git a/timm/data/parsers/parser_image_class_in_tar.py b/timm/data/parsers/parser_image_class_in_tar.py new file mode 100644 index 00000000..f43ff359 --- /dev/null +++ b/timm/data/parsers/parser_image_class_in_tar.py @@ -0,0 +1,107 @@ +import os +import tarfile +import pickle +from glob import glob +import numpy as np + +from timm.utils.misc import natural_key + +from .parser import Parser +from .class_map import load_class_map +from .constants import IMG_EXTENSIONS + + +def extract_tarinfos(root, class_name_to_idx=None, cache_filename=None, extensions=None): + tar_filenames = glob(os.path.join(root, '*.tar'), recursive=True) + assert len(tar_filenames) + num_tars = len(tar_filenames) + + cache_path = '' + if cache_filename is not None: + cache_path = os.path.join(root, cache_filename) + if os.path.exists(cache_path): + with open(cache_path, 'rb') as pf: + tarinfo_map = pickle.load(pf) + else: + tarinfo_map = {} + for fi, fn in enumerate(tar_filenames): + if fi % 1000 == 0: + print(f'DEBUG: tar {fi}/{num_tars}') + # cannot keep this open across processes, reopen later + name = os.path.splitext(os.path.basename(fn))[0] + with tarfile.open(fn) as tf: + if extensions is None: + # assume all files are valid samples + class_tarinfos = tf.getmembers() + else: + class_tarinfos = [m for m in tf.getmembers() if os.path.splitext(m.name)[1].lower() in extensions] + tarinfo_map[name] = dict(tarinfos=class_tarinfos) + print(f'DEBUG: {len(class_tarinfos)} images for class {name}') + tarinfo_map = {k: v for k, v in sorted(tarinfo_map.items(), key=lambda k: natural_key(k[0]))} + if cache_path: + with open(cache_path, 'wb') as pf: + pickle.dump(tarinfo_map, pf, protocol=pickle.HIGHEST_PROTOCOL) + + tarinfos = [] + targets = [] + build_class_map = False + if class_name_to_idx is None: + class_name_to_idx = {} + build_class_map = True + for i, (name, metadata) in enumerate(tarinfo_map.items()): + class_idx = i + if build_class_map: + class_name_to_idx[name] = i + else: + if name not in class_name_to_idx: + # only samples with class in class mapping are added + continue + class_idx = class_name_to_idx[name] + num_samples = len(metadata['tarinfos']) + tarinfos.extend(metadata['tarinfos']) + targets.extend([class_idx] * 
num_samples) + + return tarinfos, np.array(targets), class_name_to_idx + + +class ParserImageClassInTar(Parser): + """ Multi-tarfile dataset parser where there is one .tar file per class + """ + + CACHE_FILENAME = '_tarinfos.pickle' + + def __init__(self, root, class_map=''): + super().__init__() + + class_name_to_idx = None + if class_map: + class_name_to_idx = load_class_map(class_map, root) + assert os.path.isdir(root) + self.root = root + self.tarinfos, self.targets, self.class_name_to_idx = extract_tarinfos( + self.root, class_name_to_idx=class_name_to_idx, + cache_filename=self.CACHE_FILENAME, extensions=IMG_EXTENSIONS) + self.class_idx_to_name = {v: k for k, v in self.class_name_to_idx.items()} + self.tarfiles = {} # to open lazily + self.cache_tarfiles = False + + def __len__(self): + return len(self.tarinfos) + + def __getitem__(self, index): + tarinfo = self.tarinfos[index] + target = self.targets[index] + class_name = self.class_idx_to_name[target] + if self.cache_tarfiles: + tf = self.tarfiles.setdefault( + class_name, tarfile.open(os.path.join(self.root, class_name + '.tar'))) + else: + tf = tarfile.open(os.path.join(self.root, class_name + '.tar')) + fileobj = tf.extractfile(tarinfo) + return fileobj, target + + def _filename(self, index, basename=False, absolute=False): + filename = self.tarinfos[index].name + if basename: + filename = os.path.basename(filename) + return filename diff --git a/timm/data/parsers/parser_image_folder.py b/timm/data/parsers/parser_image_folder.py index 8a61007f..93b16e40 100644 --- a/timm/data/parsers/parser_image_folder.py +++ b/timm/data/parsers/parser_image_folder.py @@ -2,7 +2,6 @@ import os import io import torch -from PIL import Image from timm.utils.misc import natural_key from .parser import Parser @@ -37,25 +36,21 @@ class ParserImageFolder(Parser): def __init__( self, root, - load_bytes=False, class_map=''): super().__init__() self.root = root - self.load_bytes = load_bytes - class_to_idx = None if class_map: class_to_idx = load_class_map(class_map, root) self.samples, self.class_to_idx = find_images_and_targets(root, class_to_idx=class_to_idx) if len(self.samples) == 0: - raise RuntimeError(f'Found 0 images in subfolders of {root}. ' - f'Supported image extensions are {", ".join(IMG_EXTENSIONS)}') + raise RuntimeError( + f'Found 0 images in subfolders of {root}. 
Supported image extensions are {", ".join(IMG_EXTENSIONS)}') def __getitem__(self, index): path, target = self.samples[index] - img = open(path, 'rb').read() if self.load_bytes else Image.open(path).convert('RGB') - return img, target + return open(path, 'rb'), target def __len__(self): return len(self.samples) diff --git a/timm/data/parsers/parser_image_tar.py b/timm/data/parsers/parser_image_tar.py index 504e71e8..657b56f9 100644 --- a/timm/data/parsers/parser_image_tar.py +++ b/timm/data/parsers/parser_image_tar.py @@ -1,16 +1,13 @@ import os -import io -import torch import tarfile from .parser import Parser from .class_map import load_class_map from .constants import IMG_EXTENSIONS -from PIL import Image from timm.utils.misc import natural_key -def extract_tar_info(tarfile, class_to_idx=None, sort=True): +def extract_tarinfo(tarfile, class_to_idx=None, sort=True): files = [] labels = [] for ti in tarfile.getmembers(): @@ -33,8 +30,9 @@ def extract_tar_info(tarfile, class_to_idx=None, sort=True): class ParserImageTar(Parser): - - def __init__(self, root, load_bytes=False, class_map=''): + """ Single tarfile dataset where classes are mapped to folders within tar + """ + def __init__(self, root, class_map=''): super().__init__() class_to_idx = None @@ -42,19 +40,18 @@ class ParserImageTar(Parser): class_to_idx = load_class_map(class_map, root) assert os.path.isfile(root) self.root = root + with tarfile.open(root) as tf: # cannot keep this open across processes, reopen later - self.samples, self.class_to_idx = extract_tar_info(tf, class_to_idx) + self.samples, self.class_to_idx = extract_tarinfo(tf, class_to_idx) self.imgs = self.samples self.tarfile = None # lazy init in __getitem__ - self.load_bytes = load_bytes def __getitem__(self, index): if self.tarfile is None: self.tarfile = tarfile.open(self.root) tarinfo, target = self.samples[index] - iob = self.tarfile.extractfile(tarinfo) - img = iob.read() if self.load_bytes else Image.open(iob).convert('RGB') - return img, target + fileobj = self.tarfile.extractfile(tarinfo) + return fileobj, target def __len__(self): return len(self.samples) diff --git a/timm/data/parsers/parser_in21k_tar.py b/timm/data/parsers/parser_in21k_tar.py deleted file mode 100644 index da7e9d26..00000000 --- a/timm/data/parsers/parser_in21k_tar.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -import io -import re -import torch -import tarfile -import pickle -from glob import glob -import numpy as np - -import torch.utils.data as data - -from timm.utils.misc import natural_key - -from .constants import IMG_EXTENSIONS - - -def load_class_map(filename, root=''): - class_map_path = filename - if not os.path.exists(class_map_path): - class_map_path = os.path.join(root, filename) - assert os.path.exists(class_map_path), 'Cannot locate specified class map file (%s)' % filename - class_map_ext = os.path.splitext(filename)[-1].lower() - if class_map_ext == '.txt': - with open(class_map_path) as f: - class_to_idx = {v.strip(): k for k, v in enumerate(f)} - else: - assert False, 'Unsupported class map extension' - return class_to_idx - - -class ParserIn21kTar(data.Dataset): - - CACHE_FILENAME = 'class_info.pickle' - - def __init__(self, root, class_map=''): - - class_to_idx = None - if class_map: - class_to_idx = load_class_map(class_map, root) - assert os.path.isdir(root) - self.root = root - tar_filenames = glob(os.path.join(self.root, '*.tar'), recursive=True) - assert len(tar_filenames) - num_tars = len(tar_filenames) - - if os.path.exists(self.CACHE_FILENAME): - with 
open(self.CACHE_FILENAME, 'rb') as pf: - class_info = pickle.load(pf) - else: - class_info = {} - for fi, fn in enumerate(tar_filenames): - if fi % 1000 == 0: - print(f'DEBUG: tar {fi}/{num_tars}') - # cannot keep this open across processes, reopen later - name = os.path.splitext(os.path.basename(fn))[0] - img_tarinfos = [] - with tarfile.open(fn) as tf: - img_tarinfos.extend(tf.getmembers()) - class_info[name] = dict(img_tarinfos=img_tarinfos) - print(f'DEBUG: {len(img_tarinfos)} images for synset {name}') - class_info = {k: v for k, v in sorted(class_info.items())} - - with open('class_info.pickle', 'wb') as pf: - pickle.dump(class_info, pf, protocol=pickle.HIGHEST_PROTOCOL) - - if class_to_idx is not None: - out_dict = {} - for k, v in class_info.items(): - if k in class_to_idx: - class_idx = class_to_idx[k] - v['class_idx'] = class_idx - out_dict[k] = v - class_info = {k: v for k, v in sorted(out_dict.items(), key=lambda x: x[1]['class_idx'])} - else: - for i, (k, v) in enumerate(class_info.items()): - v['class_idx'] = i - - self.img_infos = [] - self.targets = [] - self.tarnames = [] - for k, v in class_info.items(): - num_samples = len(v['img_tarinfos']) - self.img_infos.extend(v['img_tarinfos']) - self.targets.extend([v['class_idx']] * num_samples) - self.tarnames.extend([k] * num_samples) - self.targets = np.array(self.targets) # separate, uniform np array are more memory efficient - self.tarnames = np.array(self.tarnames) - - self.tarfiles = {} # to open lazily - del class_info - - def __len__(self): - return len(self.img_infos) - - def __getitem__(self, idx): - img_tarinfo = self.img_infos[idx] - name = self.tarnames[idx] - tf = self.tarfiles.setdefault(name, tarfile.open(os.path.join(self.root, name + '.tar'))) - img_bytes = tf.extractfile(img_tarinfo) - if self.targets: - target = self.targets[idx] - else: - target = None - return img_bytes, target From e553480b67c95b49b6f21a2696546c2bb8d19ddb Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 28 Dec 2020 14:39:29 -0800 Subject: [PATCH 05/20] Add 21843 synset txt for google 21k models like BiT/ViT --- results/imagenet21k_goog_synsets.txt | 21843 +++++++++++++++++++++++++ 1 file changed, 21843 insertions(+) create mode 100644 results/imagenet21k_goog_synsets.txt diff --git a/results/imagenet21k_goog_synsets.txt b/results/imagenet21k_goog_synsets.txt new file mode 100644 index 00000000..e276a97b --- /dev/null +++ b/results/imagenet21k_goog_synsets.txt @@ -0,0 +1,21843 @@ +n00004475 +n00005787 +n00006024 +n00006484 +n00007846 +n00015388 +n00017222 +n00021265 +n00021939 +n00120010 +n00141669 +n00288000 +n00288190 +n00288384 +n00324978 +n00326094 +n00433458 +n00433661 +n00433802 +n00434075 +n00439826 +n00440039 +n00440218 +n00440382 +n00440509 +n00440643 +n00440747 +n00440941 +n00441073 +n00441824 +n00442115 +n00442437 +n00442847 +n00442981 +n00443231 +n00443375 +n00443517 +n00443692 +n00443803 +n00443917 +n00444142 +n00444340 +n00444490 +n00444651 +n00444846 +n00444937 +n00445055 +n00445226 +n00445351 +n00445685 +n00445802 +n00446311 +n00446411 +n00446493 +n00446632 +n00446804 +n00446980 +n00447073 +n00447221 +n00447361 +n00447463 +n00447540 +n00447957 +n00448126 +n00448232 +n00448466 +n00448640 +n00448748 +n00448872 +n00448958 +n00449054 +n00449168 +n00449295 +n00449517 +n00449695 +n00449796 +n00449892 +n00449977 +n00450070 +n00450335 +n00450700 +n00450866 +n00450998 +n00451186 +n00451370 +n00451563 +n00451635 +n00451768 +n00451866 +n00452034 +n00452152 +n00452293 +n00452734 +n00452864 +n00453126 +n00453313 +n00453396 
+n00453478 +n00453631 +n00453935 +n00454237 +n00454395 +n00454493 +n00454624 +n00454855 +n00454983 +n00455076 +n00455173 +n00456465 +n00463246 +n00463543 +n00464277 +n00464478 +n00464651 +n00464894 +n00466273 +n00466377 +n00466524 +n00466630 +n00466712 +n00466880 +n00467320 +n00467536 +n00467719 +n00467995 +n00468299 +n00468480 +n00469651 +n00470554 +n00470682 +n00470830 +n00470966 +n00471437 +n00471613 +n00474568 +n00474657 +n00474769 +n00474881 +n00475014 +n00475142 +n00475273 +n00475403 +n00475535 +n00475661 +n00475787 +n00476140 +n00476235 +n00476389 +n00477392 +n00477639 +n00477827 +n00478262 +n00479076 +n00479440 +n00479616 +n00479734 +n00479887 +n00480211 +n00480366 +n00480508 +n00480885 +n00480993 +n00481803 +n00481938 +n00482122 +n00482298 +n00483205 +n00483313 +n00483409 +n00483508 +n00483605 +n00483705 +n00483848 +n00523513 +n00812526 +n00825773 +n00887544 +n01035504 +n01035667 +n01055165 +n01314388 +n01314663 +n01314781 +n01314910 +n01315213 +n01315330 +n01315581 +n01315805 +n01316422 +n01316579 +n01316734 +n01316949 +n01317089 +n01317294 +n01317391 +n01317541 +n01317813 +n01317916 +n01318053 +n01318279 +n01318381 +n01318478 +n01318660 +n01318894 +n01319001 +n01319187 +n01319467 +n01319685 +n01320872 +n01321123 +n01321230 +n01321456 +n01321579 +n01321770 +n01321854 +n01322221 +n01322343 +n01322508 +n01322604 +n01322685 +n01322898 +n01322983 +n01323068 +n01323155 +n01323261 +n01323355 +n01323493 +n01323599 +n01323781 +n01324305 +n01324431 +n01324610 +n01324799 +n01324916 +n01325060 +n01326291 +n01327909 +n01329186 +n01330126 +n01330497 +n01332181 +n01333082 +n01333483 +n01333610 +n01334217 +n01334690 +n01335218 +n01337191 +n01337734 +n01338685 +n01339083 +n01339336 +n01339471 +n01339801 +n01340014 +n01340522 +n01340785 +n01340935 +n01341090 +n01342269 +n01347583 +n01349735 +n01350226 +n01350701 +n01351170 +n01351315 +n01357328 +n01357507 +n01358572 +n01359762 +n01362336 +n01363719 +n01365474 +n01365885 +n01366700 +n01367772 +n01368672 +n01369358 +n01369484 +n01374703 +n01374846 +n01375204 +n01376237 +n01376437 +n01376543 +n01377278 +n01377510 +n01377694 +n01378545 +n01379389 +n01380610 +n01380754 +n01381044 +n01382033 +n01384084 +n01384164 +n01384687 +n01385017 +n01385330 +n01386007 +n01386182 +n01386354 +n01387065 +n01389507 +n01390123 +n01390763 +n01392275 +n01392380 +n01393486 +n01394040 +n01394492 +n01394771 +n01395254 +n01396048 +n01396617 +n01397114 +n01397690 +n01397871 +n01400247 +n01400391 +n01402600 +n01403457 +n01404365 +n01404495 +n01405007 +n01405616 +n01407798 +n01410457 +n01411450 +n01412694 +n01413457 +n01414216 +n01415626 +n01415920 +n01416213 +n01418498 +n01418620 +n01419332 +n01419573 +n01419888 +n01421333 +n01421807 +n01422185 +n01422335 +n01422450 +n01423302 +n01423617 +n01424420 +n01425223 +n01427399 +n01429172 +n01438208 +n01438581 +n01439121 +n01439514 +n01439808 +n01440160 +n01440242 +n01440467 +n01440764 +n01441117 +n01441272 +n01441425 +n01441910 +n01442450 +n01442710 +n01442972 +n01443243 +n01443537 +n01443831 +n01444339 +n01444783 +n01445429 +n01445593 +n01445857 +n01446152 +n01446589 +n01446760 +n01447139 +n01447331 +n01447658 +n01447946 +n01448291 +n01448594 +n01448951 +n01449374 +n01449712 +n01449980 +n01450661 +n01450950 +n01451115 +n01451295 +n01451426 +n01451863 +n01452345 +n01453087 +n01453475 +n01453742 +n01454545 +n01454856 +n01455317 +n01455461 +n01455778 +n01456137 +n01456454 +n01456756 +n01457082 +n01457407 +n01457852 +n01458746 +n01458842 +n01459791 +n01460303 +n01461315 +n01461646 +n01462042 +n01462544 +n01462803 +n01464844 +n01466257 
+n01467336 +n01467804 +n01468238 +n01468712 +n01469103 +n01469723 +n01470145 +n01470479 +n01470733 +n01470895 +n01471682 +n01472303 +n01472502 +n01473806 +n01474283 +n01474864 +n01475232 +n01475940 +n01476418 +n01477080 +n01477525 +n01477875 +n01478511 +n01478969 +n01479213 +n01479820 +n01480106 +n01480516 +n01480880 +n01481331 +n01481498 +n01482071 +n01482330 +n01483021 +n01483522 +n01483830 +n01484097 +n01484285 +n01484447 +n01484562 +n01484850 +n01485479 +n01486010 +n01486540 +n01486838 +n01487506 +n01488038 +n01488918 +n01489501 +n01489709 +n01489920 +n01490112 +n01490360 +n01490670 +n01491006 +n01491361 +n01491661 +n01491874 +n01492357 +n01492569 +n01492708 +n01492860 +n01493146 +n01493541 +n01493829 +n01494041 +n01494475 +n01494757 +n01494882 +n01495006 +n01495493 +n01495701 +n01496331 +n01497118 +n01497413 +n01497738 +n01498041 +n01498406 +n01498699 +n01498989 +n01499396 +n01499732 +n01500091 +n01500476 +n01500854 +n01501160 +n01501641 +n01501777 +n01501948 +n01502101 +n01503061 +n01503976 +n01504179 +n01504344 +n01514668 +n01514752 +n01514859 +n01514926 +n01515078 +n01515217 +n01515303 +n01516212 +n01517389 +n01517565 +n01517966 +n01518878 +n01519563 +n01519873 +n01520576 +n01521399 +n01521756 +n01522450 +n01523105 +n01524359 +n01524761 +n01525720 +n01526521 +n01526766 +n01527194 +n01527347 +n01527617 +n01527917 +n01528396 +n01528654 +n01528845 +n01529672 +n01530439 +n01530575 +n01531178 +n01531344 +n01531512 +n01531639 +n01531811 +n01531971 +n01532325 +n01532511 +n01532829 +n01533000 +n01533339 +n01533481 +n01533651 +n01533893 +n01534155 +n01534433 +n01534582 +n01534762 +n01535140 +n01535469 +n01535690 +n01536035 +n01536186 +n01536334 +n01536644 +n01536780 +n01537134 +n01537544 +n01537895 +n01538059 +n01538200 +n01538362 +n01538630 +n01538955 +n01539272 +n01539573 +n01539925 +n01540090 +n01540233 +n01540566 +n01540832 +n01541102 +n01541386 +n01541760 +n01541922 +n01542168 +n01542433 +n01542786 +n01543175 +n01543383 +n01543632 +n01543936 +n01544208 +n01544389 +n01544704 +n01545574 +n01546039 +n01546506 +n01546921 +n01547832 +n01548301 +n01548492 +n01548694 +n01548865 +n01549053 +n01549430 +n01549641 +n01549886 +n01550172 +n01550761 +n01551080 +n01551300 +n01551711 +n01552034 +n01552333 +n01552813 +n01553142 +n01553527 +n01553762 +n01554017 +n01554448 +n01555004 +n01555305 +n01555809 +n01556182 +n01556514 +n01557185 +n01557962 +n01558149 +n01558307 +n01558461 +n01558594 +n01558765 +n01558993 +n01559160 +n01559477 +n01559639 +n01559804 +n01560105 +n01560280 +n01560419 +n01560636 +n01560793 +n01560935 +n01561181 +n01561452 +n01561732 +n01562014 +n01562265 +n01562451 +n01563128 +n01563449 +n01563746 +n01563945 +n01564101 +n01564217 +n01564394 +n01564773 +n01564914 +n01565078 +n01565345 +n01565599 +n01565930 +n01566207 +n01566645 +n01567133 +n01567678 +n01567879 +n01568132 +n01568294 +n01568720 +n01568892 +n01569060 +n01569262 +n01569423 +n01569566 +n01569836 +n01569971 +n01570267 +n01570421 +n01570676 +n01570839 +n01571410 +n01571904 +n01572328 +n01572489 +n01572654 +n01572782 +n01573074 +n01573240 +n01573360 +n01573627 +n01573898 +n01574045 +n01574390 +n01574560 +n01574801 +n01575117 +n01575401 +n01575745 +n01576076 +n01576358 +n01576695 +n01577035 +n01577458 +n01577659 +n01577941 +n01578180 +n01578575 +n01579028 +n01579149 +n01579260 +n01579410 +n01579578 +n01579729 +n01580077 +n01580379 +n01580490 +n01580772 +n01580870 +n01581166 +n01581434 +n01581730 +n01581874 +n01581984 +n01582220 +n01582398 +n01582498 +n01582856 +n01583209 +n01583495 +n01583828 +n01584225 +n01584695 +n01584853 
+n01585121 +n01585287 +n01585422 +n01585715 +n01586020 +n01586374 +n01586941 +n01587278 +n01587526 +n01587834 +n01588002 +n01588431 +n01588725 +n01588996 +n01589286 +n01589718 +n01589893 +n01590220 +n01591005 +n01591123 +n01591301 +n01591697 +n01592084 +n01592257 +n01592387 +n01592540 +n01592694 +n01593028 +n01593282 +n01593553 +n01594004 +n01594372 +n01594787 +n01594968 +n01595168 +n01595450 +n01595624 +n01595974 +n01596273 +n01596608 +n01597022 +n01597336 +n01597737 +n01597906 +n01598074 +n01598271 +n01598588 +n01598988 +n01599159 +n01599269 +n01599388 +n01599556 +n01599741 +n01600085 +n01600341 +n01600657 +n01601068 +n01601410 +n01601694 +n01602080 +n01602209 +n01602630 +n01602832 +n01603000 +n01603152 +n01603600 +n01603812 +n01603953 +n01604330 +n01604968 +n01605630 +n01606097 +n01606177 +n01606522 +n01606672 +n01606809 +n01606978 +n01607309 +n01607429 +n01607600 +n01607812 +n01607962 +n01608265 +n01608432 +n01608814 +n01609062 +n01609391 +n01609751 +n01609956 +n01610100 +n01610226 +n01610552 +n01610955 +n01611472 +n01611674 +n01611800 +n01611969 +n01612122 +n01612275 +n01612476 +n01612628 +n01612955 +n01613177 +n01613294 +n01613615 +n01613807 +n01614038 +n01614343 +n01614556 +n01614925 +n01615121 +n01615303 +n01615458 +n01615703 +n01616086 +n01616318 +n01616551 +n01616764 +n01617095 +n01617443 +n01617766 +n01618082 +n01618503 +n01618922 +n01619310 +n01619536 +n01619835 +n01620135 +n01620414 +n01620735 +n01621127 +n01621635 +n01622120 +n01622352 +n01622483 +n01622779 +n01622959 +n01623110 +n01623425 +n01623615 +n01623706 +n01623880 +n01624115 +n01624212 +n01624305 +n01624537 +n01624833 +n01625121 +n01625562 +n01627424 +n01628331 +n01628770 +n01629276 +n01629819 +n01629962 +n01630148 +n01630284 +n01630670 +n01630901 +n01631175 +n01631354 +n01631512 +n01631663 +n01632047 +n01632308 +n01632458 +n01632601 +n01632777 +n01632952 +n01633406 +n01633781 +n01634227 +n01634522 +n01635027 +n01635176 +n01635480 +n01636127 +n01636352 +n01636510 +n01636829 +n01637112 +n01637338 +n01637615 +n01637932 +n01638194 +n01638329 +n01638722 +n01639187 +n01639765 +n01640846 +n01641206 +n01641391 +n01641577 +n01641739 +n01641930 +n01642097 +n01642257 +n01642391 +n01642539 +n01642943 +n01643255 +n01643507 +n01643896 +n01644373 +n01644900 +n01645466 +n01645776 +n01646292 +n01646388 +n01646555 +n01646648 +n01646802 +n01646902 +n01647033 +n01647180 +n01647303 +n01647466 +n01647640 +n01648139 +n01648356 +n01648620 +n01649170 +n01649412 +n01649556 +n01649726 +n01650167 +n01650690 +n01650901 +n01651059 +n01651285 +n01651487 +n01651641 +n01651778 +n01652026 +n01652297 +n01653026 +n01653223 +n01653509 +n01653773 +n01654083 +n01654637 +n01654863 +n01655344 +n01661091 +n01661592 +n01661818 +n01662060 +n01662622 +n01662784 +n01663401 +n01663782 +n01664065 +n01664369 +n01664492 +n01664674 +n01664990 +n01665541 +n01665932 +n01666228 +n01666585 +n01667114 +n01667432 +n01667778 +n01668091 +n01668436 +n01668665 +n01668892 +n01669191 +n01669372 +n01669654 +n01670092 +n01670535 +n01670802 +n01671125 +n01671479 +n01671705 +n01672032 +n01672432 +n01672611 +n01673282 +n01674216 +n01674464 +n01674990 +n01675352 +n01675722 +n01676755 +n01677366 +n01677747 +n01678043 +n01678343 +n01678657 +n01679005 +n01679307 +n01679626 +n01679962 +n01680264 +n01680478 +n01680655 +n01680813 +n01680983 +n01681328 +n01681653 +n01681940 +n01682172 +n01682435 +n01682714 +n01683201 +n01683558 +n01684133 +n01684578 +n01684741 +n01685439 +n01685808 +n01686044 +n01686220 +n01686403 +n01686609 +n01686808 +n01687128 +n01687290 +n01687665 +n01687978 +n01688243 
+n01688961 +n01689081 +n01689411 +n01689811 +n01690149 +n01690466 +n01691217 +n01691652 +n01691951 +n01692333 +n01692523 +n01692864 +n01693175 +n01693334 +n01693783 +n01694178 +n01694311 +n01694709 +n01694955 +n01695060 +n01696633 +n01697178 +n01697457 +n01697611 +n01697749 +n01697978 +n01698434 +n01698640 +n01698782 +n01699040 +n01699254 +n01699675 +n01701551 +n01701859 +n01702256 +n01702479 +n01703011 +n01703161 +n01703569 +n01704103 +n01704323 +n01704626 +n01705010 +n01705591 +n01705934 +n01707294 +n01708106 +n01708998 +n01709484 +n01709876 +n01710177 +n01711160 +n01712008 +n01712752 +n01713170 +n01713764 +n01714231 +n01715888 +n01717016 +n01717229 +n01717467 +n01718096 +n01718414 +n01719403 +n01721174 +n01721898 +n01722670 +n01722998 +n01723579 +n01724231 +n01724840 +n01725086 +n01725713 +n01726203 +n01726692 +n01727646 +n01728266 +n01728572 +n01728920 +n01729322 +n01729672 +n01729977 +n01730185 +n01730307 +n01730563 +n01730812 +n01730960 +n01731137 +n01731277 +n01731545 +n01731764 +n01731941 +n01732093 +n01732244 +n01732614 +n01732789 +n01732989 +n01733214 +n01733466 +n01733757 +n01733957 +n01734104 +n01734418 +n01734637 +n01734808 +n01735189 +n01735439 +n01735577 +n01735728 +n01736032 +n01736375 +n01736796 +n01737021 +n01737472 +n01737728 +n01737875 +n01738065 +n01738306 +n01738601 +n01738731 +n01739094 +n01739381 +n01739647 +n01739871 +n01740131 +n01740551 +n01740885 +n01741232 +n01741442 +n01741562 +n01741943 +n01742172 +n01742447 +n01742821 +n01743086 +n01743605 +n01743936 +n01744100 +n01744270 +n01744401 +n01744555 +n01745125 +n01745484 +n01745902 +n01746191 +n01746359 +n01746952 +n01747285 +n01747589 +n01747885 +n01748264 +n01748389 +n01748686 +n01748906 +n01749244 +n01749582 +n01749742 +n01749939 +n01750167 +n01750437 +n01750743 +n01751036 +n01751215 +n01751472 +n01751748 +n01752165 +n01752585 +n01752736 +n01753032 +n01753180 +n01753488 +n01753959 +n01754370 +n01754533 +n01754876 +n01755581 +n01755740 +n01755952 +n01756089 +n01756291 +n01756508 +n01756733 +n01756916 +n01757115 +n01757343 +n01757677 +n01757901 +n01758141 +n01758757 +n01758895 +n01767661 +n01768244 +n01769347 +n01770081 +n01770393 +n01770795 +n01771100 +n01771417 +n01771766 +n01772222 +n01772664 +n01773157 +n01773549 +n01773797 +n01774097 +n01774384 +n01774750 +n01775062 +n01775370 +n01775730 +n01776192 +n01776313 +n01776705 +n01777304 +n01777467 +n01777649 +n01777909 +n01778217 +n01778487 +n01778621 +n01778801 +n01779148 +n01779463 +n01779629 +n01779939 +n01780142 +n01780426 +n01780696 +n01781071 +n01781570 +n01781698 +n01781875 +n01782209 +n01782516 +n01783017 +n01783706 +n01784293 +n01784675 +n01785667 +n01786646 +n01787006 +n01787191 +n01787835 +n01788291 +n01788579 +n01788864 +n01789386 +n01789740 +n01790171 +n01790304 +n01790398 +n01790557 +n01790711 +n01790812 +n01791107 +n01791314 +n01791388 +n01791463 +n01791625 +n01791954 +n01792042 +n01792158 +n01792429 +n01792530 +n01792640 +n01792808 +n01792955 +n01793085 +n01793159 +n01793249 +n01793340 +n01793435 +n01793565 +n01793715 +n01794158 +n01794344 +n01794651 +n01795088 +n01795545 +n01795735 +n01795900 +n01796019 +n01796105 +n01796340 +n01796519 +n01796729 +n01797020 +n01797307 +n01797601 +n01797886 +n01798168 +n01798484 +n01798706 +n01798839 +n01798979 +n01799302 +n01799679 +n01800195 +n01800424 +n01800633 +n01801088 +n01801479 +n01801672 +n01801876 +n01802159 +n01802721 +n01803078 +n01803362 +n01803641 +n01803893 +n01804163 +n01804478 +n01804653 +n01804921 +n01805070 +n01805321 +n01805801 +n01806061 +n01806143 +n01806297 +n01806364 +n01806467 +n01806567 
+n01806847 +n01807105 +n01807496 +n01807828 +n01808140 +n01808291 +n01808596 +n01809106 +n01809371 +n01809752 +n01810268 +n01810700 +n01811243 +n01811909 +n01812187 +n01812337 +n01812662 +n01812866 +n01813088 +n01813385 +n01813532 +n01813658 +n01813948 +n01814217 +n01814370 +n01814549 +n01814620 +n01814755 +n01814921 +n01815036 +n01815270 +n01815601 +n01816017 +n01816140 +n01816474 +n01816887 +n01817263 +n01817346 +n01817953 +n01818299 +n01818515 +n01818832 +n01819115 +n01819313 +n01819465 +n01819734 +n01820052 +n01820348 +n01820546 +n01820801 +n01821076 +n01821203 +n01821554 +n01821869 +n01822300 +n01822602 +n01823013 +n01823414 +n01823740 +n01824035 +n01824344 +n01824575 +n01824749 +n01825278 +n01825930 +n01826364 +n01826680 +n01826844 +n01827403 +n01827793 +n01828096 +n01828556 +n01828970 +n01829413 +n01829869 +n01830042 +n01830479 +n01830915 +n01831360 +n01831712 +n01832167 +n01832493 +n01832813 +n01833112 +n01833415 +n01833805 +n01834177 +n01834540 +n01835276 +n01835769 +n01835918 +n01836087 +n01836673 +n01837072 +n01837526 +n01838038 +n01838598 +n01839086 +n01839330 +n01839598 +n01839750 +n01839949 +n01840120 +n01840412 +n01840775 +n01841102 +n01841288 +n01841441 +n01841679 +n01841943 +n01842235 +n01842504 +n01842788 +n01843065 +n01843383 +n01843719 +n01844231 +n01844551 +n01844746 +n01844917 +n01845132 +n01845477 +n01846331 +n01847000 +n01847089 +n01847170 +n01847253 +n01847407 +n01847806 +n01847978 +n01848123 +n01848323 +n01848453 +n01848555 +n01848648 +n01848840 +n01848976 +n01849157 +n01849466 +n01849676 +n01849863 +n01850192 +n01850373 +n01850553 +n01850873 +n01851038 +n01851207 +n01851375 +n01851573 +n01851731 +n01851895 +n01852142 +n01852329 +n01852400 +n01852671 +n01852861 +n01853195 +n01853498 +n01853666 +n01853870 +n01854415 +n01854700 +n01854838 +n01855032 +n01855188 +n01855476 +n01855672 +n01856072 +n01856155 +n01856380 +n01856553 +n01856890 +n01857079 +n01857325 +n01857512 +n01857632 +n01857851 +n01858281 +n01858441 +n01858780 +n01858845 +n01858906 +n01859190 +n01859325 +n01859496 +n01859689 +n01859852 +n01860002 +n01860187 +n01860497 +n01860864 +n01861148 +n01861330 +n01861778 +n01862399 +n01871265 +n01871543 +n01871875 +n01872401 +n01872772 +n01873310 +n01874434 +n01874928 +n01875313 +n01875610 +n01876034 +n01876326 +n01876667 +n01877134 +n01877606 +n01877812 +n01878061 +n01878335 +n01878639 +n01878929 +n01879217 +n01879509 +n01879837 +n01880152 +n01880473 +n01880716 +n01880813 +n01881171 +n01881564 +n01881857 +n01882125 +n01882714 +n01883070 +n01883513 +n01883920 +n01884104 +n01884203 +n01884476 +n01884834 +n01885158 +n01885498 +n01886045 +n01886756 +n01887474 +n01887623 +n01887787 +n01887896 +n01888045 +n01888181 +n01888264 +n01888411 +n01889074 +n01889520 +n01889849 +n01890144 +n01890564 +n01890860 +n01891013 +n01891274 +n01891633 +n01892030 +n01892145 +n01892385 +n01892551 +n01892744 +n01893021 +n01893164 +n01893399 +n01893825 +n01894207 +n01894522 +n01894956 +n01896844 +n01897257 +n01897426 +n01897536 +n01897667 +n01898593 +n01899894 +n01900150 +n01903234 +n01903346 +n01903498 +n01904029 +n01904806 +n01904886 +n01905321 +n01905661 +n01906749 +n01907287 +n01907738 +n01908042 +n01908958 +n01909422 +n01909788 +n01909906 +n01910252 +n01910747 +n01911063 +n01911403 +n01911839 +n01912152 +n01912454 +n01912809 +n01913166 +n01913346 +n01913440 +n01914163 +n01914609 +n01914830 +n01915700 +n01915811 +n01916187 +n01916388 +n01916481 +n01916588 +n01916925 +n01917289 +n01917611 +n01917882 +n01918744 +n01919385 +n01920051 +n01920438 +n01921059 +n01922303 +n01922717 +n01922948 
+n01923025 +n01923404 +n01923890 +n01924800 +n01924916 +n01925270 +n01925695 +n01925916 +n01926379 +n01926689 +n01927159 +n01927456 +n01927928 +n01928215 +n01928517 +n01928865 +n01929186 +n01930112 +n01930852 +n01931140 +n01931520 +n01931714 +n01932151 +n01932936 +n01933151 +n01933478 +n01933988 +n01934440 +n01934844 +n01935176 +n01935395 +n01936391 +n01936671 +n01936858 +n01937579 +n01937909 +n01938454 +n01938735 +n01940736 +n01941223 +n01941340 +n01942177 +n01942869 +n01943087 +n01943541 +n01943899 +n01944118 +n01944390 +n01944812 +n01944955 +n01945143 +n01945340 +n01945685 +n01945845 +n01946277 +n01946630 +n01946827 +n01947139 +n01947396 +n01947997 +n01948446 +n01948573 +n01949085 +n01949499 +n01949973 +n01950731 +n01951274 +n01951613 +n01952029 +n01952712 +n01953361 +n01953594 +n01953762 +n01954516 +n01955084 +n01955933 +n01956344 +n01956481 +n01956764 +n01957335 +n01958038 +n01958346 +n01958435 +n01958531 +n01959029 +n01959492 +n01959985 +n01960177 +n01960459 +n01961234 +n01961600 +n01961985 +n01962506 +n01962788 +n01963317 +n01963479 +n01963571 +n01964049 +n01964271 +n01964441 +n01964957 +n01965252 +n01965529 +n01965889 +n01966377 +n01966586 +n01967094 +n01967308 +n01967963 +n01968315 +n01968897 +n01969726 +n01970164 +n01970667 +n01971094 +n01971280 +n01971620 +n01971850 +n01972131 +n01972541 +n01973148 +n01974773 +n01975687 +n01976146 +n01976868 +n01976957 +n01977485 +n01978010 +n01978136 +n01978287 +n01978455 +n01978587 +n01978930 +n01979269 +n01979526 +n01979874 +n01980166 +n01980655 +n01981276 +n01981702 +n01982068 +n01982347 +n01982650 +n01983048 +n01983481 +n01983674 +n01983829 +n01984245 +n01984695 +n01985128 +n01985493 +n01985797 +n01986214 +n01986806 +n01987076 +n01987545 +n01987727 +n01988203 +n01988701 +n01988869 +n01989516 +n01989869 +n01990007 +n01990516 +n01990800 +n01991028 +n01991520 +n01992262 +n01992423 +n01992773 +n01993525 +n01993830 +n01994910 +n01995514 +n01995686 +n01996280 +n01996585 +n01997119 +n01997825 +n01998183 +n01998741 +n01999186 +n01999767 +n02000954 +n02002075 +n02002556 +n02002724 +n02003037 +n02003204 +n02003577 +n02003839 +n02004131 +n02004492 +n02004855 +n02005399 +n02005790 +n02006063 +n02006364 +n02006656 +n02006985 +n02007284 +n02007558 +n02008041 +n02008497 +n02008643 +n02008796 +n02009229 +n02009380 +n02009508 +n02009750 +n02009912 +n02010272 +n02010453 +n02010728 +n02011016 +n02011281 +n02011460 +n02011805 +n02011943 +n02012185 +n02012849 +n02013177 +n02013567 +n02013706 +n02014237 +n02014524 +n02014941 +n02015357 +n02015554 +n02015797 +n02016066 +n02016358 +n02016659 +n02016816 +n02016956 +n02017213 +n02017475 +n02017725 +n02018027 +n02018207 +n02018368 +n02018795 +n02019190 +n02019438 +n02019929 +n02020219 +n02020578 +n02021050 +n02021281 +n02021795 +n02022684 +n02023341 +n02023855 +n02023992 +n02024185 +n02024479 +n02024763 +n02025043 +n02025239 +n02025389 +n02026059 +n02026629 +n02026948 +n02027075 +n02027357 +n02027492 +n02027897 +n02028035 +n02028175 +n02028342 +n02028451 +n02028727 +n02028900 +n02029087 +n02029378 +n02029706 +n02030035 +n02030224 +n02030287 +n02030568 +n02030837 +n02030996 +n02031298 +n02031585 +n02031934 +n02032222 +n02032355 +n02032480 +n02032769 +n02033041 +n02033208 +n02033324 +n02033561 +n02033779 +n02033882 +n02034129 +n02034295 +n02034661 +n02034971 +n02035210 +n02035402 +n02035656 +n02036053 +n02036228 +n02036711 +n02037110 +n02037464 +n02037869 +n02038141 +n02038466 +n02038993 +n02039171 +n02039497 +n02039780 +n02040266 +n02040505 +n02041085 +n02041246 +n02041678 +n02041875 +n02042046 +n02042180 +n02042472 
+n02042759 +n02043063 +n02043333 +n02043808 +n02044178 +n02044517 +n02044778 +n02044908 +n02045369 +n02045596 +n02045864 +n02046171 +n02046759 +n02046939 +n02047045 +n02047260 +n02047411 +n02047517 +n02047614 +n02047975 +n02048115 +n02048353 +n02048698 +n02049088 +n02049532 +n02050004 +n02050313 +n02050442 +n02050586 +n02050809 +n02051059 +n02051474 +n02051845 +n02052204 +n02052365 +n02052775 +n02053083 +n02053425 +n02053584 +n02054036 +n02054502 +n02054711 +n02055107 +n02055658 +n02055803 +n02056228 +n02056570 +n02056728 +n02057035 +n02057330 +n02057731 +n02057898 +n02058221 +n02058594 +n02058747 +n02059162 +n02059541 +n02059852 +n02060133 +n02060411 +n02060569 +n02060889 +n02061217 +n02061560 +n02061853 +n02062017 +n02062430 +n02062744 +n02063224 +n02063662 +n02064000 +n02064338 +n02064816 +n02065026 +n02065263 +n02065407 +n02065726 +n02066245 +n02066707 +n02067240 +n02067603 +n02067768 +n02068206 +n02068541 +n02068974 +n02069412 +n02069701 +n02069974 +n02070174 +n02070430 +n02070624 +n02070776 +n02071028 +n02071294 +n02071636 +n02072040 +n02072493 +n02072798 +n02073250 +n02073831 +n02074367 +n02074726 +n02075296 +n02075612 +n02075927 +n02076196 +n02076402 +n02076779 +n02077152 +n02077384 +n02077658 +n02077787 +n02077923 +n02078292 +n02078574 +n02078738 +n02079005 +n02079389 +n02079851 +n02080146 +n02080415 +n02080713 +n02081060 +n02081571 +n02081798 +n02081927 +n02082056 +n02082190 +n02082791 +n02083346 +n02083672 +n02083780 +n02084071 +n02084732 +n02084861 +n02085019 +n02085118 +n02085272 +n02085374 +n02085620 +n02085782 +n02085936 +n02086079 +n02086240 +n02086346 +n02086478 +n02086646 +n02086753 +n02086910 +n02087046 +n02087122 +n02087314 +n02087394 +n02087551 +n02088094 +n02088238 +n02088364 +n02088466 +n02088632 +n02088745 +n02088839 +n02088992 +n02089078 +n02089232 +n02089468 +n02089555 +n02089725 +n02089867 +n02089973 +n02090129 +n02090253 +n02090379 +n02090475 +n02090622 +n02090721 +n02090827 +n02091032 +n02091134 +n02091244 +n02091467 +n02091635 +n02091831 +n02092002 +n02092173 +n02092339 +n02092468 +n02093056 +n02093256 +n02093428 +n02093647 +n02093754 +n02093859 +n02093991 +n02094114 +n02094258 +n02094433 +n02094562 +n02094721 +n02094931 +n02095050 +n02095212 +n02095314 +n02095412 +n02095570 +n02095727 +n02095889 +n02096051 +n02096177 +n02096294 +n02096437 +n02096585 +n02096756 +n02097047 +n02097130 +n02097209 +n02097298 +n02097474 +n02097658 +n02097786 +n02097967 +n02098105 +n02098286 +n02098413 +n02098550 +n02098806 +n02098906 +n02099029 +n02099267 +n02099429 +n02099601 +n02099712 +n02099849 +n02099997 +n02100236 +n02100399 +n02100583 +n02100735 +n02100877 +n02101006 +n02101108 +n02101388 +n02101556 +n02101670 +n02101861 +n02102040 +n02102177 +n02102318 +n02102480 +n02102605 +n02102806 +n02102973 +n02103181 +n02103406 +n02103841 +n02104029 +n02104184 +n02104280 +n02104365 +n02104523 +n02104882 +n02105056 +n02105162 +n02105251 +n02105412 +n02105505 +n02105641 +n02105855 +n02106030 +n02106166 +n02106382 +n02106550 +n02106662 +n02106854 +n02106966 +n02107142 +n02107312 +n02107420 +n02107574 +n02107683 +n02107908 +n02108000 +n02108089 +n02108254 +n02108422 +n02108551 +n02108672 +n02108915 +n02109047 +n02109150 +n02109256 +n02109391 +n02109525 +n02109687 +n02109811 +n02109961 +n02110063 +n02110185 +n02110341 +n02110532 +n02110627 +n02110806 +n02110958 +n02111129 +n02111277 +n02111500 +n02111626 +n02111889 +n02112018 +n02112137 +n02112350 +n02112497 +n02112706 +n02112826 +n02113023 +n02113186 +n02113335 +n02113624 +n02113712 +n02113799 +n02113892 +n02113978 +n02114100 +n02114367 
+n02114548 +n02114712 +n02114855 +n02115012 +n02115096 +n02115335 +n02115641 +n02115913 +n02116185 +n02116450 +n02116738 +n02117135 +n02117512 +n02117646 +n02117900 +n02118176 +n02118333 +n02118643 +n02118707 +n02119022 +n02119247 +n02119359 +n02119477 +n02119634 +n02119789 +n02120079 +n02120278 +n02120505 +n02120997 +n02121620 +n02121808 +n02122298 +n02122430 +n02122510 +n02122580 +n02122725 +n02122810 +n02122878 +n02122948 +n02123045 +n02123159 +n02123242 +n02123394 +n02123478 +n02123597 +n02123785 +n02123917 +n02124075 +n02124157 +n02124313 +n02124484 +n02124623 +n02125010 +n02125081 +n02125311 +n02125494 +n02125689 +n02125872 +n02126028 +n02126139 +n02126317 +n02126640 +n02126787 +n02127052 +n02127292 +n02127381 +n02127482 +n02127586 +n02127678 +n02127808 +n02128385 +n02128598 +n02128669 +n02128757 +n02128925 +n02129165 +n02129463 +n02129530 +n02129604 +n02129837 +n02129923 +n02129991 +n02130086 +n02130308 +n02130545 +n02130925 +n02131653 +n02132136 +n02132320 +n02132466 +n02132580 +n02132788 +n02133161 +n02133400 +n02133704 +n02134084 +n02134418 +n02134971 +n02135220 +n02135610 +n02135844 +n02136103 +n02136285 +n02136452 +n02136794 +n02137015 +n02137302 +n02137549 +n02137722 +n02137888 +n02138169 +n02138441 +n02138647 +n02138777 +n02139199 +n02139671 +n02140049 +n02140179 +n02140268 +n02140491 +n02140858 +n02141306 +n02141611 +n02141713 +n02142407 +n02142734 +n02142898 +n02143142 +n02143439 +n02143891 +n02144251 +n02144593 +n02144936 +n02145424 +n02145910 +n02146201 +n02146371 +n02146700 +n02146879 +n02147173 +n02147328 +n02147591 +n02147947 +n02148088 +n02148512 +n02148835 +n02148991 +n02149420 +n02149653 +n02149861 +n02150134 +n02150482 +n02150885 +n02151230 +n02152740 +n02152881 +n02152991 +n02153109 +n02153203 +n02153809 +n02156732 +n02156871 +n02157206 +n02157285 +n02159955 +n02160947 +n02161225 +n02161338 +n02161457 +n02161588 +n02162561 +n02163008 +n02163297 +n02164464 +n02165105 +n02165456 +n02165877 +n02166229 +n02166567 +n02166826 +n02167151 +n02167505 +n02167820 +n02167944 +n02168245 +n02168427 +n02168699 +n02169023 +n02169218 +n02169497 +n02169705 +n02169974 +n02170400 +n02170599 +n02170738 +n02170993 +n02171164 +n02171453 +n02171869 +n02172182 +n02172518 +n02172678 +n02172761 +n02172870 +n02173113 +n02173373 +n02173784 +n02174001 +n02174355 +n02174659 +n02175014 +n02175569 +n02175916 +n02176261 +n02176439 +n02176747 +n02176916 +n02177196 +n02177506 +n02177775 +n02177972 +n02178411 +n02178717 +n02179012 +n02179192 +n02179340 +n02179891 +n02180233 +n02180427 +n02180875 +n02181235 +n02181477 +n02181724 +n02182045 +n02182355 +n02182642 +n02182930 +n02183096 +n02183507 +n02183857 +n02184473 +n02184589 +n02184720 +n02185167 +n02185481 +n02186153 +n02186717 +n02187150 +n02187279 +n02187554 +n02187900 +n02188699 +n02189363 +n02189670 +n02190166 +n02190790 +n02191273 +n02191773 +n02191979 +n02192252 +n02192513 +n02192814 +n02193009 +n02193163 +n02194249 +n02194750 +n02195091 +n02195526 +n02195819 +n02196119 +n02196344 +n02196896 +n02197185 +n02197689 +n02197877 +n02198129 +n02198532 +n02198859 +n02199170 +n02199502 +n02200198 +n02200509 +n02200630 +n02200850 +n02201000 +n02201497 +n02201626 +n02202006 +n02202124 +n02202287 +n02202678 +n02203152 +n02203592 +n02203978 +n02204249 +n02204722 +n02204907 +n02205219 +n02205673 +n02206270 +n02206856 +n02207179 +n02207345 +n02207449 +n02207647 +n02207805 +n02208280 +n02208498 +n02208848 +n02208979 +n02209111 +n02209354 +n02209624 +n02209964 +n02210427 +n02210921 +n02211444 +n02211627 +n02211896 +n02212062 +n02212602 +n02212958 +n02213107 
+n02213239 +n02213543 +n02213663 +n02213788 +n02214096 +n02214341 +n02214499 +n02214660 +n02214773 +n02215161 +n02215621 +n02215770 +n02216211 +n02216365 +n02216740 +n02217563 +n02217839 +n02218134 +n02218371 +n02218713 +n02219015 +n02219486 +n02220055 +n02220225 +n02220518 +n02220804 +n02221083 +n02221414 +n02221571 +n02221715 +n02221820 +n02222035 +n02222321 +n02222582 +n02223266 +n02223520 +n02224023 +n02224713 +n02225081 +n02225798 +n02226183 +n02226429 +n02226821 +n02226970 +n02227247 +n02227604 +n02227966 +n02228341 +n02228697 +n02229156 +n02229544 +n02229765 +n02230023 +n02230187 +n02230480 +n02230634 +n02231052 +n02231487 +n02231803 +n02232223 +n02233338 +n02233943 +n02234355 +n02234570 +n02234848 +n02235205 +n02236044 +n02236241 +n02236355 +n02236896 +n02237424 +n02237581 +n02237868 +n02238235 +n02238358 +n02238594 +n02238887 +n02239192 +n02239528 +n02239774 +n02240068 +n02240517 +n02241008 +n02241426 +n02241569 +n02241799 +n02242137 +n02242455 +n02243209 +n02243562 +n02243878 +n02244173 +n02244515 +n02244797 +n02245111 +n02245443 +n02246011 +n02246628 +n02246941 +n02247216 +n02247511 +n02247655 +n02248062 +n02248368 +n02248510 +n02248887 +n02249134 +n02249515 +n02249809 +n02250280 +n02250822 +n02251067 +n02251233 +n02251593 +n02251775 +n02252226 +n02252799 +n02252972 +n02253127 +n02253264 +n02253494 +n02253715 +n02253913 +n02254246 +n02254697 +n02254901 +n02255023 +n02255391 +n02256172 +n02256656 +n02257003 +n02257284 +n02257715 +n02257985 +n02258198 +n02258508 +n02258629 +n02259212 +n02259377 +n02259708 +n02259987 +n02260421 +n02260863 +n02261063 +n02261419 +n02261757 +n02262178 +n02262449 +n02262803 +n02263378 +n02264021 +n02264232 +n02264363 +n02264591 +n02264885 +n02265330 +n02266050 +n02266269 +n02266421 +n02266864 +n02267208 +n02267483 +n02268148 +n02268443 +n02268853 +n02269196 +n02269340 +n02269522 +n02269657 +n02270011 +n02270200 +n02270623 +n02270945 +n02271222 +n02271570 +n02271897 +n02272286 +n02272552 +n02272871 +n02273392 +n02274024 +n02274259 +n02274822 +n02275560 +n02275773 +n02276078 +n02276258 +n02276355 +n02276749 +n02276902 +n02277094 +n02277268 +n02277422 +n02277742 +n02278024 +n02278210 +n02278463 +n02278839 +n02278980 +n02279257 +n02279637 +n02279972 +n02280458 +n02280649 +n02281015 +n02281136 +n02281267 +n02281406 +n02281787 +n02282257 +n02282385 +n02282553 +n02282903 +n02283077 +n02283201 +n02283617 +n02283951 +n02284224 +n02284611 +n02284884 +n02285179 +n02285548 +n02285801 +n02286089 +n02286425 +n02286654 +n02287004 +n02287352 +n02287622 +n02287799 +n02287987 +n02288122 +n02288268 +n02288789 +n02289307 +n02289610 +n02289988 +n02290340 +n02290664 +n02290870 +n02291220 +n02291572 +n02291748 +n02292085 +n02292401 +n02292692 +n02293352 +n02293868 +n02294097 +n02294407 +n02294577 +n02295064 +n02295390 +n02295870 +n02296021 +n02296276 +n02296612 +n02296912 +n02297294 +n02297442 +n02297819 +n02297938 +n02298095 +n02298218 +n02298541 +n02299039 +n02299157 +n02299378 +n02299505 +n02299846 +n02300173 +n02300554 +n02300797 +n02301452 +n02301935 +n02302244 +n02302459 +n02302620 +n02302969 +n02303284 +n02303585 +n02303777 +n02304036 +n02304432 +n02304657 +n02304797 +n02305085 +n02305407 +n02305636 +n02305929 +n02306433 +n02306825 +n02307176 +n02307325 +n02307515 +n02307681 +n02307910 +n02308033 +n02308139 +n02308471 +n02308618 +n02308735 +n02309120 +n02309242 +n02309337 +n02309841 +n02310000 +n02310149 +n02310334 +n02310585 +n02310717 +n02310941 +n02311060 +n02311617 +n02311748 +n02312006 +n02312175 +n02312325 +n02312427 +n02312640 +n02312912 +n02313008 +n02313360 
+n02313709 +n02315487 +n02315821 +n02316707 +n02317335 +n02317781 +n02318167 +n02318687 +n02319095 +n02319308 +n02319555 +n02319829 +n02320127 +n02320465 +n02321170 +n02321529 +n02322047 +n02322992 +n02323449 +n02323902 +n02324045 +n02324431 +n02324514 +n02324587 +n02324850 +n02325366 +n02325722 +n02325884 +n02326074 +n02326432 +n02326763 +n02326862 +n02327028 +n02327175 +n02327435 +n02327656 +n02327842 +n02328009 +n02328150 +n02328429 +n02328820 +n02328942 +n02329401 +n02330245 +n02331046 +n02331309 +n02331842 +n02332156 +n02332447 +n02332755 +n02332954 +n02333190 +n02333546 +n02333733 +n02333819 +n02333909 +n02334201 +n02334460 +n02334728 +n02335127 +n02335231 +n02336011 +n02336275 +n02336641 +n02336826 +n02337001 +n02337171 +n02337332 +n02337598 +n02337902 +n02338145 +n02338449 +n02338722 +n02338901 +n02339282 +n02339376 +n02339922 +n02340186 +n02340358 +n02340640 +n02340930 +n02341288 +n02341475 +n02341616 +n02341974 +n02342250 +n02342534 +n02342885 +n02343058 +n02343320 +n02343772 +n02344175 +n02344270 +n02344408 +n02344528 +n02344918 +n02345078 +n02345340 +n02345600 +n02345774 +n02345997 +n02346170 +n02346627 +n02346998 +n02347274 +n02347573 +n02347744 +n02348173 +n02348788 +n02349205 +n02349390 +n02349557 +n02349847 +n02350105 +n02350357 +n02350670 +n02350989 +n02351343 +n02351870 +n02352002 +n02352290 +n02352591 +n02352932 +n02353172 +n02353411 +n02353861 +n02354162 +n02354320 +n02354621 +n02354781 +n02355227 +n02355477 +n02356381 +n02356612 +n02356798 +n02356977 +n02357111 +n02357401 +n02357585 +n02357911 +n02358091 +n02358390 +n02358584 +n02358712 +n02358890 +n02359047 +n02359324 +n02359556 +n02359667 +n02359915 +n02360282 +n02360480 +n02360781 +n02360933 +n02361090 +n02361337 +n02361587 +n02361706 +n02361850 +n02362194 +n02363005 +n02363245 +n02363351 +n02363996 +n02364520 +n02364673 +n02364840 +n02365108 +n02365480 +n02366002 +n02366301 +n02366579 +n02366959 +n02367492 +n02367812 +n02368116 +n02368399 +n02368821 +n02369293 +n02369555 +n02369680 +n02369935 +n02370137 +n02370525 +n02370806 +n02371344 +n02372140 +n02372584 +n02372952 +n02373336 +n02374149 +n02374451 +n02375302 +n02375438 +n02375757 +n02375862 +n02376542 +n02376679 +n02376791 +n02376918 +n02377063 +n02377181 +n02377291 +n02377388 +n02377480 +n02377603 +n02377703 +n02378149 +n02378299 +n02378415 +n02378541 +n02378625 +n02378755 +n02378870 +n02378969 +n02379081 +n02379183 +n02379329 +n02379430 +n02379630 +n02379743 +n02379908 +n02380052 +n02380335 +n02380464 +n02380583 +n02380745 +n02380875 +n02381004 +n02381119 +n02381261 +n02381364 +n02381460 +n02381609 +n02381831 +n02382039 +n02382132 +n02382204 +n02382338 +n02382437 +n02382635 +n02382750 +n02382850 +n02382948 +n02383231 +n02384741 +n02384858 +n02385002 +n02385098 +n02385214 +n02385580 +n02385676 +n02385776 +n02385898 +n02386014 +n02386141 +n02386224 +n02386310 +n02386496 +n02386746 +n02386853 +n02386968 +n02387093 +n02387254 +n02387346 +n02387452 +n02387722 +n02387887 +n02387983 +n02388143 +n02388276 +n02388453 +n02388588 +n02388735 +n02388832 +n02388917 +n02389026 +n02389128 +n02389261 +n02389346 +n02389559 +n02389779 +n02389865 +n02389943 +n02390015 +n02390101 +n02390258 +n02390454 +n02390640 +n02390738 +n02390834 +n02390938 +n02391049 +n02391234 +n02391373 +n02391508 +n02391617 +n02391994 +n02392434 +n02392555 +n02392824 +n02393161 +n02393580 +n02393807 +n02393940 +n02394477 +n02395003 +n02395406 +n02395694 +n02395855 +n02395931 +n02396014 +n02396088 +n02396157 +n02396427 +n02396796 +n02397096 +n02397529 +n02397744 +n02397987 +n02398521 +n02399000 +n02401031 
+n02402010 +n02402175 +n02402425 +n02403003 +n02403153 +n02403231 +n02403325 +n02403454 +n02403740 +n02403820 +n02403920 +n02404028 +n02404186 +n02404432 +n02404573 +n02404906 +n02405101 +n02405302 +n02405440 +n02405577 +n02405692 +n02405799 +n02405929 +n02406046 +n02406174 +n02406432 +n02406533 +n02406647 +n02406749 +n02406859 +n02406952 +n02407071 +n02407172 +n02407276 +n02407390 +n02407521 +n02407625 +n02407763 +n02407959 +n02408429 +n02408660 +n02408817 +n02409038 +n02409202 +n02409508 +n02409870 +n02410011 +n02410141 +n02410509 +n02410702 +n02410900 +n02411206 +n02411705 +n02411999 +n02412080 +n02412210 +n02412440 +n02412629 +n02412700 +n02412787 +n02412909 +n02412977 +n02413050 +n02413131 +n02413484 +n02413593 +n02413717 +n02413824 +n02413917 +n02414043 +n02414209 +n02414290 +n02414442 +n02414578 +n02414763 +n02414904 +n02415130 +n02415253 +n02415435 +n02415577 +n02415829 +n02416104 +n02416519 +n02416820 +n02416880 +n02416964 +n02417070 +n02417242 +n02417387 +n02417534 +n02417663 +n02417785 +n02417914 +n02418064 +n02418465 +n02418770 +n02419056 +n02419336 +n02419634 +n02419796 +n02420509 +n02420828 +n02421136 +n02421449 +n02421792 +n02422106 +n02422391 +n02422699 +n02423022 +n02423218 +n02423362 +n02423589 +n02424085 +n02424305 +n02424486 +n02424589 +n02424695 +n02424909 +n02425086 +n02425228 +n02425532 +n02425887 +n02426176 +n02426481 +n02426813 +n02427032 +n02427183 +n02427470 +n02427576 +n02427724 +n02428089 +n02428349 +n02428508 +n02428842 +n02429456 +n02430045 +n02430559 +n02430643 +n02430748 +n02430830 +n02431122 +n02431337 +n02431441 +n02431542 +n02431628 +n02431785 +n02431976 +n02432291 +n02432511 +n02432704 +n02432983 +n02433318 +n02433546 +n02433729 +n02433925 +n02434190 +n02434415 +n02434712 +n02434954 +n02435216 +n02435517 +n02435853 +n02436224 +n02436353 +n02436645 +n02437136 +n02437312 +n02437482 +n02437616 +n02437971 +n02438173 +n02438272 +n02438580 +n02439033 +n02439398 +n02441326 +n02441942 +n02442172 +n02442336 +n02442446 +n02442572 +n02442668 +n02442845 +n02443015 +n02443114 +n02443346 +n02443484 +n02443808 +n02443959 +n02444251 +n02444819 +n02445004 +n02445171 +n02445394 +n02445715 +n02446206 +n02446352 +n02446645 +n02447021 +n02447366 +n02447762 +n02448060 +n02448318 +n02448633 +n02448885 +n02449183 +n02449350 +n02449699 +n02450034 +n02450295 +n02450426 +n02450561 +n02450677 +n02450829 +n02451125 +n02451415 +n02451575 +n02453108 +n02453611 +n02454379 +n02454794 +n02455135 +n02455428 +n02455720 +n02456008 +n02456275 +n02456962 +n02457408 +n02457945 +n02458135 +n02458517 +n02459190 +n02460009 +n02460451 +n02460817 +n02461128 +n02461830 +n02462213 +n02469248 +n02469472 +n02469914 +n02470238 +n02470325 +n02470709 +n02470899 +n02471300 +n02471762 +n02472293 +n02472987 +n02473307 +n02473554 +n02473720 +n02473857 +n02473983 +n02474110 +n02474282 +n02474605 +n02474777 +n02475078 +n02475358 +n02475669 +n02476219 +n02476567 +n02476870 +n02477028 +n02477187 +n02477329 +n02477516 +n02477782 +n02478239 +n02478875 +n02479332 +n02480153 +n02480495 +n02480855 +n02481103 +n02481235 +n02481366 +n02481500 +n02481823 +n02482060 +n02482286 +n02482474 +n02482650 +n02483092 +n02483362 +n02483708 +n02484322 +n02484473 +n02484975 +n02485225 +n02485371 +n02485536 +n02485688 +n02485988 +n02486261 +n02486410 +n02486657 +n02486908 +n02487079 +n02487347 +n02487547 +n02487675 +n02487847 +n02488003 +n02488291 +n02488415 +n02488702 +n02488894 +n02489166 +n02489589 +n02490219 +n02490597 +n02490811 +n02491107 +n02491329 +n02491474 +n02492035 +n02492356 +n02492660 +n02492948 +n02493224 +n02493509 
+n02493793 +n02494079 +n02494383 +n02495242 +n02496052 +n02496913 +n02497673 +n02498153 +n02498743 +n02499022 +n02499316 +n02499568 +n02499808 +n02500267 +n02500596 +n02501583 +n02501923 +n02502006 +n02502514 +n02502807 +n02503127 +n02503517 +n02503756 +n02504013 +n02504458 +n02504770 +n02505063 +n02505238 +n02505485 +n02505998 +n02506947 +n02507148 +n02507649 +n02508021 +n02508213 +n02508346 +n02508742 +n02509197 +n02509515 +n02509815 +n02510455 +n02511730 +n02512053 +n02512752 +n02512830 +n02512938 +n02513248 +n02513355 +n02513560 +n02513727 +n02513805 +n02513939 +n02514041 +n02515214 +n02515713 +n02516188 +n02516776 +n02517442 +n02517938 +n02518324 +n02518622 +n02519148 +n02519340 +n02519472 +n02519686 +n02519862 +n02520147 +n02520525 +n02520810 +n02521646 +n02522399 +n02522637 +n02522722 +n02522866 +n02523110 +n02523427 +n02523877 +n02524202 +n02524524 +n02524659 +n02524928 +n02525382 +n02525703 +n02526121 +n02526425 +n02526818 +n02527057 +n02527271 +n02527622 +n02528163 +n02529293 +n02529772 +n02530052 +n02530188 +n02530421 +n02530637 +n02530831 +n02530999 +n02531114 +n02531625 +n02532028 +n02532272 +n02532451 +n02532602 +n02532786 +n02532918 +n02533209 +n02533545 +n02533834 +n02534165 +n02534559 +n02534734 +n02535080 +n02535163 +n02535258 +n02535537 +n02535759 +n02536165 +n02536456 +n02536864 +n02537085 +n02537319 +n02537525 +n02537716 +n02538010 +n02538216 +n02538406 +n02538562 +n02538985 +n02539424 +n02539573 +n02539894 +n02540412 +n02540983 +n02541257 +n02541687 +n02542017 +n02542432 +n02542958 +n02543255 +n02543565 +n02544274 +n02545841 +n02546028 +n02546331 +n02546627 +n02547014 +n02547733 +n02548247 +n02548689 +n02548884 +n02549248 +n02549376 +n02549989 +n02550203 +n02550460 +n02550655 +n02551134 +n02551668 +n02552171 +n02553028 +n02554730 +n02555863 +n02556373 +n02556846 +n02557182 +n02557318 +n02557591 +n02557749 +n02557909 +n02558206 +n02558860 +n02559144 +n02559383 +n02559862 +n02560110 +n02561108 +n02561381 +n02561514 +n02561661 +n02561803 +n02561937 +n02562315 +n02562796 +n02562971 +n02563079 +n02563182 +n02563648 +n02563792 +n02563949 +n02564270 +n02564403 +n02564720 +n02564935 +n02565072 +n02565324 +n02565573 +n02566109 +n02566489 +n02566665 +n02567334 +n02567633 +n02568087 +n02568447 +n02568959 +n02569484 +n02569631 +n02569905 +n02570164 +n02570484 +n02570838 +n02571167 +n02571652 +n02571810 +n02572196 +n02572484 +n02573249 +n02573704 +n02574271 +n02574910 +n02575325 +n02575590 +n02576223 +n02576575 +n02576906 +n02577041 +n02577164 +n02577403 +n02577662 +n02577952 +n02578233 +n02578454 +n02578771 +n02578928 +n02579303 +n02579557 +n02579762 +n02579928 +n02580336 +n02580679 +n02580830 +n02581108 +n02581482 +n02581642 +n02581957 +n02582220 +n02582349 +n02582721 +n02583567 +n02583890 +n02584145 +n02584449 +n02585872 +n02586238 +n02586543 +n02587051 +n02587300 +n02587479 +n02587618 +n02587877 +n02588286 +n02588794 +n02588945 +n02589062 +n02589196 +n02589316 +n02589623 +n02589796 +n02590094 +n02590495 +n02590702 +n02590987 +n02591330 +n02591613 +n02591911 +n02592055 +n02592371 +n02592734 +n02593019 +n02593191 +n02593453 +n02593679 +n02594250 +n02594942 +n02595056 +n02595339 +n02595702 +n02596067 +n02596252 +n02596381 +n02596720 +n02597004 +n02597367 +n02597608 +n02597818 +n02597972 +n02598134 +n02598573 +n02598878 +n02599052 +n02599347 +n02599557 +n02599958 +n02600298 +n02600503 +n02600798 +n02601344 +n02601767 +n02601921 +n02602059 +n02602405 +n02602760 +n02603317 +n02603540 +n02603862 +n02604157 +n02604480 +n02604954 +n02605316 +n02605703 +n02605936 +n02606052 +n02606384 
+n02606751 +n02607072 +n02607201 +n02607470 +n02607862 +n02608284 +n02608547 +n02608860 +n02608996 +n02609302 +n02609823 +n02610066 +n02610373 +n02610664 +n02610980 +n02611561 +n02611898 +n02612167 +n02613181 +n02613572 +n02613820 +n02614140 +n02614482 +n02614653 +n02614978 +n02615298 +n02616128 +n02616397 +n02616851 +n02617537 +n02618094 +n02618513 +n02618827 +n02619165 +n02619550 +n02619861 +n02620167 +n02620578 +n02621258 +n02621908 +n02622249 +n02622547 +n02622712 +n02622955 +n02623445 +n02624167 +n02624551 +n02624807 +n02624987 +n02625258 +n02625612 +n02625851 +n02626089 +n02626265 +n02626471 +n02626762 +n02627037 +n02627292 +n02627532 +n02627835 +n02628062 +n02628259 +n02628600 +n02629230 +n02629716 +n02630281 +n02630615 +n02630739 +n02631041 +n02631330 +n02631475 +n02631628 +n02631775 +n02632039 +n02632494 +n02633422 +n02633677 +n02633977 +n02634545 +n02635154 +n02635580 +n02636170 +n02636405 +n02636550 +n02636854 +n02637179 +n02637475 +n02637977 +n02638596 +n02639087 +n02639605 +n02639922 +n02640242 +n02640626 +n02640857 +n02641379 +n02642107 +n02642644 +n02643112 +n02643316 +n02643566 +n02643836 +n02644113 +n02644360 +n02644501 +n02644665 +n02644817 +n02645538 +n02645691 +n02645953 +n02646667 +n02646892 +n02648035 +n02648625 +n02648916 +n02649218 +n02649546 +n02650050 +n02650413 +n02650541 +n02651060 +n02652132 +n02652668 +n02653145 +n02653497 +n02653786 +n02654112 +n02654425 +n02654745 +n02655020 +n02655523 +n02655848 +n02656032 +n02656301 +n02656670 +n02656969 +n02657368 +n02657694 +n02658079 +n02658531 +n02658811 +n02659176 +n02659478 +n02659808 +n02660091 +n02660208 +n02660519 +n02660640 +n02661017 +n02661473 +n02661618 +n02662239 +n02662397 +n02662559 +n02662825 +n02662993 +n02663211 +n02663485 +n02663849 +n02664285 +n02664642 +n02665250 +n02665985 +n02666196 +n02666501 +n02666624 +n02666943 +n02667093 +n02667244 +n02667379 +n02667478 +n02667576 +n02667693 +n02668393 +n02668613 +n02669295 +n02669442 +n02669534 +n02669723 +n02670186 +n02670382 +n02670683 +n02670935 +n02671780 +n02672152 +n02672371 +n02672831 +n02675077 +n02675219 +n02675522 +n02676097 +n02676261 +n02676566 +n02676670 +n02676938 +n02677028 +n02677136 +n02677436 +n02677718 +n02678010 +n02678384 +n02678897 +n02679142 +n02679257 +n02679961 +n02680110 +n02680512 +n02680638 +n02680754 +n02681392 +n02682311 +n02682407 +n02682569 +n02682811 +n02682922 +n02683183 +n02683323 +n02683454 +n02683558 +n02683791 +n02684248 +n02684356 +n02684515 +n02684649 +n02684962 +n02685082 +n02685253 +n02685365 +n02685701 +n02685995 +n02686121 +n02686227 +n02686379 +n02686568 +n02687172 +n02687423 +n02687682 +n02687821 +n02687992 +n02688273 +n02688443 +n02689144 +n02689274 +n02689434 +n02689748 +n02689819 +n02690373 +n02690715 +n02691156 +n02692086 +n02692232 +n02692513 +n02692680 +n02692877 +n02693246 +n02693413 +n02693540 +n02694045 +n02694279 +n02694426 +n02694662 +n02694966 +n02695627 +n02695762 +n02696165 +n02696246 +n02696569 +n02696843 +n02697022 +n02697221 +n02697576 +n02697675 +n02697876 +n02698244 +n02698473 +n02698634 +n02699494 +n02699629 +n02699770 +n02699915 +n02700064 +n02700258 +n02700895 +n02701002 +n02701260 +n02701730 +n02702989 +n02703124 +n02703275 +n02704645 +n02704792 +n02704949 +n02705201 +n02705429 +n02705944 +n02706221 +n02706806 +n02708093 +n02708224 +n02708433 +n02708555 +n02708711 +n02708885 +n02709101 +n02709367 +n02709637 +n02709763 +n02709908 +n02710044 +n02710201 +n02710324 +n02710429 +n02710600 +n02711237 +n02711780 +n02712545 +n02712643 +n02713003 +n02713218 +n02713364 +n02713496 +n02714315 +n02714535 
+n02714751 +n02715229 +n02715513 +n02715712 +n02716626 +n02720048 +n02720576 +n02721813 +n02723165 +n02724722 +n02725872 +n02726017 +n02726210 +n02726305 +n02726681 +n02727016 +n02727141 +n02727426 +n02727825 +n02728440 +n02729222 +n02729837 +n02729965 +n02730265 +n02730568 +n02730930 +n02731251 +n02731398 +n02731629 +n02731900 +n02732072 +n02732572 +n02732827 +n02733213 +n02733524 +n02734725 +n02734835 +n02735268 +n02735361 +n02735538 +n02735688 +n02736396 +n02736798 +n02737351 +n02737660 +n02738031 +n02738271 +n02738449 +n02738535 +n02738741 +n02738859 +n02738978 +n02739123 +n02739427 +n02739550 +n02739668 +n02739889 +n02740061 +n02740300 +n02740533 +n02740764 +n02741367 +n02741475 +n02742070 +n02742194 +n02742322 +n02742468 +n02742753 +n02743426 +n02744323 +n02744844 +n02744961 +n02745492 +n02745611 +n02745816 +n02746008 +n02746225 +n02746365 +n02746595 +n02746683 +n02746978 +n02747063 +n02747177 +n02747672 +n02747802 +n02748183 +n02748359 +n02748491 +n02749169 +n02749292 +n02749479 +n02749670 +n02749790 +n02749953 +n02750070 +n02750169 +n02750320 +n02750652 +n02751067 +n02751215 +n02751295 +n02751490 +n02752199 +n02752496 +n02752615 +n02752810 +n02752917 +n02753044 +n02753394 +n02753710 +n02754103 +n02754656 +n02755140 +n02755352 +n02755529 +n02755675 +n02755823 +n02755984 +n02756098 +n02756854 +n02756977 +n02757061 +n02757337 +n02757462 +n02757714 +n02757810 +n02757927 +n02758134 +n02758490 +n02758863 +n02758960 +n02759257 +n02759387 +n02759700 +n02759963 +n02760099 +n02760199 +n02760298 +n02760429 +n02760658 +n02760855 +n02761034 +n02761206 +n02761392 +n02761557 +n02761696 +n02761834 +n02762169 +n02762371 +n02762508 +n02762725 +n02762909 +n02763083 +n02763198 +n02763306 +n02763604 +n02763714 +n02763901 +n02764044 +n02764398 +n02764505 +n02764614 +n02764779 +n02764935 +n02765028 +n02766168 +n02766320 +n02766534 +n02766792 +n02767038 +n02767147 +n02767433 +n02767665 +n02767956 +n02768114 +n02768226 +n02768433 +n02768655 +n02768973 +n02769075 +n02769290 +n02769669 +n02769748 +n02769963 +n02770078 +n02770211 +n02770585 +n02770721 +n02770830 +n02771004 +n02771166 +n02771286 +n02771547 +n02771750 +n02772101 +n02772435 +n02772554 +n02772700 +n02773037 +n02773838 +n02774152 +n02774630 +n02774921 +n02775039 +n02775178 +n02775483 +n02775689 +n02775813 +n02775897 +n02776007 +n02776205 +n02776505 +n02776631 +n02776825 +n02776978 +n02777100 +n02777292 +n02777402 +n02777638 +n02777734 +n02777927 +n02778131 +n02778294 +n02778456 +n02778588 +n02778669 +n02779435 +n02779609 +n02779719 +n02779971 +n02780315 +n02780445 +n02780588 +n02780704 +n02780815 +n02781121 +n02781213 +n02781338 +n02781517 +n02781764 +n02782093 +n02782432 +n02782602 +n02782681 +n02782778 +n02783035 +n02783161 +n02783324 +n02783459 +n02783900 +n02783994 +n02784124 +n02784998 +n02785648 +n02786058 +n02786198 +n02786331 +n02786463 +n02786611 +n02786736 +n02786837 +n02787120 +n02787269 +n02787435 +n02787622 +n02788021 +n02788148 +n02788386 +n02788462 +n02788572 +n02788689 +n02789487 +n02790669 +n02790823 +n02790996 +n02791124 +n02791270 +n02791532 +n02791665 +n02791795 +n02792409 +n02792552 +n02792948 +n02793089 +n02793199 +n02793296 +n02793414 +n02793495 +n02793684 +n02793842 +n02793930 +n02794008 +n02794156 +n02794368 +n02794474 +n02794664 +n02794779 +n02794972 +n02795169 +n02795528 +n02795670 +n02795783 +n02795978 +n02796207 +n02796318 +n02796412 +n02796623 +n02796995 +n02797295 +n02797535 +n02797692 +n02797881 +n02799071 +n02799175 +n02799323 +n02799897 +n02800213 +n02800497 +n02800675 +n02800940 +n02801047 +n02801184 +n02801450 
+n02801525 +n02801823 +n02801938 +n02802215 +n02802426 +n02802544 +n02802721 +n02802990 +n02803349 +n02803539 +n02803666 +n02803809 +n02803934 +n02804123 +n02804252 +n02804414 +n02804515 +n02804610 +n02805283 +n02805845 +n02805983 +n02806088 +n02806379 +n02806530 +n02806762 +n02806875 +n02806992 +n02807133 +n02807523 +n02807616 +n02807731 +n02808185 +n02808304 +n02808440 +n02808829 +n02808968 +n02809105 +n02809241 +n02809364 +n02809491 +n02809605 +n02809736 +n02810139 +n02810270 +n02810471 +n02810782 +n02811059 +n02811204 +n02811350 +n02811468 +n02811618 +n02811719 +n02811936 +n02812201 +n02812342 +n02812631 +n02812785 +n02812949 +n02813252 +n02813399 +n02813544 +n02813645 +n02813752 +n02813981 +n02814116 +n02814338 +n02814428 +n02814533 +n02814774 +n02814860 +n02815478 +n02815749 +n02815834 +n02815950 +n02816494 +n02816656 +n02816768 +n02817031 +n02817251 +n02817386 +n02817516 +n02817650 +n02817799 +n02818135 +n02818254 +n02818687 +n02818832 +n02819697 +n02820085 +n02820210 +n02820556 +n02820675 +n02821202 +n02821415 +n02821543 +n02821627 +n02821943 +n02822064 +n02822220 +n02822399 +n02822579 +n02822762 +n02822865 +n02823124 +n02823335 +n02823428 +n02823510 +n02823586 +n02823750 +n02823848 +n02823964 +n02824058 +n02824152 +n02824319 +n02824448 +n02825153 +n02825240 +n02825442 +n02825657 +n02825872 +n02825961 +n02826068 +n02826259 +n02826459 +n02826589 +n02826683 +n02826812 +n02826886 +n02827148 +n02827606 +n02828115 +n02828299 +n02828427 +n02828884 +n02829246 +n02829353 +n02829510 +n02829596 +n02830157 +n02831237 +n02831335 +n02831595 +n02831724 +n02831894 +n02831998 +n02833040 +n02833140 +n02833275 +n02833403 +n02833793 +n02834027 +n02834397 +n02834506 +n02834642 +n02834778 +n02835271 +n02835412 +n02835551 +n02835724 +n02835829 +n02835915 +n02836035 +n02836174 +n02836268 +n02836392 +n02836513 +n02836607 +n02836900 +n02837134 +n02837567 +n02837789 +n02837887 +n02838014 +n02838178 +n02838345 +n02838577 +n02838728 +n02838958 +n02839110 +n02839351 +n02839592 +n02839910 +n02840134 +n02840245 +n02840515 +n02840619 +n02841063 +n02841187 +n02841315 +n02841506 +n02841641 +n02841847 +n02842133 +n02842573 +n02842809 +n02843029 +n02843158 +n02843276 +n02843465 +n02843553 +n02843684 +n02843777 +n02843909 +n02844056 +n02844214 +n02844307 +n02844714 +n02845130 +n02845293 +n02845985 +n02846141 +n02846260 +n02846511 +n02846619 +n02846733 +n02846874 +n02847461 +n02847631 +n02847852 +n02848118 +n02848216 +n02848523 +n02848806 +n02848921 +n02849154 +n02849885 +n02850060 +n02850358 +n02850732 +n02850950 +n02851099 +n02851795 +n02851939 +n02852043 +n02852173 +n02852360 +n02853016 +n02853218 +n02853336 +n02853745 +n02853870 +n02854378 +n02854532 +n02854630 +n02854739 +n02854926 +n02855089 +n02855390 +n02855701 +n02855793 +n02855925 +n02856013 +n02856237 +n02856362 +n02857365 +n02857477 +n02857644 +n02857907 +n02858304 +n02859184 +n02859343 +n02859443 +n02859557 +n02859729 +n02859955 +n02860415 +n02860640 +n02860847 +n02861022 +n02861147 +n02861286 +n02861387 +n02861509 +n02861658 +n02861777 +n02861886 +n02862048 +n02862916 +n02863014 +n02863176 +n02863340 +n02863426 +n02863536 +n02863638 +n02863750 +n02864122 +n02864504 +n02864593 +n02864987 +n02865351 +n02865665 +n02865931 +n02866106 +n02866386 +n02866578 +n02867401 +n02867592 +n02867715 +n02867966 +n02868240 +n02868429 +n02868546 +n02868638 +n02868975 +n02869155 +n02869249 +n02869563 +n02869737 +n02869837 +n02870526 +n02870676 +n02870772 +n02870880 +n02871005 +n02871147 +n02871314 +n02871439 +n02871525 +n02871631 +n02871824 +n02871963 +n02872333 +n02872529 
+n02872752 +n02873520 +n02873623 +n02873733 +n02873839 +n02874086 +n02874214 +n02874336 +n02874442 +n02874537 +n02874642 +n02874750 +n02875436 +n02875626 +n02875948 +n02876084 +n02876326 +n02876457 +n02876657 +n02877266 +n02877513 +n02877642 +n02877765 +n02877962 +n02878107 +n02878222 +n02878425 +n02878534 +n02878628 +n02878796 +n02879087 +n02879309 +n02879422 +n02879517 +n02879718 +n02880189 +n02880393 +n02880546 +n02880842 +n02880940 +n02881193 +n02881546 +n02881757 +n02881906 +n02882190 +n02882301 +n02882483 +n02882647 +n02882894 +n02883004 +n02883101 +n02883205 +n02883344 +n02884225 +n02884450 +n02884859 +n02884994 +n02885108 +n02885233 +n02885338 +n02885462 +n02885882 +n02886321 +n02886434 +n02886599 +n02887079 +n02887209 +n02887489 +n02887832 +n02887970 +n02888270 +n02888429 +n02888569 +n02888898 +n02889425 +n02889646 +n02889856 +n02889996 +n02890188 +n02890351 +n02890513 +n02890662 +n02890804 +n02890940 +n02891188 +n02891788 +n02892201 +n02892304 +n02892392 +n02892499 +n02892626 +n02892767 +n02892948 +n02893269 +n02893418 +n02893608 +n02893692 +n02893941 +n02894024 +n02894158 +n02894337 +n02894605 +n02894847 +n02895008 +n02895154 +n02895328 +n02895438 +n02896074 +n02896294 +n02896442 +n02896694 +n02896856 +n02896949 +n02897097 +n02897389 +n02897820 +n02898093 +n02898173 +n02898269 +n02898369 +n02898585 +n02898711 +n02899439 +n02900160 +n02900459 +n02900594 +n02900705 +n02900857 +n02900987 +n02901114 +n02901259 +n02901377 +n02901481 +n02901620 +n02901793 +n02901901 +n02902079 +n02902687 +n02902816 +n02902916 +n02903006 +n02903126 +n02903204 +n02903727 +n02903852 +n02904109 +n02904233 +n02904505 +n02904640 +n02904803 +n02904927 +n02905036 +n02905152 +n02905886 +n02906734 +n02906963 +n02907082 +n02907296 +n02907391 +n02907656 +n02907873 +n02908123 +n02908217 +n02908773 +n02908951 +n02909053 +n02909165 +n02909285 +n02909706 +n02909870 +n02910145 +n02910241 +n02910353 +n02910542 +n02910701 +n02910864 +n02910964 +n02911332 +n02911485 +n02912065 +n02912319 +n02912557 +n02912894 +n02913152 +n02914991 +n02915904 +n02916065 +n02916179 +n02916350 +n02916936 +n02917067 +n02917377 +n02917521 +n02917607 +n02917742 +n02917964 +n02918112 +n02918330 +n02918455 +n02918595 +n02918831 +n02918964 +n02919148 +n02919308 +n02919414 +n02919648 +n02919792 +n02919890 +n02919976 +n02920083 +n02920164 +n02920259 +n02920369 +n02920503 +n02920658 +n02921029 +n02921195 +n02921292 +n02921406 +n02921592 +n02921756 +n02921884 +n02922159 +n02922292 +n02922461 +n02922578 +n02922798 +n02922877 +n02923129 +n02923535 +n02923682 +n02923915 +n02924116 +n02925009 +n02925107 +n02925385 +n02925519 +n02925666 +n02926426 +n02926591 +n02927053 +n02927161 +n02927764 +n02927887 +n02928049 +n02928299 +n02928413 +n02928608 +n02929184 +n02929289 +n02929462 +n02929582 +n02929923 +n02930080 +n02930214 +n02930339 +n02930645 +n02930766 +n02931013 +n02931148 +n02931294 +n02931417 +n02931836 +n02932019 +n02932400 +n02932523 +n02932693 +n02932891 +n02933112 +n02933340 +n02933462 +n02933649 +n02933750 +n02933990 +n02934168 +n02934451 +n02935017 +n02935387 +n02935490 +n02935658 +n02935891 +n02936176 +n02936281 +n02936402 +n02936570 +n02936714 +n02936921 +n02937010 +n02937336 +n02937958 +n02938218 +n02938321 +n02938886 +n02939185 +n02939763 +n02939866 +n02940289 +n02940385 +n02940570 +n02940706 +n02941095 +n02941228 +n02941845 +n02942015 +n02942147 +n02942349 +n02942460 +n02942699 +n02943241 +n02943465 +n02943686 +n02943871 +n02943964 +n02944075 +n02944146 +n02944256 +n02944459 +n02944579 +n02944826 +n02945161 +n02945813 +n02945964 +n02946127 
+n02946270 +n02946348 +n02946509 +n02946753 +n02946824 +n02946921 +n02947212 +n02947660 +n02947818 +n02947977 +n02948072 +n02948293 +n02948403 +n02948557 +n02948834 +n02948942 +n02949084 +n02949202 +n02949356 +n02949542 +n02950018 +n02950120 +n02950186 +n02950256 +n02950482 +n02950632 +n02950826 +n02950943 +n02951358 +n02951585 +n02951703 +n02951843 +n02952109 +n02952237 +n02952374 +n02952485 +n02952585 +n02952674 +n02952798 +n02952935 +n02953056 +n02953197 +n02953455 +n02953552 +n02953673 +n02953850 +n02954163 +n02954340 +n02954938 +n02955065 +n02955247 +n02955540 +n02955767 +n02956393 +n02956699 +n02956795 +n02956883 +n02957008 +n02957135 +n02957252 +n02957427 +n02957755 +n02957862 +n02958343 +n02959942 +n02960352 +n02960690 +n02960903 +n02961035 +n02961225 +n02961451 +n02961544 +n02961947 +n02962061 +n02962200 +n02962414 +n02962843 +n02962938 +n02963159 +n02963302 +n02963503 +n02963692 +n02963821 +n02963987 +n02964075 +n02964196 +n02964295 +n02964634 +n02964843 +n02964934 +n02965024 +n02965122 +n02965216 +n02965300 +n02965529 +n02965783 +n02966068 +n02966193 +n02966545 +n02966687 +n02966786 +n02966942 +n02967081 +n02967170 +n02967294 +n02967407 +n02967540 +n02967626 +n02967782 +n02967991 +n02968074 +n02968210 +n02968333 +n02968473 +n02969010 +n02969163 +n02969323 +n02969527 +n02969634 +n02969886 +n02970408 +n02970534 +n02970685 +n02970849 +n02971167 +n02971356 +n02971473 +n02971579 +n02971691 +n02971940 +n02972397 +n02972714 +n02972934 +n02973017 +n02973236 +n02973805 +n02973904 +n02974003 +n02974348 +n02974454 +n02974565 +n02974697 +n02975212 +n02975589 +n02975994 +n02976123 +n02976249 +n02976350 +n02976455 +n02976552 +n02976641 +n02976815 +n02976939 +n02977058 +n02977330 +n02977438 +n02977619 +n02977936 +n02978055 +n02978205 +n02978367 +n02978478 +n02978753 +n02978881 +n02979074 +n02979186 +n02979290 +n02979399 +n02979516 +n02979836 +n02980036 +n02980203 +n02980441 +n02980625 +n02981024 +n02981198 +n02981321 +n02981565 +n02981792 +n02981911 +n02982232 +n02982416 +n02982515 +n02982599 +n02983072 +n02983189 +n02983357 +n02983507 +n02983904 +n02984061 +n02984203 +n02984469 +n02984699 +n02985137 +n02985606 +n02985828 +n02985963 +n02986066 +n02986160 +n02986348 +n02987047 +n02987379 +n02987492 +n02987706 +n02987823 +n02987950 +n02988066 +n02988156 +n02988304 +n02988486 +n02988679 +n02988963 +n02989099 +n02990373 +n02990758 +n02991048 +n02991302 +n02991847 +n02992032 +n02992211 +n02992368 +n02992529 +n02992795 +n02993194 +n02993368 +n02993546 +n02994573 +n02994743 +n02995345 +n02995871 +n02995998 +n02997391 +n02997607 +n02997910 +n02998003 +n02998107 +n02998563 +n02998696 +n02998841 +n02999138 +n02999410 +n02999936 +n03000134 +n03000247 +n03000530 +n03000684 +n03001115 +n03001282 +n03001540 +n03001627 +n03002096 +n03002210 +n03002341 +n03002555 +n03002711 +n03002816 +n03002948 +n03003091 +n03003633 +n03004275 +n03004409 +n03004531 +n03004620 +n03004713 +n03004824 +n03005033 +n03005147 +n03005285 +n03005515 +n03005619 +n03006626 +n03006788 +n03006903 +n03007130 +n03007297 +n03007444 +n03007591 +n03008177 +n03008817 +n03008976 +n03009111 +n03009269 +n03009794 +n03010473 +n03010656 +n03010795 +n03010915 +n03011018 +n03011355 +n03011741 +n03012013 +n03012159 +n03012373 +n03012499 +n03012644 +n03012734 +n03012897 +n03013006 +n03013438 +n03013580 +n03013850 +n03014440 +n03014705 +n03015149 +n03015254 +n03015478 +n03015631 +n03015851 +n03016209 +n03016389 +n03016609 +n03016737 +n03016868 +n03016953 +n03017070 +n03017168 +n03017698 +n03017835 +n03018209 +n03018349 +n03018614 +n03018712 +n03018848 
+n03019198 +n03019304 +n03019434 +n03019685 +n03019806 +n03019938 +n03020034 +n03020416 +n03020692 +n03021228 +n03024064 +n03024233 +n03024333 +n03024518 +n03025070 +n03025165 +n03025250 +n03025886 +n03026506 +n03026907 +n03027001 +n03027108 +n03027250 +n03027505 +n03027625 +n03028079 +n03028596 +n03028785 +n03029066 +n03029197 +n03029296 +n03029445 +n03029925 +n03030262 +n03030353 +n03030557 +n03030880 +n03031012 +n03031152 +n03031422 +n03031756 +n03032252 +n03032453 +n03032811 +n03033267 +n03033362 +n03033986 +n03034244 +n03034405 +n03034516 +n03034663 +n03035252 +n03035510 +n03035715 +n03035832 +n03036022 +n03036149 +n03036244 +n03036341 +n03036469 +n03036701 +n03036866 +n03037108 +n03037228 +n03037404 +n03037590 +n03037709 +n03038041 +n03038281 +n03038480 +n03038685 +n03038870 +n03039015 +n03039259 +n03039353 +n03039493 +n03039827 +n03039947 +n03040229 +n03040376 +n03040836 +n03041114 +n03041265 +n03041449 +n03041632 +n03041810 +n03042139 +n03042384 +n03042490 +n03042697 +n03042829 +n03042984 +n03043173 +n03043274 +n03043423 +n03043693 +n03043798 +n03043958 +n03044671 +n03044801 +n03044934 +n03045074 +n03045228 +n03045337 +n03045698 +n03045800 +n03046029 +n03046133 +n03046257 +n03046802 +n03046921 +n03047052 +n03047171 +n03047690 +n03047799 +n03047941 +n03048883 +n03049066 +n03049326 +n03049457 +n03049782 +n03049924 +n03050026 +n03050453 +n03050546 +n03050655 +n03050864 +n03051041 +n03051249 +n03051396 +n03051540 +n03052464 +n03052917 +n03053047 +n03053976 +n03054491 +n03054605 +n03054901 +n03055159 +n03055418 +n03055670 +n03055857 +n03056097 +n03056215 +n03056288 +n03056493 +n03056583 +n03056873 +n03057021 +n03057541 +n03057636 +n03057724 +n03057841 +n03057920 +n03058107 +n03058603 +n03058949 +n03059103 +n03059236 +n03059366 +n03059685 +n03059934 +n03060728 +n03061050 +n03061211 +n03061345 +n03061505 +n03061674 +n03061819 +n03061893 +n03062015 +n03062122 +n03062245 +n03062336 +n03062651 +n03062798 +n03062985 +n03063073 +n03063199 +n03063338 +n03063485 +n03063599 +n03063689 +n03063834 +n03063968 +n03064250 +n03064350 +n03064562 +n03064758 +n03064935 +n03065243 +n03065424 +n03065708 +n03066232 +n03066359 +n03066464 +n03066849 +n03067093 +n03067212 +n03067339 +n03067518 +n03068181 +n03068998 +n03069752 +n03070059 +n03070193 +n03070396 +n03070587 +n03070854 +n03071021 +n03071160 +n03071288 +n03071552 +n03072056 +n03072201 +n03072440 +n03072682 +n03073296 +n03073384 +n03073545 +n03073694 +n03073977 +n03074380 +n03074855 +n03075097 +n03075248 +n03075370 +n03075500 +n03075634 +n03075768 +n03075946 +n03076411 +n03076623 +n03076708 +n03077442 +n03077616 +n03077741 +n03078287 +n03078506 +n03078670 +n03078802 +n03078995 +n03079136 +n03079230 +n03079494 +n03079616 +n03079741 +n03080309 +n03080497 +n03080633 +n03080731 +n03080904 +n03081859 +n03081986 +n03082127 +n03082280 +n03082450 +n03082656 +n03082807 +n03082979 +n03084420 +n03084834 +n03085013 +n03085219 +n03085333 +n03085602 +n03085781 +n03085915 +n03086183 +n03086457 +n03086580 +n03086670 +n03086868 +n03087069 +n03087245 +n03087366 +n03087521 +n03087643 +n03087816 +n03088389 +n03088580 +n03088707 +n03089477 +n03089624 +n03089753 +n03089879 +n03090000 +n03090172 +n03090437 +n03090710 +n03090856 +n03091044 +n03091223 +n03091374 +n03091907 +n03092053 +n03092166 +n03092314 +n03092476 +n03092656 +n03092883 +n03093427 +n03093792 +n03094159 +n03094503 +n03095699 +n03095965 +n03096439 +n03096960 +n03097362 +n03097535 +n03097673 +n03098140 +n03098515 +n03098688 +n03098806 +n03098959 +n03099147 +n03099274 +n03099454 +n03099622 +n03099771 +n03099945 
+n03100240 +n03100346 +n03100490 +n03100897 +n03101156 +n03101302 +n03101375 +n03101517 +n03101664 +n03101796 +n03101986 +n03102371 +n03102516 +n03102654 +n03102859 +n03103128 +n03103396 +n03103563 +n03103904 +n03104019 +n03104512 +n03105088 +n03105214 +n03105306 +n03105467 +n03105645 +n03105810 +n03105974 +n03106722 +n03106898 +n03107046 +n03107488 +n03107716 +n03108455 +n03108624 +n03108759 +n03108853 +n03109033 +n03109150 +n03109253 +n03109693 +n03109881 +n03110202 +n03110669 +n03111041 +n03111177 +n03111296 +n03111690 +n03112240 +n03112719 +n03112869 +n03113152 +n03113505 +n03113657 +n03113835 +n03114041 +n03114236 +n03114379 +n03114504 +n03114743 +n03114839 +n03115014 +n03115180 +n03115400 +n03115663 +n03115762 +n03115897 +n03116008 +n03116163 +n03116530 +n03116767 +n03117199 +n03117642 +n03118346 +n03118969 +n03119203 +n03119396 +n03119510 +n03120198 +n03120491 +n03120778 +n03121040 +n03121190 +n03121298 +n03121431 +n03121897 +n03122073 +n03122202 +n03122295 +n03122748 +n03123553 +n03123666 +n03123809 +n03123917 +n03124043 +n03124170 +n03124313 +n03124474 +n03124590 +n03125057 +n03125588 +n03125729 +n03125870 +n03126090 +n03126385 +n03126580 +n03126707 +n03126927 +n03127024 +n03127203 +n03127408 +n03127531 +n03127747 +n03127925 +n03128085 +n03128248 +n03128427 +n03128519 +n03129001 +n03129471 +n03129636 +n03129753 +n03129848 +n03130066 +n03130233 +n03130563 +n03130761 +n03130866 +n03131193 +n03131574 +n03131669 +n03131967 +n03132076 +n03132261 +n03132438 +n03132666 +n03132776 +n03133050 +n03133415 +n03133878 +n03134118 +n03134232 +n03134394 +n03134739 +n03134853 +n03135030 +n03135532 +n03135656 +n03135788 +n03135917 +n03136051 +n03136254 +n03136369 +n03136504 +n03137473 +n03137579 +n03138128 +n03138217 +n03138344 +n03138669 +n03139089 +n03139464 +n03139640 +n03139998 +n03140126 +n03140292 +n03140431 +n03140546 +n03140652 +n03140771 +n03140900 +n03141065 +n03141327 +n03141455 +n03141612 +n03141702 +n03141823 +n03142099 +n03142205 +n03142325 +n03142431 +n03142679 +n03143400 +n03143572 +n03143754 +n03144156 +n03144873 +n03144982 +n03145147 +n03145277 +n03145384 +n03145522 +n03145719 +n03145843 +n03146219 +n03146342 +n03146449 +n03146560 +n03146687 +n03146777 +n03146846 +n03147084 +n03147156 +n03147280 +n03147509 +n03148324 +n03148518 +n03148727 +n03148808 +n03149135 +n03149401 +n03149686 +n03149810 +n03150232 +n03150511 +n03150661 +n03150795 +n03151077 +n03152303 +n03152951 +n03153246 +n03153585 +n03153948 +n03154073 +n03154316 +n03154446 +n03154616 +n03154745 +n03154895 +n03155178 +n03155502 +n03155915 +n03156071 +n03156279 +n03156405 +n03156767 +n03157348 +n03158186 +n03158414 +n03158668 +n03158796 +n03158885 +n03159535 +n03159640 +n03160001 +n03160186 +n03160309 +n03160740 +n03161016 +n03161450 +n03161893 +n03162297 +n03162460 +n03162556 +n03162714 +n03162818 +n03163222 +n03163381 +n03163488 +n03163798 +n03163973 +n03164192 +n03164344 +n03164605 +n03164722 +n03164929 +n03165096 +n03165211 +n03165466 +n03165616 +n03165823 +n03165955 +n03166120 +n03166514 +n03166600 +n03166685 +n03166809 +n03166951 +n03167153 +n03167978 +n03168107 +n03168217 +n03168543 +n03168663 +n03168774 +n03168933 +n03169063 +n03169176 +n03170292 +n03170459 +n03170635 +n03170872 +n03171228 +n03171356 +n03171635 +n03171910 +n03172038 +n03172738 +n03172965 +n03173270 +n03173387 +n03173929 +n03174079 +n03174450 +n03174731 +n03175081 +n03175189 +n03175301 +n03175457 +n03175604 +n03175843 +n03175983 +n03176238 +n03176386 +n03176594 +n03176763 +n03177059 +n03177165 +n03177708 +n03178000 +n03178173 +n03178430 +n03178538 
+n03178674
+n03179701
+n03179910
+n03180011
[... several thousand additional ImageNet-21k WordNet synset IDs, one `+`-prefixed ID per line in the original patch, continuing in sorted order through n07742224 ...]
+n07742313 +n07742415 +n07742513 +n07742605 +n07742704 +n07743224 +n07743384 +n07743544 +n07743723 +n07743902 +n07744057 +n07744246 +n07744430 +n07744559 +n07744682 +n07744811 +n07745046 +n07745197 +n07745357 +n07745466 +n07745661 +n07745940 +n07746038 +n07746186 +n07746334 +n07746551 +n07746749 +n07746910 +n07747055 +n07747607 +n07747811 +n07747951 +n07748157 +n07748276 +n07748416 +n07748574 +n07748753 +n07748912 +n07749095 +n07749192 +n07749312 +n07749446 +n07749582 +n07749731 +n07749870 +n07749969 +n07750146 +n07750299 +n07750449 +n07750586 +n07750736 +n07750872 +n07751004 +n07751148 +n07751280 +n07751451 +n07751737 +n07751858 +n07751977 +n07752109 +n07752264 +n07752377 +n07752514 +n07752602 +n07752664 +n07752782 +n07752874 +n07752966 +n07753113 +n07753275 +n07753448 +n07753592 +n07753743 +n07753980 +n07754155 +n07754279 +n07754451 +n07754684 +n07754894 +n07755089 +n07755262 +n07755411 +n07755619 +n07755707 +n07755929 +n07756096 +n07756325 +n07756499 +n07756641 +n07756838 +n07756951 +n07757132 +n07757312 +n07757511 +n07757602 +n07757753 +n07757874 +n07757990 +n07758125 +n07758260 +n07758407 +n07758582 +n07758680 +n07758950 +n07759194 +n07759324 +n07759424 +n07759576 +n07759691 +n07759816 +n07760070 +n07760153 +n07760297 +n07760395 +n07760501 +n07760673 +n07760755 +n07760859 +n07761141 +n07761309 +n07761611 +n07761777 +n07761954 +n07762114 +n07762244 +n07762373 +n07762534 +n07762740 +n07762913 +n07763107 +n07763290 +n07763483 +n07763629 +n07763792 +n07763987 +n07764155 +n07764315 +n07764486 +n07764630 +n07764847 +n07765073 +n07765208 +n07765361 +n07765517 +n07765612 +n07765728 +n07765862 +n07765999 +n07766173 +n07766409 +n07766530 +n07766723 +n07766891 +n07767002 +n07767171 +n07767344 +n07767549 +n07767709 +n07767847 +n07768068 +n07768139 +n07768230 +n07768318 +n07768423 +n07768590 +n07768694 +n07768858 +n07769102 +n07769306 +n07769465 +n07769584 +n07769731 +n07769886 +n07770034 +n07770180 +n07770439 +n07770571 +n07770763 +n07770869 +n07771082 +n07771212 +n07771405 +n07771539 +n07771731 +n07771891 +n07772026 +n07772147 +n07772274 +n07772413 +n07772788 +n07772935 +n07773428 +n07774182 +n07774295 +n07774479 +n07774596 +n07774719 +n07774842 +n07775050 +n07775197 +n07783827 +n07785487 +n07800091 +n07800487 +n07800636 +n07800740 +n07801007 +n07801091 +n07801342 +n07801508 +n07801709 +n07801779 +n07801892 +n07802026 +n07802152 +n07802246 +n07802417 +n07802767 +n07802863 +n07802963 +n07803093 +n07803213 +n07803310 +n07803408 +n07803545 +n07803779 +n07803895 +n07803992 +n07804152 +n07804323 +n07804543 +n07804657 +n07804771 +n07804900 +n07805006 +n07805254 +n07805389 +n07805478 +n07805594 +n07805731 +n07805966 +n07806043 +n07806120 +n07806221 +n07806633 +n07806774 +n07806879 +n07807002 +n07807171 +n07807317 +n07807472 +n07807594 +n07807710 +n07807834 +n07807922 +n07808022 +n07808166 +n07808268 +n07808352 +n07808479 +n07808587 +n07808675 +n07808806 +n07808904 +n07809096 +n07809368 +n07810531 +n07810907 +n07811416 +n07812046 +n07812184 +n07812662 +n07812790 +n07812913 +n07813107 +n07813324 +n07813495 +n07813579 +n07813717 +n07813833 +n07814007 +n07814203 +n07814390 +n07814487 +n07814634 +n07814790 +n07814925 +n07815163 +n07815294 +n07815424 +n07815588 +n07815839 +n07815956 +n07816052 +n07816164 +n07816296 +n07816398 +n07816575 +n07816726 +n07816839 +n07817024 +n07817160 +n07817315 +n07817465 +n07817599 +n07817758 +n07817871 +n07818029 +n07818133 +n07818277 +n07818422 +n07818572 +n07818689 +n07818825 +n07818995 +n07819166 +n07819303 +n07819480 +n07819682 +n07819769 +n07819896 +n07820036 +n07820145 
+n07820297 +n07820497 +n07820683 +n07820814 +n07820960 +n07821107 +n07821260 +n07821404 +n07821610 +n07821758 +n07821919 +n07822053 +n07822197 +n07822323 +n07822518 +n07822687 +n07822845 +n07823105 +n07823280 +n07823369 +n07823460 +n07823591 +n07823698 +n07823814 +n07823951 +n07824191 +n07824268 +n07824383 +n07824502 +n07824702 +n07824863 +n07824988 +n07825194 +n07825399 +n07825496 +n07825597 +n07825717 +n07825850 +n07825972 +n07826091 +n07826250 +n07826340 +n07826453 +n07826544 +n07826653 +n07826930 +n07827130 +n07827284 +n07827410 +n07827554 +n07827750 +n07827896 +n07828041 +n07828156 +n07828275 +n07828378 +n07828642 +n07828987 +n07829248 +n07829331 +n07829412 +n07830493 +n07830593 +n07830690 +n07830841 +n07830986 +n07831146 +n07831267 +n07831450 +n07831663 +n07831821 +n07831955 +n07832099 +n07832202 +n07832307 +n07832416 +n07832592 +n07832741 +n07832902 +n07833333 +n07833535 +n07833672 +n07833816 +n07833951 +n07834065 +n07834160 +n07834286 +n07834507 +n07834618 +n07834774 +n07834872 +n07835051 +n07835173 +n07835331 +n07835457 +n07835547 +n07835701 +n07835823 +n07835921 +n07836077 +n07836269 +n07836456 +n07836600 +n07836731 +n07836838 +n07837002 +n07837110 +n07837234 +n07837362 +n07837545 +n07837630 +n07837755 +n07837912 +n07838073 +n07838233 +n07838441 +n07838551 +n07838659 +n07838811 +n07838905 +n07839055 +n07839172 +n07839312 +n07839478 +n07839593 +n07839730 +n07839864 +n07840027 +n07840124 +n07840219 +n07840304 +n07840395 +n07840520 +n07840672 +n07840804 +n07841037 +n07841345 +n07841495 +n07841639 +n07841800 +n07841907 +n07842044 +n07842130 +n07842202 +n07842308 +n07842433 +n07842605 +n07842753 +n07842972 +n07843117 +n07843220 +n07843348 +n07843464 +n07843636 +n07843775 +n07844042 +n07844604 +n07844786 +n07844867 +n07845087 +n07845166 +n07845335 +n07845421 +n07845495 +n07845571 +n07845702 +n07845775 +n07845863 +n07846014 +n07846143 +n07846274 +n07846359 +n07846471 +n07846557 +n07846688 +n07846802 +n07846938 +n07847047 +n07847198 +n07847453 +n07847585 +n07847706 +n07847827 +n07847917 +n07848093 +n07848196 +n07848338 +n07848771 +n07848936 +n07849026 +n07849186 +n07849336 +n07849506 +n07849619 +n07849733 +n07849912 +n07850083 +n07850219 +n07850329 +n07851054 +n07851298 +n07851443 +n07851554 +n07851641 +n07851767 +n07851926 +n07852045 +n07852229 +n07852302 +n07852376 +n07852452 +n07852532 +n07852614 +n07852712 +n07852833 +n07852919 +n07853125 +n07853232 +n07853345 +n07853445 +n07853560 +n07853648 +n07853762 +n07853852 +n07853946 +n07854066 +n07854184 +n07854266 +n07854348 +n07854455 +n07854614 +n07854707 +n07854813 +n07854982 +n07855105 +n07855188 +n07855317 +n07855413 +n07855510 +n07855603 +n07855721 +n07855812 +n07855907 +n07856045 +n07856186 +n07856270 +n07856756 +n07856895 +n07856992 +n07857076 +n07857170 +n07857356 +n07857598 +n07857731 +n07857959 +n07858114 +n07858197 +n07858336 +n07858484 +n07858595 +n07858841 +n07858978 +n07859142 +n07859284 +n07859583 +n07859796 +n07859951 +n07860103 +n07860208 +n07860331 +n07860447 +n07860548 +n07860629 +n07860805 +n07860988 +n07861158 +n07861247 +n07861334 +n07861557 +n07861681 +n07861813 +n07861983 +n07862095 +n07862244 +n07862348 +n07862461 +n07862611 +n07862770 +n07862946 +n07863107 +n07863229 +n07863374 +n07863547 +n07863644 +n07863802 +n07863935 +n07864065 +n07864198 +n07864317 +n07864475 +n07864638 +n07864756 +n07864934 +n07865105 +n07865196 +n07865484 +n07865575 +n07865700 +n07865788 +n07866015 +n07866151 +n07866277 +n07866409 +n07866571 +n07866723 +n07866868 +n07867021 +n07867164 +n07867324 +n07867421 +n07867616 +n07867751 +n07867883 
+n07868045 +n07868200 +n07868340 +n07868508 +n07868684 +n07868830 +n07868955 +n07869111 +n07869291 +n07869391 +n07869522 +n07869611 +n07869775 +n07869937 +n07870069 +n07870167 +n07870313 +n07870478 +n07870620 +n07870734 +n07870894 +n07871065 +n07871234 +n07871335 +n07871436 +n07871588 +n07871720 +n07871810 +n07872593 +n07872748 +n07873057 +n07873198 +n07873348 +n07873464 +n07873679 +n07873807 +n07874063 +n07874159 +n07874259 +n07874343 +n07874441 +n07874531 +n07874674 +n07874780 +n07874995 +n07875086 +n07875152 +n07875267 +n07875436 +n07875560 +n07875693 +n07875835 +n07875926 +n07876026 +n07876189 +n07876281 +n07876460 +n07876550 +n07876651 +n07876775 +n07876893 +n07877187 +n07877299 +n07877675 +n07877849 +n07877961 +n07878145 +n07878283 +n07878479 +n07878647 +n07878785 +n07878926 +n07879072 +n07879174 +n07879350 +n07879450 +n07879560 +n07879659 +n07879821 +n07879953 +n07880080 +n07880213 +n07880325 +n07880458 +n07880751 +n07880880 +n07880968 +n07881117 +n07881205 +n07881404 +n07881525 +n07881625 +n07881800 +n07882420 +n07882497 +n07882886 +n07883031 +n07883156 +n07883251 +n07883384 +n07883510 +n07883661 +n07884567 +n07885705 +n07886057 +n07886176 +n07886317 +n07886463 +n07886572 +n07886849 +n07887099 +n07887192 +n07887304 +n07887461 +n07887634 +n07887967 +n07888058 +n07888229 +n07888378 +n07888465 +n07888816 +n07888909 +n07889193 +n07889274 +n07889510 +n07889814 +n07889990 +n07890068 +n07890226 +n07890352 +n07890540 +n07890617 +n07890750 +n07890890 +n07890970 +n07891095 +n07891189 +n07891309 +n07891433 +n07891726 +n07892418 +n07892512 +n07892813 +n07893253 +n07893425 +n07893528 +n07893642 +n07893792 +n07893891 +n07894102 +n07894298 +n07894451 +n07894551 +n07894703 +n07894799 +n07894965 +n07895100 +n07895237 +n07895435 +n07895595 +n07895710 +n07895839 +n07895962 +n07896060 +n07896165 +n07896287 +n07896422 +n07896560 +n07896661 +n07896765 +n07896893 +n07896994 +n07897116 +n07897200 +n07897438 +n07897600 +n07897750 +n07897865 +n07897975 +n07898117 +n07898247 +n07898333 +n07898443 +n07898617 +n07898745 +n07898895 +n07899003 +n07899108 +n07899292 +n07899434 +n07899533 +n07899660 +n07899769 +n07899899 +n07899976 +n07900225 +n07900406 +n07900616 +n07900734 +n07900825 +n07900958 +n07901355 +n07901457 +n07901587 +n07902121 +n07902336 +n07902443 +n07902520 +n07902698 +n07902799 +n07902937 +n07903101 +n07903208 +n07903543 +n07903643 +n07903731 +n07903841 +n07903962 +n07904072 +n07904293 +n07904395 +n07904637 +n07904760 +n07904865 +n07904934 +n07905038 +n07905296 +n07905386 +n07905474 +n07905618 +n07905770 +n07905979 +n07906111 +n07906284 +n07906572 +n07906718 +n07906877 +n07907037 +n07907161 +n07907342 +n07907429 +n07907548 +n07907831 +n07907943 +n07908411 +n07908567 +n07908647 +n07908812 +n07908923 +n07909129 +n07909231 +n07909362 +n07909504 +n07909593 +n07909714 +n07909811 +n07909954 +n07910048 +n07910152 +n07910245 +n07910379 +n07910538 +n07910656 +n07910799 +n07910970 +n07911061 +n07911249 +n07911371 +n07911677 +n07912093 +n07912211 +n07913180 +n07913300 +n07913393 +n07913537 +n07913644 +n07913774 +n07913882 +n07914006 +n07914128 +n07914271 +n07914413 +n07914586 +n07914686 +n07914777 +n07914887 +n07914995 +n07915094 +n07915213 +n07915366 +n07915491 +n07915618 +n07915800 +n07915918 +n07916041 +n07916183 +n07916319 +n07916437 +n07916582 +n07917133 +n07917272 +n07917392 +n07917507 +n07917618 +n07917791 +n07917874 +n07917951 +n07918028 +n07918193 +n07918309 +n07918706 +n07918879 +n07919165 +n07919310 +n07919441 +n07919572 +n07919665 +n07919787 +n07919894 +n07920052 +n07920222 +n07920349 +n07920540 
+n07920663 +n07920872 +n07920989 +n07921090 +n07921239 +n07921360 +n07921455 +n07921615 +n07921834 +n07921948 +n07922041 +n07922147 +n07922512 +n07922607 +n07922764 +n07922955 +n07923748 +n07924033 +n07924276 +n07924366 +n07924443 +n07924560 +n07924655 +n07924747 +n07924834 +n07924955 +n07925116 +n07925229 +n07925327 +n07925423 +n07925500 +n07925608 +n07925708 +n07925808 +n07925966 +n07926250 +n07926346 +n07926442 +n07926540 +n07926785 +n07926920 +n07927070 +n07927197 +n07927512 +n07927716 +n07927836 +n07927931 +n07928163 +n07928264 +n07928367 +n07928488 +n07928578 +n07928696 +n07928790 +n07928887 +n07928998 +n07929172 +n07929351 +n07929519 +n07929940 +n07930062 +n07930205 +n07930315 +n07930433 +n07930554 +n07930864 +n07931001 +n07931096 +n07931280 +n07931452 +n07931612 +n07931733 +n07931870 +n07932039 +n07932323 +n07932454 +n07932614 +n07932762 +n07932841 +n07933154 +n07933274 +n07933530 +n07933652 +n07933799 +n07933891 +n07934032 +n07934152 +n07934282 +n07934373 +n07934530 +n07934678 +n07934800 +n07934908 +n07935043 +n07935152 +n07935288 +n07935379 +n07935504 +n07935737 +n07935878 +n07936015 +n07936093 +n07936263 +n07936459 +n07936548 +n07936745 +n07936979 +n07937069 +n07937344 +n07937461 +n07937621 +n07938007 +n07938149 +n07938313 +n07938594 +n07942152 +n07951464 +n07954211 +n07977870 +n08079613 +n08182379 +n08238463 +n08242223 +n08249459 +n08253141 +n08256735 +n08376250 +n08385989 +n08492354 +n08492461 +n08494231 +n08495908 +n08496334 +n08500819 +n08500989 +n08501887 +n08505018 +n08506347 +n08511017 +n08517010 +n08517676 +n08518171 +n08519299 +n08521623 +n08523340 +n08524735 +n08539072 +n08539276 +n08540532 +n08547468 +n08547544 +n08551296 +n08554440 +n08555333 +n08555710 +n08558770 +n08558963 +n08559155 +n08560295 +n08569482 +n08571275 +n08571642 +n08571898 +n08573674 +n08573842 +n08578517 +n08579266 +n08579352 +n08580944 +n08583292 +n08583455 +n08583554 +n08583682 +n08584914 +n08586978 +n08589670 +n08596076 +n08597579 +n08598301 +n08598568 +n08599174 +n08599292 +n08611339 +n08611421 +n08613733 +n08614632 +n08616050 +n08618831 +n08619112 +n08623676 +n08628141 +n08633683 +n08640531 +n08640739 +n08640962 +n08643267 +n08644045 +n08645104 +n08645212 +n08645318 +n08647264 +n08648917 +n08649711 +n08651104 +n08652376 +n08658309 +n08658918 +n08659242 +n08659331 +n08659446 +n08659861 +n08661878 +n08662427 +n08663051 +n08663703 +n08663860 +n08673039 +n08674344 +n08676253 +n08677424 +n08677801 +n08678783 +n08679167 +n08679269 +n08679562 +n08685188 +n08782627 +n08896327 +n09032191 +n09186592 +n09189157 +n09191635 +n09193551 +n09193705 +n09194227 +n09199101 +n09201998 +n09203827 +n09205509 +n09206896 +n09206985 +n09208496 +n09209025 +n09210862 +n09213434 +n09213565 +n09214060 +n09214269 +n09214916 +n09215023 +n09215437 +n09217230 +n09218315 +n09218494 +n09218641 +n09219233 +n09223487 +n09224725 +n09226869 +n09228055 +n09229709 +n09230041 +n09230202 +n09231117 +n09233446 +n09233603 +n09238926 +n09239302 +n09242389 +n09245515 +n09246464 +n09247410 +n09248153 +n09248399 +n09249034 +n09249155 +n09251407 +n09255070 +n09256479 +n09257843 +n09259025 +n09259219 +n09260907 +n09262690 +n09263912 +n09264803 +n09265620 +n09266604 +n09267854 +n09268007 +n09269341 +n09269472 +n09269882 +n09270160 +n09270657 +n09270735 +n09274152 +n09274305 +n09279986 +n09281252 +n09282208 +n09283193 +n09283405 +n09283514 +n09283767 +n09283866 +n09287415 +n09287968 +n09288635 +n09289331 +n09289596 +n09290350 +n09290444 +n09294877 +n09295210 +n09295946 +n09300306 +n09300905 +n09302616 +n09303008 +n09303528 +n09304750 +n09305031 
+n09305898 +n09308572 +n09308743 +n09309046 +n09309168 +n09309292 +n09310616 +n09315159 +n09319604 +n09325824 +n09326662 +n09327077 +n09327538 +n09330378 +n09331251 +n09332890 +n09335693 +n09335809 +n09336555 +n09337048 +n09337253 +n09338013 +n09339810 +n09344198 +n09344324 +n09344724 +n09348460 +n09349648 +n09351905 +n09352849 +n09353815 +n09354511 +n09357346 +n09357447 +n09359803 +n09361517 +n09362316 +n09362945 +n09366017 +n09366317 +n09375606 +n09376198 +n09376526 +n09376786 +n09381242 +n09382099 +n09384106 +n09389867 +n09391386 +n09391644 +n09391774 +n09392402 +n09393524 +n09393605 +n09396465 +n09396608 +n09398076 +n09398677 +n09399592 +n09400584 +n09400987 +n09402944 +n09403086 +n09403211 +n09403427 +n09403734 +n09405078 +n09405787 +n09406793 +n09409512 +n09409752 +n09410224 +n09411189 +n09411295 +n09415584 +n09415671 +n09416076 +n09416890 +n09421031 +n09421799 +n09421951 +n09422190 +n09422631 +n09425019 +n09425344 +n09428293 +n09428628 +n09429630 +n09432283 +n09432990 +n09433312 +n09433442 +n09433839 +n09435739 +n09436444 +n09436708 +n09437454 +n09438844 +n09438940 +n09439032 +n09439213 +n09442595 +n09443281 +n09443641 +n09444783 +n09445008 +n09445289 +n09447666 +n09448690 +n09450163 +n09451237 +n09452291 +n09452395 +n09452760 +n09453008 +n09454153 +n09454412 +n09454744 +n09456207 +n09457979 +n09458269 +n09459979 +n09460046 +n09461069 +n09462600 +n09463226 +n09464486 +n09466678 +n09467696 +n09468604 +n09470027 +n09470222 +n09472413 +n09472597 +n09474010 +n09474412 +n09474765 +n09475044 +n09475179 +n09475925 +n09476123 +n09478210 +n09480959 +n09481120 +n09493983 +n09495962 +n09505153 +n09537660 +n09556121 +n09605110 +n09606009 +n09606527 +n09607630 +n09607782 +n09607903 +n09608709 +n09610255 +n09610405 +n09611722 +n09612700 +n09613118 +n09613191 +n09613690 +n09615336 +n09616573 +n09616922 +n09617161 +n09617435 +n09617577 +n09617696 +n09618760 +n09618880 +n09618957 +n09619168 +n09619452 +n09620078 +n09620794 +n09621232 +n09622049 +n09622302 +n09624168 +n09624559 +n09624899 +n09625401 +n09626238 +n09627807 +n09627906 +n09629065 +n09629246 +n09629752 +n09631129 +n09632274 +n09632518 +n09633969 +n09635534 +n09635635 +n09635973 +n09636339 +n09637339 +n09638454 +n09638875 +n09639382 +n09639919 +n09640327 +n09640715 +n09641002 +n09641578 +n09643799 +n09644152 +n09644657 +n09648743 +n09648911 +n09649067 +n09650729 +n09650839 +n09650989 +n09651123 +n09651968 +n09652149 +n09653144 +n09653438 +n09654079 +n09654518 +n09654898 +n09655213 +n09655466 +n09656077 +n09657206 +n09657748 +n09658254 +n09658398 +n09658815 +n09658921 +n09659039 +n09659188 +n09660010 +n09660240 +n09661873 +n09662038 +n09662661 +n09662951 +n09663248 +n09663786 +n09663999 +n09664556 +n09664908 +n09665367 +n09665545 +n09666349 +n09666476 +n09666883 +n09667358 +n09668199 +n09668437 +n09668562 +n09668988 +n09669631 +n09670280 +n09670521 +n09670909 +n09671089 +n09672590 +n09672725 +n09672840 +n09673091 +n09674412 +n09674786 +n09675045 +n09675673 +n09675799 +n09675922 +n09676021 +n09676247 +n09676884 +n09677427 +n09678747 +n09679028 +n09679170 +n09679925 +n09680908 +n09681107 +n09681234 +n09681973 +n09683180 +n09683757 +n09683924 +n09684082 +n09684901 +n09685233 +n09685806 +n09686262 +n09686401 +n09688233 +n09688804 +n09689435 +n09689958 +n09690083 +n09690208 +n09690496 +n09690621 +n09690864 +n09691604 +n09691729 +n09691858 +n09692125 +n09692915 +n09693244 +n09693982 +n09694664 +n09694771 +n09695019 +n09695132 +n09695514 +n09695620 +n09695979 +n09696456 +n09696585 +n09696763 +n09697401 +n09697986 +n09698644 +n09699020 +n09699642 
+n09700125 +n09700964 +n09701148 +n09701833 +n09702134 +n09702673 +n09703101 +n09703344 +n09703485 +n09703708 +n09703809 +n09703932 +n09704057 +n09704157 +n09704283 +n09705003 +n09705124 +n09705671 +n09705784 +n09706029 +n09706255 +n09707061 +n09707289 +n09707735 +n09708750 +n09708889 +n09709531 +n09709673 +n09710041 +n09710164 +n09710886 +n09711132 +n09711435 +n09712324 +n09712448 +n09712696 +n09712967 +n09713108 +n09714120 +n09714694 +n09715165 +n09715303 +n09715427 +n09716047 +n09716933 +n09717233 +n09718217 +n09718811 +n09718936 +n09719309 +n09719794 +n09720033 +n09720256 +n09720595 +n09720702 +n09720842 +n09721244 +n09721444 +n09722064 +n09722658 +n09722817 +n09723067 +n09723819 +n09723944 +n09724234 +n09724533 +n09724656 +n09724785 +n09725000 +n09725229 +n09725546 +n09725653 +n09725772 +n09725935 +n09726621 +n09726811 +n09727440 +n09727826 +n09728137 +n09728285 +n09729062 +n09729156 +n09730077 +n09730204 +n09730824 +n09731343 +n09731436 +n09731571 +n09732170 +n09733459 +n09733793 +n09734185 +n09734450 +n09734535 +n09734639 +n09735258 +n09735654 +n09736485 +n09736798 +n09736945 +n09737050 +n09737161 +n09737453 +n09738121 +n09738400 +n09740724 +n09741074 +n09741331 +n09741722 +n09741816 +n09741904 +n09741999 +n09742101 +n09742315 +n09742927 +n09743487 +n09743601 +n09743792 +n09744161 +n09744346 +n09744462 +n09744679 +n09744834 +n09745229 +n09745324 +n09745834 +n09745933 +n09746936 +n09747191 +n09747495 +n09748101 +n09748408 +n09748648 +n09748889 +n09749386 +n09750282 +n09750641 +n09750770 +n09750891 +n09751076 +n09751496 +n09751622 +n09751895 +n09752023 +n09752519 +n09753348 +n09753792 +n09754152 +n09754217 +n09754633 +n09754907 +n09755086 +n09755241 +n09755555 +n09755788 +n09755893 +n09756049 +n09756195 +n09756961 +n09757449 +n09758173 +n09758885 +n09759501 +n09760290 +n09760609 +n09760913 +n09761068 +n09761753 +n09762011 +n09762385 +n09763272 +n09763784 +n09764201 +n09764598 +n09764732 +n09764900 +n09765118 +n09765278 +n09767197 +n09769076 +n09769525 +n09769929 +n09770179 +n09770359 +n09771435 +n09772330 +n09772746 +n09772930 +n09773962 +n09774167 +n09774783 +n09775907 +n09776346 +n09776642 +n09776807 +n09777870 +n09778266 +n09778537 +n09778783 +n09778927 +n09779124 +n09779280 +n09779461 +n09779790 +n09780395 +n09780828 +n09780984 +n09781398 +n09781504 +n09781650 +n09782167 +n09782397 +n09782855 +n09783537 +n09783776 +n09783884 +n09784043 +n09784160 +n09784564 +n09785236 +n09785659 +n09785891 +n09786115 +n09787534 +n09787765 +n09788073 +n09788237 +n09789150 +n09789566 +n09789898 +n09790047 +n09790482 +n09791014 +n09791419 +n09791816 +n09792125 +n09792555 +n09792969 +n09793141 +n09793352 +n09793946 +n09794550 +n09794668 +n09795010 +n09795124 +n09795334 +n09796809 +n09796974 +n09797742 +n09797873 +n09797998 +n09798096 +n09800469 +n09800964 +n09801102 +n09801275 +n09801533 +n09802445 +n09802641 +n09802951 +n09804230 +n09805151 +n09805324 +n09805475 +n09806944 +n09807075 +n09808080 +n09808591 +n09809279 +n09809538 +n09809749 +n09809925 +n09810166 +n09811568 +n09811712 +n09811852 +n09813219 +n09814252 +n09814381 +n09814488 +n09814567 +n09814660 +n09815455 +n09815790 +n09816654 +n09816771 +n09817174 +n09817386 +n09818022 +n09819477 +n09820044 +n09820263 +n09821831 +n09822830 +n09823153 +n09823287 +n09823502 +n09823832 +n09824135 +n09824609 +n09825096 +n09825750 +n09826204 +n09826605 +n09826821 +n09827246 +n09827363 +n09828216 +n09828403 +n09828988 +n09830194 +n09830400 +n09830629 +n09830759 +n09830926 +n09831962 +n09832456 +n09832633 +n09832978 +n09833111 +n09833275 +n09833441 +n09833536 
+n09833751 +n09833997 +n09834258 +n09834378 +n09834699 +n09834885 +n09835017 +n09835153 +n09835230 +n09835348 +n09835506 +n09836160 +n09836343 +n09836519 +n09836786 +n09837459 +n09837720 +n09838295 +n09838370 +n09838621 +n09839702 +n09840217 +n09840435 +n09840520 +n09841188 +n09841515 +n09841696 +n09842047 +n09842288 +n09842395 +n09842528 +n09842823 +n09843443 +n09843602 +n09843716 +n09843824 +n09844457 +n09844898 +n09845401 +n09845849 +n09846142 +n09846469 +n09846586 +n09846755 +n09846894 +n09847267 +n09847344 +n09847543 +n09848110 +n09848489 +n09849167 +n09849990 +n09850760 +n09850974 +n09851165 +n09851575 +n09853541 +n09853645 +n09853881 +n09854218 +n09854421 +n09854915 +n09855433 +n09856401 +n09856671 +n09856827 +n09857007 +n09858165 +n09858299 +n09858733 +n09859152 +n09859285 +n09859684 +n09859975 +n09861287 +n09861599 +n09861863 +n09861946 +n09862183 +n09862621 +n09863031 +n09863339 +n09863749 +n09863936 +n09864632 +n09864968 +n09865068 +n09865162 +n09865398 +n09865672 +n09865744 +n09866115 +n09866354 +n09866559 +n09866661 +n09866817 +n09866922 +n09867069 +n09867154 +n09867311 +n09868270 +n09868782 +n09868899 +n09869317 +n09869447 +n09869578 +n09870096 +n09871095 +n09871229 +n09871681 +n09871867 +n09871952 +n09872066 +n09872557 +n09873348 +n09873473 +n09873769 +n09873899 +n09874428 +n09874725 +n09874862 +n09875025 +n09875979 +n09876701 +n09877288 +n09877587 +n09877750 +n09877951 +n09878921 +n09879552 +n09880189 +n09880741 +n09881265 +n09881358 +n09881895 +n09883047 +n09883452 +n09883807 +n09885059 +n09885866 +n09886403 +n09886540 +n09888635 +n09889065 +n09889170 +n09889691 +n09889941 +n09890192 +n09890749 +n09891730 +n09892262 +n09892513 +n09892693 +n09893191 +n09893344 +n09893502 +n09893600 +n09894143 +n09894445 +n09894654 +n09894909 +n09895222 +n09895480 +n09895561 +n09895701 +n09895902 +n09896170 +n09896311 +n09896401 +n09896685 +n09896826 +n09898020 +n09899289 +n09899671 +n09899782 +n09899929 +n09901337 +n09901502 +n09901642 +n09901786 +n09901921 +n09902128 +n09902353 +n09902731 +n09902851 +n09902954 +n09903153 +n09903501 +n09903639 +n09903936 +n09904208 +n09904837 +n09905050 +n09905185 +n09905530 +n09906293 +n09906449 +n09906704 +n09907804 +n09908769 +n09909660 +n09909929 +n09910222 +n09910374 +n09910556 +n09910840 +n09911226 +n09912431 +n09912681 +n09912907 +n09912995 +n09913329 +n09913455 +n09913593 +n09915434 +n09915651 +n09916348 +n09917214 +n09917345 +n09917481 +n09917593 +n09918248 +n09918554 +n09918867 +n09919061 +n09919200 +n09919451 +n09919899 +n09920106 +n09920283 +n09920901 +n09921034 +n09923003 +n09923186 +n09923418 +n09923561 +n09923673 +n09923996 +n09924106 +n09924195 +n09924313 +n09924437 +n09924996 +n09927089 +n09927451 +n09928136 +n09928451 +n09928845 +n09929202 +n09929298 +n09929577 +n09930257 +n09930628 +n09930876 +n09931165 +n09931418 +n09931640 +n09932098 +n09932336 +n09932508 +n09932788 +n09933020 +n09933098 +n09933842 +n09933972 +n09934337 +n09934488 +n09934774 +n09935107 +n09935434 +n09936825 +n09936892 +n09937056 +n09937688 +n09937802 +n09937903 +n09938080 +n09938449 +n09938991 +n09940725 +n09940818 +n09941089 +n09941571 +n09941787 +n09941964 +n09942697 +n09942970 +n09943239 +n09943811 +n09944022 +n09944160 +n09944430 +n09945021 +n09945223 +n09945319 +n09945603 +n09945745 +n09946814 +n09947127 +n09950457 +n09950728 +n09951070 +n09951274 +n09951524 +n09951616 +n09952163 +n09953052 +n09953350 +n09953615 +n09954355 +n09954639 +n09955406 +n09955944 +n09956578 +n09957523 +n09958133 +n09958292 +n09958447 +n09958569 +n09959142 +n09959658 +n09960688 +n09961198 
+n09961331 +n09961469 +n09961605 +n09961739 +n09962966 +n09964202 +n09964411 +n09965515 +n09965787 +n09966470 +n09966554 +n09967063 +n09967406 +n09967555 +n09967816 +n09967967 +n09968259 +n09968652 +n09968741 +n09968845 +n09970088 +n09970192 +n09970402 +n09970822 +n09971273 +n09971385 +n09971839 +n09972010 +n09972458 +n09972587 +n09974648 +n09975425 +n09976024 +n09976283 +n09976429 +n09976728 +n09976917 +n09978442 +n09979321 +n09979913 +n09980458 +n09980805 +n09980985 +n09981092 +n09981278 +n09981540 +n09981939 +n09982152 +n09982525 +n09983314 +n09983572 +n09983889 +n09984960 +n09985470 +n09985809 +n09985978 +n09986450 +n09986700 +n09986904 +n09987045 +n09987161 +n09987239 +n09988063 +n09988311 +n09988493 +n09988703 +n09989502 +n09990415 +n09990690 +n09990777 +n09991740 +n09991867 +n09992538 +n09992837 +n09993252 +n09993651 +n09994400 +n09994673 +n09994808 +n09994878 +n09995829 +n09996039 +n09996304 +n09996481 +n09997622 +n09998788 +n09999135 +n10000294 +n10000459 +n10000787 +n10001217 +n10001481 +n10001764 +n10002257 +n10002760 +n10003476 +n10004718 +n10005006 +n10005934 +n10006177 +n10006748 +n10007684 +n10007809 +n10007995 +n10008123 +n10008254 +n10009162 +n10009276 +n10009484 +n10009671 +n10010062 +n10010243 +n10010632 +n10010767 +n10010864 +n10011360 +n10011486 +n10012484 +n10013811 +n10015215 +n10015485 +n10015792 +n10015897 +n10017272 +n10017422 +n10018747 +n10018861 +n10019072 +n10019187 +n10019406 +n10020366 +n10020533 +n10020670 +n10020807 +n10020890 +n10022908 +n10023264 +n10023506 +n10023656 +n10024025 +n10024362 +n10024937 +n10025060 +n10025295 +n10025391 +n10025635 +n10026976 +n10027246 +n10027590 +n10028402 +n10028541 +n10029068 +n10030277 +n10032987 +n10033412 +n10033572 +n10033663 +n10033888 +n10034201 +n10034614 +n10035952 +n10036266 +n10036444 +n10036692 +n10036929 +n10037080 +n10037385 +n10037588 +n10037922 +n10038119 +n10038409 +n10038620 +n10039271 +n10039946 +n10040240 +n10040698 +n10040945 +n10041373 +n10041887 +n10042690 +n10042845 +n10043024 +n10043491 +n10043643 +n10044682 +n10044879 +n10047199 +n10047459 +n10048117 +n10048367 +n10048612 +n10048836 +n10049363 +n10050043 +n10050880 +n10051026 +n10051761 +n10051861 +n10051975 +n10052694 +n10053439 +n10053808 +n10054657 +n10055297 +n10055410 +n10055566 +n10055730 +n10055847 +n10056103 +n10056611 +n10056719 +n10057271 +n10058411 +n10058962 +n10059067 +n10060075 +n10060175 +n10060352 +n10061043 +n10061195 +n10061431 +n10061882 +n10062042 +n10062176 +n10062275 +n10062492 +n10062594 +n10062716 +n10062905 +n10062996 +n10063635 +n10063919 +n10064831 +n10064977 +n10065758 +n10066206 +n10066314 +n10067011 +n10067305 +n10067600 +n10067968 +n10068234 +n10068425 +n10069296 +n10069981 +n10070108 +n10070377 +n10070449 +n10070563 +n10070711 +n10071332 +n10071557 +n10072054 +n10074249 +n10074578 +n10074735 +n10074841 +n10075299 +n10075693 +n10076224 +n10076483 +n10076604 +n10076957 +n10077106 +n10077593 +n10077879 +n10078131 +n10078719 +n10078806 +n10079399 +n10079893 +n10080117 +n10080508 +n10080869 +n10081204 +n10081842 +n10082043 +n10082299 +n10082423 +n10082562 +n10082687 +n10082997 +n10083677 +n10083823 +n10084043 +n10084295 +n10085101 +n10085869 +n10086383 +n10086744 +n10087434 +n10087736 +n10088200 +n10090745 +n10091349 +n10091450 +n10091564 +n10091651 +n10091861 +n10091997 +n10092488 +n10092643 +n10092794 +n10092978 +n10093167 +n10093475 +n10093818 +n10094320 +n10094584 +n10094782 +n10095265 +n10095420 +n10095769 +n10095869 +n10096126 +n10096508 +n10097262 +n10097477 +n10097590 +n10097842 +n10097995 +n10098245 +n10098388 
+n10098517 +n10098624 +n10098710 +n10098862 +n10099002 +n10099375 +n10101308 +n10101634 +n10101981 +n10102800 +n10103155 +n10103228 +n10103921 +n10104064 +n10104487 +n10104756 +n10104888 +n10105085 +n10105733 +n10105906 +n10106387 +n10106509 +n10106995 +n10107173 +n10107303 +n10108018 +n10108089 +n10108464 +n10108832 +n10109443 +n10109662 +n10109826 +n10110093 +n10110731 +n10110893 +n10111358 +n10111779 +n10111903 +n10112129 +n10113249 +n10113583 +n10113869 +n10114476 +n10114550 +n10114662 +n10115430 +n10115946 +n10116370 +n10116478 +n10116702 +n10117017 +n10117267 +n10117415 +n10117739 +n10117851 +n10118301 +n10118743 +n10118844 +n10119609 +n10120330 +n10120671 +n10121026 +n10121246 +n10121714 +n10121800 +n10122300 +n10122531 +n10123122 +n10123844 +n10126177 +n10126424 +n10126708 +n10127186 +n10127689 +n10128519 +n10128748 +n10129338 +n10129825 +n10130686 +n10130877 +n10131151 +n10131268 +n10131590 +n10131815 +n10132035 +n10132502 +n10134178 +n10134396 +n10134760 +n10134982 +n10135129 +n10135197 +n10135297 +n10136615 +n10136959 +n10137825 +n10138369 +n10138472 +n10139077 +n10139651 +n10140051 +n10140597 +n10140683 +n10140783 +n10140929 +n10141364 +n10141732 +n10142166 +n10142391 +n10142537 +n10142747 +n10142946 +n10143172 +n10143595 +n10143725 +n10144338 +n10145239 +n10145340 +n10145480 +n10145590 +n10145774 +n10145902 +n10146002 +n10146104 +n10146416 +n10146816 +n10146927 +n10147121 +n10147262 +n10147710 +n10147935 +n10148035 +n10148305 +n10148825 +n10149436 +n10149867 +n10150071 +n10150794 +n10150940 +n10151133 +n10151261 +n10151367 +n10151570 +n10151760 +n10152306 +n10152616 +n10152763 +n10153155 +n10153414 +n10153594 +n10153865 +n10154013 +n10154186 +n10154601 +n10155222 +n10155600 +n10155849 +n10156629 +n10156831 +n10157016 +n10157128 +n10157271 +n10158506 +n10159045 +n10159289 +n10159533 +n10160188 +n10160280 +n10160412 +n10161622 +n10162016 +n10162194 +n10162354 +n10164025 +n10164233 +n10164492 +n10165448 +n10166189 +n10166394 +n10167152 +n10167361 +n10167565 +n10167838 +n10168012 +n10168183 +n10168584 +n10168837 +n10169147 +n10169241 +n10169419 +n10169796 +n10170060 +n10170681 +n10170866 +n10171219 +n10171456 +n10171567 +n10172080 +n10173410 +n10173579 +n10173665 +n10173771 +n10174253 +n10174330 +n10174445 +n10174589 +n10174695 +n10174971 +n10175248 +n10175725 +n10176913 +n10177150 +n10178077 +n10178216 +n10179069 +n10180580 +n10180791 +n10180923 +n10181445 +n10181547 +n10181799 +n10181878 +n10182190 +n10182402 +n10183347 +n10183931 +n10184505 +n10185148 +n10185483 +n10185793 +n10186068 +n10186143 +n10186216 +n10186350 +n10186686 +n10186774 +n10187130 +n10187491 +n10187990 +n10188715 +n10188856 +n10188957 +n10189278 +n10189597 +n10190122 +n10190516 +n10191001 +n10191388 +n10191613 +n10192839 +n10193650 +n10194231 +n10194775 +n10195056 +n10195155 +n10195261 +n10195593 +n10196404 +n10196725 +n10197392 +n10198437 +n10198832 +n10199251 +n10200246 +n10200781 +n10202225 +n10202624 +n10202763 +n10203949 +n10204177 +n10204833 +n10205231 +n10205344 +n10205457 +n10205714 +n10206173 +n10206506 +n10206629 +n10207077 +n10207169 +n10208189 +n10208847 +n10208950 +n10209082 +n10209731 +n10210137 +n10210512 +n10210648 +n10210911 +n10211036 +n10211666 +n10211830 +n10212231 +n10212501 +n10212780 +n10213034 +n10213429 +n10214062 +n10214390 +n10215623 +n10216106 +n10216403 +n10217208 +n10218043 +n10218164 +n10218292 +n10219240 +n10219453 +n10219879 +n10220080 +n10220924 +n10221312 +n10221520 +n10222170 +n10222259 +n10222497 +n10222716 +n10223069 +n10223177 +n10223606 +n10224578 +n10225219 +n10225931 
+n10226413 +n10227166 +n10227266 +n10227393 +n10227490 +n10227698 +n10227793 +n10227985 +n10228278 +n10228468 +n10228592 +n10228712 +n10229883 +n10230216 +n10233248 +n10235024 +n10235269 +n10235385 +n10236304 +n10236521 +n10236842 +n10237069 +n10237196 +n10237464 +n10237556 +n10237676 +n10237799 +n10238272 +n10238375 +n10239928 +n10240082 +n10240235 +n10240417 +n10240821 +n10241024 +n10241300 +n10242328 +n10243137 +n10243273 +n10243483 +n10243664 +n10243872 +n10244108 +n10244359 +n10244913 +n10245029 +n10245341 +n10245507 +n10245639 +n10245863 +n10246317 +n10246395 +n10246703 +n10247358 +n10247880 +n10248008 +n10248198 +n10248377 +n10249191 +n10249270 +n10249459 +n10249869 +n10249950 +n10250712 +n10251329 +n10251612 +n10252075 +n10252222 +n10252354 +n10252547 +n10253122 +n10253296 +n10253479 +n10253611 +n10253703 +n10255459 +n10257221 +n10258602 +n10258786 +n10259348 +n10259780 +n10259997 +n10260473 +n10260706 +n10260800 +n10261211 +n10261511 +n10261624 +n10261862 +n10262343 +n10262445 +n10262561 +n10262655 +n10262880 +n10263146 +n10263411 +n10263790 +n10265281 +n10265801 +n10265891 +n10266016 +n10266328 +n10266848 +n10267166 +n10267311 +n10267865 +n10268629 +n10269199 +n10269289 +n10271677 +n10272782 +n10272913 +n10273064 +n10274173 +n10274318 +n10274815 +n10275249 +n10275395 +n10275848 +n10276045 +n10276477 +n10276942 +n10277027 +n10277638 +n10277815 +n10277912 +n10278456 +n10279018 +n10279778 +n10280034 +n10280130 +n10280598 +n10280674 +n10281546 +n10281770 +n10281896 +n10282482 +n10282672 +n10283170 +n10283366 +n10283546 +n10284064 +n10284871 +n10284965 +n10286282 +n10286539 +n10286749 +n10288964 +n10289039 +n10289176 +n10289462 +n10289766 +n10290422 +n10290541 +n10290813 +n10290919 +n10291110 +n10291469 +n10291822 +n10291942 +n10292316 +n10293332 +n10293590 +n10293861 +n10294020 +n10294139 +n10295371 +n10295479 +n10296176 +n10296444 +n10297234 +n10297367 +n10297531 +n10297841 +n10298202 +n10298271 +n10298647 +n10298912 +n10299125 +n10299250 +n10299700 +n10299875 +n10300041 +n10300154 +n10300303 +n10300500 +n10300654 +n10300829 +n10302576 +n10302700 +n10302905 +n10303037 +n10303814 +n10304086 +n10304650 +n10304914 +n10305635 +n10305802 +n10306004 +n10306279 +n10306496 +n10306595 +n10306890 +n10307114 +n10308066 +n10308168 +n10308275 +n10308504 +n10308653 +n10308732 +n10310783 +n10311506 +n10311661 +n10312287 +n10312491 +n10312600 +n10313000 +n10313239 +n10313441 +n10313724 +n10314054 +n10314182 +n10314517 +n10314836 +n10315217 +n10315456 +n10315561 +n10315730 +n10316360 +n10316527 +n10316862 +n10317007 +n10317500 +n10317963 +n10318293 +n10318607 +n10318686 +n10319313 +n10320484 +n10320863 +n10321126 +n10321340 +n10321632 +n10321882 +n10322238 +n10323634 +n10323752 +n10323999 +n10324560 +n10325549 +n10325774 +n10326776 +n10327143 +n10327987 +n10328123 +n10328328 +n10328437 +n10328696 +n10328941 +n10329035 +n10330593 +n10330931 +n10331098 +n10331167 +n10331258 +n10331347 +n10331841 +n10332110 +n10332385 +n10332861 +n10332953 +n10333044 +n10333165 +n10333317 +n10333439 +n10333601 +n10333838 +n10334009 +n10334461 +n10334782 +n10335246 +n10335801 +n10335931 +n10336411 +n10336904 +n10337488 +n10338231 +n10338391 +n10339179 +n10339251 +n10339717 +n10340312 +n10341243 +n10341343 +n10341446 +n10341573 +n10341955 +n10342180 +n10342367 +n10342543 +n10342893 +n10342992 +n10343088 +n10343355 +n10343449 +n10343554 +n10343869 +n10344121 +n10344203 +n10344319 +n10344656 +n10344774 +n10345015 +n10345100 +n10345302 +n10345422 +n10345659 +n10346015 +n10347204 +n10347446 +n10348526 +n10349243 +n10349750 
+n10349836 +n10350220 +n10350774 +n10351064 +n10353016 +n10353355 +n10353928 +n10354265 +n10354754 +n10355142 +n10355306 +n10355449 +n10355688 +n10355806 +n10356450 +n10356877 +n10357012 +n10357613 +n10357737 +n10358032 +n10358124 +n10358575 +n10359117 +n10359422 +n10359546 +n10359659 +n10360366 +n10360747 +n10361060 +n10361194 +n10361296 +n10361525 +n10362003 +n10362319 +n10362557 +n10363445 +n10363573 +n10364198 +n10364502 +n10365514 +n10366145 +n10366276 +n10366966 +n10368291 +n10368528 +n10368624 +n10368711 +n10368798 +n10369095 +n10369317 +n10369417 +n10369528 +n10369699 +n10369955 +n10370381 +n10370955 +n10371052 +n10371221 +n10371330 +n10371450 +n10373390 +n10373525 +n10374541 +n10374849 +n10374943 +n10375052 +n10375314 +n10375402 +n10376523 +n10376890 +n10377021 +n10377185 +n10377291 +n10377542 +n10377633 +n10378026 +n10378113 +n10378780 +n10379376 +n10380126 +n10380499 +n10380672 +n10381804 +n10381981 +n10382157 +n10382302 +n10382480 +n10382710 +n10382825 +n10383094 +n10383237 +n10383505 +n10383816 +n10384214 +n10384392 +n10384496 +n10385566 +n10386196 +n10386754 +n10386874 +n10386984 +n10387196 +n10387324 +n10387836 +n10389865 +n10389976 +n10390600 +n10390698 +n10390807 +n10391416 +n10393909 +n10394434 +n10394786 +n10395073 +n10395209 +n10395390 +n10395828 +n10396106 +n10396337 +n10396727 +n10396908 +n10397001 +n10397142 +n10397392 +n10399130 +n10400003 +n10400108 +n10400205 +n10400437 +n10400618 +n10400998 +n10401204 +n10401331 +n10401639 +n10402709 +n10402824 +n10403633 +n10403876 +n10404426 +n10404998 +n10405540 +n10405694 +n10406266 +n10406391 +n10406765 +n10407310 +n10407954 +n10408809 +n10409459 +n10409752 +n10410246 +n10410996 +n10411356 +n10411551 +n10411867 +n10414239 +n10414768 +n10414865 +n10415037 +n10416567 +n10417288 +n10417424 +n10417551 +n10417682 +n10417843 +n10417969 +n10418101 +n10418735 +n10419047 +n10419472 +n10419630 +n10419785 +n10420031 +n10420277 +n10420507 +n10420649 +n10421016 +n10421470 +n10421956 +n10422405 +n10425946 +n10426454 +n10426630 +n10427223 +n10427359 +n10427764 +n10428004 +n10431122 +n10431625 +n10432189 +n10432441 +n10432875 +n10432957 +n10433077 +n10433452 +n10433610 +n10433737 +n10435169 +n10435251 +n10435716 +n10435988 +n10436334 +n10437014 +n10437137 +n10437262 +n10437698 +n10438172 +n10438619 +n10438842 +n10439373 +n10439523 +n10439727 +n10439851 +n10441037 +n10441124 +n10441694 +n10441962 +n10442093 +n10442232 +n10442417 +n10442573 +n10443032 +n10443659 +n10443830 +n10444194 +n10448322 +n10448455 +n10449664 +n10450038 +n10450161 +n10450303 +n10451450 +n10451590 +n10451858 +n10453184 +n10455619 +n10456070 +n10456138 +n10456696 +n10457214 +n10457444 +n10457903 +n10458111 +n10458356 +n10458596 +n10459882 +n10460033 +n10461060 +n10462588 +n10462751 +n10462860 +n10464052 +n10464542 +n10464711 +n10464870 +n10465002 +n10465451 +n10465831 +n10466198 +n10466564 +n10466918 +n10467179 +n10467395 +n10468750 +n10469611 +n10469874 +n10470779 +n10471640 +n10471732 +n10471859 +n10472129 +n10472447 +n10473453 +n10473562 +n10473789 +n10473917 +n10474064 +n10474343 +n10474446 +n10474645 +n10475835 +n10475940 +n10476467 +n10477713 +n10477955 +n10478118 +n10478293 +n10478462 +n10478827 +n10478960 +n10479135 +n10479328 +n10481167 +n10481268 +n10482054 +n10482220 +n10482587 +n10482921 +n10483138 +n10483395 +n10483799 +n10483890 +n10484858 +n10485298 +n10485883 +n10486166 +n10486236 +n10486561 +n10487182 +n10487363 +n10487592 +n10488016 +n10488309 +n10488656 +n10489426 +n10490421 +n10491998 +n10492086 +n10492727 +n10493199 +n10493419 +n10493685 +n10493835 
+n10493922 +n10494195 +n10494373 +n10495167 +n10495421 +n10495555 +n10495756 +n10496393 +n10496489 +n10497135 +n10497534 +n10497645 +n10498046 +n10498699 +n10498816 +n10498986 +n10499110 +n10499232 +n10499355 +n10499631 +n10499857 +n10500217 +n10500419 +n10500603 +n10500824 +n10500942 +n10501453 +n10501635 +n10502046 +n10502329 +n10502950 +n10503818 +n10504090 +n10504206 +n10505347 +n10505613 +n10505732 +n10505942 +n10506336 +n10506544 +n10506915 +n10507070 +n10507380 +n10507482 +n10507565 +n10507692 +n10508141 +n10508379 +n10508710 +n10509063 +n10509161 +n10509810 +n10510245 +n10510974 +n10511771 +n10512201 +n10512372 +n10512708 +n10512859 +n10513509 +n10513823 +n10513938 +n10514051 +n10514121 +n10514255 +n10514429 +n10514784 +n10515863 +n10516527 +n10517137 +n10517283 +n10518349 +n10519126 +n10519494 +n10519984 +n10520286 +n10520544 +n10520964 +n10521100 +n10521662 +n10521853 +n10522035 +n10522324 +n10522759 +n10523341 +n10524076 +n10524223 +n10524869 +n10525134 +n10525436 +n10525617 +n10525878 +n10526534 +n10527147 +n10527334 +n10528023 +n10528148 +n10528493 +n10529231 +n10530150 +n10530383 +n10530571 +n10530959 +n10531109 +n10531445 +n10531838 +n10533874 +n10533983 +n10536134 +n10536274 +n10536416 +n10537708 +n10537906 +n10538629 +n10538733 +n10538853 +n10539015 +n10539160 +n10539278 +n10540114 +n10540252 +n10540656 +n10541833 +n10542608 +n10542761 +n10542888 +n10543161 +n10543937 +n10544232 +n10544748 +n10545792 +n10546428 +n10546633 +n10548419 +n10548537 +n10548681 +n10549510 +n10550252 +n10550369 +n10550468 +n10551576 +n10552393 +n10553140 +n10553235 +n10554024 +n10554141 +n10554846 +n10555059 +n10555430 +n10556033 +n10556518 +n10556704 +n10556825 +n10557246 +n10557854 +n10559009 +n10559288 +n10559508 +n10559683 +n10559996 +n10560106 +n10560637 +n10561222 +n10561320 +n10561736 +n10562135 +n10562283 +n10562509 +n10562968 +n10563314 +n10563403 +n10563711 +n10564098 +n10565502 +n10565667 +n10566072 +n10567613 +n10567722 +n10567848 +n10568200 +n10568358 +n10568443 +n10568608 +n10568915 +n10569011 +n10569179 +n10570019 +n10570704 +n10571907 +n10572706 +n10572889 +n10573957 +n10574311 +n10574538 +n10574840 +n10575463 +n10575594 +n10575787 +n10576223 +n10576316 +n10576676 +n10576818 +n10576962 +n10577182 +n10577284 +n10577710 +n10577820 +n10578021 +n10578162 +n10578471 +n10578656 +n10579062 +n10579549 +n10580030 +n10580437 +n10580535 +n10581648 +n10581890 +n10582604 +n10582746 +n10583387 +n10583790 +n10585077 +n10585217 +n10585628 +n10586166 +n10586265 +n10586444 +n10586903 +n10586998 +n10588074 +n10588357 +n10588724 +n10588965 +n10589666 +n10590146 +n10590239 +n10590452 +n10590903 +n10591072 +n10591811 +n10592049 +n10592811 +n10593521 +n10594147 +n10594523 +n10594857 +n10595164 +n10595647 +n10596517 +n10596899 +n10597505 +n10597745 +n10597889 +n10598013 +n10598181 +n10598459 +n10598904 +n10599215 +n10599806 +n10601234 +n10601362 +n10602119 +n10602470 +n10602985 +n10603528 +n10603851 +n10604275 +n10604380 +n10604634 +n10604880 +n10604979 +n10605253 +n10605737 +n10607291 +n10607478 +n10609092 +n10609198 +n10610465 +n10610850 +n10611267 +n10611613 +n10612210 +n10612373 +n10612518 +n10613996 +n10614507 +n10614629 +n10615179 +n10615334 +n10616578 +n10617024 +n10617193 +n10617397 +n10618234 +n10618342 +n10618465 +n10618685 +n10618848 +n10619492 +n10619642 +n10619888 +n10620212 +n10620586 +n10620758 +n10621294 +n10621400 +n10621514 +n10622053 +n10624074 +n10624310 +n10624437 +n10624540 +n10625860 +n10626630 +n10627252 +n10628097 +n10628644 +n10629329 +n10629647 +n10629939 +n10630093 +n10630188 
+n10631131 +n10631309 +n10631654 +n10632576 +n10633298 +n10633450 +n10634464 +n10634849 +n10634990 +n10635788 +n10636488 +n10637483 +n10638922 +n10639238 +n10639359 +n10639637 +n10639817 +n10641223 +n10642596 +n10642705 +n10643095 +n10643837 +n10643937 +n10644598 +n10645017 +n10645223 +n10646032 +n10646140 +n10646433 +n10646641 +n10646780 +n10646942 +n10647745 +n10648237 +n10648696 +n10649197 +n10649308 +n10650162 +n10652605 +n10652703 +n10654015 +n10654211 +n10654321 +n10654827 +n10654932 +n10655169 +n10655442 +n10655594 +n10655730 +n10655986 +n10656120 +n10656223 +n10656969 +n10657306 +n10657556 +n10657835 +n10658304 +n10659042 +n10659762 +n10660128 +n10660621 +n10660883 +n10661002 +n10661216 +n10661563 +n10661732 +n10663315 +n10663549 +n10665302 +n10665587 +n10665698 +n10666752 +n10667477 +n10667709 +n10667863 +n10668450 +n10668666 +n10669991 +n10671042 +n10671613 +n10671736 +n10671898 +n10672371 +n10672540 +n10672662 +n10673296 +n10673776 +n10674130 +n10674713 +n10675010 +n10675142 +n10675609 +n10676018 +n10676434 +n10676569 +n10678937 +n10679174 +n10679503 +n10679610 +n10679723 +n10680609 +n10680796 +n10681194 +n10681557 +n10682713 +n10682953 +n10683675 +n10684146 +n10684630 +n10684827 +n10685398 +n10686073 +n10686517 +n10686694 +n10686885 +n10688356 +n10688811 +n10689306 +n10690268 +n10690421 +n10690648 +n10691318 +n10691937 +n10692090 +n10692482 +n10692883 +n10693235 +n10693334 +n10693824 +n10694258 +n10694939 +n10695450 +n10696101 +n10696508 +n10697135 +n10697282 +n10698368 +n10699558 +n10699752 +n10699981 +n10700105 +n10700201 +n10700640 +n10700963 +n10701180 +n10701644 +n10701962 +n10702167 +n10702615 +n10703221 +n10703336 +n10703480 +n10703692 +n10704238 +n10704712 +n10704886 +n10705448 +n10705615 +n10706812 +n10707134 +n10707233 +n10707707 +n10708292 +n10708454 +n10709529 +n10710171 +n10710259 +n10710778 +n10710913 +n10711483 +n10711766 +n10712229 +n10712374 +n10712474 +n10712690 +n10712835 +n10713254 +n10713686 +n10713843 +n10714195 +n10715030 +n10715347 +n10715789 +n10716576 +n10716864 +n10717055 +n10717196 +n10717337 +n10718131 +n10718349 +n10718509 +n10718665 +n10718952 +n10719036 +n10719132 +n10719267 +n10719807 +n10720197 +n10720453 +n10720964 +n10721124 +n10721321 +n10721612 +n10721708 +n10721819 +n10722029 +n10722575 +n10722965 +n10723230 +n10723597 +n10724132 +n10724372 +n10724570 +n10725280 +n10726031 +n10726786 +n10727016 +n10727171 +n10727458 +n10728117 +n10728233 +n10728624 +n10728998 +n10729330 +n10730542 +n10730728 +n10731013 +n10731732 +n10732010 +n10732521 +n10732854 +n10732967 +n10733820 +n10734394 +n10734741 +n10734891 +n10734963 +n10735173 +n10735298 +n10735984 +n10737103 +n10737264 +n10738111 +n10738215 +n10738670 +n10738871 +n10739135 +n10739297 +n10739391 +n10740594 +n10740732 +n10740868 +n10741152 +n10741367 +n10741493 +n10742005 +n10742111 +n10742546 +n10742997 +n10743124 +n10743356 +n10744078 +n10744164 +n10745006 +n10745770 +n10746931 +n10747119 +n10747424 +n10747548 +n10747965 +n10748142 +n10748506 +n10748620 +n10749928 +n10750031 +n10750188 +n10750640 +n10751026 +n10751152 +n10751265 +n10751710 +n10752480 +n10753061 +n10753182 +n10753339 +n10753442 +n10753989 +n10754189 +n10754281 +n10754449 +n10755080 +n10755164 +n10755394 +n10755648 +n10756061 +n10756148 +n10756261 +n10756641 +n10756837 +n10757050 +n10757492 +n10758337 +n10758445 +n10758949 +n10759151 +n10759331 +n10759982 +n10760199 +n10760622 +n10760951 +n10761190 +n10761326 +n10761519 +n10762212 +n10762480 +n10763075 +n10763245 +n10763383 +n10763620 +n10764465 +n10764622 +n10764719 +n10765305 
+n10765587 +n10765679 +n10765885 +n10766260 +n10768148 +n10768272 +n10768903 +n10769084 +n10769188 +n10769321 +n10769459 +n10771066 +n10772092 +n10772580 +n10772937 +n10773665 +n10773800 +n10774329 +n10774756 +n10775003 +n10775128 +n10776052 +n10776339 +n10776887 +n10777299 +n10778044 +n10778148 +n10778711 +n10778999 +n10779610 +n10779897 +n10779995 +n10780284 +n10780632 +n10781236 +n10781817 +n10782362 +n10782471 +n10782791 +n10782940 +n10783240 +n10783539 +n10783646 +n10783734 +n10784113 +n10784544 +n10784922 +n10785480 +n10787470 +n10788852 +n10789415 +n10789709 +n10791115 +n10791221 +n10791820 +n10791890 +n10792335 +n10792506 +n10792856 +n10793570 +n10793799 +n10794014 +n10801561 +n10801802 +n10802507 +n10802621 +n10802953 +n10803031 +n10803282 +n10803978 +n10804287 +n10804636 +n10804732 +n10805501 +n10806113 +n10994097 +n11100798 +n11196627 +n11242849 +n11318824 +n11346873 +n11448153 +n11487732 +n11508382 +n11511327 +n11524451 +n11530008 +n11531193 +n11531334 +n11532682 +n11533212 +n11533999 +n11536567 +n11536673 +n11537327 +n11539289 +n11542137 +n11542640 +n11544015 +n11545350 +n11545524 +n11545714 +n11547562 +n11547855 +n11548728 +n11548870 +n11549009 +n11549245 +n11549779 +n11549895 +n11552133 +n11552386 +n11552594 +n11552806 +n11552976 +n11553240 +n11553522 +n11596108 +n11597657 +n11598287 +n11598686 +n11598886 +n11599324 +n11600372 +n11601177 +n11601333 +n11601918 +n11602091 +n11602478 +n11602873 +n11603246 +n11603462 +n11603835 +n11604046 +n11608250 +n11609475 +n11609684 +n11609862 +n11610047 +n11610215 +n11610437 +n11610602 +n11610823 +n11611087 +n11611233 +n11611356 +n11611561 +n11611758 +n11612018 +n11612235 +n11612349 +n11612575 +n11612923 +n11613219 +n11613459 +n11613692 +n11613867 +n11614039 +n11614250 +n11614420 +n11614713 +n11615026 +n11615259 +n11615387 +n11615607 +n11615812 +n11615967 +n11616260 +n11616486 +n11616662 +n11616852 +n11617090 +n11617272 +n11617631 +n11617878 +n11618079 +n11618290 +n11618525 +n11618861 +n11619227 +n11619455 +n11619687 +n11619845 +n11620016 +n11620389 +n11620673 +n11621029 +n11621281 +n11621547 +n11621727 +n11621950 +n11622184 +n11622368 +n11622591 +n11622771 +n11623105 +n11623815 +n11623967 +n11624192 +n11624531 +n11625003 +n11625223 +n11625391 +n11625632 +n11625804 +n11626010 +n11626152 +n11626409 +n11626585 +n11626826 +n11627168 +n11627512 +n11627714 +n11627908 +n11628087 +n11628456 +n11628793 +n11629047 +n11629354 +n11630017 +n11630489 +n11631159 +n11631405 +n11631619 +n11631854 +n11631985 +n11632167 +n11632376 +n11632619 +n11632929 +n11633284 +n11634736 +n11635152 +n11635433 +n11635830 +n11636204 +n11636835 +n11639084 +n11639306 +n11639445 +n11640132 +n11643835 +n11644046 +n11644226 +n11644462 +n11644872 +n11645163 +n11645590 +n11645914 +n11646167 +n11646344 +n11646517 +n11646694 +n11646955 +n11647306 +n11647703 +n11647868 +n11648039 +n11648268 +n11648776 +n11649150 +n11649359 +n11649878 +n11650160 +n11650307 +n11650430 +n11650558 +n11650759 +n11652039 +n11652217 +n11652376 +n11652578 +n11652753 +n11652966 +n11653126 +n11653570 +n11653904 +n11654293 +n11654438 +n11654984 +n11655152 +n11655592 +n11655974 +n11656123 +n11656549 +n11656771 +n11657585 +n11658331 +n11658544 +n11658709 +n11659248 +n11659627 +n11660300 +n11661372 +n11661909 +n11662128 +n11662371 +n11662585 +n11662937 +n11663263 +n11664418 +n11665372 +n11666854 +n11668117 +n11669786 +n11669921 +n11672269 +n11672400 +n11674019 +n11674332 +n11675025 +n11675404 +n11675738 +n11676500 +n11676743 +n11676850 +n11677485 +n11677902 +n11678010 +n11678299 +n11678377 +n11679378 +n11680457 
+n11680596 +n11682659 +n11683216 +n11683838 +n11684264 +n11684499 +n11684654 +n11685091 +n11685621 +n11686195 +n11686652 +n11686780 +n11686912 +n11687071 +n11687432 +n11687789 +n11687964 +n11688069 +n11688378 +n11689197 +n11689367 +n11689483 +n11689678 +n11689815 +n11689957 +n11690088 +n11690254 +n11690455 +n11691046 +n11691857 +n11692265 +n11692792 +n11693981 +n11694300 +n11694469 +n11694664 +n11694866 +n11695085 +n11695285 +n11695599 +n11695974 +n11696450 +n11696935 +n11697560 +n11697802 +n11698042 +n11698245 +n11699442 +n11699751 +n11700058 +n11700279 +n11700864 +n11701066 +n11701302 +n11702713 +n11703669 +n11704093 +n11704620 +n11704791 +n11705171 +n11705387 +n11705573 +n11705776 +n11706325 +n11706761 +n11706942 +n11707229 +n11707827 +n11708658 +n11708857 +n11709045 +n11709205 +n11709674 +n11710136 +n11710393 +n11710658 +n11710827 +n11710987 +n11711289 +n11711537 +n11711764 +n11711971 +n11712282 +n11713164 +n11713370 +n11713763 +n11714382 +n11715430 +n11715678 +n11716698 +n11717399 +n11717577 +n11718296 +n11718681 +n11719286 +n11720353 +n11720643 +n11720891 +n11721337 +n11721642 +n11722036 +n11722342 +n11722466 +n11722621 +n11722982 +n11723227 +n11723452 +n11723770 +n11723986 +n11724109 +n11724660 +n11725015 +n11725311 +n11725480 +n11725623 +n11725821 +n11725973 +n11726145 +n11726269 +n11726433 +n11726707 +n11727091 +n11727358 +n11727540 +n11727738 +n11728099 +n11728769 +n11728945 +n11729142 +n11729478 +n11729860 +n11730015 +n11730458 +n11730602 +n11730750 +n11730933 +n11731157 +n11731659 +n11732052 +n11732567 +n11733054 +n11733312 +n11733548 +n11734493 +n11734698 +n11735053 +n11735570 +n11735977 +n11736362 +n11736694 +n11736851 +n11737009 +n11737125 +n11737534 +n11738547 +n11738997 +n11739365 +n11739978 +n11740414 +n11741175 +n11741350 +n11741575 +n11741797 +n11742310 +n11742878 +n11744011 +n11744108 +n11744471 +n11745817 +n11746600 +n11747468 +n11748002 +n11748811 +n11749112 +n11749603 +n11750173 +n11750508 +n11750989 +n11751765 +n11751974 +n11752578 +n11752798 +n11752937 +n11753143 +n11753355 +n11753562 +n11753700 +n11754893 +n11756092 +n11756329 +n11756669 +n11756870 +n11757017 +n11757190 +n11757653 +n11757851 +n11758122 +n11758276 +n11758483 +n11758799 +n11759224 +n11759404 +n11759609 +n11759853 +n11760785 +n11761202 +n11761650 +n11761836 +n11762018 +n11762433 +n11762927 +n11763142 +n11763625 +n11763874 +n11764478 +n11764814 +n11765568 +n11766046 +n11766189 +n11766432 +n11767354 +n11767877 +n11768816 +n11769176 +n11769621 +n11769803 +n11770256 +n11771147 +n11771539 +n11771746 +n11771924 +n11772408 +n11772879 +n11773408 +n11773628 +n11773987 +n11774513 +n11774972 +n11775340 +n11775626 +n11776234 +n11777080 +n11778092 +n11778257 +n11779300 +n11780148 +n11780424 +n11781176 +n11782036 +n11782266 +n11782761 +n11782878 +n11783162 +n11783920 +n11784126 +n11784497 +n11785276 +n11785668 +n11785875 +n11786131 +n11786539 +n11786843 +n11787190 +n11788039 +n11788727 +n11789066 +n11789438 +n11789589 +n11789962 +n11790089 +n11790788 +n11790936 +n11791341 +n11791569 +n11792029 +n11792341 +n11792742 +n11793403 +n11793779 +n11794024 +n11794139 +n11794519 +n11795049 +n11795216 +n11795580 +n11796005 +n11796188 +n11797321 +n11797508 +n11797981 +n11798270 +n11798496 +n11798688 +n11798978 +n11799331 +n11799732 +n11800236 +n11800565 +n11801392 +n11801665 +n11801891 +n11802410 +n11802586 +n11802800 +n11802995 +n11805255 +n11805544 +n11805956 +n11806219 +n11806369 +n11806521 +n11806679 +n11806814 +n11807108 +n11807525 +n11807696 +n11807979 +n11808299 +n11808468 +n11808721 +n11808932 +n11809094 +n11809271 
+n11809437 +n11809594 +n11809754 +n11810030 +n11810358 +n11811059 +n11811473 +n11811706 +n11811921 +n11812094 +n11812910 +n11813077 +n11814584 +n11814996 +n11815491 +n11815721 +n11815918 +n11816121 +n11816336 +n11816649 +n11816829 +n11817160 +n11817501 +n11817914 +n11818069 +n11818636 +n11819509 +n11819912 +n11820965 +n11821184 +n11822300 +n11823043 +n11823305 +n11823436 +n11823756 +n11824146 +n11824344 +n11824747 +n11825351 +n11825749 +n11826198 +n11826569 +n11827541 +n11828577 +n11828973 +n11829205 +n11829672 +n11829922 +n11830045 +n11830252 +n11830400 +n11830714 +n11830906 +n11831100 +n11831297 +n11831521 +n11832214 +n11832480 +n11832671 +n11832899 +n11833373 +n11833749 +n11834272 +n11834654 +n11834890 +n11835251 +n11836327 +n11836722 +n11837204 +n11837351 +n11837562 +n11837743 +n11837970 +n11838413 +n11838916 +n11839460 +n11839568 +n11839823 +n11840067 +n11840246 +n11840476 +n11840764 +n11841247 +n11843441 +n11844371 +n11844892 +n11845557 +n11845793 +n11845913 +n11846312 +n11846425 +n11846765 +n11847169 +n11848479 +n11848867 +n11849271 +n11849467 +n11849871 +n11849983 +n11850521 +n11850918 +n11851258 +n11851578 +n11851839 +n11852028 +n11852148 +n11852531 +n11853079 +n11853356 +n11853813 +n11854479 +n11855274 +n11855435 +n11855553 +n11855842 +n11856573 +n11857696 +n11857875 +n11858077 +n11858703 +n11858814 +n11859275 +n11859472 +n11859737 +n11860208 +n11860555 +n11861238 +n11861487 +n11861641 +n11861853 +n11862835 +n11863467 +n11863877 +n11865071 +n11865276 +n11865429 +n11865574 +n11865874 +n11866248 +n11866706 +n11867311 +n11868814 +n11869351 +n11869689 +n11870044 +n11870418 +n11870747 +n11871059 +n11871496 +n11871748 +n11872146 +n11872324 +n11872658 +n11873182 +n11873612 +n11874081 +n11874423 +n11874878 +n11875523 +n11875691 +n11875938 +n11876204 +n11876432 +n11876634 +n11876803 +n11877193 +n11877283 +n11877473 +n11877646 +n11877860 +n11878101 +n11878283 +n11878633 +n11879054 +n11879722 +n11879895 +n11881189 +n11882074 +n11882237 +n11882426 +n11882636 +n11882821 +n11882972 +n11883328 +n11883628 +n11883945 +n11884384 +n11884967 +n11885856 +n11887119 +n11887310 +n11887476 +n11887750 +n11888061 +n11888424 +n11888800 +n11889205 +n11889619 +n11890022 +n11890150 +n11890884 +n11891175 +n11892029 +n11892181 +n11892637 +n11892817 +n11893640 +n11893916 +n11894327 +n11894558 +n11894770 +n11895092 +n11895472 +n11895714 +n11896141 +n11896722 +n11897116 +n11897466 +n11898639 +n11898775 +n11899223 +n11899762 +n11899921 +n11900569 +n11901294 +n11901452 +n11901597 +n11901759 +n11901977 +n11902200 +n11902389 +n11902709 +n11902982 +n11903333 +n11903671 +n11904109 +n11904274 +n11905392 +n11905749 +n11906127 +n11906514 +n11906917 +n11907100 +n11907405 +n11907689 +n11908549 +n11908846 +n11909864 +n11910271 +n11910460 +n11910666 +n11915214 +n11915658 +n11915899 +n11916467 +n11916696 +n11917407 +n11917835 +n11918286 +n11918473 +n11918808 +n11919447 +n11919761 +n11919975 +n11920133 +n11920498 +n11920663 +n11920998 +n11921395 +n11921792 +n11922661 +n11922755 +n11922839 +n11922926 +n11923174 +n11923397 +n11923637 +n11924014 +n11924445 +n11924849 +n11925303 +n11925450 +n11925898 +n11926365 +n11926833 +n11926976 +n11927215 +n11927740 +n11928352 +n11928858 +n11929743 +n11930038 +n11930203 +n11930353 +n11930571 +n11930788 +n11930994 +n11931135 +n11931540 +n11931918 +n11932745 +n11932927 +n11933099 +n11933257 +n11933387 +n11933546 +n11933728 +n11933903 +n11934041 +n11934239 +n11934463 +n11934616 +n11934807 +n11935027 +n11935187 +n11935330 +n11935469 +n11935627 +n11935715 +n11935794 +n11935877 +n11935953 +n11936027 
+n11936113 +n11936199 +n11936287 +n11936369 +n11936448 +n11936539 +n11936624 +n11936707 +n11936782 +n11936864 +n11936946 +n11937023 +n11937102 +n11937195 +n11937278 +n11937360 +n11937446 +n11937692 +n11938556 +n11939180 +n11939491 +n11939699 +n11940006 +n11940349 +n11940599 +n11940750 +n11941094 +n11941478 +n11941924 +n11942659 +n11943133 +n11943407 +n11943660 +n11943992 +n11944196 +n11944751 +n11944954 +n11945367 +n11945514 +n11945783 +n11946051 +n11946313 +n11946727 +n11946918 +n11947251 +n11947629 +n11947802 +n11948044 +n11948264 +n11948469 +n11948864 +n11949015 +n11949402 +n11949857 +n11950345 +n11950686 +n11950877 +n11951052 +n11951511 +n11951820 +n11952346 +n11952541 +n11953038 +n11953339 +n11953610 +n11953884 +n11954161 +n11954345 +n11954484 +n11954642 +n11954798 +n11955040 +n11955153 +n11955532 +n11955896 +n11956348 +n11956850 +n11957317 +n11957514 +n11957678 +n11958080 +n11958499 +n11958888 +n11959259 +n11959632 +n11959862 +n11960245 +n11960673 +n11961100 +n11961446 +n11961871 +n11962272 +n11962667 +n11962994 +n11963572 +n11963932 +n11964446 +n11964848 +n11965218 +n11965627 +n11965962 +n11966083 +n11966215 +n11966385 +n11966617 +n11966896 +n11967142 +n11967315 +n11967744 +n11967878 +n11968519 +n11968704 +n11968931 +n11969166 +n11969607 +n11969806 +n11970101 +n11970298 +n11970586 +n11971248 +n11971406 +n11971783 +n11971927 +n11972291 +n11972759 +n11972959 +n11973341 +n11973634 +n11973749 +n11974373 +n11974557 +n11974888 +n11975254 +n11976170 +n11976314 +n11976511 +n11976933 +n11977303 +n11977660 +n11977887 +n11978233 +n11978551 +n11978713 +n11978961 +n11979187 +n11979354 +n11979527 +n11979715 +n11979964 +n11980318 +n11980682 +n11981192 +n11981475 +n11982115 +n11982545 +n11982939 +n11983375 +n11983606 +n11984144 +n11984542 +n11985053 +n11985321 +n11985739 +n11985903 +n11986511 +n11986729 +n11987126 +n11987349 +n11987511 +n11988132 +n11988596 +n11988893 +n11989087 +n11989393 +n11989869 +n11990167 +n11990313 +n11990627 +n11990920 +n11991263 +n11991549 +n11991777 +n11992479 +n11992806 +n11993203 +n11993444 +n11993675 +n11994150 +n11995092 +n11995396 +n11996251 +n11996677 +n11997032 +n11997160 +n11997969 +n11998492 +n11998888 +n11999278 +n11999656 +n12000191 +n12001294 +n12001707 +n12001924 +n12002428 +n12002651 +n12002826 +n12003167 +n12003696 +n12004120 +n12004547 +n12004987 +n12005656 +n12006306 +n12006766 +n12006930 +n12007196 +n12007406 +n12007766 +n12008252 +n12008487 +n12008749 +n12009047 +n12009420 +n12009792 +n12010628 +n12010815 +n12011370 +n12011620 +n12012111 +n12012253 +n12012510 +n12013035 +n12013511 +n12013701 +n12014085 +n12014355 +n12014923 +n12015221 +n12015525 +n12015959 +n12016434 +n12016567 +n12016777 +n12016914 +n12017127 +n12017326 +n12017511 +n12017664 +n12017853 +n12018014 +n12018100 +n12018188 +n12018271 +n12018363 +n12018447 +n12018530 +n12018760 +n12019035 +n12019827 +n12020184 +n12020507 +n12020736 +n12020941 +n12022054 +n12022382 +n12022821 +n12023108 +n12023407 +n12023726 +n12024176 +n12024445 +n12024690 +n12024805 +n12025220 +n12026018 +n12026476 +n12026981 +n12027222 +n12027658 +n12028424 +n12029039 +n12029635 +n12030092 +n12030654 +n12030908 +n12031139 +n12031388 +n12031547 +n12031927 +n12032429 +n12032686 +n12033139 +n12033504 +n12033709 +n12034141 +n12034384 +n12034594 +n12035631 +n12035907 +n12036067 +n12036226 +n12036939 +n12037499 +n12037691 +n12038038 +n12038208 +n12038406 +n12038585 +n12038760 +n12038898 +n12039317 +n12041446 +n12043444 +n12043673 +n12043836 +n12044041 +n12044467 +n12044784 +n12045157 +n12045514 +n12045860 +n12046028 +n12046428 
+n12046815 +n12047345 +n12047884 +n12048056 +n12048399 +n12048928 +n12049282 +n12049562 +n12050533 +n12050959 +n12051103 +n12051514 +n12051792 +n12052267 +n12052447 +n12052787 +n12053405 +n12053690 +n12053962 +n12054195 +n12055073 +n12055516 +n12056099 +n12056217 +n12056601 +n12056758 +n12056990 +n12057211 +n12057447 +n12057660 +n12057895 +n12058192 +n12058630 +n12058822 +n12059314 +n12059625 +n12060546 +n12061104 +n12061380 +n12061614 +n12062105 +n12062468 +n12062626 +n12062781 +n12063211 +n12063639 +n12064389 +n12064591 +n12065316 +n12065649 +n12065777 +n12066018 +n12066261 +n12066451 +n12066630 +n12066821 +n12067029 +n12067193 +n12067433 +n12067672 +n12067817 +n12068138 +n12068432 +n12068615 +n12069009 +n12069217 +n12069679 +n12070016 +n12070381 +n12070583 +n12070712 +n12071259 +n12071477 +n12071744 +n12072210 +n12072722 +n12073217 +n12073554 +n12073991 +n12074408 +n12074867 +n12075010 +n12075151 +n12075299 +n12075830 +n12076223 +n12076577 +n12076852 +n12077244 +n12077944 +n12078172 +n12078451 +n12078747 +n12079120 +n12079523 +n12079963 +n12080395 +n12080588 +n12080820 +n12081215 +n12081649 +n12082131 +n12083113 +n12083591 +n12083847 +n12084158 +n12084400 +n12084555 +n12084890 +n12085267 +n12085664 +n12086012 +n12086192 +n12086539 +n12086778 +n12087961 +n12088223 +n12088327 +n12088495 +n12088909 +n12089320 +n12089496 +n12089846 +n12090890 +n12091213 +n12091377 +n12091550 +n12091697 +n12091953 +n12092262 +n12092417 +n12092629 +n12092930 +n12093329 +n12093600 +n12093885 +n12094244 +n12094401 +n12094612 +n12095020 +n12095281 +n12095412 +n12095543 +n12095647 +n12095934 +n12096089 +n12096395 +n12096563 +n12096674 +n12097396 +n12097556 +n12098403 +n12098524 +n12098827 +n12099342 +n12100187 +n12101870 +n12102133 +n12103680 +n12103894 +n12104104 +n12104238 +n12104501 +n12104734 +n12105125 +n12105353 +n12105828 +n12105981 +n12106134 +n12106323 +n12107002 +n12107191 +n12107710 +n12107970 +n12108432 +n12108613 +n12108871 +n12109365 +n12109827 +n12110085 +n12110236 +n12110352 +n12110475 +n12110778 +n12111238 +n12111627 +n12112008 +n12112337 +n12112609 +n12112918 +n12113195 +n12113323 +n12113657 +n12114010 +n12114590 +n12115180 +n12116058 +n12116429 +n12116734 +n12117017 +n12117235 +n12117326 +n12117695 +n12117912 +n12118414 +n12118661 +n12119099 +n12119238 +n12119390 +n12119539 +n12119717 +n12120347 +n12120578 +n12121033 +n12121187 +n12121610 +n12122442 +n12122725 +n12122918 +n12123648 +n12123741 +n12124172 +n12124627 +n12124818 +n12125001 +n12125183 +n12125584 +n12126084 +n12126360 +n12126736 +n12127460 +n12127575 +n12127768 +n12128071 +n12128306 +n12128490 +n12129134 +n12129738 +n12129986 +n12130549 +n12131405 +n12131550 +n12132092 +n12132956 +n12133151 +n12133462 +n12133682 +n12134025 +n12134486 +n12134695 +n12134836 +n12135049 +n12135576 +n12135729 +n12135898 +n12136392 +n12136581 +n12136720 +n12137120 +n12137569 +n12137791 +n12137954 +n12138110 +n12138248 +n12138444 +n12138578 +n12139196 +n12139575 +n12139793 +n12139921 +n12140511 +n12140759 +n12140903 +n12141167 +n12141385 +n12141495 +n12142085 +n12142357 +n12142450 +n12143065 +n12143215 +n12143405 +n12143676 +n12144313 +n12144580 +n12144987 +n12145148 +n12145477 +n12146311 +n12146488 +n12146654 +n12147226 +n12147835 +n12148757 +n12150722 +n12150969 +n12151170 +n12151615 +n12152031 +n12152251 +n12152532 +n12152722 +n12153033 +n12153224 +n12153580 +n12153741 +n12153914 +n12154114 +n12154773 +n12155009 +n12155583 +n12155773 +n12156679 +n12156819 +n12157056 +n12157179 +n12157769 +n12158031 +n12158443 +n12158798 +n12159055 +n12159388 +n12159555 
+n12159804 +n12159942 +n12160125 +n12160303 +n12160490 +n12160857 +n12161056 +n12161285 +n12161577 +n12161744 +n12161969 +n12162181 +n12162425 +n12162758 +n12163035 +n12163279 +n12164363 +n12164656 +n12164881 +n12165170 +n12165384 +n12165758 +n12166128 +n12166424 +n12166793 +n12166929 +n12167075 +n12167436 +n12167602 +n12168565 +n12169099 +n12170585 +n12171098 +n12171316 +n12171966 +n12172364 +n12172481 +n12172906 +n12173069 +n12173664 +n12173912 +n12174311 +n12174521 +n12174926 +n12175181 +n12175370 +n12175598 +n12176453 +n12176709 +n12176953 +n12177129 +n12177455 +n12178129 +n12178780 +n12178896 +n12179122 +n12179632 +n12180168 +n12180456 +n12180885 +n12181352 +n12181612 +n12182049 +n12182276 +n12183026 +n12183452 +n12183816 +n12184095 +n12184468 +n12184912 +n12185254 +n12185859 +n12186352 +n12186554 +n12186839 +n12187247 +n12187663 +n12187891 +n12188289 +n12188635 +n12189429 +n12189779 +n12189987 +n12190410 +n12190869 +n12191240 +n12192132 +n12192877 +n12193334 +n12193665 +n12194147 +n12194613 +n12195391 +n12195533 +n12195734 +n12196129 +n12196336 +n12196527 +n12196694 +n12196954 +n12197359 +n12197601 +n12198286 +n12198793 +n12199266 +n12199399 +n12199790 +n12199982 +n12200143 +n12200504 +n12200905 +n12201331 +n12201580 +n12201938 +n12202936 +n12203529 +n12203699 +n12203896 +n12204032 +n12204175 +n12204730 +n12205460 +n12205694 +n12214789 +n12215022 +n12215210 +n12215579 +n12215824 +n12216215 +n12216628 +n12216968 +n12217453 +n12217851 +n12218274 +n12218490 +n12218868 +n12219668 +n12220019 +n12220496 +n12220829 +n12221191 +n12221368 +n12221522 +n12221801 +n12222090 +n12222493 +n12222900 +n12223160 +n12223569 +n12223764 +n12224978 +n12225222 +n12225349 +n12225563 +n12226932 +n12227658 +n12227909 +n12228229 +n12228387 +n12228689 +n12228886 +n12229111 +n12229651 +n12229887 +n12230540 +n12230794 +n12231192 +n12231709 +n12232114 +n12232280 +n12232851 +n12233249 +n12234318 +n12234669 +n12235051 +n12235479 +n12236160 +n12236546 +n12236768 +n12236977 +n12237152 +n12237486 +n12237641 +n12237855 +n12238756 +n12238913 +n12239240 +n12239647 +n12239880 +n12240150 +n12240477 +n12240965 +n12241192 +n12241426 +n12241880 +n12242123 +n12242409 +n12242850 +n12243109 +n12243693 +n12244153 +n12244458 +n12244650 +n12244819 +n12245319 +n12245695 +n12245885 +n12246037 +n12246232 +n12246773 +n12246941 +n12247202 +n12247407 +n12247963 +n12248141 +n12248359 +n12248574 +n12248780 +n12248941 +n12249122 +n12249294 +n12249542 +n12251001 +n12251278 +n12251740 +n12252168 +n12252383 +n12252866 +n12253229 +n12253487 +n12253664 +n12253835 +n12254168 +n12255225 +n12256112 +n12256325 +n12256522 +n12256708 +n12256920 +n12257570 +n12257725 +n12258101 +n12258885 +n12259316 +n12260799 +n12261359 +n12261571 +n12261808 +n12262018 +n12262185 +n12262553 +n12263038 +n12263204 +n12263410 +n12263588 +n12263738 +n12263987 +n12264512 +n12264786 +n12265083 +n12265394 +n12265600 +n12266217 +n12266528 +n12266644 +n12266796 +n12266984 +n12267133 +n12267265 +n12267411 +n12267534 +n12267677 +n12267931 +n12268246 +n12269241 +n12269406 +n12269652 +n12270027 +n12270278 +n12270460 +n12270741 +n12270946 +n12271187 +n12271451 +n12271643 +n12271933 +n12272239 +n12272432 +n12272735 +n12272883 +n12273114 +n12273344 +n12273515 +n12273768 +n12273939 +n12274151 +n12274358 +n12274630 +n12274863 +n12275131 +n12275317 +n12275489 +n12275675 +n12275888 +n12276110 +n12276314 +n12276477 +n12276628 +n12276872 +n12277150 +n12277334 +n12277578 +n12277800 +n12278107 +n12278371 +n12278650 +n12278865 +n12279060 +n12279293 +n12279458 +n12279772 +n12280060 +n12280364 
+n12281241 +n12281788 +n12281974 +n12282235 +n12282527 +n12282737 +n12282933 +n12283147 +n12283395 +n12283542 +n12283790 +n12284262 +n12284821 +n12285049 +n12285195 +n12285369 +n12285512 +n12285705 +n12285900 +n12286068 +n12286197 +n12286826 +n12286988 +n12287195 +n12287642 +n12287836 +n12288005 +n12288823 +n12289310 +n12289433 +n12289585 +n12290748 +n12290975 +n12291143 +n12291459 +n12291671 +n12291959 +n12292463 +n12292877 +n12293723 +n12294124 +n12294331 +n12294542 +n12294723 +n12294871 +n12295033 +n12295237 +n12295429 +n12295796 +n12296045 +n12296432 +n12296735 +n12296929 +n12297110 +n12297280 +n12297507 +n12297846 +n12298165 +n12299640 +n12300840 +n12301180 +n12301445 +n12301613 +n12301766 +n12302071 +n12302248 +n12302565 +n12303083 +n12303462 +n12304115 +n12304286 +n12304420 +n12304703 +n12304899 +n12305089 +n12305293 +n12305475 +n12305654 +n12305819 +n12305986 +n12306089 +n12306270 +n12306717 +n12306938 +n12307076 +n12307240 +n12307756 +n12308112 +n12308447 +n12308907 +n12309277 +n12309630 +n12310021 +n12310349 +n12310638 +n12311045 +n12311224 +n12311413 +n12311579 +n12312110 +n12312728 +n12315060 +n12315245 +n12315598 +n12315999 +n12316444 +n12316572 +n12317296 +n12318378 +n12318782 +n12318965 +n12319204 +n12319414 +n12320010 +n12320414 +n12320627 +n12320806 +n12321077 +n12321395 +n12321669 +n12321873 +n12322099 +n12322501 +n12322699 +n12323665 +n12324056 +n12324222 +n12324388 +n12324558 +n12324906 +n12325234 +n12325787 +n12327022 +n12327528 +n12327846 +n12328398 +n12328567 +n12328801 +n12329260 +n12329473 +n12330239 +n12330469 +n12330587 +n12330891 +n12331066 +n12331263 +n12331655 +n12331788 +n12332030 +n12332218 +n12332555 +n12333053 +n12333530 +n12333771 +n12333961 +n12334153 +n12334293 +n12334891 +n12335483 +n12335664 +n12335800 +n12335937 +n12336092 +n12336224 +n12336333 +n12336586 +n12336727 +n12336973 +n12337131 +n12337246 +n12337391 +n12337617 +n12337800 +n12337922 +n12338034 +n12338146 +n12338258 +n12338454 +n12338655 +n12338796 +n12338979 +n12339526 +n12339831 +n12340383 +n12340581 +n12340755 +n12341542 +n12341931 +n12342299 +n12342498 +n12342852 +n12343480 +n12343753 +n12344283 +n12344483 +n12344700 +n12344837 +n12345280 +n12345899 +n12346578 +n12346813 +n12346986 +n12347158 +n12349315 +n12349711 +n12350032 +n12350758 +n12351091 +n12351790 +n12352287 +n12352639 +n12352844 +n12352990 +n12353203 +n12353431 +n12353754 +n12355760 +n12356023 +n12356395 +n12356960 +n12357485 +n12357968 +n12358293 +n12360108 +n12360534 +n12360684 +n12360817 +n12360958 +n12361135 +n12361560 +n12361754 +n12361946 +n12362274 +n12362514 +n12362668 +n12363301 +n12363768 +n12364604 +n12364940 +n12365158 +n12365285 +n12365462 +n12365900 +n12366053 +n12366186 +n12366313 +n12366675 +n12366870 +n12367611 +n12368028 +n12368257 +n12368451 +n12369066 +n12369309 +n12369476 +n12369665 +n12369845 +n12370174 +n12370549 +n12371202 +n12371439 +n12371704 +n12372233 +n12373100 +n12373739 +n12374418 +n12374705 +n12374862 +n12375769 +n12377198 +n12377494 +n12378249 +n12378753 +n12378963 +n12379531 +n12380761 +n12381511 +n12382233 +n12382875 +n12383737 +n12383894 +n12384037 +n12384227 +n12384375 +n12384569 +n12384680 +n12384839 +n12385429 +n12385566 +n12385830 +n12386945 +n12387103 +n12387633 +n12387839 +n12388143 +n12388293 +n12388858 +n12388989 +n12389130 +n12389501 +n12389727 +n12389932 +n12390099 +n12390314 +n12392070 +n12392549 +n12392765 +n12393269 +n12394118 +n12394328 +n12394638 +n12395068 +n12395289 +n12395463 +n12395906 +n12396091 +n12396924 +n12397431 +n12399132 +n12399384 +n12399534 +n12399656 +n12399899 
+n12400489 +n12400720 +n12400924 +n12401335 +n12401684 +n12401893 +n12402051 +n12402348 +n12402596 +n12402840 +n12403075 +n12403276 +n12403513 +n12403994 +n12404729 +n12405714 +n12406304 +n12406488 +n12406715 +n12406902 +n12407079 +n12407222 +n12407396 +n12407545 +n12407715 +n12407890 +n12408077 +n12408280 +n12408466 +n12408717 +n12408873 +n12409231 +n12409470 +n12409651 +n12409840 +n12411461 +n12412355 +n12412606 +n12412987 +n12413165 +n12413301 +n12413419 +n12413642 +n12413880 +n12414035 +n12414159 +n12414329 +n12414449 +n12414818 +n12414932 +n12415595 +n12416073 +n12416423 +n12416703 +n12417836 +n12418221 +n12418507 +n12419037 +n12419878 +n12420124 +n12420535 +n12420722 +n12421137 +n12421467 +n12421683 +n12421917 +n12422129 +n12422559 +n12425281 +n12426623 +n12426749 +n12427184 +n12427391 +n12427566 +n12427757 +n12427946 +n12428076 +n12428242 +n12428412 +n12428747 +n12429352 +n12430198 +n12430471 +n12430675 +n12431434 +n12432069 +n12432356 +n12432574 +n12432707 +n12433081 +n12433178 +n12433769 +n12433952 +n12434106 +n12434483 +n12434634 +n12434775 +n12434985 +n12435152 +n12435486 +n12435649 +n12435777 +n12435965 +n12436090 +n12436907 +n12437513 +n12437769 +n12437930 +n12439154 +n12439830 +n12441183 +n12441390 +n12441552 +n12441958 +n12442548 +n12443323 +n12443736 +n12444095 +n12444898 +n12446200 +n12446519 +n12446737 +n12446908 +n12447121 +n12447346 +n12447581 +n12447891 +n12448136 +n12448361 +n12448700 +n12449296 +n12449526 +n12449784 +n12449934 +n12450344 +n12450607 +n12450840 +n12451070 +n12451240 +n12451399 +n12451566 +n12451915 +n12452256 +n12452480 +n12452673 +n12452836 +n12453018 +n12453186 +n12453714 +n12453857 +n12454159 +n12454436 +n12454556 +n12454705 +n12454793 +n12454949 +n12455950 +n12457091 +n12458550 +n12458713 +n12458874 +n12459629 +n12460146 +n12460697 +n12460957 +n12461109 +n12461466 +n12461673 +n12462032 +n12462221 +n12462582 +n12462805 +n12463134 +n12463743 +n12463975 +n12464128 +n12464476 +n12464649 +n12465557 +n12466727 +n12467018 +n12467197 +n12467433 +n12467592 +n12468545 +n12468719 +n12469517 +n12470092 +n12470512 +n12470907 +n12472024 +n12473608 +n12473840 +n12474167 +n12474418 +n12475035 +n12475242 +n12475774 +n12476510 +n12477163 +n12477401 +n12477583 +n12477747 +n12477983 +n12478768 +n12479537 +n12480456 +n12480895 +n12481150 +n12481289 +n12481458 +n12482437 +n12482668 +n12482893 +n12483282 +n12483427 +n12483625 +n12483841 +n12484244 +n12484784 +n12485653 +n12485981 +n12486574 +n12487058 +n12488454 +n12488709 +n12489046 +n12489676 +n12489815 +n12490490 +n12491017 +n12491435 +n12491826 +n12492106 +n12492460 +n12492682 +n12492900 +n12493208 +n12493426 +n12493868 +n12494794 +n12495146 +n12495670 +n12495895 +n12496427 +n12496949 +n12497669 +n12498055 +n12498457 +n12499163 +n12499757 +n12499979 +n12500309 +n12500518 +n12500751 +n12501202 +n12504570 +n12504783 +n12505253 +n12506181 +n12506341 +n12506991 +n12507379 +n12507823 +n12508309 +n12508618 +n12508762 +n12509109 +n12509476 +n12509665 +n12509821 +n12509993 +n12510343 +n12510774 +n12511488 +n12511856 +n12512095 +n12512294 +n12512674 +n12513172 +n12513613 +n12513933 +n12514138 +n12514592 +n12514992 +n12515393 +n12515711 +n12515925 +n12516165 +n12516584 +n12516828 +n12517077 +n12517445 +n12517642 +n12518013 +n12518481 +n12519089 +n12519563 +n12520406 +n12521186 +n12521394 +n12522188 +n12522678 +n12522894 +n12523141 +n12523475 +n12523850 +n12524188 +n12525168 +n12525513 +n12525753 +n12526178 +n12526516 +n12526754 +n12527081 +n12527738 +n12528109 +n12528382 +n12528549 +n12528768 +n12528974 +n12529220 +n12529500 
+n12529905 +n12530629 +n12530818 +n12531328 +n12531727 +n12532564 +n12532886 +n12533190 +n12533437 +n12534208 +n12534625 +n12534862 +n12536291 +n12537253 +n12537569 +n12538209 +n12539074 +n12539306 +n12539832 +n12540250 +n12540647 +n12540966 +n12541157 +n12541403 +n12542043 +n12542240 +n12543186 +n12543455 +n12543639 +n12543826 +n12544240 +n12544539 +n12545232 +n12545635 +n12545865 +n12546183 +n12546420 +n12546617 +n12546962 +n12547215 +n12547503 +n12548280 +n12548564 +n12548804 +n12549005 +n12549192 +n12549420 +n12549799 +n12550210 +n12550408 +n12551173 +n12551457 +n12552309 +n12552893 +n12553742 +n12554029 +n12554526 +n12554729 +n12554911 +n12555255 +n12555859 +n12556656 +n12557064 +n12557438 +n12557556 +n12557681 +n12558230 +n12558425 +n12558680 +n12559044 +n12559518 +n12560282 +n12560621 +n12560775 +n12561169 +n12561309 +n12561594 +n12562141 +n12562577 +n12562785 +n12563045 +n12563702 +n12564083 +n12564613 +n12565102 +n12565912 +n12566331 +n12566954 +n12567950 +n12568186 +n12568649 +n12569037 +n12569616 +n12569851 +n12570394 +n12570703 +n12570972 +n12571781 +n12572546 +n12572759 +n12572858 +n12573256 +n12573474 +n12573647 +n12573911 +n12574320 +n12574470 +n12574866 +n12575322 +n12575812 +n12576323 +n12576451 +n12576695 +n12577362 +n12577895 +n12578255 +n12578626 +n12578916 +n12579038 +n12579404 +n12579822 +n12580012 +n12580654 +n12580786 +n12580896 +n12581110 +n12582231 +n12582665 +n12582846 +n12583126 +n12583401 +n12583681 +n12583855 +n12584191 +n12584365 +n12584715 +n12585137 +n12585373 +n12585629 +n12586298 +n12586499 +n12586725 +n12586989 +n12587132 +n12587487 +n12587803 +n12588320 +n12588780 +n12589142 +n12589458 +n12589687 +n12589841 +n12590232 +n12590499 +n12590600 +n12590715 +n12591017 +n12591351 +n12591702 +n12592058 +n12592544 +n12592839 +n12593122 +n12593341 +n12593994 +n12594324 +n12594989 +n12595699 +n12595964 +n12596148 +n12596345 +n12596709 +n12596849 +n12597134 +n12597466 +n12597798 +n12598027 +n12599185 +n12599435 +n12599661 +n12599874 +n12600095 +n12600267 +n12601494 +n12601805 +n12602262 +n12602434 +n12602612 +n12602980 +n12603273 +n12603449 +n12603672 +n12604228 +n12604460 +n12604639 +n12604845 +n12605683 +n12606438 +n12606545 +n12607456 +n12609379 +n12610328 +n12610740 +n12611640 +n12612170 +n12612811 +n12613706 +n12614096 +n12614477 +n12614625 +n12615232 +n12615710 +n12616248 +n12616630 +n12616996 +n12617559 +n12618146 +n12618727 +n12620196 +n12620546 +n12620969 +n12621410 +n12621619 +n12621945 +n12622297 +n12622875 +n12623077 +n12623211 +n12623818 +n12624381 +n12624568 +n12625003 +n12625383 +n12625670 +n12625823 +n12626674 +n12626878 +n12627119 +n12627347 +n12627526 +n12628356 +n12628705 +n12628986 +n12629305 +n12629666 +n12630763 +n12630999 +n12631331 +n12631637 +n12631932 +n12632335 +n12632733 +n12633061 +n12633638 +n12633994 +n12634211 +n12634429 +n12634734 +n12634986 +n12635151 +n12635359 +n12635532 +n12635744 +n12635955 +n12636224 +n12636885 +n12637123 +n12637485 +n12638218 +n12638556 +n12638753 +n12638964 +n12639168 +n12639376 +n12639584 +n12639736 +n12639910 +n12640081 +n12640284 +n12640435 +n12640607 +n12640839 +n12641007 +n12641180 +n12641413 +n12641931 +n12642090 +n12642200 +n12642435 +n12642600 +n12642964 +n12643113 +n12643313 +n12643473 +n12643688 +n12643877 +n12644283 +n12644902 +n12645174 +n12645530 +n12646072 +n12646197 +n12646397 +n12646605 +n12646740 +n12646950 +n12647231 +n12647376 +n12647560 +n12647787 +n12647893 +n12648045 +n12648196 +n12648424 +n12648693 +n12648888 +n12649065 +n12649317 +n12649539 +n12649866 +n12650038 +n12650229 +n12650379 
+n12650556 +n12650805 +n12650915 +n12651229 +n12651611 +n12651821 +n12653218 +n12653436 +n12653633 +n12654227 +n12654857 +n12655062 +n12655245 +n12655351 +n12655498 +n12655605 +n12655726 +n12655869 +n12656369 +n12656528 +n12656685 +n12656909 +n12657082 +n12657755 +n12658118 +n12658308 +n12658481 +n12658603 +n12658715 +n12658846 +n12659064 +n12659356 +n12659539 +n12660601 +n12661045 +n12661227 +n12661538 +n12662074 +n12662379 +n12662772 +n12663023 +n12663254 +n12663359 +n12663804 +n12664005 +n12664187 +n12664469 +n12664710 +n12665048 +n12665271 +n12665659 +n12665857 +n12666050 +n12666159 +n12666369 +n12666965 +n12667406 +n12667582 +n12667964 +n12668131 +n12669803 +n12670334 +n12670758 +n12670962 +n12671651 +n12672289 +n12673588 +n12674120 +n12674685 +n12674895 +n12675299 +n12675515 +n12675876 +n12676134 +n12676370 +n12676534 +n12676703 +n12677120 +n12677331 +n12677612 +n12677841 +n12678794 +n12679023 +n12679432 +n12679593 +n12679876 +n12680402 +n12680652 +n12680864 +n12681376 +n12681579 +n12681893 +n12682411 +n12682668 +n12682882 +n12683096 +n12683407 +n12683571 +n12683791 +n12684379 +n12685431 +n12685831 +n12686077 +n12686274 +n12686496 +n12686676 +n12686877 +n12687044 +n12687462 +n12687698 +n12687957 +n12688187 +n12688372 +n12688716 +n12689305 +n12690653 +n12691428 +n12691661 +n12692024 +n12692160 +n12692521 +n12692714 +n12693244 +n12693352 +n12693865 +n12694486 +n12695144 +n12695975 +n12696492 +n12696830 +n12697152 +n12697514 +n12698027 +n12698435 +n12698598 +n12698774 +n12699031 +n12699301 +n12699922 +n12700088 +n12700357 +n12702124 +n12703190 +n12703383 +n12703557 +n12703716 +n12703856 +n12704041 +n12704343 +n12704513 +n12705013 +n12705220 +n12705458 +n12705698 +n12705978 +n12706410 +n12707199 +n12707781 +n12708293 +n12708654 +n12708941 +n12709103 +n12709349 +n12709688 +n12709901 +n12710295 +n12710415 +n12710577 +n12710693 +n12710917 +n12711182 +n12711398 +n12711596 +n12711817 +n12711984 +n12712320 +n12712626 +n12713063 +n12713358 +n12713521 +n12713866 +n12714254 +n12714755 +n12714949 +n12715195 +n12715914 +n12716400 +n12716594 +n12717072 +n12717224 +n12717644 +n12718074 +n12718483 +n12718995 +n12719684 +n12719944 +n12720200 +n12720354 +n12721122 +n12721477 +n12722071 +n12723062 +n12723610 +n12724942 +n12725521 +n12725738 +n12725940 +n12726159 +n12726357 +n12726528 +n12726670 +n12726902 +n12727101 +n12727301 +n12727518 +n12727729 +n12727960 +n12728164 +n12728322 +n12728508 +n12728656 +n12728864 +n12729023 +n12729164 +n12729315 +n12729521 +n12729729 +n12729950 +n12730143 +n12730370 +n12730544 +n12730776 +n12731029 +n12731401 +n12731835 +n12732009 +n12732252 +n12732491 +n12732605 +n12732756 +n12732966 +n12733218 +n12733428 +n12733647 +n12733870 +n12734070 +n12734215 +n12735160 +n12736603 +n12736999 +n12737383 +n12737898 +n12738259 +n12739332 +n12739966 +n12740967 +n12741222 +n12741586 +n12741792 +n12742290 +n12742741 +n12742878 +n12743009 +n12743352 +n12743823 +n12743976 +n12744142 +n12744387 +n12744850 +n12745386 +n12745564 +n12746884 +n12747120 +n12748248 +n12749049 +n12749456 +n12749679 +n12749852 +n12750076 +n12750767 +n12751172 +n12751675 +n12752205 +n12753007 +n12753245 +n12753573 +n12753762 +n12754003 +n12754174 +n12754311 +n12754468 +n12754648 +n12754781 +n12754981 +n12755225 +n12755387 +n12755559 +n12755727 +n12755876 +n12756457 +n12757115 +n12757303 +n12757458 +n12757668 +n12757816 +n12757930 +n12758014 +n12758099 +n12758176 +n12758250 +n12758325 +n12758399 +n12758471 +n12758555 +n12759273 +n12759668 +n12760539 +n12760875 +n12761284 +n12761702 +n12761905 +n12762049 +n12762405 
+n12762896 +n12763529 +n12764008 +n12764202 +n12764507 +n12764978 +n12765115 +n12765402 +n12765846 +n12766043 +n12766595 +n12766869 +n12767208 +n12767423 +n12767648 +n12768369 +n12768682 +n12768809 +n12768933 +n12769065 +n12769219 +n12769318 +n12770529 +n12770892 +n12771085 +n12771192 +n12771390 +n12771597 +n12771890 +n12772753 +n12772908 +n12773142 +n12773651 +n12773917 +n12774299 +n12774641 +n12775070 +n12775393 +n12775717 +n12775919 +n12776558 +n12776774 +n12777436 +n12777680 +n12777778 +n12777892 +n12778398 +n12778605 +n12779603 +n12779851 +n12780325 +n12780563 +n12781940 +n12782530 +n12782915 +n12783316 +n12783730 +n12784371 +n12784889 +n12785724 +n12785889 +n12786273 +n12786464 +n12786836 +n12787364 +n12788854 +n12789054 +n12789554 +n12789977 +n12790430 +n12791064 +n12791329 +n12793015 +n12793284 +n12793494 +n12793695 +n12793886 +n12794135 +n12794367 +n12794568 +n12794985 +n12795209 +n12795352 +n12795555 +n12796022 +n12796385 +n12796849 +n12797368 +n12797860 +n12798284 +n12798910 +n12799269 +n12799776 +n12800049 +n12800586 +n12801072 +n12801520 +n12801781 +n12801966 +n12803226 +n12803754 +n12803958 +n12804352 +n12805146 +n12805561 +n12805762 +n12806015 +n12806732 +n12807251 +n12807409 +n12807624 +n12807773 +n12808007 +n12809868 +n12810007 +n12810151 +n12810595 +n12811027 +n12811713 +n12812235 +n12812478 +n12812801 +n12813189 +n12814643 +n12814857 +n12814960 +n12815198 +n12815668 +n12815838 +n12816508 +n12816942 +n12817464 +n12817694 +n12817855 +n12818004 +n12818346 +n12818601 +n12818966 +n12819141 +n12819354 +n12819728 +n12820113 +n12820669 +n12820853 +n12821505 +n12821895 +n12822115 +n12822466 +n12822769 +n12822955 +n12823717 +n12823859 +n12824053 +n12824289 +n12824735 +n12825497 +n12826143 +n12827270 +n12827537 +n12827907 +n12828220 +n12828379 +n12828520 +n12828791 +n12828977 +n12829582 +n12829975 +n12830222 +n12830568 +n12831141 +n12831535 +n12831932 +n12832315 +n12832538 +n12832822 +n12833149 +n12833985 +n12834190 +n12834798 +n12834938 +n12835331 +n12835766 +n12836212 +n12836337 +n12836508 +n12836862 +n12837052 +n12837259 +n12837466 +n12837803 +n12839574 +n12839979 +n12840168 +n12840362 +n12840502 +n12840749 +n12841007 +n12841193 +n12841354 +n12842302 +n12842519 +n12842642 +n12842887 +n12843144 +n12843316 +n12843557 +n12843970 +n12844409 +n12844939 +n12845187 +n12845413 +n12845908 +n12846335 +n12846690 +n12847008 +n12847374 +n12847927 +n12848499 +n12849061 +n12849279 +n12849416 +n12849952 +n12850168 +n12850336 +n12850906 +n12851094 +n12851469 +n12851860 +n12852234 +n12852428 +n12852570 +n12853080 +n12853287 +n12853482 +n12854048 +n12854193 +n12854600 +n12855365 +n12855494 +n12855710 +n12855886 +n12856091 +n12856287 +n12856479 +n12856680 +n12857204 +n12857779 +n12858150 +n12858397 +n12858618 +n12858871 +n12858987 +n12859153 +n12859272 +n12859679 +n12859986 +n12860365 +n12860978 +n12861345 +n12861541 +n12861892 +n12862512 +n12862828 +n12863234 +n12863624 +n12864160 +n12865037 +n12865562 +n12865708 +n12865824 +n12866002 +n12866162 +n12866333 +n12866459 +n12866635 +n12866968 +n12867184 +n12867449 +n12867826 +n12868019 +n12868880 +n12869061 +n12869478 +n12869668 +n12870048 +n12870225 +n12870535 +n12870682 +n12870891 +n12871272 +n12871696 +n12871859 +n12872458 +n12872914 +n12873341 +n12873984 +n12875269 +n12875697 +n12875861 +n12876899 +n12877244 +n12877493 +n12877637 +n12877838 +n12878169 +n12878325 +n12878784 +n12879068 +n12879527 +n12879963 +n12880244 +n12880462 +n12880638 +n12880799 +n12881105 +n12881913 +n12882158 +n12882779 +n12882945 +n12883265 +n12883628 +n12884100 +n12884260 
+n12885045 +n12885265 +n12885510 +n12885754 +n12886185 +n12886402 +n12886600 +n12886831 +n12887293 +n12887532 +n12887713 +n12888016 +n12888234 +n12888457 +n12889219 +n12889412 +n12889579 +n12889713 +n12890265 +n12890490 +n12890685 +n12890928 +n12891093 +n12891305 +n12891469 +n12891643 +n12891824 +n12892013 +n12893463 +n12893993 +n12895298 +n12895811 +n12896615 +n12897118 +n12897788 +n12897999 +n12898342 +n12898774 +n12899166 +n12899537 +n12899752 +n12899971 +n12900783 +n12901724 +n12902466 +n12902662 +n12903014 +n12903367 +n12903503 +n12903964 +n12904314 +n12904562 +n12904938 +n12905135 +n12905412 +n12906214 +n12906498 +n12906771 +n12907057 +n12907671 +n12907857 +n12908093 +n12908645 +n12908854 +n12909421 +n12909614 +n12909759 +n12909917 +n12911079 +n12911264 +n12911440 +n12911673 +n12911914 +n12912274 +n12912670 +n12912801 +n12913144 +n12913524 +n12913791 +n12914923 +n12915140 +n12915568 +n12915811 +n12916179 +n12916511 +n12917901 +n12918609 +n12918810 +n12918991 +n12919195 +n12919403 +n12919646 +n12919847 +n12920043 +n12920204 +n12920521 +n12920719 +n12920955 +n12921315 +n12921499 +n12921660 +n12921868 +n12922119 +n12922458 +n12922763 +n12923108 +n12923257 +n12924623 +n12925179 +n12925583 +n12926039 +n12926480 +n12926689 +n12927013 +n12927194 +n12927494 +n12927758 +n12928071 +n12928307 +n12928491 +n12928819 +n12929403 +n12929600 +n12930778 +n12930951 +n12931231 +n12931542 +n12931906 +n12932173 +n12932365 +n12932706 +n12932966 +n12933274 +n12934036 +n12934174 +n12934479 +n12934685 +n12934985 +n12935166 +n12935609 +n12936155 +n12936826 +n12937130 +n12938081 +n12938193 +n12938445 +n12938667 +n12939104 +n12939282 +n12939479 +n12939874 +n12940226 +n12940609 +n12941220 +n12941536 +n12941717 +n12942025 +n12942395 +n12942572 +n12942729 +n12943049 +n12943443 +n12943912 +n12944095 +n12945177 +n12945366 +n12945549 +n12946849 +n12947313 +n12947544 +n12947756 +n12947895 +n12948053 +n12948251 +n12948495 +n12949160 +n12949361 +n12950126 +n12950314 +n12950796 +n12951146 +n12951835 +n12952165 +n12952469 +n12952590 +n12952717 +n12953206 +n12953484 +n12953712 +n12954353 +n12954799 +n12955414 +n12955840 +n12956170 +n12956367 +n12956588 +n12956922 +n12957608 +n12957803 +n12957924 +n12958261 +n12958615 +n12959074 +n12959538 +n12960378 +n12960552 +n12960863 +n12961242 +n12961393 +n12961536 +n12961879 +n12963628 +n12964920 +n12965626 +n12965951 +n12966804 +n12966945 +n12968136 +n12968309 +n12969131 +n12969425 +n12969670 +n12969927 +n12970193 +n12970293 +n12970733 +n12971400 +n12971804 +n12972136 +n12973443 +n12973791 +n12973937 +n12974987 +n12975804 +n12976198 +n12976554 +n12978076 +n12979316 +n12979829 +n12980080 +n12980840 +n12981086 +n12981301 +n12981443 +n12981954 +n12982468 +n12982590 +n12982915 +n12983048 +n12983654 +n12983873 +n12983961 +n12984267 +n12984489 +n12984595 +n12985420 +n12985773 +n12985857 +n12986227 +n12987056 +n12987423 +n12987535 +n12988158 +n12988341 +n12988572 +n12989007 +n12989938 +n12990597 +n12991184 +n12991837 +n12992177 +n12992868 +n12994892 +n12995601 +n12997654 +n12997919 +n12998815 +n13000891 +n13001041 +n13001206 +n13001366 +n13001529 +n13001930 +n13002209 +n13002750 +n13002925 +n13003061 +n13003254 +n13003522 +n13003712 +n13004423 +n13004640 +n13004826 +n13004992 +n13005329 +n13005984 +n13006171 +n13006631 +n13006894 +n13007034 +n13007417 +n13007629 +n13008157 +n13008315 +n13008485 +n13008689 +n13008839 +n13009085 +n13009244 +n13009429 +n13009656 +n13010694 +n13010951 +n13011221 +n13011595 +n13012253 +n13012469 +n13012973 +n13013534 +n13013764 +n13013965 +n13014097 +n13014265 
+n13014409 +n13014581 +n13014741 +n13014879 +n13015509 +n13015688 +n13016076 +n13016289 +n13017102 +n13017240 +n13017439 +n13017610 +n13017789 +n13017979 +n13018088 +n13018232 +n13018407 +n13018906 +n13019496 +n13019643 +n13019835 +n13020191 +n13020481 +n13020964 +n13021166 +n13021332 +n13021543 +n13021689 +n13021867 +n13022210 +n13022709 +n13022903 +n13023134 +n13024012 +n13024500 +n13024653 +n13025647 +n13025854 +n13026015 +n13027557 +n13027879 +n13028611 +n13028937 +n13029122 +n13029326 +n13029610 +n13029760 +n13030337 +n13030616 +n13030852 +n13031193 +n13031323 +n13031474 +n13032115 +n13032381 +n13032618 +n13032923 +n13033134 +n13033396 +n13033577 +n13033879 +n13034062 +n13034555 +n13034788 +n13035241 +n13035389 +n13035707 +n13035925 +n13036116 +n13036312 +n13036804 +n13037406 +n13037585 +n13037805 +n13038068 +n13038376 +n13038577 +n13038744 +n13039349 +n13040303 +n13040629 +n13040796 +n13041312 +n13041943 +n13042134 +n13042316 +n13042982 +n13043926 +n13044375 +n13044778 +n13045210 +n13045594 +n13045975 +n13046130 +n13046669 +n13047862 +n13048447 +n13049953 +n13050397 +n13050705 +n13050940 +n13051346 +n13052014 +n13052248 +n13052670 +n13052931 +n13053608 +n13054073 +n13054560 +n13055423 +n13055577 +n13055792 +n13055949 +n13056135 +n13056349 +n13056607 +n13056799 +n13057054 +n13057242 +n13057422 +n13057639 +n13058037 +n13058272 +n13058608 +n13059298 +n13059657 +n13060017 +n13060190 +n13061172 +n13061348 +n13061471 +n13061704 +n13062421 +n13063269 +n13063514 +n13064111 +n13064457 +n13065089 +n13065514 +n13066129 +n13066448 +n13066979 +n13067191 +n13067330 +n13067532 +n13067672 +n13068255 +n13068434 +n13068735 +n13068917 +n13069224 +n13069773 +n13070308 +n13070875 +n13071371 +n13071553 +n13071815 +n13072031 +n13072209 +n13072350 +n13072528 +n13072706 +n13072863 +n13073055 +n13073703 +n13074619 +n13074814 +n13075020 +n13075272 +n13075441 +n13075684 +n13075847 +n13076041 +n13076405 +n13076643 +n13076831 +n13077033 +n13077295 +n13078021 +n13079073 +n13079419 +n13079567 +n13080306 +n13080866 +n13081229 +n13081999 +n13082568 +n13083023 +n13083461 +n13084184 +n13084834 +n13085113 +n13085747 +n13090018 +n13090871 +n13091620 +n13091774 +n13091982 +n13092078 +n13092240 +n13092385 +n13092987 +n13093275 +n13093629 +n13094145 +n13094273 +n13095013 +n13096779 +n13098515 +n13098962 +n13099833 +n13099999 +n13100156 +n13100677 +n13102648 +n13102775 +n13103023 +n13103660 +n13103750 +n13103877 +n13104059 +n13107694 +n13107807 +n13107891 +n13108131 +n13108323 +n13108481 +n13108545 +n13108662 +n13108841 +n13109733 +n13110915 +n13111174 +n13111340 +n13111504 +n13111881 +n13112035 +n13112201 +n13118330 +n13118707 +n13119870 +n13120211 +n13120958 +n13121104 +n13121349 +n13122364 +n13123309 +n13123431 +n13123841 +n13124358 +n13124654 +n13125117 +n13126050 +n13126856 +n13127001 +n13127303 +n13127666 +n13127843 +n13128278 +n13128582 +n13128976 +n13129078 +n13130014 +n13130161 +n13130726 +n13131028 +n13131618 +n13132034 +n13132156 +n13132338 +n13132486 +n13132656 +n13132756 +n13132940 +n13133140 +n13133233 +n13133316 +n13133613 +n13133932 +n13134302 +n13134531 +n13134844 +n13134947 +n13135692 +n13135832 +n13136316 +n13136556 +n13136781 +n13137010 +n13137225 +n13137409 +n13137672 +n13137951 +n13138155 +n13138308 +n13138658 +n13138842 +n13139055 +n13139321 +n13139482 +n13139647 +n13139837 +n13140049 +n13140367 +n13141141 +n13141415 +n13141564 +n13141797 +n13141972 +n13142182 +n13142504 +n13142907 +n13143285 +n13143758 +n13144084 +n13145040 +n13145250 +n13145444 +n13146403 +n13146583 +n13146928 +n13147153 +n13147270 
+n13147386 +n13147532 +n13147689 +n13147918 +n13148208 +n13148384 +n13149296 +n13149970 +n13150378 +n13150592 +n13150894 +n13151082 +n13152339 +n13154388 +n13154494 +n13154841 +n13155095 +n13155305 +n13155611 +n13156986 +n13157137 +n13157346 +n13157481 +n13157684 +n13157971 +n13158167 +n13158512 +n13158605 +n13158714 +n13158815 +n13159357 +n13159691 +n13159890 +n13160116 +n13160254 +n13160365 +n13160604 +n13160831 +n13160938 +n13161151 +n13161254 +n13161904 +n13163553 +n13163649 +n13163991 +n13164501 +n13170840 +n13171210 +n13171797 +n13172923 +n13173132 +n13173259 +n13173488 +n13173697 +n13173882 +n13174354 +n13174670 +n13174823 +n13175682 +n13176363 +n13176714 +n13177048 +n13177529 +n13177768 +n13177884 +n13178284 +n13178707 +n13179056 +n13179804 +n13180534 +n13180875 +n13181055 +n13181244 +n13181406 +n13181811 +n13182164 +n13182338 +n13182799 +n13182937 +n13183056 +n13183489 +n13184394 +n13185269 +n13185658 +n13186388 +n13186546 +n13187367 +n13188096 +n13188268 +n13188462 +n13188767 +n13190060 +n13190747 +n13191148 +n13191620 +n13191884 +n13192625 +n13193143 +n13193269 +n13193466 +n13193642 +n13193856 +n13194036 +n13194212 +n13194572 +n13194758 +n13194918 +n13195341 +n13195761 +n13196003 +n13196234 +n13196369 +n13196738 +n13197274 +n13197507 +n13198054 +n13198482 +n13198914 +n13199717 +n13199970 +n13200193 +n13200542 +n13200651 +n13200986 +n13201423 +n13201566 +n13201969 +n13202125 +n13202355 +n13202602 +n13205058 +n13205249 +n13206178 +n13206817 +n13207094 +n13207335 +n13207572 +n13207736 +n13207923 +n13208302 +n13208705 +n13208965 +n13209129 +n13209270 +n13209460 +n13209808 +n13210350 +n13210597 +n13211020 +n13211790 +n13212025 +n13212175 +n13212379 +n13212559 +n13213066 +n13213397 +n13213577 +n13214217 +n13214340 +n13214485 +n13215258 +n13215586 +n13217005 +n13219422 +n13219833 +n13219976 +n13220122 +n13220355 +n13220525 +n13220663 +n13221529 +n13222877 +n13222985 +n13223090 +n13223588 +n13223710 +n13223843 +n13224673 +n13224922 +n13225244 +n13225365 +n13225617 +n13226320 +n13226871 +n13228017 +n13228536 +n13229543 +n13229951 +n13230190 +n13230662 +n13230843 +n13231078 +n13231678 +n13231919 +n13232106 +n13232363 +n13232779 +n13233727 +n13234114 +n13234519 +n13234678 +n13234857 +n13235011 +n13235159 +n13235319 +n13235503 +n13235766 +n13236100 +n13237188 +n13237508 +n13238375 +n13238654 +n13238988 +n13239177 +n13239736 +n13239921 +n13240362 +n13252672 +n13354021 +n13555775 +n13579829 +n13650447 +n13653902 +n13862407 +n13862552 +n13862780 +n13863020 +n13863186 +n13863473 +n13863771 +n13864035 +n13864153 +n13864965 +n13865298 +n13865483 +n13865904 +n13866144 +n13866626 +n13866827 +n13867005 +n13867492 +n13868248 +n13868371 +n13868515 +n13868944 +n13869045 +n13869547 +n13869788 +n13869896 +n13871717 +n13872592 +n13872822 +n13873361 +n13873502 +n13873917 +n13874073 +n13874558 +n13875392 +n13875571 +n13875884 +n13876561 +n13877547 +n13877667 +n13878306 +n13879049 +n13879320 +n13879816 +n13880199 +n13880415 +n13880551 +n13880704 +n13880994 +n13881512 +n13881644 +n13882201 +n13882276 +n13882487 +n13882563 +n13882639 +n13882713 +n13882961 +n13883603 +n13883763 +n13884261 +n13884384 +n13884930 +n13885011 +n13886260 +n13888491 +n13889066 +n13889331 +n13891547 +n13891937 +n13893786 +n13894154 +n13894434 +n13895262 +n13896100 +n13896217 +n13897198 +n13897528 +n13897996 +n13898207 +n13898315 +n13898645 +n13899735 +n13900287 +n13900422 +n13901211 +n13901321 +n13901423 +n13901490 +n13901858 +n13902048 +n13902336 +n13902793 +n13903079 +n13905121 +n13905275 +n13905792 +n13906484 +n13906669 +n13906767 
+n13906936 +n13907272 +n13908201 +n13908580 +n13911045 +n13912260 +n13912540 +n13914141 +n13914265 +n13914608 +n13915023 +n13915113 +n13915209 +n13915305 +n13915999 +n13916363 +n13916721 +n13917690 +n13917785 +n13918274 +n13918387 +n13918717 +n13919547 +n13919919 +n13926786 +n14131950 +n14175579 +n14564779 +n14582716 +n14583400 +n14585392 +n14592309 +n14603798 +n14633206 +n14685296 +n14696793 +n14698884 +n14714645 +n14720833 +n14765422 +n14785065 +n14786943 +n14804958 +n14810561 +n14820180 +n14821852 +n14844693 +n14853210 +n14858292 +n14867545 +n14891255 +n14899328 +n14900184 +n14900342 +n14908027 +n14909584 +n14914945 +n14915184 +n14919819 +n14938389 +n14941787 +n14942411 +n14973585 +n14974264 +n14975598 +n14976759 +n14976871 +n14977188 +n14977504 +n14992287 +n14993378 +n15005577 +n15006012 +n15019030 +n15048888 +n15060326 +n15060688 +n15062057 +n15067877 +n15075141 +n15086247 +n15089258 +n15089472 +n15089645 +n15089803 +n15090065 +n15090238 +n15090742 +n15091129 +n15091304 +n15091473 +n15091669 +n15091846 +n15092059 +n15092227 +n15092409 +n15092650 +n15092751 +n15092942 +n15093049 +n15093137 +n15093298 +n15102359 +n15102455 +n15102894 From fd9061dbf74df86431128ce0ca0547858c7600ca Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 4 Jan 2021 12:16:06 -0800 Subject: [PATCH 06/20] Remove debug print from train.py --- train.py | 1 - 1 file changed, 1 deletion(-) diff --git a/train.py b/train.py index 1de70dcc..4bb68399 100755 --- a/train.py +++ b/train.py @@ -332,7 +332,6 @@ def main(): scriptable=args.torchscript, checkpoint_path=args.initial_checkpoint) - print(model) if args.local_rank == 0: _logger.info('Model %s created, param count: %d' % (args.model, sum([m.numel() for m in model.parameters()]))) From 20516abc18d12104ffebde3cc6f42c1713047bb5 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 4 Jan 2021 23:21:39 -0800 Subject: [PATCH 07/20] Fix some broken tests for ResNetV2 BiT models --- tests/test_models.py | 2 +- timm/models/resnetv2.py | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/test_models.py b/tests/test_models.py index a62625d9..f406555a 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -15,7 +15,7 @@ if hasattr(torch._C, '_jit_set_profiling_executor'): if 'GITHUB_ACTIONS' in os.environ: # and 'Linux' in platform.system(): # GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models - EXCLUDE_FILTERS = ['*efficientnet_l2*', '*resnext101_32x48d', 'vit_*'] + EXCLUDE_FILTERS = ['*efficientnet_l2*', '*resnext101_32x48d', 'vit_*', '*in21k', '*152x4_bitm'] else: EXCLUDE_FILTERS = ['vit_*'] MAX_FWD_SIZE = 384 diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py index 3ce0605a..731f5dca 100644 --- a/timm/models/resnetv2.py +++ b/timm/models/resnetv2.py @@ -331,7 +331,7 @@ def create_stem(in_chs, out_chs, stem_type='', preact=True, conv_layer=None, nor if 'fixed' in stem_type: # 'fixed' SAME padding approximation that is used in BiT models - stem['pad'] = nn.ConstantPad2d(1, 0) + stem['pad'] = nn.ConstantPad2d(1, 0.) 
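+        # e.g. a 224x224 input padded to 226x226 then max pooled 3x3/s2 below gives
+        # 112x112, matching the output size of TF 'SAME' padding (border values can
+        # differ slightly, hence 'approximation')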
stem['pool'] = nn.MaxPool2d(kernel_size=3, stride=2, padding=0) elif 'same' in stem_type: # full, input size based 'SAME' padding, used in ViT Hybrid model @@ -421,7 +421,12 @@ class ResNetV2(nn.Module): import numpy as np weights = np.load(checkpoint_path) with torch.no_grad(): - self.stem.conv.weight.copy_(tf2th(weights[f'{prefix}root_block/standardized_conv2d/kernel'])) + stem_conv_w = tf2th(weights[f'{prefix}root_block/standardized_conv2d/kernel']) + if self.stem.conv.weight.shape[1] == 1: + self.stem.conv.weight.copy_(stem_conv_w.sum(dim=1, keepdim=True)) + # FIXME handle > 3 in_chans? + else: + self.stem.conv.weight.copy_(stem_conv_w) self.norm.weight.copy_(tf2th(weights[f'{prefix}group_norm/gamma'])) self.norm.bias.copy_(tf2th(weights[f'{prefix}group_norm/beta'])) self.head.fc.weight.copy_(tf2th(weights[f'{prefix}head/conv2d/kernel'])) From 855d6cc2171f309cd819e8ec1cafd117685b1be8 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 15 Jan 2021 17:26:20 -0800 Subject: [PATCH 08/20] More dataset work including factories and a tensorflow datasets (TFDS) wrapper * Add parser/dataset factory methods for more flexible dataset & parser creation * Add dataset parser that wraps TFDS image classification datasets * Tweak num_classes handling bug for 21k models * Add initial deit models so they can be benchmarked in next csv results runs --- timm/data/__init__.py | 14 +- timm/data/dataset.py | 52 ++++++- timm/data/dataset_factory.py | 29 ++++ timm/data/loader.py | 17 ++- timm/data/parsers/__init__.py | 5 +- timm/data/parsers/parser_factory.py | 29 ++++ timm/data/parsers/parser_tfds.py | 201 ++++++++++++++++++++++++++++ timm/models/helpers.py | 6 +- timm/models/resnetv2.py | 12 +- timm/models/vision_transformer.py | 84 ++++++++++-- train.py | 35 +++-- validate.py | 12 +- 12 files changed, 431 insertions(+), 65 deletions(-) create mode 100644 timm/data/dataset_factory.py create mode 100644 timm/data/parsers/parser_factory.py create mode 100644 timm/data/parsers/parser_tfds.py diff --git a/timm/data/__init__.py b/timm/data/__init__.py index 1dd8ac57..7d3cb2b4 100644 --- a/timm/data/__init__.py +++ b/timm/data/__init__.py @@ -1,10 +1,12 @@ -from .constants import * +from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\ + rand_augment_transform, auto_augment_transform from .config import resolve_data_config -from .dataset import ImageDataset, AugMixDataset -from .transforms import * +from .constants import * +from .dataset import ImageDataset, IterableImageDataset, AugMixDataset +from .dataset_factory import create_dataset from .loader import create_loader -from .transforms_factory import create_transform from .mixup import Mixup, FastCollateMixup -from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\ - rand_augment_transform, auto_augment_transform +from .parsers import create_parser from .real_labels import RealLabelsImagenet +from .transforms import * +from .transforms_factory import create_transform \ No newline at end of file diff --git a/timm/data/dataset.py b/timm/data/dataset.py index 42a46eef..a7c5ebed 100644 --- a/timm/data/dataset.py +++ b/timm/data/dataset.py @@ -9,7 +9,7 @@ import logging from PIL import Image -from .parsers import ParserImageFolder, ParserImageTar, ParserImageClassInTar +from .parsers import create_parser _logger = logging.getLogger(__name__) @@ -27,11 +27,8 @@ class ImageDataset(data.Dataset): load_bytes=False, transform=None, ): - if parser is None: - if os.path.isfile(root) and 
os.path.splitext(root)[1] == '.tar':
-                parser = ParserImageTar(root, class_map=class_map)
-            else:
-                parser = ParserImageFolder(root, class_map=class_map)
+        if parser is None or isinstance(parser, str):
+            parser = create_parser(parser or '', root=root, class_map=class_map)
         self.parser = parser
         self.load_bytes = load_bytes
         self.transform = transform
@@ -65,6 +62,49 @@ class ImageDataset(data.Dataset):
         return self.parser.filenames(basename, absolute)
 
 
+class IterableImageDataset(data.IterableDataset):
+
+    def __init__(
+            self,
+            root,
+            parser=None,
+            split='train',
+            is_training=False,
+            batch_size=None,
+            class_map='',
+            load_bytes=False,
+            transform=None,
+    ):
+        assert parser is not None
+        if isinstance(parser, str):
+            self.parser = create_parser(
+                parser, root=root, split=split, is_training=is_training, batch_size=batch_size)
+        else:
+            self.parser = parser
+        self.transform = transform
+        self._consecutive_errors = 0
+
+    def __iter__(self):
+        for img, target in self.parser:
+            if self.transform is not None:
+                img = self.transform(img)
+            if target is None:
+                target = torch.tensor(-1, dtype=torch.long)
+            yield img, target
+
+    def __len__(self):
+        if hasattr(self.parser, '__len__'):
+            return len(self.parser)
+        else:
+            return 0
+
+    def filename(self, index, basename=False, absolute=False):
+        assert False, 'Filename lookup by index not supported, use filenames().'
+
+    def filenames(self, basename=False, absolute=False):
+        return self.parser.filenames(basename, absolute)
+
+
 class AugMixDataset(torch.utils.data.Dataset):
     """Dataset wrapper to perform AugMix or other clean/augmentation mixes"""
diff --git a/timm/data/dataset_factory.py b/timm/data/dataset_factory.py
new file mode 100644
index 00000000..b2c9688f
--- /dev/null
+++ b/timm/data/dataset_factory.py
@@ -0,0 +1,29 @@
+import os
+
+from .dataset import IterableImageDataset, ImageDataset
+
+
+def _search_split(root, split):
+    # look for sub-folder with name of split in root and use that if it exists
+    split_name = split.split('[')[0]
+    try_root = os.path.join(root, split_name)
+    if os.path.exists(try_root):
+        return try_root
+    if split_name == 'validation':
+        try_root = os.path.join(root, 'val')
+        if os.path.exists(try_root):
+            return try_root
+    return root
+
+
+def create_dataset(name, root, split='validation', search_split=True, is_training=False, batch_size=None, **kwargs):
+    name = name.lower()
+    if name.startswith('tfds'):
+        ds = IterableImageDataset(
+            root, parser=name, split=split, is_training=is_training, batch_size=batch_size, **kwargs)
+    else:
+        # FIXME support more advanced split cfg for ImageFolder/Tar datasets in the future
+        if search_split and os.path.isdir(root):
+            root = _search_split(root, split)
+        ds = ImageDataset(root, parser=name, **kwargs)
+    return ds
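
The split handling in create_dataset is easiest to see with a couple of concrete calls. A
minimal usage sketch follows; the paths and the TFDS dataset name are hypothetical examples,
not part of this patch:

    from timm.data import create_dataset

    # Folder dataset: with /data/birds/train and /data/birds/val on disk,
    # split='validation' finds no 'validation' sub-folder and falls back to 'val'.
    train_ds = create_dataset('', root='/data/birds', split='train')
    eval_ds = create_dataset('', root='/data/birds', split='validation')

    # A 'tfds/' prefixed name routes to IterableImageDataset (and ParserTfds below);
    # batch_size is required when is_training=True so batch counts can be rounded/padded.
    tfds_ds = create_dataset(
        'tfds/cifar10', root='/data/tfds', split='train', is_training=True, batch_size=64)
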
diff --git a/timm/data/loader.py b/timm/data/loader.py
index 317f77df..76144669 100644
--- a/timm/data/loader.py
+++ b/timm/data/loader.py
@@ -153,7 +153,8 @@ def create_loader(
         pin_memory=False,
         fp16=False,
         tf_preprocessing=False,
-        use_multi_epochs_loader=False
+        use_multi_epochs_loader=False,
+        persistent_workers=True,
 ):
     re_num_splits = 0
     if re_split:
@@ -183,7 +184,7 @@
     )
 
     sampler = None
-    if distributed:
+    if distributed and not isinstance(dataset, torch.utils.data.IterableDataset):
         if is_training:
             sampler = torch.utils.data.distributed.DistributedSampler(dataset)
         else:
@@ -199,16 +200,20 @@
     if use_multi_epochs_loader:
         loader_class = MultiEpochsDataLoader
 
-    loader = loader_class(
-        dataset,
+    loader_args = dict(
         batch_size=batch_size,
-        shuffle=sampler is None and is_training,
+        shuffle=not isinstance(dataset, torch.utils.data.IterableDataset) and sampler is None and is_training,
         num_workers=num_workers,
         sampler=sampler,
         collate_fn=collate_fn,
         pin_memory=pin_memory,
         drop_last=is_training,
-    )
+        persistent_workers=persistent_workers)
+    try:
+        loader = loader_class(dataset, **loader_args)
+    except TypeError:
+        loader_args.pop('persistent_workers')  # only supported in PyTorch 1.7+
+        loader = loader_class(dataset, **loader_args)
     if use_prefetcher:
         prefetch_re_prob = re_prob if is_training and not no_aug else 0.
         loader = PrefetchLoader(
diff --git a/timm/data/parsers/__init__.py b/timm/data/parsers/__init__.py
index 4ecb3a22..eeb44e37 100644
--- a/timm/data/parsers/__init__.py
+++ b/timm/data/parsers/__init__.py
@@ -1,4 +1 @@
-from .parser import Parser
-from .parser_image_folder import ParserImageFolder
-from .parser_image_tar import ParserImageTar
-from .parser_image_class_in_tar import ParserImageClassInTar
\ No newline at end of file
+from .parser_factory import create_parser
diff --git a/timm/data/parsers/parser_factory.py b/timm/data/parsers/parser_factory.py
new file mode 100644
index 00000000..ce9aa35f
--- /dev/null
+++ b/timm/data/parsers/parser_factory.py
@@ -0,0 +1,29 @@
+import os
+
+from .parser_image_folder import ParserImageFolder
+from .parser_image_tar import ParserImageTar
+from .parser_image_class_in_tar import ParserImageClassInTar
+
+
+def create_parser(name, root, split='train', **kwargs):
+    name = name.lower()
+    name = name.split('/', 2)
+    prefix = ''
+    if len(name) > 1:
+        prefix = name[0]
+    name = name[-1]
+
+    # FIXME improve the selection; right now it's just the tfds prefix or the fallback path, options
+    # to explicitly select the other parsers will be needed shortly
+    if prefix == 'tfds':
+        from .parser_tfds import ParserTfds  # defer tensorflow import
+        parser = ParserTfds(root, name, split=split, shuffle=kwargs.pop('shuffle', False), **kwargs)
+    else:
+        assert os.path.exists(root)
+        # default fallback path (backwards compat), use image tar if root is a .tar file, otherwise image folder
+        # FIXME support split here, in parser?
+        if os.path.isfile(root) and os.path.splitext(root)[1] == '.tar':
+            parser = ParserImageTar(root, **kwargs)
+        else:
+            parser = ParserImageFolder(root, **kwargs)
+    return parser
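
For reference, a short sketch of how create_parser routes names; the paths and dataset name
here are illustrative only:

    from timm.data import create_parser

    # no prefix: fall back on path inspection
    folder_parser = create_parser('', root='/data/imagenet/train')      # -> ParserImageFolder
    tar_parser = create_parser('', root='/data/imagenet_train.tar')     # -> ParserImageTar

    # 'tfds/' prefix: defer to the TFDS wrapper added below
    tfds_parser = create_parser(
        'tfds/oxford_iiit_pet', root='/data/tfds', split='train', is_training=True, batch_size=32)
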
diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py
new file mode 100644
index 00000000..39a9243a
--- /dev/null
+++ b/timm/data/parsers/parser_tfds.py
@@ -0,0 +1,201 @@
+""" Dataset parser interface that wraps TFDS datasets
+
+Wraps many (most?) TFDS image-classification datasets
+from https://github.com/tensorflow/datasets
+https://www.tensorflow.org/datasets/catalog/overview#image_classification
+
+Hacked together by / Copyright 2020 Ross Wightman
+"""
+import os
+import io
+import math
+import torch
+import torch.distributed as dist
+from PIL import Image
+
+try:
+    import tensorflow as tf
+    tf.config.set_visible_devices([], 'GPU')  # Hands off my GPU! (or pip install tensorflow-cpu)
+    import tensorflow_datasets as tfds
+except ImportError as e:
+    print(e)
+    print("Please install the tensorflow-datasets package (`pip install tensorflow-datasets`).")
+    exit(1)
+from .parser import Parser
+
+
+MAX_TP_SIZE = 8  # maximum TF threadpool size, the TF pipeline is only doing jpeg decodes and queuing activities
+SHUFFLE_SIZE = 16834  # samples to shuffle in DS queue
+PREFETCH_SIZE = 4096  # samples to prefetch
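+
+# Sharding note: in _lazy_init below, each (distributed replica, dataloader worker)
+# pair becomes one TF input pipeline. With illustrative numbers, 2 replicas x 4
+# workers = 8 pipelines; rank 1 / worker 2 gets input_pipeline_id 1 * 4 + 2 = 6 and
+# reads roughly 1/8th of the underlying TFRecord shards, so the dataset needs at
+# least 8 shards for this split to be meaningful.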
+        before it is passed to the dataloader.
+        """
+        worker_info = torch.utils.data.get_worker_info()
+
+        # setup input context to split dataset across distributed processes
+        split = self.split
+        num_workers = 1
+        worker_id = 0  # default when not running in a dataloader worker process
+        if worker_info is not None:
+            self.worker_info = worker_info
+            num_workers = worker_info.num_workers
+            worker_id = worker_info.id
+
+        # FIXME I need to spend more time figuring out the best way to distribute/split data across
+        # combo of distributed replicas + dataloader worker processes
+        """
+        InputContext will assign a subset of the underlying TFRecord files to each 'pipeline' if used.
+        My understanding is that when using split, the underlying TFRecord files will shuffle
+        (shuffle_files=True) between the splits each iteration, but that could be wrong.
+        Possible split options include:
+          * InputContext for both distributed & worker processes (current)
+          * InputContext for distributed and sub-splits for worker processes
+          * sub-splits for both
+        """
+        # split_size = self.num_samples // num_workers
+        # start = worker_id * split_size
+        # if worker_id == num_workers - 1:
+        #     split = split + '[{}:]'.format(start)
+        # else:
+        #     split = split + '[{}:{}]'.format(start, start + split_size)
+
+        input_context = tf.distribute.InputContext(
+            num_input_pipelines=self.dist_num_replicas * num_workers,
+            input_pipeline_id=self.dist_rank * num_workers + worker_id,
+            num_replicas_in_sync=self.dist_num_replicas  # FIXME does this have any impact?
+        )
+
+        read_config = tfds.ReadConfig(input_context=input_context)
+        ds = self.builder.as_dataset(split=split, shuffle_files=self.shuffle, read_config=read_config)
+        # avoid overloading threading w/ combo of TF ds threads + PyTorch workers
+        ds.options().experimental_threading.private_threadpool_size = max(1, MAX_TP_SIZE // num_workers)
+        ds.options().experimental_threading.max_intra_op_parallelism = 1
+        if self.is_training:
+            # to prevent excessive drop_last batch behaviour w/ IterableDatasets
+            # see warnings at https://pytorch.org/docs/stable/data.html#multi-process-data-loading
+            ds = ds.repeat()  # allow wrap around and break iteration manually
+        if self.shuffle:
+            ds = ds.shuffle(min(self.num_samples // self._num_pipelines, SHUFFLE_SIZE), seed=0)
+        ds = ds.prefetch(min(self.num_samples // self._num_pipelines, PREFETCH_SIZE))
+        self.ds = tfds.as_numpy(ds)
+
+    def __iter__(self):
+        if self.ds is None:
+            self._lazy_init()
+        # compute a rounded up sample count that is used to:
+        # 1. make batches even across workers & replicas in distributed validation.
+        #    This adds extra samples and will slightly alter validation results.
+        # 2. determine the loop ending condition in training w/ repeat enabled so that only full batch_size
+        #    batches are produced (the underlying tfds iter wraps around)
+        target_sample_count = math.ceil(self.num_samples / self._num_pipelines)
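+        # e.g. (hypothetical numbers) a 50000 sample split read across 2 replicas x 4 workers has
+        # _num_pipelines = 8, so each worker pipeline targets math.ceil(50000 / 8) = 6250 samples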
+        if self.is_training:
+            # round up to nearest batch_size per worker-replica
+            target_sample_count = math.ceil(target_sample_count / self.batch_size) * self.batch_size
+        sample_count = 0
+        for sample in self.ds:
+            img = Image.fromarray(sample['image'], mode='RGB')
+            yield img, sample['label']
+            sample_count += 1
+            if self.is_training and sample_count >= target_sample_count:
+                # Need to break out of loop when repeat() is enabled for training w/ oversampling;
+                # this results in 'extra' samples per epoch but seems more desirable than dropping
+                # up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes)
+                break
+        if not self.is_training and self.dist_num_replicas > 1 and 0 < sample_count < target_sample_count:
+            # Validation batch padding is only done for distributed training, where results are reduced across nodes.
+            # For the single process case it won't matter if workers return different batch sizes.
+            # FIXME this needs more testing, possible for sharding / split api to cause differences of > 1?
+            assert target_sample_count - sample_count == 1  # should only be off by 1 or sharding is not optimal
+            yield img, sample['label']  # yield prev sample again
+            sample_count += 1
+
+    @property
+    def _num_workers(self):
+        return 1 if self.worker_info is None else self.worker_info.num_workers
+
+    @property
+    def _num_pipelines(self):
+        return self._num_workers * self.dist_num_replicas
+
+    def __len__(self):
+        # this is just an estimate and does not factor in extra samples added to pad batches based on
+        # complete worker & replica info (not available until init in dataloader)
+        return math.ceil(self.num_samples / self.dist_num_replicas)
+
+    def _filename(self, index, basename=False, absolute=False):
+        assert False, "Not supported"  # no random access to samples
+
+    def filenames(self, basename=False, absolute=False):
+        """ Return all filenames in dataset, overrides base"""
+        if self.ds is None:
+            self._lazy_init()
+        names = []
+        for sample in self.ds:
+            if len(names) >= self.num_samples:
+                break  # safety for ds.repeat() case
+            if 'file_name' in sample:
+                name = sample['file_name']
+            elif 'filename' in sample:
+                name = sample['filename']
+            elif 'id' in sample:
+                name = sample['id']
+            else:
+                assert False, "No supported name field present"
+            names.append(name)
+        return names
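
A minimal sketch of driving the TFDS wrapper above directly, for reference; the data_dir and
dataset name below are hypothetical, and the dataset must already be prepared with the tfds CLI:

    from timm.data.parsers.parser_tfds import ParserTfds

    parser = ParserTfds(root='/data/tfds', name='oxford_iiit_pet', split='train')
    print(len(parser))           # an estimate, per __len__ above
    for img, label in parser:    # no random access; iterating yields (PIL image, integer label)
        print(img.size, label)
        break

In normal use the parser is wrapped by a timm dataset/loader rather than iterated directly.
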
diff --git a/timm/models/helpers.py b/timm/models/helpers.py
index 2a15e528..96f551e3 100644
--- a/timm/models/helpers.py
+++ b/timm/models/helpers.py
@@ -11,7 +11,11 @@ from typing import Callable
 
 import torch
 import torch.nn as nn
-from torch.hub import get_dir, load_state_dict_from_url, download_url_to_file, urlparse, HASH_REGEX
+from torch.hub import load_state_dict_from_url, download_url_to_file, urlparse, HASH_REGEX
+try:
+    from torch.hub import get_dir
+except ImportError:
+    from torch.hub import _get_torch_home as get_dir
 
 from .features import FeatureListNet, FeatureDictNet, FeatureHookNet
 from .layers import Conv2dSame, Linear
diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py
index 731f5dca..f51d6357 100644
--- a/timm/models/resnetv2.py
+++ b/timm/models/resnetv2.py
@@ -507,42 +507,42 @@ def resnetv2_152x4_bitm(pretrained=False, **kwargs):
 @register_model
 def resnetv2_50x1_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_50x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_50x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 4, 6, 3], width_factor=1, stem_type='fixed', **kwargs)
 
 
 @register_model
 def resnetv2_50x3_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_50x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_50x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 4, 6, 3], width_factor=3, stem_type='fixed', **kwargs)
 
 
 @register_model
 def resnetv2_101x1_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_101x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_101x1_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 4, 23, 3], width_factor=1, stem_type='fixed', **kwargs)
 
 
 @register_model
 def resnetv2_101x3_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_101x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_101x3_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 4, 23, 3], width_factor=3, stem_type='fixed', **kwargs)
 
 
 @register_model
 def resnetv2_152x2_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_152x2_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_152x2_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 8, 36, 3], width_factor=2, stem_type='fixed', **kwargs)
 
 
 @register_model
 def resnetv2_152x4_bitm_in21k(pretrained=False, **kwargs):
     return _create_resnetv2(
-        'resnetv2_152x4_bitm_in21k', pretrained=pretrained, num_classes=kwargs.get('num_classes', 21843),
+        'resnetv2_152x4_bitm_in21k', pretrained=pretrained, num_classes=kwargs.pop('num_classes', 21843),
         layers=[3, 8, 36, 3], width_factor=4, stem_type='fixed', **kwargs)
 
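The kwargs.get -> kwargs.pop change in the hunks above is a real fix, not a cleanup: with get(),
num_classes stays in **kwargs and is passed to _create_resnetv2 a second time. A minimal
illustration, with f standing in for the model entrypoint:

    def f(num_classes, **kwargs):
        return num_classes

    kwargs = {'num_classes': 1000}
    try:
        f(num_classes=kwargs.get('num_classes', 21843), **kwargs)
    except TypeError as e:
        print(e)  # f() got multiple values for keyword argument 'num_classes'
    print(f(num_classes=kwargs.pop('num_classes', 21843), **kwargs))  # OK -> 1000
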
diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index ff5bd676..076010ab 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -5,12 +5,6 @@ A PyTorch implement of Vision Transformers as described in
 
 The official jax code is released and available at https://github.com/google-research/vision_transformer
 
-Status/TODO:
-* Models updated to be compatible with official impl. Args added to support backward compat for old PyTorch weights.
-* Weights ported from official jax impl for 384x384 base and small models, 16x16 and 32x32 patches.
-* Trained (supervised on ImageNet-1k) my custom 'small' patch model to 77.9, 'base' to 79.4 top-1 with this code.
-* Hopefully find time and GPUs for SSL or unsupervised pretraining on OpenImages w/ ImageNet fine-tune in future.
-
 Acknowledgments:
 * The paper authors for releasing code and weights, thanks!
 * I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch ... check it out
@@ -18,6 +12,9 @@ for some einops/einsum fun
 * Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT
 * Bert reference code checks against Huggingface Transformers and Tensorflow Bert
 
+DeiT model defs and weights from https://github.com/facebookresearch/deit,
+paper `DeiT: Data-efficient Image Transformers` - https://arxiv.org/abs/2012.12877
+
 Hacked together by / Copyright 2020 Ross Wightman
 """
 import torch
@@ -50,7 +47,7 @@ default_cfgs = {
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/vit_small_p16_224-15ec54c9.pth',
     ),
 
-    # patch models (weights ported from official JAX impl)
+    # patch models (weights ported from official Google JAX impl)
     'vit_base_patch16_224': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_p16_224-80ecf9dd.pth',
         mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
@@ -77,7 +74,7 @@ default_cfgs = {
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_large_p32_384-9b920ba8.pth',
         input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0),
 
-    # patch models, imagenet21k (weights ported from official JAX impl)
+    # patch models, imagenet21k (weights ported from official Google JAX impl)
     'vit_base_patch16_224_in21k': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_patch16_224_in21k-e5005f0a.pth',
         num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
@@ -94,7 +91,7 @@ default_cfgs = {
         url='',  # FIXME I have weights for this but > 2GB limit for github release binaries
         num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
 
-    # hybrid models (weights ported from official JAX impl)
+    # hybrid models (weights ported from official Google JAX impl)
     'vit_base_resnet50_224_in21k': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth',
         mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9),
@@ -107,6 +104,17 @@ default_cfgs = {
     'vit_small_resnet50d_s3_224': _cfg(),
     'vit_base_resnet26d_224': _cfg(),
     'vit_base_resnet50d_224': _cfg(),
+
+    # deit models (FB weights)
+    'deit_tiny_patch16_224': _cfg(
+        
url='https://dl.fbaipublicfiles.com/deit/deit_tiny_patch16_224-a1311bcf.pth'), + 'deit_small_patch16_224': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_small_patch16_224-cd65a155.pth'), + 'deit_base_patch16_224': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_224-b5f2ef4d.pth',), + 'deit_base_patch16_384': _cfg( + url='', # no weights yet + input_size=(3, 384, 384)), } @@ -433,7 +441,7 @@ def vit_large_patch16_384(pretrained=False, **kwargs): @register_model def vit_base_patch16_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.get('num_classes', 21843) + num_classes = kwargs.pop('num_classes', 21843) model = VisionTransformer( patch_size=16, num_classes=num_classes, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) @@ -446,7 +454,7 @@ def vit_base_patch16_224_in21k(pretrained=False, **kwargs): @register_model def vit_base_patch32_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.get('num_classes', 21843) + num_classes = kwargs.pop('num_classes', 21843) model = VisionTransformer( img_size=224, num_classes=num_classes, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) @@ -458,7 +466,7 @@ def vit_base_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_large_patch16_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.get('num_classes', 21843) + num_classes = kwargs.pop('num_classes', 21843) model = VisionTransformer( patch_size=16, num_classes=num_classes, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, representation_size=1024, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) @@ -482,7 +490,7 @@ def vit_large_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.get('num_classes', 21843) + num_classes = kwargs.pop('num_classes', 21843) model = VisionTransformer( img_size=224, patch_size=14, num_classes=num_classes, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, qkv_bias=True, representation_size=1280, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) @@ -495,7 +503,7 @@ def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): @register_model def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head - num_classes = kwargs.get('num_classes', 21843) + num_classes = kwargs.pop('num_classes', 21843) backbone = ResNetV2( layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') model = VisionTransformer( @@ -559,3 +567,51 @@ def vit_base_resnet50d_224(pretrained=False, **kwargs): img_size=224, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) model.default_cfg = default_cfgs['vit_base_resnet50d_224'] return model + + +@register_model +def deit_tiny_patch16_224(pretrained=False, **kwargs): + model = VisionTransformer( + patch_size=16, embed_dim=192, depth=12, num_heads=3, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['deit_tiny_patch16_224'] + if pretrained: + load_pretrained( + model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) + return model + + +@register_model +def 
deit_small_patch16_224(pretrained=False, **kwargs): + model = VisionTransformer( + patch_size=16, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['deit_small_patch16_224'] + if pretrained: + load_pretrained( + model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) + return model + + +@register_model +def deit_base_patch16_224(pretrained=False, **kwargs): + model = VisionTransformer( + patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['deit_base_patch16_224'] + if pretrained: + load_pretrained( + model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) + return model + + +@register_model +def deit_base_patch16_384(pretrained=False, **kwargs): + model = VisionTransformer( + img_size=384, patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model.default_cfg = default_cfgs['deit_base_patch16_384'] + if pretrained: + load_pretrained( + model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) + return model diff --git a/train.py b/train.py index 4bb68399..b31199f9 100755 --- a/train.py +++ b/train.py @@ -28,7 +28,7 @@ import torch.nn as nn import torchvision.utils from torch.nn.parallel import DistributedDataParallel as NativeDDP -from timm.data import ImageDataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset +from timm.data import create_dataset, create_loader, resolve_data_config, Mixup, FastCollateMixup, AugMixDataset from timm.models import create_model, resume_checkpoint, load_checkpoint, convert_splitbn_model from timm.utils import * from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy, JsdCrossEntropy @@ -64,8 +64,14 @@ parser.add_argument('-c', '--config', default='', type=str, metavar='FILE', parser = argparse.ArgumentParser(description='PyTorch ImageNet Training') # Dataset / Model parameters -parser.add_argument('data', metavar='DIR', +parser.add_argument('data_dir', metavar='DIR', help='path to dataset') +parser.add_argument('--dataset', '-d', metavar='NAME', default='', + help='dataset type (default: ImageFolder/ImageTar if empty)') +parser.add_argument('--train-split', metavar='NAME', default='train', + help='dataset train split (default: train)') +parser.add_argument('--val-split', metavar='NAME', default='validation', + help='dataset validation split (default: validation)') parser.add_argument('--model', default='resnet101', type=str, metavar='MODEL', help='Name of model to train (default: "countception"') parser.add_argument('--pretrained', action='store_true', default=False, @@ -437,19 +443,10 @@ def main(): _logger.info('Scheduled epochs: {}'.format(num_epochs)) # create the train and eval datasets - train_dir = os.path.join(args.data, 'train') - if not os.path.exists(train_dir): - _logger.error('Training folder does not exist at: {}'.format(train_dir)) - exit(1) - dataset_train = ImageDataset(train_dir) - - eval_dir = os.path.join(args.data, 'val') - if not os.path.isdir(eval_dir): - eval_dir = os.path.join(args.data, 'validation') - if not os.path.isdir(eval_dir): - _logger.error('Validation folder does not exist at: {}'.format(eval_dir)) - exit(1) - dataset_eval = 
ImageDataset(eval_dir) + dataset_train = create_dataset( + args.dataset, root=args.data_dir, split=args.train_split, is_training=True, batch_size=args.batch_size) + dataset_eval = create_dataset( + args.dataset, root=args.data_dir, split=args.val_split, is_training=False, batch_size=args.batch_size) # setup mixup / cutmix collate_fn = None @@ -553,10 +550,10 @@ def main(): try: for epoch in range(start_epoch, num_epochs): - if args.distributed: - loader_train.sampler.set_epoch(epoch) + if args.distributed and hasattr(loader_train.sampler, 'set_epoch'): + loader_train.set_epoch(epoch) - train_metrics = train_epoch( + train_metrics = train_one_epoch( epoch, model, loader_train, optimizer, train_loss_fn, args, lr_scheduler=lr_scheduler, saver=saver, output_dir=output_dir, amp_autocast=amp_autocast, loss_scaler=loss_scaler, model_ema=model_ema, mixup_fn=mixup_fn) @@ -594,7 +591,7 @@ def main(): _logger.info('*** Best metric: {0} (epoch {1})'.format(best_metric, best_epoch)) -def train_epoch( +def train_one_epoch( epoch, model, loader, optimizer, loss_fn, args, lr_scheduler=None, saver=None, output_dir='', amp_autocast=suppress, loss_scaler=None, model_ema=None, mixup_fn=None): diff --git a/validate.py b/validate.py index d9ba377c..be977cc2 100755 --- a/validate.py +++ b/validate.py @@ -20,7 +20,7 @@ from collections import OrderedDict from contextlib import suppress from timm.models import create_model, apply_test_time_pool, load_checkpoint, is_model, list_models -from timm.data import ImageDataset, create_loader, resolve_data_config, RealLabelsImagenet +from timm.data import create_dataset, create_loader, resolve_data_config, RealLabelsImagenet from timm.utils import accuracy, AverageMeter, natural_key, setup_default_logging, set_jit_legacy has_apex = False @@ -44,7 +44,11 @@ _logger = logging.getLogger('validate') parser = argparse.ArgumentParser(description='PyTorch ImageNet Validation') parser.add_argument('data', metavar='DIR', help='path to dataset') -parser.add_argument('--model', '-m', metavar='MODEL', default='dpn92', +parser.add_argument('--dataset', '-d', metavar='NAME', default='', + help='dataset type (default: ImageFolder/ImageTar if empty)') +parser.add_argument('--split', metavar='NAME', default='validation', + help='dataset split (default: validation)') +parser.add_argument('--model', '-m', metavar='NAME', default='dpn92', help='model architecture (default: dpn92)') parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', help='number of data loading workers (default: 2)') @@ -159,7 +163,9 @@ def validate(args): criterion = nn.CrossEntropyLoss().cuda() - dataset = ImageDataset(args.data, load_bytes=args.tf_preprocessing, class_map=args.class_map) + dataset = create_dataset( + root=args.data, name=args.dataset, split=args.split, + load_bytes=args.tf_preprocessing, class_map=args.class_map) if args.valid_labels: with open(args.valid_labels, 'r') as f: From 58ccf431502d1850b7613d36220f635c0636d834 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 16 Jan 2021 12:30:02 -0800 Subject: [PATCH 09/20] Add BiT references and knowledge distill links to readme/docs --- README.md | 5 +++++ docs/models.md | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/README.md b/README.md index dc8423c6..a6d21164 100644 --- a/README.md +++ b/README.md @@ -130,6 +130,7 @@ All model architecture families include variants with pretrained weights. 
The ar A full version of the list below with source links can be found in the [documentation](https://rwightman.github.io/pytorch-image-models/models/). +* Big Transfer ResNetV2 (BiT) - https://arxiv.org/abs/1912.11370 * CspNet (Cross-Stage Partial Networks) - https://arxiv.org/abs/1911.11929 * DenseNet - https://arxiv.org/abs/1608.06993 * DLA - https://arxiv.org/abs/1707.06484 @@ -242,6 +243,10 @@ One of the greatest assets of PyTorch is the community and their contributions. * Albumentations - https://github.com/albumentations-team/albumentations * Kornia - https://github.com/kornia/kornia +### Knowledge Distillation +* RepDistiller - https://github.com/HobbitLong/RepDistiller +* torchdistill - https://github.com/yoshitomo-matsubara/torchdistill + ### Metric Learning * PyTorch Metric Learning - https://github.com/KevinMusgrave/pytorch-metric-learning diff --git a/docs/models.md b/docs/models.md index f62cdf5f..5522eb2d 100644 --- a/docs/models.md +++ b/docs/models.md @@ -10,6 +10,10 @@ Most included models have pretrained weights. The weights are either: The validation results for the pretrained weights can be found [here](results.md) +## Big Transfer ResNetV2 (BiT) [[resnetv2.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/resnetv2.py)] +* Paper: `Big Transfer (BiT): General Visual Representation Learning` - https://arxiv.org/abs/1912.11370 +* Reference code: https://github.com/google-research/big_transfer + ## Cross-Stage Partial Networks [[cspnet.py](https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/cspnet.py)] * Paper: `CSPNet: A New Backbone that can Enhance Learning Capability of CNN` - https://arxiv.org/abs/1911.11929 * Reference impl: https://github.com/WongKinYiu/CrossStagePartialNetworks From 0a1668f63e983cad40facb85b7e0bfb0083f2973 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sat, 16 Jan 2021 12:37:10 -0800 Subject: [PATCH 10/20] Update tests --- tests/test_models.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/tests/test_models.py b/tests/test_models.py index f406555a..17d592d4 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -13,18 +13,23 @@ if hasattr(torch._C, '_jit_set_profiling_executor'): torch._C._jit_set_profiling_executor(True) torch._C._jit_set_profiling_mode(False) +# transformer models don't support many of the spatial / feature based model functionalities +NON_STD_FILTERS = ['vit_*', 'deit_*'] + +# exclude models that cause specific test failures if 'GITHUB_ACTIONS' in os.environ: # and 'Linux' in platform.system(): # GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models - EXCLUDE_FILTERS = ['*efficientnet_l2*', '*resnext101_32x48d', 'vit_*', '*in21k', '*152x4_bitm'] + EXCLUDE_FILTERS = ['*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm'] + NON_STD_FILTERS else: - EXCLUDE_FILTERS = ['vit_*'] + EXCLUDE_FILTERS = NON_STD_FILTERS + MAX_FWD_SIZE = 384 MAX_BWD_SIZE = 128 MAX_FWD_FEAT_SIZE = 448 @pytest.mark.timeout(120) -@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS[:-1])) +@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS[:-2])) @pytest.mark.parametrize('batch_size', [1]) def test_model_forward(model_name, batch_size): """Run a single forward pass with each model""" @@ -68,7 +73,7 @@ def test_model_backward(model_name, batch_size): @pytest.mark.timeout(120) -@pytest.mark.parametrize('model_name', 
list_models(exclude_filters=['vit_*']))
+@pytest.mark.parametrize('model_name', list_models(exclude_filters=NON_STD_FILTERS))
 @pytest.mark.parametrize('batch_size', [1])
 def test_model_default_cfgs(model_name, batch_size):
     """Run a single forward pass with each model"""
@@ -121,7 +126,7 @@ if 'GITHUB_ACTIONS' not in os.environ:
             create_model(model_name, pretrained=True, in_chans=in_chans)
 
     @pytest.mark.timeout(120)
-    @pytest.mark.parametrize('model_name', list_models(pretrained=True, exclude_filters=['vit_*']))
+    @pytest.mark.parametrize('model_name', list_models(pretrained=True, exclude_filters=NON_STD_FILTERS))
     @pytest.mark.parametrize('batch_size', [1])
     def test_model_features_pretrained(model_name, batch_size):
         """Create that pretrained weights load when features_only==True."""

From d55bcc0fee3adac6a814beef5be3e871902b4b27 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Sat, 16 Jan 2021 16:32:03 -0800
Subject: [PATCH 11/20] Finish adding stochastic depth support to BiT ResNetV2
 models

---
 timm/models/resnetv2.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/timm/models/resnetv2.py b/timm/models/resnetv2.py
index f51d6357..1acc5eb0 100644
--- a/timm/models/resnetv2.py
+++ b/timm/models/resnetv2.py
@@ -249,6 +249,7 @@ class Bottleneck(nn.Module):
         x = self.norm2(x)
         x = self.conv3(x)
         x = self.norm3(x)
+        x = self.drop_path(x)
         x = self.act3(x + shortcut)
         return x
 
@@ -366,9 +367,10 @@ class ResNetV2(nn.Module):
         prev_chs = stem_chs
         curr_stride = 4
         dilation = 1
+        block_dprs = [x.tolist() for x in torch.linspace(0, drop_path_rate, sum(layers)).split(layers)]
         block_fn = PreActBottleneck if preact else Bottleneck
         self.stages = nn.Sequential()
-        for stage_idx, (d, c) in enumerate(zip(layers, channels)):
+        for stage_idx, (d, c, bdpr) in enumerate(zip(layers, channels, block_dprs)):
             out_chs = make_div(c * wf)
             stride = 1 if stage_idx == 0 else 2
             if curr_stride >= output_stride:
@@ -376,7 +378,7 @@
                 stride = 1
             stage = ResNetStage(
                 prev_chs, out_chs, stride=stride, dilation=dilation, depth=d, avg_down=avg_down,
-                act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer, block_fn=block_fn)
+                act_layer=act_layer, conv_layer=conv_layer, norm_layer=norm_layer, block_dpr=bdpr, block_fn=block_fn)
             prev_chs = out_chs
             curr_stride *= stride
             feat_name = f'stages.{stage_idx}'

From 9d5d4b8df631a826ce5f1b56e6e9c8b3c5ea1973 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Sat, 16 Jan 2021 16:32:21 -0800
Subject: [PATCH 12/20] Fix silly train.py typo during dataset work

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index b31199f9..a4010e1f 100755
--- a/train.py
+++ b/train.py
@@ -551,7 +551,7 @@ def main():
     try:
         for epoch in range(start_epoch, num_epochs):
             if args.distributed and hasattr(loader_train.sampler, 'set_epoch'):
-                loader_train.set_epoch(epoch)
+                loader_train.sampler.set_epoch(epoch)
 
             train_metrics = train_one_epoch(
                 epoch, model, loader_train, optimizer, train_loss_fn, args,

From 55f7dfa9ea8bab0296c774dd7234d602b8396ce5 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 18 Jan 2021 16:11:02 -0800
Subject: [PATCH 13/20] Refactor vision_transformer entry fns, add pos
 embedding resize support for fine tuning, add some deit models for testing
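
The pos embedding resize added in this patch interpolates the grid portion of the learned
position embedding so that e.g. 224x224 weights can initialize a 384x384 model. A standalone
sketch of the same idea, assuming the usual 1 class token + N x N grid token layout (shapes
below are illustrative only):

    import math
    import torch
    import torch.nn.functional as F

    def resize_grid_posemb(posemb, gs_new):
        # split off the class token, bilinear-resize the gs_old x gs_old grid, re-concat
        posemb_tok, posemb_grid = posemb[:, :1], posemb[0, 1:]
        gs_old = int(math.sqrt(posemb_grid.shape[0]))
        posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2)
        posemb_grid = F.interpolate(posemb_grid, size=(gs_new, gs_new), mode='bilinear')
        posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new * gs_new, -1)
        return torch.cat([posemb_tok, posemb_grid], dim=1)

    posemb = torch.randn(1, 1 + 14 * 14, 768)    # 224 / patch 16 -> 14 x 14 grid
    print(resize_grid_posemb(posemb, 24).shape)  # torch.Size([1, 577, 768]) for 384 / 16 = 24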
---
 tests/test_models.py              |   4 +-
 timm/models/vision_transformer.py | 307 +++++++++++++++---------------
 2 files changed, 153 insertions(+), 158 deletions(-)

diff --git a/tests/test_models.py b/tests/test_models.py
index 17d592d4..dee4fbe7 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -14,7 +14,7 @@ if hasattr(torch._C, '_jit_set_profiling_executor'):
     torch._C._jit_set_profiling_mode(False)
 
 # transformer models don't support many of the spatial / feature based model functionalities
-NON_STD_FILTERS = ['vit_*', 'deit_*']
+NON_STD_FILTERS = ['vit_*']
 
@@ -29,7 +29,7 @@ MAX_FWD_FEAT_SIZE = 448
 
 @pytest.mark.timeout(120)
-@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS[:-2]))
+@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS[:-1]))
 @pytest.mark.parametrize('batch_size', [1])
 def test_model_forward(model_name, batch_size):
     """Run a single forward pass with each model"""
diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index 076010ab..a832cce3 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -17,11 +17,15 @@ paper `DeiT: Data-efficient Image Transformers` - https://arxiv.org/abs/2012.12877
 
 Hacked together by / Copyright 2020 Ross Wightman
 """
-import torch
-import torch.nn as nn
+import math
+import logging
 from functools import partial
 from collections import OrderedDict
 
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
 from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
 from .helpers import load_pretrained
 from .layers import DropPath, to_2tuple, trunc_normal_
@@ -29,6 +33,8 @@ from .resnet import resnet26d, resnet50d
 from .resnetv2 import ResNetV2, StdConv2dSame
 from .registry import register_model
 
+_logger = logging.getLogger(__name__)
+
 
 def _cfg(url='', **kwargs):
     return {
@@ -94,7 +100,7 @@ default_cfgs = {
     # hybrid models (weights ported from official Google JAX impl)
     'vit_base_resnet50_224_in21k': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth',
-        mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9),
+        num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9),
     'vit_base_resnet50_384': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth',
         input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0),
@@ -106,15 +112,15 @@ default_cfgs = {
     'vit_small_resnet50d_s3_224': _cfg(),
     'vit_base_resnet26d_224': _cfg(),
     'vit_base_resnet50d_224': _cfg(),
 
     # deit models (FB weights)
-    'deit_tiny_patch16_224': _cfg(
+    'vit_deit_tiny_patch16_224': _cfg(
         url='https://dl.fbaipublicfiles.com/deit/deit_tiny_patch16_224-a1311bcf.pth'),
-    'deit_small_patch16_224': _cfg(
+    'vit_deit_small_patch16_224': _cfg(
         url='https://dl.fbaipublicfiles.com/deit/deit_small_patch16_224-cd65a155.pth'),
-    'deit_base_patch16_224': _cfg(
+    'vit_deit_base_patch16_224': _cfg(
         url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_224-b5f2ef4d.pth',),
-    'deit_base_patch16_384': _cfg(
-        url='',  # no weights yet
-        input_size=(3, 384, 384)),
+    'vit_deit_base_patch16_384': _cfg(
+        url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_384-8de9b5d1.pth',
+        input_size=(3, 384, 384), crop_pct=1.0),
 }
 
@@ -253,11 +259,12 @@ class VisionTransformer(nn.Module):
     """ Vision Transformer with support for patch or hybrid CNN input stage
     """
     def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12,
-                 num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, representation_size=None,
-                 drop_rate=0., attn_drop_rate=0., drop_path_rate=0., hybrid_backbone=None, norm_layer=nn.LayerNorm):
+                 num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None,
+                 drop_rate=0., attn_drop_rate=0., drop_path_rate=0., hybrid_backbone=None, norm_layer=None):
         super().__init__()
         self.num_classes = num_classes
         self.num_features = self.embed_dim = embed_dim  # num_features for consistency with other models
+        norm_layer = norm_layer or partial(nn.LayerNorm, eps=1e-6)
 
         if hybrid_backbone is not None:
             self.patch_embed = HybridEmbed(
@@ -290,7 +297,7 @@ class VisionTransformer(nn.Module):
         self.pre_logits = nn.Identity()
 
         # Classifier head
-        self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity()
+        self.head = nn.Linear(self.num_features, num_classes) if num_classes > 0 else nn.Identity()
 
         trunc_normal_(self.pos_embed, std=.02)
         trunc_normal_(self.cls_token, std=.02)
@@ -338,180 +345,196 @@ class VisionTransformer(nn.Module):
         return x
 
 
-def _conv_filter(state_dict, patch_size=16):
+def resize_pos_embed(posemb, posemb_new):
+    # Rescale the grid of position embeddings when loading from state_dict
+    # Adapted from
+    # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224
+    _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape)
+    ntok_new = posemb_new.shape[1]
+    if True:
+        posemb_tok, posemb_grid = posemb[:, :1], posemb[0, 1:]
+        ntok_new -= 1
+    else:
+        posemb_tok, posemb_grid = posemb[:, :0], posemb[0]
+    gs_old = int(math.sqrt(len(posemb_grid)))
+    gs_new = int(math.sqrt(ntok_new))
+    _logger.info('Position embedding grid-size from %s to %s', gs_old, gs_new)
+    posemb_grid = posemb_grid.reshape(1, gs_old, gs_old, -1).permute(0, 3, 1, 2)
+    posemb_grid = F.interpolate(posemb_grid, size=(gs_new, gs_new), mode='bilinear')
+    posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new * gs_new, -1)
+    posemb = torch.cat([posemb_tok, posemb_grid], dim=1)
+    return posemb
+
+
+def checkpoint_filter_fn(state_dict, model):
     """ convert patch embedding weight from manual patchify + linear proj to conv"""
     out_dict = {}
+    if 'model' in state_dict:
+        # for deit models
+        state_dict = state_dict['model']
     for k, v in state_dict.items():
-        if 'patch_embed.proj.weight' in k:
-            v = v.reshape((v.shape[0], 3, patch_size, patch_size))
+        if 'patch_embed.proj.weight' in k and len(v.shape) < 4:
+            # for old models that I trained prior to conv based patchification
+            v = v.reshape(model.patch_embed.proj.weight.shape)
+        elif k == 'pos_embed' and v.shape != model.pos_embed.shape:
+            # to resize pos embedding when using model at different size from pretrained weights
+            v = resize_pos_embed(v, model.pos_embed)
         out_dict[k] = v
     return out_dict
 
 
+def _create_vision_transformer(variant, pretrained=False, **kwargs):
+    default_cfg = default_cfgs[variant]
+    default_num_classes = default_cfg['num_classes']
+    default_img_size = default_cfg['input_size'][-1]
+
+    num_classes = kwargs.pop('num_classes', default_num_classes)
+    img_size = kwargs.pop('img_size', default_img_size)
+    repr_size = kwargs.pop('representation_size', None)
+    if repr_size is not None and num_classes != default_num_classes:
+        # remove representation layer if fine-tuning
+        _logger.info("Removing representation layer for fine-tuning.")
+        repr_size = None
+
+    model = VisionTransformer(img_size=img_size, num_classes=num_classes, representation_size=repr_size, **kwargs)
+    model.default_cfg = default_cfg
+
+    if 
pretrained: + load_pretrained( + model, num_classes=num_classes, in_chans=kwargs.get('in_chans', 3), + filter_fn=partial(checkpoint_filter_fn, model=model)) + return model + + @register_model def vit_small_patch16_224(pretrained=False, **kwargs): + model_kwargs = dict( + patch_size=16, embed_dim=768, depth=8, num_heads=8, mlp_ratio=3., + qkv_bias=False, norm_layer=nn.LayerNorm, **kwargs) if pretrained: # NOTE my scale was wrong for original weights, leaving this here until I have better ones for this model - kwargs.setdefault('qk_scale', 768 ** -0.5) - model = VisionTransformer(patch_size=16, embed_dim=768, depth=8, num_heads=8, mlp_ratio=3., **kwargs) - model.default_cfg = default_cfgs['vit_small_patch16_224'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter) + model_kwargs.setdefault('qk_scale', 768 ** -0.5) + model = _create_vision_transformer('vit_small_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer( - patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch16_224'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch32_224(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=224, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch32_224'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch16_384(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=384, patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch16_384'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch32_384(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=384, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, + model_kwargs = dict( + patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch32_384'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) return 
model @register_model def vit_large_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer( - patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch16_224'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model def vit_large_patch32_224(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=224, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch32_224'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) return model @register_model def vit_large_patch16_384(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=384, patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch16_384'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch16_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.pop('num_classes', 21843) - model = VisionTransformer( - patch_size=16, num_classes=num_classes, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch16_224_in21k'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=_conv_filter) + model_kwargs = dict( + patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) + return model + + +@register_model +def vit_base_patch16_384_in21k(pretrained=False, **kwargs): + model_kwargs = dict( + patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) + model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_patch32_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.pop('num_classes', 21843) - model = VisionTransformer( - img_size=224, num_classes=num_classes, patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, - qkv_bias=True, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_patch32_224_in21k'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 
3)) + model_kwargs = dict( + patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) + model = _create_vision_transformer('vit_base_patch32_224_in21k', pretrained=pretrained, **model_kwargs) return model @register_model def vit_large_patch16_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.pop('num_classes', 21843) - model = VisionTransformer( - patch_size=16, num_classes=num_classes, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, qkv_bias=True, - representation_size=1024, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch16_224_in21k'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict( + patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) + model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) return model +# @register_model +# def vit_large_patch16_384_in21k(pretrained=False, **kwargs): +# model_kwargs = dict( +# patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) +# model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) +# return model + + @register_model def vit_large_patch32_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.get('num_classes', 21843) - model = VisionTransformer( - img_size=224, num_classes=num_classes, patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, - qkv_bias=True, representation_size=1024, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_large_patch32_224_in21k'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict( + patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) + model = _create_vision_transformer('vit_large_patch32_224_in21k', pretrained=pretrained, **model_kwargs) return model @register_model def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): - num_classes = kwargs.pop('num_classes', 21843) - model = VisionTransformer( - img_size=224, patch_size=14, num_classes=num_classes, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, - qkv_bias=True, representation_size=1280, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_huge_patch14_224_in21k'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict( + patch_size=14, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, representation_size=1280, **kwargs) + model = _create_vision_transformer('vit_huge_patch14_224_in21k', pretrained=pretrained, **model_kwargs) return model @register_model def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head - num_classes = kwargs.pop('num_classes', 21843) backbone = ResNetV2( layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') - model = VisionTransformer( - img_size=224, num_classes=num_classes, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - hybrid_backbone=backbone, representation_size=768, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = 
default_cfgs['vit_base_resnet50_224_in21k'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict( + embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, + representation_size=768, **kwargs) + model = _create_vision_transformer('vit_base_resnet50_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -520,12 +543,8 @@ def vit_base_resnet50_384(pretrained=False, **kwargs): # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head backbone = ResNetV2( layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') - model = VisionTransformer( - img_size=384, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, - qkv_bias=True, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['vit_base_resnet50_384'] - if pretrained: - load_pretrained(model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3)) + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + model = _create_vision_transformer('vit_base_resnet50_384', pretrained=pretrained, **model_kwargs) return model @@ -533,9 +552,8 @@ def vit_base_resnet50_384(pretrained=False, **kwargs): def vit_small_resnet26d_224(pretrained=False, **kwargs): pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing backbone = resnet26d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) - model = VisionTransformer( - img_size=224, embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) - model.default_cfg = default_cfgs['vit_small_resnet26d_224'] + model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) + model = _create_vision_transformer('vit_small_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -543,9 +561,8 @@ def vit_small_resnet26d_224(pretrained=False, **kwargs): def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing backbone = resnet50d(pretrained=pretrained_backbone, features_only=True, out_indices=[3]) - model = VisionTransformer( - img_size=224, embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) - model.default_cfg = default_cfgs['vit_small_resnet50d_s3_224'] + model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) + model = _create_vision_transformer('vit_small_resnet50d_s3_224', pretrained=pretrained, **model_kwargs) return model @@ -553,9 +570,8 @@ def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): def vit_base_resnet26d_224(pretrained=False, **kwargs): pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing backbone = resnet26d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) - model = VisionTransformer( - img_size=224, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) - model.default_cfg = default_cfgs['vit_base_resnet26d_224'] + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + model = _create_vision_transformer('vit_base_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -563,55 
+579,34 @@ def vit_base_resnet26d_224(pretrained=False, **kwargs): def vit_base_resnet50d_224(pretrained=False, **kwargs): pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing backbone = resnet50d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) - model = VisionTransformer( - img_size=224, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) - model.default_cfg = default_cfgs['vit_base_resnet50d_224'] + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + model = _create_vision_transformer('vit_base_resnet50d_224', pretrained=pretrained, **model_kwargs) return model @register_model -def deit_tiny_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer( - patch_size=16, embed_dim=192, depth=12, num_heads=3, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['deit_tiny_patch16_224'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) +def vit_deit_tiny_patch16_224(pretrained=False, **kwargs): + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_deit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model -def deit_small_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer( - patch_size=16, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['deit_small_patch16_224'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) +def vit_deit_small_patch16_224(pretrained=False, **kwargs): + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_deit_small_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model -def deit_base_patch16_224(pretrained=False, **kwargs): - model = VisionTransformer( - patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['deit_base_patch16_224'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) +def vit_deit_base_patch16_224(pretrained=False, **kwargs): + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_deit_base_patch16_224', pretrained=pretrained, **model_kwargs) return model @register_model -def deit_base_patch16_384(pretrained=False, **kwargs): - model = VisionTransformer( - img_size=384, patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) - model.default_cfg = default_cfgs['deit_base_patch16_384'] - if pretrained: - load_pretrained( - model, num_classes=model.num_classes, in_chans=kwargs.get('in_chans', 3), filter_fn=lambda x: x['model']) +def vit_deit_base_patch16_384(pretrained=False, **kwargs): + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model = 
_create_vision_transformer('vit_deit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model From 5d4c3d0af355cd09946e5ad0fc89b1cde76347d5 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Fri, 22 Jan 2021 10:52:04 -0800 Subject: [PATCH 14/20] Add enhanced ParserImageInTar that can read images from tars within tars, folders with multiple tars, etc. Additional comment cleanup. --- timm/data/parsers/parser_factory.py | 4 +- .../data/parsers/parser_image_class_in_tar.py | 107 --------- timm/data/parsers/parser_image_folder.py | 9 +- timm/data/parsers/parser_image_in_tar.py | 219 ++++++++++++++++++ timm/data/parsers/parser_image_tar.py | 9 + timm/data/parsers/parser_tfds.py | 18 +- train.py | 2 +- 7 files changed, 247 insertions(+), 121 deletions(-) delete mode 100644 timm/data/parsers/parser_image_class_in_tar.py create mode 100644 timm/data/parsers/parser_image_in_tar.py diff --git a/timm/data/parsers/parser_factory.py b/timm/data/parsers/parser_factory.py index ce9aa35f..419ffe89 100644 --- a/timm/data/parsers/parser_factory.py +++ b/timm/data/parsers/parser_factory.py @@ -2,7 +2,7 @@ import os from .parser_image_folder import ParserImageFolder from .parser_image_tar import ParserImageTar -from .parser_image_class_in_tar import ParserImageClassInTar +from .parser_image_in_tar import ParserImageInTar def create_parser(name, root, split='train', **kwargs): @@ -23,7 +23,7 @@ def create_parser(name, root, split='train', **kwargs): # default fallback path (backwards compat), use image tar if root is a .tar file, otherwise image folder # FIXME support split here, in parser? if os.path.isfile(root) and os.path.splitext(root)[1] == '.tar': - parser = ParserImageTar(root, **kwargs) + parser = ParserImageInTar(root, **kwargs) else: parser = ParserImageFolder(root, **kwargs) return parser diff --git a/timm/data/parsers/parser_image_class_in_tar.py b/timm/data/parsers/parser_image_class_in_tar.py deleted file mode 100644 index f43ff359..00000000 --- a/timm/data/parsers/parser_image_class_in_tar.py +++ /dev/null @@ -1,107 +0,0 @@ -import os -import tarfile -import pickle -from glob import glob -import numpy as np - -from timm.utils.misc import natural_key - -from .parser import Parser -from .class_map import load_class_map -from .constants import IMG_EXTENSIONS - - -def extract_tarinfos(root, class_name_to_idx=None, cache_filename=None, extensions=None): - tar_filenames = glob(os.path.join(root, '*.tar'), recursive=True) - assert len(tar_filenames) - num_tars = len(tar_filenames) - - cache_path = '' - if cache_filename is not None: - cache_path = os.path.join(root, cache_filename) - if os.path.exists(cache_path): - with open(cache_path, 'rb') as pf: - tarinfo_map = pickle.load(pf) - else: - tarinfo_map = {} - for fi, fn in enumerate(tar_filenames): - if fi % 1000 == 0: - print(f'DEBUG: tar {fi}/{num_tars}') - # cannot keep this open across processes, reopen later - name = os.path.splitext(os.path.basename(fn))[0] - with tarfile.open(fn) as tf: - if extensions is None: - # assume all files are valid samples - class_tarinfos = tf.getmembers() - else: - class_tarinfos = [m for m in tf.getmembers() if os.path.splitext(m.name)[1].lower() in extensions] - tarinfo_map[name] = dict(tarinfos=class_tarinfos) - print(f'DEBUG: {len(class_tarinfos)} images for class {name}') - tarinfo_map = {k: v for k, v in sorted(tarinfo_map.items(), key=lambda k: natural_key(k[0]))} - if cache_path: - with open(cache_path, 'wb') as pf: - pickle.dump(tarinfo_map, pf, protocol=pickle.HIGHEST_PROTOCOL) - - tarinfos = 
[]
-    targets = []
-    build_class_map = False
-    if class_name_to_idx is None:
-        class_name_to_idx = {}
-        build_class_map = True
-    for i, (name, metadata) in enumerate(tarinfo_map.items()):
-        class_idx = i
-        if build_class_map:
-            class_name_to_idx[name] = i
-        else:
-            if name not in class_name_to_idx:
-                # only samples with class in class mapping are added
-                continue
-            class_idx = class_name_to_idx[name]
-        num_samples = len(metadata['tarinfos'])
-        tarinfos.extend(metadata['tarinfos'])
-        targets.extend([class_idx] * num_samples)
-
-    return tarinfos, np.array(targets), class_name_to_idx
-
-
-class ParserImageClassInTar(Parser):
-    """ Multi-tarfile dataset parser where there is one .tar file per class
-    """
-
-    CACHE_FILENAME = '_tarinfos.pickle'
-
-    def __init__(self, root, class_map=''):
-        super().__init__()
-
-        class_name_to_idx = None
-        if class_map:
-            class_name_to_idx = load_class_map(class_map, root)
-        assert os.path.isdir(root)
-        self.root = root
-        self.tarinfos, self.targets, self.class_name_to_idx = extract_tarinfos(
-            self.root, class_name_to_idx=class_name_to_idx,
-            cache_filename=self.CACHE_FILENAME, extensions=IMG_EXTENSIONS)
-        self.class_idx_to_name = {v: k for k, v in self.class_name_to_idx.items()}
-        self.tarfiles = {}  # to open lazily
-        self.cache_tarfiles = False
-
-    def __len__(self):
-        return len(self.tarinfos)
-
-    def __getitem__(self, index):
-        tarinfo = self.tarinfos[index]
-        target = self.targets[index]
-        class_name = self.class_idx_to_name[target]
-        if self.cache_tarfiles:
-            tf = self.tarfiles.setdefault(
-                class_name, tarfile.open(os.path.join(self.root, class_name + '.tar')))
-        else:
-            tf = tarfile.open(os.path.join(self.root, class_name + '.tar'))
-        fileobj = tf.extractfile(tarinfo)
-        return fileobj, target
-
-    def _filename(self, index, basename=False, absolute=False):
-        filename = self.tarinfos[index].name
-        if basename:
-            filename = os.path.basename(filename)
-        return filename
diff --git a/timm/data/parsers/parser_image_folder.py b/timm/data/parsers/parser_image_folder.py
index d2a597d9..ed349009 100644
--- a/timm/data/parsers/parser_image_folder.py
+++ b/timm/data/parsers/parser_image_folder.py
@@ -1,6 +1,11 @@
+""" A dataset parser that reads images from folders
+
+Folders are scanned recursively to find image files. Labels are based
+on the folder hierarchy, just leaf folders by default.
+
+Hacked together by / Copyright 2020 Ross Wightman
+"""
 import os
-import io
-import torch
 
 from timm.utils.misc import natural_key
 
diff --git a/timm/data/parsers/parser_image_in_tar.py b/timm/data/parsers/parser_image_in_tar.py
new file mode 100644
index 00000000..fd561bcb
--- /dev/null
+++ b/timm/data/parsers/parser_image_in_tar.py
@@ -0,0 +1,219 @@
+""" A dataset parser that reads tarfile based datasets
+
+This parser can read and extract image samples from:
+* a single tar of image files
+* a folder of multiple tarfiles containing image files
+* a tar of tars containing image files
+
+Labels are based on the combined folder and/or tar name structure.
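+
+A minimal usage sketch (illustrative only, the path below is a placeholder for a real
+dataset root or .tar archive):
+
+    parser = ParserImageInTar('/path/to/dataset_root')
+    fileobj, target = parser[0]  # file-like object for the image bytes + integer class index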
+ +Hacked together by / Copyright 2020 Ross Wightman +""" +import os +import tarfile +import pickle +import logging +import numpy as np +from glob import glob +from typing import List, Dict + +from timm.utils.misc import natural_key + +from .parser import Parser +from .class_map import load_class_map +from .constants import IMG_EXTENSIONS + + +_logger = logging.getLogger(__name__) +CACHE_FILENAME_SUFFIX = '_tarinfos.pickle' + + +class TarState: + + def __init__(self, tf: tarfile.TarFile = None, ti: tarfile.TarInfo = None): + self.tf: tarfile.TarFile = tf + self.ti: tarfile.TarInfo = ti + self.children: Dict[str, TarState] = {} # child states (tars within tars) + + def reset(self): + self.tf = None + + +def _extract_tarinfo(tf: tarfile.TarFile, parent_info: Dict, extensions=IMG_EXTENSIONS): + sample_count = 0 + for i, ti in enumerate(tf): + if not ti.isfile(): + continue + dirname, basename = os.path.split(ti.path) + name, ext = os.path.splitext(basename) + ext = ext.lower() + if ext == '.tar': + with tarfile.open(fileobj=tf.extractfile(ti), mode='r|') as ctf: + child_info = dict( + name=ti.name, path=os.path.join(parent_info['path'], name), ti=ti, children=[], samples=[]) + sample_count += _extract_tarinfo(ctf, child_info, extensions=extensions) + _logger.debug(f'{i}/?. Extracted child tarinfos from {ti.name}. {len(child_info["samples"])} images.') + parent_info['children'].append(child_info) + elif ext in extensions: + parent_info['samples'].append(ti) + sample_count += 1 + return sample_count + + +def extract_tarinfos(root, class_name_to_idx=None, cache_tarinfo=None, extensions=IMG_EXTENSIONS, sort=True): + root_is_tar = False + if os.path.isfile(root): + assert os.path.splitext(root)[-1].lower() == '.tar' + tar_filenames = [root] + root, root_name = os.path.split(root) + root_name = os.path.splitext(root_name)[0] + root_is_tar = True + else: + root_name = root.strip(os.path.sep).split(os.path.sep)[-1] + tar_filenames = glob(os.path.join(root, '*.tar'), recursive=True) + num_tars = len(tar_filenames) + tar_bytes = sum([os.path.getsize(f) for f in tar_filenames]) + assert num_tars, f'No .tar files found at specified path ({root}).' + + _logger.info(f'Scanning {tar_bytes/1024**2:.2f}MB of tar files...') + info = dict(tartrees=[]) + cache_path = '' + if cache_tarinfo is None: + cache_tarinfo = True if tar_bytes > 10*1024**3 else False # FIXME magic number, 10GB + if cache_tarinfo: + cache_filename = '_' + root_name + CACHE_FILENAME_SUFFIX + cache_path = os.path.join(root, cache_filename) + if os.path.exists(cache_path): + _logger.info(f'Reading tar info from cache file {cache_path}.') + with open(cache_path, 'rb') as pf: + info = pickle.load(pf) + assert len(info['tartrees']) == num_tars, "Cached tartree len doesn't match number of tarfiles" + else: + for i, fn in enumerate(tar_filenames): + path = '' if root_is_tar else os.path.splitext(os.path.basename(fn))[0] + with tarfile.open(fn, mode='r|') as tf: # tarinfo scans done in streaming mode + parent_info = dict(name=os.path.relpath(fn, root), path=path, ti=None, children=[], samples=[]) + num_samples = _extract_tarinfo(tf, parent_info, extensions=extensions) + num_children = len(parent_info["children"]) + _logger.debug( + f'{i}/{num_tars}. Extracted tarinfos from {fn}. 
{num_children} children, {num_samples} samples.') + info['tartrees'].append(parent_info) + if cache_path: + _logger.info(f'Writing tar info to cache file {cache_path}.') + with open(cache_path, 'wb') as pf: + pickle.dump(info, pf) + + samples = [] + labels = [] + build_class_map = False + if class_name_to_idx is None: + build_class_map = True + + # Flatten tartree info into lists of samples and targets w/ targets based on label id via + # class map arg or from unique paths. + # NOTE: currently only flattening up to two-levels, filesystem .tars and then one level of sub-tar children + # this covers my current use cases and keeps things a little easier to test for now. + tarfiles = [] + + def _label_from_paths(*path, leaf_only=True): + path = os.path.join(*path).strip(os.path.sep) + return path.split(os.path.sep)[-1] if leaf_only else path.replace(os.path.sep, '_') + + def _add_samples(info, fn): + added = 0 + for s in info['samples']: + label = _label_from_paths(info['path'], os.path.dirname(s.path)) + if not build_class_map and label not in class_name_to_idx: + continue + samples.append((s, fn, info['ti'])) + labels.append(label) + added += 1 + return added + + _logger.info(f'Collecting samples and building tar states.') + for parent_info in info['tartrees']: + # if tartree has children, we assume all samples are at the child level + tar_name = None if root_is_tar else parent_info['name'] + tar_state = TarState() + parent_added = 0 + for child_info in parent_info['children']: + child_added = _add_samples(child_info, fn=tar_name) + if child_added: + tar_state.children[child_info['name']] = TarState(ti=child_info['ti']) + parent_added += child_added + parent_added += _add_samples(parent_info, fn=tar_name) + if parent_added: + tarfiles.append((tar_name, tar_state)) + del info + + if build_class_map: + # build class index + sorted_labels = list(sorted(set(labels), key=natural_key)) + class_name_to_idx = {c: idx for idx, c in enumerate(sorted_labels)} + + _logger.info(f'Mapping targets and sorting samples.') + samples_and_targets = [(s, class_name_to_idx[l]) for s, l in zip(samples, labels) if l in class_name_to_idx] + if sort: + samples_and_targets = sorted(samples_and_targets, key=lambda k: natural_key(k[0][0].path)) + + _logger.info(f'Finished processing {len(samples_and_targets)} samples across {len(tarfiles)} tar files.') + return samples_and_targets, class_name_to_idx, tarfiles + + +class ParserImageInTar(Parser): + """ Multi-tarfile dataset parser where there is one .tar file per class + """ + + def __init__(self, root, class_map='', cache_tarfiles=True, cache_tarinfo=None): + super().__init__() + + class_name_to_idx = None + if class_map: + class_name_to_idx = load_class_map(class_map, root) + self.root = root + self.samples_and_targets, self.class_name_to_idx, tarfiles = extract_tarinfos( + self.root, + class_name_to_idx=class_name_to_idx, + cache_tarinfo=cache_tarinfo, + extensions=IMG_EXTENSIONS) + self.class_idx_to_name = {v: k for k, v in self.class_name_to_idx.items()} + if len(tarfiles) == 1 and tarfiles[0][0] is None: + self.root_is_tar = True + self.tar_state = tarfiles[0][1] + else: + self.root_is_tar = False + self.tar_state = dict(tarfiles) + self.cache_tarfiles = cache_tarfiles + + def __len__(self): + return len(self.samples_and_targets) + + def __getitem__(self, index): + sample, target = self.samples_and_targets[index] + sample_ti, parent_fn, child_ti = sample + parent_abs = os.path.join(self.root, parent_fn) if parent_fn else self.root + + tf = None + cache_state = None 
+        if self.cache_tarfiles:
+            cache_state = self.tar_state if self.root_is_tar else self.tar_state[parent_fn]
+            tf = cache_state.tf
+        if tf is None:
+            tf = tarfile.open(parent_abs)
+            if self.cache_tarfiles:
+                cache_state.tf = tf
+        if child_ti is not None:
+            ctf = cache_state.children[child_ti.name].tf if self.cache_tarfiles else None
+            if ctf is None:
+                ctf = tarfile.open(fileobj=tf.extractfile(child_ti))
+                if self.cache_tarfiles:
+                    cache_state.children[child_ti.name].tf = ctf
+            tf = ctf
+
+        return tf.extractfile(sample_ti), target
+
+    def _filename(self, index, basename=False, absolute=False):
+        filename = self.samples_and_targets[index][0][0].name
+        if basename:
+            filename = os.path.basename(filename)
+        return filename
diff --git a/timm/data/parsers/parser_image_tar.py b/timm/data/parsers/parser_image_tar.py
index 657b56f9..467537f4 100644
--- a/timm/data/parsers/parser_image_tar.py
+++ b/timm/data/parsers/parser_image_tar.py
@@ -1,3 +1,10 @@
+""" A dataset parser that reads single tarfile based datasets
+
+This parser can read datasets consisting of a single tarfile containing images.
+I am planning to deprecate it in favour of ParserImageInTar.
+
+Hacked together by / Copyright 2020 Ross Wightman
+"""
 import os
 import tarfile
 
@@ -31,6 +38,8 @@ def extract_tarinfo(tarfile, class_to_idx=None, sort=True):
 
 class ParserImageTar(Parser):
     """ Single tarfile dataset where classes are mapped to folders within tar
+    NOTE: This class is being deprecated in favour of the more capable ParserImageInTar that can
+    operate on folders of tars or tars in tars.
     """
     def __init__(self, root, class_map=''):
         super().__init__()
diff --git a/timm/data/parsers/parser_tfds.py b/timm/data/parsers/parser_tfds.py
index 39a9243a..15361cb5 100644
--- a/timm/data/parsers/parser_tfds.py
+++ b/timm/data/parsers/parser_tfds.py
@@ -37,14 +37,14 @@ class ParserTfds(Parser):
     dataloader workers, the train iterator wraps to avoid returning partial batches that trigger drop_last
     https://github.com/pytorch/pytorch/issues/33413
     * With PyTorch IterableDatasets, each worker in each replica operates in isolation, the final batch
-    from each worker could be a different size. For training this is avoid by option above, for
-    validation extra samples are inserted iff distributed mode is enabled so the batches being reduced
-    across replicas are of same size. This will slightlyalter the results, distributed validation will not be
+    from each worker could be a different size. For training this is worked around by the option above, for
+    validation extra samples are inserted iff distributed mode is enabled so that the batches being reduced
+    across replicas are of the same size. This will slightly alter the results, distributed validation will not be
     100% correct. This is similar to common handling in DistributedSampler for normal Datasets but a bit worse
-    since there are to N * J extra samples.
+    since there are up to N * J extra samples with IterableDatasets.
     * The sharding (splitting of dataset into TFRecord) files imposes limitations on the number of replicas
    and dataloader workers you can use. For really small datasets that only contain a few shards
-    you may have to train non-distributed w/ 1-2 dataloader workers. This may not be a huge concern as the
+    you may have to train non-distributed w/ 1-2 dataloader workers. This is likely not a huge concern as the
     benefit of distributed training or fast dataloading should be much less for small datasets.
    * This wrapper is currently configured to return individual, decompressed image samples from the TFDS
    dataset. The augmentation (transforms) and batching are still done in PyTorch. It would be possible
@@ -64,8 +64,8 @@ class ParserTfds(Parser):
         self.batch_size = batch_size
 
         self.builder = tfds.builder(name, data_dir=root)
-        # NOTE: please use tfds command line app to download & prepare datasets, I don't want to trigger
-        # it by default here as it's caused issues generating unwanted paths in data directories.
+        # NOTE: please use tfds command line app to download & prepare datasets, I don't want to call
+        # download_and_prepare() by default here as it's caused issues generating unwanted paths.
         self.num_samples = self.builder.info.splits[split].num_examples
         self.ds = None  # initialized lazily on each dataloader worker process
 
@@ -102,7 +102,7 @@ class ParserTfds(Parser):
         """
         InputContext will assign subset of underlying TFRecord files to each 'pipeline' if used.
         My understanding is that using split, the underlying TFRecord files will shuffle (shuffle_files=True)
-        between the splits each iteration but that could be wrong.
+        between the splits each iteration, but that understanding could be wrong.
         Possible split options include:
         * InputContext for both distributed & worker processes (current)
         * InputContext for distributed and sub-splits for worker processes
@@ -154,7 +154,7 @@ class ParserTfds(Parser):
                 sample_count += 1
                 if self.is_training and sample_count >= target_sample_count:
                     # Need to break out of loop when repeat() is enabled for training w/ oversampling
-                    # this results in 'extra' samples per epoch but seems more desirable than dropping
+                    # this results in extra samples per epoch but seems more desirable than dropping
                     # up to N*J batches per epoch (where N = num distributed processes, and J = num worker processes)
                     break
         if not self.is_training and self.dist_num_replicas and 0 < sample_count < target_sample_count:
diff --git a/train.py b/train.py
index a4010e1f..94c417b4 100755
--- a/train.py
+++ b/train.py
@@ -283,7 +283,7 @@ def _parse_args():
 
 
 def main():
-    setup_default_logging(log_path='./train.log')
+    setup_default_logging()
     args, args_text = _parse_args()
 
     args.prefetcher = not args.no_prefetcher

From 22748f1a2dcdccdee927e4c86cf089a9529e432d Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Fri, 22 Jan 2021 16:54:33 -0800
Subject: [PATCH 15/20] Convert samples/targets in ParserImageInTar to numpy
 arrays, slightly less mem usage for massive datasets.
Add a few more se/eca model defs to resnet.py --- timm/data/parsers/parser_image_in_tar.py | 17 ++++---- timm/models/resnet.py | 52 ++++++++++++++++++++++++ 2 files changed, 62 insertions(+), 7 deletions(-) diff --git a/timm/data/parsers/parser_image_in_tar.py b/timm/data/parsers/parser_image_in_tar.py index fd561bcb..c6ada962 100644 --- a/timm/data/parsers/parser_image_in_tar.py +++ b/timm/data/parsers/parser_image_in_tar.py @@ -155,9 +155,11 @@ def extract_tarinfos(root, class_name_to_idx=None, cache_tarinfo=None, extension samples_and_targets = [(s, class_name_to_idx[l]) for s, l in zip(samples, labels) if l in class_name_to_idx] if sort: samples_and_targets = sorted(samples_and_targets, key=lambda k: natural_key(k[0][0].path)) - - _logger.info(f'Finished processing {len(samples_and_targets)} samples across {len(tarfiles)} tar files.') - return samples_and_targets, class_name_to_idx, tarfiles + samples, targets = zip(*samples_and_targets) + samples = np.array(samples) + targets = np.array(targets) + _logger.info(f'Finished processing {len(samples)} samples across {len(tarfiles)} tar files.') + return samples, targets, class_name_to_idx, tarfiles class ParserImageInTar(Parser): @@ -171,7 +173,7 @@ class ParserImageInTar(Parser): if class_map: class_name_to_idx = load_class_map(class_map, root) self.root = root - self.samples_and_targets, self.class_name_to_idx, tarfiles = extract_tarinfos( + self.samples, self.targets, self.class_name_to_idx, tarfiles = extract_tarinfos( self.root, class_name_to_idx=class_name_to_idx, cache_tarinfo=cache_tarinfo, @@ -186,10 +188,11 @@ class ParserImageInTar(Parser): self.cache_tarfiles = cache_tarfiles def __len__(self): - return len(self.samples_and_targets) + return len(self.samples) def __getitem__(self, index): - sample, target = self.samples_and_targets[index] + sample = self.samples[index] + target = self.targets[index] sample_ti, parent_fn, child_ti = sample parent_abs = os.path.join(self.root, parent_fn) if parent_fn else self.root @@ -213,7 +216,7 @@ class ParserImageInTar(Parser): return tf.extractfile(sample_ti), target def _filename(self, index, basename=False, absolute=False): - filename = self.samples_and_targets[index][0][0].name + filename = self.samples[index][0].name if basename: filename = os.path.basename(filename) return filename diff --git a/timm/models/resnet.py b/timm/models/resnet.py index be0652bf..0ad35902 100644 --- a/timm/models/resnet.py +++ b/timm/models/resnet.py @@ -162,6 +162,12 @@ default_cfgs = { 'seresnet152d_320': _cfg( url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/seresnet152d_ra2-04464dd2.pth', interpolation='bicubic', first_conv='conv1.0', input_size=(3, 320, 320), crop_pct=1.0, pool_size=(10, 10)), + 'seresnet200d': _cfg( + url='', + interpolation='bicubic', first_conv='conv1.0', input_size=(3, 256, 256), crop_pct=0.94, pool_size=(8, 8)), + 'seresnet269d': _cfg( + url='', + interpolation='bicubic', first_conv='conv1.0', input_size=(3, 256, 256), crop_pct=0.94, pool_size=(8, 8)), # Squeeze-Excitation ResNeXts, to eventually replace the models in senet.py @@ -216,6 +222,12 @@ default_cfgs = { url='https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45610/outputs/ECAResNet101D_P_75a3370e.pth', interpolation='bicubic', first_conv='conv1.0'), + 'ecaresnet200d': _cfg( + url='', + interpolation='bicubic', first_conv='conv1.0', input_size=(3, 256, 256), crop_pct=0.94, pool_size=(8, 8)), + 'ecaresnet269d': _cfg( + url='', + interpolation='bicubic', 
first_conv='conv1.0', input_size=(3, 256, 256), crop_pct=0.94, pool_size=(8, 8)), # Efficient Channel Attention ResNeXts 'ecaresnext26tn_32x4d': _cfg( @@ -1123,6 +1135,26 @@ def ecaresnet101d_pruned(pretrained=False, **kwargs): return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args) +@register_model +def ecaresnet200d(pretrained=False, **kwargs): + """Constructs a ResNet-200-D model with ECA. + """ + model_args = dict( + block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, + block_args=dict(attn_layer='eca'), **kwargs) + return _create_resnet('ecaresnet200d', pretrained, **model_args) + + +@register_model +def ecaresnet269d(pretrained=False, **kwargs): + """Constructs a ResNet-269-D model with ECA. + """ + model_args = dict( + block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, + block_args=dict(attn_layer='eca'), **kwargs) + return _create_resnet('ecaresnet269d', pretrained, **model_args) + + @register_model def ecaresnext26tn_32x4d(pretrained=False, **kwargs): """Constructs an ECA-ResNeXt-26-TN model. @@ -1198,6 +1230,26 @@ def seresnet152d(pretrained=False, **kwargs): return _create_resnet('seresnet152d', pretrained, **model_args) +@register_model +def seresnet200d(pretrained=False, **kwargs): + """Constructs a ResNet-200-D model with SE attn. + """ + model_args = dict( + block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', avg_down=True, + block_args=dict(attn_layer='se'), **kwargs) + return _create_resnet('seresnet200d', pretrained, **model_args) + + +@register_model +def seresnet269d(pretrained=False, **kwargs): + """Constructs a ResNet-269-D model with SE attn. + """ + model_args = dict( + block=Bottleneck, layers=[3, 30, 48, 8], stem_width=32, stem_type='deep', avg_down=True, + block_args=dict(attn_layer='se'), **kwargs) + return _create_resnet('seresnet269d', pretrained, **model_args) + + @register_model def seresnet152d_320(pretrained=False, **kwargs): model_args = dict( From c16e9650371d167dcb38669aa1280caba2c69dcd Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Sun, 24 Jan 2021 23:18:35 -0800 Subject: [PATCH 16/20] Add some ViT comments and fix a few minor issues. 
---
 timm/models/vision_transformer.py | 145 ++++++++++++++++++++++--------
 1 file changed, 110 insertions(+), 35 deletions(-)

diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py
index a832cce3..90122090 100644
--- a/timm/models/vision_transformer.py
+++ b/timm/models/vision_transformer.py
@@ -100,10 +100,10 @@ default_cfgs = {
     # hybrid models (weights ported from official Google JAX impl)
     'vit_base_resnet50_224_in21k': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_224_in21k-6f7c7740.pth',
-        num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9),
+        num_classes=21843, mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=0.9, first_conv='patch_embed.backbone.stem.conv'),
     'vit_base_resnet50_384': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_vit_base_resnet50_384-9fd3c705.pth',
-        input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0),
+        input_size=(3, 384, 384), mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5), crop_pct=1.0, first_conv='patch_embed.backbone.stem.conv'),
 
     # hybrid models (my experiments)
     'vit_small_resnet26d_224': _cfg(),
@@ -256,11 +256,33 @@ class HybridEmbed(nn.Module):
 
 
 class VisionTransformer(nn.Module):
-    """ Vision Transformer with support for patch or hybrid CNN input stage
+    """ Vision Transformer
+
+    A PyTorch impl of: `An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale` -
+        https://arxiv.org/abs/2010.11929
     """
     def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12,
                  num_heads=12, mlp_ratio=4., qkv_bias=True, qk_scale=None, representation_size=None,
                  drop_rate=0., attn_drop_rate=0., drop_path_rate=0., hybrid_backbone=None, norm_layer=None):
+        """
+        Args:
+            img_size (int, tuple): input image size
+            patch_size (int, tuple): patch size
+            in_chans (int): number of input channels
+            num_classes (int): number of classes for classification head
+            embed_dim (int): embedding dimension
+            depth (int): depth of transformer
+            num_heads (int): number of attention heads
+            mlp_ratio (int): ratio of mlp hidden dim to embedding dim
+            qkv_bias (bool): enable bias for qkv if True
+            qk_scale (float): override default qk scale of head_dim ** -0.5 if set
+            representation_size (Optional[int]): enable and set representation layer (pre-logits) to this value if set
+            drop_rate (float): dropout rate
+            attn_drop_rate (float): attention dropout rate
+            drop_path_rate (float): stochastic depth rate
+            hybrid_backbone (nn.Module): CNN backbone to use in-place of PatchEmbed module
+            norm_layer (nn.Module): normalization layer
+        """
         super().__init__()
         self.num_classes = num_classes
         self.num_features = self.embed_dim = embed_dim  # num_features for consistency with other models
@@ -346,8 +368,7 @@ class VisionTransformer(nn.Module):
 
 
 def resize_pos_embed(posemb, posemb_new):
-    # Rescale the grid of position embeddings when loading from state_dict
-    # Adapted from
+    # Rescale the grid of position embeddings when loading from state_dict. Adapted from
     # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224
     _logger.info('Resized position embedding: %s to %s', posemb.shape, posemb_new.shape)
     ntok_new = posemb_new.shape[1]
@@ -363,22 +384,21 @@ def resize_pos_embed(posemb, posemb_new):
     posemb_grid = F.interpolate(posemb_grid, size=(gs_new, gs_new), mode='bilinear')
     posemb_grid = posemb_grid.permute(0, 2, 3, 1).reshape(1, gs_new * gs_new, -1)
     posemb = torch.cat([posemb_tok, posemb_grid], dim=1)
-    state_dict['pos_embed'] = posemb
-    return state_dict
+    return posemb
 
 
 def checkpoint_filter_fn(state_dict, model):
     """ convert patch embedding weight from manual patchify + linear proj to conv"""
     out_dict = {}
     if 'model' in state_dict:
-        # for deit models
+        # For deit models
         state_dict = state_dict['model']
     for k, v in state_dict.items():
         if 'patch_embed.proj.weight' in k and len(v.shape) < 4:
-            # for old models that I trained prior to conv based patchification
+            # For old models that I trained prior to conv based patchification
             v = v.reshape(model.patch_embed.proj.weight.shape)
         elif k == 'pos_embed' and v.shape != model.pos_embed.shape:
-            # to resize pos embedding when using model at different size from pretrained weights
+            # To resize pos embedding when using model at different size from pretrained weights
             v = resize_pos_embed(v, model.pos_embed)
         out_dict[k] = v
     return out_dict
@@ -393,8 +413,9 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs):
     img_size = kwargs.pop('img_size', default_img_size)
     repr_size = kwargs.pop('representation_size', None)
     if repr_size is not None and num_classes != default_num_classes:
-        # remove representation layer if fine-tuning
-        _logger.info("Removing representation layer for fine-tuning.")
+        # Remove representation layer if fine-tuning. This may not always be the desired action,
+        # but it feels better than doing nothing by default for fine-tuning. Perhaps a better interface?
+        _logger.warning("Removing representation layer for fine-tuning.")
         repr_size = None
 
     model = VisionTransformer(img_size=img_size, num_classes=num_classes, representation_size=repr_size, **kwargs)
@@ -409,6 +430,7 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs):
 
 @register_model
 def vit_small_patch16_224(pretrained=False, **kwargs):
+    """ My custom 'small' ViT model. Depth=8, heads=8, mlp_ratio=3."""
     model_kwargs = dict(
         patch_size=16, embed_dim=768, depth=8, num_heads=8, mlp_ratio=3.,
         qkv_bias=False, norm_layer=nn.LayerNorm, **kwargs)
@@ -421,6 +443,9 @@ def vit_small_patch16_224(pretrained=False, **kwargs):
 
 @register_model
 def vit_base_patch16_224(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929).
+    ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer.
+    """
     model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs)
     model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs)
     return model
@@ -428,6 +453,8 @@ def vit_base_patch16_224(pretrained=False, **kwargs):
 
 @register_model
 def vit_base_patch32_224(pretrained=False, **kwargs):
+    """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights.
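+    Example (illustrative only): `model = timm.create_model('vit_base_patch32_224', num_classes=10)`.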
+ """ model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) return model @@ -435,6 +462,9 @@ def vit_base_patch32_224(pretrained=False, **kwargs): @register_model def vit_base_patch16_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model @@ -442,6 +472,9 @@ def vit_base_patch16_384(pretrained=False, **kwargs): @register_model def vit_base_patch32_384(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict( patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) @@ -451,6 +484,9 @@ def vit_base_patch32_384(pretrained=False, **kwargs): @register_model def vit_large_patch16_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -458,6 +494,8 @@ def vit_large_patch16_224(pretrained=False, **kwargs): @register_model def vit_large_patch32_224(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. + """ model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) return model @@ -465,21 +503,29 @@ def vit_large_patch32_224(pretrained=False, **kwargs): @register_model def vit_large_patch16_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) return model @register_model -def vit_base_patch16_224_in21k(pretrained=False, **kwargs): - model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) - model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) +def vit_large_patch32_384(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. 
+ """ + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) return model @register_model -def vit_base_patch16_384_in21k(pretrained=False, **kwargs): +def vit_base_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict( patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) @@ -488,6 +534,9 @@ def vit_base_patch16_384_in21k(pretrained=False, **kwargs): @register_model def vit_base_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict( patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) model = _create_vision_transformer('vit_base_patch32_224_in21k', pretrained=pretrained, **model_kwargs) @@ -496,22 +545,20 @@ def vit_base_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_large_patch16_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict( patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) return model -# @register_model -# def vit_large_patch16_384_in21k(pretrained=False, **kwargs): -# model_kwargs = dict( -# patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) -# model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) -# return model - - @register_model def vit_large_patch32_224_in21k(pretrained=False, **kwargs): + """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + """ model_kwargs = dict( patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) model = _create_vision_transformer('vit_large_patch32_224_in21k', pretrained=pretrained, **model_kwargs) @@ -520,6 +567,10 @@ def vit_large_patch32_224_in21k(pretrained=False, **kwargs): @register_model def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): + """ ViT-Huge model (ViT-H/14) from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + NOTE: converted weights not currently available, too large for github release hosting. 
+ """ model_kwargs = dict( patch_size=14, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, representation_size=1280, **kwargs) model = _create_vision_transformer('vit_huge_patch14_224_in21k', pretrained=pretrained, **model_kwargs) @@ -528,9 +579,13 @@ def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): @register_model def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): + """ R50+ViT-B/16 hybrid model from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. + """ # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head backbone = ResNetV2( - layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') + layers=(3, 4, 9), num_classes=0, global_pool='', in_chans=kwargs.get('in_chans', 3), + preact=False, stem_type='same', conv_layer=StdConv2dSame) model_kwargs = dict( embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, representation_size=768, **kwargs) @@ -540,9 +595,13 @@ def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): @register_model def vit_base_resnet50_384(pretrained=False, **kwargs): + """ R50+ViT-B/16 hybrid from original paper (https://arxiv.org/abs/2010.11929). + ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. + """ # create a ResNetV2 w/o pre-activation, that uses StdConv and GroupNorm and has 3 stages, no head backbone = ResNetV2( - layers=(3, 4, 9), preact=False, stem_type='same', conv_layer=StdConv2dSame, num_classes=0, global_pool='') + layers=(3, 4, 9), num_classes=0, global_pool='', in_chans=kwargs.get('in_chans', 3), + preact=False, stem_type='same', conv_layer=StdConv2dSame) model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet50_384', pretrained=pretrained, **model_kwargs) return model @@ -550,8 +609,9 @@ def vit_base_resnet50_384(pretrained=False, **kwargs): @register_model def vit_small_resnet26d_224(pretrained=False, **kwargs): - pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing - backbone = resnet26d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) + """ Custom ViT small hybrid w/ ResNet26D stride 32. No pretrained weights. + """ + backbone = resnet26d(pretrained=pretrained, features_only=True, out_indices=[4]) model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_small_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -559,8 +619,9 @@ def vit_small_resnet26d_224(pretrained=False, **kwargs): @register_model def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): - pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing - backbone = resnet50d(pretrained=pretrained_backbone, features_only=True, out_indices=[3]) + """ Custom ViT small hybrid w/ ResNet50D 3-stages, stride 16. No pretrained weights. 
+ """ + backbone = resnet50d(pretrained=pretrained, features_only=True, out_indices=[3]) model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_small_resnet50d_s3_224', pretrained=pretrained, **model_kwargs) return model @@ -568,8 +629,9 @@ def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): @register_model def vit_base_resnet26d_224(pretrained=False, **kwargs): - pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing - backbone = resnet26d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) + """ Custom ViT base hybrid w/ ResNet26D stride 32. No pretrained weights. + """ + backbone = resnet26d(pretrained=pretrained, features_only=True, out_indices=[4]) model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -577,8 +639,9 @@ def vit_base_resnet26d_224(pretrained=False, **kwargs): @register_model def vit_base_resnet50d_224(pretrained=False, **kwargs): - pretrained_backbone = kwargs.get('pretrained_backbone', True) # default to True for now, for testing - backbone = resnet50d(pretrained=pretrained_backbone, features_only=True, out_indices=[4]) + """ Custom ViT base hybrid w/ ResNet50D stride 32. No pretrained weights. + """ + backbone = resnet50d(pretrained=pretrained, features_only=True, out_indices=[4]) model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet50d_224', pretrained=pretrained, **model_kwargs) return model @@ -586,6 +649,9 @@ def vit_base_resnet50d_224(pretrained=False, **kwargs): @register_model def vit_deit_tiny_patch16_224(pretrained=False, **kwargs): + """ DeiT-tiny model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). + ImageNet-1k weights from https://github.com/facebookresearch/deit. + """ model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_deit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -593,6 +659,9 @@ def vit_deit_tiny_patch16_224(pretrained=False, **kwargs): @register_model def vit_deit_small_patch16_224(pretrained=False, **kwargs): + """ DeiT-small model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). + ImageNet-1k weights from https://github.com/facebookresearch/deit. + """ model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_deit_small_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -600,6 +669,9 @@ def vit_deit_small_patch16_224(pretrained=False, **kwargs): @register_model def vit_deit_base_patch16_224(pretrained=False, **kwargs): + """ DeiT base model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). + ImageNet-1k weights from https://github.com/facebookresearch/deit. 
+ """ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_deit_base_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -607,6 +679,9 @@ def vit_deit_base_patch16_224(pretrained=False, **kwargs): @register_model def vit_deit_base_patch16_384(pretrained=False, **kwargs): + """ DeiT base model @ 384x384 from paper (https://arxiv.org/abs/2012.12877). + ImageNet-1k weights from https://github.com/facebookresearch/deit. + """ model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) model = _create_vision_transformer('vit_deit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model From bb50ac470867eb681f688350e046d9abf5ad3bb8 Mon Sep 17 00:00:00 2001 From: Ross Wightman Date: Mon, 25 Jan 2021 11:05:23 -0800 Subject: [PATCH 17/20] Add DeiT distilled weights and distilled model def. Remove some redudant ViT model args. --- timm/models/vision_transformer.py | 160 ++++++++++++++++++++++++------ 1 file changed, 130 insertions(+), 30 deletions(-) diff --git a/timm/models/vision_transformer.py b/timm/models/vision_transformer.py index 90122090..ff2510f1 100644 --- a/timm/models/vision_transformer.py +++ b/timm/models/vision_transformer.py @@ -121,6 +121,15 @@ default_cfgs = { 'vit_deit_base_patch16_384': _cfg( url='https://dl.fbaipublicfiles.com/deit/deit_base_patch16_384-8de9b5d1.pth', input_size=(3, 384, 384), crop_pct=1.0), + 'vit_deit_tiny_distilled_patch16_224': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_tiny_distilled_patch16_224-b40b3cf7.pth'), + 'vit_deit_small_distilled_patch16_224': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_small_distilled_patch16_224-649709d9.pth'), + 'vit_deit_base_distilled_patch16_224': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_base_distilled_patch16_224-df68dfff.pth', ), + 'vit_deit_base_distilled_patch16_384': _cfg( + url='https://dl.fbaipublicfiles.com/deit/deit_base_distilled_patch16_384-d0272ac0.pth', + input_size=(3, 384, 384), crop_pct=1.0), } @@ -367,6 +376,53 @@ class VisionTransformer(nn.Module): return x +class DistilledVisionTransformer(VisionTransformer): + """ Vision Transformer with distillation token. 
+ + Paper: `Training data-efficient image transformers & distillation through attention` - + https://arxiv.org/abs/2012.12877 + + This impl of distilled ViT is taken from https://github.com/facebookresearch/deit + """ + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.dist_token = nn.Parameter(torch.zeros(1, 1, self.embed_dim)) + num_patches = self.patch_embed.num_patches + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 2, self.embed_dim)) + self.head_dist = nn.Linear(self.embed_dim, self.num_classes) if self.num_classes > 0 else nn.Identity() + + trunc_normal_(self.dist_token, std=.02) + trunc_normal_(self.pos_embed, std=.02) + self.head_dist.apply(self._init_weights) + + def forward_features(self, x): + B = x.shape[0] + x = self.patch_embed(x) + + cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks + dist_token = self.dist_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, dist_token, x), dim=1) + + x = x + self.pos_embed + x = self.pos_drop(x) + + for blk in self.blocks: + x = blk(x) + + x = self.norm(x) + return x[:, 0], x[:, 1] + + def forward(self, x): + x, x_dist = self.forward_features(x) + x = self.head(x) + x_dist = self.head_dist(x_dist) + if self.training: + return x, x_dist + else: + # during inference, return the average of both classifier predictions + return (x + x_dist) / 2 + + def resize_pos_embed(posemb, posemb_new): # Rescale the grid of position embeddings when loading from state_dict. Adapted from # https://github.com/google-research/vision_transformer/blob/00883dd691c63a6830751563748663526e811cee/vit_jax/checkpoint.py#L224 @@ -396,7 +452,8 @@ def checkpoint_filter_fn(state_dict, model): for k, v in state_dict.items(): if 'patch_embed.proj.weight' in k and len(v.shape) < 4: # For old models that I trained prior to conv based patchification - v = v.reshape(model.patch_embed.proj.weight.shape) + O, I, H, W = model.patch_embed.proj.weight.shape + v = v.reshape(O, -1, H, W) elif k == 'pos_embed' and v.shape != model.pos_embed.shape: # To resize pos embedding when using model at different size from pretrained weights v = resize_pos_embed(v, model.pos_embed) @@ -404,7 +461,7 @@ def checkpoint_filter_fn(state_dict, model): return out_dict -def _create_vision_transformer(variant, pretrained=False, **kwargs): +def _create_vision_transformer(variant, pretrained=False, distilled=False, **kwargs): default_cfg = default_cfgs[variant] default_num_classes = default_cfg['num_classes'] default_img_size = default_cfg['input_size'][-1] @@ -418,7 +475,8 @@ def _create_vision_transformer(variant, pretrained=False, **kwargs): _logger.warning("Removing representation layer for fine-tuning.") repr_size = None - model = VisionTransformer(img_size=img_size, num_classes=num_classes, representation_size=repr_size, **kwargs) + model_cls = DistilledVisionTransformer if distilled else VisionTransformer + model = model_cls(img_size=img_size, num_classes=num_classes, representation_size=repr_size, **kwargs) model.default_cfg = default_cfg if pretrained: @@ -446,7 +504,7 @@ def vit_base_patch16_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. 
""" - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_base_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -455,7 +513,7 @@ def vit_base_patch16_224(pretrained=False, **kwargs): def vit_base_patch32_224(pretrained=False, **kwargs): """ ViT-Base (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. """ - model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_base_patch32_224', pretrained=pretrained, **model_kwargs) return model @@ -465,7 +523,7 @@ def vit_base_patch16_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model @@ -475,9 +533,7 @@ def vit_base_patch32_384(pretrained=False, **kwargs): """ ViT-Base model (ViT-B/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, - norm_layer=partial(nn.LayerNorm, eps=1e-6), **kwargs) + model_kwargs = dict(patch_size=32, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_base_patch32_384', pretrained=pretrained, **model_kwargs) return model @@ -487,7 +543,7 @@ def vit_large_patch16_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 224x224, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) model = _create_vision_transformer('vit_large_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -496,7 +552,7 @@ def vit_large_patch16_224(pretrained=False, **kwargs): def vit_large_patch32_224(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). No pretrained weights. """ - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) model = _create_vision_transformer('vit_large_patch32_224', pretrained=pretrained, **model_kwargs) return model @@ -506,7 +562,7 @@ def vit_large_patch16_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/16) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. 
""" - model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16, **kwargs) model = _create_vision_transformer('vit_large_patch16_384', pretrained=pretrained, **model_kwargs) return model @@ -516,7 +572,7 @@ def vit_large_patch32_384(pretrained=False, **kwargs): """ ViT-Large model (ViT-L/32) from original paper (https://arxiv.org/abs/2010.11929). ImageNet-1k weights fine-tuned from in21k @ 384x384, source https://github.com/google-research/vision_transformer. """ - model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=32, embed_dim=1024, depth=24, num_heads=16, **kwargs) model = _create_vision_transformer('vit_large_patch32_384', pretrained=pretrained, **model_kwargs) return model @@ -527,7 +583,7 @@ def vit_base_patch16_224_in21k(pretrained=False, **kwargs): ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. """ model_kwargs = dict( - patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) + patch_size=16, embed_dim=768, depth=12, num_heads=12, representation_size=768, **kwargs) model = _create_vision_transformer('vit_base_patch16_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -538,7 +594,7 @@ def vit_base_patch32_224_in21k(pretrained=False, **kwargs): ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. """ model_kwargs = dict( - patch_size=32, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, representation_size=768, **kwargs) + patch_size=32, embed_dim=768, depth=12, num_heads=12, representation_size=768, **kwargs) model = _create_vision_transformer('vit_base_patch32_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -549,7 +605,7 @@ def vit_large_patch16_224_in21k(pretrained=False, **kwargs): ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. """ model_kwargs = dict( - patch_size=16, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) + patch_size=16, embed_dim=1024, depth=24, num_heads=16, representation_size=1024, **kwargs) model = _create_vision_transformer('vit_large_patch16_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -560,7 +616,7 @@ def vit_large_patch32_224_in21k(pretrained=False, **kwargs): ImageNet-21k weights @ 224x224, source https://github.com/google-research/vision_transformer. """ model_kwargs = dict( - patch_size=32, embed_dim=1024, depth=24, num_heads=16, mlp_ratio=4, representation_size=1024, **kwargs) + patch_size=32, embed_dim=1024, depth=24, num_heads=16, representation_size=1024, **kwargs) model = _create_vision_transformer('vit_large_patch32_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -572,7 +628,7 @@ def vit_huge_patch14_224_in21k(pretrained=False, **kwargs): NOTE: converted weights not currently available, too large for github release hosting. 
""" model_kwargs = dict( - patch_size=14, embed_dim=1280, depth=32, num_heads=16, mlp_ratio=4, representation_size=1280, **kwargs) + patch_size=14, embed_dim=1280, depth=32, num_heads=16, representation_size=1280, **kwargs) model = _create_vision_transformer('vit_huge_patch14_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -587,7 +643,7 @@ def vit_base_resnet50_224_in21k(pretrained=False, **kwargs): layers=(3, 4, 9), num_classes=0, global_pool='', in_chans=kwargs.get('in_chans', 3), preact=False, stem_type='same', conv_layer=StdConv2dSame) model_kwargs = dict( - embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, + embed_dim=768, depth=12, num_heads=12, hybrid_backbone=backbone, representation_size=768, **kwargs) model = _create_vision_transformer('vit_base_resnet50_224_in21k', pretrained=pretrained, **model_kwargs) return model @@ -602,7 +658,7 @@ def vit_base_resnet50_384(pretrained=False, **kwargs): backbone = ResNetV2( layers=(3, 4, 9), num_classes=0, global_pool='', in_chans=kwargs.get('in_chans', 3), preact=False, stem_type='same', conv_layer=StdConv2dSame) - model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet50_384', pretrained=pretrained, **model_kwargs) return model @@ -611,7 +667,7 @@ def vit_base_resnet50_384(pretrained=False, **kwargs): def vit_small_resnet26d_224(pretrained=False, **kwargs): """ Custom ViT small hybrid w/ ResNet26D stride 32. No pretrained weights. """ - backbone = resnet26d(pretrained=pretrained, features_only=True, out_indices=[4]) + backbone = resnet26d(pretrained=pretrained, in_chans=kwargs.get('in_chans', 3), features_only=True, out_indices=[4]) model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_small_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -621,7 +677,7 @@ def vit_small_resnet26d_224(pretrained=False, **kwargs): def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): """ Custom ViT small hybrid w/ ResNet50D 3-stages, stride 16. No pretrained weights. """ - backbone = resnet50d(pretrained=pretrained, features_only=True, out_indices=[3]) + backbone = resnet50d(pretrained=pretrained, in_chans=kwargs.get('in_chans', 3), features_only=True, out_indices=[3]) model_kwargs = dict(embed_dim=768, depth=8, num_heads=8, mlp_ratio=3, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_small_resnet50d_s3_224', pretrained=pretrained, **model_kwargs) return model @@ -631,8 +687,8 @@ def vit_small_resnet50d_s3_224(pretrained=False, **kwargs): def vit_base_resnet26d_224(pretrained=False, **kwargs): """ Custom ViT base hybrid w/ ResNet26D stride 32. No pretrained weights. 
""" - backbone = resnet26d(pretrained=pretrained, features_only=True, out_indices=[4]) - model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + backbone = resnet26d(pretrained=pretrained, in_chans=kwargs.get('in_chans', 3), features_only=True, out_indices=[4]) + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet26d_224', pretrained=pretrained, **model_kwargs) return model @@ -641,8 +697,8 @@ def vit_base_resnet26d_224(pretrained=False, **kwargs): def vit_base_resnet50d_224(pretrained=False, **kwargs): """ Custom ViT base hybrid w/ ResNet50D stride 32. No pretrained weights. """ - backbone = resnet50d(pretrained=pretrained, features_only=True, out_indices=[4]) - model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, hybrid_backbone=backbone, **kwargs) + backbone = resnet50d(pretrained=pretrained, in_chans=kwargs.get('in_chans', 3), features_only=True, out_indices=[4]) + model_kwargs = dict(embed_dim=768, depth=12, num_heads=12, hybrid_backbone=backbone, **kwargs) model = _create_vision_transformer('vit_base_resnet50d_224', pretrained=pretrained, **model_kwargs) return model @@ -652,7 +708,7 @@ def vit_deit_tiny_patch16_224(pretrained=False, **kwargs): """ DeiT-tiny model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). ImageNet-1k weights from https://github.com/facebookresearch/deit. """ - model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=192, depth=12, num_heads=3, **kwargs) model = _create_vision_transformer('vit_deit_tiny_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -662,7 +718,7 @@ def vit_deit_small_patch16_224(pretrained=False, **kwargs): """ DeiT-small model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). ImageNet-1k weights from https://github.com/facebookresearch/deit. """ - model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6, **kwargs) model = _create_vision_transformer('vit_deit_small_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -672,7 +728,7 @@ def vit_deit_base_patch16_224(pretrained=False, **kwargs): """ DeiT base model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). ImageNet-1k weights from https://github.com/facebookresearch/deit. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_deit_base_patch16_224', pretrained=pretrained, **model_kwargs) return model @@ -682,6 +738,50 @@ def vit_deit_base_patch16_384(pretrained=False, **kwargs): """ DeiT base model @ 384x384 from paper (https://arxiv.org/abs/2012.12877). ImageNet-1k weights from https://github.com/facebookresearch/deit. """ - model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, **kwargs) + model_kwargs = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12, **kwargs) model = _create_vision_transformer('vit_deit_base_patch16_384', pretrained=pretrained, **model_kwargs) return model + + +@register_model +def vit_deit_tiny_distilled_patch16_224(pretrained=False, **kwargs): + """ DeiT-tiny distilled model @ 224x224 from paper (https://arxiv.org/abs/2012.12877). 
From 587780e56b03b38769da3b1260abf3f3a8496ce9 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 25 Jan 2021 11:22:11 -0800
Subject: [PATCH 18/20] Update README.md and bump version to 0.4.0

---
 README.md       | 12 ++++++++++++
 timm/version.py |  2 +-
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index a6d21164..da938f8a 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,17 @@
 
 ## What's New
 
+### Jan 25, 2021
+* Add ResNetV2 Big Transfer (BiT) models w/ ImageNet-1k and 21k weights from https://github.com/google-research/big_transfer
+* Add official R50+ViT-B/16 hybrid models + weights from https://github.com/google-research/vision_transformer
+* Add model defs and weights for DeiT Vision Transformer models from https://github.com/facebookresearch/deit
+* Refactor dataset classes into ImageDataset/IterableImageDataset + dataset specific parser classes
+* Add Tensorflow-Datasets (TFDS) wrapper to allow use of TFDS image classification sets with train script
+  * Ex: `train.py /data/tfds --dataset tfds/oxford_iiit_pet --val-split test --model resnet50 -b 256 --amp --num-classes 37 --opt adamw --lr 3e-4 --weight-decay .001 --pretrained -j 2`
+* Add improved .tar dataset parser that reads images from .tar, folder of .tar files, or .tar within .tar
+  * Run validation on full ImageNet-21k directly from tar w/ BiT model: `validate.py /data/fall11_whole.tar --model resnetv2_50x1_bitm_in21k --amp`
+* Models in this update should be stable w/ possible exception of ViT/BiT, possibility of some regressions with train/val scripts and dataset handling
+
 ### Jan 3, 2021
 * Add SE-ResNet-152D weights
   * 256x256 val, 0.94 crop top-1 - 83.75
@@ -132,6 +143,7 @@ A full version of the list below with source links can be found in the [document
 * Big Transfer ResNetV2 (BiT) - https://arxiv.org/abs/1912.11370
 * CspNet (Cross-Stage Partial Networks) - https://arxiv.org/abs/1911.11929
+* DeiT (Vision Transformer) - https://arxiv.org/abs/2012.12877
 * DenseNet - https://arxiv.org/abs/1608.06993
 * DLA - https://arxiv.org/abs/1707.06484
 * DPN (Dual-Path Network) - https://arxiv.org/abs/1707.01629
diff --git a/timm/version.py b/timm/version.py
index 80eb7f98..abeeedbf 100644
--- a/timm/version.py
+++ b/timm/version.py
@@ -1 +1 @@
-__version__ = '0.3.3'
+__version__ = '0.4.0'
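
The improved .tar parser called out in the README above surfaces through the refactored ImageDataset. A sketch under stated assumptions: the tar path below is a placeholder, and a .tar img_root is assumed to route to the tar parser rather than the folder parser, per the README note.

    from timm.data import ImageDataset, create_transform

    # Read a classification dataset directly from a tar archive; the path is
    # a placeholder. create_transform(224) builds a default eval transform, so
    # each sample comes back as a normalized 3x224x224 tensor plus a label.
    ds = ImageDataset('/data/imagenet-val.tar', transform=create_transform(224))
    img, target = ds[0]
    print(len(ds), tuple(img.shape), target)
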
From 38d8f67570828e341280b776e8ea232d750a4a8b Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 25 Jan 2021 11:53:34 -0800
Subject: [PATCH 19/20] Fix potential issue with change to num_classes arg in train/validate.py defaulting to None (rely on model def / default_cfg)

---
 timm/models/helpers.py | 1 +
 train.py               | 3 +++
 validate.py            | 3 +++
 3 files changed, 7 insertions(+)

diff --git a/timm/models/helpers.py b/timm/models/helpers.py
index 96f551e3..562a01c5 100644
--- a/timm/models/helpers.py
+++ b/timm/models/helpers.py
@@ -198,6 +198,7 @@ def load_pretrained(model, cfg=None, num_classes=1000, in_chans=3, filter_fn=Non
     classifier_name = cfg['classifier']
     if num_classes == 1000 and cfg['num_classes'] == 1001:
+        # FIXME this special case is problematic as number of pretrained weight sources increases
         # special case for imagenet trained models with extra background class in pretrained weights
         classifier_weight = state_dict[classifier_name + '.weight']
         state_dict[classifier_name + '.weight'] = classifier_weight[1:]
diff --git a/train.py b/train.py
index 94c417b4..aa8e6553 100755
--- a/train.py
+++ b/train.py
@@ -337,6 +337,9 @@ def main():
         bn_eps=args.bn_eps,
         scriptable=args.torchscript,
         checkpoint_path=args.initial_checkpoint)
+    if args.num_classes is None:
+        assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'
+        args.num_classes = model.num_classes  # FIXME handle model default vs config num_classes more elegantly
 
     if args.local_rank == 0:
         _logger.info('Model %s created, param count: %d' %
diff --git a/validate.py b/validate.py
index be977cc2..5b5f98cf 100755
--- a/validate.py
+++ b/validate.py
@@ -137,6 +137,9 @@ def validate(args):
         in_chans=3,
         global_pool=args.gp,
         scriptable=args.torchscript)
+    if args.num_classes is None:
+        assert hasattr(model, 'num_classes'), 'Model must have `num_classes` attr if not set on cmd line/config.'
+        args.num_classes = model.num_classes
 
     if args.checkpoint:
         load_checkpoint(model, args.checkpoint, args.use_ema)
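
The helpers.py hunk above flags (FIXME) the 1001-class special case: ImageNet weights carrying an extra background class at index 0 are sliced down to fit a 1000-class head. A standalone sketch of that remap; the 'fc' classifier name and tensor shapes are illustrative, and slicing the bias the same way is an assumption (that line falls outside the hunk shown).

    import torch

    # Illustrative 1001-class classifier weights ('fc' is a stand-in name).
    state_dict = {'fc.weight': torch.randn(1001, 2048), 'fc.bias': torch.randn(1001)}

    # Drop the background class at index 0, mirroring classifier_weight[1:] above.
    state_dict['fc.weight'] = state_dict['fc.weight'][1:]
    state_dict['fc.bias'] = state_dict['fc.bias'][1:]  # assumed handled the same way
    print(state_dict['fc.weight'].shape)  # torch.Size([1000, 2048])
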
From 745bc5f723b642509ced3386a7b5119d37756cd1 Mon Sep 17 00:00:00 2001
From: Ross Wightman
Date: Mon, 25 Jan 2021 12:03:13 -0800
Subject: [PATCH 20/20] Mention 21k ViT weights in README.md

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index da938f8a..70c9efcc 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,8 @@
 ### Jan 25, 2021
 * Add ResNetV2 Big Transfer (BiT) models w/ ImageNet-1k and 21k weights from https://github.com/google-research/big_transfer
 * Add official R50+ViT-B/16 hybrid models + weights from https://github.com/google-research/vision_transformer
+* ImageNet-21k ViT weights are added w/ model defs and representation layer (pre logits) support
+  * NOTE: ImageNet-21k classifier heads were zero'd in original weights, they are only useful for transfer learning
 * Add model defs and weights for DeiT Vision Transformer models from https://github.com/facebookresearch/deit
 * Refactor dataset classes into ImageDataset/IterableImageDataset + dataset specific parser classes
 * Add Tensorflow-Datasets (TFDS) wrapper to allow use of TFDS image classification sets with train script
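
Per the NOTE above, the ImageNet-21k classifier heads were zeroed, so the 21k checkpoints are intended as transfer-learning starting points rather than for direct 21k inference. A closing sketch under that reading; the model name comes from the defs earlier in this series, and the class count is arbitrary.

    import timm

    # Load ImageNet-21k pretrained weights with a fresh head for fine-tuning.
    # Overriding num_classes (37 here, e.g. Oxford-IIIT Pet as in the README
    # example) means the zeroed 21k classifier weights are not loaded.
    model = timm.create_model('vit_base_resnet50_224_in21k', pretrained=True, num_classes=37)
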