Fix MultiEpochsDataLoader when there's no batching

pull/212/head
Santiago Castro authored 4 years ago, committed by GitHub
parent 6e9d6172c8
commit feaa3abc51

@@ -230,12 +230,15 @@ class MultiEpochsDataLoader(torch.utils.data.DataLoader):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._DataLoader__initialized = False
-        self.batch_sampler = _RepeatSampler(self.batch_sampler)
+        if self.batch_sampler is None:
+            self.sampler = _RepeatSampler(self.sampler)
+        else:
+            self.batch_sampler = _RepeatSampler(self.batch_sampler)
         self._DataLoader__initialized = True
         self.iterator = super().__iter__()

     def __len__(self):
-        return len(self.batch_sampler.sampler)
+        return len(self.sampler) if self.batch_sampler is None else len(self.batch_sampler.sampler)

     def __iter__(self):
         for i in range(len(self)):
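
For context, _RepeatSampler is defined elsewhere in the same file and is not part of this hunk. A minimal sketch of such a wrapper, assuming the usual repeat-forever pattern; the delegating __len__ is an assumption added here so that the unbatched len(self.sampler) path above has a defined length:

class _RepeatSampler:
    """Wraps a sampler (or batch sampler) and re-yields its indices forever,
    so the parent DataLoader never tears down its worker processes between
    epochs."""

    def __init__(self, sampler):
        self.sampler = sampler

    def __len__(self):
        # Length of one pass over the wrapped sampler (an assumption, see the
        # lead-in): lets MultiEpochsDataLoader.__len__ call len(self.sampler).
        return len(self.sampler)

    def __iter__(self):
        while True:
            yield from iter(self.sampler)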
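
Why the fix is needed: constructing a DataLoader with batch_size=None disables automatic batching, and PyTorch then leaves batch_sampler as None, so the old code wrapped None and both iteration and __len__ failed for unbatched loaders. A minimal usage sketch of the fixed path, assuming the patched class and the wrapper above are in scope; SquaresDataset is a hypothetical toy dataset invented for illustration:

from torch.utils.data import Dataset


class SquaresDataset(Dataset):
    # Hypothetical toy dataset: four integer samples, no batching involved.
    def __len__(self):
        return 4

    def __getitem__(self, i):
        return i * i


# batch_size=None disables automatic batching, so the parent DataLoader
# leaves batch_sampler as None -- exactly the case this commit fixes.
loader = MultiEpochsDataLoader(SquaresDataset(), batch_size=None)

for epoch in range(2):
    for sample in loader:  # one pass per epoch; the iterator persists across epochs
        print(epoch, sample)

With the pre-fix code, the same construction would wrap None in the repeat sampler, so len(loader) would end up calling len() on that None and raise.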
