Comment out some temporary debug logs for the WDS dataset

pull/1479/head
Ross Wightman 2 years ago
parent e9dccc918c
commit d3961536c9

@@ -228,7 +228,7 @@ if wds is not None:
             seed = pytorch_worker_seed() + epoch
         else:
             seed = self.seed + epoch
-        _logger.info(f'shuffle seed: {self.seed}, {seed}, epoch: {epoch}')  # FIXME temporary
+        # _logger.info(f'shuffle seed: {self.seed}, {seed}, epoch: {epoch}')  # FIXME temporary
         rng = random.Random(seed)
         return _shuffle(src, self.bufsize, self.initial, rng)
@@ -429,11 +429,11 @@ class ReaderWds(Reader):
         ds = self.ds
         i = 0
-        _logger.info(f'start {i}, {self.worker_id}')  # FIXME temporary debug
+        # _logger.info(f'start {i}, {self.worker_id}')  # FIXME temporary debug
         for sample in ds:
             yield sample[self.image_key], sample[self.target_key]
             i += 1
-        _logger.info(f'end {i}, {self.worker_id}')  # FIXME temporary debug
+        # _logger.info(f'end {i}, {self.worker_id}')  # FIXME temporary debug

     def __len__(self):
         num_samples = self._num_samples_per_worker() * self.num_workers

Loading…
Cancel
Save