7 changes: 4 additions & 3 deletions python/paddle/fluid/dataloader/dataloader_iter.py
@@ -59,6 +59,7 @@ def __init__(self, loader):
self._places = loader.places
self._return_list = loader.return_list
self._batch_sampler = loader.batch_sampler
self._drop_last = loader.drop_last
self._auto_collate_batch = loader.auto_collate_batch
self._num_workers = loader.num_workers
self._use_buffer_reader = loader.use_buffer_reader
@@ -111,7 +112,7 @@ def __init__(self, loader):

self._dataset_fetcher = _DatasetKind.create_fetcher(
self._dataset_kind, self._dataset, self._auto_collate_batch,
self._collate_fn, True)
self._collate_fn, self._drop_last)

# NOTE: _structrue_infos used to record the data structure of
# batch to restore batch structure after reading Tensor
@@ -309,8 +310,8 @@ def _init_workers(self):
args=(self._dataset, self._dataset_kind, indices_queue,
self._data_queue, self._workers_done_event,
self._auto_collate_batch, self._collate_fn,
self._worker_init_fn, i, self._num_workers,
self._use_shared_memory))
self._drop_last, self._worker_init_fn, i,
self._num_workers, self._use_shared_memory))
worker.daemon = True
worker.start()
self._workers.append(worker)
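The change above threads the loader's drop_last setting through to the single-process fetcher and to each worker process instead of hard-coding True, so an iterable-style dataset no longer discards its final partial batch when the user asked to keep it. A minimal, self-contained sketch of the batching behavior this enables; the fetch_batches helper below is illustrative, not Paddle's internal fetcher API:

# Illustrative generator: groups samples from an iterator into batches and
# keeps the trailing partial batch only when drop_last is False.
def fetch_batches(sample_iter, batch_size, drop_last):
    batch = []
    for sample in sample_iter:
        batch.append(sample)
        if len(batch) == batch_size:
            yield batch
            batch = []
    if batch and not drop_last:
        yield batch  # the final, smaller batch is preserved

# 10 samples with batch_size 3:
assert len(list(fetch_batches(iter(range(10)), 3, drop_last=True))) == 3
assert len(list(fetch_batches(iter(range(10)), 3, drop_last=False))) == 4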
7 changes: 4 additions & 3 deletions python/paddle/fluid/dataloader/worker.py
@@ -253,7 +253,7 @@ def mix(x, y):


def _worker_loop(dataset, dataset_kind, indices_queue, out_queue, done_event,
auto_collate_batch, collate_fn, init_fn, worker_id,
auto_collate_batch, collate_fn, drop_last, init_fn, worker_id,
num_workers, use_shared_memory):
try:
# NOTE: [ mmap files clear ] When the child process exits unexpectedly,
@@ -282,8 +282,9 @@ def _worker_loop(dataset, dataset_kind, indices_queue, out_queue, done_event,
try:
if init_fn is not None:
init_fn(worker_id)
fetcher = _DatasetKind.create_fetcher(
dataset_kind, dataset, auto_collate_batch, collate_fn, True)
fetcher = _DatasetKind.create_fetcher(dataset_kind, dataset,
auto_collate_batch,
collate_fn, drop_last)
except:
init_exception = _WorkerException(worker_id)

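_worker_loop now takes drop_last positionally between collate_fn and init_fn, so any caller that passes arguments positionally (including the unit tests near the end of this diff) must be updated as well. A standalone stub, assuming only the parameter order shown above and no other Paddle internals, makes the new contract explicit:

# Stub with the same parameter order as the updated _worker_loop; not Paddle
# code, it only shows where a positional drop_last argument now lands.
def _worker_loop_stub(dataset, dataset_kind, indices_queue, out_queue,
                      done_event, auto_collate_batch, collate_fn, drop_last,
                      init_fn, worker_id, num_workers, use_shared_memory):
    return drop_last

# A positional caller must insert drop_last before init_fn:
assert _worker_loop_stub(None, 0, None, None, None, True, None, True,
                         None, 0, 1, False) is True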
1 change: 1 addition & 0 deletions python/paddle/fluid/reader.py
@@ -401,6 +401,7 @@ def __init__(self,
shuffle=shuffle,
drop_last=drop_last)

self.drop_last = drop_last
self.auto_collate_batch = self.batch_sampler is not None

self.pin_memory = False
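Storing drop_last on the loader object is what allows both the single-process iterator and the worker processes to read it back. From the user's side the behavior is the usual one: drop_last=False keeps the final partial batch, drop_last=True discards it. A small usage sketch against the public paddle.io API, assuming Paddle 2.x dynamic-graph defaults; TenSamples is a made-up dataset for illustration:

import numpy as np
from paddle.io import Dataset, DataLoader

class TenSamples(Dataset):
    # Map-style dataset with 10 one-element float32 samples.
    def __len__(self):
        return 10

    def __getitem__(self, idx):
        return np.array([idx], dtype='float32')

for drop_last, expected in [(False, 4), (True, 3)]:
    loader = DataLoader(TenSamples(), batch_size=3, drop_last=drop_last)
    assert len(list(loader)) == expected  # 10 samples, batch_size 3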
Changes to the DataLoader dataset unit test (file path not shown)
@@ -397,5 +397,30 @@ def test_main(self):
assert out == outp


class TestDatasetWithDropLast(unittest.TestCase):
def run_main(self, dataset, num_samples, batch_size):
for num_workers in [0, 1]:
for drop_last in [True, False]:
steps = (num_samples + (1 - int(drop_last)) * \
(batch_size - 1)) // batch_size
dataloader = DataLoader(
dataset,
batch_size=batch_size,
drop_last=drop_last,
num_workers=num_workers)
datas = []
for data in dataloader:
datas.append(data)
assert len(datas) == steps

def test_map_dataset(self):
dataset = RandomDataset(10)
self.run_main(dataset, 10, 3)

def test_iterable_dataset(self):
dataset = RandomIterableDataset(10)
self.run_main(dataset, 10, 3)


if __name__ == '__main__':
unittest.main()
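For the parameters both tests use (10 samples, batch_size 3), the steps expression in run_main reduces to floor division when drop_last is True and to ceiling division when it is False:

# Same expression as in run_main above, evaluated for the test parameters.
num_samples, batch_size = 10, 3
for drop_last in (True, False):
    steps = (num_samples + (1 - int(drop_last)) * (batch_size - 1)) // batch_size
    print(drop_last, steps)  # True -> 3 batches, False -> 4 batches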
Changes to the DataLoader worker-loop exception unit test (file path not shown)
@@ -180,7 +180,7 @@ def _collate_fn(sample_list):
indices_queue.put(None)
_worker_loop(loader._dataset, 0, indices_queue,
loader._data_queue, loader._workers_done_event,
True, _collate_fn, _init_fn, 0, 1,
True, _collate_fn, True, _init_fn, 0, 1,
loader._use_shared_memory)
self.assertTrue(False)
except AssertionError:
@@ -224,7 +224,7 @@ def _collate_fn(sample_list):
loader._workers_done_event.set()
_worker_loop(loader._dataset, 0, indices_queue,
loader._data_queue, loader._workers_done_event,
True, _collate_fn, _init_fn, 0, 1,
True, _collate_fn, True, _init_fn, 0, 1,
loader._use_shared_memory)
self.assertTrue(True)
except AssertionError:
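In both exception tests the newly inserted second True fills the drop_last slot of the updated _worker_loop signature, while the first True is still auto_collate_batch. The same call, annotated for readability only (it reuses the test's local names, so it is not standalone code):

_worker_loop(loader._dataset, 0, indices_queue,
             loader._data_queue, loader._workers_done_event,
             True,          # auto_collate_batch
             _collate_fn,   # collate_fn
             True,          # drop_last, newly added in this change
             _init_fn, 0, 1,
             loader._use_shared_memory)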