fix DatasetFactory api to paddle.base (PaddlePaddle#953)
danleifeng authored Oct 11, 2023
1 parent 9f8bb6e commit eb869a1
Showing 4 changed files with 6 additions and 6 deletions.
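
All four files make the same one-line change: the dataset factory is now reached through paddle.base (the module formerly exposed as paddle.fluid) instead of the top-level paddle namespace. Below is a minimal sketch of what the updated call site looks like, assuming Paddle 2.5+ where paddle.base is available; the feed variable is a placeholder standing in for the repo's self.input_data list, and the setter values mirror the runner.* config keys used in these files.

import paddle

paddle.enable_static()

# Placeholder sparse slot; name, shape, and dtype are illustrative assumptions.
slot = paddle.static.data(name="slot_1", shape=[None, 1], dtype="int64", lod_level=1)

# New module path introduced by this commit.
dataset = paddle.base.DatasetFactory().create_dataset("InMemoryDataset")
dataset.set_use_var([slot])   # feed variables the dataset produces batches for
dataset.set_batch_size(1)     # driven by runner.batch_size in the actual tools
dataset.set_thread(1)         # driven by runner.*_thread_num in the actual tools
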
2 changes: 1 addition & 1 deletion tools/feature_importance.py
@@ -109,7 +109,7 @@ def run_server(self):
         fleet.run_server()
 
     def wait_and_prepare_dataset(self):
-        dataset = paddle.DatasetFactory().create_dataset("InMemoryDataset")
+        dataset = paddle.base.DatasetFactory().create_dataset("InMemoryDataset")
         dataset.set_use_var(self.input_data)
         train_data_dir = self.config.get("runner.data_dir", "")
         dataset.set_batch_size(int(config.get("runner.batch_size", "1")))
2 changes: 1 addition & 1 deletion tools/static_ps_offline_infer.py
@@ -95,7 +95,7 @@ def run_server(self):
         fleet.run_server()
 
     def wait_and_prepare_dataset(self):
-        dataset = paddle.DatasetFactory().create_dataset("InMemoryDataset")
+        dataset = paddle.base.DatasetFactory().create_dataset("InMemoryDataset")
         dataset.set_use_var(self.input_data)
         train_data_dir = self.config.get("runner.data_dir", "")
         dataset.set_batch_size(self.config.get('runner.batch_size'))
6 changes: 3 additions & 3 deletions tools/static_ps_online_trainer.py
@@ -145,7 +145,7 @@ def run_server(self):
 
     def wait_and_prepare_dataset(self, day, pass_index):
         train_data_path = self.config.get("runner.train_data_dir", [])
-        dataset = paddle.DatasetFactory().create_dataset(self.reader_type)
+        dataset = paddle.base.DatasetFactory().create_dataset(self.reader_type)
         dataset.set_use_var(self.input_data)
         dataset.set_batch_size(self.config.get('runner.train_batch_size', 1))
         dataset.set_thread(self.config.get('runner.train_thread_num', 12))
@@ -197,7 +197,7 @@ def prefetch_next_dataset(self, day, pass_index):
         if not next_data_ready:
             next_dataset = None
         else:
-            next_dataset = paddle.DatasetFactory().create_dataset(
+            next_dataset = paddle.base.DatasetFactory().create_dataset(
                 self.reader_type)
             next_dataset.set_use_var(self.input_data)
             next_dataset.set_batch_size(
@@ -227,7 +227,7 @@ def prefetch_next_dataset(self, day, pass_index):
 
     def wait_and_prepare_infer_dataset(self, day, pass_index):
         test_data_path = self.config.get("runner.infer_data_dir", [])
-        dataset = paddle.DatasetFactory().create_dataset(self.reader_type)
+        dataset = paddle.base.DatasetFactory().create_dataset(self.reader_type)
         dataset.set_use_var(self.input_data)
         dataset.set_batch_size(self.config.get('runner.infer_batch_size', 1))
         dataset.set_thread(self.config.get('runner.infer_thread_num', 1))
2 changes: 1 addition & 1 deletion tools/utils/static_ps/dataset.py
@@ -107,7 +107,7 @@ def generate_dataset(self, config, chunk_index, pass_num):
         graph_config["batch_size"] = config.infer_batch_size
         graph_config["samples"] = str_infer_samples
 
-        dataset = paddle.DatasetFactory().create_dataset("InMemoryDataset")
+        dataset = paddle.base.DatasetFactory().create_dataset("InMemoryDataset")
         dataset.set_feed_type("SlotRecordInMemoryDataFeed")
         dataset.set_use_var(self.holder_list)
         dataset.set_graph_config(graph_config)
