Skip to content

Commit ac9cb9f

Browse files
committed
Fix
1 parent 04fe843 commit ac9cb9f

File tree

54 files changed

+102
-219
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

54 files changed

+102
-219
lines changed

test/book/notest_understand_sentiment.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -72,9 +72,7 @@ def train(
7272
dict_dim = len(word_dict)
7373
class_dim = 2
7474

75-
data = paddle.static.data(
76-
name="words", shape=[-1, 1], dtype="int64", lod_level=1
77-
)
75+
data = paddle.static.data(name="words", shape=[-1, 1], dtype="int64")
7876
label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
7977

8078
if not parallel:

test/deprecated/ir/inference/test_trt_multiclass_nms3_op_deprecated.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -118,9 +118,9 @@ class number
118118
import paddle
119119
from ppdet.modeling import ops
120120
boxes = paddle.static.data(name='bboxes', shape=[81, 4],
121-
dtype='float32', lod_level=1)
121+
dtype='float32')
122122
scores = paddle.static.data(name='scores', shape=[81],
123-
dtype='float32', lod_level=1)
123+
dtype='float32')
124124
out, index = ops.multiclass_nms(bboxes=boxes,
125125
scores=scores,
126126
background_label=0,

test/deprecated/legacy_test/dist_fleet_ctr.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,13 +67,11 @@ def net(self, args, is_train=True, batch_size=4, lr=0.01):
6767
name="dnn_data",
6868
shape=[-1, 1],
6969
dtype="int64",
70-
lod_level=1,
7170
)
7271
lr_data = paddle.static.data(
7372
name="lr_data",
7473
shape=[-1, 1],
7574
dtype="int64",
76-
lod_level=1,
7775
)
7876
label = paddle.static.data(
7977
name="click",

test/deprecated/legacy_test/test_dataset.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def test_run_with_dump(self):
105105
slots_vars = []
106106
for slot in slots:
107107
var = paddle.static.data(
108-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
108+
name=slot, shape=[-1, 1], dtype="int64"
109109
)
110110
slots_vars.append(var)
111111

@@ -201,7 +201,7 @@ def test_set_download_cmd(self):
201201
slots_vars = []
202202
for slot in slots:
203203
var = paddle.static.data(
204-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
204+
name=slot, shape=[-1, 1], dtype="int64"
205205
)
206206
slots_vars.append(var)
207207

@@ -267,7 +267,7 @@ def test_in_memory_dataset_run(self):
267267
slots_vars = []
268268
for slot in slots:
269269
var = paddle.static.data(
270-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
270+
name=slot, shape=[-1, 1], dtype="int64"
271271
)
272272
slots_vars.append(var)
273273

@@ -365,12 +365,12 @@ def test_in_memory_dataset_masterpatch(self):
365365
with base.program_guard(train_program, startup_program):
366366
for slot in slots[:2]:
367367
var = paddle.static.data(
368-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
368+
name=slot, shape=[-1, 1], dtype="int64"
369369
)
370370
slots_vars.append(var)
371371
for slot in slots[2:]:
372372
var = paddle.static.data(
373-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
373+
name=slot, shape=[-1, 1], dtype="float32"
374374
)
375375
slots_vars.append(var)
376376

@@ -521,7 +521,7 @@ def test_in_memory_dataset_run_2(self):
521521
slots_vars = []
522522
for slot in slots:
523523
var = paddle.static.data(
524-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
524+
name=slot, shape=[-1, 1], dtype="float32"
525525
)
526526
slots_vars.append(var)
527527

@@ -645,7 +645,7 @@ def test_queue_dataset_run(self):
645645
slots_vars = []
646646
for slot in slots:
647647
var = paddle.static.data(
648-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
648+
name=slot, shape=[-1, 1], dtype="int64"
649649
)
650650
slots_vars.append(var)
651651

@@ -724,7 +724,7 @@ def test_queue_dataset_run_2(self):
724724
slots_vars = []
725725
for slot in slots:
726726
var = paddle.static.data(
727-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
727+
name=slot, shape=[-1, 1], dtype="float32"
728728
)
729729
slots_vars.append(var)
730730

@@ -793,7 +793,7 @@ def test_queue_dataset_run_3(self):
793793
slots_vars = []
794794
for slot in slots:
795795
var = paddle.static.data(
796-
name=slot, shape=[None, 1], dtype="int64", lod_level=1
796+
name=slot, shape=[None, 1], dtype="int64"
797797
)
798798
slots_vars.append(var)
799799

@@ -861,7 +861,7 @@ def test_run_with_inmemory_dataset_train_debug_mode(self):
861861
slots_vars = []
862862
for slot in slots:
863863
var = paddle.static.data(
864-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
864+
name=slot, shape=[-1, 1], dtype="int64"
865865
)
866866
slots_vars.append(var)
867867

@@ -927,7 +927,7 @@ def test_cuda_in_memory_dataset_run(self):
927927
slots_vars = []
928928
for slot in slots:
929929
var = paddle.static.data(
930-
name=slot, shape=[-1, 1], dtype="int64", lod_level=1
930+
name=slot, shape=[-1, 1], dtype="int64"
931931
)
932932
slots_vars.append(var)
933933

@@ -1021,7 +1021,7 @@ def test_dataset_fleet(self):
10211021
slots_vars = []
10221022
for slot in slots:
10231023
var = paddle.static.data(
1024-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
1024+
name=slot, shape=[-1, 1], dtype="float32"
10251025
)
10261026
slots_vars.append(var)
10271027
fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])
@@ -1093,7 +1093,7 @@ def test_dataset_fleet2(self):
10931093
slots_vars = []
10941094
for slot in slots:
10951095
var = paddle.static.data(
1096-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
1096+
name=slot, shape=[-1, 1], dtype="float32"
10971097
)
10981098
slots_vars.append(var)
10991099
fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])
@@ -1226,7 +1226,7 @@ def test_bosps_dataset_fleet2(self):
12261226
slots_vars = []
12271227
for slot in slots:
12281228
var = paddle.static.data(
1229-
name=slot, shape=[-1, 1], dtype="float32", lod_level=1
1229+
name=slot, shape=[-1, 1], dtype="float32"
12301230
)
12311231
slots_vars.append(var)
12321232
fake_cost = paddle.subtract(slots_vars[0], slots_vars[-1])

test/deprecated/legacy_test/test_dist_fleet_a_sync_optimizer_auto_async_deprecated.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,6 @@ def test_a_sync_optimizer3(self):
5050
name="x",
5151
shape=[-1, 1],
5252
dtype="int64",
53-
lod_level=1,
5453
)
5554
x_embedding = paddle.static.nn.embedding(
5655
is_distributed=False,

test/deprecated/legacy_test/test_dist_fleet_heter_program_deprecated.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -70,9 +70,7 @@ def build_input(self):
7070
)
7171

7272
sparse_input_ids = [
73-
paddle.static.data(
74-
name="C" + str(i), shape=[-1, 1], lod_level=1, dtype="int64"
75-
)
73+
paddle.static.data(name="C" + str(i), shape=[-1, 1], dtype="int64")
7674
for i in range(1, 27)
7775
]
7876

test/deprecated/legacy_test/test_dist_fleet_ps13_deprecated.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -75,9 +75,7 @@ def get_loss(cos_q_pt, cos_q_nt):
7575
is_sparse = True
7676

7777
# query
78-
q = paddle.static.data(
79-
name="query_ids", shape=[-1, 1], dtype="int64", lod_level=1
80-
)
78+
q = paddle.static.data(name="query_ids", shape=[-1, 1], dtype="int64")
8179
# embedding
8280
q_emb = paddle.static.nn.sparse_embedding(
8381
input=q,
@@ -108,7 +106,7 @@ def get_loss(cos_q_pt, cos_q_nt):
108106
label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
109107
# pt
110108
pt = paddle.static.data(
111-
name="pos_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
109+
name="pos_title_ids", shape=[-1, 1], dtype="int64"
112110
)
113111
# embedding
114112
pt_emb = paddle.static.nn.sparse_embedding(
@@ -139,7 +137,7 @@ def get_loss(cos_q_pt, cos_q_nt):
139137
)
140138
# nt
141139
nt = paddle.static.data(
142-
name="neg_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
140+
name="neg_title_ids", shape=[-1, 1], dtype="int64"
143141
)
144142
# embedding
145143
nt_emb = paddle.static.nn.sparse_embedding(

test/deprecated/legacy_test/test_dist_fleet_ps2_deprecated.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -74,9 +74,7 @@ def get_loss(cos_q_pt, cos_q_nt):
7474
is_sparse = True
7575

7676
# query
77-
q = paddle.static.data(
78-
name="query_ids", shape=[-1, 1], dtype="int64", lod_level=1
79-
)
77+
q = paddle.static.data(name="query_ids", shape=[-1, 1], dtype="int64")
8078
# embedding
8179
q_emb = paddle.static.nn.sparse_embedding(
8280
input=q,
@@ -108,7 +106,7 @@ def get_loss(cos_q_pt, cos_q_nt):
108106
label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
109107
# pt
110108
pt = paddle.static.data(
111-
name="pos_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
109+
name="pos_title_ids", shape=[-1, 1], dtype="int64"
112110
)
113111
# embedding
114112
pt_emb = paddle.static.nn.sparse_embedding(
@@ -139,7 +137,7 @@ def get_loss(cos_q_pt, cos_q_nt):
139137
)
140138
# nt
141139
nt = paddle.static.data(
142-
name="neg_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
140+
name="neg_title_ids", shape=[-1, 1], dtype="int64"
143141
)
144142
# embedding
145143
nt_emb = paddle.static.nn.sparse_embedding(

test/deprecated/legacy_test/test_dist_fleet_ps3_deprecated.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -71,9 +71,7 @@ def get_loss(cos_q_pt, cos_q_nt):
7171
is_sparse = False
7272

7373
# query
74-
q = paddle.static.data(
75-
name="query_ids", shape=[-1, 1], dtype="int64", lod_level=1
76-
)
74+
q = paddle.static.data(name="query_ids", shape=[-1, 1], dtype="int64")
7775
# embedding
7876
q_emb = paddle.static.nn.embedding(
7977
input=q,
@@ -106,7 +104,7 @@ def get_loss(cos_q_pt, cos_q_nt):
106104
label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
107105
# pt
108106
pt = paddle.static.data(
109-
name="pos_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
107+
name="pos_title_ids", shape=[-1, 1], dtype="int64"
110108
)
111109
# embedding
112110
pt_emb = paddle.static.nn.embedding(
@@ -139,7 +137,7 @@ def get_loss(cos_q_pt, cos_q_nt):
139137
)
140138
# nt
141139
nt = paddle.static.data(
142-
name="neg_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
140+
name="neg_title_ids", shape=[-1, 1], dtype="int64"
143141
)
144142
# embedding
145143
nt_emb = paddle.static.nn.embedding(

test/deprecated/legacy_test/test_dist_fleet_ps4_deprecated.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -71,9 +71,7 @@ def get_loss(cos_q_pt, cos_q_nt):
7171
is_sparse = True
7272

7373
# query
74-
q = paddle.static.data(
75-
name="query_ids", shape=[-1, 1], dtype="int64", lod_level=1
76-
)
74+
q = paddle.static.data(name="query_ids", shape=[-1, 1], dtype="int64")
7775
# embedding
7876
q_emb = paddle.static.nn.sparse_embedding(
7977
input=q,
@@ -104,7 +102,7 @@ def get_loss(cos_q_pt, cos_q_nt):
104102
label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
105103
# pt
106104
pt = paddle.static.data(
107-
name="pos_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
105+
name="pos_title_ids", shape=[-1, 1], dtype="int64"
108106
)
109107
# embedding
110108
pt_emb = paddle.static.nn.sparse_embedding(
@@ -135,7 +133,7 @@ def get_loss(cos_q_pt, cos_q_nt):
135133
)
136134
# nt
137135
nt = paddle.static.data(
138-
name="neg_title_ids", shape=[-1, 1], dtype="int64", lod_level=1
136+
name="neg_title_ids", shape=[-1, 1], dtype="int64"
139137
)
140138
# embedding
141139
nt_emb = paddle.static.nn.sparse_embedding(

0 commit comments

Comments (0)