Skip to content

Commit 3d1ac72

Browse files
authored
Merge pull request #8176 from kexinzhao/inf_rnn_encode_decode
Add Inference example and unit test for rnn_encoder_decoder
2 parents f605d00 + 64800cf commit 3d1ac72

File tree

7 files changed

+250
-29
lines changed

7 files changed

+250
-29
lines changed

paddle/framework/block_desc.cc

+1-2
Original file line numberDiff line numberDiff line change
@@ -162,9 +162,8 @@ BlockDesc::BlockDesc(const BlockDesc &other, proto::BlockDesc *desc,
162162
: prog_(prog), desc_(desc) {
163163
need_update_ = true;
164164
for (auto &op : other.ops_) {
165-
ops_.emplace_back(new OpDesc(*op, this));
165+
ops_.emplace_back(new OpDesc(*op->Proto(), prog, this));
166166
}
167-
168167
for (auto &it : other.vars_) {
169168
auto *var = new VarDesc(*it.second);
170169
vars_[it.first].reset(var);

paddle/framework/op_desc.cc

+2-3
Original file line numberDiff line numberDiff line change
@@ -125,11 +125,10 @@ OpDesc::OpDesc(const proto::OpDesc &desc, ProgramDesc *prog, BlockDesc *block)
125125
// restore attrs_
126126
for (const proto::OpDesc::Attr &attr : desc_.attrs()) {
127127
std::string attr_name = attr.name();
128+
// The sub_block referred to by the BLOCK attr hasn't been added
129+
// to ProgramDesc class yet, we skip setting BLOCK attr here.
128130
if (attr.type() != proto::AttrType::BLOCK) {
129131
attrs_[attr_name] = GetAttrValue(attr);
130-
} else {
131-
auto bid = attr.block_idx();
132-
attrs_[attr_name] = prog->MutableBlock(bid);
133132
}
134133
}
135134
this->block_ = block;

paddle/framework/program_desc.cc

+20-1
Original file line numberDiff line numberDiff line change
@@ -43,18 +43,37 @@ ProgramDesc::ProgramDesc() {
4343

4444
ProgramDesc::ProgramDesc(const ProgramDesc &o) {
4545
desc_ = o.desc_;
46-
4746
for (int i = 0; i < desc_.blocks_size(); ++i) {
4847
auto *block = desc_.mutable_blocks(i);
4948
blocks_.emplace_back(new BlockDesc(*o.blocks_[i], block, this));
5049
}
50+
for (auto &block : blocks_) {
51+
for (auto *op : block->AllOps()) {
52+
for (const auto &attr : op->Proto()->attrs()) {
53+
if (attr.type() == proto::AttrType::BLOCK) {
54+
size_t blk_idx = attr.block_idx();
55+
op->SetBlockAttr(attr.name(), *this->MutableBlock(blk_idx));
56+
}
57+
}
58+
}
59+
}
5160
}
5261

5362
ProgramDesc::ProgramDesc(const proto::ProgramDesc &desc) {
5463
desc_ = desc;
5564
for (auto &block_desc : *desc_.mutable_blocks()) {
5665
blocks_.emplace_back(new BlockDesc(this, &block_desc));
5766
}
67+
for (auto &block : blocks_) {
68+
for (auto *op : block->AllOps()) {
69+
for (const auto &attr : op->Proto()->attrs()) {
70+
if (attr.type() == proto::AttrType::BLOCK) {
71+
size_t blk_idx = attr.block_idx();
72+
op->SetBlockAttr(attr.name(), *this->MutableBlock(blk_idx));
73+
}
74+
}
75+
}
76+
}
5877
}
5978

6079
ProgramDesc::ProgramDesc(const std::string &binary_str) {

paddle/framework/prune.cc

+67-17
Original file line numberDiff line numberDiff line change
@@ -49,11 +49,28 @@ bool IsTarget(const proto::OpDesc& op_desc) {
4949
return false;
5050
}
5151

52-
void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
53-
int block_id) {
54-
// TODO(tonyyang-svail):
55-
// - will change to use multiple blocks for RNN op and Cond Op
52+
int GetSubBlockIndex(const proto::OpDesc& op_desc) {
53+
for (auto& attr : op_desc.attrs()) {
54+
if (attr.type() == proto::AttrType::BLOCK) {
55+
PADDLE_ENFORCE(attr.has_block_idx());
56+
return attr.block_idx();
57+
}
58+
}
59+
return -1;
60+
}
61+
62+
bool HasSubBlock(const proto::OpDesc& op_desc) {
63+
return GetSubBlockIndex(op_desc) > 0;
64+
}
5665

66+
// block_id is the idx of the current block in the input desc
67+
// parent_block_id is the idx of the parent of the current block
68+
// in the output desc, -1 means the current block is global block
69+
// dependent_vars is passed recursively from the parent block to
70+
// the child block to help pruning
71+
void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
72+
int block_id, int parent_block_id,
73+
std::set<std::string>& dependent_vars) {
5774
auto& block = input.blocks(block_id);
5875
auto& ops = block.ops();
5976

@@ -72,19 +89,16 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
7289
expect_fetch = (op_desc.type() == kFetchOpType);
7390
}
7491

75-
std::set<std::string> dependent_vars;
7692
std::vector<bool> should_run;
7793
for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
7894
auto& op_desc = *op_iter;
79-
8095
if (IsTarget(op_desc) || HasDependentVar(op_desc, dependent_vars)) {
8196
// insert its input to the dependency graph
8297
for (auto& var : op_desc.inputs()) {
8398
for (auto& argu : var.arguments()) {
8499
dependent_vars.insert(argu);
85100
}
86101
}
87-
88102
should_run.push_back(true);
89103
} else {
90104
should_run.push_back(false);
@@ -95,45 +109,81 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
95109
// we reverse the should_run vector
96110
std::reverse(should_run.begin(), should_run.end());
97111

98-
*output = input;
99-
auto* op_field = output->mutable_blocks(block_id)->mutable_ops();
112+
// copy the current block from input to output
113+
auto* block_field = output->mutable_blocks();
114+
*block_field->Add() = input.blocks(block_id);
115+
116+
int output_block_id = output->blocks_size() - 1;
117+
auto* output_block = output->mutable_blocks(output_block_id);
118+
output_block->set_idx(output_block_id);
119+
output_block->set_parent_idx(parent_block_id);
120+
121+
auto* op_field = output_block->mutable_ops();
100122
op_field->Clear();
101123
for (size_t i = 0; i < should_run.size(); ++i) {
102124
if (should_run[i]) {
103-
*op_field->Add() = input.blocks(block_id).ops(i);
125+
auto* op = op_field->Add();
126+
*op = input.blocks(block_id).ops(i);
127+
if (HasSubBlock(*op)) {
128+
// create sub_block_dependent_vars here to help prune the sub block
129+
std::set<std::string> sub_block_dependent_vars;
130+
for (auto& var : op->inputs()) {
131+
for (auto& argu : var.arguments()) {
132+
sub_block_dependent_vars.insert(argu);
133+
}
134+
}
135+
for (auto& var : op->outputs()) {
136+
for (auto& argu : var.arguments()) {
137+
sub_block_dependent_vars.insert(argu);
138+
}
139+
}
140+
// GetSubBlockIndex(*op) is the idx of the sub_block in the input desc
141+
// output_block_id is the idx of the current block in the output desc
142+
prune_impl(input, output, GetSubBlockIndex(*op), output_block_id,
143+
sub_block_dependent_vars);
144+
}
104145
}
105146
}
106147

107148
// remove the VarDescs in BlockDesc that are not referenced in
108149
// the pruned OpDescs
109150
std::unordered_map<std::string, proto::VarDesc> var_map;
110-
auto* var_field = output->mutable_blocks(block_id)->mutable_vars();
151+
auto* var_field = output->mutable_blocks(output_block_id)->mutable_vars();
111152
for (const auto& var : *var_field) {
112153
var_map[var.name()] = var;
113154
}
114155

115-
var_field->Clear();
156+
std::set<std::string> var_names;
116157
for (const auto& op : *op_field) {
117-
// add VarDescs of all input arguments for each OpDesc
118158
auto& input_field = op.inputs();
119159
for (auto& input_var : input_field) {
120160
for (auto& arg : input_var.arguments()) {
121-
*var_field->Add() = var_map[arg];
161+
if (var_map.count(arg) != 0) {
162+
var_names.insert(arg);
163+
}
122164
}
123165
}
124-
// add VarDescs of all output arguments for each OpDesc
125166
auto& output_field = op.outputs();
126167
for (auto& output_var : output_field) {
127168
for (auto& arg : output_var.arguments()) {
128-
*var_field->Add() = var_map[arg];
169+
if (var_map.count(arg) != 0) {
170+
var_names.insert(arg);
171+
}
129172
}
130173
}
131174
}
175+
176+
var_field->Clear();
177+
for (const auto& name : var_names) {
178+
*var_field->Add() = var_map[name];
179+
}
132180
}
133181

134182
// TODO(fengjiayi): Prune() could be inplaced to avoid unnecessary copies
135183
void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
136-
prune_impl(input, output, 0);
184+
std::set<std::string> dependent_vars;
185+
output->clear_blocks();
186+
prune_impl(input, output, 0, -1, dependent_vars);
137187
}
138188

139189
void inference_optimize_impl(const proto::ProgramDesc& input,

paddle/inference/tests/book/CMakeLists.txt

+1
Original file line numberDiff line numberDiff line change
@@ -27,3 +27,4 @@ endfunction(inference_test)
2727
inference_test(recognize_digits ARGS mlp)
2828
inference_test(image_classification ARGS vgg resnet)
2929
inference_test(label_semantic_roles)
30+
inference_test(rnn_encoder_decoder)
paddle/inference/tests/book/test_inference_rnn_encoder_decoder.cc (new file; filename inferred from the inference_test(rnn_encoder_decoder) entry in the CMakeLists.txt hunk above — verify against the commit)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
2+
3+
Licensed under the Apache License, Version 2.0 (the "License");
4+
you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at
6+
7+
http://www.apache.org/licenses/LICENSE-2.0
8+
9+
Unless required by applicable law or agreed to in writing, software
10+
distributed under the License is distributed on an "AS IS" BASIS,
11+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
See the License for the specific language governing permissions and
13+
limitations under the License. */
14+
15+
#include <gtest/gtest.h>
16+
#include "gflags/gflags.h"
17+
#include "test_helper.h"
18+
19+
DEFINE_string(dirname, "", "Directory of the inference model.");
20+
21+
TEST(inference, rnn_encoder_decoder) {
22+
if (FLAGS_dirname.empty()) {
23+
LOG(FATAL) << "Usage: ./example --dirname=path/to/your/model";
24+
}
25+
26+
LOG(INFO) << "FLAGS_dirname: " << FLAGS_dirname << std::endl;
27+
std::string dirname = FLAGS_dirname;
28+
29+
// 0. Call `paddle::framework::InitDevices()` initialize all the devices
30+
// In unittests, this is done in paddle/testing/paddle_gtest_main.cc
31+
32+
paddle::framework::LoDTensor word_data, trg_word;
33+
paddle::framework::LoD lod{{0, 4, 10}};
34+
35+
SetupLoDTensor(
36+
word_data, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
37+
SetupLoDTensor(
38+
trg_word, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
39+
40+
std::vector<paddle::framework::LoDTensor*> cpu_feeds;
41+
cpu_feeds.push_back(&word_data);
42+
cpu_feeds.push_back(&trg_word);
43+
44+
paddle::framework::LoDTensor output1;
45+
std::vector<paddle::framework::LoDTensor*> cpu_fetchs1;
46+
cpu_fetchs1.push_back(&output1);
47+
48+
// Run inference on CPU
49+
TestInference<paddle::platform::CPUPlace, float>(
50+
dirname, cpu_feeds, cpu_fetchs1);
51+
LOG(INFO) << output1.lod();
52+
LOG(INFO) << output1.dims();
53+
54+
#ifdef PADDLE_WITH_CUDA
55+
paddle::framework::LoDTensor output2;
56+
std::vector<paddle::framework::LoDTensor*> cpu_fetchs2;
57+
cpu_fetchs2.push_back(&output2);
58+
59+
// Run inference on CUDA GPU
60+
TestInference<paddle::platform::CUDAPlace, float>(
61+
dirname, cpu_feeds, cpu_fetchs2);
62+
LOG(INFO) << output2.lod();
63+
LOG(INFO) << output2.dims();
64+
65+
CheckError<float>(output1, output2);
66+
#endif
67+
}

0 commit comments

Comments (0)