From 623ec28b659c7b21e10a4d367ee865da0898e31d Mon Sep 17 00:00:00 2001
From: Shixiaowei02 <39303645+Shixiaowei02@users.noreply.github.com>
Date: Thu, 17 Oct 2019 17:51:47 +0800
Subject: [PATCH] fix bugs

Log the IO variable names and repetitive parameters collected for the
Lite subgraph, exclude feed/fetch ops from the Lite teller, and turn
test_predictor.cc into a standalone main() instead of a gtest case.

---
 .../analysis/ir_passes/lite_subgraph_pass.cc  |  9 +++++++++
 paddle/fluid/inference/lite/test_predictor.cc | 17 ++++-------------
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/paddle/fluid/inference/analysis/ir_passes/lite_subgraph_pass.cc b/paddle/fluid/inference/analysis/ir_passes/lite_subgraph_pass.cc
index 7c351106ab770..2fcc7e8220b12 100644
--- a/paddle/fluid/inference/analysis/ir_passes/lite_subgraph_pass.cc
+++ b/paddle/fluid/inference/analysis/ir_passes/lite_subgraph_pass.cc
@@ -168,6 +168,9 @@ void OrganizeProgram(Node *merged_node,
                     subgraph.size());
 
   std::unordered_set<Node*> io_var_nodes = GetRelatedIOVarNodes(subgraph);
+  for (const auto* node: io_var_nodes) {
+    LOG(INFO) << "IO Variable Name: " << node->Name();
+  }
 
   std::vector<framework::OpDesc*> subgraph_ops;
   for (auto *op_node : subgraph) {
@@ -178,6 +181,9 @@ void OrganizeProgram(Node *merged_node,
   ModifyEngineProgram(merged_node, host_program, engine_program,
                       host_sub_block, io_var_nodes, subgraph_ops);
   *repetitive_params = ExtractParameters(io_var_nodes);
+  for (const auto& param: *repetitive_params) {
+    LOG(INFO) << "Repetitive param: " << param;
+  }
 
   host_program->Flush();
   engine_program->Flush();
@@ -199,6 +205,7 @@ void LiteSubgraphPass::SetUpEngine(framework::ProgramDesc* program,
   std::ostringstream os;
   platform::CPUDeviceContext ctx;
   for (const auto& param: params) {
+    LOG(INFO) << "Serialize param: " << param;
     PADDLE_ENFORCE_NOT_NULL(scope->FindVar(param),
                             "Block should already have a '%s' variable", param);
     auto* tensor = scope->FindVar(param)->GetMutable<framework::LoDTensor>();
@@ -262,6 +269,8 @@ void LiteSubgraphPass::ApplyImpl(
   auto teller = [&lite_ops_filter](const Node *node) {
     if (!node->IsOp() || !node->Op())
       return false;
+    else if (node->Op()->Type() == "feed" || node->Op()->Type() == "fetch")
+      return false;
     else if (std::find(lite_ops_filter.begin(), lite_ops_filter.end(),
                        node->Op()->Type()) != lite_ops_filter.end())
       return false;
diff --git a/paddle/fluid/inference/lite/test_predictor.cc b/paddle/fluid/inference/lite/test_predictor.cc
index 43bde192c71ed..a92a8920b392d 100644
--- a/paddle/fluid/inference/lite/test_predictor.cc
+++ b/paddle/fluid/inference/lite/test_predictor.cc
@@ -14,7 +14,6 @@
 
 #include
 #include
-#include
 
 #include "lite/api/paddle_use_kernels.h"
 #include "lite/api/paddle_use_ops.h"
@@ -23,14 +22,9 @@
 #include "paddle/fluid/platform/enforce.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 
-namespace paddle {
-namespace lite {
-
-using paddle::AnalysisConfig;
-
-TEST(AnalysisPredictor, Lite) {
-
-  AnalysisConfig config;
+int main() {
+  LOG(INFO) << "Hello World!";
+  paddle::AnalysisConfig config;
   config.SetModel("/shixiaowei02/Paddle_lite/xingzhaolong/leaky_relu_model");
   config.SwitchUseFeedFetchOps(false);
   config.EnableUseGpu(10, 1);
@@ -39,8 +33,5 @@
 
   auto predictor = CreatePaddlePredictor(config);
   PADDLE_ENFORCE_NOT_NULL(predictor.get());
+  return 0;
 }
-
-
-}  // namespace lite
-}  // namespace paddle
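
For reference, a minimal standalone sketch of the op-rejection rule that the new teller branch above enforces; the helper name RejectedByLiteTeller is hypothetical and used only for illustration, it does not exist in Paddle:

    #include <algorithm>
    #include <string>
    #include <vector>

    // Returns true for ops that must stay out of the Lite engine subgraph:
    // feed/fetch ops, plus any op type listed in the user-supplied filter.
    bool RejectedByLiteTeller(const std::string& op_type,
                              const std::vector<std::string>& lite_ops_filter) {
      if (op_type == "feed" || op_type == "fetch") return true;
      return std::find(lite_ops_filter.begin(), lite_ops_filter.end(),
                       op_type) != lite_ops_filter.end();
    }

In the pass itself this check runs inside the teller lambda (hunk at old line 262), where returning false keeps the op in the host program rather than offloading it to the Lite subgraph.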