Commit 2ea1a2b

remove vector inputnames creating
1 parent 12e18f1 commit 2ea1a2b

2 files changed: 18 additions and 21 deletions


paddle/fluid/framework/operator.cc

Lines changed: 12 additions & 17 deletions
@@ -2371,10 +2371,7 @@ Scope* OperatorWithKernel::PrepareData(
   };
 
   if (run_phi_kernel_) {
-    std::vector<std::string> input_names;
-    for (auto& input_name : kernel_signature_->input_names) {
-      input_names.emplace_back(input_name);
-    }
+    const auto& input_names = kernel_signature_->input_names;
     const auto& input_defs = phi_kernel_->args_def().input_defs();
     PADDLE_ENFORCE_EQ(input_names.size(),
                       input_defs.size(),
@@ -2384,7 +2381,6 @@ Scope* OperatorWithKernel::PrepareData(
                           input_names.size(),
                           input_defs.size()));
     for (size_t i = 0; i < input_defs.size(); ++i) {
-      const auto& input_defs = phi_kernel_->args_def().input_defs();
       auto& in_def = input_defs.at(i);
       std::string input_name = input_names[i];
       auto iter = ctx->inputs.find(input_name);
@@ -2396,19 +2392,18 @@ Scope* OperatorWithKernel::PrepareData(
           no_buffer_ins && no_buffer_ins->count(input_name) > 0;
       prepare_input_data(input_name, &ins_vector, &in_def, should_skip_input);
     }
-    // For input that is Extra
-    for (auto& var_name_item : Inputs()) {
-      if (std::find(input_names.begin(),
-                    input_names.end(),
-                    var_name_item.first) == input_names.end()) {
-        bool should_skip_input =
-            no_buffer_ins && no_buffer_ins->count(var_name_item.first) > 0;
-        std::vector<Variable*>& input_vars = ctx->inputs[var_name_item.first];
-        prepare_input_data(
-            var_name_item.first, &input_vars, nullptr, should_skip_input);
-      }
+#ifdef PADDLE_WITH_MKLDNN
+    // For input that is Extra, only MKLDNN will use Extra Inputs
+    auto& extra_input_names =
+        paddle::operators::ExtraInfoUtils::Instance().GetExtraInputNamesMap(
+            Type());
+    for (const auto& input_name : extra_input_names) {
+      bool should_skip_input =
+          no_buffer_ins && no_buffer_ins->count(input_name) > 0;
+      std::vector<Variable*>& input_vars = ctx->inputs[input_name];
+      prepare_input_data(input_name, &input_vars, nullptr, should_skip_input);
     }
-
+#endif
   } else {
     for (auto& var_name_item : Inputs()) {
       bool should_skip_input =
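Note on the operator.cc change: it stops copying the kernel signature's input names into a temporary std::vector (binding a const reference instead), drops a redundant redeclaration of input_defs inside the loop, and replaces the generic scan over Inputs() for "Extra" inputs with an MKLDNN-only loop driven by ExtraInfoUtils. The standalone sketch below (KernelSignatureLike is a hypothetical stand-in, not Paddle's actual type) illustrates the copy-versus-reference pattern; since the names are only read afterwards, the reference avoids one vector allocation plus a string copy per input on every call.

// Minimal sketch, assuming a name-list member similar in spirit to
// kernel_signature_->input_names; the real element type in Paddle may differ.
#include <iostream>
#include <string>
#include <vector>

struct KernelSignatureLike {
  // Hypothetical stand-in for the kernel signature's input name list.
  std::vector<std::string> input_names{"X", "Y", "ResidualData"};
};

int main() {
  KernelSignatureLike sig;

  // Before: materialize a copy, paying one vector allocation plus one
  // string copy per name on every invocation.
  std::vector<std::string> copied;
  for (auto& name : sig.input_names) {
    copied.emplace_back(name);
  }

  // After: bind a const reference; read-only uses such as size(),
  // operator[], and lookups work unchanged, with no copies made.
  const auto& referenced = sig.input_names;

  std::cout << copied.size() << " vs " << referenced.size() << '\n';
  return 0;
}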

paddle/fluid/operators/ops_extra_info.h

Lines changed: 6 additions & 4 deletions
@@ -88,9 +88,7 @@ const std::unordered_map<std::string, ExtraAttrPropertySet>
         {"use_cudnn", ExtraAttrProperty::SCHEDULE},
         {"use_mkldnn", ExtraAttrProperty::SCHEDULE},
         // ONEDNN dedicated attributes
-        {"Activation_scale", ExtraAttrProperty::ONEDNN},
         {"Bias", ExtraAttrProperty::ONEDNN},
-        {"Bias_scales", ExtraAttrProperty::ONEDNN},
         {"data_format", ExtraAttrProperty::ONEDNN},
         {"force_fp32_output", ExtraAttrProperty::ONEDNN},
         {"fuse_activation", ExtraAttrProperty::ONEDNN},
@@ -112,7 +110,6 @@ const std::unordered_map<std::string, ExtraAttrPropertySet>
         {"fused_transpose_X", ExtraAttrProperty::ONEDNN},
         {"fused_transpose_Y", ExtraAttrProperty::ONEDNN},
         {"mkldnn_data_type", ExtraAttrProperty::ONEDNN},
-        {"Output_shift_scale", ExtraAttrProperty::ONEDNN},
         {"ResidualData", ExtraAttrProperty::ONEDNN},
         {"scale_x", ExtraAttrProperty::ONEDNN},
         {"scale_y", ExtraAttrProperty::ONEDNN},
@@ -123,9 +120,13 @@ const std::unordered_map<std::string, ExtraAttrPropertySet>
         {"Scale_y", ExtraAttrProperty::ONEDNN},
         {"Scale_out", ExtraAttrProperty::ONEDNN},
         {"Scale_weights", ExtraAttrProperty::ONEDNN},
-        {"Sum_scale", ExtraAttrProperty::ONEDNN},
         {"x_data_format", ExtraAttrProperty::ONEDNN},
         {"y_data_format", ExtraAttrProperty::ONEDNN},
+        // ONEDNN pass dedicated attributes
+        {"Activation_scale", ExtraAttrProperty::ONEDNN},
+        {"Bias_scales", ExtraAttrProperty::ONEDNN},
+        {"Output_shift_scale", ExtraAttrProperty::ONEDNN},
+        {"Sum_scale", ExtraAttrProperty::ONEDNN},
         // GPUDNN dedicated attributes
         {"exhaustive_search", ExtraAttrProperty::GPUDNN},
         {"fuse_relu_before_depthwise_conv", ExtraAttrProperty::GPUDNN},
@@ -220,6 +221,7 @@ class ExtraInfoUtils {
   std::vector<std::function<void(framework::AttributeMap*, bool)>>
       empty_extra_attrs_checker_{};
 
+  // TODO(chenweihang): move these extra inputs into op_compat.yaml
   std::unordered_map<std::string, std::vector<std::string>>
       g_extra_input_names_map_ = {{"conv2d", {"Bias", "ResidualData"}},
                                   {"conv2d_grad", {"Bias"}}};
