Skip to content

Replace phi::errors with ::common::errors in cinn [fluid_ops] part2 #67114

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Aug 9, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 59 additions & 59 deletions paddle/cinn/hlir/op/broadcast.cc
Original file line number Diff line number Diff line change
Expand Up @@ -74,37 +74,37 @@ std::shared_ptr<OpStrategy> StrategyForBroadcast(
PADDLE_ENFORCE_NE(
args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of %s compute is empty! Please check.",
op_name));
CINNValuePack pack_args = args[0];
PADDLE_ENFORCE_GE(pack_args.size(),
2U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"At least 2 input tensors for %s compute, but got %d",
op_name,
pack_args.size()));
PADDLE_ENFORCE_GE(pack_args.size(),
3U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"At least 3 input tensors for %s compute, but got %d",
op_name,
pack_args.size()));
PADDLE_ENFORCE_EQ(
pack_args[2].is_string(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required pack_args[2] must be a string. Please check."));
std::string tensor_name = pack_args[2].operator std::string();
Expr A_expr = pack_args[0];
Expr B_expr = pack_args[1];
PADDLE_ENFORCE_NOT_NULL(
A_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
PADDLE_ENFORCE_NOT_NULL(
B_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
ir::Tensor A = A_expr.as_tensor_ref();
ir::Tensor B = B_expr.as_tensor_ref();
Expand Down Expand Up @@ -143,37 +143,37 @@ std::shared_ptr<OpStrategy> StrategyForBroadcastSymbolic(
PADDLE_ENFORCE_NE(
args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of %s compute is empty! Please check.",
op_name));
CINNValuePack pack_args = args[0];
PADDLE_ENFORCE_GE(pack_args.size(),
2U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"At least 2 input tensors for %s compute, but got %d",
op_name,
pack_args.size()));
PADDLE_ENFORCE_GE(pack_args.size(),
3U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"At least 3 input tensors for %s compute, but got %d",
op_name,
pack_args.size()));
PADDLE_ENFORCE_EQ(
pack_args[2].is_string(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required pack_args[2] must be a string. Please check."));
std::string tensor_name = pack_args[2].operator std::string();
Expr A_expr = pack_args[0];
Expr B_expr = pack_args[1];
PADDLE_ENFORCE_NOT_NULL(
A_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
PADDLE_ENFORCE_NOT_NULL(
B_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
ir::Tensor A = A_expr.as_tensor_ref();
ir::Tensor B = B_expr.as_tensor_ref();
Expand Down Expand Up @@ -206,14 +206,14 @@ std::shared_ptr<OpStrategy> StrategyForBroadcastTo(
PADDLE_ENFORCE_GE(
attrs.attr_store.count("out_shape"),
1,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The attrs.attr_store doesn't have the attribute of 'out_shape'."));
out_shape = absl::get<std::vector<int>>(attrs.attr_store.at("out_shape"));
PADDLE_ENFORCE_GE(
attrs.attr_store.count("broadcast_axes"),
1,
phi::errors::InvalidArgument("The attrs.attr_store doesn't have the "
"attribute of 'broadcast_axes'."));
::common::errors::InvalidArgument("The attrs.attr_store doesn't have the "
"attribute of 'broadcast_axes'."));
broadcast_axes =
absl::get<std::vector<int>>(attrs.attr_store.at("broadcast_axes"));
VLOG(3) << "broadcast out shape: " << utils::Join(out_shape, ", ");
Expand All @@ -224,30 +224,30 @@ std::shared_ptr<OpStrategy> StrategyForBroadcastTo(
PADDLE_ENFORCE_NE(
args.empty(),
true,
phi::errors::InvalidArgument("The input argument of broadcast_to "
"compute is empty! Please check."));
::common::errors::InvalidArgument("The input argument of broadcast_to "
"compute is empty! Please check."));
CINNValuePack pack_args = args[0];
PADDLE_ENFORCE_NE(
pack_args.empty(),
true,
phi::errors::InvalidArgument("The input tensors of broadcast_to "
"compute is empty! Please check."));
::common::errors::InvalidArgument("The input tensors of broadcast_to "
"compute is empty! Please check."));
PADDLE_ENFORCE_GE(
pack_args.size(),
2U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required at least 2 input tensors, but got %d", pack_args.size()));
PADDLE_ENFORCE_EQ(
pack_args[1].is_string(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required pack_args[1] must be a string. Please check."));
std::string tensor_name = pack_args[1].operator std::string();

Expr A_expr = pack_args[0];
PADDLE_ENFORCE_NOT_NULL(
A_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
ir::Tensor A = A_expr.as_tensor_ref();
auto out = pe::BroadcastTo(A, out_shape, broadcast_axes, tensor_name);
Expand All @@ -271,7 +271,7 @@ std::shared_ptr<OpStrategy> StrategyForBroadcastToSymbolic(
const Target &target) {
PADDLE_ENFORCE_EQ(output_shapes.size(),
1,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The size of output_shapes must be 1, but got %d.",
output_shapes.size()));
std::vector<ir::Expr> out_shape(output_shapes[0].size());
Expand All @@ -281,42 +281,42 @@ std::shared_ptr<OpStrategy> StrategyForBroadcastToSymbolic(
[](const ir::Dim &dim) { return dim->dim_expr; });
VLOG(3) << "broadcast out shape: " << utils::Join(out_shape, ", ");

framework::CINNCompute broadcast_to_compute(
[=](lang::Args args, lang::RetValue *ret) {
PADDLE_ENFORCE_NE(
args.empty(),
true,
phi::errors::InvalidArgument("The input argument of broadcast_to "
"compute is empty! Please check."));
CINNValuePack pack_args = args[0];
PADDLE_ENFORCE_NE(
pack_args.empty(),
true,
phi::errors::InvalidArgument("The input tensors of broadcast_to "
"compute is empty! Please check."));
std::string tensor_name = [&] {
if (pack_args.size() == 2) {
return pack_args[1].operator std::string();
} else {
PADDLE_ENFORCE_EQ(pack_args.size(),
3,
::common::errors::InvalidArgument(
"The number of input tensors is wrong. "
"The expected inputs is 3, but now is %d.",
pack_args.size()));
return pack_args[2].operator std::string();
}
}();

Expr A_expr = pack_args[0];
PADDLE_ENFORCE_NOT_NULL(
A_expr.as_tensor(),
phi::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
ir::Tensor A = A_expr.as_tensor_ref();
auto out = pe::BroadcastTo(A, out_shape, tensor_name);
*ret = CINNValuePack{{CINNValue(out)}};
});
framework::CINNCompute broadcast_to_compute([=](lang::Args args,
lang::RetValue *ret) {
PADDLE_ENFORCE_NE(
args.empty(),
true,
::common::errors::InvalidArgument("The input argument of broadcast_to "
"compute is empty! Please check."));
CINNValuePack pack_args = args[0];
PADDLE_ENFORCE_NE(
pack_args.empty(),
true,
::common::errors::InvalidArgument("The input tensors of broadcast_to "
"compute is empty! Please check."));
std::string tensor_name = [&] {
if (pack_args.size() == 2) {
return pack_args[1].operator std::string();
} else {
PADDLE_ENFORCE_EQ(pack_args.size(),
3,
::common::errors::InvalidArgument(
"The number of input tensors is wrong. "
"The expected inputs is 3, but now is %d.",
pack_args.size()));
return pack_args[2].operator std::string();
}
}();

Expr A_expr = pack_args[0];
PADDLE_ENFORCE_NOT_NULL(
A_expr.as_tensor(),
::common::errors::InvalidArgument(
"Required Input must be a tensor. Please check."));
ir::Tensor A = A_expr.as_tensor_ref();
auto out = pe::BroadcastTo(A, out_shape, tensor_name);
*ret = CINNValuePack{{CINNValue(out)}};
});

auto strategy = std::make_shared<framework::OpStrategy>();
strategy->AddImpl(
Expand Down
22 changes: 11 additions & 11 deletions paddle/cinn/hlir/op/contrib/argmax.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ std::vector<ir::Tensor> Argmax(const Tensor &in_tensor,
PADDLE_ENFORCE_GT(
ndim,
0,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The dimension of input tensor must be greater than 0."));

int pos_axis = axis;
Expand All @@ -61,18 +61,18 @@ std::vector<ir::Tensor> Argmax(const Tensor &in_tensor,
PADDLE_ENFORCE_LT(
pos_axis,
ndim,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The axis must be less than the dimension of input tensor."));
PADDLE_ENFORCE_GE(pos_axis,
0,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The axis must be greater than or equal to 0."));

std::vector<Expr> output_shape;
for (int i = 0; i < shape.size(); ++i) {
PADDLE_ENFORCE_EQ(shape[i].is_constant(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Input tensor's shape should be constant value."));
if (pos_axis == i) {
if (keep_dims) {
Expand Down Expand Up @@ -126,28 +126,28 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmax(
PADDLE_ENFORCE_EQ(
!args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax compute is empty! Please check."));
cinn::common::CINNValuePack pack_args = args[0];
std::string tensor_name = UniqName("Argmax_out");
PADDLE_ENFORCE_GE(pack_args.size(),
1U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"There should be 1 input args for argmax compute"));
Expr in_expr = pack_args[0];
PADDLE_ENFORCE_NOT_NULL(
in_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax compute is null."));
Tensor in_tensor = in_expr.as_tensor_ref();
PADDLE_ENFORCE_EQ(pack_args.size(),
2U,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax compute must be 2."));
PADDLE_ENFORCE_EQ(
pack_args[1].is_string(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax compute must be string."));
tensor_name = pack_args[1].operator std::string();
std::vector<ir::Tensor> out_tensor =
Expand All @@ -164,7 +164,7 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmax(
PADDLE_ENFORCE_EQ(
!args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax_schedule is empty! Please check."));
cinn::common::CINNValuePack arg_pack = args[0];
std::vector<Expr> vec_ast;
Expand All @@ -177,7 +177,7 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmax(
PADDLE_ENFORCE_EQ(
!vec_ast.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmax_schedule is empty! Please check."));
ir::ModuleExpr mod_expr(vec_ast);
ir::IRSchedule ir_sch(mod_expr);
Expand Down
14 changes: 7 additions & 7 deletions paddle/cinn/hlir/op/contrib/argmin.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ std::vector<Tensor> Argmin(const Tensor &in_tensor,
PADDLE_ENFORCE_GT(
ndim,
0,
phi::errors::InvalidArgument("tensor's dim must be more than 0"));
::common::errors::InvalidArgument("tensor's dim must be more than 0"));

int pos_axis = axis;
if (axis < 0) {
Expand All @@ -64,7 +64,7 @@ std::vector<Tensor> Argmin(const Tensor &in_tensor,
for (int i = 0; i < shape.size(); ++i) {
PADDLE_ENFORCE_EQ(shape[i].is_constant(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"Input tensor's shape should be constant value."));
if (pos_axis == i) {
if (keep_dims) {
Expand Down Expand Up @@ -116,22 +116,22 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmin(
PADDLE_ENFORCE_EQ(
!args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmin compute is empty! Please check."));
cinn::common::CINNValuePack pack_args = args[0];
CHECK_GE(pack_args.size(), 1U)
<< "There should be 1 input args for argmax compute";
Expr in_expr = pack_args[0];
PADDLE_ENFORCE_NOT_NULL(
in_expr.as_tensor(),
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmin compute is not tensor."));
Tensor in_tensor = in_expr.as_tensor_ref();
CHECK_EQ(pack_args.size(), 2U);
PADDLE_ENFORCE_EQ(
pack_args[1].is_string(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmin compute is not string."));
std::string tensor_name = pack_args[1].operator std::string();
auto out_tensor = Argmin(in_tensor, target, axis, keep_dims, tensor_name);
Expand All @@ -147,7 +147,7 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmin(
PADDLE_ENFORCE_EQ(
!args.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmin schedule is empty! Please check."));
cinn::common::CINNValuePack arg_pack = args[0];
std::vector<Expr> vec_ast;
Expand All @@ -160,7 +160,7 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmin(
PADDLE_ENFORCE_EQ(
!vec_ast.empty(),
true,
phi::errors::InvalidArgument(
::common::errors::InvalidArgument(
"The input argument of argmin schedule is empty! Please check."));
ir::ModuleExpr mod_expr(vec_ast);
ir::IRSchedule ir_sch(mod_expr);
Expand Down
Loading