Skip to content

Commit 0e582ac

Browse files
authored
[Error Message No. 11, No. 23, No. 28, BUAA] Rewrite error messages (#66788)
* first attempt * rewrite pointer-related error messages * use phi::errors instead of platform::errors
1 parent 52adacd commit 0e582ac

File tree

7 files changed

+244
-120
lines changed

7 files changed

+244
-120
lines changed

paddle/cinn/adt/generate_map_expr.cc

Lines changed: 27 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,11 @@ hlir::framework::OpPatternKind GetOpPatternKind(const ::pir::Operation* node) {
170170
bool CollectRewrittenReductionOpStmts(const OpStmt& op_stmt,
171171
List<OpStmt>* ret) {
172172
const auto& [op, inputs, outputs] = op_stmt.tuple();
173-
CHECK(op.Has<const ::pir::Operation*>());
173+
PADDLE_ENFORCE_EQ(
174+
op.Has<const ::pir::Operation*>(),
175+
true,
176+
phi::errors::InvalidArgument(
177+
"The op should have a value of type ::pir::Operation*"));
174178
if (GetOpPatternKind(op.Get<const ::pir::Operation*>()) ==
175179
hlir::framework::OpPatternKind::kReduction) {
176180
tReduceInit<const ::pir::Operation*> init_op{
@@ -234,7 +238,10 @@ std::vector<std::shared_ptr<IGroup>> GenerateIGroups(
234238
std::vector<std::shared_ptr<IGroup>> ret{};
235239

236240
List<OpStmt> op_stmts = MakeOpStmts(group);
237-
CHECK(!op_stmts->empty());
241+
PADDLE_ENFORCE_EQ(
242+
!op_stmts->empty(),
243+
true,
244+
phi::errors::InvalidArgument("The op_stmts should not be empty"));
238245

239246
PartitionIGroupOpStmts(op_stmts, [&](const auto& igroup_spec) {
240247
ret.push_back(MakeIGroup(igroup_spec));
@@ -271,9 +278,12 @@ std::unordered_map<Variable, const Value> MakeSdIterator2Iterator(
271278
std::unordered_map<Variable, const Value> ret{};
272279

273280
for (std::size_t i = 0; i < igroup.loop_iterators()->size(); ++i) {
274-
CHECK(ret.emplace(igroup.loop_iterators()->at(i),
275-
igroup.loop_iterators()->at(i))
276-
.second);
281+
PADDLE_ENFORCE_EQ(
282+
ret.emplace(igroup.loop_iterators()->at(i),
283+
igroup.loop_iterators()->at(i))
284+
.second,
285+
true,
286+
phi::errors::InvalidArgument("The loop iterator should be unique"));
277287
}
278288

279289
return ret;
@@ -334,7 +344,10 @@ LoopDescriptor4IterVarT MakeGetterLoopDescriptor4IterVar(
334344
using Cache = std::unordered_map<Iterator, LoopDescriptor>;
335345
const auto& sd_iter2sd = std::make_shared<Cache>();
336346
for (std::size_t i = 0; i < loop_iters->size(); ++i) {
337-
CHECK(sd_iter2sd->emplace(loop_iters->at(i), sd->at(i)).second);
347+
PADDLE_ENFORCE_EQ(
348+
sd_iter2sd->emplace(loop_iters->at(i), sd->at(i)).second,
349+
true,
350+
phi::errors::InvalidArgument("The loop iterator should be unique"));
338351
}
339352
return [sd_iter2sd](const auto& sd_iter) { return sd_iter2sd->at(sd_iter); };
340353
}
@@ -343,7 +356,10 @@ TreeMerger<Stmt> MakeTreeMerger(const MapIr& map_ir) {
343356
using Cache = std::unordered_map<OpStmt, LoopIterators>;
344357
auto cache = std::make_shared<Cache>();
345358
for (const auto& op_stmt : *(map_ir.op_stmts())) {
346-
CHECK(cache->emplace(op_stmt, map_ir.loop_iterators()).second);
359+
PADDLE_ENFORCE_EQ(
360+
cache->emplace(op_stmt, map_ir.loop_iterators()).second,
361+
true,
362+
phi::errors::InvalidArgument("The op_stmt should be unique"));
347363
}
348364

349365
TreeMerger<Stmt> tree_merger{};
@@ -365,7 +381,10 @@ MapStmt<Stmt> MakeMapStmt(const MapIrList& map_irs) {
365381
1UL,
366382
::common::errors::InvalidArgument(
367383
"The size of stmts should be 1, but got %d.", stmts->size()));
368-
CHECK(stmts->at(0).Has<MapStmt<Stmt>>());
384+
PADDLE_ENFORCE_EQ(stmts->at(0).Has<MapStmt<Stmt>>(),
385+
true,
386+
phi::errors::InvalidArgument(
387+
"The stmts should have a value of type MapStmt<Stmt>"));
369388
return stmts->at(0).Get<MapStmt<Stmt>>();
370389
}
371390

paddle/cinn/adt/inline_translator.h

Lines changed: 28 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,10 @@ struct InlineTranslator final {
3333
using DstTree = Tree<MapT, DstLeaf>;
3434

3535
static DstTree Call(const SrcTree& src_tree) {
36-
CHECK((src_tree.template Has<MapT<SrcTree>>()));
36+
PADDLE_ENFORCE_EQ((src_tree.template Has<MapT<SrcTree>>()),
37+
true,
38+
phi::errors::InvalidArgument(
39+
"src_tree.template should have <MapT<SrcTree>>()"));
3740
const MapT<DstTree> dst_tree =
3841
CallMap(src_tree.template Get<MapT<SrcTree>>());
3942

@@ -97,7 +100,10 @@ struct InlineTranslator final {
97100
const auto& arg = op_call_children->at(arg_index);
98101
const auto& arg_leaf = arg.template Get<Load<TensorT>>();
99102
const auto& [arg_tensor] = arg_leaf.tuple();
100-
CHECK(producer_tensor == arg_tensor);
103+
PADDLE_ENFORCE_EQ(producer_tensor == arg_tensor,
104+
true,
105+
phi::errors::InvalidArgument(
106+
"producer_tensor should be equal to arg_tensor"));
101107
List<OpExpr> ret{};
102108
ret->assign(op_call_children->begin(), op_call_children->end());
103109
ret->at(arg_index) = producer_tree;
@@ -108,12 +114,20 @@ struct InlineTranslator final {
108114
static void CheckConsumerPosIsLoadTensor(const DstLeaf& consumer,
109115
int arg_index) {
110116
const auto& [tensor, consumer_tree] = consumer.tuple();
111-
CHECK((consumer_tree.template Has<OpCallT<OpExpr>>()));
117+
PADDLE_ENFORCE_EQ(
118+
(consumer_tree.template Has<OpCallT<OpExpr>>()),
119+
true,
120+
phi::errors::InvalidArgument(
121+
"consumer_tree.template should have <OpCallT<OpExpr>>()"));
112122
const auto& op_call = consumer_tree.template Get<OpCallT<OpExpr>>();
113123
const auto& op_call_children =
114124
InlineTranslatorTrait<OpCallT>::GetTreeInnerNodeChildren(op_call);
115125
const auto& op_call_child = op_call_children->at(arg_index);
116-
CHECK((op_call_child.template Has<Load<TensorT>>()));
126+
PADDLE_ENFORCE_EQ(
127+
(op_call_child.template Has<Load<TensorT>>()),
128+
true,
129+
phi::errors::InvalidArgument(
130+
"op_call_child.template should have <Load<TensorT>>()"));
117131
}
118132

119133
template <typename DoEachT>
@@ -163,8 +177,12 @@ struct InlineTranslator final {
163177
std::unordered_map<int, DstLeaf> index2dst_leaf{};
164178
// Init dst leaves
165179
for (int i = 0; i < size; ++i) {
166-
CHECK(index2dst_leaf.emplace(i, NaiveTranslateLeaf(*std::next(begin, i)))
167-
.second);
180+
PADDLE_ENFORCE_EQ(
181+
index2dst_leaf.emplace(i, NaiveTranslateLeaf(*std::next(begin, i)))
182+
.second,
183+
true,
184+
phi::errors::InvalidArgument(
185+
"index2dst_leaf.emplace should return true"));
168186
}
169187
// Inline dst leaves
170188
for (int producer_i = 0; producer_i < size; ++producer_i) {
@@ -195,7 +213,10 @@ struct InlineTranslator final {
195213
// using SrcLeaf = Store<TensorT, OpCallT<Load<TensorT>>>;
196214
// using DstLeaf = Store<TensorT, OpExpr>;
197215
static DstLeaf NaiveTranslateLeaf(const SrcTree& src_tree) {
198-
CHECK(src_tree.template Has<SrcLeaf>());
216+
PADDLE_ENFORCE_EQ(src_tree.template Has<SrcLeaf>(),
217+
true,
218+
phi::errors::InvalidArgument(
219+
"src_tree.template should have <SrcLeaf>()"));
199220
const auto& [tensor, op_call] = src_tree.template Get<SrcLeaf>().tuple();
200221
const List<Load<TensorT>>& src_loads =
201222
InlineTranslatorTrait<OpCallT>::GetTreeInnerNodeChildren(op_call);

paddle/cinn/hlir/op/contrib/argmax.cc

Lines changed: 51 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,10 @@ std::vector<ir::Tensor> Argmax(const Tensor &in_tensor,
7070

7171
std::vector<Expr> output_shape;
7272
for (int i = 0; i < shape.size(); ++i) {
73-
CHECK(shape[i].is_constant())
74-
<< "Input tensor's shape should be constant value.";
73+
PADDLE_ENFORCE_EQ(shape[i].is_constant(),
74+
true,
75+
phi::errors::InvalidArgument(
76+
"Input tensor's shape should be constant value."));
7577
if (pos_axis == i) {
7678
if (keep_dims) {
7779
output_shape.push_back(Expr(1));
@@ -119,40 +121,51 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmax(
119121
keep_dims = absl::get<bool>(attrs.attr_store.at("keep_dim"));
120122
}
121123

122-
framework::CINNCompute argmax_compute(
123-
[=](lang::Args args, lang::RetValue *ret) {
124-
CHECK(!args.empty())
125-
<< "The input argument of argmax compute is empty! Please check.";
126-
cinn::common::CINNValuePack pack_args = args[0];
127-
std::string tensor_name = UniqName("Argmax_out");
128-
PADDLE_ENFORCE_GE(
129-
pack_args.size(),
130-
1U,
131-
phi::errors::InvalidArgument(
132-
"There should be 1 input args for argmax compute"));
133-
Expr in_expr = pack_args[0];
134-
CHECK(in_expr.as_tensor());
135-
Tensor in_tensor = in_expr.as_tensor_ref();
136-
PADDLE_ENFORCE_EQ(
137-
pack_args.size(),
138-
2U,
139-
phi::errors::InvalidArgument(
140-
"The input argument of argmax compute must be 2."));
141-
CHECK(pack_args[1].is_string());
142-
tensor_name = pack_args[1].operator std::string();
143-
std::vector<ir::Tensor> out_tensor =
144-
Argmax(in_tensor, target, axis, keep_dims, tensor_name);
145-
146-
std::vector<CINNValue> cinn_values{CINNValue(out_tensor[0]),
147-
CINNValue(out_tensor[1]),
148-
CINNValue(out_tensor[2])};
149-
*ret = cinn::common::CINNValuePack{cinn_values};
150-
});
124+
framework::CINNCompute argmax_compute([=](lang::Args args,
125+
lang::RetValue *ret) {
126+
PADDLE_ENFORCE_EQ(
127+
!args.empty(),
128+
true,
129+
phi::errors::InvalidArgument(
130+
"The input argument of argmax compute is empty! Please check."));
131+
cinn::common::CINNValuePack pack_args = args[0];
132+
std::string tensor_name = UniqName("Argmax_out");
133+
PADDLE_ENFORCE_GE(pack_args.size(),
134+
1U,
135+
phi::errors::InvalidArgument(
136+
"There should be 1 input args for argmax compute"));
137+
Expr in_expr = pack_args[0];
138+
PADDLE_ENFORCE_NOT_NULL(
139+
in_expr.as_tensor(),
140+
phi::errors::InvalidArgument(
141+
"The input argument of argmax compute is null."));
142+
Tensor in_tensor = in_expr.as_tensor_ref();
143+
PADDLE_ENFORCE_EQ(pack_args.size(),
144+
2U,
145+
phi::errors::InvalidArgument(
146+
"The input argument of argmax compute must be 2."));
147+
PADDLE_ENFORCE_EQ(
148+
pack_args[1].is_string(),
149+
true,
150+
phi::errors::InvalidArgument(
151+
"The input argument of argmax compute must be string."));
152+
tensor_name = pack_args[1].operator std::string();
153+
std::vector<ir::Tensor> out_tensor =
154+
Argmax(in_tensor, target, axis, keep_dims, tensor_name);
155+
156+
std::vector<CINNValue> cinn_values{CINNValue(out_tensor[0]),
157+
CINNValue(out_tensor[1]),
158+
CINNValue(out_tensor[2])};
159+
*ret = cinn::common::CINNValuePack{cinn_values};
160+
});
151161

152162
framework::CINNSchedule argmax_schedule([=](lang::Args args,
153163
lang::RetValue *ret) {
154-
CHECK(!args.empty())
155-
<< "The input argument of argmax_schedule is empty! Please check.\n";
164+
PADDLE_ENFORCE_EQ(
165+
!args.empty(),
166+
true,
167+
phi::errors::InvalidArgument(
168+
"The input argument of argmax_schedule is empty! Please check."));
156169
cinn::common::CINNValuePack arg_pack = args[0];
157170
std::vector<Expr> vec_ast;
158171
for (int i = 0; i < arg_pack.size(); i++) {
@@ -161,7 +174,11 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmax(
161174
vec_ast.emplace_back(temp);
162175
}
163176
}
164-
CHECK(!vec_ast.empty());
177+
PADDLE_ENFORCE_EQ(
178+
!vec_ast.empty(),
179+
true,
180+
phi::errors::InvalidArgument(
181+
"The input argument of argmax_schedule is empty! Please check."));
165182
ir::ModuleExpr mod_expr(vec_ast);
166183
ir::IRSchedule ir_sch(mod_expr);
167184
ir_sch.MergeExprs();

paddle/cinn/hlir/op/contrib/argmin.cc

Lines changed: 44 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -62,8 +62,10 @@ std::vector<Tensor> Argmin(const Tensor &in_tensor,
6262

6363
std::vector<Expr> output_shape;
6464
for (int i = 0; i < shape.size(); ++i) {
65-
CHECK(shape[i].is_constant())
66-
<< "Input tensor's shape should be constant value.";
65+
PADDLE_ENFORCE_EQ(shape[i].is_constant(),
66+
true,
67+
phi::errors::InvalidArgument(
68+
"Input tensor's shape should be constant value."));
6769
if (pos_axis == i) {
6870
if (keep_dims) {
6971
output_shape.push_back(Expr(1));
@@ -109,32 +111,44 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmin(
109111
keep_dims = absl::get<bool>(attrs.attr_store.at("keep_dim"));
110112
}
111113

112-
framework::CINNCompute argmin_compute(
113-
[=](lang::Args args, lang::RetValue *ret) {
114-
CHECK(!args.empty())
115-
<< "The input argument of argmin compute is empty! Please check.";
116-
cinn::common::CINNValuePack pack_args = args[0];
117-
CHECK_GE(pack_args.size(), 1U)
118-
<< "There should be 1 input args for argmax compute";
119-
Expr in_expr = pack_args[0];
120-
CHECK(in_expr.as_tensor());
121-
Tensor in_tensor = in_expr.as_tensor_ref();
122-
CHECK_EQ(pack_args.size(), 2U);
123-
CHECK(pack_args[1].is_string());
124-
std::string tensor_name = pack_args[1].operator std::string();
125-
auto out_tensor =
126-
Argmin(in_tensor, target, axis, keep_dims, tensor_name);
127-
128-
std::vector<CINNValue> cinn_values{CINNValue(out_tensor[0]),
129-
CINNValue(out_tensor[1]),
130-
CINNValue(out_tensor[2])};
131-
*ret = cinn::common::CINNValuePack{cinn_values};
132-
});
114+
framework::CINNCompute argmin_compute([=](lang::Args args,
115+
lang::RetValue *ret) {
116+
PADDLE_ENFORCE_EQ(
117+
!args.empty(),
118+
true,
119+
phi::errors::InvalidArgument(
120+
"The input argument of argmin compute is empty! Please check."));
121+
cinn::common::CINNValuePack pack_args = args[0];
122+
CHECK_GE(pack_args.size(), 1U)
123+
<< "There should be 1 input args for argmax compute";
124+
Expr in_expr = pack_args[0];
125+
PADDLE_ENFORCE_NOT_NULL(
126+
in_expr.as_tensor(),
127+
phi::errors::InvalidArgument(
128+
"The input argument of argmin compute is not tensor."));
129+
Tensor in_tensor = in_expr.as_tensor_ref();
130+
CHECK_EQ(pack_args.size(), 2U);
131+
PADDLE_ENFORCE_EQ(
132+
pack_args[1].is_string(),
133+
true,
134+
phi::errors::InvalidArgument(
135+
"The input argument of argmin compute is not string."));
136+
std::string tensor_name = pack_args[1].operator std::string();
137+
auto out_tensor = Argmin(in_tensor, target, axis, keep_dims, tensor_name);
138+
139+
std::vector<CINNValue> cinn_values{CINNValue(out_tensor[0]),
140+
CINNValue(out_tensor[1]),
141+
CINNValue(out_tensor[2])};
142+
*ret = cinn::common::CINNValuePack{cinn_values};
143+
});
133144

134145
framework::CINNSchedule argmin_schedule([=](lang::Args args,
135146
lang::RetValue *ret) {
136-
CHECK(!args.empty())
137-
<< "The input argument of arange_schedule is empty! Please check.\n";
147+
PADDLE_ENFORCE_EQ(
148+
!args.empty(),
149+
true,
150+
phi::errors::InvalidArgument(
151+
"The input argument of argmin schedule is empty! Please check."));
138152
cinn::common::CINNValuePack arg_pack = args[0];
139153
std::vector<Expr> vec_ast;
140154
for (int i = 0; i < arg_pack.size(); i++) {
@@ -143,7 +157,11 @@ std::shared_ptr<framework::OpStrategy> StrategyForArgmin(
143157
vec_ast.emplace_back(temp);
144158
}
145159
}
146-
CHECK(!vec_ast.empty());
160+
PADDLE_ENFORCE_EQ(
161+
!vec_ast.empty(),
162+
true,
163+
phi::errors::InvalidArgument(
164+
"The input argument of argmin schedule is empty! Please check."));
147165
ir::ModuleExpr mod_expr(vec_ast);
148166
ir::IRSchedule ir_sch(mod_expr);
149167
ir_sch.MergeExprs();

0 commit comments

Comments (0)