Skip to content

Commit 92abe74

Browse files
authored
Fix (#66646)
1 parent 7a6fdb9 commit 92abe74

File tree

184 files changed

+1269
-1240
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

184 files changed

+1269
-1240
lines changed

paddle/fluid/operators/activation_op.h

Lines changed: 32 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -53,13 +53,13 @@ inline void ExtractActivationTensor(const framework::ExecutionContext& context,
5353
phi::DenseTensor** Out) {
5454
auto x_var = context.InputVar("X");
5555
auto out_var = context.OutputVar("Out");
56-
PADDLE_ENFORCE_NOT_NULL(
57-
x_var,
58-
phi::errors::NotFound("Cannot get input Variable X, variable name = %s",
59-
context.InputName("X")));
56+
PADDLE_ENFORCE_NOT_NULL(x_var,
57+
common::errors::NotFound(
58+
"Cannot get input Variable X, variable name = %s",
59+
context.InputName("X")));
6060
PADDLE_ENFORCE_NOT_NULL(
6161
out_var,
62-
phi::errors::NotFound(
62+
common::errors::NotFound(
6363
"Cannot get output Variable Out, variable name = %s",
6464
context.OutputName("Out")));
6565
if (CanBeUsedBySelectedRows.count(context.Type())) {
@@ -73,9 +73,9 @@ inline void ExtractActivationTensor(const framework::ExecutionContext& context,
7373

7474
PADDLE_ENFORCE_NOT_NULL(
7575
*Out,
76-
phi::errors::NotFound("Cannot get the tensor from the Variable "
77-
"Output(Out), variable name = %s",
78-
context.OutputName("Out")));
76+
common::errors::NotFound("Cannot get the tensor from the Variable "
77+
"Output(Out), variable name = %s",
78+
context.OutputName("Out")));
7979
}
8080

8181
template <ActBwdOpFwdDeps kDepValue>
@@ -94,21 +94,23 @@ inline void ExtractActivationGradTensor(
9494
out_var = context.InputVar("Out");
9595
PADDLE_ENFORCE_NOT_NULL(
9696
out_var,
97-
phi::errors::NotFound(
97+
common::errors::NotFound(
9898
"Cannot get input Variable Out, variable name = %s",
9999
context.InputName("Out")));
100100
}
101101

102102
PADDLE_ENFORCE_NOT_NULL(
103103
out_grad_var,
104-
phi::errors::NotFound("Cannot get input Variable %s, variable name = %s",
105-
framework::GradVarName("Out"),
106-
context.InputName(framework::GradVarName("Out"))));
104+
common::errors::NotFound(
105+
"Cannot get input Variable %s, variable name = %s",
106+
framework::GradVarName("Out"),
107+
context.InputName(framework::GradVarName("Out"))));
107108
PADDLE_ENFORCE_NOT_NULL(
108109
x_grad_var,
109-
phi::errors::NotFound("Cannot get output Variable %s, variable name = %s",
110-
framework::GradVarName("X"),
111-
context.OutputName(framework::GradVarName("X"))));
110+
common::errors::NotFound(
111+
"Cannot get output Variable %s, variable name = %s",
112+
framework::GradVarName("X"),
113+
context.OutputName(framework::GradVarName("X"))));
112114

113115
if (CanBeUsedBySelectedRows.count(context.Type())) {
114116
*dOut = paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(
@@ -135,19 +137,19 @@ inline void ExtractActivationGradTensor(
135137
}
136138
}
137139

138-
PADDLE_ENFORCE_NOT_NULL(
139-
*dX,
140-
phi::errors::NotFound("Cannot get the tensor from the Variable "
141-
"Output(Out), variable name = %s",
142-
context.OutputName(framework::GradVarName("X"))));
140+
PADDLE_ENFORCE_NOT_NULL(*dX,
141+
common::errors::NotFound(
142+
"Cannot get the tensor from the Variable "
143+
"Output(Out), variable name = %s",
144+
context.OutputName(framework::GradVarName("X"))));
143145

144146
if (static_cast<int>(kDepValue) & static_cast<int>(ActBwdOpFwdDeps::kDepX)) {
145147
auto x_var = context.InputVar("X");
146148
PADDLE_ENFORCE_NOT_NULL(
147149
x_var,
148-
phi::errors::NotFound("Cannot get the tensor from the "
149-
"Variable Input(X), variable name = %s",
150-
context.InputName("X")));
150+
common::errors::NotFound("Cannot get the tensor from the "
151+
"Variable Input(X), variable name = %s",
152+
context.InputName("X")));
151153
if (CanBeUsedBySelectedRows.count(context.Type())) {
152154
*X = paddle::framework::GetLoDTensorOrSelectedRowsValueFromVar(*x_var);
153155
} else {
@@ -384,24 +386,26 @@ inline void ExtractDoubleGradTensorWithInputDOut(
384386
auto ddo_var = ctx.OutputVar("DDOut");
385387
PADDLE_ENFORCE_NOT_NULL(
386388
ddx_var,
387-
phi::errors::NotFound("Cannot get input Variable Out, variable name = %s",
388-
ctx.InputName("DDX")));
389+
common::errors::NotFound(
390+
"Cannot get input Variable Out, variable name = %s",
391+
ctx.InputName("DDX")));
389392
*ddX = ctx.Input<phi::DenseTensor>("DDX");
390393
if (ddo_var) {
391394
*ddOut = ctx.Output<phi::DenseTensor>("DDOut");
392395
}
393396
PADDLE_ENFORCE_NOT_NULL(
394397
ddX,
395-
phi::errors::NotFound(
398+
common::errors::NotFound(
396399
"Cannot get the tensor from the Variable DDX, variable name = %s",
397400
ctx.OutputName("DDX")));
398401

399402
// extract x(input), dx(output)
400403
auto x_var = ctx.InputVar("X");
401404
PADDLE_ENFORCE_NOT_NULL(
402405
x_var,
403-
phi::errors::NotFound("Cannot get input Variable Out, variable name = %s",
404-
ctx.InputName("X")));
406+
common::errors::NotFound(
407+
"Cannot get input Variable Out, variable name = %s",
408+
ctx.InputName("X")));
405409
auto dx_var = ctx.OutputVar("DX");
406410
*X = ctx.Input<phi::DenseTensor>("X");
407411
if (dx_var) {

paddle/fluid/operators/array_operator.h

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -34,15 +34,16 @@ class ArrayOp : public framework::OperatorBase {
3434
size_t GetOffset(const framework::Scope &scope,
3535
const phi::Place &place) const {
3636
auto *i = scope.FindVar(Input("I"));
37-
PADDLE_ENFORCE_NOT_NULL(i, phi::errors::NotFound("Input(I) is not found."));
37+
PADDLE_ENFORCE_NOT_NULL(i,
38+
common::errors::NotFound("Input(I) is not found."));
3839
auto &i_tensor = i->Get<phi::DenseTensor>();
39-
PADDLE_ENFORCE_EQ(
40-
i_tensor.numel(),
41-
1,
42-
phi::errors::InvalidArgument("Input(I) must have numel 1. "
43-
"But received %d, and it's shape is [%s].",
44-
i_tensor.numel(),
45-
i_tensor.dims()));
40+
PADDLE_ENFORCE_EQ(i_tensor.numel(),
41+
1,
42+
common::errors::InvalidArgument(
43+
"Input(I) must have numel 1. "
44+
"But received %d, and it's shape is [%s].",
45+
i_tensor.numel(),
46+
i_tensor.dims()));
4647

4748
// get device context from pool
4849
phi::DeviceContextPool &pool = phi::DeviceContextPool::Instance();

paddle/fluid/operators/assert_op.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -58,12 +58,12 @@ class AssertOp : public framework::OperatorBase {
5858
scope.FindVar(Input(kCond.data()));
5959
PADDLE_ENFORCE_NOT_NULL(
6060
cond_var_ptr,
61-
phi::errors::NotFound("Input(Condition) of AssertOp is not found."));
61+
common::errors::NotFound("Input(Condition) of AssertOp is not found."));
6262
const phi::DenseTensor &cond = cond_var_ptr->Get<phi::DenseTensor>();
6363
PADDLE_ENFORCE_EQ(
6464
cond.numel(),
6565
1,
66-
phi::errors::InvalidArgument(
66+
common::errors::InvalidArgument(
6767
"The numel of Input(Condition) of AssertOp must be 1. But now "
6868
"the Condition's shape is %s.",
6969
cond.dims().to_str()));
@@ -83,7 +83,7 @@ class AssertOp : public framework::OperatorBase {
8383
formatter.Print(x_tensor, name);
8484
}
8585

86-
PADDLE_THROW(phi::errors::InvalidArgument(
86+
PADDLE_THROW(common::errors::InvalidArgument(
8787
"The condition variable '%s' of AssertOp must be "
8888
"true, but received false",
8989
Input(kCond.data())));

paddle/fluid/operators/assign_op.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ class AssignFunctor {
6363
PADDLE_ENFORCE_EQ(
6464
true,
6565
false,
66-
phi::errors::PermissionDenied(
66+
common::errors::PermissionDenied(
6767
"Not support type for assign op with type %s", typeid(T).name()));
6868
}
6969

paddle/fluid/operators/batch_norm_op.cc

Lines changed: 27 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -57,12 +57,12 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
5757
// make sure Mean/MeanOut and Variance/VarianceOut share memory in Python
5858
PADDLE_ENFORCE_EQ(ctx->Inputs("Mean")[0],
5959
ctx->Outputs("MeanOut")[0],
60-
phi::errors::InvalidArgument(
60+
common::errors::InvalidArgument(
6161
"Mean and MeanOut should share the same memory"));
6262
PADDLE_ENFORCE_EQ(
6363
ctx->Inputs("Variance")[0],
6464
ctx->Outputs("VarianceOut")[0],
65-
phi::errors::InvalidArgument(
65+
common::errors::InvalidArgument(
6666
"Variance and VarianceOut should share the same memory"));
6767

6868
const auto x_dims = ctx->GetInputDim("X");
@@ -71,7 +71,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
7171
PADDLE_ENFORCE_EQ(
7272
(x_dims[i] == -1) || (x_dims[i] > 0),
7373
true,
74-
phi::errors::InvalidArgument(
74+
common::errors::InvalidArgument(
7575
"Each dimension of input tensor is expected to be -1 or a "
7676
"positive number, but received %d. Input's shape is [%s].",
7777
x_dims[i],
@@ -85,7 +85,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
8585
auto mom = ctx->Inputs("MomentumTensor");
8686
PADDLE_ENFORCE_EQ(mom.size(),
8787
1,
88-
phi::errors::InvalidArgument(
88+
common::errors::InvalidArgument(
8989
"The input tensor MomentumTensor's size must be 1"
9090
"But received: MomentumTensor's size is [%d]",
9191
mom.size()));
@@ -94,7 +94,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
9494
PADDLE_ENFORCE_GE(
9595
x_dims.size(),
9696
2,
97-
phi::errors::InvalidArgument(
97+
common::errors::InvalidArgument(
9898
"ShapeError: the dimension of input "
9999
"X must greater than or equal to 2. But received: the shape of input "
100100
"X = [%s], the dimension of input X =[%d]",
@@ -103,7 +103,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
103103
PADDLE_ENFORCE_LE(
104104
x_dims.size(),
105105
5,
106-
phi::errors::InvalidArgument(
106+
common::errors::InvalidArgument(
107107
"ShapeError: the dimension of input X "
108108
"must smaller than or equal to 5. But received: the shape of input X "
109109
"= [%s], the dimension of input X = [%d]",
@@ -121,7 +121,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
121121
PADDLE_ENFORCE_EQ(
122122
scale_dim.size(),
123123
1UL,
124-
phi::errors::InvalidArgument(
124+
common::errors::InvalidArgument(
125125
"ShapeError: the dimension of scale must equal to 1."
126126
"But received: the shape of scale is [%s], the dimension "
127127
"of scale is [%d]",
@@ -134,7 +134,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
134134
PADDLE_ENFORCE_EQ(
135135
bias_dim.size(),
136136
1UL,
137-
phi::errors::InvalidArgument(
137+
common::errors::InvalidArgument(
138138
"ShapeError: the dimension of bias must equal to 1."
139139
"But received: the shape of bias is [%s],the dimension "
140140
"of bias is [%d]",
@@ -153,14 +153,14 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
153153
if (check) {
154154
PADDLE_ENFORCE_EQ(ctx->GetInputDim("Scale")[0],
155155
C,
156-
phi::errors::InvalidArgument(
156+
common::errors::InvalidArgument(
157157
"ShapeError: the shape of scale must equal to [%d]"
158158
"But received: the shape of scale is [%d]",
159159
C,
160160
ctx->GetInputDim("Scale")[0]));
161161
PADDLE_ENFORCE_EQ(ctx->GetInputDim("Bias")[0],
162162
C,
163-
phi::errors::InvalidArgument(
163+
common::errors::InvalidArgument(
164164
"ShapeError: the shape of bias must equal to [%d]"
165165
"But received: the shape of bias is [%d]",
166166
C,
@@ -195,25 +195,25 @@ phi::KernelKey BatchNormOp::GetExpectedKernelType(
195195
bn_param_type,
196196
framework::TransToProtoVarType(
197197
ctx.Input<phi::DenseTensor>("Scale")->dtype()),
198-
phi::errors::InvalidArgument("Scale input should be of float type"));
198+
common::errors::InvalidArgument("Scale input should be of float type"));
199199
}
200200
if (ctx.HasInput("Bias")) {
201201
PADDLE_ENFORCE_EQ(
202202
bn_param_type,
203203
framework::TransToProtoVarType(
204204
ctx.Input<phi::DenseTensor>("Bias")->dtype()),
205-
phi::errors::InvalidArgument("Bias input should be of float type"));
205+
common::errors::InvalidArgument("Bias input should be of float type"));
206206
}
207207
PADDLE_ENFORCE_EQ(
208208
bn_param_type,
209209
framework::TransToProtoVarType(
210210
ctx.Input<phi::DenseTensor>("Mean")->dtype()),
211-
phi::errors::InvalidArgument("Mean input should be of float type"));
212-
PADDLE_ENFORCE_EQ(
213-
bn_param_type,
214-
framework::TransToProtoVarType(
215-
ctx.Input<phi::DenseTensor>("Variance")->dtype()),
216-
phi::errors::InvalidArgument("Variance input should be of float type"));
211+
common::errors::InvalidArgument("Mean input should be of float type"));
212+
PADDLE_ENFORCE_EQ(bn_param_type,
213+
framework::TransToProtoVarType(
214+
ctx.Input<phi::DenseTensor>("Variance")->dtype()),
215+
common::errors::InvalidArgument(
216+
"Variance input should be of float type"));
217217
return phi::KernelKey(input_data_type, ctx.GetPlace());
218218
}
219219

@@ -254,11 +254,11 @@ void BatchNormOpMaker::Make() {
254254
PADDLE_ENFORCE_GE(
255255
epsilon,
256256
0.0f,
257-
phi::errors::InvalidArgument(
257+
common::errors::InvalidArgument(
258258
"'epsilon' should be greater or equal than 0.0."));
259259
PADDLE_ENFORCE_LE(epsilon,
260260
0.001f,
261-
phi::errors::InvalidArgument(
261+
common::errors::InvalidArgument(
262262
"'epsilon' should be less or equal than 0.001."));
263263
});
264264
AddAttr<std::string>("data_layout", "").SetDefault("NCHW");
@@ -349,7 +349,7 @@ void BatchNormGradOp::InferShape(framework::InferShapeContext *ctx) const {
349349

350350
PADDLE_ENFORCE_EQ((has_scale_grad == has_bias_grad),
351351
true,
352-
phi::errors::NotFound(
352+
common::errors::NotFound(
353353
"Output(Scale@GRAD) and Output(Bias@GRAD) must be null "
354354
"or not be null at same time. But now, "
355355
"has Scale@Grad=[%d], has Bias@GRAD=[%d]",
@@ -361,7 +361,7 @@ void BatchNormGradOp::InferShape(framework::InferShapeContext *ctx) const {
361361
PADDLE_ENFORCE_EQ(
362362
!ctx->Attrs().Get<bool>("use_mkldnn"),
363363
true,
364-
phi::errors::InvalidArgument(
364+
common::errors::InvalidArgument(
365365
"Using global stats during training is not supported "
366366
"in oneDNN version of batch_norm_gradient kernel now."));
367367
}
@@ -391,15 +391,15 @@ phi::KernelKey BatchNormGradOp::GetExpectedKernelType(
391391
const auto *var = ctx.InputVar(framework::GradVarName("Y"));
392392
if (var == nullptr) {
393393
PADDLE_THROW(
394-
phi::errors::InvalidArgument("can't find gradient variable of Y"));
394+
common::errors::InvalidArgument("can't find gradient variable of Y"));
395395
}
396396
const phi::DenseTensor *t = nullptr;
397397
if (var->IsType<phi::DenseTensor>()) {
398398
t = &var->Get<phi::DenseTensor>();
399399
}
400400
if (t == nullptr) {
401401
PADDLE_THROW(
402-
phi::errors::InvalidArgument("gradient variable of Y is empty"));
402+
common::errors::InvalidArgument("gradient variable of Y is empty"));
403403
}
404404

405405
auto data_type = OperatorWithKernel::IndicateVarDataType(ctx, "X");
@@ -532,15 +532,16 @@ phi::KernelKey BatchNormDoubleGradOp::GetExpectedKernelType(
532532
const framework::ExecutionContext &ctx) const {
533533
const auto *var = ctx.InputVar("DY");
534534
if (var == nullptr) {
535-
PADDLE_THROW(phi::errors::NotFound("cannot find gradient variable of Y"));
535+
PADDLE_THROW(
536+
common::errors::NotFound("cannot find gradient variable of Y"));
536537
}
537538
const phi::DenseTensor *t = nullptr;
538539
if (var->IsType<phi::DenseTensor>()) {
539540
t = &var->Get<phi::DenseTensor>();
540541
}
541542
if (t == nullptr) {
542543
PADDLE_THROW(
543-
phi::errors::InvalidArgument("gradient variable of Y is empty"));
544+
common::errors::InvalidArgument("gradient variable of Y is empty"));
544545
}
545546
return phi::KernelKey(OperatorWithKernel::IndicateVarDataType(ctx, "X"),
546547
ctx.GetPlace());

paddle/fluid/operators/beam_search_decode_op.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ struct BeamSearchDecodeFunctor {
8383
template <typename T>
8484
void apply_mix() const {
8585
if (std::is_same<bool, T>::value) {
86-
PADDLE_THROW(phi::errors::InvalidArgument(
86+
PADDLE_THROW(common::errors::InvalidArgument(
8787
"beam search decode op does not support bool!"));
8888

8989
} else {

0 commit comments

Comments (0)