@@ -57,12 +57,12 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
5757 // make sure Mean/MeanOut and Variance/VarianceOut share memory in Python
5858 PADDLE_ENFORCE_EQ (ctx->Inputs (" Mean" )[0 ],
5959 ctx->Outputs (" MeanOut" )[0 ],
60- phi ::errors::InvalidArgument (
60+ common ::errors::InvalidArgument (
6161 " Mean and MeanOut should share the same memory" ));
6262 PADDLE_ENFORCE_EQ (
6363 ctx->Inputs (" Variance" )[0 ],
6464 ctx->Outputs (" VarianceOut" )[0 ],
65- phi ::errors::InvalidArgument (
65+ common ::errors::InvalidArgument (
6666 " Variance and VarianceOut should share the same memory" ));
6767
6868 const auto x_dims = ctx->GetInputDim (" X" );
@@ -71,7 +71,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
7171 PADDLE_ENFORCE_EQ (
7272 (x_dims[i] == -1 ) || (x_dims[i] > 0 ),
7373 true ,
74- phi ::errors::InvalidArgument (
74+ common ::errors::InvalidArgument (
7575 " Each dimension of input tensor is expected to be -1 or a "
7676 " positive number, but received %d. Input's shape is [%s]." ,
7777 x_dims[i],
@@ -85,7 +85,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
8585 auto mom = ctx->Inputs (" MomentumTensor" );
8686 PADDLE_ENFORCE_EQ (mom.size (),
8787 1 ,
88- phi ::errors::InvalidArgument (
88+ common ::errors::InvalidArgument (
8989 " The input tensor MomentumTensor's size must be 1"
9090 " But received: MomentumTensor's size is [%d]" ,
9191 mom.size ()));
@@ -94,7 +94,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
9494 PADDLE_ENFORCE_GE (
9595 x_dims.size (),
9696 2 ,
97- phi ::errors::InvalidArgument (
97+ common ::errors::InvalidArgument (
9898 " ShapeError: the dimension of input "
9999 " X must greater than or equal to 2. But received: the shape of input "
100100 " X = [%s], the dimension of input X =[%d]" ,
@@ -103,7 +103,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
103103 PADDLE_ENFORCE_LE (
104104 x_dims.size (),
105105 5 ,
106- phi ::errors::InvalidArgument (
106+ common ::errors::InvalidArgument (
107107 " ShapeError: the dimension of input X "
108108 " must smaller than or equal to 5. But received: the shape of input X "
109109 " = [%s], the dimension of input X = [%d]" ,
@@ -121,7 +121,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
121121 PADDLE_ENFORCE_EQ (
122122 scale_dim.size (),
123123 1UL ,
124- phi ::errors::InvalidArgument (
124+ common ::errors::InvalidArgument (
125125 " ShapeError: the dimension of scale must equal to 1."
126126 " But received: the shape of scale is [%s], the dimension "
127127 " of scale is [%d]" ,
@@ -134,7 +134,7 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
134134 PADDLE_ENFORCE_EQ (
135135 bias_dim.size (),
136136 1UL ,
137- phi ::errors::InvalidArgument (
137+ common ::errors::InvalidArgument (
138138 " ShapeError: the dimension of bias must equal to 1."
139139 " But received: the shape of bias is [%s],the dimension "
140140 " of bias is [%d]" ,
@@ -153,14 +153,14 @@ void BatchNormOp::InferShape(framework::InferShapeContext *ctx) const {
153153 if (check) {
154154 PADDLE_ENFORCE_EQ (ctx->GetInputDim (" Scale" )[0 ],
155155 C,
156- phi ::errors::InvalidArgument (
156+ common ::errors::InvalidArgument (
157157 " ShapeError: the shape of scale must equal to [%d]"
158158 " But received: the shape of scale is [%d]" ,
159159 C,
160160 ctx->GetInputDim (" Scale" )[0 ]));
161161 PADDLE_ENFORCE_EQ (ctx->GetInputDim (" Bias" )[0 ],
162162 C,
163- phi ::errors::InvalidArgument (
163+ common ::errors::InvalidArgument (
164164 " ShapeError: the shape of bias must equal to [%d]"
165165 " But received: the shape of bias is [%d]" ,
166166 C,
@@ -195,25 +195,25 @@ phi::KernelKey BatchNormOp::GetExpectedKernelType(
195195 bn_param_type,
196196 framework::TransToProtoVarType (
197197 ctx.Input <phi::DenseTensor>(" Scale" )->dtype ()),
198- phi ::errors::InvalidArgument (" Scale input should be of float type" ));
198+ common ::errors::InvalidArgument (" Scale input should be of float type" ));
199199 }
200200 if (ctx.HasInput (" Bias" )) {
201201 PADDLE_ENFORCE_EQ (
202202 bn_param_type,
203203 framework::TransToProtoVarType (
204204 ctx.Input <phi::DenseTensor>(" Bias" )->dtype ()),
205- phi ::errors::InvalidArgument (" Bias input should be of float type" ));
205+ common ::errors::InvalidArgument (" Bias input should be of float type" ));
206206 }
207207 PADDLE_ENFORCE_EQ (
208208 bn_param_type,
209209 framework::TransToProtoVarType (
210210 ctx.Input <phi::DenseTensor>(" Mean" )->dtype ()),
211- phi ::errors::InvalidArgument (" Mean input should be of float type" ));
212- PADDLE_ENFORCE_EQ (
213- bn_param_type,
214- framework::TransToProtoVarType (
215- ctx. Input <phi::DenseTensor>( " Variance " )-> dtype ()),
216- phi::errors::InvalidArgument ( " Variance input should be of float type" ));
211+ common ::errors::InvalidArgument (" Mean input should be of float type" ));
212+ PADDLE_ENFORCE_EQ (bn_param_type,
213+ framework::TransToProtoVarType (
214+ ctx. Input <phi::DenseTensor>( " Variance " )-> dtype ()),
215+ common::errors::InvalidArgument (
216+ " Variance input should be of float type" ));
217217 return phi::KernelKey (input_data_type, ctx.GetPlace ());
218218}
219219
@@ -254,11 +254,11 @@ void BatchNormOpMaker::Make() {
254254 PADDLE_ENFORCE_GE (
255255 epsilon,
256256 0 .0f ,
257- phi ::errors::InvalidArgument (
257+ common ::errors::InvalidArgument (
258258 " 'epsilon' should be greater or equal than 0.0." ));
259259 PADDLE_ENFORCE_LE (epsilon,
260260 0 .001f ,
261- phi ::errors::InvalidArgument (
261+ common ::errors::InvalidArgument (
262262 " 'epsilon' should be less or equal than 0.001." ));
263263 });
264264 AddAttr<std::string>(" data_layout" , " " ).SetDefault (" NCHW" );
@@ -349,7 +349,7 @@ void BatchNormGradOp::InferShape(framework::InferShapeContext *ctx) const {
349349
350350 PADDLE_ENFORCE_EQ ((has_scale_grad == has_bias_grad),
351351 true ,
352- phi ::errors::NotFound (
352+ common ::errors::NotFound (
353353 " Output(Scale@GRAD) and Output(Bias@GRAD) must be null "
354354 " or not be null at same time. But now, "
355355 " has Scale@Grad=[%d], has Bias@GRAD=[%d]" ,
@@ -361,7 +361,7 @@ void BatchNormGradOp::InferShape(framework::InferShapeContext *ctx) const {
361361 PADDLE_ENFORCE_EQ (
362362 !ctx->Attrs ().Get <bool >(" use_mkldnn" ),
363363 true ,
364- phi ::errors::InvalidArgument (
364+ common ::errors::InvalidArgument (
365365 " Using global stats during training is not supported "
366366 " in oneDNN version of batch_norm_gradient kernel now." ));
367367 }
@@ -391,15 +391,15 @@ phi::KernelKey BatchNormGradOp::GetExpectedKernelType(
391391 const auto *var = ctx.InputVar (framework::GradVarName (" Y" ));
392392 if (var == nullptr ) {
393393 PADDLE_THROW (
394- phi ::errors::InvalidArgument (" can't find gradient variable of Y" ));
394+ common ::errors::InvalidArgument (" can't find gradient variable of Y" ));
395395 }
396396 const phi::DenseTensor *t = nullptr ;
397397 if (var->IsType <phi::DenseTensor>()) {
398398 t = &var->Get <phi::DenseTensor>();
399399 }
400400 if (t == nullptr ) {
401401 PADDLE_THROW (
402- phi ::errors::InvalidArgument (" gradient variable of Y is empty" ));
402+ common ::errors::InvalidArgument (" gradient variable of Y is empty" ));
403403 }
404404
405405 auto data_type = OperatorWithKernel::IndicateVarDataType (ctx, " X" );
@@ -532,15 +532,16 @@ phi::KernelKey BatchNormDoubleGradOp::GetExpectedKernelType(
532532 const framework::ExecutionContext &ctx) const {
533533 const auto *var = ctx.InputVar (" DY" );
534534 if (var == nullptr ) {
535- PADDLE_THROW (phi::errors::NotFound (" cannot find gradient variable of Y" ));
535+ PADDLE_THROW (
536+ common::errors::NotFound (" cannot find gradient variable of Y" ));
536537 }
537538 const phi::DenseTensor *t = nullptr ;
538539 if (var->IsType <phi::DenseTensor>()) {
539540 t = &var->Get <phi::DenseTensor>();
540541 }
541542 if (t == nullptr ) {
542543 PADDLE_THROW (
543- phi ::errors::InvalidArgument (" gradient variable of Y is empty" ));
544+ common ::errors::InvalidArgument (" gradient variable of Y is empty" ));
544545 }
545546 return phi::KernelKey (OperatorWithKernel::IndicateVarDataType (ctx, " X" ),
546547 ctx.GetPlace ());