@@ -448,7 +448,7 @@ def check_layernorm(self, x_np, gamma_np, beta_np, dtype):
                 beta_static,
                 self.epsilon,
                 begin_norm_axis=1,
-            )
+            )[0]
             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
                 feed={
@@ -498,7 +498,7 @@ def check_layernorm_int8(self, x_np, gamma_np, beta_np, dtype):
                 quant_round_type=self.quant_round_type,
                 quant_max_bound=self.quant_max_bound,
                 quant_min_bound=self.quant_min_bound,
-            )
+            )[0]
             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
                 feed={
@@ -546,7 +546,7 @@ def check_residual_bias_add(self, x_np, residual_np, bias_np, dtype):
                 quant_round_type=self.quant_round_type,
                 quant_max_bound=self.quant_max_bound,
                 quant_min_bound=self.quant_min_bound,
-            )
+            )[0]

             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
@@ -556,7 +556,7 @@ def check_residual_bias_add(self, x_np, residual_np, bias_np, dtype):
                     "bias_static": bias_np.astype(dtype),
                 },
                 fetch_list=[
-                    outs[0]
+                    outs
                 ],  # NOTE: Only fetch `out`, because `residual_out` will not be initialized if both `norm_weight` and `norm_bias` are None.
             )
         return out_s, paddle_naive_residual_out
@@ -597,7 +597,7 @@ def check_residual_bias_layernorm(
             beta_static = paddle.static.data(
                 name="beta_static", shape=[self.cols], dtype='float32'
             )
-            outs = paddle.incubate.nn.functional.fused_layer_norm(
+            outs, residual = paddle.incubate.nn.functional.fused_layer_norm(
                 x_static,
                 gamma_static,
                 beta_static,
@@ -606,7 +606,7 @@ def check_residual_bias_layernorm(
                 residual_alpha=self.residual_alpha,
                 bias=bias_static,
                 residual=residual_static,
-            )
+            )[:2]

             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
@@ -617,7 +617,7 @@ def check_residual_bias_layernorm(
                     "residual_static": residual_np.astype(dtype),
                     "bias_static": bias_np.astype(dtype),
                 },
-                fetch_list=[outs],
+                fetch_list=[outs, residual],
             )
         return out_s, paddle_naive_layernorm_out, paddle_naive_residual_out

@@ -667,7 +667,7 @@ def check_residual_bias_layernorm_int8(
             beta_static = paddle.static.data(
                 name="beta_static", shape=[self.cols], dtype='float32'
             )
-            outs = paddle.incubate.nn.functional.fused_layer_norm(
+            outs, residual = paddle.incubate.nn.functional.fused_layer_norm(
                 x_static,
                 gamma_static,
                 beta_static,
@@ -680,7 +680,7 @@ def check_residual_bias_layernorm_int8(
                 quant_round_type=self.quant_round_type,
                 quant_max_bound=self.quant_max_bound,
                 quant_min_bound=self.quant_min_bound,
-            )
+            )[:2]

             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
@@ -691,7 +691,7 @@ def check_residual_bias_layernorm_int8(
                     "residual_static": residual_np.astype(dtype),
                     "bias_static": bias_np.astype(dtype),
                 },
-                fetch_list=[outs],
+                fetch_list=[outs, residual],
             )
         return out_s, paddle_naive_layernorm_out, paddle_naive_residual_out

@@ -847,7 +847,7 @@ def check_layernorm(self, x_np, gamma_np, beta_np, dtype):

         paddle_layernorm_out = paddle.incubate.nn.functional.fused_layer_norm(
             x, gamma, beta, self.epsilon, begin_norm_axis=1
-        )
+        )[0]
         paddle_naive_layernorm_out = naive_layer_norm(
             x, gamma, beta, self.epsilon
         )
@@ -869,7 +869,7 @@ def check_residual_bias_add(self, x_np, residual_np, bias_np, dtype):
             bias=bias,
             residual=residual,
             residual_alpha=self.residual_alpha,
-        )
+        )[0]

         paddle_naive_residual_out = naive_residual_bias_add(
             x, residual, bias, self.residual_alpha
@@ -919,7 +919,7 @@ def test_residual_bias_add(self):
             self.x_np, self.residual_np, self.bias_np, 'float32'
         )
         np.testing.assert_allclose(
-            paddle_residual_bias_out[0].numpy(),
+            paddle_residual_bias_out.numpy(),
             paddle_naive_residual_bias_out.numpy(),
             rtol=1e-3,
             atol=1e-3,
@@ -931,7 +931,7 @@ def test_layernorm(self):
         )

         np.testing.assert_allclose(
-            paddle_layernorm[0].numpy(),
+            paddle_layernorm.numpy(),
             paddle_naive_layernorm.numpy(),
             rtol=1e-3,
             atol=1e-3,
@@ -1016,7 +1016,7 @@ def check_layernorm(self, x_np, gamma_np, beta_np, dtype):
                 beta_static,
                 self.epsilon,
                 begin_norm_axis=1,
-            )
+            )[0]
             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
                 feed={
@@ -1060,7 +1060,7 @@ def check_residual_bias_add(self, x_np, residual_np, bias_np, dtype):
                 bias=bias_static,
                 residual=residual_static,
                 residual_alpha=self.residual_alpha,
-            )
+            )[0]

             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
@@ -1070,7 +1070,7 @@ def check_residual_bias_add(self, x_np, residual_np, bias_np, dtype):
                     "bias_static": bias_np.astype(dtype),
                 },
                 fetch_list=[
-                    outs[0]
+                    outs
                 ],  # NOTE: Only fetch `out`, because `residual_out` will not be initialized if both `norm_weight` and `norm_bias` are None.
             )
         return out_s, paddle_naive_residual_out
@@ -1111,7 +1111,7 @@ def check_residual_bias_layernorm(
             beta_static = paddle.static.data(
                 name="beta_static", shape=[self.cols], dtype='float32'
             )
-            outs = paddle.incubate.nn.functional.fused_layer_norm(
+            outs, residual = paddle.incubate.nn.functional.fused_layer_norm(
                 x_static,
                 gamma_static,
                 beta_static,
@@ -1120,7 +1120,7 @@ def check_residual_bias_layernorm(
                 residual_alpha=self.residual_alpha,
                 bias=bias_static,
                 residual=residual_static,
-            )
+            )[:2]

             exe = paddle.static.Executor(self.place)
             out_s = exe.run(
@@ -1131,7 +1131,7 @@ def check_residual_bias_layernorm(
                     "residual_static": residual_np.astype(dtype),
                     "bias_static": bias_np.astype(dtype),
                 },
-                fetch_list=[outs],
+                fetch_list=[outs, residual],
             )
         return out_s, paddle_naive_layernorm_out, paddle_naive_residual_out

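For reference, a minimal standalone sketch of the tuple-return convention these hunks rely on: `fused_layer_norm` returns a tuple, so the normalized output is element `[0]`, and when a `residual` input is passed the first two elements are `(out, residual_out)`. The shapes and epsilon below are illustrative only (not taken from the test file), and the fused op generally requires a CUDA build of Paddle.

import paddle

# Hypothetical shapes for illustration; not from the test file.
x = paddle.randn([2, 256], dtype='float32')
gamma = paddle.ones([256], dtype='float32')
beta = paddle.zeros([256], dtype='float32')

# Plain fused layer norm: index [0] to get the normalized output.
out = paddle.incubate.nn.functional.fused_layer_norm(
    x, gamma, beta, 1e-5, begin_norm_axis=1
)[0]

# With a residual input, the first two results are (out, residual_out).
residual = paddle.randn([2, 256], dtype='float32')
out, residual_out = paddle.incubate.nn.functional.fused_layer_norm(
    x, gamma, beta, 1e-5, begin_norm_axis=1, residual=residual
)[:2]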