Skip to content

Commit 4b822fd

Browse files
authored
Fix typos in test directory (#66981)
1 parent f04aa61 commit 4b822fd

16 files changed

+25
-25
lines changed

test/collective/README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,14 @@
77
the properties are the following:
88
* `name`: the test's name
99
* `os`: The supported operating system, ignoring case. If the test runs on multiple operating systems, use ";" to split systems, for example, `apple;linux` means the test runs on both Apple and Linux. The supported values are `linux`, `win32` and `apple`. If the value is empty, this means the test runs on all operating systems.
10-
* `arch`: the device's architecture. similar to `os`, multiple values are splited by ";" and ignoring case. The supported architectures are `gpu`, `xpu` and `rocm`.
10+
* `arch`: the device's architecture. Similar to `os`, multiple values are split by ";" and are case-insensitive. The supported architectures are `gpu`, `xpu` and `rocm`.
1111
* `timeout`: timeout of a unittest, whose unit is second. Blank means default.
1212
* `run_type`: run_type of a unittest. Supported values are `NIGHTLY`, `EXCLUSIVE`, `CINN`, `DIST`, `GPUPS`, `INFER`, `EXCLUSIVE:NIGHTLY` and `DIST:NIGHTLY`, which are case-insensitive.
1313
* `launcher`: the test launcher. Supported values are test_runner.py, dist_test.sh and custom scripts' names. Blank means test_runner.py.
1414
* `num_port`: the number of port used in a distributed unit test. Blank means automatically distributed port.
1515
* `run_serial`: whether to run in serial mode. The value can be 1 or 0. Default (empty) is 0. Blank means default.
16-
* `ENVS`: required environments. multiple environments are splited by ";".
17-
* `conditions`: extra required conditions for some tests. The value is a list of boolean expression in cmake programmer, splited with ";". For example, the value can be `WITH_DGC;NOT WITH_NCCL` or `WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212`,The relationship between these expressions is a conjunction.
16+
* `ENVS`: required environments. multiple environments are split by ";".
17+
* `conditions`: extra required conditions for some tests. The value is a list of boolean expressions in CMake grammar, split with ";". For example, the value can be `WITH_DGC;NOT WITH_NCCL` or `WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212`. The relationship between these expressions is a conjunction.
1818

1919
### step 3. Generate CMakeLists.txt
2020
Run the cmd:

test/deprecated/legacy_test/test_group_norm_op_deprecated.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1529,7 +1529,7 @@ def get_static_desire(self, place):
15291529
if core._is_fwd_prim_enabled():
15301530
paddle.incubate.autograd.primapi.to_prim(mp.blocks)
15311531
fwd_ops_new = [op.type for op in blocks[0].ops]
1532-
# Ensure that group_norm is splitted into small ops
1532+
# Ensure that group_norm is split into small ops
15331533
assert 'group_norm' not in fwd_ops_new
15341534

15351535
grads = paddle.static.gradients([output], [input_, scale_, bias_])
@@ -1621,7 +1621,7 @@ def test_static_comp(self):
16211621
if core._is_fwd_prim_enabled():
16221622
paddle.incubate.autograd.primapi.to_prim(mp.blocks)
16231623
fwd_ops_new = [op.type for op in blocks[0].ops]
1624-
# Ensure that group_norm is splitted into small ops
1624+
# Ensure that group_norm is split into small ops
16251625
assert 'group_norm' not in fwd_ops_new
16261626

16271627
grads = paddle.static.gradients(

test/deprecated/legacy_test/test_instance_norm_op_deprecated.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -502,7 +502,7 @@ def get_static_desire(self, place):
502502
if core._is_fwd_prim_enabled():
503503
paddle.incubate.autograd.primapi.to_prim(mp.blocks)
504504
fwd_ops_new = [op.type for op in blocks[0].ops]
505-
# Ensure that instance_norm is splitted into small ops
505+
# Ensure that instance_norm is split into small ops
506506
assert 'instance_norm' not in fwd_ops_new
507507

508508
grads = paddle.static.gradients([output], [input_, scale_, bias_])
@@ -586,7 +586,7 @@ def test_static_comp(self):
586586
if core._is_fwd_prim_enabled():
587587
paddle.incubate.autograd.primapi.to_prim(mp.blocks)
588588
fwd_ops_new = [op.type for op in blocks[0].ops]
589-
# Ensure that instance_norm is splitted into small ops
589+
# Ensure that instance_norm is split into small ops
590590
assert 'instance_norm' not in fwd_ops_new
591591

592592
grads = paddle.static.gradients(

test/deprecated/prim/composite_ops/test_composite_batch_norm_deprecated.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,7 @@ def cal_static(inputs, running_mean, running_variance, weight, bias, mode=None):
207207
if mode:
208208
primapi.to_prim(blocks)
209209
fwd_ops_new = [op.type for op in blocks[0].ops]
210-
# Ensure that batch_norm is splitted into small ops
210+
# Ensure that batch_norm is split into small ops
211211
assert (
212212
'batch_norm' not in fwd_ops_new
213213
and 'reduce_mean' not in fwd_ops_new

test/deprecated/prim/composite_ops/test_composite_gelu_deprecated.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def cal_composite(self, inputs):
9090
primapi.to_prim(blocks)
9191

9292
fwd_ops_new = [op.type for op in blocks[0].ops]
93-
# Ensure that gelu is splitted into small ops
93+
# Ensure that gelu is split into small ops
9494
self.assertTrue('gelu' not in fwd_ops_new)
9595

9696
exe = paddle.static.Executor()

test/deprecated/prim/composite_ops/test_composite_gelu_grad_deprecated.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def cal_composite_grad(self, inputs):
9898
primapi.to_prim(blocks)
9999

100100
fwd_ops_new = [op.type for op in blocks[0].ops]
101-
# Ensure that gelu is splitted into small ops
101+
# Ensure that gelu is split into small ops
102102
self.assertTrue('gelu' not in fwd_ops_new)
103103

104104
z = paddle.static.gradients([y], x)

test/deprecated/prim/composite_ops/test_composite_layer_norm_deprecated.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ def cal_composite(self, inputs, norm_shape, weight, bias):
166166
primapi.to_prim(blocks)
167167

168168
fwd_ops_new = [op.type for op in blocks[0].ops]
169-
# Ensure that layer_norm is splitted into small ops
169+
# Ensure that layer_norm is split into small ops
170170
self.assertTrue('layer_norm' not in fwd_ops_new)
171171

172172
exe = paddle.static.Executor()
@@ -205,7 +205,7 @@ def cal2_composite(self, inputs, norm_shape, weight, bias):
205205
primapi.to_prim(blocks)
206206

207207
fwd_ops_new = [op.type for op in blocks[0].ops]
208-
# Ensure that layer_norm is splitted into small ops
208+
# Ensure that layer_norm is split into small ops
209209
self.assertTrue('layer_norm' not in fwd_ops_new)
210210

211211
exe = paddle.static.Executor()

test/deprecated/prim/composite_ops/test_composite_layer_norm_grad_deprecated.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ def static_comp_forward(self, inputs, norm_shape, weight, bias, y_g):
222222
primapi.to_prim(blocks)
223223

224224
fwd_ops_new = [op.type for op in blocks[0].ops]
225-
# Ensure that layer_norm is splitted into small ops
225+
# Ensure that layer_norm is split into small ops
226226
self.assertTrue('layer_norm' not in fwd_ops_new)
227227

228228
z = paddle.static.gradients([y], [x, w, b], y_grad)
@@ -273,7 +273,7 @@ def static_comp_forward_withNone(
273273
primapi.to_prim(blocks)
274274

275275
fwd_ops_new = [op.type for op in blocks[0].ops]
276-
# Ensure that layer_norm is splitted into small ops
276+
# Ensure that layer_norm is split into small ops
277277
self.assertTrue('layer_norm' not in fwd_ops_new)
278278

279279
z = paddle.static.gradients([y], x, y_grad)
@@ -663,7 +663,7 @@ def static_comp_forward(self, inputs, norm_shape, weight, bias, y_grad):
663663
primapi.to_prim(blocks)
664664

665665
fwd_ops_new = [op.type for op in blocks[0].ops]
666-
# Ensure that layer_norm is splitted into small ops
666+
# Ensure that layer_norm is split into small ops
667667
self.assertTrue('layer_norm' not in fwd_ops_new)
668668

669669
z = paddle.static.gradients([y], x, y_g)

test/deprecated/prim/composite_ops/test_composite_mean_deprecated.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def cal_composite(self, inputs):
9393
primapi.to_prim(blocks)
9494

9595
fwd_ops_new = [op.type for op in blocks[0].ops]
96-
# Ensure that reduce_mean is splitted into small ops
96+
# Ensure that reduce_mean is split into small ops
9797
self.assertTrue('reduce_mean' not in fwd_ops_new)
9898

9999
exe = paddle.static.Executor()

test/deprecated/prim/composite_ops/test_composite_mean_grad_deprecated.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ def cal_composite_grad(self, inputs):
9999
primapi.to_prim(blocks)
100100

101101
fwd_ops_new = [op.type for op in blocks[0].ops]
102-
# Ensure that reduce_mean is splitted into small ops
102+
# Ensure that reduce_mean is split into small ops
103103
self.assertTrue('reduce_mean' not in fwd_ops_new)
104104

105105
z = paddle.static.gradients([y], x)

0 commit comments

Comments
 (0)