6 changes: 3 additions & 3 deletions test/collective/README.md
@@ -7,14 +7,14 @@
the properties are the following:
* `name`: the test's name
* `os`: The supported operator system, ignoring case. If the test run in multiple operator systems, use ";" to split systems, for example, `apple;linux` means the test runs on both Apple and Linux. The supported values are `linux`,`win32` and `apple`. If the value is empty, this means the test runs on all operator systems.
-* `arch`: the device's architecture. similar to `os`, multiple values are splited by ";" and ignoring case. The supported architectures are `gpu`, `xpu` and `rocm`.
+* `arch`: the device's architecture. similar to `os`, multiple values are split by ";" and ignoring case. The supported architectures are `gpu`, `xpu` and `rocm`.
* `timeout`: timeout of a unittest, whose unit is second. Blank means default.
* `run_type`: run_type of a unittest. Supported values are `NIGHTLY`, `EXCLUSIVE`, `CINN`, `DIST`, `GPUPS`, `INFER`, `EXCLUSIVE:NIGHTLY`, `DIST:NIGHTLY`,which are case-insensitive.
* `launcher`: the test launcher.Supported values are test_runner.py, dist_test.sh and custom scripts' name. Blank means test_runner.py.
* `num_port`: the number of port used in a distributed unit test. Blank means automatically distributed port.
* `run_serial`: whether in serial mode. the value can be 1 or 0.Default (empty) is 0. Blank means default.
-* `ENVS`: required environments. multiple environments are splited by ";".
-* `conditions`: extra required conditions for some tests. The value is a list of boolean expression in cmake programmer, splited with ";". For example, the value can be `WITH_DGC;NOT WITH_NCCL` or `WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212`,The relationship between these expressions is a conjunction.
+* `ENVS`: required environments. multiple environments are split by ";".
+* `conditions`: extra required conditions for some tests. The value is a list of boolean expression in cmake programmer, split with ";". For example, the value can be `WITH_DGC;NOT WITH_NCCL` or `WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212`,The relationship between these expressions is a conjunction.

### step 3. Generate CMakeLists.txt
Run the cmd:
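For orientation, a complete entry combining these properties might look like the hypothetical row below (the header matches the property list above; the test name and every field value are illustrative assumptions, not taken from the repository's actual test list):

```csv
name,os,arch,timeout,run_type,launcher,num_port,run_serial,ENVS,conditions
test_collective_allreduce,linux,gpu,120,DIST,test_runner.py,2,0,NCCL_DEBUG=INFO;GLOG_v=2,WITH_NCCL
```

Blank fields fall back to the defaults described in the property list.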
4 changes: 2 additions & 2 deletions test/deprecated/legacy_test/test_group_norm_op_deprecated.py
@@ -1529,7 +1529,7 @@ def get_static_desire(self, place):
if core._is_fwd_prim_enabled():
    paddle.incubate.autograd.primapi.to_prim(mp.blocks)
fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that group_norm is splitted into small ops
+# Ensure that group_norm is split into small ops
assert 'group_norm' not in fwd_ops_new

grads = paddle.static.gradients([output], [input_, scale_, bias_])
@@ -1621,7 +1621,7 @@ def test_static_comp(self):
if core._is_fwd_prim_enabled():
    paddle.incubate.autograd.primapi.to_prim(mp.blocks)
fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that group_norm is splitted into small ops
+# Ensure that group_norm is split into small ops
assert 'group_norm' not in fwd_ops_new

grads = paddle.static.gradients(
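Every hunk in this PR touches the same test pattern, so a condensed, runnable sketch of it may help reviewers. This is not code from the diff: the softmax op, the shapes, and the private toggle `core._set_prim_forward_enabled` (which flips what `core._is_fwd_prim_enabled()` reports) are assumptions for illustration.

```python
import paddle
from paddle.base import core
from paddle.incubate.autograd import primapi

paddle.enable_static()
core._set_prim_forward_enabled(True)  # private toggle; an assumption, see lead-in

main = paddle.static.Program()
with paddle.static.program_guard(main):
    x = paddle.static.data('x', shape=[2, 4], dtype='float32')
    y = paddle.nn.functional.softmax(x)

    # The composite op is present before lowering.
    assert 'softmax' in [op.type for op in main.blocks[0].ops]

    # Rewrite composite ops into primitive ops, as the tests above do.
    if core._is_fwd_prim_enabled():
        primapi.to_prim(main.blocks)

    fwd_ops_new = [op.type for op in main.blocks[0].ops]
    # Ensure that softmax is split into small ops
    assert 'softmax' not in fwd_ops_new
```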
@@ -502,7 +502,7 @@ def get_static_desire(self, place):
if core._is_fwd_prim_enabled():
    paddle.incubate.autograd.primapi.to_prim(mp.blocks)
fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that instance_norm is splitted into small ops
+# Ensure that instance_norm is split into small ops
assert 'instance_norm' not in fwd_ops_new

grads = paddle.static.gradients([output], [input_, scale_, bias_])
@@ -586,7 +586,7 @@ def test_static_comp(self):
if core._is_fwd_prim_enabled():
    paddle.incubate.autograd.primapi.to_prim(mp.blocks)
fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that instance_norm is splitted into small ops
+# Ensure that instance_norm is split into small ops
assert 'instance_norm' not in fwd_ops_new

grads = paddle.static.gradients(
@@ -207,7 +207,7 @@ def cal_static(inputs, running_mean, running_variance, weight, bias, mode=None):
if mode:
    primapi.to_prim(blocks)
fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that batch_norm is splitted into small ops
+# Ensure that batch_norm is split into small ops
assert (
    'batch_norm' not in fwd_ops_new
    and 'reduce_mean' not in fwd_ops_new
@@ -90,7 +90,7 @@ def cal_composite(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that gelu is splitted into small ops
+# Ensure that gelu is split into small ops
self.assertTrue('gelu' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -98,7 +98,7 @@ def cal_composite_grad(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that gelu is splitted into small ops
+# Ensure that gelu is split into small ops
self.assertTrue('gelu' not in fwd_ops_new)

z = paddle.static.gradients([y], x)
@@ -166,7 +166,7 @@ def cal_composite(self, inputs, norm_shape, weight, bias):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that layer_norm is splitted into small ops
+# Ensure that layer_norm is split into small ops
self.assertTrue('layer_norm' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -205,7 +205,7 @@ def cal2_composite(self, inputs, norm_shape, weight, bias):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that layer_norm is splitted into small ops
+# Ensure that layer_norm is split into small ops
self.assertTrue('layer_norm' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -222,7 +222,7 @@ def static_comp_forward(self, inputs, norm_shape, weight, bias, y_g):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that layer_norm is splitted into small ops
+# Ensure that layer_norm is split into small ops
self.assertTrue('layer_norm' not in fwd_ops_new)

z = paddle.static.gradients([y], [x, w, b], y_grad)
@@ -273,7 +273,7 @@ def static_comp_forward_withNone(
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that layer_norm is splitted into small ops
+# Ensure that layer_norm is split into small ops
self.assertTrue('layer_norm' not in fwd_ops_new)

z = paddle.static.gradients([y], x, y_grad)
@@ -663,7 +663,7 @@ def static_comp_forward(self, inputs, norm_shape, weight, bias, y_grad):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that layer_norm is splitted into small ops
+# Ensure that layer_norm is split into small ops
self.assertTrue('layer_norm' not in fwd_ops_new)

z = paddle.static.gradients([y], x, y_g)
@@ -93,7 +93,7 @@ def cal_composite(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that reduce_mean is splitted into small ops
+# Ensure that reduce_mean is split into small ops
self.assertTrue('reduce_mean' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -99,7 +99,7 @@ def cal_composite_grad(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that reduce_mean is splitted into small ops
+# Ensure that reduce_mean is split into small ops
self.assertTrue('reduce_mean' not in fwd_ops_new)

z = paddle.static.gradients([y], x)
@@ -93,7 +93,7 @@ def cal_composite_grad(self, inputs):
paddle.incubate.autograd.primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that softmax is splitted into small ops
+# Ensure that softmax is split into small ops
self.assertTrue('softmax' not in fwd_ops_new)

z = paddle.static.gradients([y], x)
@@ -88,7 +88,7 @@ def cal_composite(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that softmax is splitted into small ops
+# Ensure that softmax is split into small ops
self.assertTrue('softmax' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -94,7 +94,7 @@ def cal_composite_grad(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that softmax is splitted into small ops
+# Ensure that softmax is split into small ops
self.assertTrue('softmax' not in fwd_ops_new)

z = paddle.static.gradients([y], x)
4 changes: 2 additions & 2 deletions test/deprecated/prim/prim/flags/test_prim_flags_deprecated.py
@@ -93,7 +93,7 @@ def not_in_blacklist(self):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that softmax is splitted into small ops
+# Ensure that softmax is split into small ops
self.assertTrue('softmax' not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -122,7 +122,7 @@ def in_blacklist(self):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that softmax is splitted into small ops
+# Ensure that softmax is split into small ops
self.assertTrue('softmax' in fwd_ops_new)

exe = paddle.static.Executor()
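The two hunks above differ only in whether the op was blacklisted before lowering: a blacklisted softmax survives `to_prim` (hence the `in fwd_ops_new` assertion), an unlisted one is decomposed. A hedged sketch of the toggle, assuming `core._set_prim_forward_blacklist`, the private helper Paddle's prim-flag tests use elsewhere; it is not shown in this diff:

```python
from paddle.base import core

# Assumption: private helper used by Paddle's own prim-flag tests.
core._set_prim_forward_blacklist("softmax")
# With the entry set, to_prim leaves softmax intact:
#     'softmax' in fwd_ops_new
# Without it, softmax is decomposed into primitive ops:
#     'softmax' not in fwd_ops_new
```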
2 changes: 1 addition & 1 deletion test/deprecated/prim/process/test_copy_op_deprecated.py
@@ -53,7 +53,7 @@ def cal_composite(self, inputs):
primapi.to_prim(blocks)

fwd_ops_new = [op.type for op in blocks[0].ops]
-# Ensure that dropout is not splitted into small ops
+# Ensure that dropout is not split into small ops
self.assertTrue('dropout' in fwd_ops_new)

exe = paddle.static.Executor()
4 changes: 2 additions & 2 deletions test/prim/pir_prim/test_pir_prim_flags.py
@@ -43,7 +43,7 @@ def not_in_blacklist(self, op_name):
z = decomp.decompose(main_program, [z])

fwd_ops_new = [op.name() for op in main_program.global_block().ops]
-# Ensure that tanh is splitted into small ops
+# Ensure that tanh is split into small ops
self.assertTrue(op_name not in fwd_ops_new)

exe = paddle.static.Executor()
@@ -72,7 +72,7 @@ def in_blacklist(self, op_name):
z = decomp.decompose(main_program, [z])

fwd_ops_new = [op.name() for op in main_program.global_block().ops]
-# Ensure that tanh is splitted into small ops
+# Ensure that tanh is split into small ops
self.assertTrue(op_name in fwd_ops_new)

exe = paddle.static.Executor()
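Under PIR the same check flows through `decomp.decompose` and `op.name()` rather than `primapi.to_prim` and `op.type`, as the hunks above show. A minimal sketch, assuming `from paddle.decomposition import decomp`, the `paddle.pir_utils.IrGuard` context manager, and the `pd_op.` name prefix (none of which appear in this diff):

```python
import paddle
from paddle.base import core
from paddle.decomposition import decomp

paddle.enable_static()
with paddle.pir_utils.IrGuard():  # build the program in PIR form (assumption)
    core._set_prim_forward_enabled(True)  # same private toggle as above
    main_program = paddle.static.Program()
    with paddle.static.program_guard(main_program):
        x = paddle.static.data('x', shape=[2, 4], dtype='float32')
        z = paddle.tanh(x)
        z = decomp.decompose(main_program, [z])  # returns decomposed outputs

        fwd_ops_new = [op.name() for op in main_program.global_block().ops]
        # PIR op names carry a dialect prefix, e.g. 'pd_op.tanh'
        assert 'pd_op.tanh' not in fwd_ops_new
```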