Commit 67ed7e1

[hybrid] [npu] fit npu nan/inf check (#35171)
1 parent 6e638d7 commit 67ed7e1

File tree

1 file changed: +7 -1 lines changed


python/paddle/fluid/optimizer.py

Lines changed: 7 additions & 1 deletion
@@ -5323,7 +5323,13 @@ def _accumulate_gradients_with_fuse(self, main_block, fp16, fused_size):
                 "copy_data": False,
                 "use_align": True,
                 "dtype": grads[0].dtype,
-                self._op_role_key: self._op_role.Backward
+                self._op_role_key: self._op_role.Backward,
+                # On NPU, the nan/inf check logic differs from that on GPU.
+                # If there are uninitialized sections in the fused var and the
+                # values in those sections happen to be nan/inf, they will
+                # trigger the check. To avoid these spurious triggers, set a constant on NPU.
+                "set_constant": core.is_compiled_with_npu(),
+                "constant": float(0.0),
             })
         offset += 1
         # For the gradient_merged_fused_var, given a init value during the coalesce op
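
For context, below is a minimal sketch of how these attributes feed into the coalesce_tensor op that builds the fused gradient buffer. The function name, the variable names (main_block, grads, fused_grad), and the input/output keys are assumptions for illustration; only the attribute keys shown in the diff come from the commit.

from paddle.fluid import core

def append_fused_grad_coalesce(main_block, grads, fused_grad,
                               op_role_key, backward_role):
    # Sketch only: fuse a list of gradient vars into one contiguous buffer.
    main_block.append_op(
        type='coalesce_tensor',
        inputs={'Input': grads},
        outputs={'Output': grads, 'FusedOutput': fused_grad},
        attrs={
            "copy_data": False,
            "use_align": True,
            "dtype": grads[0].dtype,
            op_role_key: backward_role,
            # Zero-fill the fused buffer when compiled for NPU so uninitialized
            # gaps cannot hold nan/inf values that would trip the nan/inf check.
            "set_constant": core.is_compiled_with_npu(),
            "constant": float(0.0),
        })

Note that core.is_compiled_with_npu() evaluates to False in non-NPU builds, so set_constant stays False there and other devices keep the previous behavior without the extra zero-fill.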
