
Commit 7eb71dd

[CodeStyle][Ruff][BUAA][D-[1-6]] Fix ruff RUF015 diagnostic for 6 files in python/paddle/ (#67225)
1 parent 8ce0089

6 files changed: +12 -12 lines changed

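Every hunk below applies the same RUF015 fix: Ruff flags list(iterable)[0], which materializes a full list just to read its first element, and suggests next(iter(iterable)), which pulls a single item from the iterator and stops. A minimal standalone sketch of the pattern (the dict contents are illustrative, not taken from the Paddle sources):

# Before: O(n) copy of all keys, then index element 0.
dispatch_kernel = {"pd_op.add": "add_kernel"}
first_key = list(dispatch_kernel.keys())[0]

# After: O(1), consumes only the first item of the key iterator.
first_key = next(iter(dispatch_kernel.keys()))
assert first_key == "pd_op.add"

One behavioral difference worth noting: on an empty iterable, next(iter(...)) raises StopIteration where list(...)[0] raises IndexError. The call sites touched in this commit guard with an assert or an explicit length check first, so the two forms are equivalent at these sites.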

paddle/fluid/pir/dialect/op_generator/api_gen.py

Lines changed: 1 addition & 1 deletion
@@ -906,7 +906,7 @@ def _gen_one_impl(
         )

         kernel_name = (
-            list(dispatch_kernel.keys())[0]
+            next(iter(dispatch_kernel.keys()))
             if dispatch_kernel and len(dispatch_kernel.keys()) == 1
             else op_name
         )

python/paddle/base/dygraph/tensor_patch_methods.py

Lines changed: 1 addition & 1 deletion
@@ -763,7 +763,7 @@ def get_device_dtype_from_tensor(other):
         if len(invalid_keys) != 0:
             raise TypeError(
                 "to() got an unexpected keyword argument "
-                + list(invalid_keys)[0]
+                + next(iter(invalid_keys))
             )
         if size_args > 0:
             if isinstance(args[0], paddle.Tensor):

python/paddle/base/dygraph/tracer.py

Lines changed: 5 additions & 5 deletions
@@ -174,7 +174,7 @@ def eager_legacy_trace_op(

     if op_type == 'load_combine':
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             returns[j]._share_underline_tensor_to(outputs[key][j])
         return
@@ -200,12 +200,12 @@ def eager_legacy_trace_op(
         )
     elif isinstance(returns, list):
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             outputs[key][j].reconstruct_from_(returns[j], False)
     else:
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         if isinstance(outputs[key], list):
             outputs[key][0].reconstruct_from_(returns, False)
         else:
@@ -285,12 +285,12 @@ def eager_trace_op(
         outputs[retname][0].reconstruct_from_(returns[i], False)
     elif isinstance(returns, list):
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         for j in range(len(returns)):
             outputs[key][j].reconstruct_from_(returns[j], False)
     else:
         assert len(outputs.keys()) == 1
-        key = list(outputs.keys())[0]
+        key = next(iter(outputs.keys()))
         if isinstance(outputs[key], list):
             outputs[key][0].reconstruct_from_(returns, False)
         else:

python/paddle/base/framework.py

Lines changed: 2 additions & 2 deletions
@@ -3367,8 +3367,8 @@ def find_name(var_list, name):
         if type in special_op_attrs:
             attrs = special_op_attrs.get(type, [])
             for attr in attrs:
-                a_name = list(attr.keys())[0]
-                default_value = list(attr.values())[0]
+                a_name = next(iter(attr.keys()))
+                default_value = next(iter(attr.values()))
                 if (
                     a_name in op_attrs.keys()
                     and default_value != op_attrs[a_name]

python/paddle/distributed/auto_parallel/static/reshard.py

Lines changed: 1 addition & 1 deletion
@@ -1511,7 +1511,7 @@ def find_op_desc_seq(
             if is_union_process_mesh_tensor:
                 assert (
                     len(set(source_dims_mapping)) == 1
-                    and list(set(source_dims_mapping))[0] == -1
+                    and next(iter(set(source_dims_mapping))) == -1
                 )
                 if set(target_process_group).intersection(
                     set(source_process_group)
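A note on this hunk: the operand is a set, whose iteration order is arbitrary, so next(iter(s)) only denotes a well-defined value because the enclosing assert pins the set to a single element. A sketch with illustrative values:

# Safe only because the assert guarantees len(s) == 1,
# making the "first" element the only element.
s = set([-1, -1, -1])  # collapses to {-1}
assert len(s) == 1 and next(iter(s)) == -1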

python/paddle/distributed/fleet/utils/hybrid_parallel_inference.py

Lines changed: 2 additions & 2 deletions
@@ -273,11 +273,11 @@ def _init_communication_group(self):
         # Create mp rings
         if self.num_mp > 1:
             mp_endpoints = [self.endpoints[mp_idx] for mp_idx in self.mp_group]
-            mp_rank = [
+            mp_rank = next(
                 idx
                 for idx, mp_idx in enumerate(self.mp_group)
                 if mp_idx == self.rank
-            ][0]
+            )
             collective_helper._init_communicator(
                 self._startup_program,
                 self.current_endpoint,
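This last file is the one place where RUF015 rewrites a list comprehension rather than a .keys() lookup: passing a generator expression straight to next() short-circuits at the first match instead of building the whole filtered list. A standalone sketch with hypothetical rank values (not the actual group layout):

# Find this rank's position within its model-parallel group.
mp_group = [2, 5, 8, 11]  # hypothetical global ranks in one mp ring
rank = 8                  # hypothetical current global rank

# Before: filter the whole group into a list, then index it.
mp_rank = [idx for idx, mp_idx in enumerate(mp_group) if mp_idx == rank][0]

# After: next() consumes the generator only up to the first hit.
mp_rank = next(idx for idx, mp_idx in enumerate(mp_group) if mp_idx == rank)
assert mp_rank == 2

As before, next() on an exhausted generator raises StopIteration, so this form assumes the current rank is always present in self.mp_group.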
