
Commit 99f730f

[CodeStyle][Typos][A-[21-30]] Fix typos (allctor,almostly,alog,Aread,Allways,alway,ane,adn,expaned,annotaions,annotaion,architecure,architecuture,aer) (#69644)
1 parent bf458f3 commit 99f730f

14 files changed (+20 -29 lines)

_typos.toml

Lines changed: 1 addition & 10 deletions
@@ -19,6 +19,7 @@ pash = 'pash'
 astroid = 'astroid'
 eles = 'eles'
 builer = 'builer'
+anc = 'anc'
 
 # These words need to be fixed
 ontext = 'ontext'
@@ -71,7 +72,6 @@ bootom = 'bootom'
 Vetical = 'Vetical'
 mantain = 'mantain'
 patition = 'patition'
-almostly = 'almostly'
 Dowloading = 'Dowloading'
 Prepar = 'Prepar'
 precent = 'precent'
@@ -122,7 +122,6 @@ decalared = 'decalared'
 coalesc = 'coalesc'
 graident = 'graident'
 infered = 'infered'
-Allways = 'Allways'
 substitue = 'substitue'
 Ouput = 'Ouput'
 witk = 'witk'
@@ -132,7 +131,6 @@ staticly = 'staticly'
 emited = 'emited'
 repalce = 'repalce'
 GARD = 'GARD'
-annotaions = 'annotaions'
 gloabl = 'gloabl'
 devide = 'devide'
 zerp = 'zerp'
@@ -225,8 +223,6 @@ Rto = 'Rto'
 tunning = 'tunning'
 kerenl = 'kerenl'
 Temperarily = 'Temperarily'
-alway = 'alway'
-ane = 'ane'
 cliping = 'cliping'
 DEIVCE = 'DEIVCE'
 neeed = 'neeed'
@@ -540,7 +536,6 @@ instrinsics = 'instrinsics'
 outputing = 'outputing'
 hadler = 'hadler'
 qucik = 'qucik'
-alog = 'alog'
 exsit = 'exsit'
 deciamls = 'deciamls'
 uncorrectly = 'uncorrectly'
@@ -643,7 +638,6 @@ insid = 'insid'
 coodinate = 'coodinate'
 usefull = 'usefull'
 sqaure = 'sqaure'
-adn = 'adn'
 intialize = 'intialize'
 Taget = 'Taget'
 parm = 'parm'
@@ -729,7 +723,6 @@ compitable = 'compitable'
 comple = 'comple'
 dealed = 'dealed'
 ser = 'ser'
-anc = 'anc'
 contraints = 'contraints'
 propogated = 'propogated'
 beacuse = 'beacuse'
@@ -794,7 +787,6 @@ imformation = 'imformation'
 kernerl = 'kernerl'
 Boardcast = 'Boardcast'
 Greate = 'Greate'
-Alread = 'Alread'
 unkown = 'unkown'
 recevied = 'recevied'
 Normlized = 'Normlized'
@@ -803,7 +795,6 @@ orginal = 'orginal'
 Stati = 'Stati'
 Destory = 'Destory'
 seperately = 'seperately'
-alloctor = 'alloctor'
 fullfill = 'fullfill'
 Substitude = 'Substitude'
 producted = 'producted'

paddle/cinn/operator_fusion/graph_transformer/search_algorithm.h

Lines changed: 2 additions & 2 deletions
@@ -140,8 +140,8 @@ struct SearchAlgorithm<ReverseTopoNodePairPattern,
 template <typename Kind, typename GraphMatcher, typename GraphOperation>
 void GraphTransformer(PatternGraph* graph) {
   VLOG(4) << "Start GraphTransformer...";
-  auto alog = SearchAlgorithm<Kind, GraphMatcher, GraphOperation>(graph);
-  alog();
+  auto algo = SearchAlgorithm<Kind, GraphMatcher, GraphOperation>(graph);
+  algo();
 }
 
 }  // namespace cinn::fusion

paddle/fluid/operators/fused/fused_attention_op.cc

Lines changed: 2 additions & 2 deletions
@@ -389,8 +389,8 @@ class FusedAttentionOpMaker : public framework::OpProtoAndCheckerMaker {
               "The qkv_w shape is (h, 3h), do transpose to it.")
         .SetDefault(false);
     AddAttr<bool>("pre_layer_norm",
-                  "if true, the attention op uses pre_layer_norm architecure, "
-                  "else, uses post_layer_norm architecuture. "
+                  "if true, the attention op uses pre_layer_norm architecture, "
+                  "else, uses post_layer_norm architecture. "
                   "[default false].")
         .SetDefault(false);
     AddAttr<float>("epsilon",
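The attribute description fixed above contrasts two normalization orderings. As a rough illustration only, the sketch below shows what pre_layer_norm versus post_layer_norm usually means for a transformer block; it is not the fused kernel's actual code path, and attn and layer_norm are placeholder callables.

# Rough sketch of the two orderings named by the "pre_layer_norm" attribute;
# placeholder callables, not the fused_attention kernel's real implementation.
def attention_block(x, attn, layer_norm, pre_layer_norm=False):
    if pre_layer_norm:
        # pre_layer_norm: normalize, attend, then add the residual
        return x + attn(layer_norm(x))
    # post_layer_norm: attend, add the residual, then normalize
    return layer_norm(x + attn(x))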

paddle/fluid/platform/init.cc

Lines changed: 1 addition & 1 deletion
@@ -294,7 +294,7 @@ void SignalHandle(const char *data, int size) {
   // NOTE1: The glog FailureSignalHandler dumped messages
   // are deal with line by line
   auto signal_msg_dumper_ptr = SignalMessageDumper::Instance().Get();
-  // NOTE2: we only deal with the time info ane signal info,
+  // NOTE2: we only deal with the time info and signal info,
   // the stack trace will generated by paddle self
   if (StartsWith(data, "*** Aborted at")) {
     *signal_msg_dumper_ptr << "\n [TimeInfo: " << std::string(data, size - 1)

paddle/fluid/pybind/pybind.cc

Lines changed: 1 addition & 1 deletion
@@ -2476,7 +2476,7 @@ All parameter, weight, gradient are variables in Paddle.
 
   m.def("_is_program_version_supported", IsProgramVersionSupported);
 #if defined(PADDLE_WITH_CUDA)
-  m.def("alloctor_dump", [](const phi::GPUPlace &place) {
+  m.def("allocator_dump", [](const phi::GPUPlace &place) {
     auto allocator = std::dynamic_pointer_cast<
         paddle::memory::allocation::AutoGrowthBestFitAllocator>(
         paddle::memory::allocation::AllocatorFacade::Instance()
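Because this hunk renames a Python-facing binding, callers of the old name will stop resolving. A hedged usage sketch follows; it assumes a CUDA build and that the binding is reachable via paddle.base.core like other m.def registrations, an assumption this diff does not itself confirm.

# Hypothetical usage sketch; assumes PADDLE_WITH_CUDA and the paddle.base.core path.
import paddle
from paddle.base import core

core.allocator_dump(paddle.CUDAPlace(0))  # exposed as core.alloctor_dump before this commit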

paddle/phi/infermeta/spmd_rules/cross_entropy_with_softmax.cc

Lines changed: 2 additions & 2 deletions
@@ -189,7 +189,7 @@ SpmdInfo CrossEntropyWithSoftmaxInferSpmdBase(const DistMetaTensor& x,
   // todo if softmax_normalize axis is sharded, notify downstream phi api to
   // select c_softmax_with_entropy_kernel.
 
-  // according to the phi api implementation, the softmax_out tensor will alway
+  // according to the phi api implementation, the softmax_out tensor will always
   // be generated not matter the value of use_softmax.
   return {{x_dist_attr_dst, label_dist_attr_dst},
           {softmax_out_dist_attr_dst, loss_dist_attr_dst}};
@@ -363,7 +363,7 @@ SpmdInfo CrossEntropyWithSoftmaxInferSpmdReverse(
           << str_join(x_dims_mapping) << "]\nLabel dims_mapping: ["
           << str_join(label_dims_mapping) << "]\n\n";
 
-  // according to the phi api implementation, the softmax_out tensor will alway
+  // according to the phi api implementation, the softmax_out tensor will always
   // be generated not matter the value of use_softmax.
   return {{x_dist_attr, label_dist_attr},
           {s_out_dist_attr_dst, loss_dist_attr_dst}};

python/paddle/distributed/passes/auto_parallel_fp16.py

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ def _keep_fp32_input(op, in_name):
 
 # TODO check if bf16 and fp16 still share the same logic
 def _keep_fp32_output(op, out_name):
-    # TODO(lizhiyu02): Support 'custom_white_list' adn 'custom_black_list' in amp_options
+    # TODO(lizhiyu02): Support 'custom_white_list' and 'custom_black_list' in amp_options
    if not op.amp_options.enable:
        return True
    op_type = op.type

python/paddle/incubate/asp/supported_layer_list.py

Lines changed: 1 addition & 1 deletion
@@ -72,7 +72,7 @@ def _default_pruning(
     # In sparse training, layer weight matrices is viewed sparse matrix A, so
     # the math formula should be 'Act(WX + b)'. However, default formula in PaddlePaddle
     # is 'Act(XW + b)'. For enabling SPMMA, weights and inputs should be transposed
-    # for computing, Act( (W^T X^T)^T + b). Therefore, we have to prune alog k dimension
+    # for computing, Act( (W^T X^T)^T + b). Therefore, we have to prune along k dimension
     # of W^T, which is m dimension of W. Moreover, all mask generating functions in
     # asp/utils is row-major pruning. That is the reason we have to transpose weight
     # matrices before invoking create_mask. Then we transpose the result mask to make
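The comment fixed above describes pruning through a transpose, mask, transpose-back sequence. The NumPy sketch below illustrates that idea only; prune_rows_2_4 is a hypothetical stand-in for the row-major mask generators in asp/utils (a 2:4 magnitude pattern is assumed), and the (in_features, out_features) weight layout follows the 'Act(XW + b)' convention the comment cites.

import numpy as np

def prune_rows_2_4(mat):
    # Hypothetical stand-in for the asp/utils mask generators: row-major 2:4
    # pruning that keeps the 2 largest magnitudes in every group of 4 values
    # along a row.  Assumes the column count is a multiple of 4.
    assert mat.shape[1] % 4 == 0
    mask = np.zeros_like(mat)
    for i in range(mat.shape[0]):
        for j in range(0, mat.shape[1], 4):
            keep = np.argsort(np.abs(mat[i, j:j + 4]))[-2:]
            mask[i, j + keep] = 1
    return mask

# A dense-layer weight W stored as (in_features, out_features), i.e. y = Act(xW + b).
# Transpose so pruning runs along the reduction dimension, build the
# row-major mask, then transpose the mask back to W's layout.
W = np.random.randn(8, 16)
mask = prune_rows_2_4(W.T).T
W_pruned = W * mask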

python/paddle/nn/utils/transform_parameters.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@
 from paddle._typing import ShapeLike
 
 
-# input==output, inplace strategy of reshape has no cost almostly
+# input==output, inplace strategy of reshape has no cost almost
 def _inplace_reshape_dygraph(x: Tensor, shape: ShapeLike) -> None:
     x_shape = _create_tensor(dtype='int64')
     if in_dygraph_mode():

python/paddle/optimizer/optimizer.py

Lines changed: 1 addition & 1 deletion
@@ -1157,7 +1157,7 @@ def _create_optimization_pass(
         # _create_accumulators method if it needs to create accumulators
         # for parameters and extend _finish_update method to add custom ops.
 
-        # Allways called under program_guard use global block as loss block
+        # Always called under program_guard use global block as loss block
         # But if current block is in control flow, append optimize op in the
         # grad block of current block
 