Skip to content

Commit db6dd6f

Browse files
authored
【Hackathon 6th Fundable Projects 4 No.3】remove ExecutionStrategy (#65077)
* remove ExecutionStrategy
* resolve conflict
1 parent 9f79511 commit db6dd6f

File tree

5 files changed

+2
-63
lines changed

5 files changed

+2
-63
lines changed

paddle/fluid/framework/details/execution_strategy.h

Lines changed: 0 additions & 51 deletions
This file was deleted.

paddle/fluid/framework/details/op_handle_base.h

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,6 @@
1919
#include <unordered_set>
2020
#include <vector>
2121

22-
#include "paddle/fluid/framework/details/execution_strategy.h"
2322
#include "paddle/fluid/framework/details/var_handle.h"
2423
#include "paddle/fluid/framework/ir/node.h"
2524
#include "paddle/fluid/platform/device_context.h"

paddle/fluid/framework/distributed_strategy.proto

Lines changed: 0 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -183,13 +183,6 @@ message BuildStrategy {
183183
optional bool fuse_resunit = 21 [ default = false ];
184184
}
185185

186-
message ExecutionStrategy {
187-
optional int32 num_threads = 1 [ default = 1 ];
188-
optional int32 num_iteration_per_drop_scope = 2 [ default = 10 ];
189-
optional int32 num_iteration_per_run = 3 [ default = 1 ];
190-
optional bool use_thread_barrier = 4 [ default = false ];
191-
}
192-
193186
message GradientScaleConfig {
194187
// Optional value ['avg', 'sum', 'customized']
195188
// If avg, loss@grad will be divided by the number of devices,
@@ -428,7 +421,6 @@ message DistributedStrategy {
428421
optional QatConfig qat_configs = 117;
429422

430423
optional BuildStrategy build_strategy = 201;
431-
optional ExecutionStrategy execution_strategy = 202;
432424
optional GradientScaleConfig gradient_scale_configs = 203;
433425
}
434426

paddle/fluid/framework/ir/memory_optimize_pass/memory_reuse_pass.cc

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -238,7 +238,6 @@ bool MemoryReusePass::IsInVarReusable(const details::VarHandle &in_var) const {
238238
* - it is the first version var. Otherwise, the var may be overwritten
239239
* in the second batch, which results in wrong calculation result.
240240
* It is critical especially when
241-
* ExecutionStrategy::num_iteration_per_drop_scope_ > 1.
242241
* - it has not reused other var's memory. It is not necessary to do memory
243242
* reuse twice for the same var.
244243
* - it is not a persistable var.

python/paddle/distributed/fleet/base/distributed_strategy.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -185,8 +185,8 @@ def __init__(self):
185185
186186
DistributedStrategy can be serialized into protobuf file or deserialized from protobuf file
187187
188-
Users who run local training usually configure BuildStrategy and ExecutionStrategy, and
189-
DistributedStrategy supports configurations from BuildStrategy and ExecutionStrategy
188+
Users who run local training usually configure BuildStrategy, and
189+
DistributedStrategy supports configurations from BuildStrategy.
190190
191191
"""
192192
self.strategy = distributed_strategy_pb2.DistributedStrategy()

0 commit comments

Comments (0)