Skip to content

Commit 79bba4f

Browse files
authored
update conf for sharding overlap in auto_parallel static (#9456)
1 parent 619c1b9 commit 79bba4f

File tree

3 files changed

+3
-3
lines changed

3 files changed

+3
-3
lines changed

tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
"pipeline_parallel_degree": 4,
1111
"sharding": "stage1",
1212
"data_parallel_config": "enable_allreduce_avg_in_gradinent_scale gradient_sync_after_accumulate",
13-
"sharding_parallel_config": "enable_stage2_overlap",
13+
"sharding_parallel_config": "enable_stage1_overlap",
1414
"tensor_parallel_config": "enable_mp_async_allreduce",
1515
"pipeline_parallel_config": "enable_send_recv_overlap enable_split_backward",
1616
"pipeline_schedule_mode": "VPP",

tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_70b/pretrain-llama2_70b.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252
"virtual_pp_degree": 5,
5353
"pipeline_schedule_mode": "VPP",
5454
"data_parallel_config": "enable_allreduce_avg_in_gradinent_scale gradient_sync_after_accumulate",
55-
"sharding_parallel_config": "split_param enable_stage1_overlap",
55+
"sharding_parallel_config": "enable_stage1_overlap",
5656
"tensor_parallel_config": "enable_mp_async_allreduce",
5757
"max_seq_length": 4096,
5858
"to_static": true,

tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
"pipeline_parallel_degree": 1,
1111
"sharding": "stage1",
1212
"data_parallel_config": "enable_allreduce_avg_in_gradinent_scale gradient_sync_after_accumulate",
13-
"sharding_parallel_config": "enable_stage2_overlap",
13+
"sharding_parallel_config": "enable_stage1_overlap",
1414
"tensor_parallel_config": "enable_mp_async_allreduce",
1515
"pipeline_parallel_config": "",
1616
"virtual_pp_degree": 1,

0 commit comments

Comments (0)