Skip to content

Commit 1ee6dd4

Browse files
committed
make sharding_first by default
1 parent 2064039 commit 1ee6dd4

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

paddlenlp/trainer/training_args.py

Lines changed: 2 additions & 2 deletions

Original file line number | Diff line number | Diff line change
@@ -787,7 +787,7 @@ class TrainingArguments:
787787
"Following options are supported:\n"
788788
"- pp_first. the topo order is dp, pp, sharding, mp \n"
789789
"- sharding_first. the topo order is dp, sharding, pp, mp \n"
790-
"Default is None, for pp_first"
790+
"Default is None, for sharding_first"
791791
)
792792
},
793793
)
@@ -2082,7 +2082,7 @@ def _post_init_parallel_degree(self):
20822082
self.expert_tensor_parallel_degree = -1
20832083

20842084
if self.hybrid_parallel_topo_order is None:
2085-
self.hybrid_parallel_topo_order = "pp_first"
2085+
self.hybrid_parallel_topo_order = "sharding_first"
20862086
assert self.hybrid_parallel_topo_order in ["pp_first", "sharding_first"]
20872087

20882088
if self.use_hybrid_parallel and self.enable_auto_parallel:

0 commit comments

Comments (0)