1 file changed: +3 -2 lines changed

@@ -567,7 +567,7 @@ class TrainingArguments:
             used when the xla flag is set to true, and an auto wrapping policy is specified through
             fsdp_min_num_params or fsdp_transformer_layer_cls_to_wrap.
         tp_size (`int`, *optional*):
-            Use tp_size to enable pytorch 2.0 tensor parallelism. Set a value greater than 1 to activate TP. The same is
+            Use tp_size to enable PyTorch tensor parallelism. Set a value greater than 1 to activate TP. The same is
             used to prepare device mesh internally.
         deepspeed (`str` or `dict`, *optional*):
             Use [Deepspeed](https://github.com/microsoft/deepspeed). This is an experimental feature and its API may
@@ -1246,7 +1246,7 @@ class TrainingArguments:
         default=0,
         metadata={
             "help": (
-                "Use tp_size to enable pytorch 2.0 tensor parallelism."
+                "Use tp_size to enable pytorch tensor parallelism."
                 "Set a value greater than 1 to activate TP."
                 "The same is used to prepare device mesh internally."
             )
@@ -1971,6 +1971,7 @@ def __post_init__(self):

         if self.tp_size > 1:
             os.environ["ACCELERATE_USE_TP"] = "true"
+            os.environ["TP_SIZE"] = str(self.tp_size)
         # accelerate integration for FSDP
         if len(self.fsdp) > 0 and not self.fsdp_config["xla"]:
             os.environ["ACCELERATE_USE_FSDP"] = "true"
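
For context, a minimal sketch of how these pieces fit together (not part of the diff). It assumes a multi-GPU job launched with `torchrun`; the `output_dir` and `tp_size=2` values are illustrative, and the device-mesh lines only approximate what "prepare device mesh internally" might mean, they are not the actual Accelerate integration.

    import os
    from transformers import TrainingArguments

    # Setting tp_size > 1 activates tensor parallelism; per this diff,
    # __post_init__ now exports both settings for downstream tooling:
    #   os.environ["ACCELERATE_USE_TP"] == "true"
    #   os.environ["TP_SIZE"] == "2"
    args = TrainingArguments(output_dir="tp_run", tp_size=2)

    # Rough sketch of what a consumer of TP_SIZE could do to build the
    # device mesh (assumes a recent PyTorch with init_device_mesh; this is
    # NOT the actual Accelerate code path):
    from torch.distributed.device_mesh import init_device_mesh

    tp_size = int(os.environ.get("TP_SIZE", "1"))
    if tp_size > 1:
        mesh = init_device_mesh("cuda", (tp_size,), mesh_dim_names=("tp",))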