1 parent 175bfcd commit 7e8a2c2
src/transformers/trainer.py
@@ -5014,7 +5014,7 @@ def create_accelerator_and_postprocess(self):
         # deepspeed and accelerate flags covering both trainer args and accelerate launcher
         self.is_deepspeed_enabled = getattr(self.accelerator.state, "deepspeed_plugin", None) is not None
         self.is_fsdp_enabled = getattr(self.accelerator.state, "fsdp_plugin", None) is not None
-        self.is_tp_enabled = getattr(self.accelerator.state, "tp_plugin", None) is not None
+        self.is_tp_enabled = getattr(self.accelerator.state, "torch_tp_plugin", None) is not None
         # post accelerator creation setup
         if self.is_fsdp_enabled:
             fsdp_plugin = self.accelerator.state.fsdp_plugin
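
For context, the trainer decides which parallelism mode is active by probing the accelerator state for a plugin attribute; this commit only renames the attribute it probes for tensor parallelism from "tp_plugin" to "torch_tp_plugin". The following is a minimal, self-contained sketch of that getattr-based detection pattern, using a stand-in state object rather than the real AcceleratorState from accelerate (the helper name and the fake state are illustrative assumptions, not part of the commit):

    # Sketch only: stand-in for the getattr-based plugin detection shown in the diff.
    from types import SimpleNamespace

    def detect_parallelism_flags(state):
        # Each flag is True only when the corresponding plugin attribute exists and is not None.
        return {
            "deepspeed": getattr(state, "deepspeed_plugin", None) is not None,
            "fsdp": getattr(state, "fsdp_plugin", None) is not None,
            "tp": getattr(state, "torch_tp_plugin", None) is not None,  # attribute renamed in this commit
        }

    # Example: a fake state carrying only a torch_tp_plugin attribute.
    fake_state = SimpleNamespace(torch_tp_plugin=object())
    print(detect_parallelism_flags(fake_state))  # {'deepspeed': False, 'fsdp': False, 'tp': True}

Because the lookup uses getattr with a None default, an older accelerate version that lacks the renamed attribute simply yields False rather than raising an AttributeError.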