You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
help="Decides whether (true|false) intermediate activations are freed during the forward pass, and a checkpoint is left as a placeholder. (useful only when `use_fsdp` flag is passed).",
613
606
)
614
607
615
-
# tp args
616
-
tp_args=parser.add_argument_group("TP Arguments", "Arguments related to Tensor Parallelism using PyTorch.")
617
-
tp_args.add_argument(
618
-
"--tp_size",
619
-
default=1,
620
-
type=int,
621
-
help="PyTorch Tensor Parallelism (TP) degree. Set a value greater than 1 to activate. (useful only when `use_tp` flag is passed)",
622
-
)
623
-
624
608
# megatron_lm args
625
609
megatron_lm_args=parser.add_argument_group("Megatron-LM Arguments", "Arguments related to Megatron-LM.")
0 commit comments