mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-06 12:20:52 +01:00
[tp] additional doc fixes (#94786)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/94786 Approved by: https://github.com/fduwjj
This commit is contained in:
parent
1f06a71797
commit
7522ca55f1
|
|
@ -4,8 +4,9 @@
|
||||||
Tensor Parallelism - torch.distributed.tensor.parallel
|
Tensor Parallelism - torch.distributed.tensor.parallel
|
||||||
======================================================
|
======================================================
|
||||||
|
|
||||||
Tensor Parallelism(TP) is built on top of DistributedTensor(DTensor) and
|
Tensor Parallelism(TP) is built on top of the PyTorch DistributedTensor
|
||||||
provides several Parallelism styles: Rowwise, Colwise and Pairwise Parallelism.
|
(`DTensor <https://github.com/pytorch/pytorch/blob/master/torch/distributed/_tensor/README.md>`__)
|
||||||
|
and provides several parallelism styles: Rowwise, Colwise and Pairwise Parallelism.
|
||||||
|
|
||||||
.. warning ::
|
.. warning ::
|
||||||
Tensor Parallelism APIs are experimental and subject to change.
|
Tensor Parallelism APIs are experimental and subject to change.
|
||||||
|
|
|
||||||
|
|
@ -68,7 +68,7 @@ def parallelize_module( # type: ignore[return]
|
||||||
|
|
||||||
Example::
|
Example::
|
||||||
>>> # xdoctest: +SKIP("distributed")
|
>>> # xdoctest: +SKIP("distributed")
|
||||||
>>> from torch.distributed._tensor.parallel import parallelize_module, PairwiseParallel
|
>>> from torch.distributed.tensor.parallel import parallelize_module, PairwiseParallel
|
||||||
>>>
|
>>>
|
||||||
>>> # Define the module.
|
>>> # Define the module.
|
||||||
>>> m = Model(...)
|
>>> m = Model(...)
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue
Block a user