[dtensor] redistributed by default take self mesh instead (#99060)
This PR switches `redistribute` to default to the DTensor's own mesh instead of the global mesh, which is more user friendly.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/99060
Approved by: https://github.com/mrshenli
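In practice this means a `redistribute` call can omit `device_mesh` and operate on the mesh the DTensor was created with. A minimal usage sketch (assuming a 4-rank job launched with torchrun, and using the private `torch.distributed._tensor` API as it existed at this commit):

    # Sketch only: assumes torch.distributed is already initialized across
    # 4 ranks (e.g. via torchrun) and uses the private _tensor API.
    import torch
    from torch.distributed._tensor import (
        DeviceMesh, Shard, Replicate, distribute_tensor,
    )

    mesh = DeviceMesh("cuda", list(range(4)))   # the DTensor's own mesh
    dt = distribute_tensor(torch.randn(8, 8), mesh, [Shard(0)])

    # With this change, omitting device_mesh falls back to dt.device_mesh
    # (previously it fell back to the process-global mesh):
    replicated = dt.redistribute(placements=[Replicate()])

    # placements is still mandatory; omitting it raises
    #   RuntimeError: placements is needed for redistribute!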
This commit is contained in:
parent cdef4f073c
commit 55a1dc7f88
@@ -332,13 +332,12 @@ class DTensor(torch.Tensor):  # pyre-ignore[13]: pyre is bad at __new__
         .. note:: `redistribute` is differentiable.
         """
-        # This API perform necessary transformations and get
-        # a new DTensor with the new spec. i.e. for
-        # sharding it's a reshard behavior.
-        # Note that redistribute currently only supports out
+        # NOTE: This redistribute API currently only supports out
         # of place redistribution, i.e. it always create a new
         # DTensor object and leave the original one unchanged.
-        device_mesh = get_global_device_mesh() if device_mesh is None else device_mesh
+
+        # if device_mesh is not specified, use the current device_mesh
+        device_mesh = device_mesh or self.device_mesh
         # raise error if new placements not specified
         if placements is None:
             raise RuntimeError("placements is needed for redistribute!")
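The heart of the change is the fallback expression itself. A self-contained sketch contrasting the old and new defaulting logic (`FakeMesh` and the `resolve_mesh_*` helpers below are illustrative, not the real API):

    # Illustrative only: hypothetical helpers mimicking the two fallbacks.
    class FakeMesh:
        def __init__(self, name):
            self.name = name

    GLOBAL_MESH = FakeMesh("global")

    def resolve_mesh_old(device_mesh=None):
        # before #99060: fall back to the process-global mesh
        return GLOBAL_MESH if device_mesh is None else device_mesh

    def resolve_mesh_new(self_mesh, device_mesh=None):
        # after #99060: fall back to the DTensor's own mesh
        return device_mesh or self_mesh

    own = FakeMesh("self")
    assert resolve_mesh_old().name == "global"
    assert resolve_mesh_new(own).name == "self"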