Add @requires_multicast_support to test_multimem_all_gather (#151227)
Fixes #ISSUE_NUMBER

Pull Request resolved: https://github.com/pytorch/pytorch/pull/151227
Approved by: https://github.com/jeffdaily
This commit is contained in:
parent 83d88d128d
commit 7e5f6dcf7f
@@ -1013,6 +1013,7 @@ class SymmMemCollectiveTest(MultiProcessTestCase):
         )
 
     @skip_if_lt_x_gpu(4)
+    @requires_multicast_support()
     @parametrize("align_bytes", [4, 8, 16])
     def test_multimem_all_gather(self, align_bytes: int) -> None:
         self._init_process()
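For context, the added decorator gates test_multimem_all_gather on hardware multicast support, so the test is reported as skipped rather than failing on GPUs that lack it. The sketch below illustrates how such a skip decorator can be written; the names requires_multicast_support_sketch and _has_multicast_support, and the capability check they perform, are illustrative assumptions and not the actual PyTorch implementation.

# Illustrative sketch only -- not the decorator used by PyTorch's test suite.
import functools
import unittest

import torch


def _has_multicast_support() -> bool:
    # Assumption: a real check would also query the driver for NVLink
    # multicast (multimem) capability; here we only require a CUDA device.
    return torch.cuda.is_available()


def requires_multicast_support_sketch():
    """Decorator factory: skip the wrapped test when multicast is unsupported."""

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            if not _has_multicast_support():
                # unittest treats a raised SkipTest as a skipped test.
                raise unittest.SkipTest("multicast support not available")
            return fn(*args, **kwargs)

        return wrapper

    return decorator

With a gate like this applied, the parametrized test is skipped cleanly on unsupported hardware instead of surfacing as a failure, which is the intent of the change in this commit.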