Skip distributed subprocess tests internally as they don't work (#148909)

Follow up from https://github.com/pytorch/pytorch/pull/146098

Pull Request resolved: https://github.com/pytorch/pytorch/pull/148909
Approved by: https://github.com/janeyx99
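
For reference, a minimal sketch of the pattern this change applies (the module, class, and test names below are illustrative, not the actual PyTorch test): subprocess-based tests get decorated with `unittest.skipIf` on the `IS_FBCODE` / `IS_SANDCASTLE` flags from `torch.testing._internal.common_utils`, so they are skipped in internal CI where spawning a separate Python interpreter does not work.

```python
# Minimal sketch, assuming a standalone test module; names are illustrative.
import subprocess
import sys
import unittest

from torch.testing._internal.common_utils import IS_FBCODE, IS_SANDCASTLE


class SubprocessSkipExample(unittest.TestCase):
    # Same gating as in this PR: skip wherever a child Python process
    # cannot be spawned (fbcode / Sandcastle internal CI).
    @unittest.skipIf(IS_FBCODE or IS_SANDCASTLE, "subprocess test fails in fbcode")
    def test_runs_script_in_subprocess(self):
        # The real tests build a much larger script string; this only
        # demonstrates the subprocess-launch pattern being gated.
        script = "import torch.distributed as dist; print(dist.is_available())"
        result = subprocess.run(
            [sys.executable, "-c", script],
            capture_output=True,
            text=True,
            check=True,
        )
        self.assertIn("True", result.stdout)


if __name__ == "__main__":
    unittest.main()
```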
Author: albanD
Date: 2025-03-11 02:07:41 +00:00
Committed by: PyTorch MergeBot
Parent: 457ff9b7ae
Commit: 8c45d44abb


@@ -8,6 +8,7 @@ import sys
 import tempfile
 import threading
 import time
+import unittest
 from contextlib import nullcontext
 from dataclasses import dataclass
 from datetime import timedelta
@@ -35,6 +36,8 @@ from torch.testing._internal.common_distributed import (
 )
 from torch.testing._internal.common_utils import (
     instantiate_parametrized_tests,
+    IS_FBCODE,
+    IS_SANDCASTLE,
     load_tests,
     parametrize,
     retry_on_connect_failures,
@@ -1908,6 +1911,7 @@ class ProcessGroupWithDispatchedCollectivesTests(MultiProcessTestCase):
         dist.destroy_process_group()
 
+    @unittest.skipIf(IS_FBCODE or IS_SANDCASTLE, "subprocess test fails in fbcode")
     def test_default_process_group(self):
         script = """
 # Hide all GPUs