Skip optimizer overlap tests that have issues with NCCL async error handling

Skip these tests, which sometimes have issues on unrelated PRs such as
https://github.com/pytorch/pytorch/runs/5291461671?check_suite_focus=true. See
https://github.com/pytorch/pytorch/issues/73259 for additional details. (A
sketch of the resulting skip pattern follows the diff below.)

Differential Revision: [D34404857](https://our.internmc.facebook.com/intern/diff/D34404857/)

[ghstack-poisoned]
Author: Rohan Varma
Date: 2022-02-22 15:53:01 -08:00
Parent: 28339ddc25
Commit: 4bb27ae7d3

@@ -4064,8 +4064,8 @@ class DistributedTest:
             dist.barrier()

         @sandcastle_skip_if(
-            BACKEND not in DistTestCases.backend_feature["ddp"],
-            f"The {BACKEND} backend does not support DistributedDataParallel"
+            BACKEND == "nccl",
+            "Issues with async error handling, see https://github.com/pytorch/pytorch/issues/73259"
         )
         @skip_if_lt_x_gpu(2)
         @skip_if_rocm
@@ -4092,8 +4092,8 @@ class DistributedTest:
         )
         @sandcastle_skip_if(
-            BACKEND not in DistTestCases.backend_feature["ddp"],
-            f"The {BACKEND} backend does not support DistributedDataParallel"
+            BACKEND == "nccl",
+            "Issues with async error handling, see https://github.com/pytorch/pytorch/issues/73259"
         )
         @skip_if_lt_x_gpu(2)
         @skip_if_rocm
@@ -4113,8 +4113,8 @@ class DistributedTest:
        )
         @sandcastle_skip_if(
-            BACKEND not in DistTestCases.backend_feature["ddp"],
-            f"The {BACKEND} backend does not support DistributedDataParallel"
+            BACKEND == "nccl",
+            "Issues with async error handling, see https://github.com/pytorch/pytorch/issues/73259"
         )
         @skip_if_lt_x_gpu(2)
         @skip_if_rocm
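
For reference, a minimal, self-contained sketch of the skip pattern these
hunks converge on. The test name and body are hypothetical, BACKEND is read
from the environment here only for illustration, and the import paths for
sandcastle_skip_if, skip_if_lt_x_gpu, and skip_if_rocm are assumed to match
PyTorch's internal test utilities of this era:

import os

# Assumed import paths; in-tree tests get these helpers from
# torch.testing._internal.
from torch.testing._internal.common_utils import sandcastle_skip_if
from torch.testing._internal.common_distributed import (
    skip_if_lt_x_gpu,
    skip_if_rocm,
)

# The real suite derives BACKEND from the test environment; this default is
# only for the sketch.
BACKEND = os.environ.get("BACKEND", "nccl")

class DistributedTest:
    # After this change the overlap tests skip specifically on NCCL, where
    # the async error handling flakiness is observed, rather than gating on
    # the backend feature table.
    @sandcastle_skip_if(
        BACKEND == "nccl",
        "Issues with async error handling, see https://github.com/pytorch/pytorch/issues/73259",
    )
    @skip_if_lt_x_gpu(2)
    @skip_if_rocm
    def test_optimizer_overlap_example(self):
        ...  # hypothetical body: a DDP training step with an overlapped optimizer

Note the trade-off encoded in the new condition: BACKEND == "nccl" disables
these tests only where the linked issue manifests, whereas the old
DistTestCases.backend_feature["ddp"] guard skipped only backends that lack
DistributedDataParallel support entirely.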