[ROCm] Skip test_blockwise_nvfp4_with_global_scale (#165968)

Disable the fp4 global_scale test till the feature is enabled on ROCm.

Fixes #166027.
This is not a true fix: since the test is parameterized, we are trading the open issue for a test-skip decorator until the feature is supported on ROCm.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165968
Approved by: https://github.com/jeffdaily, https://github.com/drisspg
This commit is contained in:
Jagadish Krishnamoorthy 2025-10-22 04:23:05 +00:00 committed by PyTorch MergeBot
parent 2fde10d914
commit 34ed7a8f0d

View File

@@ -37,6 +37,7 @@ from torch.testing._internal.common_utils import (
IS_WINDOWS,
parametrize,
run_tests,
skipIfRocm,
TEST_CUDA,
TestCase,
)
@@ -1290,7 +1291,7 @@ class TestFP8Matmul(TestCase):
lp_data_expected = torch.tensor([0b10110010], dtype=torch.uint8)
torch.testing.assert_close(lp_data_actual, lp_data_expected, atol=0, rtol=0)
@skipIfRocm
@onlyCUDA
@unittest.skipIf(not PLATFORM_SUPPORTS_MX_GEMM, mx_skip_msg)
@parametrize("mkn", [