[ROCm] Skip test_blockwise_nvfp4_with_global_scale (#165968)
Disable the fp4 global_scale test until the feature is enabled on ROCm.

Fixes #166027. Not really a fix: because the test is parameterized, we are trading an open issue for a test skip decorator.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165968
Approved by: https://github.com/jeffdaily, https://github.com/drisspg
commit 34ed7a8f0d
parent 2fde10d914
@@ -37,6 +37,7 @@ from torch.testing._internal.common_utils import (
     IS_WINDOWS,
     parametrize,
     run_tests,
+    skipIfRocm,
     TEST_CUDA,
     TestCase,
 )
@@ -1290,7 +1291,7 @@ class TestFP8Matmul(TestCase):
         lp_data_expected = torch.tensor([0b10110010], dtype=torch.uint8)
         torch.testing.assert_close(lp_data_actual, lp_data_expected, atol=0, rtol=0)

+    @skipIfRocm
     @onlyCUDA
     @unittest.skipIf(not PLATFORM_SUPPORTS_MX_GEMM, mx_skip_msg)
     @parametrize("mkn", [
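Because the test is parameterized, parametrize expands it into one generated test per parameter value, so a method-level skip decorator covers every variant at once; that is why the fix is a single decorator rather than a per-variant disable. Below is a minimal sketch of the pattern using plain unittest; the TEST_WITH_ROCM flag and the skipIfRocm body here are simplified stand-ins for the real helpers in torch.testing._internal.common_utils, not the PyTorch implementation.

import unittest

# Assumption: stand-in for the environment-driven flag PyTorch derives
# from the build/runtime; hardcoded here so the sketch is self-contained.
TEST_WITH_ROCM = False

def skipIfRocm(fn):
    # Sketch: mark the whole test method as skipped on the ROCm stack.
    return unittest.skipIf(
        TEST_WITH_ROCM, "test doesn't currently work on the ROCm stack"
    )(fn)

class TestFP4(unittest.TestCase):
    # A parameterized test template inherits the decorator in every
    # generated variant, so one line skips the entire parameter matrix.
    @skipIfRocm
    def test_blockwise_nvfp4_with_global_scale(self):
        ...

if __name__ == "__main__":
    unittest.main()

Stacking @skipIfRocm on top of the existing @onlyCUDA and MX-GEMM guards also keeps the change easy to revert: re-enabling the test once the feature lands on ROCm is a one-line removal.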