[inductor] Fallback on complex64 kernels (#98155)
Later PRs in this stack fix graph breaks in GoogleFnet, which in turn triggers errors from inductor trying to compile torch.complex64; this PR fixes that by falling back to the eager kernels for complex dtypes.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/98155
Approved by: https://github.com/anijain2305, https://github.com/ngimel
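A minimal sketch of the behavior this change targets (the function f and the tensor shape below are illustrative, not taken from the PR): after the fallback, compiling a function whose outputs are torch.complex64 should route those ops to the eager ATen kernels instead of erroring inside inductor codegen.

import torch

# Illustrative only: a tiny op whose output is complex64. With the fallback
# in place, inductor declines to generate code for complex-dtype outputs and
# dispatches to the eager kernel instead of raising during compilation.
def f(x):
    return x * x.conj()

x = torch.randn(8, dtype=torch.complex64)
out = torch.compile(f)(x)  # complex ops run via the fallback path
assert torch.allclose(out, f(x))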
This commit is contained in: parent bc9dd969e1, commit 71d850a100
@@ -2999,6 +2999,7 @@ else:
         self.assertEqual(dst, src.neg().conj_physical(), exact_dtype=False)
 
     # FIXME: move to data movement test suite
+    @skipIfTorchInductor("https://github.com/pytorch/pytorch/issues/98175")
     @onlyNativeDeviceTypes
     @dtypes(torch.int64, torch.float32, torch.complex64)
     def test_copy_transpose_math_view(self, device, dtype):
@@ -1041,6 +1041,8 @@ def fallback_handler(kernel, add_to_fallback_set=True):
 
 
 def unsupported_output_tensor(t: torch._subclasses.FakeTensor):
+    if t.dtype in (torch.complex32, torch.complex64, torch.complex128):
+        return True
     return t.is_cpu and config.disable_cpp_codegen
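For illustration, the added dtype gate in isolation (a sketch using ordinary tensors rather than FakeTensors; the helper name here is hypothetical, not the inductor API):

import torch

_COMPLEX_DTYPES = (torch.complex32, torch.complex64, torch.complex128)

def has_unsupported_complex_output(t: torch.Tensor) -> bool:
    # Mirrors the new check: any complex-dtype output forces a fallback to
    # the eager kernel, regardless of device or codegen settings.
    return t.dtype in _COMPLEX_DTYPES

assert has_unsupported_complex_output(torch.zeros(2, dtype=torch.complex64))
assert not has_unsupported_complex_output(torch.zeros(2, dtype=torch.float32))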