Guard test_lapack_empty with has_magma. (#9936)
Summary: CUDA lapack functions generally don't work unless has_magma is true.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/9936

Differential Revision: D9028579

Pulled By: gchanan

fbshipit-source-id: 9b77e3b05253fd49bcabf604d0924ffa0e116055
This commit is contained in:
parent
bf32ea8094
commit
c0bacc6284
@@ -6394,6 +6394,11 @@ class TestTorch(TestCase):
         devices = ['cpu'] if not torch.cuda.is_available() else ['cpu', 'cuda']
         for device in devices:
+            # need to init cuda to check has_magma
+            empty = torch.randn((0, 0), device=device)
+            if device == 'cuda' and not torch.cuda.has_magma:
+                continue
+
             def fn(torchfn, *args):
                 return torchfn(*tuple(torch.randn(shape, device=device) if isinstance(shape, tuple) else shape
                                       for shape in args))
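For context, the guard added above can be reproduced as a standalone script. The sketch below is a minimal, hypothetical example and not part of the PR (the helper lapack_supported is invented for illustration); it assumes a PyTorch build of the same era as this commit, where torch.cuda.has_magma only reports a meaningful value after CUDA has been initialized and torch.inverse dispatches to LAPACK on CPU and MAGMA on CUDA.

import torch

def lapack_supported(device):
    # Hypothetical helper, not from the PR: decide whether LAPACK-backed
    # ops (inverse, svd, qr, ...) can run on the given device.
    if device == 'cuda':
        if not torch.cuda.is_available():
            return False
        # Allocating a tensor on the device initializes CUDA, which is
        # required before torch.cuda.has_magma is meaningful (this mirrors
        # the "need to init cuda to check has_magma" comment in the diff).
        torch.randn(1, device='cuda')
        return torch.cuda.has_magma
    return True  # CPU LAPACK is always compiled in

for device in (['cpu', 'cuda'] if torch.cuda.is_available() else ['cpu']):
    if not lapack_supported(device):
        continue  # skip, mirroring the guard added in this commit
    a = torch.randn(4, 4, device=device)
    inv = torch.inverse(a)  # LAPACK on CPU, MAGMA-backed on CUDA
    print(device, torch.allclose(a @ inv, torch.eye(4, device=device), atol=1e-4))

On a CUDA build compiled without MAGMA, the loop simply skips the 'cuda' entry instead of failing inside the solver, which is exactly the behavior the test guard establishes.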