mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-06 12:20:52 +01:00
Summary: Action following https://github.com/pytorch/pytorch/issues/66232 Pull Request resolved: https://github.com/pytorch/pytorch/pull/66808 Reviewed By: mrshenli Differential Revision: D31761414 Pulled By: janeyx99 fbshipit-source-id: baf8c49ff9c4bcda7b0ea0f6aafd26380586e72d
76 lines
2.9 KiB
Python
# Owner(s): ["oncall: jit"]
|
|
|
|
import os
|
|
import sys
|
|
|
|
import torch
|
|
|
|
# Make the helper files in test/ importable
|
|
pytorch_test_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
|
sys.path.append(pytorch_test_dir)
|
|
from torch.testing._internal.jit_utils import JitTestCase
|
|
|
|
# Guard against running this file directly: these JIT tests must be collected
# through test/test_jit.py, which sets up the shared TorchScript test harness.
if __name__ == '__main__':
    raise RuntimeError("This test file is not meant to be run directly, use:\n\n"
                       "\tpython test/test_jit.py TESTNAME\n\n"
                       "instead.")
|
|
|
|
class TestTensorCreationOps(JitTestCase):
|
|
"""
|
|
A suite of tests for ops that create tensors.
|
|
"""
|
|
|
|
def test_randperm_default_dtype(self):
|
|
def randperm(x: int):
|
|
perm = torch.randperm(x)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert perm.dtype == torch.int64
|
|
|
|
self.checkScript(randperm, (3, ))
|
|
|
|
def test_randperm_specifed_dtype(self):
|
|
def randperm(x: int):
|
|
perm = torch.randperm(x, dtype=torch.float)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert perm.dtype == torch.float
|
|
|
|
self.checkScript(randperm, (3, ))
|
|
|
|
def test_triu_indices_default_dtype(self):
|
|
def triu_indices(rows: int, cols: int):
|
|
indices = torch.triu_indices(rows, cols)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert indices.dtype == torch.int64
|
|
|
|
self.checkScript(triu_indices, (3, 3))
|
|
|
|
def test_triu_indices_specified_dtype(self):
|
|
def triu_indices(rows: int, cols: int):
|
|
indices = torch.triu_indices(rows, cols, dtype=torch.float)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert indices.dtype == torch.float
|
|
|
|
self.checkScript(triu_indices, (3, 3))
|
|
|
|
def test_tril_indices_default_dtype(self):
|
|
def tril_indices(rows: int, cols: int):
|
|
indices = torch.tril_indices(rows, cols)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert indices.dtype == torch.int64
|
|
|
|
self.checkScript(tril_indices, (3, 3))
|
|
|
|
def test_tril_indices_specified_dtype(self):
|
|
def tril_indices(rows: int, cols: int):
|
|
indices = torch.tril_indices(rows, cols, dtype=torch.float)
|
|
# Have to perform assertion here because TorchScript returns dtypes
|
|
# as integers, which are not comparable against eager torch.dtype.
|
|
assert indices.dtype == torch.float
|
|
|
|
self.checkScript(tril_indices, (3, 3))
|