pytorch/test/custom_backend/test_custom_backend.py

import os
import tempfile

import torch

from backend import Model, to_custom_backend, get_custom_backend_library_path
from torch.testing._internal.common_utils import TestCase, run_tests


class TestCustomBackend(TestCase):
    def setUp(self):
        # Load the library containing the custom backend.
        self.library_path = get_custom_backend_library_path()
        torch.ops.load_library(self.library_path)

        # Create an instance of the test Module and lower it for
        # the custom backend.
        self.model = to_custom_backend(torch.jit.script(Model()))

    def test_execute(self):
        """
        Test execution using the custom backend.
        """
        a = torch.randn(4)
        b = torch.randn(4)
        # The custom backend is hardcoded to compute f(a, b) = (a + b, a - b).
        expected = (a + b, a - b)
        out = self.model(a, b)
        self.assertTrue(expected[0].allclose(out[0]))
        self.assertTrue(expected[1].allclose(out[1]))

    def test_save_load(self):
        """
        Test that a lowered module can be executed correctly
        after saving and loading.
        """
        # Test execution before saving and loading to make sure
        # the lowered module works in the first place.
        self.test_execute()

        # Save and load.
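        # The temporary file is created with delete=False and closed before
        # use: on Windows an open temporary file cannot be reopened by its
        # path, so torch.jit.save writes to the name only after close(), and
        # the file is removed manually in the finally block.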
        f = tempfile.NamedTemporaryFile(delete=False)
        try:
            f.close()
            torch.jit.save(self.model, f.name)
            loaded = torch.jit.load(f.name)
        finally:
            os.unlink(f.name)
        self.model = loaded

        # Test execution again.
        self.test_execute()


if __name__ == "__main__":
    run_tests()
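
For context, Model, to_custom_backend, and get_custom_backend_library_path come from a companion backend.py module in the same directory. The sketch below is a hypothetical reconstruction of those helpers, assuming the C++ library registers the backend under the name "custom_backend" and that an empty per-method compile spec is enough for lowering; the library filename logic and the compile spec are illustrative assumptions, not a copy of the real file.

import os
import sys

import torch


class Model(torch.nn.Module):
    # Toy module whose forward matches the backend's hardcoded
    # computation f(a, b) = (a + b, a - b).
    def forward(self, a, b):
        return a + b, a - b


def get_custom_backend_library_path():
    # Locate the compiled backend library next to this file. The prefix and
    # extension are assumptions and depend on the platform and build setup.
    ext = {"darwin": ".dylib", "win32": ".dll"}.get(sys.platform, ".so")
    prefix = "" if sys.platform == "win32" else "lib"
    return os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "{}custom_backend{}".format(prefix, ext),
    )


def to_custom_backend(module):
    # Lower an already-scripted module to the backend registered (on the C++
    # side) as "custom_backend". torch._C._jit_to_backend takes the backend
    # name, the scripted module, and a per-method compile spec; an empty spec
    # for "forward" is assumed here.
    return torch._C._jit_to_backend("custom_backend", module, {"forward": {"": ""}})

Because setUp lowers the scripted module eagerly, test_save_load exercises serialization of an already-lowered module: torch.jit.save and torch.jit.load round-trip the backend-lowered artifact directly, with no re-lowering after load.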