mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-06 12:20:52 +01:00
Summary: Action following https://github.com/pytorch/pytorch/issues/66232. Pull Request resolved: https://github.com/pytorch/pytorch/pull/66808. Reviewed By: mrshenli. Differential Revision: D31761414. Pulled By: janeyx99. fbshipit-source-id: baf8c49ff9c4bcda7b0ea0f6aafd26380586e72d
20 lines
631 B
Python
20 lines
631 B
Python
# Owner(s): ["oncall: jit"]
|
|
|
|
import torch
|
|
from torch.testing._internal.jit_utils import JitTestCase
|
|
|
|
class TestFuserCommon(JitTestCase):
|
|
def test_autodiff_fallback(self):
|
|
for rq in [True, False]:
|
|
@torch.jit.script
|
|
def fn(x):
|
|
return torch.max(x**2.0, x**3.0)
|
|
|
|
x = torch.randn(5, requires_grad=not rq)
|
|
# cause optimization to be created
|
|
for i in range(5):
|
|
fn(x)
|
|
# test fallback when optimization is not applicable
|
|
y = fn(torch.randn(5, requires_grad=rq))
|
|
self.assertEqual(y.requires_grad, rq)
|