[easy] Use config patch to toggle capture_scalar_output (#150036)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/150036
Approved by: https://github.com/angelayi
ghstack dependencies: #149087, #149667
Authored by Animesh Jain on 2025-03-26 12:36:48 -07:00; committed by PyTorch MergeBot
parent 999fa15ba8
commit 731b559f54


@@ -695,6 +695,7 @@ class GraphModule(torch.nn.Module):
         res = opt_fn(x)
         self.assertEqual(ref, res)

+    @torch._dynamo.config.patch(capture_scalar_outputs=True)
     def test_pending_unbacked(self):
         @mark_compile_region
         def gn(x):
@@ -707,7 +708,6 @@ class GraphModule(torch.nn.Module):
         x = torch.randn(8)
         torch._dynamo.mark_dynamic(x, 0)
         ref = fn(x)
-        torch._dynamo.config.capture_scalar_outputs = True
         opt_fn = torch.compile(
             fn, backend="eager", fullgraph=True
         )  # Inductor fails with cpp compilation error
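
For context on the pattern the diff adopts: torch._dynamo.config.patch can be applied as a decorator (or used as a context manager), so capture_scalar_outputs=True is in effect only while the decorated test runs and the previous value is restored afterwards, whereas the removed bare assignment left the flag enabled for every test that ran later in the process. Below is a minimal standalone sketch of the two patterns; the names fn and run_compiled are illustrative, not taken from the test file.

import torch


# Old pattern (removed by this commit): a bare assignment mutates the global
# Dynamo config and is never undone, so the override leaks into later tests.
#   torch._dynamo.config.capture_scalar_outputs = True

# New pattern (added by this commit): the override is scoped to the decorated
# function and reverted when it returns.
@torch._dynamo.config.patch(capture_scalar_outputs=True)
def run_compiled():
    def fn(x):
        # .item() yields a Python scalar; without capture_scalar_outputs
        # Dynamo graph-breaks here, which fullgraph=True turns into an error.
        return x.sum().item() + 1

    opt_fn = torch.compile(fn, backend="eager", fullgraph=True)
    return opt_fn(torch.arange(8))


if __name__ == "__main__":
    print(run_compiled())  # 29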