Fix GuardOnDataDependentSymNode in the normalize operator (#152039)

Test Plan:
Dumped the local net's torch.package to a local path.

Ran
```
buck2 run scripts/shengqin:test_model_export -- /tmp/mtia_local_torch_package {\"local\":null}
```
The command succeeded.

Reviewed By: hongyang-zhao

Differential Revision: D73405271

Pull Request resolved: https://github.com/pytorch/pytorch/pull/152039
Approved by: https://github.com/houseroad
This commit is contained in:
Author: Sheng Qin, 2025-05-01 04:34:45 +00:00 (committed by PyTorch MergeBot)
Parent: 688adc9941
Commit: 18588fe2fc

View File

@@ -135,6 +135,8 @@ def vector_norm(
     *,
     dtype: Optional[torch.dtype] = None,
 ) -> Tensor:
+    from torch.fx.experimental.symbolic_shapes import guard_size_oblivious
+
     check_fp_or_complex(x.dtype, "linalg.vector_norm")
     if isinstance(dim, Dim):
@@ -164,7 +166,8 @@ def vector_norm(
     is_ord_even = ord % 2 == 0 if isinstance(ord, IntLike) else ord % 2.0 == 0.0
     if (dim is None and x.numel() == 1) or (
-        dim is not None and (x.ndim > 0 and all(x.shape[d] == 1 for d in dim))
+        dim is not None
+        and (x.ndim > 0 and all(guard_size_oblivious(x.shape[d] == 1) for d in dim))
     ):
         if x.ndim > 64:
             raise RuntimeError(