update types on dynamo configs (#146873)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/146873
Approved by: https://github.com/williamwen42
Author: Gabriel Ferns
Date: 2025-03-11 05:33:45 +00:00
Committed by: PyTorch MergeBot
Parent: 1fcc4bc109
Commit: 41e4728f74


@@ -16,7 +16,7 @@ import os
 import sys
 import tempfile
 from os.path import abspath, dirname
-from typing import Any, Callable, Optional, TYPE_CHECKING, Union
+from typing import Any, Callable, Literal, Optional, TYPE_CHECKING, Union
 
 from torch._environment import is_fbcode
 from torch.utils._config_module import Config, get_tristate_env, install_config_module
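For readers unfamiliar with `Literal`: it restricts a variable to a fixed set of constant values, so a static type checker such as mypy can reject anything else. A minimal, self-contained sketch (not part of the PyTorch sources):

    from typing import Literal

    # The annotation pins the variable to exactly these two string values.
    mode: Literal["dynamic", "unbacked"] = "dynamic"
    mode = "unbacked"  # accepted: value is one of the declared literals
    mode = "static"    # rejected by mypy at check time (still runs at runtime)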
@@ -110,7 +110,7 @@ assume_static_by_default = True
 automatic_dynamic_shapes = True
 
 # Valid options: "dynamic", "unbacked"
-automatic_dynamic_shapes_mark_as = "dynamic"
+automatic_dynamic_shapes_mark_as: Literal["dynamic", "unbacked"] = "dynamic"
 
 # This flag changes how the shapes of parameters are treated.
 # If this flag is set to True, then the shapes of torch.nn.Parameter as well as of torch.Tensor are attempted to be dynamic
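With the annotation in place, type checkers can verify assignments to this config; at runtime the usual attribute-style access still works. A minimal usage sketch, assuming a PyTorch build that includes this change:

    import torch

    # Both config names appear in the hunk above; "dynamic" is the default.
    torch._dynamo.config.automatic_dynamic_shapes = True
    torch._dynamo.config.automatic_dynamic_shapes_mark_as = "unbacked"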
@@ -310,7 +310,15 @@ allow_unspec_int_on_nn_module = False
 # Note that to avoid breaking the existing usage, mode 1 and mode 4 can be
 # specified with a boolean value. True is using ddp_optimizer and False is
 # no optimization.
-optimize_ddp: Union[bool, str] = True
+optimize_ddp: Union[
+    bool,
+    Literal[
+        "ddp_optimizer",
+        "python_reducer",
+        "python_reducer_without_compiled_forward",
+        "no_optimization",
+    ],
+] = True
 
 # By default, Dynamo emits runtime asserts (e.g. torch._check, torch._check_is_size) in the graph.
 # In some cases those asserts could be performance costly
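Per the comment in the hunk above, modes 1 and 4 can still be spelled as booleans, while the other modes use the literal strings now enumerated in the type. A minimal usage sketch, assuming a PyTorch build that includes this change:

    import torch

    torch._dynamo.config.optimize_ddp = True              # equivalent to "ddp_optimizer"
    torch._dynamo.config.optimize_ddp = False             # equivalent to "no_optimization"
    torch._dynamo.config.optimize_ddp = "python_reducer"  # one of the four string modes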