[Fix] Adding missing f prefixes to formatted strings [2/N] (#164066)

As stated in the title: these strings contain `{...}` placeholders but were missing the `f` prefix, so the placeholders were printed verbatim instead of being interpolated.

* #164068
* #164067
* __->__ #164066
* #164065
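
For context, this is the bug class being fixed: a plain string literal containing `{...}` prints the braces verbatim; only the `f` prefix turns them into interpolated expressions, so the affected assertions and exceptions were emitting placeholder text instead of values. A minimal illustration, with a name borrowed from one of the diffs below:

```python
scalar_type = "float17"

# Missing prefix: the placeholder is emitted literally.
print("Unrecognized scalar type {scalar_type}")
# -> Unrecognized scalar type {scalar_type}

# With the prefix, the value is interpolated.
print(f"Unrecognized scalar type {scalar_type}")
# -> Unrecognized scalar type float17
```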

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164066
Approved by: https://github.com/Skylion007
can-gaa-hou 2025-09-29 04:40:41 +00:00 committed by PyTorch MergeBot
parent ad32ed83b3
commit 7c7ae86991
9 changed files with 11 additions and 11 deletions


@@ -285,10 +285,10 @@ def _choose_qparams_affine(
         max_val = torch.amax(input, dim=reduction_dims, keepdim=False)
     else:
         assert min_val is not None and max_val is not None, (
-            "Need to provide `min_val` and `max_val` when `input` is None, got: {min_val, max_val}"
+            f"Need to provide `min_val` and `max_val` when `input` is None, got: {min_val, max_val}"
         )
         assert min_val.dtype == max_val.dtype, (
-            "Expecting `min_val` and `max_val` to have the same dtype, got: {min_val.dtype, max_val.dtype}"
+            f"Expecting `min_val` and `max_val` to have the same dtype, got: {min_val.dtype, max_val.dtype}"
         )
 
     if scale_dtype is None:
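
With the prefix added above, note that `min_val, max_val` inside one pair of braces parses as a tuple expression, so the message renders the tuple's repr:

```python
min_val, max_val = None, None
print(f"got: {min_val, max_val}")  # -> got: (None, None)
```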


@@ -453,7 +453,7 @@ def _prepare_input(
 def _check_outputs_same_dtype_and_shape(output1, output2, eps, idx=None) -> None:
     # Check that the returned outputs don't have different dtype or shape when you
     # perturb the input
-    on_index = "on index {idx} " if idx is not None else ""
+    on_index = f"on index {idx} " if idx is not None else ""
     assert output1.shape == output2.shape, (
         f"Expected `func` to return outputs with the same shape"
         f" when inputs are perturbed {on_index}by {eps}, but got:"


@@ -1421,7 +1421,7 @@ def _maybe_convert_scalar_types_to_dtypes(
         if scalar_type is None:
             dtypes.append(scalar_type)
         elif scalar_type not in _SCALAR_TYPE_TO_DTYPE:
-            raise ValueError("Unrecognized scalar type {scalar_type}")
+            raise ValueError(f"Unrecognized scalar type {scalar_type}")
         else:
             dtypes.append(_SCALAR_TYPE_TO_DTYPE[scalar_type])
     return dtypes


@@ -351,7 +351,7 @@ def _broadcast_state(
     if isinstance(state, torch.Tensor):
         assert state.dim() == 0, (
             "For non-zero ranks, a tensor state should have zero dimension, "
-            "but got the state with shape {state.shape()}."
+            f"but got the state with shape {state.shape}."
         )
         return state
     elif not isinstance(state, _PosDimTensorInfo):
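
This hunk differs from the rest: besides adding the prefix, it drops the call parentheses, since `Tensor.shape` is an attribute rather than a method. Adding the prefix alone would have made `{state.shape()}` raise at message-formatting time:

```python
import torch

state = torch.tensor(0.0)
print(f"but got the state with shape {state.shape}.")
# -> but got the state with shape torch.Size([]).
# f"{state.shape()}" would raise TypeError: 'torch.Size' object is not callable
```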


@@ -1941,7 +1941,7 @@ class _PipelineScheduleRuntime(PipelineScheduleMulti):
                 stage_idx,
                 mb_index,
             ) not in fwd_recv_ops, (
-                "Recv twice for {stage_idx=} {mb_index=} without executing forward"
+                f"Recv twice for {stage_idx=} {mb_index=} without executing forward"
             )
             fwd_recv_ops[(stage_idx, mb_index)] = _batch_p2p(
                 stage.get_fwd_recv_ops(mb_index)
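
The `{stage_idx=}` fields above use the self-documenting f-string form (Python 3.8+): with the prefix, each expands to the expression text plus its value; without it, they were plain literals:

```python
stage_idx, mb_index = 2, 5
print(f"Recv twice for {stage_idx=} {mb_index=} without executing forward")
# -> Recv twice for stage_idx=2 mb_index=5 without executing forward
```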


@@ -1648,7 +1648,7 @@ def constrain_range(
     if max < min:
         raise ValueError(
             "Maximum value to constrain_as_size can't be less than the specified min value, "
-            "received min={min} and max={max}"
+            f"received min={min} and max={max}"
         )
 
     if isinstance(a, int):
@@ -4095,7 +4095,7 @@ class ShapeEnv:
         if max < min:
             raise ValueError(
                 "Maximum value to constrain_as_size can't be less than the specified min value, "
-                "received min={min} and max={max}"
+                f"received min={min} and max={max}"
             )
         self.constrain_symbol_range(


@@ -416,7 +416,7 @@ def where(func, *args, **kwargs):
         args, kwargs, f"__torch_dispatch__, {func}", len_args=3, len_kwargs=0
     )
     if not torch.is_tensor(args[0]):
-        raise ValueError("__torch_dispatch__, {func}: expected args[0] to be a tensor")
+        raise ValueError(f"__torch_dispatch__, {func}: expected args[0] to be a tensor")
     mx = args[1]
     my = args[2]
     if not is_masked_tensor(mx):


@@ -404,7 +404,7 @@ class ConfigModule(ModuleType):
         try:
             module = importlib.import_module(module_name)
         except ImportError as e:
-            raise AttributeError("config alias {alias} does not exist") from e
+            raise AttributeError(f"config alias {alias} does not exist") from e
         return module, constant_name
 
     def _get_alias_val(self, entry: _ConfigEntry) -> Any:


@@ -53,6 +53,6 @@ if __name__ == "__main__":
     import sys
 
     if len(sys.argv) < 2:
-        print("Usage:\n {sys.argv[0]} filename")
+        print(f"Usage:\n {sys.argv[0]} filename")
         sys.exit(1)
     print(embed_headers(sys.argv[1]))
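
As a postscript, candidates like these can be surfaced mechanically. The sketch below is a hypothetical helper, not part of this PR: it walks the AST and flags plain string constants that look like they contain placeholders. Linters cover similar ground (e.g. ruff's preview rule RUF027, missing-f-string-syntax), and hits can also be legitimate `str.format` templates, so flagged lines still need review:

```python
import ast
import re

# Heuristic: a plain (non-f) string constant containing something shaped like
# {name}, {obj.attr}, or {name=} is a candidate for a missing f prefix.
PLACEHOLDER = re.compile(r"\{[A-Za-z_][\w .,()]*=?\}")

class MissingFPrefixFinder(ast.NodeVisitor):
    def __init__(self) -> None:
        self.hits: list[tuple[int, str]] = []

    def visit_JoinedStr(self, node: ast.JoinedStr) -> None:
        # Already an f-string; deliberately skip its literal fragments.
        pass

    def visit_Constant(self, node: ast.Constant) -> None:
        if isinstance(node.value, str) and PLACEHOLDER.search(node.value):
            self.hits.append((node.lineno, node.value))

def scan(source: str) -> list[tuple[int, str]]:
    finder = MissingFPrefixFinder()
    finder.visit(ast.parse(source))
    return finder.hits

sample = 'msg = "Unrecognized scalar type {scalar_type}"\n'
print(scan(sample))  # -> [(1, 'Unrecognized scalar type {scalar_type}')]
```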