From a989a0b13a02c61ab8db08ce4336000315fa3b91 Mon Sep 17 00:00:00 2001
From: c8ef
Date: Fri, 24 Jan 2025 18:58:56 +0000
Subject: [PATCH] [NFC] Fix some minor typos. (#145599)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/145599
Approved by: https://github.com/Skylion007
---
 torch/_inductor/codegen/wrapper.py        | 4 ++--
 torch/csrc/jit/python/pybind_utils.h      | 2 +-
 torch/distributed/distributed_c10d.py     | 2 +-
 torch/distributed/pipelining/_backward.py | 2 +-
 torchgen/gen_aoti_c_shim.py               | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/torch/_inductor/codegen/wrapper.py b/torch/_inductor/codegen/wrapper.py
index 5fedc0cb99c..d76f943fde2 100644
--- a/torch/_inductor/codegen/wrapper.py
+++ b/torch/_inductor/codegen/wrapper.py
@@ -1078,7 +1078,7 @@ class PythonWrapperCodegen(CodeGen):
             arg.get_dtype() if isinstance(arg, IRNode) else type(arg)
             for arg in raw_args
         ]
-        # Because generate_kernel_call can be overriden by a subclass, explictly call
+        # Because generate_kernel_call can be overriden by a subclass, explicitly call
         # PythonWrapperCodegen.generate_kernel_call here
         PythonWrapperCodegen.generate_kernel_call(
             self,
@@ -2376,7 +2376,7 @@ class PythonWrapperCodegen(CodeGen):

     def codegen_subgraph_prefix(self, subgraph, outer_inputs, outer_outputs):
         # All inputs of hops must be explicitly passed in.
-        # Free tensors and basic symbols should have been explictily lifted as inputs in dynamo.
+        # Free tensors and basic symbols should have been explicitly lifted as inputs in dynamo.
         assert len(outer_inputs) == len(
             subgraph.graph.graph_input_names
         ), f"graph_input_names:{subgraph.graph.graph_input_names}, outer_inputs: {outer_inputs}"
diff --git a/torch/csrc/jit/python/pybind_utils.h b/torch/csrc/jit/python/pybind_utils.h
index 4c108904cc5..273c6acd49c 100644
--- a/torch/csrc/jit/python/pybind_utils.h
+++ b/torch/csrc/jit/python/pybind_utils.h
@@ -691,7 +691,7 @@ inline IValue toTypeInferredIValue(py::handle input) {
   if (auto mod = as_module(object)) {
     // if obj is already a ScriptModule, just return its ivalue
     auto ptr = mod.value()._ivalue();
-    // explict copy semantics for strong ownership of the resource.
+    // explicit copy semantics for strong ownership of the resource.
     return c10::intrusive_ptr<c10::ivalue::Object>::reclaim_copy(
         ptr.release());
   }
diff --git a/torch/distributed/distributed_c10d.py b/torch/distributed/distributed_c10d.py
index 1b0442c1542..ff89dd79f2d 100644
--- a/torch/distributed/distributed_c10d.py
+++ b/torch/distributed/distributed_c10d.py
@@ -1811,7 +1811,7 @@ def _shutdown_backend(pg):
     except RuntimeError:
         pass
     if is_nccl_available() and isinstance(backend, ProcessGroupNCCL):
-        # explictly call shutdown to ensure that NCCL resources are released
+        # explicitly call shutdown to ensure that NCCL resources are released
         backend._shutdown()


diff --git a/torch/distributed/pipelining/_backward.py b/torch/distributed/pipelining/_backward.py
index e2eebf49ad7..a31ee53206a 100644
--- a/torch/distributed/pipelining/_backward.py
+++ b/torch/distributed/pipelining/_backward.py
@@ -344,7 +344,7 @@ def stage_backward(
     # 2. extract_tensors_with_grads referred to both stage_output_tensors, output_grad_tensors,
     #    and to itself (extract_tensors_with_grads) since it makes a recursive call
     # 3. stage_output_tensors was kept alive by the above refcycle, and it holds activation tensors, which is bad
-    # fix -> explictly pass in the ref to the fn, so there is no gc cycle anymore
+    # fix -> explicitly pass in the ref to the fn, so there is no gc cycle anymore
     extract_tensors_with_grads(
         stage_output, output_grads, extract_tensors_with_grads
     )
diff --git a/torchgen/gen_aoti_c_shim.py b/torchgen/gen_aoti_c_shim.py
index 9846de77b43..da4abab91c8 100644
--- a/torchgen/gen_aoti_c_shim.py
+++ b/torchgen/gen_aoti_c_shim.py
@@ -142,7 +142,7 @@ def convert_arg_type_and_name(  # type: ignore[return]
             new_callsite_exprs,
         )
     elif isinstance(typ, ListType):
-        # Need to explictly pass the list as pointer + length
+        # Need to explicitly pass the list as pointer + length
         c_types, names, aten_types, _ = convert_arg_type_and_name(typ.elem, name)
         assert len(c_types) == 1, "ListType with unsupported element type " + repr(typ)
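
The comment touched in torch/distributed/pipelining/_backward.py describes the pattern behind the original fix: a nested helper that captured itself in its own closure formed a reference cycle, which kept stage_output_tensors (and the activations it holds) alive until the garbage collector ran; passing the function to itself as an argument breaks that cycle. Below is a minimal standalone sketch of the pattern, not the actual stage_backward code; the names process, extract, and collected are illustrative only.

    def process(outputs, grads):
        collected = []  # plays the role of stage_output_tensors: holds large objects

        # If `extract` recursed by referring to its own name, its closure cell
        # would point back at the function object, forming a refcycle that keeps
        # `collected` alive until gc runs. Instead, recurse through a reference
        # that is passed in explicitly, so no self-capture occurs.
        def extract(output, grad, extract_ref):
            if isinstance(output, (list, tuple)):
                for o, g in zip(output, grad):
                    extract_ref(o, g, extract_ref)  # recurse via the explicit ref
            elif grad is not None:
                collected.append(output)

        extract(outputs, grads, extract)  # no self-capture, so no gc cycle
        return collected

    # Example: process((x, (y, z)), (gx, (gy, None))) returns [x, y], keeping
    # only outputs that actually received gradients.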