[dynamo] remove dead code for DATA_PTR_MATCH (#152206)

Summary: It seems this guard is not created anywhere.

Test Plan: CI

Differential Revision: D73682084

Pull Request resolved: https://github.com/pytorch/pytorch/pull/152206
Approved by: https://github.com/anijain2305, https://github.com/jansel
This commit is contained in:
Zhengxu Chen 2025-04-26 15:25:01 +00:00 committed by PyTorch MergeBot
parent ee8166e94f
commit 203201255f
3 changed files with 0 additions and 53 deletions

View File

@@ -201,12 +201,6 @@ num_guards_executed=0)
finally:
torch.set_default_device(None)
def test_data_ptr_match_guard(self):
# Exercise the C++ DATA_PTR_MATCH leaf guard directly.
foo = torch.tensor([1, 2, 3])
# The guard records foo's data pointer at construction; the string list is
# only the verbose code-parts text used for debug output.
guard = guards.DATA_PTR_MATCH(foo, ["x.data_ptr() == foo.data_ptr()"])
# Same tensor object -> same storage address -> guard passes.
self.assertTrue(guard(foo))
# A freshly allocated tensor has a different data_ptr -> guard fails.
self.assertFalse(guard(torch.tensor([1, 2, 3])))
def test_length_check_guard(self):
foo = [1, 2, 3]
guard = guards.LENGTH_CHECK(len(foo), ["len(x) == len(foo)"])

View File

@@ -1553,16 +1553,6 @@ class GuardBuilder(GuardBuilderBase):
def NAME_MATCH(self, guard: Guard):
# Guard that the object's __name__ attribute equals its compile-time
# value, delegating to EQUALS_MATCH on the attribute.
self._guard_on_attribute(guard, "__name__", GuardBuilder.EQUALS_MATCH)
def DATA_PTR_MATCH(self, guard: Guard):
# C++ guard has the type check internally
# Install a guard that the guarded value's data_ptr() equals the pointer
# recorded at compile time from the traced object.
obj = self.get(guard.name)
# Python-source form of the check; used only for guard export/debug info,
# not for the runtime check itself.
code = f"{self.arg_ref(guard)}.data_ptr() == {obj.data_ptr()}"
self._set_guard_export_info(guard, [code])
# The actual runtime check is the C++ DATA_PTR_MATCH leaf guard.
self.get_guard_manager(guard).add_data_ptr_guard(
obj, get_verbose_code_parts(code, guard)
)
def DUAL_LEVEL(self, guard: Guard):
# Invalidate dual level if current dual level is different than the one
# in the fx graph

View File

@@ -1881,30 +1881,6 @@ class GLOBAL_STATE : public LeafGuard {
std::unique_ptr<GlobalStateGuard> _guard;
};
class DATA_PTR_MATCH : public LeafGuard {
 public:
  // Records the data pointer of `tensor` at construction time; the guard
  // later passes only for tensors whose storage starts at that address.
  // Throws if the constructor argument is not a tensor.
  DATA_PTR_MATCH(py::object tensor, py::object verbose_code_parts)
      : LeafGuard(std::move(verbose_code_parts)) {
    PyObject* obj = tensor.ptr();
    bool is_tensor = THPVariable_CheckExact(obj) || THPVariable_Check(obj);
    if (!is_tensor) {
      throw std::runtime_error("DATA_PTR_MATCH guard requires a tensor");
    }
    _data_ptr = THPVariable_Unpack(obj).data_ptr();
  }

  // `value` is a borrowed reference. Non-tensor values simply fail the
  // guard (return false) rather than throwing - the type check is part of
  // the guard itself.
  bool check_nopybind(PyObject* value) override {
    bool is_tensor =
        THPVariable_CheckExact(value) || THPVariable_Check(value);
    if (!is_tensor) {
      return false;
    }
    return THPVariable_Unpack(value).data_ptr() == _data_ptr;
  }

 private:
  // Data pointer captured from the original tensor.
  void* _data_ptr;
};
// Checks that an attr is absent in the object. We don't need the opposite
// HASATTR guard because we can just rely on GetAttrGuardAccessor to act as
// HASATTR guard.
@@ -5498,10 +5474,6 @@ PyObject* torch_c_dynamo_guards_init() {
py_m, "TORCH_FUNCTION_MODE_STACK")
.def(py::init<py::list, py::list>())
.def("__call__", &TORCH_FUNCTION_MODE_STACK::check);
py::class_<DATA_PTR_MATCH, LeafGuard, std::shared_ptr<DATA_PTR_MATCH>>(
py_m, "DATA_PTR_MATCH")
.def(py::init<py::object, py::list>())
.def("__call__", &DATA_PTR_MATCH::check);
py::class_<NO_HASATTR, LeafGuard, std::shared_ptr<NO_HASATTR>>(
py_m, "NO_HASATTR")
.def(py::init<py::object, py::list>())
@@ -5830,15 +5802,6 @@ PyObject* torch_c_dynamo_guards_init() {
self.add_leaf_guard(std::make_shared<TORCH_FUNCTION_MODE_STACK>(
initial_stack, std::move(verbose_code_parts)));
})
.def(
"add_data_ptr_guard",
[](GuardManager& self,
py::object data_ptr,
py::object verbose_code_parts) -> void {
SKIP_IF_GUARD_ALREADY_PRESENT("DATA_PTR_MATCH");
self.add_leaf_guard(std::make_shared<DATA_PTR_MATCH>(
std::move(data_ptr), std::move(verbose_code_parts)));
})
.def(
"add_no_hasattr_guard",
[](GuardManager& self,