This PR:
- registers all of the codegened Nodes in the torch._C._functions module, which is where special nodes like AccumulateGrad are already registered
- creates an autograd.graph.Node abstract base class that all of the newly registered nodes subclass from; the subclassing is made to work by implementing the ``__subclasshook__`` method (see the sketch below)
- enables static type checking and lets Sphinx generate documentation for Node and its methods
- handles both the custom Function and codegened cases

Pull Request resolved: https://github.com/pytorch/pytorch/pull/91475
Approved by: https://github.com/albanD
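For illustration, here is a minimal, self-contained sketch of the ``__subclasshook__`` technique referenced above. The class names and the duck-typed check on a name() method are assumptions made only for this example; they are not how the PR itself decides membership.

import abc

class Node(abc.ABC):
    """Stand-in for torch.autograd.graph.Node in this sketch."""

    @abc.abstractmethod
    def name(self) -> str:
        ...

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Node:
            # Accept any class that exposes the expected node interface
            # (approximated here by a name() method) as a virtual subclass,
            # so classes that never inherit from Node still pass
            # isinstance/issubclass checks against it.
            if any("name" in B.__dict__ for B in C.__mro__):
                return True
        return NotImplemented

# Stand-in for a codegened node class that does not inherit from Node.
class MulBackward0:
    def name(self) -> str:
        return "MulBackward0"

print(issubclass(MulBackward0, Node))    # True, via __subclasshook__
print(isinstance(MulBackward0(), Node))  # True as well

This is the mechanism that lets both the codegened classes and custom Function nodes behave as subclasses of the abstract base class without changing their actual inheritance hierarchy.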
18 lines · 345 B · C++
#pragma once

#include <Python.h>

// ${generated_comment}

// Python bindings for automatically generated autograd functions

namespace torch { namespace autograd { namespace generated {

${shard_forward_declare}

inline void initialize_autogenerated_functions(PyObject* module) {
  ${shard_call}
}

}}} // namespace torch::autograd::generated
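As a usage note: the ${shard_forward_declare} and ${shard_call} placeholders are filled in by the code generator with per-shard initializer declarations and calls, and those initializers register the generated node types on the passed-in module. Once that registration and the Node abstract base class are in place, the behavior described in the PR summary can be checked from Python. The snippet below is a small smoke test of that expected behavior, not part of the PR; MyScale is an arbitrary example Function.

import torch

# Codegened case: the grad_fn of a built-in op is one of the generated
# node classes (e.g. MulBackward0) and should be seen as a Node.
x = torch.randn(3, requires_grad=True)
y = x * 2
print(type(y.grad_fn).__name__)
print(isinstance(y.grad_fn, torch.autograd.graph.Node))  # expected: True

# Custom Function case: the node created for a user-defined
# autograd.Function should be recognized as a Node too.
class MyScale(torch.autograd.Function):
    @staticmethod
    def forward(ctx, inp):
        return inp * 3

    @staticmethod
    def backward(ctx, grad_out):
        return grad_out * 3

z = MyScale.apply(x)
print(isinstance(z.grad_fn, torch.autograd.graph.Node))  # expected: True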