Mirror of https://github.com/zebrajr/pytorch.git, synced 2025-12-06 12:20:52 +01:00
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/19093
ghimport-source-id: a82e3dce912a173b42a6a7e35eb1302d9f334e03

Differential Revision: D14865520
Pulled By: li-roy
fbshipit-source-id: b1a8bf32f87920ce8d82f990d670477bc79d0ca7
44 lines · 1.2 KiB · C++
#pragma once

#include <torch/csrc/autograd/function.h>
#include <torch/csrc/autograd/variable.h>

#include <ATen/TensorGeometry.h>
#include <ATen/core/DeprecatedTypeProperties.h>
#include <c10/util/Optional.h>

#include <cstdint>
#include <memory>

namespace torch { namespace autograd {

// Backward node recorded for a tensor copy; src_type and src_device remember
// the source's type and device so the incoming gradient can be routed back
// to them.
struct CopyBackwards : public Function {
  variable_list apply(variable_list&& grads) override;

  at::DeprecatedTypeProperties *src_type = nullptr; // initialized for safety.
  at::Device src_device = at::kCPU;
};

// Performs grad[idx] = fn(grad[idx]), but out-of-place. The slicing operation
// grad[idx] is defined by the relative sizes, strides, and offset of base and
// view.
// When an in-place operation is done on a differentiable view, the base's
// grad_fn is updated to become a `CopySlices` wrapping the backward of the
// in-place operation.
// See NOTE [ Autograd View Variables ].
struct CopySlices : public Function {
  CopySlices(
      const Variable& base_var,
      at::TensorGeometry view_,
      std::shared_ptr<Function> fn_);

  variable_list apply(variable_list&& inputs) override;
  void release_variables() override;

  at::TensorGeometry base;
  at::TensorGeometry view;
  std::shared_ptr<Function> fn;
};

}} // namespace torch::autograd
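
To make the comment above concrete, here is a minimal libtorch sketch (illustrative only; it assumes the C++ tensor API from <torch/torch.h>, and the printed names may be namespace-qualified) of how these nodes show up in a user program: an in-place op on a differentiable view rebases the base's grad_fn onto a CopySlices node, and copy_() records a CopyBackwards node on its destination.

#include <torch/torch.h>
#include <iostream>

int main() {
  // CopySlices: mutate a differentiable view in place.
  auto x = torch::randn({4}, torch::requires_grad());
  auto base = x * 1;                          // non-leaf, so in-place ops are allowed
  auto view = base.slice(/*dim=*/0, /*start=*/0, /*end=*/2);
  view.mul_(2);                               // in-place op on the view
  // base's grad_fn has been rebased; its name should mention CopySlices.
  std::cout << base.grad_fn()->name() << "\n";

  // CopyBackwards: copy_ records it on the destination tensor.
  auto dst = torch::zeros({4});
  dst.copy_(x);
  std::cout << dst.grad_fn()->name() << "\n"; // should mention CopyBackwards
  return 0;
}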
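
And a conceptual sketch of the out-of-place update the comment describes, not the actual CopySlices::apply implementation; `copy_slice_out_of_place` and `backward_of_inplace_op` are hypothetical stand-ins for the node and its wrapped `fn`.

#include <torch/torch.h>
#include <ATen/TensorGeometry.h>
#include <functional>

// Sketch: rebuild the view's slice of the base purely from geometry, run the
// wrapped backward on that slice, and write the result back, all on a copy of
// the incoming gradient so the update is out-of-place.
torch::Tensor copy_slice_out_of_place(
    const torch::Tensor& grad,        // gradient w.r.t. the updated base
    const at::TensorGeometry& base,
    const at::TensorGeometry& view,
    const std::function<torch::Tensor(const torch::Tensor&)>& backward_of_inplace_op) {
  auto result = torch::empty_strided(base.sizes(), base.strides(), grad.options());
  result.copy_(grad);
  auto offset = view.storage_offset() - base.storage_offset();
  auto grad_slice = result.as_strided(view.sizes(), view.strides(), offset);
  // grad[idx] = fn(grad[idx])
  grad_slice.copy_(backward_of_inplace_op(grad_slice.clone()));
  return result;
}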