Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/13942

Codemod generated with clangr shard mode, 25 files per diff; motivation: https://github.com/pytorch/pytorch/pull/12407

Reviewed By: smessmer

Differential Revision: D13054770

fbshipit-source-id: a9e86e5dfcb4f7cebf5243e1d359fad064561bed
#include "caffe2/operators/create_scope_op.h"
|
|
|
|
C10_DEFINE_bool(
|
|
caffe2_workspace_stack_debug,
|
|
false,
|
|
"Enable debug checks for CreateScope's workspace stack");
|
|
|
|
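// A minimal sketch (assumed pattern, not part of the original file):
// C10_DEFINE_bool above generates a global named
// FLAGS_caffe2_workspace_stack_debug; other translation units import it
// with C10_DECLARE_bool(caffe2_workspace_stack_debug) and branch on it to
// enable extra checks. The helper name below is hypothetical.
namespace {
void exampleFlagGuardedCheck() {
  if (FLAGS_caffe2_workspace_stack_debug) {
    // Expensive consistency checks on the workspace stack would run here.
  }
}
} // anonymous namespace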
namespace caffe2 {
CAFFE_KNOWN_TYPE(detail::WorkspaceStack);

template <>
bool CreateScopeOp<CPUContext>::RunOnDevice() {
  auto* ws_stack = OperatorBase::Output<detail::WorkspaceStack>(0);
  ws_stack->clear();
  return true;
}

REGISTER_CPU_OPERATOR(CreateScope, CreateScopeOp<CPUContext>);

SHOULD_NOT_DO_GRADIENT(CreateScope);

OPERATOR_SCHEMA(CreateScope).NumInputs(0).NumOutputs(1).SetDoc(R"DOC(
'CreateScope' operator initializes and outputs an empty scope that is used
by the 'Do' operator to store local blobs.
)DOC");

template <>
bool HasScopeOp<CPUContext>::RunOnDevice() {
  const auto& ws_stack = OperatorBase::Input<detail::WorkspaceStack>(0);

  auto* output = Output(0, {1}, at::dtype<bool>());
  bool* output_value = output->template mutable_data<bool>();
  *output_value = !ws_stack.empty();
  return true;
}

REGISTER_CPU_OPERATOR(HasScope, HasScopeOp<CPUContext>);

SHOULD_NOT_DO_GRADIENT(HasScope);

OPERATOR_SCHEMA(HasScope).NumInputs(1).NumOutputs(1).SetDoc(R"DOC(
Checks whether the scope blob has any saved scopes left.
)DOC");

} // namespace caffe2