pytorch/torch/csrc/lazy/backend/backend_interface.cpp
PyTorch MergeBot 32ce06a5ab Revert "[Reland] fix missing-prototypes warnings in torch_cpu (Part 4) (#101949)"
This reverts commit 4f2c007a1b.

Reverted https://github.com/pytorch/pytorch/pull/101949 on behalf of https://github.com/osalpekar due to As noted in @izaitsevfb's comment, we are still seeing linker errors, this time due to `nnc_prepacked_linear_clamp_run` being made a static function. ([comment](https://github.com/pytorch/pytorch/pull/101949#issuecomment-1560226880))
2023-05-23 22:53:47 +00:00

55 lines
1.5 KiB
C++

#include <torch/csrc/lazy/backend/backend_interface.h>
#include <torch/csrc/lazy/core/internal_ops/ltc_ops.h>

#include <atomic>
namespace torch {
namespace lazy {
namespace {
// Process-wide slot holding the single registered lazy-tensor backend.
// Stored as a raw pointer: the registry does not own the interface object,
// so whoever registers it must keep it alive for the process lifetime.
// std::atomic makes registration/lookup safe across threads.
std::atomic<const BackendImplInterface*> backend_impl_registry;
} // namespace
// Returns true iff a lazy-tensor backend has been registered via
// BackendRegistrar (i.e. getBackend() would succeed).
bool hasBackend() {
  return backend_impl_registry.load() != nullptr;
}
// Returns the registered lazy-tensor backend.
// Throws (via TORCH_CHECK) if no backend has been registered yet.
const BackendImplInterface* getBackend() {
  // NOTE: the local is deliberately not named `interface`: Windows SDK
  // headers (combaseapi.h/objbase.h) #define `interface` as `struct`,
  // which breaks MSVC builds when it is used as an identifier.
  auto* backend = backend_impl_registry.load();
  TORCH_CHECK(backend, "Lazy tensor backend not registered.");
  return backend;
}
// Registers `backend_impl_interface` as the process-wide lazy-tensor
// backend. Any previously registered backend is silently replaced, and
// no ownership is taken -- the interface object must outlive all callers
// of getBackend().
BackendRegistrar::BackendRegistrar(
const BackendImplInterface* backend_impl_interface) {
backend_impl_registry.store(backend_impl_interface);
}
// Get IrBuilder from backend. Use TorchScriptIrBuilder by default
// The builder is fetched once on first call and cached in a function-local
// static, so a backend registered *after* the first call here will not be
// reflected. Also requires a backend to be registered by the first call
// (getBackend() throws otherwise).
const IrBuilder* getIrBuilder() {
static const IrBuilder* builder = getBackend()->GetIrBuilder();
return builder;
}
// Materializes backend-resident computation data as an at::Tensor by
// delegating to the registered backend.
// `logical_scalar_type`: optional dtype the caller expects the resulting
// tensor to have (interpretation is up to the backend).
// NOTE(review): `data` is taken as `const BackendDataPtr` by value, which
// copies the handle on every call; the signature must match the header
// declaration (not visible here), so it is left unchanged.
at::Tensor MakeTensorFromComputationData(
const BackendDataPtr data,
c10::optional<at::ScalarType> logical_scalar_type) {
return getBackend()->MakeTensorFromComputationData(data, logical_scalar_type);
}
// Creates a backend-specific LoweringContext seeded with an existing
// post-order node list and emission map (delegates to the registered
// backend). `device` is moved into the backend call; `post_order` is a
// non-owning view whose nodes must outlive lowering.
std::unique_ptr<LoweringContext> LoweringContext::Create(
const std::string& name,
BackendDevice device,
c10::ArrayRef<const Node*> post_order,
Util::EmissionMap emit_status) {
return getBackend()->CreateLoweringContext(
name, std::move(device), post_order, emit_status);
}
// Creates an empty backend-specific LoweringContext for `device`
// (delegates to the registered backend; throws via getBackend() if no
// backend is registered).
std::unique_ptr<LoweringContext> LoweringContext::Create(
const std::string& name,
BackendDevice device) {
return getBackend()->CreateLoweringContext(name, std::move(device));
}
} // namespace lazy
} // namespace torch