pytorch/torch/csrc/lazy/backend/backend_interface.cpp
PyTorch MergeBot 498c34e8e8 Revert " fix missing-prototypes warnings in torch_cpu (Part 4) (#100849)"
This reverts commit c2f28d1c1d.

Reverted https://github.com/pytorch/pytorch/pull/100849 on behalf of https://github.com/izaitsevfb due to fails internal Meta builds, including fbcode and android, see D46009888: ld.lld: error: undefined symbol: nnc_aten_embedding ([comment](https://github.com/pytorch/pytorch/pull/100849#issuecomment-1555105800))
2023-05-19 19:05:15 +00:00

#include <torch/csrc/lazy/backend/backend_interface.h>
#include <torch/csrc/lazy/core/internal_ops/ltc_ops.h>

namespace torch {
namespace lazy {

namespace {
std::atomic<const BackendImplInterface*> backend_impl_registry;
} // namespace

bool hasBackend() {
  return !!backend_impl_registry.load();
}

const BackendImplInterface* getBackend() {
  auto* interface = backend_impl_registry.load();
  TORCH_CHECK(interface, "Lazy tensor backend not registered.");
  return interface;
}

BackendRegistrar::BackendRegistrar(
    const BackendImplInterface* backend_impl_interface) {
  backend_impl_registry.store(backend_impl_interface);
}

// Get IrBuilder from backend. Use TorchScriptIrBuilder by default
const IrBuilder* getIrBuilder() {
  static const IrBuilder* builder = getBackend()->GetIrBuilder();
  return builder;
}

at::Tensor MakeTensorFromComputationData(
    const BackendDataPtr data,
    c10::optional<at::ScalarType> logical_scalar_type) {
  return getBackend()->MakeTensorFromComputationData(
      data, logical_scalar_type);
}

std::unique_ptr<LoweringContext> LoweringContext::Create(
    const std::string& name,
    BackendDevice device,
    c10::ArrayRef<const Node*> post_order,
    Util::EmissionMap emit_status) {
  return getBackend()->CreateLoweringContext(
      name, std::move(device), post_order, emit_status);
}

std::unique_ptr<LoweringContext> LoweringContext::Create(
    const std::string& name,
    BackendDevice device) {
  return getBackend()->CreateLoweringContext(name, std::move(device));
}

} // namespace lazy
} // namespace torch
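
Usage note (not part of the file): the sketch below shows how a backend would plug into the registry defined above. GetMyBackendImpl, RegisterMyBackend, and LazyBackendIsReady are hypothetical names used only for illustration; the factory's definition is out of scope here. It is a minimal sketch of the registration pattern that BackendRegistrar enables, not a definitive in-tree implementation.

// Illustration only; not part of backend_interface.cpp.
#include <torch/csrc/lazy/backend/backend_interface.h>

// Hypothetical factory returning a long-lived object that implements
// torch::lazy::BackendImplInterface; its definition is elided in this sketch.
const torch::lazy::BackendImplInterface* GetMyBackendImpl();

void RegisterMyBackend() {
  // Constructing a BackendRegistrar stores the pointer into the file-local
  // backend_impl_registry atomic. From then on hasBackend() returns true and
  // getBackend()/getIrBuilder() resolve to this implementation.
  static torch::lazy::BackendRegistrar registrar(GetMyBackendImpl());
}

bool LazyBackendIsReady() {
  // hasBackend() is a plain atomic load; unlike getBackend(), it does not
  // TORCH_CHECK, so it is the right call when "no backend yet" is not an error.
  return torch::lazy::hasBackend();
}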