Summary: This is a re-land of https://github.com/pytorch/pytorch/pull/51797 with a fix for the spurious libcuda dependency. The fix limits the scope of the `no-as-needed` linker flag to just `jitbackend_test`.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/52340

Reviewed By: agolynski, iseeyuan

Differential Revision: D26476168

Pulled By: malfet

fbshipit-source-id: f909428af82182b3bffd020ca18cca7a9b5846b6
41 lines · 1.2 KiB · C++
#include <gtest/gtest.h>

#include <torch/csrc/jit/api/module.h>
#include <torch/csrc/jit/backends/backend_detail.h>
#include <torch/torch.h>

// Tests go in torch::jit
namespace torch {
namespace jit {
TEST(BackendTest, ToBackend) {
  Module m("m");
  m.define(R"(
    def forward(self, x, h):
        return self.accum(x, h), self.sub_accum(x, h)

    def accum(self, x, h):
        return x + h

    def sub_accum(self, x, h):
        return x - h
  )");

  std::vector<IValue> inputs;
  inputs.emplace_back(2.0 * torch::ones({}));
  inputs.emplace_back(1.0 * torch::ones({}));
  auto ref = m.forward(inputs).toTuple()->elements();

  c10::Dict<IValue, IValue> compile_spec(StringType::get(), AnyType::get());
  c10::Dict<IValue, IValue> fake_dict(StringType::get(), AnyType::get());
  fake_dict.insert("", "");
  compile_spec.insert("forward", fake_dict);
  auto any_dict_ty = DictType::create(StringType::get(), AnyType::get());
  // lowered module
  auto lm = torch::jit::detail::codegen_backend_module(
      "test_backend", m, compile_spec, any_dict_ty);
  auto res = lm.forward(inputs).toTuple()->elements();
  AT_ASSERT(res[0].toTensor().equal(ref[0].toTensor()));
  AT_ASSERT(res[1].toTensor().equal(ref[1].toTensor()));
}
} // namespace jit
} // namespace torch
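The lowering call above only works because a backend named `test_backend` is registered elsewhere (the `jitbackend_test` library referenced in the summary); `codegen_backend_module` itself just generates the wrapper module that dispatches to that backend. As a rough illustration, below is a minimal sketch of such a registration, assuming the `PyTorchBackendInterface` and `torch::jit::backend` API from `torch/csrc/jit/backends/backend.h`. The class name `TrivialTestBackend` is hypothetical, and the virtual-method signatures follow recent PyTorch, so they may differ at this revision.

// Hypothetical sketch (separate translation unit, not part of the test above).
// Assumes the PyTorchBackendInterface API from torch/csrc/jit/backends/backend.h;
// signatures follow recent PyTorch and may differ at this revision.
#include <torch/csrc/jit/backends/backend.h>

namespace {

class TrivialTestBackend : public torch::jit::PyTorchBackendInterface {
 public:
  // Report the backend as usable on this build.
  bool is_available() override {
    return true;
  }

  // "Compile" by mapping every method name in the compile spec to an opaque
  // string handle; no real lowering happens in this sketch.
  c10::impl::GenericDict compile(
      c10::IValue processed,
      c10::impl::GenericDict method_compile_spec) override {
    auto handles = c10::Dict<std::string, std::string>();
    for (const auto& entry : method_compile_spec) {
      handles.insert(entry.key().toStringRef(), entry.key().toStringRef());
    }
    return c10::impl::toGenericDict(handles);
  }

  // Produce the two outputs the test module expects:
  // accum (x + h) and sub_accum (x - h).
  c10::impl::GenericList execute(
      c10::IValue handle,
      c10::impl::GenericList inputs) override {
    at::Tensor x = inputs.get(0).toTensor();
    at::Tensor h = inputs.get(1).toTensor();
    c10::List<at::Tensor> outputs;
    outputs.push_back(x + h);
    outputs.push_back(x - h);
    return c10::impl::toList(outputs);
  }
};

// Register under the same name the test passes to codegen_backend_module.
static const auto registration =
    torch::jit::backend<TrivialTestBackend>("test_backend");

} // namespace

Note that at this revision the lowering path may also require a matching preprocess step, which the real test library provides; the sketch above only covers the runtime compile/execute side.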