pytorch/test/cpp/jit/test_load_upgraders.cpp
Tugsbayasgalan (Tugsuu) Manlaibaatar b0fdca8855 Bump version number to 7 and compile old operators with old schema (#68358)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/68358

Test Plan: Imported from OSS

Reviewed By: albanD

Differential Revision: D33433730

Pulled By: tugsbayasgalan

fbshipit-source-id: 202c58365bae13195d3545cefcb0da9162b02151
2022-01-05 23:57:22 -08:00


#include <caffe2/serialize/versions.h>
#include <gtest/gtest.h>
#include <torch/csrc/jit/api/module.h>
#include <torch/csrc/jit/operator_upgraders/upgraders.h>
#include <torch/csrc/jit/operator_upgraders/version_map.h>
#include <torch/csrc/jit/serialization/import.h>
#include <test/cpp/jit/test_utils.h>
namespace torch {
namespace jit {
#if ENABLE_UPGRADERS
// Basic tests to check that C++ torch::jit::load
// can populate the upgraders correctly
// TODO (tugsuu) add more tests
TEST(UpgraderLoad, CanPopulateUpgradersGraph) {
Module m("m");
m.define(R"(
def forward(self, x: Tensor):
b = 5
return torch.div(x, b)
)");
  std::stringstream ms;
  m.save(ms);
  auto loaded_m = torch::jit::load(ms);
  auto version_map = get_operator_version_map();
  auto upgraders = dump_upgraders_map();
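  // The version map keys operator names to their upgrader entries; every
  // upgrader it references should be present in the populated upgraders map.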
for (const auto& entry : version_map) {
auto list_of_upgraders_for_op = entry.second;
for (const auto& upgrader_entry : list_of_upgraders_for_op) {
EXPECT_TRUE(
upgraders.find(upgrader_entry.upgrader_name) != upgraders.end());
}
}
  auto test_graph = loaded_m.get_method("forward").graph();
  // The module is saved with the current version (>= 4 for aten::div),
  // so it is still up to date and no upgrader rewrites the call.
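  // check_count with exactly=true asserts the graph contains exactly one
  // aten::div call, i.e. the op was not replaced by an upgrader graph.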
  testing::FileCheck().check_count("aten::div", 1, true)->run(*test_graph);
}
#endif
} // namespace jit
} // namespace torch