Mirror of https://github.com/zebrajr/pytorch.git (synced 2025-12-07 00:21:07 +01:00)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/75201

In this diff:
1. Bump the supported version to 9, which serves as a placeholder for the upcoming version bump to v9 for the flatbuffer format migration.
2. Implement backport from a v9 flatbuffer file to a v8 pickle file.

ghstack-source-id: 153225189
(Note: this ignores all push blocking failures!)

Test Plan:
fb:
```
cd ~/fbsource/fbcode/ && buck test -c fbcode.caffe2_enable_flatbuffer=1 caffe2/test/cpp/jit:jit -- LiteInterpreterTest.BackPortByteCodeModelAllVersions
Parsing buck files: finished in 0.7 sec
Downloaded 0/25 artifacts, 0.00 bytes, 100.0% cache miss (for updated rules)
Building: finished in 20.7 sec (100%) 21783/21783 jobs, 5/21783 updated

cd ~/fbsource/fbcode/ && buck test caffe2/test/cpp/jit:jit -- FlatbufferTest.FlatbufferBackPortTest
Parsing buck files: finished in 0.7 sec
Building: finished in 4.5 sec (100%) 12972/53298 jobs, 0/53298 updated
Total time: 5.3 sec
More details at https://www.internalfb.com/intern/buck/build/b658d597-d358-4293-97cb-28e7612b96e8
BUILD SUCCEEDED
Tpx test run coordinator for Facebook. See https://fburl.com/tpx for details.
Running with tpx session id: 35d5542d-6ee3-4c28-be10-1d822c7a6fef
Trace available for this run at /tmp/tpx-20220308-090347.891303-35d5542d-6ee3-4c28-be10-1d822c7a6fef/trace.log
RemoteExecution session id: reSessionID-35d5542d-6ee3-4c28-be10-1d822c7a6fef-tpx
Started reporting to test run: https://www.internalfb.com/intern/testinfra/testrun/8444249379196000
  ✓ ListingSuccess: caffe2/test/cpp/jit:jit : 490 tests discovered (22.838)
  ✓ Pass: caffe2/test/cpp/jit:jit - FlatbufferTest.FlatbufferBackPortTest (0.289)

Summary
  Pass: 1
  ListingSuccess: 1
If you need help understanding your runs, please follow the wiki: https://fburl.com/posting_in_tpx_users
Finished test run: https://www.internalfb.com/intern/testinfra/testrun/8444249379196000
```

Reviewed By: iseeyuan

Differential Revision: D34702597

fbshipit-source-id: 5c203c29d13360d7934ce6e57557739e7038c05e
(cherry picked from commit 6189e08a2bd968fdab636f77cb6bd73d6c36beb2)
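For reference, the matching Python entry points for the backport path live in `torch.jit.mobile`. A minimal sketch, assuming a build that includes this diff; the file names are hypothetical:
```
from torch.jit.mobile import _backport_for_mobile, _get_model_bytecode_version

# Inspect the bytecode version of a model file produced at v9 (flatbuffer).
# "model_v9.ptl" / "model_v8.ptl" are hypothetical paths.
print(_get_model_bytecode_version("model_v9.ptl"))  # expected: 9

# Backport to a v8 (pickle) file that older lite-interpreter runtimes accept.
_backport_for_mobile("model_v9.ptl", "model_v8.ptl", to_version=8)
assert _get_model_bytecode_version("model_v8.ptl") == 8
```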
98 lines · 3.3 KiB · C++
#include <torch/csrc/python_headers.h>

#include <libshm.h>
#include <cstdlib>

#include <pybind11/detail/common.h>
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/pytypes.h>
#include <pybind11/stl.h>
#include <pybind11/stl_bind.h>

#include <Python.h> // NOLINT
#include <torch/csrc/jit/mobile/flatbuffer_loader.h>
#include <torch/csrc/jit/python/module_python.h>
#include <torch/csrc/jit/python/python_ivalue.h>
#include <torch/csrc/jit/python/python_sugared_value.h>
#include <torch/csrc/jit/serialization/flatbuffer_serializer.h>

namespace py = pybind11;

// Copy the bytes of a Python string into a freshly allocated buffer that
// satisfies FlatBuffers' alignment requirement: the size is rounded up to a
// multiple of FLATBUFFERS_MAX_ALIGNMENT and the storage is aligned to it,
// because the parser reads the payload with aligned loads.
static std::shared_ptr<char> copyStr(const std::string& bytes) {
  size_t size = (bytes.size() / FLATBUFFERS_MAX_ALIGNMENT + 1) *
      FLATBUFFERS_MAX_ALIGNMENT;
#ifdef _WIN32
  std::shared_ptr<char> bytes_copy(
      static_cast<char*>(_aligned_malloc(size, FLATBUFFERS_MAX_ALIGNMENT)),
      _aligned_free);
#else
  std::shared_ptr<char> bytes_copy(
      static_cast<char*>(aligned_alloc(FLATBUFFERS_MAX_ALIGNMENT, size)), free);
#endif
  memcpy(bytes_copy.get(), bytes.data(), bytes.size());
  return bytes_copy;
}

// Entry point for the torch._C_flatbuffer extension module. The module object
// is created with the plain CPython API and the flatbuffer load/save routines
// are then attached via pybind11.
extern "C"
#ifdef _WIN32
__declspec(dllexport)
#endif
PyObject* initModuleFlatbuffer() {
  using namespace torch::jit;
  PyMethodDef m[] = {{nullptr, nullptr, 0, nullptr}}; // NOLINT
  static struct PyModuleDef torchmodule = {
      PyModuleDef_HEAD_INIT,
      "torch._C_flatbuffer",
      nullptr,
      -1,
      m,
  }; // NOLINT
  PyObject* module = PyModule_Create(&torchmodule);
  auto pym = py::handle(module).cast<py::module>();
  // Loaders: from a file path or from an in-memory bytes object. The bytes
  // overloads copy the payload into an aligned buffer first (see copyStr).
  pym.def("_load_mobile_module_from_file", [](const std::string& filename) {
    return torch::jit::load_mobile_module_from_file(filename);
  });
  pym.def("_load_mobile_module_from_bytes", [](const std::string& bytes) {
    auto bytes_copy = copyStr(bytes);
    return torch::jit::parse_and_initialize_mobile_module(
        bytes_copy, bytes.size());
  });
  pym.def("_load_jit_module_from_file", [](const std::string& filename) {
    ExtraFilesMap extra_files = ExtraFilesMap();
    return torch::jit::load_jit_module_from_file(filename, extra_files);
  });
  pym.def("_load_jit_module_from_bytes", [](const std::string& bytes) {
    auto bytes_copy = copyStr(bytes);
    ExtraFilesMap extra_files = ExtraFilesMap();
    return torch::jit::parse_and_initialize_jit_module(
        bytes_copy, bytes.size(), extra_files);
  });
  // Savers: to a file path, or to a Python bytes object built from the
  // flatbuffers::DetachedBuffer that the serializer returns.
  pym.def(
      "_save_mobile_module",
      [](const torch::jit::mobile::Module& module,
         const std::string& filename) {
        return torch::jit::save_mobile_module(module, filename);
      });
  pym.def(
      "_save_jit_module",
      [](const torch::jit::Module& module, const std::string& filename) {
        return torch::jit::save_jit_module(module, filename);
      });
  pym.def(
      "_save_mobile_module_to_bytes",
      [](const torch::jit::mobile::Module& module) {
        auto detached_buffer = torch::jit::save_mobile_module_to_bytes(module);
        return py::bytes(
            reinterpret_cast<char*>(detached_buffer.data()),
            detached_buffer.size());
      });
  pym.def("_save_jit_module_to_bytes", [](const torch::jit::Module& module) {
    auto detached_buffer = torch::jit::save_jit_module_to_bytes(module);
    return py::bytes(
        reinterpret_cast<char*>(detached_buffer.data()),
        detached_buffer.size());
  });
  return module;
}
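For context, a minimal sketch of exercising these bindings from Python. It assumes a build that ships the `torch._C_flatbuffer` module; `scripted._c` (the handle to the underlying `torch::jit::Module`) is an implementation detail, and the file path is hypothetical:
```
import torch
import torch._C_flatbuffer as ff

class AddOne(torch.nn.Module):
    def forward(self, x):
        return x + 1

scripted = torch.jit.script(AddOne())

# Save the scripted module in flatbuffer format, then load it back as a
# mobile (lite interpreter) module. The path is hypothetical.
ff._save_jit_module(scripted._c, "/tmp/add_one.ff")
mobile = ff._load_mobile_module_from_file("/tmp/add_one.ff")

# Round-trip through an in-memory bytes object; the binding copies the
# payload into an aligned buffer (copyStr above) before parsing.
payload = ff._save_mobile_module_to_bytes(mobile)
mobile2 = ff._load_mobile_module_from_bytes(payload)
```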