[CodeClean] Replace std::runtime_error with TORCH_CHECK (#164130)

As the title states.

**Changes**:
- torch/csrc/inductor (Part 1)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/164130
Approved by: https://github.com/albanD, https://github.com/Skylion007
This commit is contained in:
FFFrog 2025-10-14 15:27:42 +08:00 committed by PyTorch MergeBot
parent 09a4187b8e
commit 6f713e25bb
6 changed files with 112 additions and 136 deletions

View File

@ -102,11 +102,10 @@ std::string create_temp_dir() {
} }
#else #else
std::string temp_dir = "/tmp/XXXXXX"; std::string temp_dir = "/tmp/XXXXXX";
if (mkdtemp(temp_dir.data()) == nullptr) { TORCH_CHECK(
throw std::runtime_error( mkdtemp(temp_dir.data()) != nullptr,
std::string("Failed to create temporary directory: ") + "Failed to create temporary directory: ",
c10::utils::str_error(errno)); c10::utils::str_error(errno));
}
return temp_dir; return temp_dir;
#endif #endif
} }
@ -156,9 +155,7 @@ namespace torch::inductor {
namespace { namespace {
const nlohmann::json& load_json_file(const std::string& json_path) { const nlohmann::json& load_json_file(const std::string& json_path) {
if (!file_exists(json_path)) { TORCH_CHECK(file_exists(json_path), "File not found: ", json_path);
throw std::runtime_error("File not found: " + json_path);
}
std::ifstream json_file(json_path); std::ifstream json_file(json_path);
TORCH_CHECK(json_file.is_open()); TORCH_CHECK(json_file.is_open());
@ -415,32 +412,25 @@ std::string compile_so(
get_cpp_compile_command(filename, obj_filenames, linker_flags); get_cpp_compile_command(filename, obj_filenames, linker_flags);
// Run the commands to generate a .so file // Run the commands to generate a .so file
int status = system(compile_cmd.c_str()); TORCH_CHECK(system(compile_cmd.c_str()) == 0, "Failed to compile cpp file.");
if (status != 0) { TORCH_CHECK(system(link_cmd.c_str()) == 0, "Failed to link files.");
throw std::runtime_error("Failed to compile cpp file.");
}
status = system(link_cmd.c_str());
if (status != 0) {
throw std::runtime_error("Failed to link files.");
}
// Move the mmapped weights onto the .so // Move the mmapped weights onto the .so
std::string serialized_weights_path = filename + "_serialized_weights.bin"; std::string serialized_weights_path = filename + "_serialized_weights.bin";
if (file_exists(serialized_weights_path)) { if (file_exists(serialized_weights_path)) {
std::ifstream serialized_weights_file( std::ifstream serialized_weights_file(
serialized_weights_path, std::ios::binary); serialized_weights_path, std::ios::binary);
if (!serialized_weights_file.is_open()) { TORCH_CHECK(
throw std::runtime_error("Failed to open serialized weights file"); serialized_weights_file.is_open(),
} "Failed to open serialized weights file");
std::vector<char> serialized_weights( std::vector<char> serialized_weights(
(std::istreambuf_iterator<char>(serialized_weights_file)), (std::istreambuf_iterator<char>(serialized_weights_file)),
std::istreambuf_iterator<char>()); std::istreambuf_iterator<char>());
serialized_weights_file.close(); serialized_weights_file.close();
std::ofstream output_so_file(output_so, std::ios::binary | std::ios::app); std::ofstream output_so_file(output_so, std::ios::binary | std::ios::app);
if (!output_so_file.is_open()) { TORCH_CHECK(output_so_file.is_open(), "Failed to open output .so file");
throw std::runtime_error("Failed to open output .so file");
}
// Page align the weights // Page align the weights
std::streampos so_size = output_so_file.tellp(); std::streampos so_size = output_so_file.tellp();
std::vector<char> padding(16384 - so_size % 16384, ' '); std::vector<char> padding(16384 - so_size % 16384, ' ');
@ -495,12 +485,11 @@ class RAIIMinizArchive {
public: public:
RAIIMinizArchive(const std::string& zip_path) { RAIIMinizArchive(const std::string& zip_path) {
mz_zip_zero_struct(&_zip_archive); mz_zip_zero_struct(&_zip_archive);
if (!mz_zip_reader_init_file( TORCH_CHECK(
&_zip_archive, normalize_path_separator(zip_path).c_str(), 0)) { mz_zip_reader_init_file(
throw std::runtime_error(fmt::format( &_zip_archive, normalize_path_separator(zip_path).c_str(), 0),
"Failed to initialize zip archive: {}", "Failed to initialize zip archive: ",
mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)))); mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)));
}
} }
RAIIMinizArchive(const RAIIMinizArchive&) = delete; RAIIMinizArchive(const RAIIMinizArchive&) = delete;
RAIIMinizArchive& operator=(const RAIIMinizArchive&) = delete; RAIIMinizArchive& operator=(const RAIIMinizArchive&) = delete;
@ -522,18 +511,18 @@ class RAIIMinizArchive {
// terminator // terminator
const auto zip_filename_len{ const auto zip_filename_len{
mz_zip_reader_get_filename(&_zip_archive, i, nullptr, 0)}; mz_zip_reader_get_filename(&_zip_archive, i, nullptr, 0)};
if (!zip_filename_len) { TORCH_CHECK(
throw std::runtime_error( zip_filename_len, "Failed to read zip filename length at index ", i);
fmt::format("Failed to read zip filename length at index {}", i));
}
// std::string implicitly appends a character for the null terminator // std::string implicitly appends a character for the null terminator
std::string zip_filename(zip_filename_len - 1, '\0'); std::string zip_filename(zip_filename_len - 1, '\0');
if (!mz_zip_reader_get_filename( TORCH_CHECK(
&_zip_archive, i, zip_filename.data(), zip_filename_len)) { mz_zip_reader_get_filename(
throw std::runtime_error( &_zip_archive, i, zip_filename.data(), zip_filename_len),
fmt::format("Failed to read zip filename at index {}", i)); "Failed to read zip filename at index ",
} i);
zip_filenames.emplace_back(zip_filename);
zip_filenames.emplace_back(std::move(zip_filename));
} }
return zip_filenames; return zip_filenames;
@ -551,18 +540,25 @@ class RAIIMinizArchive {
0)) { 0)) {
#ifdef _WIN32 #ifdef _WIN32
DWORD dwErrCode = GetLastError(); DWORD dwErrCode = GetLastError();
throw std::runtime_error(fmt::format( TORCH_CHECK(
"Failed to extract zip file {} to destination file {}, error code: {}, mz_zip error string: {}", false,
"Failed to extract zip file ",
zip_filename, zip_filename,
" to destination file ",
path_dest_filename, path_dest_filename,
", error code: ",
dwErrCode, dwErrCode,
mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)))); " mz_zip error string: ",
mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)));
#else #else
throw std::runtime_error(fmt::format( TORCH_CHECK(
"Failed to extract zip file {} to destination file {}, mz_zip error string: {}", false,
"Failed to extract zip file ",
zip_filename, zip_filename,
" to destination file ",
path_dest_filename, path_dest_filename,
mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)))); ", mz_zip error string: ",
mz_zip_get_error_string(mz_zip_get_last_error(&_zip_archive)));
#endif #endif
} }
} }
@ -578,9 +574,7 @@ std::unordered_map<std::string, std::string> AOTIModelPackageLoader::
// Open the zip archive // Open the zip archive
RAIIMinizArchive zip_archive{model_package_path}; RAIIMinizArchive zip_archive{model_package_path};
auto found_filenames{zip_archive.get_filenames()}; auto found_filenames{zip_archive.get_filenames()};
if (found_filenames.empty()) { TORCH_CHECK(!found_filenames.empty(), "No files found in zip archive.");
throw std::runtime_error("No files found in zip archive.");
}
// Find the file prefix (similar to constructor logic) // Find the file prefix (similar to constructor logic)
std::string file_prefix; std::string file_prefix;
@ -624,15 +618,13 @@ std::unordered_map<std::string, std::string> AOTIModelPackageLoader::
model_names_str += model_name_tmp + "\n"; model_names_str += model_name_tmp + "\n";
} }
throw std::runtime_error( TORCH_CHECK(
"Failed to find a generated cpp file or so file for model '" + "Failed to find a generated cpp file or so file for model '",
model_name + model_name,
"' in the zip archive.\n\n" "' in the zip archive.\n\nAvailable models in the archive:\n",
"Available models in the archive:\n" + model_names_str,
model_names_str + "\n\nTo load a specific model, please provide its name using the `model_name` parameter when calling AOTIModelPackageLoader() or torch._inductor.package.load_package.\n\n",
"\n\n" "The following files were loaded from the archive:\n",
"To load a specific model, please provide its name using the `model_name` parameter when calling AOTIModelPackageLoader() or torch._inductor.package.load_package.\n\n"
"The following files were loaded from the archive:\n" +
found_filenames_str); found_filenames_str);
} }
@ -643,17 +635,15 @@ std::unordered_map<std::string, std::string> AOTIModelPackageLoader::
// Create the parent directory if it doesn't exist // Create the parent directory if it doesn't exist
size_t parent_path_idx = output_path_str.find_last_of(k_separator); size_t parent_path_idx = output_path_str.find_last_of(k_separator);
if (parent_path_idx == std::string::npos) { TORCH_CHECK(
throw std::runtime_error( parent_path_idx != std::string::npos,
"Failed to find parent path in " + output_path_str); "Failed to find parent path in " + output_path_str);
}
std::string parent_path = output_path_str.substr(0, parent_path_idx); std::string parent_path = output_path_str.substr(0, parent_path_idx);
if (!recursive_mkdir(parent_path)) { TORCH_CHECK(
throw std::runtime_error(fmt::format( recursive_mkdir(parent_path),
"Failed to create directory {}: {}", "Failed to create directory " + parent_path,
parent_path, ": ",
c10::utils::str_error(errno))); c10::utils::str_error(errno));
}
LOG(INFO) << "Extract file: " << metadata_filename << " to " LOG(INFO) << "Extract file: " << metadata_filename << " to "
<< output_path_str; << output_path_str;
@ -679,23 +669,19 @@ AOTIModelPackageLoader::AOTIModelPackageLoader(
const size_t num_runners, const size_t num_runners,
const c10::DeviceIndex device_index) { const c10::DeviceIndex device_index) {
if (run_single_threaded) { if (run_single_threaded) {
if (num_runners != 1) { TORCH_CHECK(
throw std::runtime_error( num_runners == 1,
"num_runners must be 1 when run_single_threaded is true"); "num_runners must be 1 when run_single_threaded is true");
}
} else { } else {
if (num_runners < 1) { TORCH_CHECK(
throw std::runtime_error( num_runners >= 1,
"num_runners must be >=1 when run_single_threaded is false"); "num_runners must be >=1 when run_single_threaded is false");
}
} }
// Extract all files within the zipfile to a temporary directory // Extract all files within the zipfile to a temporary directory
RAIIMinizArchive zip_archive{model_package_path}; RAIIMinizArchive zip_archive{model_package_path};
auto found_filenames{zip_archive.get_filenames()}; auto found_filenames{zip_archive.get_filenames()};
if (found_filenames.empty()) { TORCH_CHECK(!found_filenames.empty(), "No files found in zip archive.");
throw std::runtime_error("No files found in zip archive.");
}
// All the paths are prepended with a tmp/ directory. We need to find the // All the paths are prepended with a tmp/ directory. We need to find the
// prefix. // prefix.
@ -758,17 +744,16 @@ AOTIModelPackageLoader::AOTIModelPackageLoader(
// Create the parent directory if it doesn't exist // Create the parent directory if it doesn't exist
size_t parent_path_idx = output_file_path.find_last_of(k_separator); size_t parent_path_idx = output_file_path.find_last_of(k_separator);
if (parent_path_idx == std::string::npos) { TORCH_CHECK(
throw std::runtime_error( parent_path_idx != std::string::npos,
"Failed to find parent path in " + output_file_path); "Failed to find parent path in " + output_file_path);
}
std::string parent_path = output_file_path.substr(0, parent_path_idx); std::string parent_path = output_file_path.substr(0, parent_path_idx);
if (!recursive_mkdir(parent_path)) { TORCH_CHECK(
throw std::runtime_error(fmt::format( recursive_mkdir(parent_path),
"Failed to create directory {}: {}", "Failed to create directory " + parent_path,
parent_path, ": ",
c10::utils::str_error(errno))); c10::utils::str_error(errno));
}
// Extracts file to the temp directory // Extracts file to the temp directory
zip_archive.extract_file(zip_filename_str, output_path_str); zip_archive.extract_file(zip_filename_str, output_path_str);
@ -801,15 +786,14 @@ AOTIModelPackageLoader::AOTIModelPackageLoader(
model_names_str += model_name_tmp + "\n"; model_names_str += model_name_tmp + "\n";
} }
throw std::runtime_error( TORCH_CHECK(
"Failed to find a generated cpp file or so file for model '" + false,
model_name + "Failed to find a generated cpp file or so file for model '",
"' in the zip archive.\n\n" model_name,
"Available models in the archive:\n" + "' in the zip archive.\n\nAvailable models in the archive:\n",
model_names_str + model_names_str,
"\n\n" "\n\nTo load a specific model, please provide its name using the `model_name` parameter when calling AOTIModelPackageLoader() or torch._inductor.package.load_package.\n\n",
"To load a specific model, please provide its name using the `model_name` parameter when calling AOTIModelPackageLoader() or torch._inductor.package.load_package.\n\n" "The following files were loaded from the archive:\n",
"The following files were loaded from the archive:\n" +
found_filenames_str); found_filenames_str);
} }
@ -823,17 +807,15 @@ AOTIModelPackageLoader::AOTIModelPackageLoader(
// Construct the runner depending on the device information // Construct the runner depending on the device information
std::string device_key = metadata_["AOTI_DEVICE_KEY"]; std::string device_key = metadata_["AOTI_DEVICE_KEY"];
TORCH_CHECK(!device_key.empty(), "No device information found.");
if (device_key.empty()) {
throw std::runtime_error("No device information found.");
}
std::unordered_map<std::string, CreateAOTIModelRunnerFunc> std::unordered_map<std::string, CreateAOTIModelRunnerFunc>
registered_aoti_runner = getAOTIModelRunnerRegistry(); registered_aoti_runner = getAOTIModelRunnerRegistry();
if (registered_aoti_runner.find(device_key) == registered_aoti_runner.end()) { TORCH_CHECK(
throw std::runtime_error("Unsupported device key found: " + device_key); registered_aoti_runner.find(device_key) != registered_aoti_runner.end(),
} "Unsupported device key found: ",
device_key);
c10::Device device = c10::Device(device_key); c10::Device device = c10::Device(device_key);
device.set_index(device_index); device.set_index(device_index);
@ -896,7 +878,7 @@ void AOTIModelPackageLoader::load_constants(
if (fqn_to_constant_name.find(it.first) != fqn_to_constant_name.end()) { if (fqn_to_constant_name.find(it.first) != fqn_to_constant_name.end()) {
updated_constants_map.emplace(fqn_to_constant_name[it.first], it.second); updated_constants_map.emplace(fqn_to_constant_name[it.first], it.second);
} else { } else {
throw std::runtime_error("Constant not found: " + it.first); TORCH_CHECK(false, "Constant not found: ", it.first);
} }
} }

View File

@ -29,15 +29,13 @@ AOTIModelContainerRunner::AOTIModelContainerRunner(
const std::string& cubin_dir, const std::string& cubin_dir,
const bool run_single_threaded) { const bool run_single_threaded) {
if (run_single_threaded) { if (run_single_threaded) {
if (num_models != 1) { TORCH_CHECK(
throw std::runtime_error( num_models == 1,
"num_models must be 1 when run_single_threaded is true"); "num_models must be 1 when run_single_threaded is true");
}
} else { } else {
if (num_models < 1) { TORCH_CHECK(
throw std::runtime_error( num_models >= 1,
"num_models must be >=1 when run_single_threaded is false"); "num_models must be >=1 when run_single_threaded is false");
}
} }
model_so_ = std::make_unique<at::DynamicLibrary>(model_so_path.c_str()); model_so_ = std::make_unique<at::DynamicLibrary>(model_so_path.c_str());
TORCH_CHECK(model_so_, "Failed to load model: ", model_so_path); TORCH_CHECK(model_so_, "Failed to load model: ", model_so_path);
@ -86,11 +84,10 @@ AOTIModelContainerRunner::AOTIModelContainerRunner(
? "AOTInductorModelContainerRunSingleThreaded" ? "AOTInductorModelContainerRunSingleThreaded"
: "AOTInductorModelContainerRun"; : "AOTInductorModelContainerRun";
TRY_LOAD_SYMBOL(run_func_, run_func_name) TRY_LOAD_SYMBOL(run_func_, run_func_name)
if (run_func_ == nullptr && run_single_threaded) { TORCH_CHECK(
throw std::runtime_error( run_func_ != nullptr || !run_single_threaded,
"No AOTInductorModelContainerRunSingleThreaded function in .so! To use AOTInductor-compiled model in the single-threaded mode,\ "No AOTInductorModelContainerRunSingleThreaded function in .so! To use AOTInductor-compiled model in the single-threaded mode,\
consider rebuild your model with the latest AOTInductor."); consider rebuild your model with the latest AOTInductor.");
}
TRY_LOAD_SYMBOL( TRY_LOAD_SYMBOL(
free_inactive_constant_buffer_func_, free_inactive_constant_buffer_func_,
@ -366,10 +363,9 @@ void AOTIModelContainerRunner::swap_constant_buffer() {
} }
void AOTIModelContainerRunner::free_inactive_constant_buffer() { void AOTIModelContainerRunner::free_inactive_constant_buffer() {
if (!free_inactive_constant_buffer_func_) { TORCH_CHECK(
throw std::runtime_error( free_inactive_constant_buffer_func_ != nullptr,
"No free_inactive_constant_buffer in .so! Consider rebuild your model with the latest AOTInductor."); "No free_inactive_constant_buffer in .so! Consider rebuild your model with the latest AOTInductor.");
}
AOTI_RUNTIME_ERROR_CODE_CHECK( AOTI_RUNTIME_ERROR_CODE_CHECK(
free_inactive_constant_buffer_func_(container_handle_)); free_inactive_constant_buffer_func_(container_handle_));
} }

View File

@ -25,9 +25,8 @@ std::unique_ptr<AOTIModelContainerRunner> create_aoti_runner_cpu(
const std::string& device_str, const std::string& device_str,
const std::string& cubin_dir, const std::string& cubin_dir,
const bool run_single_threaded) { const bool run_single_threaded) {
if (device_str != "cpu") { TORCH_CHECK(
throw std::runtime_error("Incorrect device passed to aoti_runner_cpu"); device_str == "cpu", "Incorrect device passed to aoti_runner_cpu");
}
return std::make_unique<AOTIModelContainerRunnerCpu>( return std::make_unique<AOTIModelContainerRunnerCpu>(
model_so_path, num_models, run_single_threaded); model_so_path, num_models, run_single_threaded);
} }

View File

@ -23,9 +23,8 @@ std::unique_ptr<AOTIModelContainerRunner> create_aoti_runner_mps(
const std::string& device_str, const std::string& device_str,
const std::string& cubin_dir, const std::string& cubin_dir,
const bool run_single_threaded) { const bool run_single_threaded) {
if (device_str != "mps") { TORCH_CHECK(
throw std::runtime_error("Incorrect device passed to aoti_runner_mps"); device_str == "mps", "Incorrect device passed to aoti_runner_mps");
}
return std::make_unique<AOTIModelContainerRunnerMps>( return std::make_unique<AOTIModelContainerRunnerMps>(
model_so_path, num_models, run_single_threaded); model_so_path, num_models, run_single_threaded);
} }

View File

@ -77,11 +77,11 @@ void convert_handles_to_inputs(
template <typename T> template <typename T>
void assert_numel(const ArrayRefTensor<T>& tensor, uint64_t numel) { void assert_numel(const ArrayRefTensor<T>& tensor, uint64_t numel) {
if (tensor.numel() != numel) { TORCH_CHECK(
std::stringstream err; tensor.numel() == numel,
err << "incorrect numel for input tensor. expected " << numel << ", got " "incorrect numel for input tensor. expected ",
<< tensor.numel(); numel,
throw std::runtime_error(err.str()); ", got ",
} tensor.numel());
} }
} // namespace torch::aot_inductor } // namespace torch::aot_inductor

View File

@ -657,8 +657,8 @@ inline at::vec::Vectorized<float> vec_shuffle_down(
case 4: case 4:
return vec_t(_mm256_permute2f128_ps(x, x, SHUFFLE_MASK(1, 1, 1, 1))); return vec_t(_mm256_permute2f128_ps(x, x, SHUFFLE_MASK(1, 1, 1, 1)));
} }
throw std::runtime_error(
"Unhandled vec_shuffle_down value " + std::to_string(n)); TORCH_CHECK(false, "Unhandled vec_shuffle_down value ", n);
} }
#endif #endif
@ -682,8 +682,8 @@ inline at::vec::Vectorized<float> vec_shuffle_down(
return vec_t(_mm512_permutexvar_ps( return vec_t(_mm512_permutexvar_ps(
_mm512_set_epi32(8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8), x)); _mm512_set_epi32(8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8), x));
} }
throw std::runtime_error(
"Unhandled vec_shuffle_down value " + std::to_string(n)); TORCH_CHECK(false, "Unhandled vec_shuffle_down value ", n);
} }
#endif #endif