From f810e98143b7b1fe3436d5315101b22aa8438775 Mon Sep 17 00:00:00 2001
From: Justin Chu
Date: Mon, 16 Jun 2025 08:40:44 +0000
Subject: [PATCH] [ONNX] Update default opset to 18 (#156023)

Update default opset for the torchscript exporter to 18 to match the
dynamo exporter, because support was actually added and tested in
https://github.com/pytorch/pytorch/pull/118828. In the next version we
should plan to update to opset 21 or higher. This change also removes
the hard limit on the torchscript exporter for more flexibility. (A
short usage sketch of the resulting behavior follows the diff below.)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/156023
Approved by: https://github.com/Skylion007
---
 test/onnx/onnx_test_common.py            | 6 ------
 torch/csrc/jit/serialization/export.cpp  | 7 +++++--
 torch/onnx/_constants.py                 | 5 ++---
 torch/onnx/_globals.py                   | 5 -----
 torch/onnx/_internal/exporter/_compat.py | 2 +-
 torch/onnx/utils.py                      | 7 ++-----
 6 files changed, 10 insertions(+), 22 deletions(-)

diff --git a/test/onnx/onnx_test_common.py b/test/onnx/onnx_test_common.py
index a138d023c5f..ef0c3ba3ebc 100644
--- a/test/onnx/onnx_test_common.py
+++ b/test/onnx/onnx_test_common.py
@@ -236,12 +236,6 @@ MIN_ONNX_OPSET_VERSION = 9
 MAX_ONNX_OPSET_VERSION = _constants.ONNX_TORCHSCRIPT_EXPORTER_MAX_OPSET
 TESTED_OPSETS = range(MIN_ONNX_OPSET_VERSION, MAX_ONNX_OPSET_VERSION + 1)
 
-# The min onnx opset version to test for
-FX_MIN_ONNX_OPSET_VERSION = 18
-# The max onnx opset version to test for
-FX_MAX_ONNX_OPSET_VERSION = 18
-FX_TESTED_OPSETS = range(FX_MIN_ONNX_OPSET_VERSION, FX_MAX_ONNX_OPSET_VERSION + 1)
-
 BOOL_TYPES = (torch.bool,)
 
 INT_TYPES = (
diff --git a/torch/csrc/jit/serialization/export.cpp b/torch/csrc/jit/serialization/export.cpp
index 527e8e9cee4..1ed0a461e85 100644
--- a/torch/csrc/jit/serialization/export.cpp
+++ b/torch/csrc/jit/serialization/export.cpp
@@ -87,8 +87,8 @@ namespace {
 namespace onnx_torch = ::torch::onnx;
 namespace onnx = ::ONNX_NAMESPACE;
 
-const static int kInvalidOpsetVersion = -1;
-const static int kMainOpsetVersion = 20;
+constexpr int kInvalidOpsetVersion = -1;
+constexpr int kMainOpsetVersion = 23;
 // Based on OP_SET_ID_VERSION_MAP in
 // https://github.com/onnx/onnx/blob/master/onnx/helper.py.
 constexpr static std::array
@@ -114,6 +114,9 @@ constexpr static std::array
     8, // opset 18
     9, // opset 19
     9, // opset 20
+    10, // opset 21
+    10, // opset 22
+    11, // opset 23
 };
 
 std::string getNodeStackTraceString(const Node* n) {
diff --git a/torch/onnx/_constants.py b/torch/onnx/_constants.py
index 6c91b245ed7..b3c386b701d 100644
--- a/torch/onnx/_constants.py
+++ b/torch/onnx/_constants.py
@@ -4,10 +4,9 @@ ONNX_ARCHIVE_MODEL_PROTO_NAME = "__MODEL_PROTO"
 
 ONNX_BASE_OPSET = 9
 ONNX_MIN_OPSET = 7
-ONNX_MAX_OPSET = 20
+ONNX_MAX_OPSET = 23
 ONNX_TORCHSCRIPT_EXPORTER_MAX_OPSET = 20
-# ONNX_DEFAULT_OPSET generated by tools/onnx/update_default_opset_version.py
-ONNX_DEFAULT_OPSET = 17
+ONNX_DEFAULT_OPSET = 18
 ONNX_CONSTANT_FOLDING_MIN_OPSET = 9
 
 PYTORCH_GITHUB_ISSUES_URL = "https://github.com/pytorch/pytorch/issues"
diff --git a/torch/onnx/_globals.py b/torch/onnx/_globals.py
index f3dd273386f..55d0550324e 100644
--- a/torch/onnx/_globals.py
+++ b/torch/onnx/_globals.py
@@ -54,11 +54,6 @@ class _InternalGlobals:
 
     @export_onnx_opset_version.setter
     def export_onnx_opset_version(self, value: int):
-        supported_versions = range(
-            _constants.ONNX_MIN_OPSET, _constants.ONNX_MAX_OPSET + 1
-        )
-        if value not in supported_versions:
-            raise ValueError(f"Unsupported ONNX opset version: {value}")
         self._export_onnx_opset_version = value
 
     @property
diff --git a/torch/onnx/_internal/exporter/_compat.py b/torch/onnx/_internal/exporter/_compat.py
index b570b20bd02..96e652eb59a 100644
--- a/torch/onnx/_internal/exporter/_compat.py
+++ b/torch/onnx/_internal/exporter/_compat.py
@@ -159,7 +159,7 @@ def export_compat(
             export_params=export_params,
             input_names=input_names,
             output_names=output_names,
-            opset_version=17,  # TODO(justinchuby): Hard coded to 17 for now
+            opset_version=opset_version,
             dynamic_axes=dynamic_axes,
             keep_initializers_as_inputs=keep_initializers_as_inputs,
         )
diff --git a/torch/onnx/utils.py b/torch/onnx/utils.py
index cce74561010..ec08090a595 100644
--- a/torch/onnx/utils.py
+++ b/torch/onnx/utils.py
@@ -353,9 +353,9 @@ def export(
             Models exported this way are probably runnable only by Caffe2.
 
-        opset_version (int, default 17): The version of the
+        opset_version (int, default 18): The version of the
             `default (ai.onnx) opset `_
-            to target. Must be >= 7 and <= 17.
+            to target. Must be >= 7.
         do_constant_folding: Apply the constant-folding optimization.
             Constant-folding will replace some of the ops that have all constant
             inputs with pre-computed constant nodes.
@@ -1393,10 +1393,7 @@ def _export(
     if opset_version is None:
         opset_version = _constants.ONNX_DEFAULT_OPSET
 
-    # torch.onnx.export does not support opset versions >=18
     if opset_version > _constants.ONNX_TORCHSCRIPT_EXPORTER_MAX_OPSET:
-        # We do not want to fail because we should still allow users to create
-        # custom symbolic functions for opset>17
        warnings.warn(
             f"Exporting to ONNX opset version {opset_version} is not supported. "
             f"by 'torch.onnx.export()'. "
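Not part of the patch: a minimal usage sketch of the behavior this change produces, assuming a PyTorch build that includes it. The toy nn.Linear model and the output file names are placeholders, and dynamo=False is passed only to pin the TorchScript exporter path that this PR touches.

    import torch

    # A toy model; any exportable module would do.
    model = torch.nn.Linear(4, 2)
    args = (torch.randn(1, 4),)

    # With this patch, omitting opset_version targets opset 18
    # (ONNX_DEFAULT_OPSET), matching the dynamo exporter's default.
    torch.onnx.export(model, args, "linear_opset18.onnx", dynamo=False)

    # The hard ceiling is also gone: requesting an opset above
    # ONNX_TORCHSCRIPT_EXPORTER_MAX_OPSET (20) now emits a warning instead of
    # raising ValueError, leaving room for user-registered symbolic functions
    # that target newer opsets. Whether the export succeeds depends on the ops involved.
    torch.onnx.export(
        model, args, "linear_opset21.onnx", opset_version=21, dynamo=False
    )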