Revert "Add warning for weights_only (#129239)"
This reverts commit 381ce0821c. Reverted https://github.com/pytorch/pytorch/pull/129239 on behalf of https://github.com/huydhn due to: "Sorry for reverting your change, but I am seeing some test_nn failures on ROCm from 381ce0821c; trying to revert this to see if trunk recovers" ([comment](https://github.com/pytorch/pytorch/pull/129239#issuecomment-2189812903))
This commit is contained in:
parent 7cf454ec52
commit b1f486aff9
test/test_nn.py
@@ -1802,35 +1802,26 @@ tensor(..., device='meta', size=(1,), requires_grad=True)""")
         m = nn.ParameterList(map(nn.Parameter, [torch.rand(2), torch.rand(2)]))
         with warnings.catch_warnings(record=True) as w:
             m = pickle.loads(pickle.dumps(m))
-            # warning from torch.load call in _load_from_bytes
-            num_warnings = 2 if torch._dynamo.is_compiling() else 1
-            self.assertTrue(len(w) == num_warnings)
-            self.assertEqual(w[0].category, FutureWarning)
+            self.assertTrue(len(w) == 0)
 
         # Test whether loading from older checkpoints works without triggering warnings
         m = nn.ParameterList(map(nn.Parameter, [torch.rand(2), torch.rand(2)]))
         del m._forward_pre_hooks, m._state_dict_hooks, m._load_state_dict_pre_hooks, m._non_persistent_buffers_set
         with warnings.catch_warnings(record=True) as w:
             m = pickle.loads(pickle.dumps(m))
-            # warning from torch.load call in _load_from_bytes
-            self.assertTrue(len(w) == 1)
-            self.assertEqual(w[0].category, FutureWarning)
+            self.assertTrue(len(w) == 0)
 
         m = nn.ParameterDict({"a": nn.Parameter(torch.rand(2)), "b": nn.Parameter(torch.rand(2))})
         with warnings.catch_warnings(record=True) as w:
             m = pickle.loads(pickle.dumps(m))
-            # warning from torch.load call in _load_from_bytes
-            self.assertTrue(len(w) == 1)
-            self.assertEqual(w[0].category, FutureWarning)
+            self.assertTrue(len(w) == 0)
 
         # Test whether loading from older checkpoints works without triggering warnings
         m = nn.ParameterDict({"a": nn.Parameter(torch.rand(2)), "b": nn.Parameter(torch.rand(2))})
         del m._forward_pre_hooks, m._state_dict_hooks, m._load_state_dict_pre_hooks, m._non_persistent_buffers_set
         with warnings.catch_warnings(record=True) as w:
             m = pickle.loads(pickle.dumps(m))
-            # warning from torch.load call in _load_from_bytes
-            self.assertTrue(len(w) == 1)
-            self.assertEqual(w[0].category, FutureWarning)
+            self.assertTrue(len(w) == 0)
 
     def test_weight_norm_pickle(self):
         m = torch.nn.utils.weight_norm(nn.Linear(5, 7))
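The hunk above leans on a standard testing pattern: warnings.catch_warnings(record=True) collects every warning raised inside the block so the test can assert on the count and category afterwards. A minimal standalone sketch of the round trip these tests exercise (the note about _load_from_bytes comes from the removed comment lines above):

import pickle
import warnings

import torch
import torch.nn as nn

# Round-trip a container module through pickle while recording warnings.
# Pickling a module goes through torch.save/torch.load internally (the
# torch.load call in _load_from_bytes named in the removed comment), which
# is where the reverted FutureWarning used to fire.
m = nn.ParameterList(map(nn.Parameter, [torch.rand(2), torch.rand(2)]))
with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")  # record everything, even duplicates
    m = pickle.loads(pickle.dumps(m))

# After the revert this list is expected to be empty.
print([f"{x.category.__name__}: {x.message}" for x in w])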
test/test_serialization.py
@@ -837,6 +837,7 @@ class TestBothSerialization(TestCase):
                 test(f_new, f_old)
+            self.assertTrue(len(w) == 0, msg=f"Expected no warnings but got {[str(x) for x in w]}")
 
 
 class TestOldSerialization(TestCase, SerializationMixin):
     # unique_key is necessary because on Python 2.7, if a warning passed to
     # the warning module is the same, it is not raised again.
@@ -864,8 +865,7 @@ class TestOldSerialization(TestCase, SerializationMixin):
             loaded = torch.load(checkpoint)
             self.assertTrue(isinstance(loaded, module.Net))
             if can_retrieve_source:
-                self.assertEqual(len(w), 1)
-                self.assertEqual(w[0].category, FutureWarning)
+                self.assertEqual(len(w), 0)
 
         # Replace the module with different source
         fname = get_file_path_2(os.path.dirname(os.path.dirname(torch.__file__)), 'torch', 'testing',
@@ -876,8 +876,8 @@ class TestOldSerialization(TestCase, SerializationMixin):
             loaded = torch.load(checkpoint)
             self.assertTrue(isinstance(loaded, module.Net))
             if can_retrieve_source:
-                self.assertEqual(len(w), 2)
-                self.assertTrue(w[1].category, 'SourceChangeWarning')
+                self.assertEqual(len(w), 1)
+                self.assertTrue(w[0].category, 'SourceChangeWarning')
 
     def test_serialization_container(self):
         self._test_serialization_container('file', tempfile.NamedTemporaryFile)
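For context on the two hunks above: before the revert, torch.load emitted the weights_only FutureWarning ahead of any other warning, so these tests expected one extra warning and the SourceChangeWarning sat at index 1. The revert restores the original expectations: a count of 0 where only the FutureWarning had been expected, a count of 1 for the changed-source case, and the SourceChangeWarning back at w[0].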
torch/serialization.py
@@ -987,7 +987,7 @@ def load(
     map_location: MAP_LOCATION = None,
     pickle_module: Any = None,
     *,
-    weights_only: Optional[bool] = None,
+    weights_only: bool = False,
     mmap: Optional[bool] = None,
     **pickle_load_args: Any,
 ) -> Any:
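This signature change is the heart of the revert: #129239 had made weights_only a tri-state Optional[bool] so that load could distinguish "caller left the default" (None) from an explicit False, and warn only in the former case. A minimal sketch of that sentinel pattern, with illustrative names and independent of the real torch.load:

import warnings
from typing import Optional

def load(path: str, *, weights_only: Optional[bool] = None):
    # None is a sentinel meaning "the caller made no choice": keep the old
    # behaviour (False) but warn that the default will flip to True later.
    if weights_only is None:
        weights_only = False
        warnings.warn(
            "the default value of `weights_only` will change to True",
            FutureWarning,
        )
    # An explicit True or False from the caller is honoured silently.
    print(f"loading {path} with weights_only={weights_only}")

Reverting to weights_only: bool = False collapses the tri-state back to a plain default, so the information needed to warn selectively disappears with it.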
@@ -1097,11 +1097,6 @@ def load(
         " with `weights_only` please check the recommended steps in the following error message."
         " WeightsUnpickler error: "
     )
-    if weights_only is None:
-        weights_only, warn_weights_only = False, True
-    else:
-        warn_weights_only = False
-
     # Add ability to force safe only weight loads via environment variable
     if os.getenv("TORCH_FORCE_WEIGHTS_ONLY_LOAD", "0").lower() in [
         "1",
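Independently of the per-call default, the context lines above keep the TORCH_FORCE_WEIGHTS_ONLY_LOAD escape hatch: an environment variable that forces safe weight loading process-wide. A hedged sketch of its use ("1" is the only truthy spelling visible in the truncated diff, and the file path is hypothetical):

import os

# Force the weights-only unpickler for every torch.load in this process,
# even at call sites that do not pass weights_only=True.
os.environ["TORCH_FORCE_WEIGHTS_ONLY_LOAD"] = "1"

import torch

state = torch.load("checkpoint.pt")  # hypothetical path; loaded as weights-only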
@@ -1118,20 +1113,6 @@ def load(
             )
     else:
         if pickle_module is None:
-            if warn_weights_only:
-                warnings.warn(
-                    "You are using `torch.load` with `weights_only=False` (the current default value), which uses "
-                    "the default pickle module implicitly. It is possible to construct malicious pickle data "
-                    "which will execute arbitrary code during unpickling (See "
-                    "https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). "
-                    "In a future release, the default value for `weights_only` will be flipped to `True`. This "
-                    "limits the functions that could be executed during unpickling. Arbitrary objects will no "
-                    "longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the "
-                    "user via `torch.serialization.add_safe_globals`. We recommend you start setting "
-                    "`weights_only=True` for any use case where you don't have full control of the loaded file. "
-                    "Please open an issue on GitHub for any issues related to this experimental feature.",
-                    FutureWarning,
-                )
             pickle_module = pickle
 
     # make flipping default BC-compatible
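The removed warning text itself names the migration path: pass weights_only=True, and allowlist any trusted custom classes via torch.serialization.add_safe_globals. A short usage sketch under those assumptions (the paths and the TrainConfig class are hypothetical):

import torch

# Plain tensors and state_dicts load fine under the restricted unpickler.
state = torch.load("model_state.pt", weights_only=True)  # hypothetical path

# A trusted checkpoint that pickles a custom class must be allowlisted,
# otherwise the weights-only unpickler rejects it with an UnpicklingError.
class TrainConfig:  # hypothetical stand-in for whatever the file pickles
    lr: float = 0.1

torch.serialization.add_safe_globals([TrainConfig])
ckpt = torch.load("full_checkpoint.pt", weights_only=True)  # hypothetical path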