[BC breaking] Remove check_sparse_nnz argument of gradcheck (#115658)

As in title per deprecation plan.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/115658
Approved by: https://github.com/cpuhrsch, https://github.com/soulitzer
This commit is contained in:
Pearu Peterson 2023-12-13 16:32:48 +02:00 committed by PyTorch MergeBot
parent 310f6ab11a
commit 193f87857e
2 changed files with 0 additions and 76 deletions

View File

@@ -162,58 +162,6 @@ class TestSparseLegacyAndDeprecation(TestCase):
# Check warn-once:
self.assertEqual(len(cm.warnings), 1)
@parametrize('fast_mode', (True, False))
def test_gradcheck_check_sparse_nnz(self, fast_mode):
"""Tests for deprecated check_sparse_nnz keyword argument of gradcheck.
Deprecation steps:
2.1: Specification of check_sparse_nnz triggers a warning.
2.2: Specification of check_sparse_nnz triggers an
exception. Remove all check_sparse_nnz usages from
gradcheck and delete this test.
"""
def fn(x, masked_grad):
return x.to_dense(masked_grad=masked_grad)
def test(x, masked_grad, masked, check_sparse_nnz):
x = x.detach().clone().requires_grad_()
torch.autograd.gradcheck(fn, (x, masked_grad), masked=masked, check_sparse_nnz=check_sparse_nnz, fast_mode=fast_mode)
x = torch.tensor([[0, 2], [3, 4]], dtype=torch.float64).to_sparse()
for masked_grad, masked, check_sparse_nnz in itertools.product(*[(True, False, None)] * 3):
effective_masked_grad = True if masked_grad is None else masked_grad
effective_check_sparse_nnz = False if check_sparse_nnz is None else check_sparse_nnz
# For BC, the effective masked depends on the value of specified check_sparse_nnz:
effective_masked = (check_sparse_nnz if check_sparse_nnz is not None else False) if masked is None else masked
warn_using_check_sparse_nnz = self.assertWarns(
UserWarning,
msg=('Backwards compatibility: check_sparse_nnz is deprecated, it will be removed in a future version of PyTorch.'
f' Use masked={effective_check_sparse_nnz} instead.'))
raise_on_non_equal_masked_and_check_sparse_nnz = self.assertRaisesRegex(
ValueError,
f"Expected specified check_sparse_nnz [(]={effective_check_sparse_nnz}[)]"
f" to be equal to masked [(]={effective_masked}[)]")
raise_jacobian_mismatch = self.assertRaisesRegex(RuntimeError, "Jacobian mismatch for output 0 with respect to input 0")
def run_test():
if effective_masked_grad != effective_masked and not fast_mode:
with raise_jacobian_mismatch:
test(x, masked_grad, masked, check_sparse_nnz)
else:
test(x, masked_grad, masked, check_sparse_nnz)
if masked != check_sparse_nnz and None not in {masked, check_sparse_nnz}:
# the specified masked and check_sparse_nnz must match
with warn_using_check_sparse_nnz:
with raise_on_non_equal_masked_and_check_sparse_nnz:
test(x, masked_grad, masked, check_sparse_nnz)
elif check_sparse_nnz is not None:
with warn_using_check_sparse_nnz:
run_test()
else:
self.assertNotWarn(run_test)
class TestSparseBase(TestCase):
def run(self, result=None):

View File

@@ -1951,7 +1951,6 @@ def gradcheck(
atol: float = 1e-5,
rtol: float = 1e-3,
raise_exception: bool = True,
check_sparse_nnz: Optional[bool] = None,
nondet_tol: float = 0.0,
check_undefined_grad: bool = True,
check_grad_dtypes: bool = False,
@@ -2006,12 +2005,6 @@ def gradcheck(
raise_exception (bool, optional): indicating whether to raise an exception if
the check fails. The exception gives more information about the
exact nature of the failure. This is helpful when debugging gradchecks.
check_sparse_nnz (bool, optional): if ``True``, gradcheck allows
for SparseTensor input, and for any SparseTensor inputs,
gradcheck will perform its check at ``nnz`` positions only.
The ``check_sparse_nnz`` argument is deprecated, use the
``masked`` argument instead. If ``check_sparse_nnz != masked``, an
exception is raised.
nondet_tol (float, optional): tolerance for non-determinism. When running
identical inputs through the differentiation, the results must either match
exactly (default, 0.0) or be within this tolerance.
@@ -2035,22 +2028,6 @@ def gradcheck(
``True`` if all differences satisfy allclose condition
"""
if check_sparse_nnz is None:
if masked is None:
check_sparse_nnz = masked = False
else:
check_sparse_nnz = masked
else:
warnings.warn(
"Backwards compatibility: check_sparse_nnz is deprecated, it will be removed in a future version of PyTorch."
f" Use masked={check_sparse_nnz} instead."
)
if masked is None:
masked = check_sparse_nnz
elif check_sparse_nnz != masked:
raise ValueError(
f"Expected specified check_sparse_nnz (={check_sparse_nnz}) to be equal to masked (={masked})."
)
assert (
check_forward_ad or check_backward_ad
), "Expected at least one of check_forward_ad or check_backward_ad to be True"
@@ -2062,7 +2039,6 @@ def gradcheck(
), "Setting check_batched_forward_grad=True requires check_forward_ad to be True"
args = locals().copy()
args.pop("raise_exception")
args.pop("check_sparse_nnz")
if not raise_exception:
try:
return _gradcheck_helper(**args)