[CUDA] Actually bump tolerances for test_grad_pca_lowrank (#130770)

Fixes the change in #129902 so that it actually bumps the tolerances for `pca_lowrank` rather than `svd_lowrank`; thanks @ptrblck for the catch.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/130770
Approved by: https://github.com/Skylion007
This commit is contained in:
eqy 2024-07-16 00:41:07 +00:00 committed by PyTorch MergeBot
parent 80236dca90
commit 5e617d7ef5

View File

@ -17973,8 +17973,6 @@ op_db: List[OpInfo] = [
DecorateInfo(toleranceOverride({torch.float32: tol(atol=1e-03, rtol=1e-03),
torch.complex64: tol(atol=1e-02, rtol=1e-02)}),
'TestCommon', 'test_noncontiguous_samples'),
DecorateInfo(toleranceOverride({torch.float32: tol(atol=1e-05, rtol=5e-05)}),
'TestOperators', 'test_grad'),
# FIXME This should be the following, but the toleranceOverride does not seem to do anything!
# DecorateInfo(toleranceOverride({torch.complex128: tol(atol=1e-04, rtol=1e-04)}),
# 'TestFwdGradients', 'test_fn_fwgrad_bwgrad'),
@ -18016,6 +18014,8 @@ op_db: List[OpInfo] = [
DecorateInfo(toleranceOverride({torch.float32: tol(atol=1e-03, rtol=1e-03),
torch.complex64: tol(atol=4e-02, rtol=4e-02)}),
'TestCommon', 'test_noncontiguous_samples'),
DecorateInfo(toleranceOverride({torch.float32: tol(atol=1e-05, rtol=5e-05)}),
'TestOperators', 'test_grad'),
# FIXME This should be the following, but the toleranceOverride does not seem to do anything!
# DecorateInfo(toleranceOverride({torch.complex128: tol(atol=1e-04, rtol=1e-04)}),
# 'TestFwdGradients', 'test_fn_fwgrad_bwgrad'),