Autograd attaches logging hooks only at DEBUG level (#116522)

Pull Request resolved: https://github.com/pytorch/pytorch/pull/116522
Approved by: https://github.com/albanD
soulitzer, 2023-12-28 17:44:45 -05:00, committed by PyTorch MergeBot
parent b18d8d4595
commit 77d979f748
2 changed files with 3 additions and 3 deletions

@@ -7,7 +7,7 @@ from torch.testing._internal.logging_utils import LoggingTestCase, make_logging_test
 class TestAutogradLogging(LoggingTestCase):
-    @make_logging_test(autograd=logging.INFO)
+    @make_logging_test(autograd=logging.DEBUG)
     def test_logging(self, records):
         a = torch.rand(10, requires_grad=True)
         b = a.mul(2).div(3).sum()

@@ -660,7 +660,7 @@ def _register_logging_hooks_on_whole_graph(t_outputs: List[torch.Tensor]):
     def prehook(grad_output):
         node = torch._C._current_autograd_node()
         log_str = f"Executing: {node} with grad_output: {grad_output}"
-        log.info(log_str)
+        log.debug(log_str)
     handles = []
     for node in iter_graph(grad_fns):
@@ -674,7 +674,7 @@ def _register_logging_hooks_on_whole_graph(t_outputs: List[torch.Tensor]):
 def _engine_run_backward(t_outputs, *args, **kwargs):
-    attach_logging_hooks = log.getEffectiveLevel() <= logging.INFO
+    attach_logging_hooks = log.getEffectiveLevel() <= logging.DEBUG
     if attach_logging_hooks:
         unregister_hooks = _register_logging_hooks_on_whole_graph(t_outputs)
     try:
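
A minimal sketch of the change from the caller's side, assuming the autograd module logger sits under the standard "torch.autograd" logging namespace and that no other handler configuration interferes (neither detail is shown in this diff): raising that logger to DEBUG re-enables the per-node logging hooks, while the previous INFO threshold no longer does.

# Minimal sketch (not part of this PR): surfacing autograd execution logs
# after this change. Assumes the autograd module logger lives under the
# standard "torch.autograd" logging namespace; the exact logger name is
# not shown in the diff above.
import logging

import torch

logging.basicConfig()  # ensure a root handler exists so records are printed
# INFO is no longer enough; hooks are attached only at DEBUG and below.
logging.getLogger("torch.autograd").setLevel(logging.DEBUG)

a = torch.rand(10, requires_grad=True)
b = a.mul(2).div(3).sum()
b.backward()  # each node's prehook logs "Executing: <node> with grad_output: ..."

Leaving the logger at INFO (the old threshold) means _engine_run_backward skips hook registration entirely, so ordinary backward calls avoid the per-node logging overhead.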