Autograd attaches logging hooks only in debug level (#116522)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/116522
Approved by: https://github.com/albanD
This commit is contained in:
parent b18d8d4595
commit 77d979f748
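Context for the change: before this commit, the autograd engine attached its per-node logging hooks whenever the autograd logger's effective level was INFO or lower; after it, only at DEBUG or lower, so the hook-attachment overhead is paid only when debug logging is explicitly requested. A minimal sketch of enabling it from user code, assuming the logger in this diff is the module-level `log = logging.getLogger(__name__)` in `torch.autograd` (i.e. named "torch.autograd"):

import logging

import torch

# Assumption: the autograd logger is the module logger "torch.autograd"
# (the `log` object in this diff). With this commit, the engine attaches
# its logging hooks only when the effective level is DEBUG or lower.
logging.basicConfig()
logging.getLogger("torch.autograd").setLevel(logging.DEBUG)

a = torch.rand(10, requires_grad=True)
b = a.mul(2).div(3).sum()
b.backward()  # each executed node is logged: "Executing: <node> with grad_output: ..."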
@@ -7,7 +7,7 @@ from torch.testing._internal.logging_utils import LoggingTestCase, make_logging_test
 
 
 class TestAutogradLogging(LoggingTestCase):
-    @make_logging_test(autograd=logging.INFO)
+    @make_logging_test(autograd=logging.DEBUG)
     def test_logging(self, records):
         a = torch.rand(10, requires_grad=True)
         b = a.mul(2).div(3).sum()
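The test now has to request DEBUG to see the records it asserts on. A minimal stand-in for PyTorch's internal `LoggingTestCase`/`make_logging_test` machinery, using plain `unittest.assertLogs` and again assuming the logger name "torch.autograd" (the `records` fixture and the real test's exact assertions are not shown in this hunk):

import logging
import unittest

import torch

class TestAutogradLoggingSketch(unittest.TestCase):
    def test_logging(self):
        a = torch.rand(10, requires_grad=True)
        b = a.mul(2).div(3).sum()
        # assertLogs temporarily sets the logger to DEBUG, so the engine's
        # `getEffectiveLevel() <= logging.DEBUG` gate passes and hooks attach.
        with self.assertLogs("torch.autograd", level=logging.DEBUG) as cm:
            b.backward()
        self.assertTrue(any("Executing:" in line for line in cm.output))

if __name__ == "__main__":
    unittest.main()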
@@ -660,7 +660,7 @@ def _register_logging_hooks_on_whole_graph(t_outputs: List[torch.Tensor]):
     def prehook(grad_output):
         node = torch._C._current_autograd_node()
         log_str = f"Executing: {node} with grad_output: {grad_output}"
-        log.info(log_str)
+        log.debug(log_str)
 
     handles = []
     for node in iter_graph(grad_fns):
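The hook itself is unchanged apart from the log level: it is a prehook registered across the graph that reports each node as the engine reaches it. A small standalone sketch of the same mechanism on a single node, using the public `Node.register_prehook` API (the graph-walking `iter_graph` helper from this file is not reproduced here):

import torch

def prehook(grad_output):
    # Same pattern as the diff: query the node currently being executed.
    node = torch._C._current_autograd_node()
    print(f"Executing: {node} with grad_output: {grad_output}")

a = torch.rand(3, requires_grad=True)
b = a.mul(2).sum()

handle = b.grad_fn.register_prehook(prehook)  # fires before the sum's backward node runs
b.backward()
handle.remove()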
@@ -674,7 +674,7 @@ def _register_logging_hooks_on_whole_graph(t_outputs: List[torch.Tensor]):
 
 
 def _engine_run_backward(t_outputs, *args, **kwargs):
-    attach_logging_hooks = log.getEffectiveLevel() <= logging.INFO
+    attach_logging_hooks = log.getEffectiveLevel() <= logging.DEBUG
     if attach_logging_hooks:
        unregister_hooks = _register_logging_hooks_on_whole_graph(t_outputs)
     try:
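The gate works because Python logging levels are ordered numerically and lower means more verbose: DEBUG is 10 and INFO is 20, so `getEffectiveLevel() <= logging.DEBUG` is true only when debug logging was explicitly requested, directly or via an ancestor logger. A quick illustration, again assuming the logger name:

import logging

log = logging.getLogger("torch.autograd")  # assumed name of the module logger

log.setLevel(logging.INFO)
print(log.getEffectiveLevel() <= logging.DEBUG)   # False: 20 <= 10

log.setLevel(logging.DEBUG)
print(log.getEffectiveLevel() <= logging.DEBUG)   # True: 10 <= 10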