Summary: https://github.com/pytorch/pytorch/issues/54339 broke Flake8. This PR fixes it.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/54540

Test Plan:
```
flake8
```

Reviewed By: walterddr

Differential Revision: D27274171

Pulled By: samestep

fbshipit-source-id: 4b440d72b4b5615f45e6fcb25f7a4c0423add272
52 lines
1.7 KiB
Python
import gdb
import textwrap


class DisableBreakpoints:
    """
    Context-manager to temporarily disable all gdb breakpoints, useful if
    there is a risk to hit one during the evaluation of one of our custom
    commands
    """

    def __enter__(self):
        self.disabled_breakpoints = []
        for b in gdb.breakpoints():
            if b.enabled:
                b.enabled = False
                self.disabled_breakpoints.append(b)

    def __exit__(self, etype, evalue, tb):
        for b in self.disabled_breakpoints:
            b.enabled = True
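
# A minimal sketch (not part of the original file): any other custom gdb
# command that evaluates expressions in the inferior could guard the
# evaluation the same way TensorRepr does below, e.g.:
#
#     with DisableBreakpoints():
#         gdb.parse_and_eval('my_debug_helper()')  # hypothetical helper
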
class TensorRepr(gdb.Command):
    """
    Print a human readable representation of the given at::Tensor.
    Usage: torch-tensor-repr EXP

    at::Tensor instances do not have a C++ implementation of a repr method: in
    pytorch, this is done by pure-Python code. As such, torch-tensor-repr
    internally creates a Python wrapper for the given tensor and calls repr()
    on it.
    """
    __doc__ = textwrap.dedent(__doc__).strip()

    def __init__(self):
        gdb.Command.__init__(self, 'torch-tensor-repr',
                             gdb.COMMAND_USER, gdb.COMPLETE_EXPRESSION)

    def invoke(self, args, from_tty):
        args = gdb.string_to_argv(args)
        if len(args) != 1:
            print('Usage: torch-tensor-repr EXP')
            return
        name = args[0]
        with DisableBreakpoints():
            res = gdb.parse_and_eval('torch::gdb::tensor_repr(%s)' % name)
            print('Python-level repr of %s:' % name)
            print(res.string())
            # torch::gdb::tensor_repr returns a malloc()ed buffer, let's free it
            gdb.parse_and_eval('(void)free(%s)' % int(res))


TensorRepr()
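
# Example session (a sketch, not part of the original file): it assumes this
# script has been sourced into gdb, the inferior is linked against libtorch
# (so torch::gdb::tensor_repr is available), and an at::Tensor named `x` is in
# scope at the current frame. The tensor values shown are illustrative only.
#
#     (gdb) source pytorch-gdb.py
#     (gdb) torch-tensor-repr x
#     Python-level repr of x:
#     tensor([1., 2., 3.])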