Fix broken docs (#124940)

These docstring examples were not valid Python, which was causing doctest to be unhappy.

In particular, the doc from #124496 caused the "trunk / win-vs2019-cpu-py3 / test" job to fail on #124771 when pushing. Not sure why it wasn't a problem on the original PR.

Testing:

`./test/run_doctests.sh`:
  before:
```
=== 4 warnings in 11.21 seconds ===
```
  after:
```
===  in 11.11 seconds ===
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/124940
Approved by: https://github.com/zou3519, https://github.com/atalman, https://github.com/huydhn
commit 4e2b4c6ed6 (parent 9266e472e2)
Author: Aaron Orenstein, 2024-04-26 08:55:44 -07:00 (committed by PyTorch MergeBot)
4 changed files with 8 additions and 8 deletions


@@ -92,7 +92,7 @@ jobs:
           retry_wait_seconds: 30
           command: |
             set -eu
-            python3 -m pip install rockset==1.0.3
+            python3 -m pip install rockset==1.0.3 'xdoctest>=1.1.0'
       - name: Start monitoring script
         id: monitor-script
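
For local verification outside CI, a hedged Python equivalent of what this step enables (the target module is an arbitrary choice here, and `xdoctest>=1.1.0` is assumed installed):

```
# Programmatic xdoctest run; torch.compiler is picked as an example target.
import xdoctest

xdoctest.doctest_module("torch.compiler", command="all")
```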


@@ -166,9 +166,9 @@ def is_compiling() -> bool:
         >>> def forward(self, x):
         >>>     if not torch.compiler.is_compiling():
-        >>>         ...logic that is not needed in a compiled/traced graph...
+        >>>         pass # ...logic that is not needed in a compiled/traced graph...
         >>>
-        >>>     ...rest of the function...
+        >>>     # ...rest of the function...
     """
     if torch.jit.is_scripting():
         return False
@@ -186,8 +186,8 @@ def is_dynamo_compiling() -> bool:
         >>> def forward(self, x):
         >>>     if not torch.compiler.is_dynamo_compiling():
-        >>>         ...logic that is not needed in a TorchDynamo-traced graph...
+        >>>         pass # ...logic that is not needed in a TorchDynamo-traced graph...
         >>>
-        >>>     ...rest of the function...
+        >>>     # ...rest of the function...
     """
     return False
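
For reference, a minimal runnable sketch of the pattern these corrected docstrings describe (the module, the print, and the torch.compile call are illustrative assumptions, not part of the diff):

```
import torch

class M(torch.nn.Module):
    def forward(self, x):
        if not torch.compiler.is_compiling():
            # Eager-only logic that should not end up in a compiled/traced graph.
            print("running in eager mode")
        return x * 2

m = M()
m(torch.randn(3))                 # eager: is_compiling() is False, so the print runs
torch.compile(m)(torch.randn(3))  # during tracing, the guarded branch is skipped
```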


@@ -849,11 +849,11 @@ def opcheck(
     >>> def _(x, y):
     >>>     return torch.empty_like(x)
     >>>
-    >>> def setup_context(ctx, inputs, output)
+    >>> def setup_context(ctx, inputs, output):
     >>>     y, = inputs
     >>>     ctx.y = y
     >>>
-    >>> def backward(ctx, grad)
+    >>> def backward(ctx, grad):
     >>>     return grad * ctx.y, None
     >>>
     >>> numpy_sin.register_autograd(backward, setup_context=setup_context)
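
For context, a self-contained sketch of the custom-op flow this docstring excerpt belongs to (the sin op, its gradient, and the sample input are assumptions made to keep the example complete, not taken from the diff):

```
import numpy as np
import torch
from torch import Tensor

# Hypothetical custom op backed by NumPy.
@torch.library.custom_op("mylib::numpy_sin", mutates_args=())
def numpy_sin(x: Tensor) -> Tensor:
    return torch.from_numpy(np.sin(x.numpy(force=True))).to(x.device)

# Fake/meta kernel so the op composes with torch.compile and meta tensors.
@numpy_sin.register_fake
def _(x):
    return torch.empty_like(x)

def setup_context(ctx, inputs, output):
    x, = inputs
    ctx.save_for_backward(x)

def backward(ctx, grad):
    x, = ctx.saved_tensors
    return grad * x.cos()  # d/dx sin(x) = cos(x)

numpy_sin.register_autograd(backward, setup_context=setup_context)

# opcheck runs a battery of consistency tests against these registrations.
torch.library.opcheck(numpy_sin, (torch.randn(3, requires_grad=True),))
```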


@@ -121,7 +121,7 @@ def collate(batch, *, collate_fn_map: Optional[Dict[Union[Type, Tuple[Type, ...]
     Examples:
         >>> def collate_tensor_fn(batch, *, collate_fn_map):
-        >>>     # Extend this function to handle batch of tensors
+        ...     # Extend this function to handle batch of tensors
         ...     return torch.stack(batch, 0)
         >>> def custom_collate(batch):
         ...     collate_map = {torch.Tensor: collate_tensor_fn}
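
As a usage note, a minimal sketch of wiring such a custom collate function into a DataLoader (the toy dataset and batch size are assumptions):

```
import torch
from torch.utils.data import DataLoader
from torch.utils.data._utils.collate import collate

def collate_tensor_fn(batch, *, collate_fn_map):
    # Extend this function to handle batch of tensors
    return torch.stack(batch, 0)

def custom_collate(batch):
    collate_map = {torch.Tensor: collate_tensor_fn}
    return collate(batch, collate_fn_map=collate_map)

dataset = [torch.randn(4) for _ in range(10)]  # toy dataset (assumption)
loader = DataLoader(dataset, batch_size=2, collate_fn=custom_collate)
for batch in loader:
    print(batch.shape)  # torch.Size([2, 4])
```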