diff --git a/.github/workflows/_win-test.yml b/.github/workflows/_win-test.yml
index ebc8434407a..99d037f0355 100644
--- a/.github/workflows/_win-test.yml
+++ b/.github/workflows/_win-test.yml
@@ -92,7 +92,7 @@ jobs:
           retry_wait_seconds: 30
           command: |
             set -eu
-            python3 -m pip install rockset==1.0.3
+            python3 -m pip install rockset==1.0.3 'xdoctest>=1.1.0'
 
       - name: Start monitoring script
         id: monitor-script
diff --git a/torch/compiler/__init__.py b/torch/compiler/__init__.py
index cd3d032ea1c..cf0b544e929 100644
--- a/torch/compiler/__init__.py
+++ b/torch/compiler/__init__.py
@@ -166,9 +166,9 @@ def is_compiling() -> bool:
 
         >>> def forward(self, x):
         >>>     if not torch.compiler.is_compiling():
-        >>>         ...logic that is not needed in a compiled/traced graph...
+        >>>         pass # ...logic that is not needed in a compiled/traced graph...
         >>>
-        >>>     ...rest of the function...
+        >>>     # ...rest of the function...
     """
     if torch.jit.is_scripting():
         return False
@@ -186,8 +186,8 @@ def is_dynamo_compiling() -> bool:
 
         >>> def forward(self, x):
         >>>     if not torch.compiler.is_dynamo_compiling():
-        >>>         ...logic that is not needed in a TorchDynamo-traced graph...
+        >>>         pass # ...logic that is not needed in a TorchDynamo-traced graph...
         >>>
-        >>>     ...rest of the function...
+        >>>     # ...rest of the function...
     """
     return False
diff --git a/torch/library.py b/torch/library.py
index 0515fa5bc12..2c413a7c9af 100644
--- a/torch/library.py
+++ b/torch/library.py
@@ -849,11 +849,11 @@ def opcheck(
         >>> def _(x, y):
         >>>     return torch.empty_like(x)
         >>>
-        >>> def setup_context(ctx, inputs, output)
+        >>> def setup_context(ctx, inputs, output):
         >>>     y, = inputs
         >>>     ctx.y = y
         >>>
-        >>> def backward(ctx, grad)
+        >>> def backward(ctx, grad):
         >>>     return grad * ctx.y, None
         >>>
         >>> numpy_sin.register_autograd(backward, setup_context=setup_context)
diff --git a/torch/utils/data/_utils/collate.py b/torch/utils/data/_utils/collate.py
index 5262497a90c..4c17597bd6f 100644
--- a/torch/utils/data/_utils/collate.py
+++ b/torch/utils/data/_utils/collate.py
@@ -121,7 +121,7 @@ def collate(batch, *, collate_fn_map: Optional[Dict[Union[Type, Tuple[Type, ...]
     Examples:
 
         >>> def collate_tensor_fn(batch, *, collate_fn_map):
-        >>>     # Extend this function to handle batch of tensors
+        ...     # Extend this function to handle batch of tensors
         ...     return torch.stack(batch, 0)
         >>> def custom_collate(batch):
         ...     collate_map = {torch.Tensor: collate_tensor_fn}
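
Context for reviewers: the torch/compiler docstring edits above matter because xdoctest compiles example lines as real Python. A minimal stdlib sketch of the difference, using only the strings from the diff itself, showing why the old prose-style lines fail to parse while the `pass # ...` replacements succeed:

    # Demonstrates the parse failure the diff fixes: an identifier glued onto
    # an Ellipsis literal is a SyntaxError, while `pass` plus a comment is valid.
    import ast

    broken = "...logic that is not needed in a compiled/traced graph..."
    fixed = "pass # ...logic that is not needed in a compiled/traced graph..."

    for snippet in (broken, fixed):
        try:
            ast.parse(snippet)
            print("parses:", snippet)
        except SyntaxError:
            print("SyntaxError:", snippet)

Running this prints SyntaxError for the old line and parses for the new one, which is exactly the difference xdoctest sees when collecting the examples. The same reasoning applies to the missing colons fixed in the torch/library.py example.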
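
Relatedly, the workflow change installs 'xdoctest>=1.1.0' so CI can run these examples. A hedged sketch of invoking it against one of the touched modules; `doctest_module` with `command` and `style` keywords follows xdoctest's documented API, but treat the exact invocation as an assumption rather than the CI entry point:

    # Collect and run the Google-style docstring examples in torch.compiler.
    # Requires torch and xdoctest>=1.1.0 installed, as in the workflow above.
    import xdoctest

    xdoctest.doctest_module("torch.compiler", command="all", style="google")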