Clean up some type annotations in android (#49944)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/49944

Upgrades type annotations from Python 2 style (# type: comments) to Python 3 style (inline annotations).
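
For context, this is the pattern being upgraded in the TorchScript test modules below: the legacy signature-in-a-comment form is replaced by inline Python 3 annotations. A minimal sketch of the before/after; the Example module and its method names are illustrative only, mirroring the methods changed in the diff:

import torch
from typing import Dict

class Example(torch.jit.ScriptModule):
    # Before: Python 2 style, signature carried in a "# type:" comment
    @torch.jit.script_method
    def eq_dict_old(self, input):
        # type: (Dict[str, int]) -> Dict[str, int]
        return input

    # After: Python 3 style, inline parameter and return annotations
    @torch.jit.script_method
    def eq_dict_new(self, input: Dict[str, int]) -> Dict[str, int]:
        return input

Both forms compile to the same TorchScript signature; the change is purely syntactic.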

Test Plan: Sandcastle tests

Reviewed By: xush6528

Differential Revision: D25717539

fbshipit-source-id: c621e2712e87eaed08cda48eb0fb224f6b0570c9
Authored by Richard Barnes on 2021-01-07 15:36:48 -08:00; committed by Facebook GitHub Bot
parent f83d57f99e
commit 09eefec627
2 changed files with 40 additions and 80 deletions


@@ -20,92 +20,77 @@ class Test(torch.jit.ScriptModule):
         return None
     @torch.jit.script_method
-    def eqBool(self, input):
-        # type: (bool) -> bool
+    def eqBool(self, input: bool) -> bool:
         return input
     @torch.jit.script_method
-    def eqInt(self, input):
-        # type: (int) -> int
+    def eqInt(self, input: int) -> int:
         return input
     @torch.jit.script_method
-    def eqFloat(self, input):
-        # type: (float) -> float
+    def eqFloat(self, input: float) -> float:
         return input
     @torch.jit.script_method
-    def eqStr(self, input):
-        # type: (str) -> str
+    def eqStr(self, input: str) -> str:
         return input
     @torch.jit.script_method
-    def eqTensor(self, input):
-        # type: (Tensor) -> Tensor
+    def eqTensor(self, input: Tensor) -> Tensor:
         return input
     @torch.jit.script_method
-    def eqDictStrKeyIntValue(self, input):
-        # type: (Dict[str, int]) -> Dict[str, int]
+    def eqDictStrKeyIntValue(self, input: Dict[str, int]) -> Dict[str, int]:
         return input
     @torch.jit.script_method
-    def eqDictIntKeyIntValue(self, input):
-        # type: (Dict[int, int]) -> Dict[int, int]
+    def eqDictIntKeyIntValue(self, input: Dict[int, int]) -> Dict[int, int]:
         return input
     @torch.jit.script_method
-    def eqDictFloatKeyIntValue(self, input):
-        # type: (Dict[float, int]) -> Dict[float, int]
+    def eqDictFloatKeyIntValue(self, input: Dict[float, int]) -> Dict[float, int]:
         return input
     @torch.jit.script_method
-    def listIntSumReturnTuple(self, input):
-        # type: (List[int]) -> Tuple[List[int], int]
+    def listIntSumReturnTuple(self, input: List[int]) -> Tuple[List[int], int]:
         sum = 0
         for x in input:
             sum += x
         return (input, sum)
     @torch.jit.script_method
-    def listBoolConjunction(self, input):
-        # type: (List[bool]) -> bool
+    def listBoolConjunction(self, input: List[bool]) -> bool:
         res = True
         for x in input:
             res = res and x
         return res
     @torch.jit.script_method
-    def listBoolDisjunction(self, input):
-        # type: (List[bool]) -> bool
+    def listBoolDisjunction(self, input: List[bool]) -> bool:
         res = False
         for x in input:
             res = res or x
         return res
     @torch.jit.script_method
-    def tupleIntSumReturnTuple(self, input):
-        # type: (Tuple[int, int, int]) -> Tuple[Tuple[int, int, int], int]
+    def tupleIntSumReturnTuple(self, input: Tuple[int, int, int]) -> Tuple[Tuple[int, int, int], int]:
         sum = 0
         for x in input:
             sum += x
         return (input, sum)
     @torch.jit.script_method
-    def optionalIntIsNone(self, input):
-        # type: (Optional[int]) -> bool
+    def optionalIntIsNone(self, input: Optional[int]) -> bool:
         return input is None
     @torch.jit.script_method
-    def intEq0None(self, input):
-        # type: (int) -> Optional[int]
+    def intEq0None(self, input: int) -> Optional[int]:
         if input == 0:
             return None
         return input
     @torch.jit.script_method
-    def str3Concat(self, input):
-        # type: (str) -> str
+    def str3Concat(self, input: str) -> str:
         return input + input + input
     @torch.jit.script_method
@@ -113,8 +98,7 @@ class Test(torch.jit.ScriptModule):
         return torch.tensor([int(input.item())])[0]
     @torch.jit.script_method
-    def testAliasWithOffset(self):
-        # type: () -> List[Tensor]
+    def testAliasWithOffset(self) -> List[Tensor]:
         x = torch.tensor([100, 200])
         a = [x[0], x[1]]
         return a
@@ -128,8 +112,7 @@ class Test(torch.jit.ScriptModule):
         return x
     @torch.jit.script_method
-    def conv2d(self, x, w, toChannelsLast):
-        # type: (Tensor, Tensor, bool) -> Tensor
+    def conv2d(self, x: Tensor, w: Tensor, toChannelsLast: bool) -> Tensor:
         r = torch.nn.functional.conv2d(x, w)
         if (toChannelsLast):
             r = r.contiguous(memory_format=torch.channels_last)
@@ -138,18 +121,15 @@ class Test(torch.jit.ScriptModule):
         return r
     @torch.jit.script_method
-    def contiguous(self, x):
-        # type: (Tensor) -> Tensor
+    def contiguous(self, x: Tensor) -> Tensor:
         return x.contiguous()
     @torch.jit.script_method
-    def contiguousChannelsLast(self, x):
-        # type: (Tensor) -> Tensor
+    def contiguousChannelsLast(self, x: Tensor) -> Tensor:
         return x.contiguous(memory_format=torch.channels_last)
     @torch.jit.script_method
-    def contiguousChannelsLast3d(self, x):
-        # type: (Tensor) -> Tensor
+    def contiguousChannelsLast3d(self, x: Tensor) -> Tensor:
         return x.contiguous(memory_format=torch.channels_last_3d)
 scriptAndSave(Test(), "test.pt")


@@ -1,85 +1,69 @@
 def forward(self, input):
     return None
-def eqBool(self, input):
-    # type: (bool) -> bool
+def eqBool(self, input: bool) -> bool:
     return input
-def eqInt(self, input):
-    # type: (int) -> int
+def eqInt(self, input: int) -> int:
     return input
-def eqFloat(self, input):
-    # type: (float) -> float
+def eqFloat(self, input: float) -> float:
     return input
-def eqStr(self, input):
-    # type: (str) -> str
+def eqStr(self, input: str) -> str:
     return input
-def eqTensor(self, input):
-    # type: (Tensor) -> Tensor
+def eqTensor(self, input: Tensor) -> Tensor:
     return input
-def eqDictStrKeyIntValue(self, input):
-    # type: (Dict[str, int]) -> Dict[str, int]
+def eqDictStrKeyIntValue(self, input: Dict[str, int]) -> Dict[str, int]:
     return input
-def eqDictIntKeyIntValue(self, input):
-    # type: (Dict[int, int]) -> Dict[int, int]
+def eqDictIntKeyIntValue(self, input: Dict[int, int]) -> Dict[int, int]:
     return input
-def eqDictFloatKeyIntValue(self, input):
-    # type: (Dict[float, int]) -> Dict[float, int]
+def eqDictFloatKeyIntValue(self, input: Dict[float, int]) -> Dict[float, int]:
     return input
-def listIntSumReturnTuple(self, input):
-    # type: (List[int]) -> Tuple[List[int], int]
+def listIntSumReturnTuple(self, input: List[int]) -> Tuple[List[int], int]:
     sum = 0
     for x in input:
         sum += x
     return (input, sum)
-def listBoolConjunction(self, input):
-    # type: (List[bool]) -> bool
+def listBoolConjunction(self, input: List[bool]) -> bool:
     res = True
     for x in input:
         res = res and x
     return res
-def listBoolDisjunction(self, input):
-    # type: (List[bool]) -> bool
+def listBoolDisjunction(self, input: List[bool]) -> bool:
     res = False
     for x in input:
         res = res or x
     return res
-def tupleIntSumReturnTuple(self, input):
-    # type: (Tuple[int, int, int]) -> Tuple[Tuple[int, int, int], int]
+def tupleIntSumReturnTuple(self, input: Tuple[int, int, int]) -> Tuple[Tuple[int, int, int], int]:
     sum = 0
     for x in input:
         sum += x
     return (input, sum)
-def optionalIntIsNone(self, input):
-    # type: (Optional[int]) -> bool
+def optionalIntIsNone(self, input: Optional[int]) -> bool:
     return input is None
-def intEq0None(self, input):
-    # type: (int) -> Optional[int]
+def intEq0None(self, input: int) -> Optional[int]:
     if input == 0:
         return None
     return input
-def str3Concat(self, input):
-    # type: (str) -> str
+def str3Concat(self, input: str) -> str:
     return input + input + input
 def newEmptyShapeWithItem(self, input):
     return torch.tensor([int(input.item())])[0]
-def testAliasWithOffset(self):
-    # type: () -> List[Tensor]
+def testAliasWithOffset(self) -> List[Tensor]:
     x = torch.tensor([100, 200])
     a = [x[0], x[1]]
     return a
@@ -91,8 +75,7 @@ def testNonContiguous(self):
     assert x[1] == 300
     return x
-def conv2d(self, x, w, toChannelsLast):
-    # type: (Tensor, Tensor, bool) -> Tensor
+def conv2d(self, x: Tensor, w: Tensor, toChannelsLast: bool) -> Tensor:
     r = torch.conv2d(x, w)
     if (toChannelsLast):
         # memory_format=torch.channels_last
@@ -101,16 +84,13 @@ def conv2d(self, x, w, toChannelsLast):
         r = r.contiguous()
     return r
-def contiguous(self, x):
-    # type: (Tensor) -> Tensor
+def contiguous(self, x: Tensor) -> Tensor:
     return x.contiguous()
-def contiguousChannelsLast(self, x):
-    # type: (Tensor) -> Tensor
+def contiguousChannelsLast(self, x: Tensor) -> Tensor:
     # memory_format=torch.channels_last
     return x.contiguous(memory_format=2)
-def contiguousChannelsLast3d(self, x):
-    # type: (Tensor) -> Tensor
+def contiguousChannelsLast3d(self, x: Tensor) -> Tensor:
     # memory_format=torch.channels_last_3d
     return x.contiguous(memory_format=3)