Skip to content

Commit

Permalink
Revert D25718705: Clean up type annotations in caffe2/torch/nn/modules
Browse files — browse the repository at this point in the history
Test Plan: revert-hammer

Differential Revision:
D25718705 (891759f)

Original commit changeset: 6a9e3e6d17aa

fbshipit-source-id: 1a4ef0bfdec8eb8e7ce149bfbdb34a4ad8d964b6
  • Loading branch information
Mike Ruberry authored and facebook-github-bot committed Dec 30, 2020
1 parent 14edc72 commit 01b57e1
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 4 deletions.
5 changes: 3 additions & 2 deletions torch/nn/modules/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -921,8 +921,9 @@ def __setstate__(self, state):

super(MultiheadAttention, self).__setstate__(state)

def forward(self, query: Tensor, key: Tensor, value: Tensor, key_padding_mask: Optional[Tensor] = None,
need_weights: bool = True, attn_mask: Optional[Tensor] = None) -> Tuple[Tensor, Optional[Tensor]]:
def forward(self, query, key, value, key_padding_mask=None,
need_weights=True, attn_mask=None):
# type: (Tensor, Tensor, Tensor, Optional[Tensor], bool, Optional[Tensor]) -> Tuple[Tensor, Optional[Tensor]]
r"""
Args:
query, key, value: map a query and a set of key-value pairs to an output.
Expand Down
3 changes: 2 additions & 1 deletion torch/nn/modules/conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,7 +530,8 @@ def __init__(self, in_channels, out_channels, kernel_size, stride,

# dilation being an optional parameter is for backwards
# compatibility
def _output_padding(self, input: Tensor, output_size: Optional[List[int]], stride: List[int], padding: List[int], kernel_size: List[int], dilation: Optional[List[int]] = None) -> List[int]:
def _output_padding(self, input, output_size, stride, padding, kernel_size, dilation=None):
# type: (Tensor, Optional[List[int]], List[int], List[int], List[int], Optional[List[int]]) -> List[int]
if output_size is None:
ret = _single(self.output_padding) # converting to list if was not already
else:
Expand Down
3 changes: 2 additions & 1 deletion torch/nn/modules/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,8 @@ def _reverse_repeat_tuple(t, n):
return tuple(x for x in reversed(t) for _ in range(n))


def _list_with_default(out_size: List[int], defaults: List[int]) -> List[int]:
def _list_with_default(out_size, defaults):
# type: (List[int], List[int]) -> List[int]
if isinstance(out_size, int):
return out_size
if len(defaults) <= len(out_size):
Expand Down

0 comments on commit 01b57e1

Please sign in to comment.