stdlib/3/tokenize.pyi: 60 additions & 72 deletions (merged)
@@ -1,13 +1,8 @@
-from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Pattern, Union, Sequence, TextIO, Tuple
+from typing import Any, Callable, Dict, Generator, Iterable, List, NamedTuple, Optional, Pattern, Union, Sequence, Set, TextIO, Tuple
 from builtins import open as _builtin_open
 import sys
 from token import * # noqa: F403
 
-if sys.version_info < (3, 7):
-    COMMENT: int
-    NL: int
-    ENCODING: int
-
 cookie_re: Pattern[str]
 blank_re: Pattern[bytes]
 
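Context for the deleted gate above: `COMMENT`, `NL`, and `ENCODING` became part of the `token` module in Python 3.7 and now reach this stub through the `from token import *` re-export, so the version check is presumably covered by token.pyi for all versions. At runtime the names are importable from `tokenize` either way; a quick sanity check, not part of the PR:

```python
# Importable from tokenize on all supported versions; on 3.7+ these
# are simply re-exported from the token module via the star import.
from tokenize import COMMENT, ENCODING, NL

print(COMMENT, NL, ENCODING)  # small integer token-type codes
```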
@@ -28,7 +23,7 @@ class TokenInfo(_TokenInfo):
 _Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
 
 class TokenError(Exception): ...
-class StopTokenizing(Exception): ...
+class StopTokenizing(Exception): ... # undocumented
 
 class Untokenizer:
     tokens: List[str]
@@ -40,6 +35,8 @@ class Untokenizer:
     def untokenize(self, iterable: Iterable[_Token]) -> str: ...
     def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...
 
+# the docstring says "returns bytes" but is incorrect --
+# if the ENCODING token is missing, it skips the encode
 def untokenize(iterable: Iterable[_Token]) -> Any: ...
 def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
 def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
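To illustrate the comment added above: at runtime `untokenize` returns `str` unless the token stream begins with an `ENCODING` token, in which case the result is encoded to `bytes`, hence the `Any` return type in the stub. A minimal demonstration using only documented tokenize entry points, not part of the PR:

```python
import io
import tokenize

source = "x = 1\n"

# generate_tokens() takes a str readline and emits no ENCODING token,
# so untokenize() skips the encode and returns str.
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
assert isinstance(tokenize.untokenize(tokens), str)

# tokenize() takes a bytes readline and emits ENCODING first,
# so untokenize() encodes its result and returns bytes.
btokens = list(tokenize.tokenize(io.BytesIO(source.encode("utf-8")).readline))
assert isinstance(tokenize.untokenize(btokens), bytes)
```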
@@ -48,70 +45,61 @@ def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, N
 if sys.version_info >= (3, 6):
     from os import PathLike
     def open(filename: Union[str, bytes, int, PathLike[Any]]) -> TextIO: ...
-else:
+elif sys.version_info >= (3, 2):
     def open(filename: Union[str, bytes, int]) -> TextIO: ...
 
-# Names in __all__ with no definition:
-# AMPER
-# AMPEREQUAL
-# ASYNC
-# AT
-# ATEQUAL
-# AWAIT
-# CIRCUMFLEX
-# CIRCUMFLEXEQUAL
-# COLON
-# COMMA
-# DEDENT
-# DOT
-# DOUBLESLASH
-# DOUBLESLASHEQUAL
-# DOUBLESTAR
-# DOUBLESTAREQUAL
-# ELLIPSIS
-# ENDMARKER
-# EQEQUAL
-# EQUAL
-# ERRORTOKEN
-# GREATER
-# GREATEREQUAL
-# INDENT
-# ISEOF
-# ISNONTERMINAL
-# ISTERMINAL
-# LBRACE
-# LEFTSHIFT
-# LEFTSHIFTEQUAL
-# LESS
-# LESSEQUAL
-# LPAR
-# LSQB
-# MINEQUAL
-# MINUS
-# NAME
-# NEWLINE
-# NOTEQUAL
-# NT_OFFSET
-# NUMBER
-# N_TOKENS
-# OP
-# PERCENT
-# PERCENTEQUAL
-# PLUS
-# PLUSEQUAL
-# RARROW
-# RBRACE
-# RIGHTSHIFT
-# RIGHTSHIFTEQUAL
-# RPAR
-# RSQB
-# SEMI
-# SLASH
-# SLASHEQUAL
-# STAR
-# STAREQUAL
-# STRING
-# TILDE
-# VBAR
-# VBAREQUAL
-# tok_name
+def group(*choices: str) -> str: ... # undocumented
+def any(*choices: str) -> str: ... # undocumented
+def maybe(*choices: str) -> str: ... # undocumented
+
+Whitespace: str # undocumented
+Comment: str # undocumented
+Ignore: str # undocumented
+Name: str # undocumented
+
+Hexnumber: str # undocumented
+Binnumber: str # undocumented
+Octnumber: str # undocumented
+Decnumber: str # undocumented
+Intnumber: str # undocumented
+Exponent: str # undocumented
+Pointfloat: str # undocumented
+Expfloat: str # undocumented
+Floatnumber: str # undocumented
+Imagnumber: str # undocumented
+Number: str # undocumented
+
+def _all_string_prefixes() -> Set[str]: ... # undocumented
+
+StringPrefix: str # undocumented
+
+Single: str # undocumented
+Double: str # undocumented
+Single3: str # undocumented
+Double3: str # undocumented
+Triple: str # undocumented
+String: str # undocumented
+
+if sys.version_info < (3, 7):
+    Operator: str # undocumented
+    Bracket: str # undocumented
+
+Special: str # undocumented
+Funny: str # undocumented
+
+PlainToken: str # undocumented
+Token: str # undocumented
+
+ContStr: str # undocumented
+PseudoExtras: str # undocumented
+PseudoToken: str # undocumented
+
+endpats: Dict[str, str] # undocumented
+if sys.version_info < (3, 6):
+    single_quoted: Dict[str, str] # undocumented
+    triple_quoted: Dict[str, str] # undocumented
+else:
+    single_quoted: Set[str] # undocumented
+    triple_quoted: Set[str] # undocumented
+
+tabsize: int # undocumented
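For reviewers unfamiliar with the new names: `group`, `any`, and `maybe` are tokenize's internal regex combinators, and the `str` constants above are the pattern fragments built with them. All of this is undocumented internals, so the sketch below only shows the rough shape (checked against CPython's Lib/tokenize.py, but subject to change between versions):

```python
import re
import tokenize

# group() wraps alternatives in a regex group; any() and maybe()
# append "*" and "?" to that group, respectively.
assert tokenize.group("a", "b") == "(a|b)"

# The constants are plain pattern strings; e.g. Number matches
# integer, float, and imaginary literals and compiles directly.
assert re.fullmatch(tokenize.Number, "3.14e-2")

# single_quoted/triple_quoted hold the valid prefix+quote combinations
# (a Set on 3.6+, a Dict keyed by the same strings before that).
assert "'''" in tokenize.triple_quoted
```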