29 changes: 14 additions & 15 deletions stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi
@@ -1,19 +1,18 @@
from _typeshed import Incomplete

from antlr4.error.Errors import IllegalStateException as IllegalStateException
from antlr4.Lexer import Lexer as ActualLexer, TokenSource
from antlr4.Token import Token as Token

Lexer: Incomplete
Lexer: None

class TokenStream: ...

class BufferedTokenStream(TokenStream):
__slots__ = ("tokenSource", "tokens", "index", "fetchedEOF")
tokenSource: Incomplete
tokens: Incomplete
tokenSource: TokenSource
tokens: list[Token]
index: int
fetchedEOF: bool
def __init__(self, tokenSource: Lexer) -> None: ...
def __init__(self, tokenSource: ActualLexer | None) -> None: ...
def mark(self) -> int: ...
def release(self, marker: int) -> None: ...
def reset(self) -> None: ...
@@ -26,15 +25,15 @@ class BufferedTokenStream(TokenStream):
def LA(self, i: int) -> int: ...
def LB(self, k: int) -> Token | None: ...
def LT(self, k: int) -> Token | None: ...
def adjustSeekIndex(self, i: int): ...
def adjustSeekIndex(self, i: int) -> int: ...
def lazyInit(self) -> None: ...
def setup(self) -> None: ...
def setTokenSource(self, tokenSource: Lexer): ...
def nextTokenOnChannel(self, i: int, channel: int): ...
def previousTokenOnChannel(self, i: int, channel: int): ...
def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ...
def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ...
def filterForChannel(self, left: int, right: int, channel: int): ...
def getSourceName(self): ...
def getText(self, start: int | None = None, stop: int | None = None): ...
def setTokenSource(self, tokenSource: ActualLexer | None) -> None: ...
def nextTokenOnChannel(self, i: int, channel: int) -> int: ...
def previousTokenOnChannel(self, i: int, channel: int) -> int: ...
def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ...
def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1) -> list[Token] | None: ...
def filterForChannel(self, left: int, right: int, channel: int) -> list[Token] | None: ...
def getSourceName(self) -> str: ...
def getText(self, start: int | None = None, stop: int | None = None) -> str: ...
def fill(self) -> None: ...
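
A minimal usage sketch (illustrative, not part of this diff) of what the new annotations buy during type checking; MyGrammarLexer stands in for a hypothetical ANTLR-generated lexer:

from antlr4 import CommonTokenStream, InputStream
from my_grammar import MyGrammarLexer  # hypothetical generated lexer module

stream = CommonTokenStream(MyGrammarLexer(InputStream("1 + 2")))
stream.fill()                 # typed -> None
first = stream.LT(1)          # typed Token | None, so guard before use
if first is not None:
    print(first.text, stream.getText())  # getText() now returns str
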
22 changes: 15 additions & 7 deletions stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi
@@ -1,15 +1,23 @@
from _typeshed import Incomplete

from antlr4.InputStream import InputStream
from antlr4.Lexer import TokenSource
from antlr4.Token import CommonToken as CommonToken

class TokenFactory: ...

class CommonTokenFactory(TokenFactory):
__slots__ = "copyText"
DEFAULT: Incomplete
copyText: Incomplete
DEFAULT: CommonTokenFactory | None
copyText: bool
def __init__(self, copyText: bool = False) -> None: ...
def create(
self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int
): ...
def createThin(self, type: int, text: str): ...
self,
source: tuple[TokenSource, InputStream],
type: int,
text: str,
channel: int,
start: int,
stop: int,
line: int,
column: int,
) -> CommonToken: ...
def createThin(self, type: int, text: str) -> CommonToken: ...
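
For illustration (not part of the diff), the factory methods can now be checked end to end:

from antlr4.CommonTokenFactory import CommonTokenFactory
from antlr4.Token import Token

factory = CommonTokenFactory()
tok = factory.createThin(Token.EOF, "<EOF>")  # inferred as CommonToken
print(tok.type, tok.text)
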
4 changes: 1 addition & 3 deletions stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi
@@ -1,12 +1,10 @@
from _typeshed import Incomplete

from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.Token import Token as Token

class CommonTokenStream(BufferedTokenStream):
__slots__ = "channel"
channel: Incomplete
channel: int
def __init__(self, lexer: Lexer, channel: int = 0) -> None: ...
def adjustSeekIndex(self, i: int) -> int: ...
def LB(self, k: int) -> Token | None: ...
6 changes: 2 additions & 4 deletions stubs/antlr4-python3-runtime/antlr4/FileStream.pyi
@@ -1,9 +1,7 @@
from _typeshed import Incomplete

from antlr4.InputStream import InputStream as InputStream

class FileStream(InputStream):
__slots__ = "fileName"
fileName: Incomplete
fileName: str
def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ...
def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ...
def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict") -> str: ...
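
A small sketch under an assumed input file (the path is hypothetical):

from antlr4 import FileStream

fs = FileStream("input.expr", encoding="utf-8")  # hypothetical file
print(fs.fileName)  # str, per the new annotation
print(fs.size)      # int, inherited from InputStream
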
24 changes: 13 additions & 11 deletions stubs/antlr4-python3-runtime/antlr4/InputStream.pyi
@@ -1,22 +1,24 @@
from _typeshed import Incomplete
from typing import Literal

from antlr4.Token import Token as Token

class InputStream:
__slots__ = ("name", "strdata", "_index", "data", "_size")
name: str
strdata: Incomplete
data: Incomplete
strdata: str
data: list[int]
_index: int
_size: int
def __init__(self, data: str) -> None: ...
@property
def index(self): ...
def index(self) -> int: ...
@property
def size(self): ...
def size(self) -> int: ...
def reset(self) -> None: ...
def consume(self) -> None: ...
def LA(self, offset: int): ...
def LT(self, offset: int): ...
def mark(self): ...
def release(self, marker: int): ...
def seek(self, _index: int): ...
def getText(self, start: int, stop: int): ...
def LA(self, offset: int) -> int: ...
def LT(self, offset: int) -> int: ...
def mark(self) -> Literal[-1]: ...
def release(self, marker: int) -> None: ...
def seek(self, _index: int) -> None: ...
def getText(self, start: int, stop: int) -> str: ...
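
A quick sketch (not part of the diff) of the now-annotated behavior; mark() in this runtime always returns -1, which motivates the Literal[-1]:

from antlr4 import InputStream

s = InputStream("abc")
print(s.index, s.size)   # both int
print(chr(s.LA(1)))      # LA returns a code point: "a"
print(s.getText(0, 1))   # bounds are inclusive: "ab"
assert s.mark() == -1
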
72 changes: 43 additions & 29 deletions stubs/antlr4-python3-runtime/antlr4/Lexer.pyi
@@ -1,4 +1,3 @@
from _typeshed import Incomplete
from typing import TextIO

from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator
@@ -10,7 +9,7 @@ from antlr4.error.Errors import (
)
from antlr4.InputStream import InputStream as InputStream
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.Token import Token as Token
from antlr4.Token import CommonToken, Token as Token

class TokenSource: ...

@@ -34,47 +33,62 @@ class Lexer(Recognizer, TokenSource):
DEFAULT_MODE: int
MORE: int
SKIP: int
DEFAULT_TOKEN_CHANNEL: Incomplete
HIDDEN: Incomplete
DEFAULT_TOKEN_CHANNEL: int
HIDDEN: int
MIN_CHAR_VALUE: int
MAX_CHAR_VALUE: int
_input: InputStream
_output: TextIO
_factory: CommonTokenFactory
_tokenFactorySourcePair: tuple[TokenSource, InputStream]
_interp: LexerATNSimulator
_token: Token | None
_tokenStartCharIndex: int
_tokenStartLine: int
_tokenStartColumn: int
_hitEOF: bool
_channel: int
_type: int
_modeStack: list[int]
_mode: int
_text: str | None
def __init__(self, input: InputStream, output: TextIO = ...) -> None: ...
def reset(self) -> None: ...
def nextToken(self): ...
def nextToken(self) -> Token | None: ...
def skip(self) -> None: ...
def more(self) -> None: ...
def mode(self, m: int): ...
def pushMode(self, m: int): ...
def popMode(self): ...
def mode(self, m: int) -> None: ...
def pushMode(self, m: int) -> None: ...
def popMode(self) -> int: ...
@property
def inputStream(self): ...
def inputStream(self) -> InputStream: ...
@inputStream.setter
def inputStream(self, input: InputStream): ...
def inputStream(self, input: InputStream) -> None: ...
@property
def sourceName(self): ...
def emitToken(self, token: Token): ...
def emit(self): ...
def emitEOF(self): ...
def sourceName(self) -> str: ...
def emitToken(self, token: Token) -> None: ...
def emit(self) -> CommonToken: ...
def emitEOF(self) -> CommonToken: ...
@property
def type(self): ...
def type(self) -> int: ...
@type.setter
def type(self, type: int): ...
def type(self, type: int) -> None: ...
@property
def line(self): ...
def line(self) -> int: ...
@line.setter
def line(self, line: int): ...
def line(self, line: int) -> None: ...
@property
def column(self): ...
def column(self) -> int: ...
@column.setter
def column(self, column: int): ...
def getCharIndex(self): ...
def column(self, column: int) -> None: ...
def getCharIndex(self) -> int: ...
@property
def text(self): ...
def text(self) -> str: ...
@text.setter
def text(self, txt: str): ...
def getAllTokens(self): ...
def notifyListeners(self, e: LexerNoViableAltException): ...
def getErrorDisplay(self, s: str): ...
def getErrorDisplayForChar(self, c: str): ...
def getCharErrorDisplay(self, c: str): ...
def recover(self, re: RecognitionException): ...
def text(self, txt: str) -> None: ...
def getAllTokens(self) -> list[Token]: ...
def notifyListeners(self, e: LexerNoViableAltException) -> None: ...
def getErrorDisplay(self, s: str) -> str: ...
def getErrorDisplayForChar(self, c: str) -> str: ...
def getCharErrorDisplay(self, c: str) -> str: ...
def recover(self, re: RecognitionException) -> None: ...
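
An illustrative sketch, again with a hypothetical generated lexer, showing getAllTokens() typed as list[Token]:

from antlr4 import InputStream
from my_grammar import MyGrammarLexer  # hypothetical generated lexer module

lexer = MyGrammarLexer(InputStream("1 + 2"))
for tok in lexer.getAllTokens():  # list[Token]
    print(tok.type, tok.line, tok.column, repr(tok.text))
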
81 changes: 47 additions & 34 deletions stubs/antlr4-python3-runtime/antlr4/Parser.pyi
@@ -1,8 +1,9 @@
from _typeshed import Incomplete
from typing import TextIO
from typing import Literal, TextIO

from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions
from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.CommonTokenFactory import TokenFactory as TokenFactory
from antlr4.error.Errors import (
@@ -16,6 +17,7 @@ from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.tree.ParseTreePattern import ParseTreePattern
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher
from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode

@@ -39,48 +41,59 @@ class Parser(Recognizer):
"_parseListeners",
"_syntaxErrors",
)
bypassAltsAtnCache: Incomplete
_input: TokenStream
_output: TextIO
_errHandler: DefaultErrorStrategy
_precedenceStack: list[int]
_ctx: ParserRuleContext | None
_tracer: TraceListener | None
_parseListeners: list[ParseTreeListener]
_syntaxErrors: int
_interp: ParserATNSimulator
bypassAltsAtnCache: dict[Incomplete, Incomplete]
buildParseTrees: bool
def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ...
def reset(self) -> None: ...
def match(self, ttype: int): ...
def matchWildcard(self): ...
def getParseListeners(self): ...
def addParseListener(self, listener: ParseTreeListener): ...
def removeParseListener(self, listener: ParseTreeListener): ...
def match(self, ttype: int) -> Token: ...
def matchWildcard(self) -> Token: ...
def getParseListeners(self) -> list[ParseTreeListener]: ...
def addParseListener(self, listener: ParseTreeListener) -> None: ...
def removeParseListener(self, listener: ParseTreeListener) -> None: ...
def removeParseListeners(self) -> None: ...
def triggerEnterRuleEvent(self) -> None: ...
def triggerExitRuleEvent(self) -> None: ...
def getNumberOfSyntaxErrors(self): ...
def getTokenFactory(self): ...
def setTokenFactory(self, factory: TokenFactory): ...
def getNumberOfSyntaxErrors(self) -> int: ...
def getTokenFactory(self) -> TokenFactory: ...
def setTokenFactory(self, factory: TokenFactory) -> None: ...
def getATNWithBypassAlts(self): ...
def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ...
def getInputStream(self): ...
def setInputStream(self, input: InputStream): ...
def getTokenStream(self): ...
def setTokenStream(self, input: TokenStream): ...
def getCurrentToken(self): ...
def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ...
def consume(self): ...
def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None) -> ParseTreePattern: ...
def getInputStream(self) -> InputStream: ...
def setInputStream(self, input: InputStream) -> None: ...
def getTokenStream(self) -> TokenStream: ...
def setTokenStream(self, input: TokenStream) -> None: ...
def getCurrentToken(self) -> Token | None: ...
def notifyErrorListeners(
self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None
) -> None: ...
def consume(self) -> None: ...
def addContextToParseTree(self) -> None: ...
state: Incomplete
def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
state: int
def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ...
def exitRule(self) -> None: ...
def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ...
def getPrecedence(self): ...
def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ...
def getInvokingContext(self, ruleIndex: int): ...
def precpred(self, localctx: RuleContext, precedence: int): ...
def inContext(self, context: str): ...
def isExpectedToken(self, symbol: int): ...
def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int) -> None: ...
def getPrecedence(self) -> int: ...
def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ...
def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> None: ...
def unrollRecursionContexts(self, parentCtx: ParserRuleContext) -> None: ...
def getInvokingContext(self, ruleIndex: int) -> RuleContext | None: ...
def precpred(self, localctx: RuleContext, precedence: int) -> bool: ...
def inContext(self, context: str) -> Literal[False]: ...
def isExpectedToken(self, symbol: int) -> bool: ...
def getExpectedTokens(self): ...
def getExpectedTokensWithinCurrentRule(self): ...
def getRuleIndex(self, ruleName: str): ...
def getRuleInvocationStack(self, p: RuleContext | None = None): ...
def getDFAStrings(self): ...
def getRuleIndex(self, ruleName: str) -> int: ...
def getRuleInvocationStack(self, p: RuleContext | None = None) -> list[str]: ...
def getDFAStrings(self) -> list[str]: ...
def dumpDFA(self) -> None: ...
def getSourceName(self): ...
def setTrace(self, trace: bool): ...
def getSourceName(self) -> str: ...
def setTrace(self, trace: bool) -> None: ...
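
A sketch of the typed parser surface; MyGrammarParser and its expr rule are hypothetical generated names:

from antlr4 import CommonTokenStream, InputStream
from my_grammar import MyGrammarLexer, MyGrammarParser  # hypothetical

tokens = CommonTokenStream(MyGrammarLexer(InputStream("1 + 2")))
parser = MyGrammarParser(tokens)
tree = parser.expr()                     # hypothetical start rule
print(parser.getNumberOfSyntaxErrors())  # int under the new stub
print(tree.toStringTree(recog=parser))
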
26 changes: 13 additions & 13 deletions stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi
@@ -28,19 +28,19 @@ class ParserInterpreter(Parser):
"_parentContextStack",
"pushRecursionContextStates",
)
grammarFileName: Incomplete
atn: Incomplete
tokenNames: Incomplete
ruleNames: Incomplete
decisionToDFA: Incomplete
sharedContextCache: Incomplete
pushRecursionContextStates: Incomplete
grammarFileName: str
atn: ATN
tokenNames: list[Incomplete]
ruleNames: list[str]
decisionToDFA: list[DFA]
sharedContextCache: PredictionContextCache
pushRecursionContextStates: set[int]
def __init__(
self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream
) -> None: ...
state: Incomplete
def parse(self, startRuleIndex: int): ...
def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
def getATNState(self): ...
def visitState(self, p: ATNState): ...
def visitRuleStopState(self, p: ATNState): ...
state: int
def parse(self, startRuleIndex: int) -> ParserRuleContext | None: ...
def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> None: ...
def getATNState(self) -> ATNState: ...
def visitState(self, p: ATNState) -> None: ...
def visitRuleStopState(self, p: ATNState) -> None: ...