From 463513e29eb352501badd5538a09e7137a51a264 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Wed, 20 Dec 2023 07:41:05 -0500 Subject: [PATCH 01/34] Add files via upload --- stubs/antlr4/BufferedTokenStream.pyi | 38 +++++++ stubs/antlr4/CommonTokenFactory.pyi | 11 ++ stubs/antlr4/CommonTokenStream.pyi | 12 +++ stubs/antlr4/FileStream.pyi | 7 ++ stubs/antlr4/InputStream.pyi | 19 ++++ stubs/antlr4/IntervalSet.pyi | 20 ++++ stubs/antlr4/LL1Analyzer.pyi | 16 +++ stubs/antlr4/Lexer.pyi | 59 ++++++++++ stubs/antlr4/ListTokenSource.pyi | 18 ++++ stubs/antlr4/Parser.pyi | 70 ++++++++++++ stubs/antlr4/ParserInterpreter.pyi | 29 +++++ stubs/antlr4/ParserRuleContext.pyi | 33 ++++++ stubs/antlr4/PredictionContext.pyi | 65 +++++++++++ stubs/antlr4/Recognizer.pyi | 28 +++++ stubs/antlr4/RuleContext.pyi | 27 +++++ stubs/antlr4/StdinStream.pyi | 4 + stubs/antlr4/Token.pyi | 41 +++++++ stubs/antlr4/TokenStreamRewriter.pyi | 48 +++++++++ stubs/antlr4/Utils.pyi | 2 + stubs/antlr4/__init__.pyi | 21 ++++ stubs/antlr4/_pygrun.pyi | 4 + stubs/antlr4/atn/ATN.pyi | 28 +++++ stubs/antlr4/atn/ATNConfig.pyi | 28 +++++ stubs/antlr4/atn/ATNConfigSet.pyi | 40 +++++++ .../antlr4/atn/ATNDeserializationOptions.pyi | 9 ++ stubs/antlr4/atn/ATNDeserializer.pyi | 48 +++++++++ stubs/antlr4/atn/ATNSimulator.pyi | 12 +++ stubs/antlr4/atn/ATNState.pyi | 98 +++++++++++++++++ stubs/antlr4/atn/ATNType.pyi | 7 ++ stubs/antlr4/atn/LexerATNSimulator.pyi | 62 +++++++++++ stubs/antlr4/atn/LexerAction.pyi | 82 ++++++++++++++ stubs/antlr4/atn/LexerActionExecutor.pyi | 16 +++ stubs/antlr4/atn/ParserATNSimulator.pyi | 71 ++++++++++++ stubs/antlr4/atn/PredictionMode.pyi | 40 +++++++ stubs/antlr4/atn/SemanticContext.pyi | 49 +++++++++ stubs/antlr4/atn/Transition.pyi | 102 ++++++++++++++++++ stubs/antlr4/atn/__init__.pyi | 0 stubs/antlr4/dfa/DFA.pyi | 20 ++++ stubs/antlr4/dfa/DFASerializer.pyi | 16 +++ stubs/antlr4/dfa/DFAState.pyi | 22 ++++ stubs/antlr4/dfa/__init__.pyi | 0 .../antlr4/error/DiagnosticErrorListener.pyi | 13 +++ stubs/antlr4/error/ErrorListener.pyi | 19 ++++ stubs/antlr4/error/ErrorStrategy.pyi | 51 +++++++++ stubs/antlr4/error/Errors.pyi | 58 ++++++++++ stubs/antlr4/error/__init__.pyi | 0 stubs/antlr4/tree/Chunk.pyi | 12 +++ stubs/antlr4/tree/ParseTreeMatch.pyi | 13 +++ stubs/antlr4/tree/ParseTreePattern.pyi | 14 +++ stubs/antlr4/tree/ParseTreePatternMatcher.pyi | 40 +++++++ stubs/antlr4/tree/RuleTagToken.pyi | 16 +++ stubs/antlr4/tree/TokenTagToken.pyi | 8 ++ stubs/antlr4/tree/Tree.pyi | 52 +++++++++ stubs/antlr4/tree/Trees.pyi | 24 +++++ stubs/antlr4/tree/__init__.pyi | 0 stubs/antlr4/xpath/XPath.pyi | 59 ++++++++++ stubs/antlr4/xpath/XPathLexer.pyi | 28 +++++ stubs/antlr4/xpath/__init__.pyi | 0 58 files changed, 1729 insertions(+) create mode 100644 stubs/antlr4/BufferedTokenStream.pyi create mode 100644 stubs/antlr4/CommonTokenFactory.pyi create mode 100644 stubs/antlr4/CommonTokenStream.pyi create mode 100644 stubs/antlr4/FileStream.pyi create mode 100644 stubs/antlr4/InputStream.pyi create mode 100644 stubs/antlr4/IntervalSet.pyi create mode 100644 stubs/antlr4/LL1Analyzer.pyi create mode 100644 stubs/antlr4/Lexer.pyi create mode 100644 stubs/antlr4/ListTokenSource.pyi create mode 100644 stubs/antlr4/Parser.pyi create mode 100644 stubs/antlr4/ParserInterpreter.pyi create mode 100644 stubs/antlr4/ParserRuleContext.pyi create mode 100644 stubs/antlr4/PredictionContext.pyi create mode 100644 stubs/antlr4/Recognizer.pyi create mode 100644 stubs/antlr4/RuleContext.pyi create mode 100644 stubs/antlr4/StdinStream.pyi create 
mode 100644 stubs/antlr4/Token.pyi create mode 100644 stubs/antlr4/TokenStreamRewriter.pyi create mode 100644 stubs/antlr4/Utils.pyi create mode 100644 stubs/antlr4/__init__.pyi create mode 100644 stubs/antlr4/_pygrun.pyi create mode 100644 stubs/antlr4/atn/ATN.pyi create mode 100644 stubs/antlr4/atn/ATNConfig.pyi create mode 100644 stubs/antlr4/atn/ATNConfigSet.pyi create mode 100644 stubs/antlr4/atn/ATNDeserializationOptions.pyi create mode 100644 stubs/antlr4/atn/ATNDeserializer.pyi create mode 100644 stubs/antlr4/atn/ATNSimulator.pyi create mode 100644 stubs/antlr4/atn/ATNState.pyi create mode 100644 stubs/antlr4/atn/ATNType.pyi create mode 100644 stubs/antlr4/atn/LexerATNSimulator.pyi create mode 100644 stubs/antlr4/atn/LexerAction.pyi create mode 100644 stubs/antlr4/atn/LexerActionExecutor.pyi create mode 100644 stubs/antlr4/atn/ParserATNSimulator.pyi create mode 100644 stubs/antlr4/atn/PredictionMode.pyi create mode 100644 stubs/antlr4/atn/SemanticContext.pyi create mode 100644 stubs/antlr4/atn/Transition.pyi create mode 100644 stubs/antlr4/atn/__init__.pyi create mode 100644 stubs/antlr4/dfa/DFA.pyi create mode 100644 stubs/antlr4/dfa/DFASerializer.pyi create mode 100644 stubs/antlr4/dfa/DFAState.pyi create mode 100644 stubs/antlr4/dfa/__init__.pyi create mode 100644 stubs/antlr4/error/DiagnosticErrorListener.pyi create mode 100644 stubs/antlr4/error/ErrorListener.pyi create mode 100644 stubs/antlr4/error/ErrorStrategy.pyi create mode 100644 stubs/antlr4/error/Errors.pyi create mode 100644 stubs/antlr4/error/__init__.pyi create mode 100644 stubs/antlr4/tree/Chunk.pyi create mode 100644 stubs/antlr4/tree/ParseTreeMatch.pyi create mode 100644 stubs/antlr4/tree/ParseTreePattern.pyi create mode 100644 stubs/antlr4/tree/ParseTreePatternMatcher.pyi create mode 100644 stubs/antlr4/tree/RuleTagToken.pyi create mode 100644 stubs/antlr4/tree/TokenTagToken.pyi create mode 100644 stubs/antlr4/tree/Tree.pyi create mode 100644 stubs/antlr4/tree/Trees.pyi create mode 100644 stubs/antlr4/tree/__init__.pyi create mode 100644 stubs/antlr4/xpath/XPath.pyi create mode 100644 stubs/antlr4/xpath/XPathLexer.pyi create mode 100644 stubs/antlr4/xpath/__init__.pyi diff --git a/stubs/antlr4/BufferedTokenStream.pyi b/stubs/antlr4/BufferedTokenStream.pyi new file mode 100644 index 000000000000..de1ebe422ccd --- /dev/null +++ b/stubs/antlr4/BufferedTokenStream.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token +from antlr4.error.Errors import IllegalStateException as IllegalStateException + +Lexer: Incomplete + +class TokenStream: ... + +class BufferedTokenStream(TokenStream): + tokenSource: Incomplete + tokens: Incomplete + index: int + fetchedEOF: bool + def __init__(self, tokenSource: Lexer) -> None: ... + def mark(self): ... + def release(self, marker: int): ... + def reset(self) -> None: ... + def seek(self, index: int): ... + def get(self, index: int): ... + def consume(self) -> None: ... + def sync(self, i: int): ... + def fetch(self, n: int): ... + def getTokens(self, start: int, stop: int, types: set = ...): ... + def LA(self, i: int): ... + def LB(self, k: int): ... + def LT(self, k: int): ... + def adjustSeekIndex(self, i: int): ... + def lazyInit(self) -> None: ... + def setup(self) -> None: ... + def setTokenSource(self, tokenSource: Lexer): ... + def nextTokenOnChannel(self, i: int, channel: int): ... + def previousTokenOnChannel(self, i: int, channel: int): ... + def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...): ... 
+ def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...): ... + def filterForChannel(self, left: int, right: int, channel: int): ... + def getSourceName(self): ... + def getText(self, start: int = ..., stop: int = ...): ... + def fill(self) -> None: ... diff --git a/stubs/antlr4/CommonTokenFactory.pyi b/stubs/antlr4/CommonTokenFactory.pyi new file mode 100644 index 000000000000..bcedb7b02095 --- /dev/null +++ b/stubs/antlr4/CommonTokenFactory.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from antlr4.Token import CommonToken as CommonToken + +class TokenFactory: ... + +class CommonTokenFactory(TokenFactory): + DEFAULT: Incomplete + copyText: Incomplete + def __init__(self, copyText: bool = ...) -> None: ... + def create(self, source, type: int, text: str, channel: int, start: int, stop: int, line: int, column: int): ... + def createThin(self, type: int, text: str): ... diff --git a/stubs/antlr4/CommonTokenStream.pyi b/stubs/antlr4/CommonTokenStream.pyi new file mode 100644 index 000000000000..495b823a442e --- /dev/null +++ b/stubs/antlr4/CommonTokenStream.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream +from antlr4.Lexer import Lexer as Lexer +from antlr4.Token import Token as Token + +class CommonTokenStream(BufferedTokenStream): + channel: Incomplete + def __init__(self, lexer: Lexer, channel: int = ...) -> None: ... + def adjustSeekIndex(self, i: int): ... + def LB(self, k: int): ... + def LT(self, k: int): ... + def getNumberOfOnChannelTokens(self): ... diff --git a/stubs/antlr4/FileStream.pyi b/stubs/antlr4/FileStream.pyi new file mode 100644 index 000000000000..37961be4c77b --- /dev/null +++ b/stubs/antlr4/FileStream.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from antlr4.InputStream import InputStream as InputStream + +class FileStream(InputStream): + fileName: Incomplete + def __init__(self, fileName: str, encoding: str = ..., errors: str = ...) -> None: ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = ...): ... diff --git a/stubs/antlr4/InputStream.pyi b/stubs/antlr4/InputStream.pyi new file mode 100644 index 000000000000..e7645d559999 --- /dev/null +++ b/stubs/antlr4/InputStream.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token + +class InputStream: + name: str + strdata: Incomplete + def __init__(self, data: str) -> None: ... + @property + def index(self): ... + @property + def size(self): ... + def reset(self) -> None: ... + def consume(self) -> None: ... + def LA(self, offset: int): ... + def LT(self, offset: int): ... + def mark(self): ... + def release(self, marker: int): ... + def seek(self, _index: int): ... + def getText(self, start: int, stop: int): ... diff --git a/stubs/antlr4/IntervalSet.pyi b/stubs/antlr4/IntervalSet.pyi new file mode 100644 index 000000000000..421b79faa1b1 --- /dev/null +++ b/stubs/antlr4/IntervalSet.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token + +class IntervalSet: + intervals: Incomplete + readonly: bool + def __init__(self) -> None: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def addOne(self, v: int): ... + def addRange(self, v: range): ... + def addSet(self, other: IntervalSet): ... + def reduce(self, k: int): ... + def complement(self, start, stop): ... + def __contains__(self, item) -> bool: ... + def __len__(self) -> int: ... + def removeRange(self, v) -> None: ... 
+ def removeOne(self, v) -> None: ... + def toString(self, literalNames: list, symbolicNames: list): ... + def elementName(self, literalNames: list, symbolicNames: list, a: int): ... diff --git a/stubs/antlr4/LL1Analyzer.pyi b/stubs/antlr4/LL1Analyzer.pyi new file mode 100644 index 000000000000..982cfb824198 --- /dev/null +++ b/stubs/antlr4/LL1Analyzer.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from antlr4.IntervalSet import IntervalSet as IntervalSet +from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextFromRuleContext as PredictionContextFromRuleContext, SingletonPredictionContext as SingletonPredictionContext +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfig import ATNConfig as ATNConfig +from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState +from antlr4.atn.Transition import AbstractPredicateTransition as AbstractPredicateTransition, NotSetTransition as NotSetTransition, RuleTransition as RuleTransition, WildcardTransition as WildcardTransition + +class LL1Analyzer: + HIT_PRED: Incomplete + atn: Incomplete + def __init__(self, atn: ATN) -> None: ... + def getDecisionLookahead(self, s: ATNState): ... + def LOOK(self, s: ATNState, stopState: ATNState = ..., ctx: RuleContext = ...): ... diff --git a/stubs/antlr4/Lexer.pyi b/stubs/antlr4/Lexer.pyi new file mode 100644 index 000000000000..82dee8fccedf --- /dev/null +++ b/stubs/antlr4/Lexer.pyi @@ -0,0 +1,59 @@ +from _typeshed import Incomplete +from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory +from antlr4.InputStream import InputStream as InputStream +from antlr4.Recognizer import Recognizer as Recognizer +from antlr4.Token import Token as Token +from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator +from antlr4.error.Errors import IllegalStateException as IllegalStateException, LexerNoViableAltException as LexerNoViableAltException, RecognitionException as RecognitionException +from typing import TextIO + +class TokenSource: ... + +class Lexer(Recognizer, TokenSource): + DEFAULT_MODE: int + MORE: int + SKIP: int + DEFAULT_TOKEN_CHANNEL: Incomplete + HIDDEN: Incomplete + MIN_CHAR_VALUE: int + MAX_CHAR_VALUE: int + def __init__(self, input: InputStream, output: TextIO = ...) -> None: ... + def reset(self) -> None: ... + def nextToken(self): ... + def skip(self) -> None: ... + def more(self) -> None: ... + def mode(self, m: int): ... + def pushMode(self, m: int): ... + def popMode(self): ... + @property + def inputStream(self): ... + @inputStream.setter + def inputStream(self, input: InputStream): ... + @property + def sourceName(self): ... + def emitToken(self, token: Token): ... + def emit(self): ... + def emitEOF(self): ... + @property + def type(self): ... + @type.setter + def type(self, type: int): ... + @property + def line(self): ... + @line.setter + def line(self, line: int): ... + @property + def column(self): ... + @column.setter + def column(self, column: int): ... + def getCharIndex(self): ... + @property + def text(self): ... + @text.setter + def text(self, txt: str): ... + def getAllTokens(self): ... + def notifyListeners(self, e: LexerNoViableAltException): ... + def getErrorDisplay(self, s: str): ... + def getErrorDisplayForChar(self, c: str): ... + def getCharErrorDisplay(self, c: str): ... + def recover(self, re: RecognitionException): ... 
diff --git a/stubs/antlr4/ListTokenSource.pyi b/stubs/antlr4/ListTokenSource.pyi new file mode 100644 index 000000000000..aba4368b2ebf --- /dev/null +++ b/stubs/antlr4/ListTokenSource.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory +from antlr4.Lexer import TokenSource as TokenSource +from antlr4.Token import Token as Token + +class ListTokenSource(TokenSource): + tokens: Incomplete + sourceName: Incomplete + pos: int + eofToken: Incomplete + def __init__(self, tokens: list, sourceName: str = ...) -> None: ... + @property + def column(self): ... + def nextToken(self): ... + @property + def line(self): ... + def getInputStream(self): ... + def getSourceName(self): ... diff --git a/stubs/antlr4/Parser.pyi b/stubs/antlr4/Parser.pyi new file mode 100644 index 000000000000..8faafd163822 --- /dev/null +++ b/stubs/antlr4/Parser.pyi @@ -0,0 +1,70 @@ +from _typeshed import Incomplete +from antlr4.BufferedTokenStream import TokenStream as TokenStream +from antlr4.CommonTokenFactory import TokenFactory as TokenFactory +from antlr4.InputStream import InputStream as InputStream +from antlr4.Lexer import Lexer as Lexer +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext +from antlr4.Recognizer import Recognizer as Recognizer +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.error.ErrorStrategy import DefaultErrorStrategy as DefaultErrorStrategy +from antlr4.error.Errors import RecognitionException as RecognitionException, UnsupportedOperationException as UnsupportedOperationException +from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher +from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode +from typing import TextIO + +class TraceListener(ParseTreeListener): + def __init__(self, parser) -> None: ... + def enterEveryRule(self, ctx) -> None: ... + def visitTerminal(self, node) -> None: ... + def visitErrorNode(self, node) -> None: ... + def exitEveryRule(self, ctx) -> None: ... + +class Parser(Recognizer): + bypassAltsAtnCache: Incomplete + buildParseTrees: bool + def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ... + def reset(self) -> None: ... + def match(self, ttype: int): ... + def matchWildcard(self): ... + def getParseListeners(self): ... + def addParseListener(self, listener: ParseTreeListener): ... + def removeParseListener(self, listener: ParseTreeListener): ... + def removeParseListeners(self) -> None: ... + def triggerEnterRuleEvent(self) -> None: ... + def triggerExitRuleEvent(self) -> None: ... + def getNumberOfSyntaxErrors(self): ... + def getTokenFactory(self): ... + def setTokenFactory(self, factory: TokenFactory): ... + def getATNWithBypassAlts(self): ... + def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer = ...): ... + def getInputStream(self): ... + def setInputStream(self, input: InputStream): ... + def getTokenStream(self): ... + def setTokenStream(self, input: TokenStream): ... + def getCurrentToken(self): ... + def notifyErrorListeners(self, msg: str, offendingToken: Token = ..., e: RecognitionException = ...): ... + def consume(self): ... 
+ def addContextToParseTree(self) -> None: ... + state: Incomplete + def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... + def exitRule(self) -> None: ... + def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ... + def getPrecedence(self): ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... + def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... + def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ... + def getInvokingContext(self, ruleIndex: int): ... + def precpred(self, localctx: RuleContext, precedence: int): ... + def inContext(self, context: str): ... + def isExpectedToken(self, symbol: int): ... + def getExpectedTokens(self): ... + def getExpectedTokensWithinCurrentRule(self): ... + def getRuleIndex(self, ruleName: str): ... + def getRuleInvocationStack(self, p: RuleContext = ...): ... + def getDFAStrings(self): ... + def dumpDFA(self) -> None: ... + def getSourceName(self): ... + def setTrace(self, trace: bool): ... diff --git a/stubs/antlr4/ParserInterpreter.pyi b/stubs/antlr4/ParserInterpreter.pyi new file mode 100644 index 000000000000..33c229d0eba5 --- /dev/null +++ b/stubs/antlr4/ParserInterpreter.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from antlr4.BufferedTokenStream import TokenStream as TokenStream +from antlr4.Lexer import Lexer as Lexer +from antlr4.Parser import Parser as Parser +from antlr4.ParserRuleContext import InterpreterRuleContext as InterpreterRuleContext, ParserRuleContext as ParserRuleContext +from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache +from antlr4.Token import Token as Token +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNState import ATNState as ATNState, LoopEndState as LoopEndState, StarLoopEntryState as StarLoopEntryState +from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator +from antlr4.atn.Transition import Transition as Transition +from antlr4.dfa.DFA import DFA as DFA +from antlr4.error.Errors import FailedPredicateException as FailedPredicateException, RecognitionException as RecognitionException, UnsupportedOperationException as UnsupportedOperationException + +class ParserInterpreter(Parser): + grammarFileName: Incomplete + atn: Incomplete + tokenNames: Incomplete + ruleNames: Incomplete + decisionToDFA: Incomplete + sharedContextCache: Incomplete + pushRecursionContextStates: Incomplete + def __init__(self, grammarFileName: str, tokenNames: list, ruleNames: list, atn: ATN, input: TokenStream) -> None: ... + state: Incomplete + def parse(self, startRuleIndex: int): ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... + def getATNState(self): ... + def visitState(self, p: ATNState): ... + def visitRuleStopState(self, p: ATNState): ... 
diff --git a/stubs/antlr4/ParserRuleContext.pyi b/stubs/antlr4/ParserRuleContext.pyi new file mode 100644 index 000000000000..dba9b5e79e0a --- /dev/null +++ b/stubs/antlr4/ParserRuleContext.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.tree.Tree import ErrorNodeImpl as ErrorNodeImpl, INVALID_INTERVAL as INVALID_INTERVAL, ParseTree as ParseTree, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode, TerminalNodeImpl as TerminalNodeImpl +from collections.abc import Generator + +class ParserRuleContext(RuleContext): + children: Incomplete + start: Incomplete + stop: Incomplete + exception: Incomplete + def __init__(self, parent: ParserRuleContext = ..., invokingStateNumber: int = ...) -> None: ... + parentCtx: Incomplete + invokingState: Incomplete + def copyFrom(self, ctx: ParserRuleContext): ... + def enterRule(self, listener: ParseTreeListener): ... + def exitRule(self, listener: ParseTreeListener): ... + def addChild(self, child: ParseTree): ... + def removeLastChild(self) -> None: ... + def addTokenNode(self, token: Token): ... + def addErrorNode(self, badToken: Token): ... + def getChild(self, i: int, ttype: type = ...): ... + def getChildren(self, predicate: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + def getToken(self, ttype: int, i: int): ... + def getTokens(self, ttype: int): ... + def getTypedRuleContext(self, ctxType: type, i: int): ... + def getTypedRuleContexts(self, ctxType: type): ... + def getChildCount(self): ... + def getSourceInterval(self): ... + +class InterpreterRuleContext(ParserRuleContext): + ruleIndex: Incomplete + def __init__(self, parent: ParserRuleContext, invokingStateNumber: int, ruleIndex: int) -> None: ... diff --git a/stubs/antlr4/PredictionContext.pyi b/stubs/antlr4/PredictionContext.pyi new file mode 100644 index 000000000000..951585702132 --- /dev/null +++ b/stubs/antlr4/PredictionContext.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.atn.ATN import ATN as ATN +from antlr4.error.Errors import IllegalStateException as IllegalStateException + +class PredictionContext: + EMPTY: Incomplete + EMPTY_RETURN_STATE: int + globalNodeCount: int + id = globalNodeCount + cachedHashCode: Incomplete + def __init__(self, cachedHashCode: int) -> None: ... + def __len__(self) -> int: ... + def isEmpty(self): ... + def hasEmptyPath(self): ... + def getReturnState(self, index: int): ... + def __hash__(self): ... + +def calculateHashCode(parent: PredictionContext, returnState: int): ... +def calculateListsHashCode(parents: list, returnStates: list): ... + +class PredictionContextCache: + cache: Incomplete + def __init__(self) -> None: ... + def add(self, ctx: PredictionContext): ... + def get(self, ctx: PredictionContext): ... + def __len__(self) -> int: ... + +class SingletonPredictionContext(PredictionContext): + @staticmethod + def create(parent: PredictionContext, returnState: int): ... + parentCtx: Incomplete + returnState: Incomplete + def __init__(self, parent: PredictionContext, returnState: int) -> None: ... + def __len__(self) -> int: ... + def getParent(self, index: int): ... + def getReturnState(self, index: int): ... + def __eq__(self, other): ... + def __hash__(self): ... + +class EmptyPredictionContext(SingletonPredictionContext): + def __init__(self) -> None: ... + def isEmpty(self): ... + def __eq__(self, other): ... 
+ def __hash__(self): ... + +class ArrayPredictionContext(PredictionContext): + parents: Incomplete + returnStates: Incomplete + def __init__(self, parents: list, returnStates: list) -> None: ... + def isEmpty(self): ... + def __len__(self) -> int: ... + def getParent(self, index: int): ... + def getReturnState(self, index: int): ... + def __eq__(self, other): ... + def __hash__(self): ... + +def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...): ... +def merge(a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict): ... +def mergeSingletons(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict): ... +def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... +def mergeArrays(a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict): ... +def combineCommonParents(parents: list): ... +def getCachedPredictionContext(context: PredictionContext, contextCache: PredictionContextCache, visited: dict): ... +def getAllContextNodes(context: PredictionContext, nodes: list = ..., visited: dict = ...): ... diff --git a/stubs/antlr4/Recognizer.pyi b/stubs/antlr4/Recognizer.pyi new file mode 100644 index 000000000000..142418845c00 --- /dev/null +++ b/stubs/antlr4/Recognizer.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListener, ProxyErrorListener as ProxyErrorListener + +RecognitionException: Incomplete + +class Recognizer: + tokenTypeMapCache: Incomplete + ruleIndexMapCache: Incomplete + def __init__(self) -> None: ... + def extractVersion(self, version): ... + def checkVersion(self, toolVersion) -> None: ... + def addErrorListener(self, listener) -> None: ... + def removeErrorListener(self, listener) -> None: ... + def removeErrorListeners(self) -> None: ... + def getTokenTypeMap(self): ... + def getRuleIndexMap(self): ... + def getTokenType(self, tokenName: str): ... + def getErrorHeader(self, e: RecognitionException): ... + def getTokenErrorDisplay(self, t: Token): ... + def getErrorListenerDispatch(self): ... + def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... + def precpred(self, localctx: RuleContext, precedence: int): ... + @property + def state(self): ... + @state.setter + def state(self, atnState: int): ... diff --git a/stubs/antlr4/RuleContext.pyi b/stubs/antlr4/RuleContext.pyi new file mode 100644 index 000000000000..cc984eb49e46 --- /dev/null +++ b/stubs/antlr4/RuleContext.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from antlr4.tree.Tree import INVALID_INTERVAL as INVALID_INTERVAL, ParseTreeVisitor as ParseTreeVisitor, RuleNode as RuleNode +from antlr4.tree.Trees import Trees as Trees +from collections.abc import Generator + +Parser: Incomplete + +class RuleContext(RuleNode): + EMPTY: Incomplete + parentCtx: Incomplete + invokingState: Incomplete + def __init__(self, parent: RuleContext = ..., invokingState: int = ...) -> None: ... + def depth(self): ... + def isEmpty(self): ... + def getSourceInterval(self): ... + def getRuleContext(self): ... + def getPayload(self): ... + def getText(self): ... + def getRuleIndex(self): ... + def getAltNumber(self): ... + def setAltNumber(self, altNumber: int): ... + def getChild(self, i: int): ... + def getChildCount(self): ... 
+ def getChildren(self) -> Generator[Incomplete, None, None]: ... + def accept(self, visitor: ParseTreeVisitor): ... + def toStringTree(self, ruleNames: list = ..., recog: Parser = ...): ... + def toString(self, ruleNames: list, stop: RuleContext) -> str: ... diff --git a/stubs/antlr4/StdinStream.pyi b/stubs/antlr4/StdinStream.pyi new file mode 100644 index 000000000000..2adec17b9328 --- /dev/null +++ b/stubs/antlr4/StdinStream.pyi @@ -0,0 +1,4 @@ +from antlr4.InputStream import InputStream as InputStream + +class StdinStream(InputStream): + def __init__(self, encoding: str = ..., errors: str = ...) -> None: ... diff --git a/stubs/antlr4/Token.pyi b/stubs/antlr4/Token.pyi new file mode 100644 index 000000000000..b9d12a08134d --- /dev/null +++ b/stubs/antlr4/Token.pyi @@ -0,0 +1,41 @@ +from _typeshed import Incomplete + +class Token: + INVALID_TYPE: int + EPSILON: int + MIN_USER_TOKEN_TYPE: int + EOF: int + DEFAULT_CHANNEL: int + HIDDEN_CHANNEL: int + source: Incomplete + type: Incomplete + channel: Incomplete + start: Incomplete + stop: Incomplete + tokenIndex: Incomplete + line: Incomplete + column: Incomplete + def __init__(self) -> None: ... + @property + def text(self): ... + @text.setter + def text(self, text: str): ... + def getTokenSource(self): ... + def getInputStream(self): ... + +class CommonToken(Token): + EMPTY_SOURCE: Incomplete + source: Incomplete + type: Incomplete + channel: Incomplete + start: Incomplete + stop: Incomplete + tokenIndex: int + line: Incomplete + column: Incomplete + def __init__(self, source: tuple = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ...) -> None: ... + def clone(self): ... + @property + def text(self): ... + @text.setter + def text(self, text: str): ... diff --git a/stubs/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4/TokenStreamRewriter.pyi new file mode 100644 index 000000000000..826ee25b149f --- /dev/null +++ b/stubs/antlr4/TokenStreamRewriter.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream +from antlr4.Token import Token as Token + +class TokenStreamRewriter: + DEFAULT_PROGRAM_NAME: str + PROGRAM_INIT_SIZE: int + MIN_TOKEN_INDEX: int + tokens: Incomplete + programs: Incomplete + lastRewriteTokenIndexes: Incomplete + def __init__(self, tokens) -> None: ... + def getTokenStream(self): ... + def rollback(self, instruction_index, program_name) -> None: ... + def deleteProgram(self, program_name=...) -> None: ... + def insertAfterToken(self, token, text, program_name=...) -> None: ... + def insertAfter(self, index, text, program_name=...) -> None: ... + def insertBeforeIndex(self, index, text) -> None: ... + def insertBeforeToken(self, token, text, program_name=...) -> None: ... + def insertBefore(self, program_name, index, text) -> None: ... + def replaceIndex(self, index, text) -> None: ... + def replaceRange(self, from_idx, to_idx, text) -> None: ... + def replaceSingleToken(self, token, text) -> None: ... + def replaceRangeTokens(self, from_token, to_token, text, program_name=...) -> None: ... + def replace(self, program_name, from_idx, to_idx, text) -> None: ... + def deleteToken(self, token) -> None: ... + def deleteIndex(self, index) -> None: ... + def delete(self, program_name, from_idx, to_idx) -> None: ... + def lastRewriteTokenIndex(self, program_name=...): ... + def setLastRewriteTokenIndex(self, program_name, i) -> None: ... + def getProgram(self, program_name): ... + def getDefaultText(self): ... 
+ def getText(self, program_name, start: int, stop: int): ... + class RewriteOperation: + tokens: Incomplete + index: Incomplete + text: Incomplete + instructionIndex: int + def __init__(self, tokens, index, text: str = ...) -> None: ... + def execute(self, buf): ... + class InsertBeforeOp(RewriteOperation): + def __init__(self, tokens, index, text: str = ...) -> None: ... + def execute(self, buf): ... + class InsertAfterOp(InsertBeforeOp): ... + class ReplaceOp(RewriteOperation): + last_index: Incomplete + def __init__(self, from_idx, to_idx, tokens, text) -> None: ... + def execute(self, buf): ... diff --git a/stubs/antlr4/Utils.pyi b/stubs/antlr4/Utils.pyi new file mode 100644 index 000000000000..42f2c4261b11 --- /dev/null +++ b/stubs/antlr4/Utils.pyi @@ -0,0 +1,2 @@ +def str_list(val): ... +def escapeWhitespace(s: str, escapeSpaces: bool): ... diff --git a/stubs/antlr4/__init__.pyi b/stubs/antlr4/__init__.pyi new file mode 100644 index 000000000000..6d15b5fb2628 --- /dev/null +++ b/stubs/antlr4/__init__.pyi @@ -0,0 +1,21 @@ +from antlr4.BufferedTokenStream import TokenStream as TokenStream +from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream +from antlr4.FileStream import FileStream as FileStream +from antlr4.InputStream import InputStream as InputStream +from antlr4.Lexer import Lexer as Lexer +from antlr4.Parser import Parser as Parser +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext, RuleContext as RuleContext +from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache +from antlr4.StdinStream import StdinStream as StdinStream +from antlr4.Token import Token as Token +from antlr4.Utils import str_list as str_list +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator +from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator +from antlr4.atn.PredictionMode import PredictionMode as PredictionMode +from antlr4.dfa.DFA import DFA as DFA +from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener as DiagnosticErrorListener +from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy +from antlr4.error.Errors import IllegalStateException as IllegalStateException, NoViableAltException as NoViableAltException, RecognitionException as RecognitionException +from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, ParseTreeVisitor as ParseTreeVisitor, ParseTreeWalker as ParseTreeWalker, RuleNode as RuleNode, TerminalNode as TerminalNode diff --git a/stubs/antlr4/_pygrun.pyi b/stubs/antlr4/_pygrun.pyi new file mode 100644 index 000000000000..c45632a1e4d1 --- /dev/null +++ b/stubs/antlr4/_pygrun.pyi @@ -0,0 +1,4 @@ +from antlr4 import * + +def beautify_lisp_string(in_string): ... +def main() -> None: ... 
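
Not part of the patch: a runnable sketch exercising only InputStream and Token as declared in the stubs above, to illustrate the surface these annotations describe. The literal "abc" is an arbitrary example input.

from antlr4 import InputStream, Token

stream = InputStream("abc")                  # InputStream.__init__(self, data: str)
print(stream.size)                           # `size` property -> 3
first = stream.LA(1)                         # lookahead: code point of the current char, or Token.EOF
print(chr(first) if first != Token.EOF else "<EOF>")
print(stream.getText(0, stream.size - 1))    # -> "abc"
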
diff --git a/stubs/antlr4/atn/ATN.pyi b/stubs/antlr4/atn/ATN.pyi new file mode 100644 index 000000000000..a5d188e8a4bd --- /dev/null +++ b/stubs/antlr4/atn/ATN.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from antlr4.IntervalSet import IntervalSet as IntervalSet +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState +from antlr4.atn.ATNType import ATNType as ATNType + +class ATN: + INVALID_ALT_NUMBER: int + grammarType: Incomplete + maxTokenType: Incomplete + states: Incomplete + decisionToState: Incomplete + ruleToStartState: Incomplete + ruleToStopState: Incomplete + modeNameToStartState: Incomplete + ruleToTokenType: Incomplete + lexerActions: Incomplete + modeToStartState: Incomplete + def __init__(self, grammarType: ATNType, maxTokenType: int) -> None: ... + def nextTokensInContext(self, s: ATNState, ctx: RuleContext): ... + def nextTokensNoContext(self, s: ATNState): ... + def nextTokens(self, s: ATNState, ctx: RuleContext = ...): ... + def addState(self, state: ATNState): ... + def removeState(self, state: ATNState): ... + def defineDecisionState(self, s: DecisionState): ... + def getDecisionState(self, decision: int): ... + def getExpectedTokens(self, stateNumber: int, ctx: RuleContext): ... diff --git a/stubs/antlr4/atn/ATNConfig.pyi b/stubs/antlr4/atn/ATNConfig.pyi new file mode 100644 index 000000000000..67b15428fd45 --- /dev/null +++ b/stubs/antlr4/atn/ATNConfig.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from antlr4.PredictionContext import PredictionContext as PredictionContext +from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState +from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor +from antlr4.atn.SemanticContext import SemanticContext as SemanticContext + +class ATNConfig: + state: Incomplete + alt: Incomplete + context: Incomplete + semanticContext: Incomplete + reachesIntoOuterContext: Incomplete + precedenceFilterSuppressed: Incomplete + def __init__(self, state: ATNState = ..., alt: int = ..., context: PredictionContext = ..., semantic: SemanticContext = ..., config: ATNConfig = ...) -> None: ... + def __eq__(self, other): ... + def __hash__(self): ... + def hashCodeForConfigSet(self): ... + def equalsForConfigSet(self, other): ... + +class LexerATNConfig(ATNConfig): + lexerActionExecutor: Incomplete + passedThroughNonGreedyDecision: Incomplete + def __init__(self, state: ATNState, alt: int = ..., context: PredictionContext = ..., semantic: SemanticContext = ..., lexerActionExecutor: LexerActionExecutor = ..., config: LexerATNConfig = ...) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + def hashCodeForConfigSet(self): ... + def equalsForConfigSet(self, other): ... + def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState): ... 
diff --git a/stubs/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4/atn/ATNConfigSet.pyi new file mode 100644 index 000000000000..81f80880b64e --- /dev/null +++ b/stubs/antlr4/atn/ATNConfigSet.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from antlr4.PredictionContext import merge as merge +from antlr4.Utils import str_list as str_list +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfig import ATNConfig as ATNConfig +from antlr4.atn.SemanticContext import SemanticContext as SemanticContext +from antlr4.error.Errors import IllegalStateException as IllegalStateException, UnsupportedOperationException as UnsupportedOperationException + +ATNSimulator: Incomplete + +class ATNConfigSet: + configLookup: Incomplete + fullCtx: Incomplete + readonly: bool + configs: Incomplete + uniqueAlt: int + conflictingAlts: Incomplete + hasSemanticContext: bool + dipsIntoOuterContext: bool + cachedHashCode: int + def __init__(self, fullCtx: bool = ...) -> None: ... + def __iter__(self): ... + def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...): ... + def getOrAdd(self, config: ATNConfig): ... + def getStates(self): ... + def getPredicates(self): ... + def get(self, i: int): ... + def optimizeConfigs(self, interpreter: ATNSimulator): ... + def addAll(self, coll: list): ... + def __eq__(self, other): ... + def __hash__(self): ... + def hashConfigs(self): ... + def __len__(self) -> int: ... + def isEmpty(self): ... + def __contains__(self, config) -> bool: ... + def clear(self) -> None: ... + def setReadonly(self, readonly: bool): ... + +class OrderedATNConfigSet(ATNConfigSet): + def __init__(self) -> None: ... diff --git a/stubs/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4/atn/ATNDeserializationOptions.pyi new file mode 100644 index 000000000000..0a5111d60c79 --- /dev/null +++ b/stubs/antlr4/atn/ATNDeserializationOptions.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +class ATNDeserializationOptions: + defaultOptions: Incomplete + readonly: bool + verifyATN: Incomplete + generateRuleBypassTransitions: Incomplete + def __init__(self, copyFrom: ATNDeserializationOptions = ...) -> None: ... + def __setattr__(self, key, value) -> None: ... diff --git a/stubs/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4/atn/ATNDeserializer.pyi new file mode 100644 index 000000000000..43664d7cc8df --- /dev/null +++ b/stubs/antlr4/atn/ATNDeserializer.pyi @@ -0,0 +1,48 @@ +from antlr4.atn.ATNState import * +from antlr4.atn.Transition import * +from antlr4.atn.LexerAction import * +from _typeshed import Incomplete +from antlr4.Token import Token as Token +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions +from antlr4.atn.ATNType import ATNType as ATNType +from io import StringIO as StringIO + +SERIALIZED_VERSION: int + +class ATNDeserializer: + deserializationOptions: Incomplete + def __init__(self, options: ATNDeserializationOptions = ...) -> None: ... + data: Incomplete + pos: int + def deserialize(self, data: list[int]): ... + def checkVersion(self) -> None: ... + def readATN(self): ... + def readStates(self, atn: ATN): ... + def readRules(self, atn: ATN): ... + def readModes(self, atn: ATN): ... + def readSets(self, atn: ATN, sets: list): ... + def readEdges(self, atn: ATN, sets: list): ... + def readDecisions(self, atn: ATN): ... + def readLexerActions(self, atn: ATN): ... + def generateRuleBypassTransitions(self, atn: ATN): ... 
+ def generateRuleBypassTransition(self, atn: ATN, idx: int): ... + def stateIsEndStateFor(self, state: ATNState, idx: int): ... + def markPrecedenceDecisions(self, atn: ATN): ... + def verifyATN(self, atn: ATN): ... + def checkCondition(self, condition: bool, message: Incomplete | None = ...): ... + def readInt(self): ... + edgeFactories: Incomplete + def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list): ... + stateFactories: Incomplete + def stateFactory(self, type: int, ruleIndex: int): ... + CHANNEL: int + CUSTOM: int + MODE: int + MORE: int + POP_MODE: int + PUSH_MODE: int + SKIP: int + TYPE: int + actionFactories: Incomplete + def lexerActionFactory(self, type: int, data1: int, data2: int): ... diff --git a/stubs/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4/atn/ATNSimulator.pyi new file mode 100644 index 000000000000..cf032a691ef6 --- /dev/null +++ b/stubs/antlr4/atn/ATNSimulator.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, getCachedPredictionContext as getCachedPredictionContext +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.dfa.DFAState import DFAState as DFAState + +class ATNSimulator: + ERROR: Incomplete + atn: Incomplete + sharedContextCache: Incomplete + def __init__(self, atn: ATN, sharedContextCache: PredictionContextCache) -> None: ... + def getCachedContext(self, context: PredictionContext): ... diff --git a/stubs/antlr4/atn/ATNState.pyi b/stubs/antlr4/atn/ATNState.pyi new file mode 100644 index 000000000000..2874d6b08724 --- /dev/null +++ b/stubs/antlr4/atn/ATNState.pyi @@ -0,0 +1,98 @@ +from _typeshed import Incomplete +from antlr4.atn.Transition import Transition as Transition + +INITIAL_NUM_TRANSITIONS: int + +class ATNState: + INVALID_TYPE: int + BASIC: int + RULE_START: int + BLOCK_START: int + PLUS_BLOCK_START: int + STAR_BLOCK_START: int + TOKEN_START: int + RULE_STOP: int + BLOCK_END: int + STAR_LOOP_BACK: int + STAR_LOOP_ENTRY: int + PLUS_LOOP_BACK: int + LOOP_END: int + serializationNames: Incomplete + INVALID_STATE_NUMBER: int + atn: Incomplete + stateNumber: Incomplete + stateType: Incomplete + ruleIndex: int + epsilonOnlyTransitions: bool + transitions: Incomplete + nextTokenWithinRule: Incomplete + def __init__(self) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + def onlyHasEpsilonTransitions(self): ... + def isNonGreedyExitState(self): ... + def addTransition(self, trans: Transition, index: int = ...): ... + +class BasicState(ATNState): + stateType: Incomplete + def __init__(self) -> None: ... + +class DecisionState(ATNState): + decision: int + nonGreedy: bool + def __init__(self) -> None: ... + +class BlockStartState(DecisionState): + endState: Incomplete + def __init__(self) -> None: ... + +class BasicBlockStartState(BlockStartState): + stateType: Incomplete + def __init__(self) -> None: ... + +class BlockEndState(ATNState): + stateType: Incomplete + startState: Incomplete + def __init__(self) -> None: ... + +class RuleStopState(ATNState): + stateType: Incomplete + def __init__(self) -> None: ... + +class RuleStartState(ATNState): + stateType: Incomplete + stopState: Incomplete + isPrecedenceRule: bool + def __init__(self) -> None: ... + +class PlusLoopbackState(DecisionState): + stateType: Incomplete + def __init__(self) -> None: ... 
+ +class PlusBlockStartState(BlockStartState): + stateType: Incomplete + loopBackState: Incomplete + def __init__(self) -> None: ... + +class StarBlockStartState(BlockStartState): + stateType: Incomplete + def __init__(self) -> None: ... + +class StarLoopbackState(ATNState): + stateType: Incomplete + def __init__(self) -> None: ... + +class StarLoopEntryState(DecisionState): + stateType: Incomplete + loopBackState: Incomplete + isPrecedenceDecision: Incomplete + def __init__(self) -> None: ... + +class LoopEndState(ATNState): + stateType: Incomplete + loopBackState: Incomplete + def __init__(self) -> None: ... + +class TokensStartState(DecisionState): + stateType: Incomplete + def __init__(self) -> None: ... diff --git a/stubs/antlr4/atn/ATNType.pyi b/stubs/antlr4/atn/ATNType.pyi new file mode 100644 index 000000000000..888deeb2be1e --- /dev/null +++ b/stubs/antlr4/atn/ATNType.pyi @@ -0,0 +1,7 @@ +from enum import IntEnum + +class ATNType(IntEnum): + LEXER: int + PARSER: int + @classmethod + def fromOrdinal(cls, i: int): ... diff --git a/stubs/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4/atn/LexerATNSimulator.pyi new file mode 100644 index 000000000000..f48e8090a762 --- /dev/null +++ b/stubs/antlr4/atn/LexerATNSimulator.pyi @@ -0,0 +1,62 @@ +from _typeshed import Incomplete +from antlr4.InputStream import InputStream as InputStream +from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, SingletonPredictionContext as SingletonPredictionContext +from antlr4.Token import Token as Token +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfig import LexerATNConfig as LexerATNConfig +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet, OrderedATNConfigSet as OrderedATNConfigSet +from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator +from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState +from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor +from antlr4.atn.Transition import Transition as Transition +from antlr4.dfa.DFAState import DFAState as DFAState +from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException, UnsupportedOperationException as UnsupportedOperationException + +class SimState: + def __init__(self) -> None: ... + index: int + line: int + column: int + dfaState: Incomplete + def reset(self) -> None: ... + +Lexer: Incomplete + +class LexerATNSimulator(ATNSimulator): + debug: bool + dfa_debug: bool + MIN_DFA_EDGE: int + MAX_DFA_EDGE: int + ERROR: Incomplete + decisionToDFA: Incomplete + recog: Incomplete + startIndex: int + line: int + column: int + mode: Incomplete + DEFAULT_MODE: Incomplete + MAX_CHAR_VALUE: Incomplete + prevAccept: Incomplete + def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list, sharedContextCache: PredictionContextCache) -> None: ... + def copyState(self, simulator: LexerATNSimulator): ... + def match(self, input: InputStream, mode: int): ... + def reset(self) -> None: ... + def matchATN(self, input: InputStream): ... + def execATN(self, input: InputStream, ds0: DFAState): ... + def getExistingTargetState(self, s: DFAState, t: int): ... + def computeTargetState(self, input: InputStream, s: DFAState, t: int): ... + def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int): ... + def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int): ... 
+ def accept(self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int): ... + def getReachableTarget(self, trans: Transition, t: int): ... + def computeStartState(self, input: InputStream, p: ATNState): ... + def closure(self, input: InputStream, config: LexerATNConfig, configs: ATNConfigSet, currentAltReachedAcceptState: bool, speculative: bool, treatEofAsEpsilon: bool): ... + def getEpsilonTarget(self, input: InputStream, config: LexerATNConfig, t: Transition, configs: ATNConfigSet, speculative: bool, treatEofAsEpsilon: bool): ... + def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool): ... + def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState): ... + def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState = ..., cfgs: ATNConfigSet = ...) -> DFAState: ... + def addDFAState(self, configs: ATNConfigSet) -> DFAState: ... + def getDFA(self, mode: int): ... + def getText(self, input: InputStream): ... + def consume(self, input: InputStream): ... + def getTokenName(self, t: int): ... diff --git a/stubs/antlr4/atn/LexerAction.pyi b/stubs/antlr4/atn/LexerAction.pyi new file mode 100644 index 000000000000..0309d3549f50 --- /dev/null +++ b/stubs/antlr4/atn/LexerAction.pyi @@ -0,0 +1,82 @@ +from _typeshed import Incomplete +from enum import IntEnum + +Lexer: Incomplete + +class LexerActionType(IntEnum): + CHANNEL: int + CUSTOM: int + MODE: int + MORE: int + POP_MODE: int + PUSH_MODE: int + SKIP: int + TYPE: int + +class LexerAction: + actionType: Incomplete + isPositionDependent: bool + def __init__(self, action: LexerActionType) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerSkipAction(LexerAction): + INSTANCE: Incomplete + def __init__(self) -> None: ... + def execute(self, lexer: Lexer): ... + +class LexerTypeAction(LexerAction): + type: Incomplete + def __init__(self, type: int) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerPushModeAction(LexerAction): + mode: Incomplete + def __init__(self, mode: int) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerPopModeAction(LexerAction): + INSTANCE: Incomplete + def __init__(self) -> None: ... + def execute(self, lexer: Lexer): ... + +class LexerMoreAction(LexerAction): + INSTANCE: Incomplete + def __init__(self) -> None: ... + def execute(self, lexer: Lexer): ... + +class LexerModeAction(LexerAction): + mode: Incomplete + def __init__(self, mode: int) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerCustomAction(LexerAction): + ruleIndex: Incomplete + actionIndex: Incomplete + isPositionDependent: bool + def __init__(self, ruleIndex: int, actionIndex: int) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerChannelAction(LexerAction): + channel: Incomplete + def __init__(self, channel: int) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class LexerIndexedCustomAction(LexerAction): + offset: Incomplete + action: Incomplete + isPositionDependent: bool + def __init__(self, offset: int, action: LexerAction) -> None: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... 
diff --git a/stubs/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4/atn/LexerActionExecutor.pyi new file mode 100644 index 000000000000..62067c049227 --- /dev/null +++ b/stubs/antlr4/atn/LexerActionExecutor.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from antlr4.InputStream import InputStream as InputStream +from antlr4.atn.LexerAction import LexerAction as LexerAction, LexerIndexedCustomAction as LexerIndexedCustomAction + +Lexer: Incomplete + +class LexerActionExecutor: + lexerActions: Incomplete + hashCode: Incomplete + def __init__(self, lexerActions: list = ...) -> None: ... + @staticmethod + def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... + def fixOffsetBeforeMatch(self, offset: int): ... + def execute(self, lexer: Lexer, input: InputStream, startIndex: int): ... + def __hash__(self): ... + def __eq__(self, other): ... diff --git a/stubs/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4/atn/ParserATNSimulator.pyi new file mode 100644 index 000000000000..69cd20dbec3f --- /dev/null +++ b/stubs/antlr4/atn/ParserATNSimulator.pyi @@ -0,0 +1,71 @@ +from _typeshed import Incomplete +from antlr4 import DFA as DFA +from antlr4.BufferedTokenStream import TokenStream as TokenStream +from antlr4.Parser import Parser as Parser +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext +from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, PredictionContextFromRuleContext as PredictionContextFromRuleContext, SingletonPredictionContext as SingletonPredictionContext +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.Utils import str_list as str_list +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfig import ATNConfig as ATNConfig +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator +from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState, RuleStopState as RuleStopState +from antlr4.atn.PredictionMode import PredictionMode as PredictionMode +from antlr4.atn.SemanticContext import SemanticContext as SemanticContext, andContext as andContext, orContext as orContext +from antlr4.atn.Transition import ActionTransition as ActionTransition, AtomTransition as AtomTransition, NotSetTransition as NotSetTransition, PrecedencePredicateTransition as PrecedencePredicateTransition, PredicateTransition as PredicateTransition, RuleTransition as RuleTransition, SetTransition as SetTransition, Transition as Transition +from antlr4.dfa.DFAState import DFAState as DFAState, PredPrediction as PredPrediction +from antlr4.error.Errors import NoViableAltException as NoViableAltException + +class ParserATNSimulator(ATNSimulator): + debug: bool + trace_atn_sim: bool + dfa_debug: bool + retry_debug: bool + parser: Incomplete + decisionToDFA: Incomplete + predictionMode: Incomplete + mergeCache: Incomplete + def __init__(self, parser: Parser, atn: ATN, decisionToDFA: list, sharedContextCache: PredictionContextCache) -> None: ... + def reset(self) -> None: ... + def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... + def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... + def getExistingTargetState(self, previousD: DFAState, t: int): ... 
+ def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ... + def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ... + def execATNWithFullContext(self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... + def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool): ... + def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool): ... + def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ... + def applyPrecedenceFilter(self, configs: ATNConfigSet): ... + def getReachableTarget(self, trans: Transition, ttype: int): ... + def getPredsForAmbigAlts(self, ambigAlts: set, configs: ATNConfigSet, nalts: int): ... + def getPredicatePredictions(self, ambigAlts: set, altToPred: list): ... + def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... + def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... + def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... + def evalSemanticContext(self, predPredictions: list, outerContext: ParserRuleContext, complete: bool): ... + def closure(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, treatEofAsEpsilon: bool): ... + def closureCheckingStopState(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, depth: int, treatEofAsEpsilon: bool): ... + def closure_(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, depth: int, treatEofAsEpsilon: bool): ... + def canDropLoopEntryEdgeInLeftRecursiveRule(self, config): ... + def getRuleName(self, index: int): ... + epsilonTargetMethods: Incomplete + def getEpsilonTarget(self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool): ... + def actionTransition(self, config: ATNConfig, t: ActionTransition): ... + def precedenceTransition(self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool): ... + def predTransition(self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool): ... + def ruleTransition(self, config: ATNConfig, t: RuleTransition): ... + def getConflictingAlts(self, configs: ATNConfigSet): ... + def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet): ... + def getTokenName(self, t: int): ... + def getLookaheadName(self, input: TokenStream): ... + def dumpDeadEndConfigs(self, nvae: NoViableAltException): ... + def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int): ... + def getUniqueAlt(self, configs: ATNConfigSet): ... + def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... + def addDFAState(self, dfa: DFA, D: DFAState): ... + def reportAttemptingFullContext(self, dfa: DFA, conflictingAlts: set, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... + def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... + def reportAmbiguity(self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet): ... 
diff --git a/stubs/antlr4/atn/PredictionMode.pyi b/stubs/antlr4/atn/PredictionMode.pyi new file mode 100644 index 000000000000..2f69b9323c1d --- /dev/null +++ b/stubs/antlr4/atn/PredictionMode.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNConfig import ATNConfig as ATNConfig +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.atn.ATNState import RuleStopState as RuleStopState +from antlr4.atn.SemanticContext import SemanticContext as SemanticContext +from enum import Enum + +class PredictionMode(Enum): + SLL: int + LL: int + LL_EXACT_AMBIG_DETECTION: int + @classmethod + def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet): ... + @classmethod + def hasConfigInRuleStopState(cls, configs: ATNConfigSet): ... + @classmethod + def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ... + @classmethod + def resolvesToJustOneViableAlt(cls, altsets: list): ... + @classmethod + def allSubsetsConflict(cls, altsets: list): ... + @classmethod + def hasNonConflictingAltSet(cls, altsets: list): ... + @classmethod + def hasConflictingAltSet(cls, altsets: list): ... + @classmethod + def allSubsetsEqual(cls, altsets: list): ... + @classmethod + def getUniqueAlt(cls, altsets: list): ... + @classmethod + def getAlts(cls, altsets: list): ... + @classmethod + def getConflictingAltSubsets(cls, configs: ATNConfigSet): ... + @classmethod + def getStateToAltMap(cls, configs: ATNConfigSet): ... + @classmethod + def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ... + @classmethod + def getSingleViableAlt(cls, altsets: list): ... diff --git a/stubs/antlr4/atn/SemanticContext.pyi b/stubs/antlr4/atn/SemanticContext.pyi new file mode 100644 index 000000000000..444caaae9047 --- /dev/null +++ b/stubs/antlr4/atn/SemanticContext.pyi @@ -0,0 +1,49 @@ +from _typeshed import Incomplete +from antlr4.Recognizer import Recognizer as Recognizer +from antlr4.RuleContext import RuleContext as RuleContext + +class SemanticContext: + NONE: Incomplete + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + +def andContext(a: SemanticContext, b: SemanticContext): ... + +def orContext(a: SemanticContext, b: SemanticContext): ... +def filterPrecedencePredicates(collection: set): ... + +class EmptySemanticContext(SemanticContext): ... + +class Predicate(SemanticContext): + ruleIndex: Incomplete + predIndex: Incomplete + isCtxDependent: Incomplete + def __init__(self, ruleIndex: int = ..., predIndex: int = ..., isCtxDependent: bool = ...) -> None: ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class PrecedencePredicate(SemanticContext): + precedence: Incomplete + def __init__(self, precedence: int = ...) -> None: ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + def __lt__(self, other): ... + def __hash__(self): ... + def __eq__(self, other): ... + +class AND(SemanticContext): + opnds: Incomplete + def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... + def __eq__(self, other): ... + def __hash__(self): ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... 
+ +class OR(SemanticContext): + opnds: Incomplete + def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... + def __eq__(self, other): ... + def __hash__(self): ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... diff --git a/stubs/antlr4/atn/Transition.pyi b/stubs/antlr4/atn/Transition.pyi new file mode 100644 index 000000000000..3e19c0e52df7 --- /dev/null +++ b/stubs/antlr4/atn/Transition.pyi @@ -0,0 +1,102 @@ +from antlr4.atn.ATNState import * +from _typeshed import Incomplete +from antlr4.IntervalSet import IntervalSet as IntervalSet +from antlr4.Token import Token as Token +from antlr4.atn.SemanticContext import PrecedencePredicate as PrecedencePredicate, Predicate as Predicate + +class Transition: + EPSILON: int + RANGE: int + RULE: int + PREDICATE: int + ATOM: int + ACTION: int + SET: int + NOT_SET: int + WILDCARD: int + PRECEDENCE: int + serializationNames: Incomplete + serializationTypes: Incomplete + target: Incomplete + isEpsilon: bool + label: Incomplete + def __init__(self, target: ATNState) -> None: ... + +class AtomTransition(Transition): + label_: Incomplete + label: Incomplete + serializationType: Incomplete + def __init__(self, target: ATNState, label: int) -> None: ... + def makeLabel(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class RuleTransition(Transition): + ruleIndex: Incomplete + precedence: Incomplete + followState: Incomplete + serializationType: Incomplete + isEpsilon: bool + def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState: ATNState) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class EpsilonTransition(Transition): + serializationType: Incomplete + isEpsilon: bool + outermostPrecedenceReturn: Incomplete + def __init__(self, target, outermostPrecedenceReturn: int = ...) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class RangeTransition(Transition): + serializationType: Incomplete + start: Incomplete + stop: Incomplete + label: Incomplete + def __init__(self, target: ATNState, start: int, stop: int) -> None: ... + def makeLabel(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class AbstractPredicateTransition(Transition): + def __init__(self, target: ATNState) -> None: ... + +class PredicateTransition(AbstractPredicateTransition): + serializationType: Incomplete + ruleIndex: Incomplete + predIndex: Incomplete + isCtxDependent: Incomplete + isEpsilon: bool + def __init__(self, target: ATNState, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def getPredicate(self): ... + +class ActionTransition(Transition): + serializationType: Incomplete + ruleIndex: Incomplete + actionIndex: Incomplete + isCtxDependent: Incomplete + isEpsilon: bool + def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = ..., isCtxDependent: bool = ...) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class SetTransition(Transition): + serializationType: Incomplete + label: Incomplete + def __init__(self, target: ATNState, set: IntervalSet) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... 
+ +class NotSetTransition(SetTransition): + serializationType: Incomplete + def __init__(self, target: ATNState, set: IntervalSet) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class WildcardTransition(Transition): + serializationType: Incomplete + def __init__(self, target: ATNState) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + +class PrecedencePredicateTransition(AbstractPredicateTransition): + serializationType: Incomplete + precedence: Incomplete + isEpsilon: bool + def __init__(self, target: ATNState, precedence: int) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def getPredicate(self): ... diff --git a/stubs/antlr4/atn/__init__.pyi b/stubs/antlr4/atn/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/antlr4/dfa/DFA.pyi b/stubs/antlr4/dfa/DFA.pyi new file mode 100644 index 000000000000..72ea5c57a17b --- /dev/null +++ b/stubs/antlr4/dfa/DFA.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.atn.ATNState import DecisionState as DecisionState, StarLoopEntryState as StarLoopEntryState +from antlr4.dfa.DFAState import DFAState as DFAState +from antlr4.error.Errors import IllegalStateException as IllegalStateException + +class DFA: + atnStartState: Incomplete + decision: Incomplete + s0: Incomplete + precedenceDfa: bool + def __init__(self, atnStartState: DecisionState, decision: int = ...) -> None: ... + def getPrecedenceStartState(self, precedence: int): ... + def setPrecedenceStartState(self, precedence: int, startState: DFAState): ... + def setPrecedenceDfa(self, precedenceDfa: bool): ... + @property + def states(self): ... + def sortedStates(self): ... + def toString(self, literalNames: list = ..., symbolicNames: list = ...): ... + def toLexerString(self): ... diff --git a/stubs/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4/dfa/DFASerializer.pyi new file mode 100644 index 000000000000..a789db9b9e36 --- /dev/null +++ b/stubs/antlr4/dfa/DFASerializer.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from antlr4 import DFA as DFA +from antlr4.Utils import str_list as str_list +from antlr4.dfa.DFAState import DFAState as DFAState + +class DFASerializer: + dfa: Incomplete + literalNames: Incomplete + symbolicNames: Incomplete + def __init__(self, dfa: DFA, literalNames: list = ..., symbolicNames: list = ...) -> None: ... + def getEdgeLabel(self, i: int): ... + def getStateString(self, s: DFAState): ... + +class LexerDFASerializer(DFASerializer): + def __init__(self, dfa: DFA) -> None: ... + def getEdgeLabel(self, i: int): ... diff --git a/stubs/antlr4/dfa/DFAState.pyi b/stubs/antlr4/dfa/DFAState.pyi new file mode 100644 index 000000000000..90f6914c63a1 --- /dev/null +++ b/stubs/antlr4/dfa/DFAState.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.atn.SemanticContext import SemanticContext as SemanticContext + +class PredPrediction: + alt: Incomplete + pred: Incomplete + def __init__(self, pred: SemanticContext, alt: int) -> None: ... + +class DFAState: + stateNumber: Incomplete + configs: Incomplete + edges: Incomplete + isAcceptState: bool + prediction: int + lexerActionExecutor: Incomplete + requiresFullContext: bool + predicates: Incomplete + def __init__(self, stateNumber: int = ..., configs: ATNConfigSet = ...) -> None: ... 
+ def getAltSet(self): ... + def __hash__(self): ... + def __eq__(self, other): ... diff --git a/stubs/antlr4/dfa/__init__.pyi b/stubs/antlr4/dfa/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4/error/DiagnosticErrorListener.pyi new file mode 100644 index 000000000000..18c1ff4846c4 --- /dev/null +++ b/stubs/antlr4/error/DiagnosticErrorListener.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from antlr4 import DFA as DFA +from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet +from antlr4.error.ErrorListener import ErrorListener as ErrorListener + +class DiagnosticErrorListener(ErrorListener): + exactOnly: Incomplete + def __init__(self, exactOnly: bool = ...) -> None: ... + def reportAmbiguity(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet): ... + def reportAttemptingFullContext(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set, configs: ATNConfigSet): ... + def reportContextSensitivity(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet): ... + def getDecisionDescription(self, recognizer, dfa: DFA): ... + def getConflictingAlts(self, reportedAlts: set, configs: ATNConfigSet): ... diff --git a/stubs/antlr4/error/ErrorListener.pyi b/stubs/antlr4/error/ErrorListener.pyi new file mode 100644 index 000000000000..34482bde9131 --- /dev/null +++ b/stubs/antlr4/error/ErrorListener.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ErrorListener: + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... + def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... + def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... + +class ConsoleErrorListener(ErrorListener): + INSTANCE: Incomplete + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + +class ProxyErrorListener(ErrorListener): + delegates: Incomplete + def __init__(self, delegates) -> None: ... + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... + def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... + def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... diff --git a/stubs/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4/error/ErrorStrategy.pyi new file mode 100644 index 000000000000..5523fe361c7d --- /dev/null +++ b/stubs/antlr4/error/ErrorStrategy.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from antlr4.IntervalSet import IntervalSet as IntervalSet +from antlr4.Token import Token as Token +from antlr4.atn.ATNState import ATNState as ATNState +from antlr4.error.Errors import FailedPredicateException as FailedPredicateException, InputMismatchException as InputMismatchException, NoViableAltException as NoViableAltException, ParseCancellationException as ParseCancellationException, RecognitionException as RecognitionException + +Parser: Incomplete + +class ErrorStrategy: + def reset(self, recognizer: Parser): ... 
+ def recoverInline(self, recognizer: Parser): ... + def recover(self, recognizer: Parser, e: RecognitionException): ... + def sync(self, recognizer: Parser): ... + def inErrorRecoveryMode(self, recognizer: Parser): ... + def reportError(self, recognizer: Parser, e: RecognitionException): ... + +class DefaultErrorStrategy(ErrorStrategy): + errorRecoveryMode: bool + lastErrorIndex: int + lastErrorStates: Incomplete + nextTokensContext: Incomplete + nextTokenState: int + def __init__(self) -> None: ... + def reset(self, recognizer: Parser): ... + def beginErrorCondition(self, recognizer: Parser): ... + def inErrorRecoveryMode(self, recognizer: Parser): ... + def endErrorCondition(self, recognizer: Parser): ... + def reportMatch(self, recognizer: Parser): ... + def reportError(self, recognizer: Parser, e: RecognitionException): ... + def recover(self, recognizer: Parser, e: RecognitionException): ... + nextTokensState: Incomplete + def sync(self, recognizer: Parser): ... + def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException): ... + def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException): ... + def reportFailedPredicate(self, recognizer, e) -> None: ... + def reportUnwantedToken(self, recognizer: Parser): ... + def reportMissingToken(self, recognizer: Parser): ... + def recoverInline(self, recognizer: Parser): ... + def singleTokenInsertion(self, recognizer: Parser): ... + def singleTokenDeletion(self, recognizer: Parser): ... + def getMissingSymbol(self, recognizer: Parser): ... + def getExpectedTokens(self, recognizer: Parser): ... + def getTokenErrorDisplay(self, t: Token): ... + def escapeWSAndQuote(self, s: str): ... + def getErrorRecoverySet(self, recognizer: Parser): ... + def consumeUntil(self, recognizer: Parser, set_: set): ... + +class BailErrorStrategy(DefaultErrorStrategy): + def recover(self, recognizer: Parser, e: RecognitionException): ... + def recoverInline(self, recognizer: Parser): ... + def sync(self, recognizer: Parser): ... diff --git a/stubs/antlr4/error/Errors.pyi b/stubs/antlr4/error/Errors.pyi new file mode 100644 index 000000000000..aa62e0e9ff3c --- /dev/null +++ b/stubs/antlr4/error/Errors.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete +from antlr4.InputStream import InputStream as InputStream +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext +from antlr4.Recognizer import Recognizer as Recognizer + +Token: Incomplete +Parser: Incomplete +Lexer: Incomplete +TokenStream: Incomplete +ATNConfigSet: Incomplete +ParserRulecontext: Incomplete +PredicateTransition: Incomplete +BufferedTokenStream: Incomplete + +class UnsupportedOperationException(Exception): + def __init__(self, msg: str) -> None: ... + +class IllegalStateException(Exception): + def __init__(self, msg: str) -> None: ... + +class CancellationException(IllegalStateException): + def __init__(self, msg: str) -> None: ... + +class RecognitionException(Exception): + message: Incomplete + recognizer: Incomplete + input: Incomplete + ctx: Incomplete + offendingToken: Incomplete + offendingState: int + def __init__(self, message: str = ..., recognizer: Recognizer = ..., input: InputStream = ..., ctx: ParserRulecontext = ...) -> None: ... + def getExpectedTokens(self): ... + +class LexerNoViableAltException(RecognitionException): + startIndex: Incomplete + deadEndConfigs: Incomplete + message: str + def __init__(self, lexer: Lexer, input: InputStream, startIndex: int, deadEndConfigs: ATNConfigSet) -> None: ... 
+ +class NoViableAltException(RecognitionException): + deadEndConfigs: Incomplete + startToken: Incomplete + offendingToken: Incomplete + def __init__(self, recognizer: Parser, input: TokenStream = ..., startToken: Token = ..., offendingToken: Token = ..., deadEndConfigs: ATNConfigSet = ..., ctx: ParserRuleContext = ...) -> None: ... + +class InputMismatchException(RecognitionException): + offendingToken: Incomplete + def __init__(self, recognizer: Parser) -> None: ... + +class FailedPredicateException(RecognitionException): + ruleIndex: Incomplete + predicateIndex: Incomplete + predicate: Incomplete + offendingToken: Incomplete + def __init__(self, recognizer: Parser, predicate: str = ..., message: str = ...) -> None: ... + def formatMessage(self, predicate: str, message: str): ... + +class ParseCancellationException(CancellationException): ... diff --git a/stubs/antlr4/error/__init__.pyi b/stubs/antlr4/error/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/antlr4/tree/Chunk.pyi b/stubs/antlr4/tree/Chunk.pyi new file mode 100644 index 000000000000..e494274ac701 --- /dev/null +++ b/stubs/antlr4/tree/Chunk.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class Chunk: ... + +class TagChunk(Chunk): + tag: Incomplete + label: Incomplete + def __init__(self, tag: str, label: str = ...) -> None: ... + +class TextChunk(Chunk): + text: Incomplete + def __init__(self, text: str) -> None: ... diff --git a/stubs/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4/tree/ParseTreeMatch.pyi new file mode 100644 index 000000000000..9a5f5525dc58 --- /dev/null +++ b/stubs/antlr4/tree/ParseTreeMatch.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from antlr4.tree.ParseTreePattern import ParseTreePattern as ParseTreePattern +from antlr4.tree.Tree import ParseTree as ParseTree + +class ParseTreeMatch: + tree: Incomplete + pattern: Incomplete + labels: Incomplete + mismatchedNode: Incomplete + def __init__(self, tree: ParseTree, pattern: ParseTreePattern, labels: dict, mismatchedNode: ParseTree) -> None: ... + def get(self, label: str): ... + def getAll(self, label: str): ... + def succeeded(self): ... diff --git a/stubs/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4/tree/ParseTreePattern.pyi new file mode 100644 index 000000000000..476e6c41b657 --- /dev/null +++ b/stubs/antlr4/tree/ParseTreePattern.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher +from antlr4.tree.Tree import ParseTree as ParseTree +from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer + +class ParseTreePattern: + matcher: Incomplete + patternRuleIndex: Incomplete + pattern: Incomplete + patternTree: Incomplete + def __init__(self, matcher: ParseTreePatternMatcher, pattern: str, patternRuleIndex: int, patternTree: ParseTree) -> None: ... + def match(self, tree: ParseTree): ... + def matches(self, tree: ParseTree): ... + def findAll(self, tree: ParseTree, xpath: str): ... 
diff --git a/stubs/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4/tree/ParseTreePatternMatcher.pyi new file mode 100644 index 000000000000..fe4ef88848f0 --- /dev/null +++ b/stubs/antlr4/tree/ParseTreePatternMatcher.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream +from antlr4.InputStream import InputStream as InputStream +from antlr4.Lexer import Lexer as Lexer +from antlr4.ListTokenSource import ListTokenSource as ListTokenSource +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext +from antlr4.Token import Token as Token +from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy +from antlr4.error.Errors import ParseCancellationException as ParseCancellationException, RecognitionException as RecognitionException +from antlr4.tree.Chunk import TagChunk as TagChunk, TextChunk as TextChunk +from antlr4.tree.RuleTagToken import RuleTagToken as RuleTagToken +from antlr4.tree.TokenTagToken import TokenTagToken as TokenTagToken +from antlr4.tree.Tree import ParseTree as ParseTree, RuleNode as RuleNode, TerminalNode as TerminalNode + +Parser: Incomplete +ParseTreePattern: Incomplete + +class CannotInvokeStartRule(Exception): + def __init__(self, e: Exception) -> None: ... + +class StartRuleDoesNotConsumeFullPattern(Exception): ... + +class ParseTreePatternMatcher: + lexer: Incomplete + parser: Incomplete + start: str + stop: str + escape: str + def __init__(self, lexer: Lexer, parser: Parser) -> None: ... + def setDelimiters(self, start: str, stop: str, escapeLeft: str): ... + def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... + def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... + def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... + def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... + def compileTreePattern(self, pattern: str, patternRuleIndex: int): ... + def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict): ... + def map(self, labels, label, tree) -> None: ... + def getRuleTagToken(self, tree: ParseTree): ... + def tokenize(self, pattern: str): ... + def split(self, pattern: str): ... diff --git a/stubs/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4/tree/RuleTagToken.pyi new file mode 100644 index 000000000000..5569e6ccb9ab --- /dev/null +++ b/stubs/antlr4/tree/RuleTagToken.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token + +class RuleTagToken(Token): + source: Incomplete + type: Incomplete + channel: Incomplete + start: int + stop: int + tokenIndex: int + line: int + column: int + label: Incomplete + ruleName: Incomplete + def __init__(self, ruleName: str, bypassTokenType: int, label: str = ...) -> None: ... + def getText(self): ... diff --git a/stubs/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4/tree/TokenTagToken.pyi new file mode 100644 index 000000000000..0f1b3b41982d --- /dev/null +++ b/stubs/antlr4/tree/TokenTagToken.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete +from antlr4.Token import CommonToken as CommonToken + +class TokenTagToken(CommonToken): + tokenName: Incomplete + label: Incomplete + def __init__(self, tokenName: str, type: int, label: str = ...) -> None: ... + def getText(self): ... 
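A hedged sketch of the tree-pattern API declared above; the pattern string, the `stat` rule name, and the lexer/parser/tree objects (reused from the earlier prediction-mode sketch) are illustrative assumptions:

from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher

matcher = ParseTreePatternMatcher(lexer, parser)   # existing lexer/parser instances
pattern = matcher.compileTreePattern("<ID> = <expr>;", parser.ruleNames.index("stat"))
m = pattern.match(tree)                            # `tree` is an already-parsed subtree
if m.succeeded():
    print(m.get("ID"))                             # node bound to the <ID> tag, if any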
diff --git a/stubs/antlr4/tree/Tree.pyi b/stubs/antlr4/tree/Tree.pyi new file mode 100644 index 000000000000..b33d15733155 --- /dev/null +++ b/stubs/antlr4/tree/Tree.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token + +INVALID_INTERVAL: Incomplete + +class Tree: ... +class SyntaxTree(Tree): ... +class ParseTree(SyntaxTree): ... +class RuleNode(ParseTree): ... +class TerminalNode(ParseTree): ... +class ErrorNode(TerminalNode): ... + +class ParseTreeVisitor: + def visit(self, tree): ... + def visitChildren(self, node): ... + def visitTerminal(self, node): ... + def visitErrorNode(self, node): ... + def defaultResult(self) -> None: ... + def aggregateResult(self, aggregate, nextResult): ... + def shouldVisitNextChild(self, node, currentResult): ... + +ParserRuleContext: Incomplete + +class ParseTreeListener: + def visitTerminal(self, node: TerminalNode): ... + def visitErrorNode(self, node: ErrorNode): ... + def enterEveryRule(self, ctx: ParserRuleContext): ... + def exitEveryRule(self, ctx: ParserRuleContext): ... + +class TerminalNodeImpl(TerminalNode): + parentCtx: Incomplete + symbol: Incomplete + def __init__(self, symbol: Token) -> None: ... + def __setattr__(self, key, value) -> None: ... + def getChild(self, i: int): ... + def getSymbol(self): ... + def getParent(self): ... + def getPayload(self): ... + def getSourceInterval(self): ... + def getChildCount(self): ... + def accept(self, visitor: ParseTreeVisitor): ... + def getText(self): ... + +class ErrorNodeImpl(TerminalNodeImpl, ErrorNode): + def __init__(self, token: Token) -> None: ... + def accept(self, visitor: ParseTreeVisitor): ... + +class ParseTreeWalker: + DEFAULT: Incomplete + def walk(self, listener: ParseTreeListener, t: ParseTree): ... + def enterRule(self, listener: ParseTreeListener, r: RuleNode): ... + def exitRule(self, listener: ParseTreeListener, r: RuleNode): ... diff --git a/stubs/antlr4/tree/Trees.pyi b/stubs/antlr4/tree/Trees.pyi new file mode 100644 index 000000000000..912f4d9e2320 --- /dev/null +++ b/stubs/antlr4/tree/Trees.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from antlr4.Token import Token as Token +from antlr4.Utils import escapeWhitespace as escapeWhitespace +from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTree as ParseTree, RuleNode as RuleNode, TerminalNode as TerminalNode, Tree as Tree + +Parser: Incomplete + +class Trees: + @classmethod + def toStringTree(cls, t: Tree, ruleNames: list = ..., recog: Parser = ...): ... + @classmethod + def getNodeText(cls, t: Tree, ruleNames: list = ..., recog: Parser = ...): ... + @classmethod + def getChildren(cls, t: Tree): ... + @classmethod + def getAncestors(cls, t: Tree): ... + @classmethod + def findAllTokenNodes(cls, t: ParseTree, ttype: int): ... + @classmethod + def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int): ... + @classmethod + def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool): ... + @classmethod + def descendants(cls, t: ParseTree): ... 
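Likewise, a small hedged sketch of the listener/walker interface from Tree.pyi above; the `tree` being walked is the hypothetical parse tree produced in the earlier sketches:

from antlr4.tree.Tree import ParseTreeListener, ParseTreeWalker, TerminalNode

class PrintingListener(ParseTreeListener):
    def visitTerminal(self, node: TerminalNode):
        print(node.getText())          # echo every token the walker reaches

ParseTreeWalker.DEFAULT.walk(PrintingListener(), tree)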
diff --git a/stubs/antlr4/tree/__init__.pyi b/stubs/antlr4/tree/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/antlr4/xpath/XPath.pyi b/stubs/antlr4/xpath/XPath.pyi new file mode 100644 index 000000000000..b35364e153ed --- /dev/null +++ b/stubs/antlr4/xpath/XPath.pyi @@ -0,0 +1,59 @@ +from _typeshed import Incomplete +from antlr4 import CommonTokenStream as CommonTokenStream, DFA as DFA, Lexer as Lexer, LexerATNSimulator as LexerATNSimulator, ParserRuleContext as ParserRuleContext, PredictionContextCache as PredictionContextCache, TerminalNode as TerminalNode +from antlr4.InputStream import InputStream as InputStream +from antlr4.Parser import Parser as Parser +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.error.ErrorListener import ErrorListener as ErrorListener +from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException +from antlr4.tree.Tree import ParseTree as ParseTree +from antlr4.tree.Trees import Trees as Trees +from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer +from io import StringIO as StringIO + +class XPath: + WILDCARD: str + NOT: str + parser: Incomplete + path: Incomplete + elements: Incomplete + def __init__(self, parser: Parser, path: str) -> None: ... + def split(self, path: str): ... + def getXPathElement(self, wordToken: Token, anywhere: bool): ... + @staticmethod + def findAll(tree: ParseTree, xpath: str, parser: Parser): ... + def evaluate(self, t: ParseTree): ... + +class XPathElement: + nodeName: Incomplete + invert: bool + def __init__(self, nodeName: str) -> None: ... + +class XPathRuleAnywhereElement(XPathElement): + ruleIndex: Incomplete + def __init__(self, ruleName: str, ruleIndex: int) -> None: ... + def evaluate(self, t: ParseTree): ... + +class XPathRuleElement(XPathElement): + ruleIndex: Incomplete + def __init__(self, ruleName: str, ruleIndex: int) -> None: ... + def evaluate(self, t: ParseTree): ... + +class XPathTokenAnywhereElement(XPathElement): + tokenType: Incomplete + def __init__(self, ruleName: str, tokenType: int) -> None: ... + def evaluate(self, t: ParseTree): ... + +class XPathTokenElement(XPathElement): + tokenType: Incomplete + def __init__(self, ruleName: str, tokenType: int) -> None: ... + def evaluate(self, t: ParseTree): ... + +class XPathWildcardAnywhereElement(XPathElement): + def __init__(self) -> None: ... + def evaluate(self, t: ParseTree): ... + +class XPathWildcardElement(XPathElement): + def __init__(self) -> None: ... + def evaluate(self, t: ParseTree): ... diff --git a/stubs/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4/xpath/XPathLexer.pyi new file mode 100644 index 000000000000..a534417b63c9 --- /dev/null +++ b/stubs/antlr4/xpath/XPathLexer.pyi @@ -0,0 +1,28 @@ +from antlr4 import * +from _typeshed import Incomplete +from io import StringIO as StringIO +from typing import TextIO + +def serializedATN(): ... + +class XPathLexer(Lexer): + atn: Incomplete + decisionsToDFA: Incomplete + TOKEN_REF: int + RULE_REF: int + ANYWHERE: int + ROOT: int + WILDCARD: int + BANG: int + ID: int + STRING: int + channelNames: Incomplete + modeNames: Incomplete + literalNames: Incomplete + symbolicNames: Incomplete + ruleNames: Incomplete + grammarFileName: str + def __init__(self, input: Incomplete | None = ..., output: TextIO = ...) -> None: ... + def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... 
+ type: Incomplete + def ID_action(self, localctx: RuleContext, actionIndex: int): ... diff --git a/stubs/antlr4/xpath/__init__.pyi b/stubs/antlr4/xpath/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 From 9c2af914eda01b26d8eef96806c39c8644f55a4e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 12:42:58 +0000 Subject: [PATCH 02/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- stubs/antlr4/BufferedTokenStream.pyi | 3 +- stubs/antlr4/CommonTokenFactory.pyi | 1 + stubs/antlr4/CommonTokenStream.pyi | 1 + stubs/antlr4/FileStream.pyi | 1 + stubs/antlr4/InputStream.pyi | 1 + stubs/antlr4/IntervalSet.pyi | 1 + stubs/antlr4/LL1Analyzer.pyi | 20 +++-- stubs/antlr4/Lexer.pyi | 11 ++- stubs/antlr4/ListTokenSource.pyi | 1 + stubs/antlr4/Parser.pyi | 14 +-- stubs/antlr4/ParserInterpreter.pyi | 17 ++-- stubs/antlr4/ParserRuleContext.pyi | 12 ++- stubs/antlr4/PredictionContext.pyi | 3 +- stubs/antlr4/Recognizer.pyi | 3 +- stubs/antlr4/RuleContext.pyi | 3 +- stubs/antlr4/TokenStreamRewriter.pyi | 5 ++ stubs/antlr4/__init__.pyi | 31 ++++--- stubs/antlr4/atn/ATN.pyi | 5 +- stubs/antlr4/atn/ATNConfig.pyi | 22 ++++- stubs/antlr4/atn/ATNConfigSet.pyi | 10 ++- stubs/antlr4/atn/ATNDeserializer.pyi | 11 +-- stubs/antlr4/atn/ATNSimulator.pyi | 7 +- stubs/antlr4/atn/ATNState.pyi | 1 + stubs/antlr4/atn/LexerATNSimulator.pyi | 40 +++++++-- stubs/antlr4/atn/LexerActionExecutor.pyi | 3 +- stubs/antlr4/atn/ParserATNSimulator.pyi | 87 +++++++++++++++---- stubs/antlr4/atn/PredictionMode.pyi | 4 +- stubs/antlr4/atn/SemanticContext.pyi | 2 +- stubs/antlr4/atn/Transition.pyi | 5 +- stubs/antlr4/dfa/DFA.pyi | 1 + stubs/antlr4/dfa/DFASerializer.pyi | 3 +- stubs/antlr4/dfa/DFAState.pyi | 1 + .../antlr4/error/DiagnosticErrorListener.pyi | 13 ++- stubs/antlr4/error/ErrorStrategy.pyi | 11 ++- stubs/antlr4/error/Errors.pyi | 15 +++- stubs/antlr4/tree/ParseTreeMatch.pyi | 1 + stubs/antlr4/tree/ParseTreePattern.pyi | 1 + stubs/antlr4/tree/ParseTreePatternMatcher.pyi | 8 +- stubs/antlr4/tree/RuleTagToken.pyi | 1 + stubs/antlr4/tree/TokenTagToken.pyi | 1 + stubs/antlr4/tree/Tree.pyi | 1 + stubs/antlr4/tree/Trees.pyi | 9 +- stubs/antlr4/xpath/XPath.pyi | 19 ++-- stubs/antlr4/xpath/XPathLexer.pyi | 3 +- 44 files changed, 317 insertions(+), 96 deletions(-) diff --git a/stubs/antlr4/BufferedTokenStream.pyi b/stubs/antlr4/BufferedTokenStream.pyi index de1ebe422ccd..e2f5ca282d98 100644 --- a/stubs/antlr4/BufferedTokenStream.pyi +++ b/stubs/antlr4/BufferedTokenStream.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete -from antlr4.Token import Token as Token + from antlr4.error.Errors import IllegalStateException as IllegalStateException +from antlr4.Token import Token as Token Lexer: Incomplete diff --git a/stubs/antlr4/CommonTokenFactory.pyi b/stubs/antlr4/CommonTokenFactory.pyi index bcedb7b02095..ddf59824325f 100644 --- a/stubs/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4/CommonTokenFactory.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import CommonToken as CommonToken class TokenFactory: ... 
diff --git a/stubs/antlr4/CommonTokenStream.pyi b/stubs/antlr4/CommonTokenStream.pyi index 495b823a442e..d6fcc01674d7 100644 --- a/stubs/antlr4/CommonTokenStream.pyi +++ b/stubs/antlr4/CommonTokenStream.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream from antlr4.Lexer import Lexer as Lexer from antlr4.Token import Token as Token diff --git a/stubs/antlr4/FileStream.pyi b/stubs/antlr4/FileStream.pyi index 37961be4c77b..5b25bbaac9b8 100644 --- a/stubs/antlr4/FileStream.pyi +++ b/stubs/antlr4/FileStream.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): diff --git a/stubs/antlr4/InputStream.pyi b/stubs/antlr4/InputStream.pyi index e7645d559999..9fca584455a9 100644 --- a/stubs/antlr4/InputStream.pyi +++ b/stubs/antlr4/InputStream.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import Token as Token class InputStream: diff --git a/stubs/antlr4/IntervalSet.pyi b/stubs/antlr4/IntervalSet.pyi index 421b79faa1b1..e6889e160fa0 100644 --- a/stubs/antlr4/IntervalSet.pyi +++ b/stubs/antlr4/IntervalSet.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import Token as Token class IntervalSet: diff --git a/stubs/antlr4/LL1Analyzer.pyi b/stubs/antlr4/LL1Analyzer.pyi index 982cfb824198..65ed79263996 100644 --- a/stubs/antlr4/LL1Analyzer.pyi +++ b/stubs/antlr4/LL1Analyzer.pyi @@ -1,12 +1,22 @@ from _typeshed import Incomplete -from antlr4.IntervalSet import IntervalSet as IntervalSet -from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextFromRuleContext as PredictionContextFromRuleContext, SingletonPredictionContext as SingletonPredictionContext -from antlr4.RuleContext import RuleContext as RuleContext -from antlr4.Token import Token as Token + from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import ATNConfig as ATNConfig from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState -from antlr4.atn.Transition import AbstractPredicateTransition as AbstractPredicateTransition, NotSetTransition as NotSetTransition, RuleTransition as RuleTransition, WildcardTransition as WildcardTransition +from antlr4.atn.Transition import ( + AbstractPredicateTransition as AbstractPredicateTransition, + NotSetTransition as NotSetTransition, + RuleTransition as RuleTransition, + WildcardTransition as WildcardTransition, +) +from antlr4.IntervalSet import IntervalSet as IntervalSet +from antlr4.PredictionContext import ( + PredictionContext as PredictionContext, + PredictionContextFromRuleContext as PredictionContextFromRuleContext, + SingletonPredictionContext as SingletonPredictionContext, +) +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token class LL1Analyzer: HIT_PRED: Incomplete diff --git a/stubs/antlr4/Lexer.pyi b/stubs/antlr4/Lexer.pyi index 82dee8fccedf..41a63716ffff 100644 --- a/stubs/antlr4/Lexer.pyi +++ b/stubs/antlr4/Lexer.pyi @@ -1,11 +1,16 @@ from _typeshed import Incomplete +from typing import TextIO + +from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory +from antlr4.error.Errors import ( + IllegalStateException as IllegalStateException, + LexerNoViableAltException as LexerNoViableAltException, + RecognitionException as RecognitionException, +) from 
antlr4.InputStream import InputStream as InputStream from antlr4.Recognizer import Recognizer as Recognizer from antlr4.Token import Token as Token -from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator -from antlr4.error.Errors import IllegalStateException as IllegalStateException, LexerNoViableAltException as LexerNoViableAltException, RecognitionException as RecognitionException -from typing import TextIO class TokenSource: ... diff --git a/stubs/antlr4/ListTokenSource.pyi b/stubs/antlr4/ListTokenSource.pyi index aba4368b2ebf..2a3cc7ff1800 100644 --- a/stubs/antlr4/ListTokenSource.pyi +++ b/stubs/antlr4/ListTokenSource.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory from antlr4.Lexer import TokenSource as TokenSource from antlr4.Token import Token as Token diff --git a/stubs/antlr4/Parser.pyi b/stubs/antlr4/Parser.pyi index 8faafd163822..0f7d95c1a481 100644 --- a/stubs/antlr4/Parser.pyi +++ b/stubs/antlr4/Parser.pyi @@ -1,19 +1,23 @@ from _typeshed import Incomplete +from typing import TextIO + +from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer from antlr4.BufferedTokenStream import TokenStream as TokenStream from antlr4.CommonTokenFactory import TokenFactory as TokenFactory +from antlr4.error.Errors import ( + RecognitionException as RecognitionException, + UnsupportedOperationException as UnsupportedOperationException, +) +from antlr4.error.ErrorStrategy import DefaultErrorStrategy as DefaultErrorStrategy from antlr4.InputStream import InputStream as InputStream from antlr4.Lexer import Lexer as Lexer from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext from antlr4.Recognizer import Recognizer as Recognizer from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions -from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer -from antlr4.error.ErrorStrategy import DefaultErrorStrategy as DefaultErrorStrategy -from antlr4.error.Errors import RecognitionException as RecognitionException, UnsupportedOperationException as UnsupportedOperationException from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode -from typing import TextIO class TraceListener(ParseTreeListener): def __init__(self, parser) -> None: ... 
diff --git a/stubs/antlr4/ParserInterpreter.pyi b/stubs/antlr4/ParserInterpreter.pyi index 33c229d0eba5..99a462225ba0 100644 --- a/stubs/antlr4/ParserInterpreter.pyi +++ b/stubs/antlr4/ParserInterpreter.pyi @@ -1,16 +1,21 @@ from _typeshed import Incomplete + +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNState import ATNState as ATNState, LoopEndState as LoopEndState, StarLoopEntryState as StarLoopEntryState +from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator +from antlr4.atn.Transition import Transition as Transition from antlr4.BufferedTokenStream import TokenStream as TokenStream +from antlr4.dfa.DFA import DFA as DFA +from antlr4.error.Errors import ( + FailedPredicateException as FailedPredicateException, + RecognitionException as RecognitionException, + UnsupportedOperationException as UnsupportedOperationException, +) from antlr4.Lexer import Lexer as Lexer from antlr4.Parser import Parser as Parser from antlr4.ParserRuleContext import InterpreterRuleContext as InterpreterRuleContext, ParserRuleContext as ParserRuleContext from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache from antlr4.Token import Token as Token -from antlr4.atn.ATN import ATN as ATN -from antlr4.atn.ATNState import ATNState as ATNState, LoopEndState as LoopEndState, StarLoopEntryState as StarLoopEntryState -from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator -from antlr4.atn.Transition import Transition as Transition -from antlr4.dfa.DFA import DFA as DFA -from antlr4.error.Errors import FailedPredicateException as FailedPredicateException, RecognitionException as RecognitionException, UnsupportedOperationException as UnsupportedOperationException class ParserInterpreter(Parser): grammarFileName: Incomplete diff --git a/stubs/antlr4/ParserRuleContext.pyi b/stubs/antlr4/ParserRuleContext.pyi index dba9b5e79e0a..dba11d605bd6 100644 --- a/stubs/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4/ParserRuleContext.pyi @@ -1,8 +1,16 @@ from _typeshed import Incomplete +from collections.abc import Generator + from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -from antlr4.tree.Tree import ErrorNodeImpl as ErrorNodeImpl, INVALID_INTERVAL as INVALID_INTERVAL, ParseTree as ParseTree, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode, TerminalNodeImpl as TerminalNodeImpl -from collections.abc import Generator +from antlr4.tree.Tree import ( + INVALID_INTERVAL as INVALID_INTERVAL, + ErrorNodeImpl as ErrorNodeImpl, + ParseTree as ParseTree, + ParseTreeListener as ParseTreeListener, + TerminalNode as TerminalNode, + TerminalNodeImpl as TerminalNodeImpl, +) class ParserRuleContext(RuleContext): children: Incomplete diff --git a/stubs/antlr4/PredictionContext.pyi b/stubs/antlr4/PredictionContext.pyi index 951585702132..4e1896e3b102 100644 --- a/stubs/antlr4/PredictionContext.pyi +++ b/stubs/antlr4/PredictionContext.pyi @@ -1,7 +1,8 @@ from _typeshed import Incomplete -from antlr4.RuleContext import RuleContext as RuleContext + from antlr4.atn.ATN import ATN as ATN from antlr4.error.Errors import IllegalStateException as IllegalStateException +from antlr4.RuleContext import RuleContext as RuleContext class PredictionContext: EMPTY: Incomplete diff --git a/stubs/antlr4/Recognizer.pyi b/stubs/antlr4/Recognizer.pyi index 142418845c00..c5e882a19b31 100644 --- a/stubs/antlr4/Recognizer.pyi +++ b/stubs/antlr4/Recognizer.pyi @@ -1,7 +1,8 @@ from _typeshed import 
Incomplete + +from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListener, ProxyErrorListener as ProxyErrorListener from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListener, ProxyErrorListener as ProxyErrorListener RecognitionException: Incomplete diff --git a/stubs/antlr4/RuleContext.pyi b/stubs/antlr4/RuleContext.pyi index cc984eb49e46..972437264321 100644 --- a/stubs/antlr4/RuleContext.pyi +++ b/stubs/antlr4/RuleContext.pyi @@ -1,7 +1,8 @@ from _typeshed import Incomplete +from collections.abc import Generator + from antlr4.tree.Tree import INVALID_INTERVAL as INVALID_INTERVAL, ParseTreeVisitor as ParseTreeVisitor, RuleNode as RuleNode from antlr4.tree.Trees import Trees as Trees -from collections.abc import Generator Parser: Incomplete diff --git a/stubs/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4/TokenStreamRewriter.pyi index 826ee25b149f..c6f6eacd4c2e 100644 --- a/stubs/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4/TokenStreamRewriter.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream from antlr4.Token import Token as Token @@ -31,6 +32,7 @@ class TokenStreamRewriter: def getProgram(self, program_name): ... def getDefaultText(self): ... def getText(self, program_name, start: int, stop: int): ... + class RewriteOperation: tokens: Incomplete index: Incomplete @@ -38,10 +40,13 @@ class TokenStreamRewriter: instructionIndex: int def __init__(self, tokens, index, text: str = ...) -> None: ... def execute(self, buf): ... + class InsertBeforeOp(RewriteOperation): def __init__(self, tokens, index, text: str = ...) -> None: ... def execute(self, buf): ... + class InsertAfterOp(InsertBeforeOp): ... + class ReplaceOp(RewriteOperation): last_index: Incomplete def __init__(self, from_idx, to_idx, tokens, text) -> None: ... 
diff --git a/stubs/antlr4/__init__.pyi b/stubs/antlr4/__init__.pyi index 6d15b5fb2628..7d6894a50a79 100644 --- a/stubs/antlr4/__init__.pyi +++ b/stubs/antlr4/__init__.pyi @@ -1,5 +1,18 @@ +from antlr4.atn.ATN import ATN as ATN +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator +from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator +from antlr4.atn.PredictionMode import PredictionMode as PredictionMode from antlr4.BufferedTokenStream import TokenStream as TokenStream from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream +from antlr4.dfa.DFA import DFA as DFA +from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener as DiagnosticErrorListener +from antlr4.error.Errors import ( + IllegalStateException as IllegalStateException, + NoViableAltException as NoViableAltException, + RecognitionException as RecognitionException, +) +from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy from antlr4.FileStream import FileStream as FileStream from antlr4.InputStream import InputStream as InputStream from antlr4.Lexer import Lexer as Lexer @@ -8,14 +21,12 @@ from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext, Rul from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache from antlr4.StdinStream import StdinStream as StdinStream from antlr4.Token import Token as Token +from antlr4.tree.Tree import ( + ErrorNode as ErrorNode, + ParseTreeListener as ParseTreeListener, + ParseTreeVisitor as ParseTreeVisitor, + ParseTreeWalker as ParseTreeWalker, + RuleNode as RuleNode, + TerminalNode as TerminalNode, +) from antlr4.Utils import str_list as str_list -from antlr4.atn.ATN import ATN as ATN -from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer -from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator -from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator -from antlr4.atn.PredictionMode import PredictionMode as PredictionMode -from antlr4.dfa.DFA import DFA as DFA -from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener as DiagnosticErrorListener -from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy -from antlr4.error.Errors import IllegalStateException as IllegalStateException, NoViableAltException as NoViableAltException, RecognitionException as RecognitionException -from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, ParseTreeVisitor as ParseTreeVisitor, ParseTreeWalker as ParseTreeWalker, RuleNode as RuleNode, TerminalNode as TerminalNode diff --git a/stubs/antlr4/atn/ATN.pyi b/stubs/antlr4/atn/ATN.pyi index a5d188e8a4bd..e65d631decd5 100644 --- a/stubs/antlr4/atn/ATN.pyi +++ b/stubs/antlr4/atn/ATN.pyi @@ -1,9 +1,10 @@ from _typeshed import Incomplete + +from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState +from antlr4.atn.ATNType import ATNType as ATNType from antlr4.IntervalSet import IntervalSet as IntervalSet from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState -from antlr4.atn.ATNType import ATNType as ATNType class ATN: INVALID_ALT_NUMBER: int diff --git a/stubs/antlr4/atn/ATNConfig.pyi b/stubs/antlr4/atn/ATNConfig.pyi index 
67b15428fd45..70d3281d00d3 100644 --- a/stubs/antlr4/atn/ATNConfig.pyi +++ b/stubs/antlr4/atn/ATNConfig.pyi @@ -1,8 +1,9 @@ from _typeshed import Incomplete -from antlr4.PredictionContext import PredictionContext as PredictionContext + from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor from antlr4.atn.SemanticContext import SemanticContext as SemanticContext +from antlr4.PredictionContext import PredictionContext as PredictionContext class ATNConfig: state: Incomplete @@ -11,7 +12,14 @@ class ATNConfig: semanticContext: Incomplete reachesIntoOuterContext: Incomplete precedenceFilterSuppressed: Incomplete - def __init__(self, state: ATNState = ..., alt: int = ..., context: PredictionContext = ..., semantic: SemanticContext = ..., config: ATNConfig = ...) -> None: ... + def __init__( + self, + state: ATNState = ..., + alt: int = ..., + context: PredictionContext = ..., + semantic: SemanticContext = ..., + config: ATNConfig = ..., + ) -> None: ... def __eq__(self, other): ... def __hash__(self): ... def hashCodeForConfigSet(self): ... @@ -20,7 +28,15 @@ class ATNConfig: class LexerATNConfig(ATNConfig): lexerActionExecutor: Incomplete passedThroughNonGreedyDecision: Incomplete - def __init__(self, state: ATNState, alt: int = ..., context: PredictionContext = ..., semantic: SemanticContext = ..., lexerActionExecutor: LexerActionExecutor = ..., config: LexerATNConfig = ...) -> None: ... + def __init__( + self, + state: ATNState, + alt: int = ..., + context: PredictionContext = ..., + semantic: SemanticContext = ..., + lexerActionExecutor: LexerActionExecutor = ..., + config: LexerATNConfig = ..., + ) -> None: ... def __hash__(self): ... def __eq__(self, other): ... def hashCodeForConfigSet(self): ... 
diff --git a/stubs/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4/atn/ATNConfigSet.pyi index 81f80880b64e..57ca1d57b981 100644 --- a/stubs/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4/atn/ATNConfigSet.pyi @@ -1,10 +1,14 @@ from _typeshed import Incomplete -from antlr4.PredictionContext import merge as merge -from antlr4.Utils import str_list as str_list + from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import ATNConfig as ATNConfig from antlr4.atn.SemanticContext import SemanticContext as SemanticContext -from antlr4.error.Errors import IllegalStateException as IllegalStateException, UnsupportedOperationException as UnsupportedOperationException +from antlr4.error.Errors import ( + IllegalStateException as IllegalStateException, + UnsupportedOperationException as UnsupportedOperationException, +) +from antlr4.PredictionContext import merge as merge +from antlr4.Utils import str_list as str_list ATNSimulator: Incomplete diff --git a/stubs/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4/atn/ATNDeserializer.pyi index 43664d7cc8df..d047fb38ad25 100644 --- a/stubs/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4/atn/ATNDeserializer.pyi @@ -1,12 +1,13 @@ -from antlr4.atn.ATNState import * -from antlr4.atn.Transition import * -from antlr4.atn.LexerAction import * from _typeshed import Incomplete -from antlr4.Token import Token as Token +from io import StringIO as StringIO + from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions +from antlr4.atn.ATNState import * from antlr4.atn.ATNType import ATNType as ATNType -from io import StringIO as StringIO +from antlr4.atn.LexerAction import * +from antlr4.atn.Transition import * +from antlr4.Token import Token as Token SERIALIZED_VERSION: int diff --git a/stubs/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4/atn/ATNSimulator.pyi index cf032a691ef6..11fbfe7b0705 100644 --- a/stubs/antlr4/atn/ATNSimulator.pyi +++ b/stubs/antlr4/atn/ATNSimulator.pyi @@ -1,8 +1,13 @@ from _typeshed import Incomplete -from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, getCachedPredictionContext as getCachedPredictionContext + from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet from antlr4.dfa.DFAState import DFAState as DFAState +from antlr4.PredictionContext import ( + PredictionContext as PredictionContext, + PredictionContextCache as PredictionContextCache, + getCachedPredictionContext as getCachedPredictionContext, +) class ATNSimulator: ERROR: Incomplete diff --git a/stubs/antlr4/atn/ATNState.pyi b/stubs/antlr4/atn/ATNState.pyi index 2874d6b08724..d24e1365e824 100644 --- a/stubs/antlr4/atn/ATNState.pyi +++ b/stubs/antlr4/atn/ATNState.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.atn.Transition import Transition as Transition INITIAL_NUM_TRANSITIONS: int diff --git a/stubs/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4/atn/LexerATNSimulator.pyi index f48e8090a762..c0695c177819 100644 --- a/stubs/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4/atn/LexerATNSimulator.pyi @@ -1,7 +1,5 @@ from _typeshed import Incomplete -from antlr4.InputStream import InputStream as InputStream -from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, SingletonPredictionContext as SingletonPredictionContext -from antlr4.Token import Token as Token + from antlr4.atn.ATN import ATN 
as ATN from antlr4.atn.ATNConfig import LexerATNConfig as LexerATNConfig from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet, OrderedATNConfigSet as OrderedATNConfigSet @@ -10,7 +8,17 @@ from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopS from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor from antlr4.atn.Transition import Transition as Transition from antlr4.dfa.DFAState import DFAState as DFAState -from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException, UnsupportedOperationException as UnsupportedOperationException +from antlr4.error.Errors import ( + LexerNoViableAltException as LexerNoViableAltException, + UnsupportedOperationException as UnsupportedOperationException, +) +from antlr4.InputStream import InputStream as InputStream +from antlr4.PredictionContext import ( + PredictionContext as PredictionContext, + PredictionContextCache as PredictionContextCache, + SingletonPredictionContext as SingletonPredictionContext, +) +from antlr4.Token import Token as Token class SimState: def __init__(self) -> None: ... @@ -47,11 +55,29 @@ class LexerATNSimulator(ATNSimulator): def computeTargetState(self, input: InputStream, s: DFAState, t: int): ... def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int): ... def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int): ... - def accept(self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int): ... + def accept( + self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int + ): ... def getReachableTarget(self, trans: Transition, t: int): ... def computeStartState(self, input: InputStream, p: ATNState): ... - def closure(self, input: InputStream, config: LexerATNConfig, configs: ATNConfigSet, currentAltReachedAcceptState: bool, speculative: bool, treatEofAsEpsilon: bool): ... - def getEpsilonTarget(self, input: InputStream, config: LexerATNConfig, t: Transition, configs: ATNConfigSet, speculative: bool, treatEofAsEpsilon: bool): ... + def closure( + self, + input: InputStream, + config: LexerATNConfig, + configs: ATNConfigSet, + currentAltReachedAcceptState: bool, + speculative: bool, + treatEofAsEpsilon: bool, + ): ... + def getEpsilonTarget( + self, + input: InputStream, + config: LexerATNConfig, + t: Transition, + configs: ATNConfigSet, + speculative: bool, + treatEofAsEpsilon: bool, + ): ... def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool): ... def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState): ... def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState = ..., cfgs: ATNConfigSet = ...) -> DFAState: ... 
diff --git a/stubs/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4/atn/LexerActionExecutor.pyi index 62067c049227..d13f1cf2684f 100644 --- a/stubs/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4/atn/LexerActionExecutor.pyi @@ -1,6 +1,7 @@ from _typeshed import Incomplete -from antlr4.InputStream import InputStream as InputStream + from antlr4.atn.LexerAction import LexerAction as LexerAction, LexerIndexedCustomAction as LexerIndexedCustomAction +from antlr4.InputStream import InputStream as InputStream Lexer: Incomplete diff --git a/stubs/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4/atn/ParserATNSimulator.pyi index 69cd20dbec3f..781c2a6121cc 100644 --- a/stubs/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4/atn/ParserATNSimulator.pyi @@ -1,12 +1,6 @@ from _typeshed import Incomplete + from antlr4 import DFA as DFA -from antlr4.BufferedTokenStream import TokenStream as TokenStream -from antlr4.Parser import Parser as Parser -from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext -from antlr4.PredictionContext import PredictionContext as PredictionContext, PredictionContextCache as PredictionContextCache, PredictionContextFromRuleContext as PredictionContextFromRuleContext, SingletonPredictionContext as SingletonPredictionContext -from antlr4.RuleContext import RuleContext as RuleContext -from antlr4.Token import Token as Token -from antlr4.Utils import str_list as str_list from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import ATNConfig as ATNConfig from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet @@ -14,9 +8,30 @@ from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator from antlr4.atn.ATNState import ATNState as ATNState, DecisionState as DecisionState, RuleStopState as RuleStopState from antlr4.atn.PredictionMode import PredictionMode as PredictionMode from antlr4.atn.SemanticContext import SemanticContext as SemanticContext, andContext as andContext, orContext as orContext -from antlr4.atn.Transition import ActionTransition as ActionTransition, AtomTransition as AtomTransition, NotSetTransition as NotSetTransition, PrecedencePredicateTransition as PrecedencePredicateTransition, PredicateTransition as PredicateTransition, RuleTransition as RuleTransition, SetTransition as SetTransition, Transition as Transition +from antlr4.atn.Transition import ( + ActionTransition as ActionTransition, + AtomTransition as AtomTransition, + NotSetTransition as NotSetTransition, + PrecedencePredicateTransition as PrecedencePredicateTransition, + PredicateTransition as PredicateTransition, + RuleTransition as RuleTransition, + SetTransition as SetTransition, + Transition as Transition, +) +from antlr4.BufferedTokenStream import TokenStream as TokenStream from antlr4.dfa.DFAState import DFAState as DFAState, PredPrediction as PredPrediction from antlr4.error.Errors import NoViableAltException as NoViableAltException +from antlr4.Parser import Parser as Parser +from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext +from antlr4.PredictionContext import ( + PredictionContext as PredictionContext, + PredictionContextCache as PredictionContextCache, + PredictionContextFromRuleContext as PredictionContextFromRuleContext, + SingletonPredictionContext as SingletonPredictionContext, +) +from antlr4.RuleContext import RuleContext as RuleContext +from antlr4.Token import Token as Token +from antlr4.Utils import str_list as str_list class ParserATNSimulator(ATNSimulator): debug: bool @@ -34,7 +49,9 @@ class 
ParserATNSimulator(ATNSimulator): def getExistingTargetState(self, previousD: DFAState, t: int): ... def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ... def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ... - def execATNWithFullContext(self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... + def execATNWithFullContext( + self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext + ): ... def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool): ... def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool): ... def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ... @@ -46,16 +63,48 @@ class ParserATNSimulator(ATNSimulator): def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... def evalSemanticContext(self, predPredictions: list, outerContext: ParserRuleContext, complete: bool): ... - def closure(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, treatEofAsEpsilon: bool): ... - def closureCheckingStopState(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, depth: int, treatEofAsEpsilon: bool): ... - def closure_(self, config: ATNConfig, configs: ATNConfigSet, closureBusy: set, collectPredicates: bool, fullCtx: bool, depth: int, treatEofAsEpsilon: bool): ... + def closure( + self, + config: ATNConfig, + configs: ATNConfigSet, + closureBusy: set, + collectPredicates: bool, + fullCtx: bool, + treatEofAsEpsilon: bool, + ): ... + def closureCheckingStopState( + self, + config: ATNConfig, + configs: ATNConfigSet, + closureBusy: set, + collectPredicates: bool, + fullCtx: bool, + depth: int, + treatEofAsEpsilon: bool, + ): ... + def closure_( + self, + config: ATNConfig, + configs: ATNConfigSet, + closureBusy: set, + collectPredicates: bool, + fullCtx: bool, + depth: int, + treatEofAsEpsilon: bool, + ): ... def canDropLoopEntryEdgeInLeftRecursiveRule(self, config): ... def getRuleName(self, index: int): ... epsilonTargetMethods: Incomplete - def getEpsilonTarget(self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool): ... + def getEpsilonTarget( + self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool + ): ... def actionTransition(self, config: ATNConfig, t: ActionTransition): ... - def precedenceTransition(self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool): ... - def predTransition(self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool): ... + def precedenceTransition( + self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool + ): ... + def predTransition( + self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool + ): ... def ruleTransition(self, config: ATNConfig, t: RuleTransition): ... def getConflictingAlts(self, configs: ATNConfigSet): ... def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet): ... 
@@ -66,6 +115,10 @@ class ParserATNSimulator(ATNSimulator): def getUniqueAlt(self, configs: ATNConfigSet): ... def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... def addDFAState(self, dfa: DFA, D: DFAState): ... - def reportAttemptingFullContext(self, dfa: DFA, conflictingAlts: set, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... + def reportAttemptingFullContext( + self, dfa: DFA, conflictingAlts: set, configs: ATNConfigSet, startIndex: int, stopIndex: int + ): ... def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... - def reportAmbiguity(self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet): ... + def reportAmbiguity( + self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet + ): ... diff --git a/stubs/antlr4/atn/PredictionMode.pyi b/stubs/antlr4/atn/PredictionMode.pyi index 2f69b9323c1d..dd8b32bed31c 100644 --- a/stubs/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4/atn/PredictionMode.pyi @@ -1,10 +1,10 @@ -from _typeshed import Incomplete +from enum import Enum + from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import ATNConfig as ATNConfig from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet from antlr4.atn.ATNState import RuleStopState as RuleStopState from antlr4.atn.SemanticContext import SemanticContext as SemanticContext -from enum import Enum class PredictionMode(Enum): SLL: int diff --git a/stubs/antlr4/atn/SemanticContext.pyi b/stubs/antlr4/atn/SemanticContext.pyi index 444caaae9047..672dae091eab 100644 --- a/stubs/antlr4/atn/SemanticContext.pyi +++ b/stubs/antlr4/atn/SemanticContext.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Recognizer import Recognizer as Recognizer from antlr4.RuleContext import RuleContext as RuleContext @@ -8,7 +9,6 @@ class SemanticContext: def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... def andContext(a: SemanticContext, b: SemanticContext): ... - def orContext(a: SemanticContext, b: SemanticContext): ... def filterPrecedencePredicates(collection: set): ... 
diff --git a/stubs/antlr4/atn/Transition.pyi b/stubs/antlr4/atn/Transition.pyi index 3e19c0e52df7..79f11e6d695d 100644 --- a/stubs/antlr4/atn/Transition.pyi +++ b/stubs/antlr4/atn/Transition.pyi @@ -1,8 +1,9 @@ -from antlr4.atn.ATNState import * from _typeshed import Incomplete + +from antlr4.atn.ATNState import * +from antlr4.atn.SemanticContext import PrecedencePredicate as PrecedencePredicate, Predicate as Predicate from antlr4.IntervalSet import IntervalSet as IntervalSet from antlr4.Token import Token as Token -from antlr4.atn.SemanticContext import PrecedencePredicate as PrecedencePredicate, Predicate as Predicate class Transition: EPSILON: int diff --git a/stubs/antlr4/dfa/DFA.pyi b/stubs/antlr4/dfa/DFA.pyi index 72ea5c57a17b..a88a08d900a5 100644 --- a/stubs/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4/dfa/DFA.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet from antlr4.atn.ATNState import DecisionState as DecisionState, StarLoopEntryState as StarLoopEntryState from antlr4.dfa.DFAState import DFAState as DFAState diff --git a/stubs/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4/dfa/DFASerializer.pyi index a789db9b9e36..d8cd4e444217 100644 --- a/stubs/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4/dfa/DFASerializer.pyi @@ -1,7 +1,8 @@ from _typeshed import Incomplete + from antlr4 import DFA as DFA -from antlr4.Utils import str_list as str_list from antlr4.dfa.DFAState import DFAState as DFAState +from antlr4.Utils import str_list as str_list class DFASerializer: dfa: Incomplete diff --git a/stubs/antlr4/dfa/DFAState.pyi b/stubs/antlr4/dfa/DFAState.pyi index 90f6914c63a1..752bbd6929a0 100644 --- a/stubs/antlr4/dfa/DFAState.pyi +++ b/stubs/antlr4/dfa/DFAState.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet from antlr4.atn.SemanticContext import SemanticContext as SemanticContext diff --git a/stubs/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4/error/DiagnosticErrorListener.pyi index 18c1ff4846c4..374384ed651e 100644 --- a/stubs/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4/error/DiagnosticErrorListener.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4 import DFA as DFA from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet from antlr4.error.ErrorListener import ErrorListener as ErrorListener @@ -6,8 +7,14 @@ from antlr4.error.ErrorListener import ErrorListener as ErrorListener class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete def __init__(self, exactOnly: bool = ...) -> None: ... - def reportAmbiguity(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet): ... - def reportAttemptingFullContext(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set, configs: ATNConfigSet): ... - def reportContextSensitivity(self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet): ... + def reportAmbiguity( + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet + ): ... + def reportAttemptingFullContext( + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set, configs: ATNConfigSet + ): ... + def reportContextSensitivity( + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet + ): ... def getDecisionDescription(self, recognizer, dfa: DFA): ... 
def getConflictingAlts(self, reportedAlts: set, configs: ATNConfigSet): ... diff --git a/stubs/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4/error/ErrorStrategy.pyi index 5523fe361c7d..5e5646a56de5 100644 --- a/stubs/antlr4/error/ErrorStrategy.pyi +++ b/stubs/antlr4/error/ErrorStrategy.pyi @@ -1,8 +1,15 @@ from _typeshed import Incomplete + +from antlr4.atn.ATNState import ATNState as ATNState +from antlr4.error.Errors import ( + FailedPredicateException as FailedPredicateException, + InputMismatchException as InputMismatchException, + NoViableAltException as NoViableAltException, + ParseCancellationException as ParseCancellationException, + RecognitionException as RecognitionException, +) from antlr4.IntervalSet import IntervalSet as IntervalSet from antlr4.Token import Token as Token -from antlr4.atn.ATNState import ATNState as ATNState -from antlr4.error.Errors import FailedPredicateException as FailedPredicateException, InputMismatchException as InputMismatchException, NoViableAltException as NoViableAltException, ParseCancellationException as ParseCancellationException, RecognitionException as RecognitionException Parser: Incomplete diff --git a/stubs/antlr4/error/Errors.pyi b/stubs/antlr4/error/Errors.pyi index aa62e0e9ff3c..7d7a1aa511c0 100644 --- a/stubs/antlr4/error/Errors.pyi +++ b/stubs/antlr4/error/Errors.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.InputStream import InputStream as InputStream from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext from antlr4.Recognizer import Recognizer as Recognizer @@ -28,7 +29,9 @@ class RecognitionException(Exception): ctx: Incomplete offendingToken: Incomplete offendingState: int - def __init__(self, message: str = ..., recognizer: Recognizer = ..., input: InputStream = ..., ctx: ParserRulecontext = ...) -> None: ... + def __init__( + self, message: str = ..., recognizer: Recognizer = ..., input: InputStream = ..., ctx: ParserRulecontext = ... + ) -> None: ... def getExpectedTokens(self): ... class LexerNoViableAltException(RecognitionException): @@ -41,7 +44,15 @@ class NoViableAltException(RecognitionException): deadEndConfigs: Incomplete startToken: Incomplete offendingToken: Incomplete - def __init__(self, recognizer: Parser, input: TokenStream = ..., startToken: Token = ..., offendingToken: Token = ..., deadEndConfigs: ATNConfigSet = ..., ctx: ParserRuleContext = ...) -> None: ... + def __init__( + self, + recognizer: Parser, + input: TokenStream = ..., + startToken: Token = ..., + offendingToken: Token = ..., + deadEndConfigs: ATNConfigSet = ..., + ctx: ParserRuleContext = ..., + ) -> None: ... 
class InputMismatchException(RecognitionException): offendingToken: Incomplete diff --git a/stubs/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4/tree/ParseTreeMatch.pyi index 9a5f5525dc58..781fc9dfd335 100644 --- a/stubs/antlr4/tree/ParseTreeMatch.pyi +++ b/stubs/antlr4/tree/ParseTreeMatch.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.tree.ParseTreePattern import ParseTreePattern as ParseTreePattern from antlr4.tree.Tree import ParseTree as ParseTree diff --git a/stubs/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4/tree/ParseTreePattern.pyi index 476e6c41b657..d50da5c95c09 100644 --- a/stubs/antlr4/tree/ParseTreePattern.pyi +++ b/stubs/antlr4/tree/ParseTreePattern.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher from antlr4.tree.Tree import ParseTree as ParseTree from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer diff --git a/stubs/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4/tree/ParseTreePatternMatcher.pyi index fe4ef88848f0..4364b3b11272 100644 --- a/stubs/antlr4/tree/ParseTreePatternMatcher.pyi +++ b/stubs/antlr4/tree/ParseTreePatternMatcher.pyi @@ -1,12 +1,16 @@ from _typeshed import Incomplete + from antlr4.CommonTokenStream import CommonTokenStream as CommonTokenStream +from antlr4.error.Errors import ( + ParseCancellationException as ParseCancellationException, + RecognitionException as RecognitionException, +) +from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy from antlr4.InputStream import InputStream as InputStream from antlr4.Lexer import Lexer as Lexer from antlr4.ListTokenSource import ListTokenSource as ListTokenSource from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext from antlr4.Token import Token as Token -from antlr4.error.ErrorStrategy import BailErrorStrategy as BailErrorStrategy -from antlr4.error.Errors import ParseCancellationException as ParseCancellationException, RecognitionException as RecognitionException from antlr4.tree.Chunk import TagChunk as TagChunk, TextChunk as TextChunk from antlr4.tree.RuleTagToken import RuleTagToken as RuleTagToken from antlr4.tree.TokenTagToken import TokenTagToken as TokenTagToken diff --git a/stubs/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4/tree/RuleTagToken.pyi index 5569e6ccb9ab..4e3e5a2c673f 100644 --- a/stubs/antlr4/tree/RuleTagToken.pyi +++ b/stubs/antlr4/tree/RuleTagToken.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import Token as Token class RuleTagToken(Token): diff --git a/stubs/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4/tree/TokenTagToken.pyi index 0f1b3b41982d..c8ba32d8dd99 100644 --- a/stubs/antlr4/tree/TokenTagToken.pyi +++ b/stubs/antlr4/tree/TokenTagToken.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import CommonToken as CommonToken class TokenTagToken(CommonToken): diff --git a/stubs/antlr4/tree/Tree.pyi b/stubs/antlr4/tree/Tree.pyi index b33d15733155..d056082b2531 100644 --- a/stubs/antlr4/tree/Tree.pyi +++ b/stubs/antlr4/tree/Tree.pyi @@ -1,4 +1,5 @@ from _typeshed import Incomplete + from antlr4.Token import Token as Token INVALID_INTERVAL: Incomplete diff --git a/stubs/antlr4/tree/Trees.pyi b/stubs/antlr4/tree/Trees.pyi index 912f4d9e2320..4e9e97ad506c 100644 --- a/stubs/antlr4/tree/Trees.pyi +++ b/stubs/antlr4/tree/Trees.pyi @@ -1,7 +1,14 @@ from _typeshed import Incomplete + from antlr4.Token import Token as Token +from antlr4.tree.Tree import ( + 
ErrorNode as ErrorNode, + ParseTree as ParseTree, + RuleNode as RuleNode, + TerminalNode as TerminalNode, + Tree as Tree, +) from antlr4.Utils import escapeWhitespace as escapeWhitespace -from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTree as ParseTree, RuleNode as RuleNode, TerminalNode as TerminalNode, Tree as Tree Parser: Incomplete diff --git a/stubs/antlr4/xpath/XPath.pyi b/stubs/antlr4/xpath/XPath.pyi index b35364e153ed..67f0448bf3ed 100644 --- a/stubs/antlr4/xpath/XPath.pyi +++ b/stubs/antlr4/xpath/XPath.pyi @@ -1,16 +1,25 @@ from _typeshed import Incomplete -from antlr4 import CommonTokenStream as CommonTokenStream, DFA as DFA, Lexer as Lexer, LexerATNSimulator as LexerATNSimulator, ParserRuleContext as ParserRuleContext, PredictionContextCache as PredictionContextCache, TerminalNode as TerminalNode +from io import StringIO as StringIO + +from antlr4 import ( + DFA as DFA, + CommonTokenStream as CommonTokenStream, + Lexer as Lexer, + LexerATNSimulator as LexerATNSimulator, + ParserRuleContext as ParserRuleContext, + PredictionContextCache as PredictionContextCache, + TerminalNode as TerminalNode, +) +from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer +from antlr4.error.ErrorListener import ErrorListener as ErrorListener +from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException from antlr4.InputStream import InputStream as InputStream from antlr4.Parser import Parser as Parser from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer -from antlr4.error.ErrorListener import ErrorListener as ErrorListener -from antlr4.error.Errors import LexerNoViableAltException as LexerNoViableAltException from antlr4.tree.Tree import ParseTree as ParseTree from antlr4.tree.Trees import Trees as Trees from antlr4.xpath.XPathLexer import XPathLexer as XPathLexer -from io import StringIO as StringIO class XPath: WILDCARD: str diff --git a/stubs/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4/xpath/XPathLexer.pyi index a534417b63c9..6ff4c371e1b4 100644 --- a/stubs/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4/xpath/XPathLexer.pyi @@ -1,8 +1,9 @@ -from antlr4 import * from _typeshed import Incomplete from io import StringIO as StringIO from typing import TextIO +from antlr4 import * + def serializedATN(): ... 
class XPathLexer(Lexer): From e857e1bd51290b320fd4531dccfeffe896ff9347 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Wed, 20 Dec 2023 07:55:46 -0500 Subject: [PATCH 03/34] Create METADATA.toml --- stubs/antlr4/METADATA.toml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 stubs/antlr4/METADATA.toml diff --git a/stubs/antlr4/METADATA.toml b/stubs/antlr4/METADATA.toml new file mode 100644 index 000000000000..9f1f20021b62 --- /dev/null +++ b/stubs/antlr4/METADATA.toml @@ -0,0 +1,5 @@ +version = "4.13.*" +upstream_repository = "https://github.com/antlr/antlr4" + +[tool.stubtest] +platforms = ["linux", "win32"] From 6e188aff3eebb7c9bde2f4088d8ce7762b149a9c Mon Sep 17 00:00:00 2001 From: Beakerboy Date: Wed, 20 Dec 2023 13:03:28 +0000 Subject: [PATCH 04/34] moved files --- stubs/antlr4/{ => antlr4}/BufferedTokenStream.pyi | 0 stubs/antlr4/{ => antlr4}/CommonTokenFactory.pyi | 0 stubs/antlr4/{ => antlr4}/CommonTokenStream.pyi | 0 stubs/antlr4/{ => antlr4}/FileStream.pyi | 0 stubs/antlr4/{ => antlr4}/InputStream.pyi | 0 stubs/antlr4/{ => antlr4}/IntervalSet.pyi | 0 stubs/antlr4/{ => antlr4}/LL1Analyzer.pyi | 0 stubs/antlr4/{ => antlr4}/Lexer.pyi | 0 stubs/antlr4/{ => antlr4}/ListTokenSource.pyi | 0 stubs/antlr4/{ => antlr4}/Parser.pyi | 0 stubs/antlr4/{ => antlr4}/ParserInterpreter.pyi | 0 stubs/antlr4/{ => antlr4}/ParserRuleContext.pyi | 0 stubs/antlr4/{ => antlr4}/PredictionContext.pyi | 0 stubs/antlr4/{ => antlr4}/Recognizer.pyi | 0 stubs/antlr4/{ => antlr4}/RuleContext.pyi | 0 stubs/antlr4/{ => antlr4}/StdinStream.pyi | 0 stubs/antlr4/{ => antlr4}/Token.pyi | 0 stubs/antlr4/{ => antlr4}/TokenStreamRewriter.pyi | 0 stubs/antlr4/{ => antlr4}/Utils.pyi | 0 stubs/antlr4/{ => antlr4}/__init__.pyi | 0 stubs/antlr4/{ => antlr4}/_pygrun.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATN.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNConfig.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNConfigSet.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNDeserializationOptions.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNDeserializer.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNSimulator.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNState.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ATNType.pyi | 0 stubs/antlr4/{ => antlr4}/atn/LexerATNSimulator.pyi | 0 stubs/antlr4/{ => antlr4}/atn/LexerAction.pyi | 0 stubs/antlr4/{ => antlr4}/atn/LexerActionExecutor.pyi | 0 stubs/antlr4/{ => antlr4}/atn/ParserATNSimulator.pyi | 0 stubs/antlr4/{ => antlr4}/atn/PredictionMode.pyi | 0 stubs/antlr4/{ => antlr4}/atn/SemanticContext.pyi | 0 stubs/antlr4/{ => antlr4}/atn/Transition.pyi | 0 stubs/antlr4/{ => antlr4}/atn/__init__.pyi | 0 stubs/antlr4/{ => antlr4}/dfa/DFA.pyi | 0 stubs/antlr4/{ => antlr4}/dfa/DFASerializer.pyi | 0 stubs/antlr4/{ => antlr4}/dfa/DFAState.pyi | 0 stubs/antlr4/{ => antlr4}/dfa/__init__.pyi | 0 stubs/antlr4/{ => antlr4}/error/DiagnosticErrorListener.pyi | 0 stubs/antlr4/{ => antlr4}/error/ErrorListener.pyi | 0 stubs/antlr4/{ => antlr4}/error/ErrorStrategy.pyi | 0 stubs/antlr4/{ => antlr4}/error/Errors.pyi | 0 stubs/antlr4/{ => antlr4}/error/__init__.pyi | 0 stubs/antlr4/{ => antlr4}/tree/Chunk.pyi | 0 stubs/antlr4/{ => antlr4}/tree/ParseTreeMatch.pyi | 0 stubs/antlr4/{ => antlr4}/tree/ParseTreePattern.pyi | 0 stubs/antlr4/{ => antlr4}/tree/ParseTreePatternMatcher.pyi | 0 stubs/antlr4/{ => antlr4}/tree/RuleTagToken.pyi | 0 stubs/antlr4/{ => antlr4}/tree/TokenTagToken.pyi | 0 stubs/antlr4/{ => antlr4}/tree/Tree.pyi | 0 stubs/antlr4/{ => antlr4}/tree/Trees.pyi | 0 stubs/antlr4/{ => antlr4}/tree/__init__.pyi | 0 stubs/antlr4/{ => 
antlr4}/xpath/XPath.pyi | 0 stubs/antlr4/{ => antlr4}/xpath/XPathLexer.pyi | 0 stubs/antlr4/{ => antlr4}/xpath/__init__.pyi | 0 58 files changed, 0 insertions(+), 0 deletions(-) rename stubs/antlr4/{ => antlr4}/BufferedTokenStream.pyi (100%) rename stubs/antlr4/{ => antlr4}/CommonTokenFactory.pyi (100%) rename stubs/antlr4/{ => antlr4}/CommonTokenStream.pyi (100%) rename stubs/antlr4/{ => antlr4}/FileStream.pyi (100%) rename stubs/antlr4/{ => antlr4}/InputStream.pyi (100%) rename stubs/antlr4/{ => antlr4}/IntervalSet.pyi (100%) rename stubs/antlr4/{ => antlr4}/LL1Analyzer.pyi (100%) rename stubs/antlr4/{ => antlr4}/Lexer.pyi (100%) rename stubs/antlr4/{ => antlr4}/ListTokenSource.pyi (100%) rename stubs/antlr4/{ => antlr4}/Parser.pyi (100%) rename stubs/antlr4/{ => antlr4}/ParserInterpreter.pyi (100%) rename stubs/antlr4/{ => antlr4}/ParserRuleContext.pyi (100%) rename stubs/antlr4/{ => antlr4}/PredictionContext.pyi (100%) rename stubs/antlr4/{ => antlr4}/Recognizer.pyi (100%) rename stubs/antlr4/{ => antlr4}/RuleContext.pyi (100%) rename stubs/antlr4/{ => antlr4}/StdinStream.pyi (100%) rename stubs/antlr4/{ => antlr4}/Token.pyi (100%) rename stubs/antlr4/{ => antlr4}/TokenStreamRewriter.pyi (100%) rename stubs/antlr4/{ => antlr4}/Utils.pyi (100%) rename stubs/antlr4/{ => antlr4}/__init__.pyi (100%) rename stubs/antlr4/{ => antlr4}/_pygrun.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATN.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNConfig.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNConfigSet.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNDeserializationOptions.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNDeserializer.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNSimulator.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNState.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ATNType.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/LexerATNSimulator.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/LexerAction.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/LexerActionExecutor.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/ParserATNSimulator.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/PredictionMode.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/SemanticContext.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/Transition.pyi (100%) rename stubs/antlr4/{ => antlr4}/atn/__init__.pyi (100%) rename stubs/antlr4/{ => antlr4}/dfa/DFA.pyi (100%) rename stubs/antlr4/{ => antlr4}/dfa/DFASerializer.pyi (100%) rename stubs/antlr4/{ => antlr4}/dfa/DFAState.pyi (100%) rename stubs/antlr4/{ => antlr4}/dfa/__init__.pyi (100%) rename stubs/antlr4/{ => antlr4}/error/DiagnosticErrorListener.pyi (100%) rename stubs/antlr4/{ => antlr4}/error/ErrorListener.pyi (100%) rename stubs/antlr4/{ => antlr4}/error/ErrorStrategy.pyi (100%) rename stubs/antlr4/{ => antlr4}/error/Errors.pyi (100%) rename stubs/antlr4/{ => antlr4}/error/__init__.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/Chunk.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/ParseTreeMatch.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/ParseTreePattern.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/ParseTreePatternMatcher.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/RuleTagToken.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/TokenTagToken.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/Tree.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/Trees.pyi (100%) rename stubs/antlr4/{ => antlr4}/tree/__init__.pyi (100%) rename stubs/antlr4/{ => antlr4}/xpath/XPath.pyi (100%) rename stubs/antlr4/{ => 
antlr4}/xpath/XPathLexer.pyi (100%) rename stubs/antlr4/{ => antlr4}/xpath/__init__.pyi (100%) diff --git a/stubs/antlr4/BufferedTokenStream.pyi b/stubs/antlr4/antlr4/BufferedTokenStream.pyi similarity index 100% rename from stubs/antlr4/BufferedTokenStream.pyi rename to stubs/antlr4/antlr4/BufferedTokenStream.pyi diff --git a/stubs/antlr4/CommonTokenFactory.pyi b/stubs/antlr4/antlr4/CommonTokenFactory.pyi similarity index 100% rename from stubs/antlr4/CommonTokenFactory.pyi rename to stubs/antlr4/antlr4/CommonTokenFactory.pyi diff --git a/stubs/antlr4/CommonTokenStream.pyi b/stubs/antlr4/antlr4/CommonTokenStream.pyi similarity index 100% rename from stubs/antlr4/CommonTokenStream.pyi rename to stubs/antlr4/antlr4/CommonTokenStream.pyi diff --git a/stubs/antlr4/FileStream.pyi b/stubs/antlr4/antlr4/FileStream.pyi similarity index 100% rename from stubs/antlr4/FileStream.pyi rename to stubs/antlr4/antlr4/FileStream.pyi diff --git a/stubs/antlr4/InputStream.pyi b/stubs/antlr4/antlr4/InputStream.pyi similarity index 100% rename from stubs/antlr4/InputStream.pyi rename to stubs/antlr4/antlr4/InputStream.pyi diff --git a/stubs/antlr4/IntervalSet.pyi b/stubs/antlr4/antlr4/IntervalSet.pyi similarity index 100% rename from stubs/antlr4/IntervalSet.pyi rename to stubs/antlr4/antlr4/IntervalSet.pyi diff --git a/stubs/antlr4/LL1Analyzer.pyi b/stubs/antlr4/antlr4/LL1Analyzer.pyi similarity index 100% rename from stubs/antlr4/LL1Analyzer.pyi rename to stubs/antlr4/antlr4/LL1Analyzer.pyi diff --git a/stubs/antlr4/Lexer.pyi b/stubs/antlr4/antlr4/Lexer.pyi similarity index 100% rename from stubs/antlr4/Lexer.pyi rename to stubs/antlr4/antlr4/Lexer.pyi diff --git a/stubs/antlr4/ListTokenSource.pyi b/stubs/antlr4/antlr4/ListTokenSource.pyi similarity index 100% rename from stubs/antlr4/ListTokenSource.pyi rename to stubs/antlr4/antlr4/ListTokenSource.pyi diff --git a/stubs/antlr4/Parser.pyi b/stubs/antlr4/antlr4/Parser.pyi similarity index 100% rename from stubs/antlr4/Parser.pyi rename to stubs/antlr4/antlr4/Parser.pyi diff --git a/stubs/antlr4/ParserInterpreter.pyi b/stubs/antlr4/antlr4/ParserInterpreter.pyi similarity index 100% rename from stubs/antlr4/ParserInterpreter.pyi rename to stubs/antlr4/antlr4/ParserInterpreter.pyi diff --git a/stubs/antlr4/ParserRuleContext.pyi b/stubs/antlr4/antlr4/ParserRuleContext.pyi similarity index 100% rename from stubs/antlr4/ParserRuleContext.pyi rename to stubs/antlr4/antlr4/ParserRuleContext.pyi diff --git a/stubs/antlr4/PredictionContext.pyi b/stubs/antlr4/antlr4/PredictionContext.pyi similarity index 100% rename from stubs/antlr4/PredictionContext.pyi rename to stubs/antlr4/antlr4/PredictionContext.pyi diff --git a/stubs/antlr4/Recognizer.pyi b/stubs/antlr4/antlr4/Recognizer.pyi similarity index 100% rename from stubs/antlr4/Recognizer.pyi rename to stubs/antlr4/antlr4/Recognizer.pyi diff --git a/stubs/antlr4/RuleContext.pyi b/stubs/antlr4/antlr4/RuleContext.pyi similarity index 100% rename from stubs/antlr4/RuleContext.pyi rename to stubs/antlr4/antlr4/RuleContext.pyi diff --git a/stubs/antlr4/StdinStream.pyi b/stubs/antlr4/antlr4/StdinStream.pyi similarity index 100% rename from stubs/antlr4/StdinStream.pyi rename to stubs/antlr4/antlr4/StdinStream.pyi diff --git a/stubs/antlr4/Token.pyi b/stubs/antlr4/antlr4/Token.pyi similarity index 100% rename from stubs/antlr4/Token.pyi rename to stubs/antlr4/antlr4/Token.pyi diff --git a/stubs/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4/antlr4/TokenStreamRewriter.pyi similarity index 100% rename from 
stubs/antlr4/TokenStreamRewriter.pyi rename to stubs/antlr4/antlr4/TokenStreamRewriter.pyi diff --git a/stubs/antlr4/Utils.pyi b/stubs/antlr4/antlr4/Utils.pyi similarity index 100% rename from stubs/antlr4/Utils.pyi rename to stubs/antlr4/antlr4/Utils.pyi diff --git a/stubs/antlr4/__init__.pyi b/stubs/antlr4/antlr4/__init__.pyi similarity index 100% rename from stubs/antlr4/__init__.pyi rename to stubs/antlr4/antlr4/__init__.pyi diff --git a/stubs/antlr4/_pygrun.pyi b/stubs/antlr4/antlr4/_pygrun.pyi similarity index 100% rename from stubs/antlr4/_pygrun.pyi rename to stubs/antlr4/antlr4/_pygrun.pyi diff --git a/stubs/antlr4/atn/ATN.pyi b/stubs/antlr4/antlr4/atn/ATN.pyi similarity index 100% rename from stubs/antlr4/atn/ATN.pyi rename to stubs/antlr4/antlr4/atn/ATN.pyi diff --git a/stubs/antlr4/atn/ATNConfig.pyi b/stubs/antlr4/antlr4/atn/ATNConfig.pyi similarity index 100% rename from stubs/antlr4/atn/ATNConfig.pyi rename to stubs/antlr4/antlr4/atn/ATNConfig.pyi diff --git a/stubs/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4/antlr4/atn/ATNConfigSet.pyi similarity index 100% rename from stubs/antlr4/atn/ATNConfigSet.pyi rename to stubs/antlr4/antlr4/atn/ATNConfigSet.pyi diff --git a/stubs/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4/antlr4/atn/ATNDeserializationOptions.pyi similarity index 100% rename from stubs/antlr4/atn/ATNDeserializationOptions.pyi rename to stubs/antlr4/antlr4/atn/ATNDeserializationOptions.pyi diff --git a/stubs/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4/antlr4/atn/ATNDeserializer.pyi similarity index 100% rename from stubs/antlr4/atn/ATNDeserializer.pyi rename to stubs/antlr4/antlr4/atn/ATNDeserializer.pyi diff --git a/stubs/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4/antlr4/atn/ATNSimulator.pyi similarity index 100% rename from stubs/antlr4/atn/ATNSimulator.pyi rename to stubs/antlr4/antlr4/atn/ATNSimulator.pyi diff --git a/stubs/antlr4/atn/ATNState.pyi b/stubs/antlr4/antlr4/atn/ATNState.pyi similarity index 100% rename from stubs/antlr4/atn/ATNState.pyi rename to stubs/antlr4/antlr4/atn/ATNState.pyi diff --git a/stubs/antlr4/atn/ATNType.pyi b/stubs/antlr4/antlr4/atn/ATNType.pyi similarity index 100% rename from stubs/antlr4/atn/ATNType.pyi rename to stubs/antlr4/antlr4/atn/ATNType.pyi diff --git a/stubs/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4/antlr4/atn/LexerATNSimulator.pyi similarity index 100% rename from stubs/antlr4/atn/LexerATNSimulator.pyi rename to stubs/antlr4/antlr4/atn/LexerATNSimulator.pyi diff --git a/stubs/antlr4/atn/LexerAction.pyi b/stubs/antlr4/antlr4/atn/LexerAction.pyi similarity index 100% rename from stubs/antlr4/atn/LexerAction.pyi rename to stubs/antlr4/antlr4/atn/LexerAction.pyi diff --git a/stubs/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4/antlr4/atn/LexerActionExecutor.pyi similarity index 100% rename from stubs/antlr4/atn/LexerActionExecutor.pyi rename to stubs/antlr4/antlr4/atn/LexerActionExecutor.pyi diff --git a/stubs/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4/antlr4/atn/ParserATNSimulator.pyi similarity index 100% rename from stubs/antlr4/atn/ParserATNSimulator.pyi rename to stubs/antlr4/antlr4/atn/ParserATNSimulator.pyi diff --git a/stubs/antlr4/atn/PredictionMode.pyi b/stubs/antlr4/antlr4/atn/PredictionMode.pyi similarity index 100% rename from stubs/antlr4/atn/PredictionMode.pyi rename to stubs/antlr4/antlr4/atn/PredictionMode.pyi diff --git a/stubs/antlr4/atn/SemanticContext.pyi b/stubs/antlr4/antlr4/atn/SemanticContext.pyi similarity index 100% rename from stubs/antlr4/atn/SemanticContext.pyi rename 
to stubs/antlr4/antlr4/atn/SemanticContext.pyi diff --git a/stubs/antlr4/atn/Transition.pyi b/stubs/antlr4/antlr4/atn/Transition.pyi similarity index 100% rename from stubs/antlr4/atn/Transition.pyi rename to stubs/antlr4/antlr4/atn/Transition.pyi diff --git a/stubs/antlr4/atn/__init__.pyi b/stubs/antlr4/antlr4/atn/__init__.pyi similarity index 100% rename from stubs/antlr4/atn/__init__.pyi rename to stubs/antlr4/antlr4/atn/__init__.pyi diff --git a/stubs/antlr4/dfa/DFA.pyi b/stubs/antlr4/antlr4/dfa/DFA.pyi similarity index 100% rename from stubs/antlr4/dfa/DFA.pyi rename to stubs/antlr4/antlr4/dfa/DFA.pyi diff --git a/stubs/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4/antlr4/dfa/DFASerializer.pyi similarity index 100% rename from stubs/antlr4/dfa/DFASerializer.pyi rename to stubs/antlr4/antlr4/dfa/DFASerializer.pyi diff --git a/stubs/antlr4/dfa/DFAState.pyi b/stubs/antlr4/antlr4/dfa/DFAState.pyi similarity index 100% rename from stubs/antlr4/dfa/DFAState.pyi rename to stubs/antlr4/antlr4/dfa/DFAState.pyi diff --git a/stubs/antlr4/dfa/__init__.pyi b/stubs/antlr4/antlr4/dfa/__init__.pyi similarity index 100% rename from stubs/antlr4/dfa/__init__.pyi rename to stubs/antlr4/antlr4/dfa/__init__.pyi diff --git a/stubs/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4/antlr4/error/DiagnosticErrorListener.pyi similarity index 100% rename from stubs/antlr4/error/DiagnosticErrorListener.pyi rename to stubs/antlr4/antlr4/error/DiagnosticErrorListener.pyi diff --git a/stubs/antlr4/error/ErrorListener.pyi b/stubs/antlr4/antlr4/error/ErrorListener.pyi similarity index 100% rename from stubs/antlr4/error/ErrorListener.pyi rename to stubs/antlr4/antlr4/error/ErrorListener.pyi diff --git a/stubs/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4/antlr4/error/ErrorStrategy.pyi similarity index 100% rename from stubs/antlr4/error/ErrorStrategy.pyi rename to stubs/antlr4/antlr4/error/ErrorStrategy.pyi diff --git a/stubs/antlr4/error/Errors.pyi b/stubs/antlr4/antlr4/error/Errors.pyi similarity index 100% rename from stubs/antlr4/error/Errors.pyi rename to stubs/antlr4/antlr4/error/Errors.pyi diff --git a/stubs/antlr4/error/__init__.pyi b/stubs/antlr4/antlr4/error/__init__.pyi similarity index 100% rename from stubs/antlr4/error/__init__.pyi rename to stubs/antlr4/antlr4/error/__init__.pyi diff --git a/stubs/antlr4/tree/Chunk.pyi b/stubs/antlr4/antlr4/tree/Chunk.pyi similarity index 100% rename from stubs/antlr4/tree/Chunk.pyi rename to stubs/antlr4/antlr4/tree/Chunk.pyi diff --git a/stubs/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4/antlr4/tree/ParseTreeMatch.pyi similarity index 100% rename from stubs/antlr4/tree/ParseTreeMatch.pyi rename to stubs/antlr4/antlr4/tree/ParseTreeMatch.pyi diff --git a/stubs/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4/antlr4/tree/ParseTreePattern.pyi similarity index 100% rename from stubs/antlr4/tree/ParseTreePattern.pyi rename to stubs/antlr4/antlr4/tree/ParseTreePattern.pyi diff --git a/stubs/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4/antlr4/tree/ParseTreePatternMatcher.pyi similarity index 100% rename from stubs/antlr4/tree/ParseTreePatternMatcher.pyi rename to stubs/antlr4/antlr4/tree/ParseTreePatternMatcher.pyi diff --git a/stubs/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4/antlr4/tree/RuleTagToken.pyi similarity index 100% rename from stubs/antlr4/tree/RuleTagToken.pyi rename to stubs/antlr4/antlr4/tree/RuleTagToken.pyi diff --git a/stubs/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4/antlr4/tree/TokenTagToken.pyi similarity index 100% rename from 
stubs/antlr4/tree/TokenTagToken.pyi rename to stubs/antlr4/antlr4/tree/TokenTagToken.pyi diff --git a/stubs/antlr4/tree/Tree.pyi b/stubs/antlr4/antlr4/tree/Tree.pyi similarity index 100% rename from stubs/antlr4/tree/Tree.pyi rename to stubs/antlr4/antlr4/tree/Tree.pyi diff --git a/stubs/antlr4/tree/Trees.pyi b/stubs/antlr4/antlr4/tree/Trees.pyi similarity index 100% rename from stubs/antlr4/tree/Trees.pyi rename to stubs/antlr4/antlr4/tree/Trees.pyi diff --git a/stubs/antlr4/tree/__init__.pyi b/stubs/antlr4/antlr4/tree/__init__.pyi similarity index 100% rename from stubs/antlr4/tree/__init__.pyi rename to stubs/antlr4/antlr4/tree/__init__.pyi diff --git a/stubs/antlr4/xpath/XPath.pyi b/stubs/antlr4/antlr4/xpath/XPath.pyi similarity index 100% rename from stubs/antlr4/xpath/XPath.pyi rename to stubs/antlr4/antlr4/xpath/XPath.pyi diff --git a/stubs/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4/antlr4/xpath/XPathLexer.pyi similarity index 100% rename from stubs/antlr4/xpath/XPathLexer.pyi rename to stubs/antlr4/antlr4/xpath/XPathLexer.pyi diff --git a/stubs/antlr4/xpath/__init__.pyi b/stubs/antlr4/antlr4/xpath/__init__.pyi similarity index 100% rename from stubs/antlr4/xpath/__init__.pyi rename to stubs/antlr4/antlr4/xpath/__init__.pyi From 328b1c98d83b6998d14fc947cbb3cce5c73c0c07 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Wed, 20 Dec 2023 08:14:26 -0500 Subject: [PATCH 05/34] changed directory name (#3) --- stubs/{antlr4 => antlr4-python3-runtime}/METADATA.toml | 0 .../antlr4/BufferedTokenStream.pyi | 0 .../antlr4/CommonTokenFactory.pyi | 0 .../antlr4/CommonTokenStream.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/FileStream.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/InputStream.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/IntervalSet.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/LL1Analyzer.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Lexer.pyi | 0 .../{antlr4 => antlr4-python3-runtime}/antlr4/ListTokenSource.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Parser.pyi | 0 .../antlr4/ParserInterpreter.pyi | 0 .../antlr4/ParserRuleContext.pyi | 0 .../antlr4/PredictionContext.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Recognizer.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/RuleContext.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/StdinStream.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Token.pyi | 0 .../antlr4/TokenStreamRewriter.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Utils.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/__init__.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/_pygrun.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATN.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNConfig.pyi | 0 .../antlr4/atn/ATNConfigSet.pyi | 0 .../antlr4/atn/ATNDeserializationOptions.pyi | 0 .../antlr4/atn/ATNDeserializer.pyi | 0 .../antlr4/atn/ATNSimulator.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNState.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNType.pyi | 0 .../antlr4/atn/LexerATNSimulator.pyi | 0 .../{antlr4 => antlr4-python3-runtime}/antlr4/atn/LexerAction.pyi | 0 .../antlr4/atn/LexerActionExecutor.pyi | 0 .../antlr4/atn/ParserATNSimulator.pyi | 0 .../antlr4/atn/PredictionMode.pyi | 0 .../antlr4/atn/SemanticContext.pyi | 0 .../{antlr4 => antlr4-python3-runtime}/antlr4/atn/Transition.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/__init__.pyi | 0 stubs/{antlr4 => 
antlr4-python3-runtime}/antlr4/dfa/DFA.pyi | 0 .../antlr4/dfa/DFASerializer.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/DFAState.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/__init__.pyi | 0 .../antlr4/error/DiagnosticErrorListener.pyi | 0 .../antlr4/error/ErrorListener.pyi | 0 .../antlr4/error/ErrorStrategy.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/Errors.pyi | 0 .../{antlr4 => antlr4-python3-runtime}/antlr4/error/__init__.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Chunk.pyi | 0 .../antlr4/tree/ParseTreeMatch.pyi | 0 .../antlr4/tree/ParseTreePattern.pyi | 0 .../antlr4/tree/ParseTreePatternMatcher.pyi | 0 .../antlr4/tree/RuleTagToken.pyi | 0 .../antlr4/tree/TokenTagToken.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Tree.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Trees.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/__init__.pyi | 0 stubs/{antlr4 => antlr4-python3-runtime}/antlr4/xpath/XPath.pyi | 0 .../antlr4/xpath/XPathLexer.pyi | 0 .../{antlr4 => antlr4-python3-runtime}/antlr4/xpath/__init__.pyi | 0 59 files changed, 0 insertions(+), 0 deletions(-) rename stubs/{antlr4 => antlr4-python3-runtime}/METADATA.toml (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/BufferedTokenStream.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/CommonTokenFactory.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/CommonTokenStream.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/FileStream.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/InputStream.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/IntervalSet.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/LL1Analyzer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Lexer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/ListTokenSource.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Parser.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/ParserInterpreter.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/ParserRuleContext.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/PredictionContext.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Recognizer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/RuleContext.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/StdinStream.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Token.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/TokenStreamRewriter.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/Utils.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/__init__.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/_pygrun.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATN.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNConfig.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNConfigSet.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNDeserializationOptions.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNDeserializer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNSimulator.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ATNState.pyi (100%) rename stubs/{antlr4 => 
antlr4-python3-runtime}/antlr4/atn/ATNType.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/LexerATNSimulator.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/LexerAction.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/LexerActionExecutor.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/ParserATNSimulator.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/PredictionMode.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/SemanticContext.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/Transition.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/atn/__init__.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/DFA.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/DFASerializer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/DFAState.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/dfa/__init__.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/DiagnosticErrorListener.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/ErrorListener.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/ErrorStrategy.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/Errors.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/error/__init__.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Chunk.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/ParseTreeMatch.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/ParseTreePattern.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/ParseTreePatternMatcher.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/RuleTagToken.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/TokenTagToken.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Tree.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/Trees.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/tree/__init__.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/xpath/XPath.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/xpath/XPathLexer.pyi (100%) rename stubs/{antlr4 => antlr4-python3-runtime}/antlr4/xpath/__init__.pyi (100%) diff --git a/stubs/antlr4/METADATA.toml b/stubs/antlr4-python3-runtime/METADATA.toml similarity index 100% rename from stubs/antlr4/METADATA.toml rename to stubs/antlr4-python3-runtime/METADATA.toml diff --git a/stubs/antlr4/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi similarity index 100% rename from stubs/antlr4/antlr4/BufferedTokenStream.pyi rename to stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi diff --git a/stubs/antlr4/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi similarity index 100% rename from stubs/antlr4/antlr4/CommonTokenFactory.pyi rename to stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi diff --git a/stubs/antlr4/antlr4/CommonTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi similarity index 100% rename from stubs/antlr4/antlr4/CommonTokenStream.pyi rename to stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi diff --git a/stubs/antlr4/antlr4/FileStream.pyi 
b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi similarity index 100% rename from stubs/antlr4/antlr4/FileStream.pyi rename to stubs/antlr4-python3-runtime/antlr4/FileStream.pyi diff --git a/stubs/antlr4/antlr4/InputStream.pyi b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi similarity index 100% rename from stubs/antlr4/antlr4/InputStream.pyi rename to stubs/antlr4-python3-runtime/antlr4/InputStream.pyi diff --git a/stubs/antlr4/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi similarity index 100% rename from stubs/antlr4/antlr4/IntervalSet.pyi rename to stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi diff --git a/stubs/antlr4/antlr4/LL1Analyzer.pyi b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi similarity index 100% rename from stubs/antlr4/antlr4/LL1Analyzer.pyi rename to stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi diff --git a/stubs/antlr4/antlr4/Lexer.pyi b/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi similarity index 100% rename from stubs/antlr4/antlr4/Lexer.pyi rename to stubs/antlr4-python3-runtime/antlr4/Lexer.pyi diff --git a/stubs/antlr4/antlr4/ListTokenSource.pyi b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi similarity index 100% rename from stubs/antlr4/antlr4/ListTokenSource.pyi rename to stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi diff --git a/stubs/antlr4/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi similarity index 100% rename from stubs/antlr4/antlr4/Parser.pyi rename to stubs/antlr4-python3-runtime/antlr4/Parser.pyi diff --git a/stubs/antlr4/antlr4/ParserInterpreter.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi similarity index 100% rename from stubs/antlr4/antlr4/ParserInterpreter.pyi rename to stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi diff --git a/stubs/antlr4/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi similarity index 100% rename from stubs/antlr4/antlr4/ParserRuleContext.pyi rename to stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi diff --git a/stubs/antlr4/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi similarity index 100% rename from stubs/antlr4/antlr4/PredictionContext.pyi rename to stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi diff --git a/stubs/antlr4/antlr4/Recognizer.pyi b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi similarity index 100% rename from stubs/antlr4/antlr4/Recognizer.pyi rename to stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi diff --git a/stubs/antlr4/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi similarity index 100% rename from stubs/antlr4/antlr4/RuleContext.pyi rename to stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi diff --git a/stubs/antlr4/antlr4/StdinStream.pyi b/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi similarity index 100% rename from stubs/antlr4/antlr4/StdinStream.pyi rename to stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi diff --git a/stubs/antlr4/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi similarity index 100% rename from stubs/antlr4/antlr4/Token.pyi rename to stubs/antlr4-python3-runtime/antlr4/Token.pyi diff --git a/stubs/antlr4/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi similarity index 100% rename from stubs/antlr4/antlr4/TokenStreamRewriter.pyi rename to stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi diff --git 
a/stubs/antlr4/antlr4/Utils.pyi b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi similarity index 100% rename from stubs/antlr4/antlr4/Utils.pyi rename to stubs/antlr4-python3-runtime/antlr4/Utils.pyi diff --git a/stubs/antlr4/antlr4/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/__init__.pyi diff --git a/stubs/antlr4/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi similarity index 100% rename from stubs/antlr4/antlr4/_pygrun.pyi rename to stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi diff --git a/stubs/antlr4/antlr4/atn/ATN.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATN.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNConfig.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNConfig.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNConfigSet.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNDeserializationOptions.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNDeserializer.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNSimulator.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNState.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNState.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi diff --git a/stubs/antlr4/antlr4/atn/ATNType.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ATNType.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi diff --git a/stubs/antlr4/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/LexerATNSimulator.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi diff --git a/stubs/antlr4/antlr4/atn/LexerAction.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/LexerAction.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi diff --git a/stubs/antlr4/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/LexerActionExecutor.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi diff --git a/stubs/antlr4/antlr4/atn/ParserATNSimulator.pyi 
b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/ParserATNSimulator.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi diff --git a/stubs/antlr4/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/PredictionMode.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi diff --git a/stubs/antlr4/antlr4/atn/SemanticContext.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/SemanticContext.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi diff --git a/stubs/antlr4/antlr4/atn/Transition.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/Transition.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi diff --git a/stubs/antlr4/antlr4/atn/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/atn/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/atn/__init__.pyi diff --git a/stubs/antlr4/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi similarity index 100% rename from stubs/antlr4/antlr4/dfa/DFA.pyi rename to stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi diff --git a/stubs/antlr4/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi similarity index 100% rename from stubs/antlr4/antlr4/dfa/DFASerializer.pyi rename to stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi diff --git a/stubs/antlr4/antlr4/dfa/DFAState.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi similarity index 100% rename from stubs/antlr4/antlr4/dfa/DFAState.pyi rename to stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi diff --git a/stubs/antlr4/antlr4/dfa/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/dfa/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/dfa/__init__.pyi diff --git a/stubs/antlr4/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi similarity index 100% rename from stubs/antlr4/antlr4/error/DiagnosticErrorListener.pyi rename to stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi diff --git a/stubs/antlr4/antlr4/error/ErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi similarity index 100% rename from stubs/antlr4/antlr4/error/ErrorListener.pyi rename to stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi diff --git a/stubs/antlr4/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi similarity index 100% rename from stubs/antlr4/antlr4/error/ErrorStrategy.pyi rename to stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi diff --git a/stubs/antlr4/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi similarity index 100% rename from stubs/antlr4/antlr4/error/Errors.pyi rename to stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi diff --git a/stubs/antlr4/antlr4/error/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/error/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/error/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/error/__init__.pyi diff --git 
a/stubs/antlr4/antlr4/tree/Chunk.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/Chunk.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi diff --git a/stubs/antlr4/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/ParseTreeMatch.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi diff --git a/stubs/antlr4/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/ParseTreePattern.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi diff --git a/stubs/antlr4/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/ParseTreePatternMatcher.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi diff --git a/stubs/antlr4/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/RuleTagToken.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi diff --git a/stubs/antlr4/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/TokenTagToken.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi diff --git a/stubs/antlr4/antlr4/tree/Tree.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/Tree.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi diff --git a/stubs/antlr4/antlr4/tree/Trees.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/Trees.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi diff --git a/stubs/antlr4/antlr4/tree/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/tree/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/tree/__init__.pyi diff --git a/stubs/antlr4/antlr4/xpath/XPath.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi similarity index 100% rename from stubs/antlr4/antlr4/xpath/XPath.pyi rename to stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi diff --git a/stubs/antlr4/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi similarity index 100% rename from stubs/antlr4/antlr4/xpath/XPathLexer.pyi rename to stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi diff --git a/stubs/antlr4/antlr4/xpath/__init__.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/__init__.pyi similarity index 100% rename from stubs/antlr4/antlr4/xpath/__init__.pyi rename to stubs/antlr4-python3-runtime/antlr4/xpath/__init__.pyi From 76873d450dee3478db65f3df697a8c2ce84c3a9c Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Thu, 21 Dec 2023 15:33:12 -0500 Subject: [PATCH 06/34] Fixed Basic Errors --- .../antlr4/BufferedTokenStream.pyi | 20 +++++++++---------- .../antlr4/IntervalSet.pyi | 4 ++-- .../antlr4/ListTokenSource.pyi | 2 +- .../antlr4/ParserInterpreter.pyi | 2 +- .../antlr4/PredictionContext.pyi | 16 +++++++-------- .../antlr4/RuleContext.pyi | 4 ++-- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 2 +- .../antlr4/atn/ATNConfigSet.pyi | 2 
+- .../antlr4/atn/ATNDeserializer.pyi | 6 +++--- .../antlr4/atn/LexerATNSimulator.pyi | 3 ++- .../antlr4/atn/LexerActionExecutor.pyi | 2 +- .../antlr4/atn/ParserATNSimulator.pyi | 18 ++++++++--------- .../antlr4/atn/PredictionMode.pyi | 16 +++++++-------- .../antlr4/atn/SemanticContext.pyi | 2 +- .../antlr4-python3-runtime/antlr4/dfa/DFA.pyi | 2 +- .../antlr4/dfa/DFASerializer.pyi | 2 +- .../antlr4/error/DiagnosticErrorListener.pyi | 6 +++--- .../antlr4/error/ErrorStrategy.pyi | 2 +- .../antlr4/tree/ParseTreeMatch.pyi | 2 +- .../antlr4/tree/ParseTreePatternMatcher.pyi | 2 +- .../antlr4/tree/Trees.pyi | 4 ++-- 21 files changed, 60 insertions(+), 59 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index e2f5ca282d98..1d8b708fc1db 100644 --- a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -13,18 +13,18 @@ class BufferedTokenStream(TokenStream): index: int fetchedEOF: bool def __init__(self, tokenSource: Lexer) -> None: ... - def mark(self): ... - def release(self, marker: int): ... + def mark(self) -> int: ... + def release(self, marker: int) -> None: ... def reset(self) -> None: ... - def seek(self, index: int): ... - def get(self, index: int): ... + def seek(self, index: int) -> None: ... + def get(self, index: int) -> Token: ... def consume(self) -> None: ... - def sync(self, i: int): ... - def fetch(self, n: int): ... - def getTokens(self, start: int, stop: int, types: set = ...): ... - def LA(self, i: int): ... - def LB(self, k: int): ... - def LT(self, k: int): ... + def sync(self, i: int) -> bool: ... + def fetch(self, n: int) -> int: ... + def getTokens(self, start: int, stop: int, types: set[int] = ...) -> list[Token]: ... + def LA(self, i: int) -> int: ... + def LB(self, k: int) -> Token | None: ... + def LT(self, k: int) -> Token | None: ... def adjustSeekIndex(self, i: int): ... def lazyInit(self) -> None: ... def setup(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi index e6889e160fa0..35a8d8e7f7e4 100644 --- a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi @@ -17,5 +17,5 @@ class IntervalSet: def __len__(self) -> int: ... def removeRange(self, v) -> None: ... def removeOne(self, v) -> None: ... - def toString(self, literalNames: list, symbolicNames: list): ... - def elementName(self, literalNames: list, symbolicNames: list, a: int): ... + def toString(self, literalNames: list[str], symbolicNames: list[str]): ... + def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi index 2a3cc7ff1800..0bef64f6f0ca 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi @@ -9,7 +9,7 @@ class ListTokenSource(TokenSource): sourceName: Incomplete pos: int eofToken: Incomplete - def __init__(self, tokens: list, sourceName: str = ...) -> None: ... + def __init__(self, tokens: list[Token], sourceName: str = ...) -> None: ... @property def column(self): ... def nextToken(self): ... 
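For context on the hunk above: the `list[Token]` parameter of `ListTokenSource.__init__` (and the `Token`/`Token | None` results added to `BufferedTokenStream`) match how these classes are fed at runtime. A minimal sketch, not part of the patch, assuming a hypothetical ANTLR-generated lexer named `MyGrammarLexer` (any generated lexer behaves the same way):

from antlr4 import InputStream
from antlr4.ListTokenSource import ListTokenSource
from antlr4.Token import Token

from MyGrammarLexer import MyGrammarLexer  # hypothetical generated lexer

lexer = MyGrammarLexer(InputStream("1 + 2"))
tokens: list[Token] = lexer.getAllTokens()      # the list[Token] the stub now documents

# Replay the captured tokens through a ListTokenSource.
source = ListTokenSource(tokens, sourceName="memory")
first = source.nextToken()                      # a Token; an EOF token once exhausted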
diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi index 99a462225ba0..d538e97ec02d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi @@ -25,7 +25,7 @@ class ParserInterpreter(Parser): decisionToDFA: Incomplete sharedContextCache: Incomplete pushRecursionContextStates: Incomplete - def __init__(self, grammarFileName: str, tokenNames: list, ruleNames: list, atn: ATN, input: TokenStream) -> None: ... + def __init__(self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream) -> None: ... state: Incomplete def parse(self, startRuleIndex: int): ... def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 4e1896e3b102..4d3fc93d6817 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -18,7 +18,7 @@ class PredictionContext: def __hash__(self): ... def calculateHashCode(parent: PredictionContext, returnState: int): ... -def calculateListsHashCode(parents: list, returnStates: list): ... +def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]): ... class PredictionContextCache: cache: Incomplete @@ -48,7 +48,7 @@ class EmptyPredictionContext(SingletonPredictionContext): class ArrayPredictionContext(PredictionContext): parents: Incomplete returnStates: Incomplete - def __init__(self, parents: list, returnStates: list) -> None: ... + def __init__(self, parents: list[PredictionContext], returnStates: list[int]) -> None: ... def isEmpty(self): ... def __len__(self) -> int: ... def getParent(self, index: int): ... @@ -57,10 +57,10 @@ class ArrayPredictionContext(PredictionContext): def __hash__(self): ... def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...): ... -def merge(a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict): ... -def mergeSingletons(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict): ... +def merge(a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... +def mergeSingletons(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... -def mergeArrays(a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict): ... -def combineCommonParents(parents: list): ... -def getCachedPredictionContext(context: PredictionContext, contextCache: PredictionContextCache, visited: dict): ... -def getAllContextNodes(context: PredictionContext, nodes: list = ..., visited: dict = ...): ... +def mergeArrays(a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... +def combineCommonParents(parents: list[PredictionContext]): ... +def getCachedPredictionContext(context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext]): ... 
+def getAllContextNodes(context: PredictionContext, nodes: list[Incomplete] = ..., visited: dict[PredictionContext, PredictionContext] = ...): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index 972437264321..bf61f017d51a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -24,5 +24,5 @@ class RuleContext(RuleNode): def getChildCount(self): ... def getChildren(self) -> Generator[Incomplete, None, None]: ... def accept(self, visitor: ParseTreeVisitor): ... - def toStringTree(self, ruleNames: list = ..., recog: Parser = ...): ... - def toString(self, ruleNames: list, stop: RuleContext) -> str: ... + def toStringTree(self, ruleNames: list[Incomplete] = ..., recog: Parser = ...): ... + def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index b9d12a08134d..bcc788b07051 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -33,7 +33,7 @@ class CommonToken(Token): tokenIndex: int line: Incomplete column: Incomplete - def __init__(self, source: tuple = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ...) -> None: ... + def __init__(self, source: tuple[Incomplete] = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ...) -> None: ... def clone(self): ... @property def text(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi index 57ca1d57b981..fc58fd8c8b8e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi @@ -30,7 +30,7 @@ class ATNConfigSet: def getPredicates(self): ... def get(self, i: int): ... def optimizeConfigs(self, interpreter: ATNSimulator): ... - def addAll(self, coll: list): ... + def addAll(self, coll: list[Incomplete]): ... def __eq__(self, other): ... def __hash__(self): ... def hashConfigs(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index d047fb38ad25..5d8b28604ee6 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -22,8 +22,8 @@ class ATNDeserializer: def readStates(self, atn: ATN): ... def readRules(self, atn: ATN): ... def readModes(self, atn: ATN): ... - def readSets(self, atn: ATN, sets: list): ... - def readEdges(self, atn: ATN, sets: list): ... + def readSets(self, atn: ATN, sets: list[Incomplete]): ... + def readEdges(self, atn: ATN, sets: list[Incomplete]): ... def readDecisions(self, atn: ATN): ... def readLexerActions(self, atn: ATN): ... def generateRuleBypassTransitions(self, atn: ATN): ... @@ -34,7 +34,7 @@ class ATNDeserializer: def checkCondition(self, condition: bool, message: Incomplete | None = ...): ... def readInt(self): ... edgeFactories: Incomplete - def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list): ... + def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]): ... stateFactories: Incomplete def stateFactory(self, type: int, ruleIndex: int): ... 
CHANNEL: int diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index c0695c177819..7563cdcb9d3d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -1,5 +1,6 @@ from _typeshed import Incomplete +from antlr4.dfa.DFA import DFA from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import LexerATNConfig as LexerATNConfig from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet, OrderedATNConfigSet as OrderedATNConfigSet @@ -45,7 +46,7 @@ class LexerATNSimulator(ATNSimulator): DEFAULT_MODE: Incomplete MAX_CHAR_VALUE: Incomplete prevAccept: Incomplete - def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list, sharedContextCache: PredictionContextCache) -> None: ... + def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... def copyState(self, simulator: LexerATNSimulator): ... def match(self, input: InputStream, mode: int): ... def reset(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi index d13f1cf2684f..e2226687ae29 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi @@ -8,7 +8,7 @@ Lexer: Incomplete class LexerActionExecutor: lexerActions: Incomplete hashCode: Incomplete - def __init__(self, lexerActions: list = ...) -> None: ... + def __init__(self, lexerActions: list[LexerAction] = ...) -> None: ... @staticmethod def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... def fixOffsetBeforeMatch(self, offset: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index 781c2a6121cc..3adde212c30a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -42,7 +42,7 @@ class ParserATNSimulator(ATNSimulator): decisionToDFA: Incomplete predictionMode: Incomplete mergeCache: Incomplete - def __init__(self, parser: Parser, atn: ATN, decisionToDFA: list, sharedContextCache: PredictionContextCache) -> None: ... + def __init__(self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... def reset(self) -> None: ... def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... @@ -57,17 +57,17 @@ class ParserATNSimulator(ATNSimulator): def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ... def applyPrecedenceFilter(self, configs: ATNConfigSet): ... def getReachableTarget(self, trans: Transition, ttype: int): ... - def getPredsForAmbigAlts(self, ambigAlts: set, configs: ATNConfigSet, nalts: int): ... - def getPredicatePredictions(self, ambigAlts: set, altToPred: list): ... + def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int): ... + def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]): ... def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... 
def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... - def evalSemanticContext(self, predPredictions: list, outerContext: ParserRuleContext, complete: bool): ... + def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool): ... def closure( self, config: ATNConfig, configs: ATNConfigSet, - closureBusy: set, + closureBusy: set[Incomplete], collectPredicates: bool, fullCtx: bool, treatEofAsEpsilon: bool, @@ -76,7 +76,7 @@ class ParserATNSimulator(ATNSimulator): self, config: ATNConfig, configs: ATNConfigSet, - closureBusy: set, + closureBusy: set[Incomplete], collectPredicates: bool, fullCtx: bool, depth: int, @@ -86,7 +86,7 @@ class ParserATNSimulator(ATNSimulator): self, config: ATNConfig, configs: ATNConfigSet, - closureBusy: set, + closureBusy: set[Incomplete], collectPredicates: bool, fullCtx: bool, depth: int, @@ -116,9 +116,9 @@ class ParserATNSimulator(ATNSimulator): def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... def addDFAState(self, dfa: DFA, D: DFAState): ... def reportAttemptingFullContext( - self, dfa: DFA, conflictingAlts: set, configs: ATNConfigSet, startIndex: int, stopIndex: int + self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int ): ... def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... def reportAmbiguity( - self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet + self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[Incomplete], configs: ATNConfigSet ): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index dd8b32bed31c..09a04cc5b11a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -17,19 +17,19 @@ class PredictionMode(Enum): @classmethod def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ... @classmethod - def resolvesToJustOneViableAlt(cls, altsets: list): ... + def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]): ... @classmethod - def allSubsetsConflict(cls, altsets: list): ... + def allSubsetsConflict(cls, altsets: list[set[int]]): ... @classmethod - def hasNonConflictingAltSet(cls, altsets: list): ... + def hasNonConflictingAltSet(cls, altsets: list[set[int]]): ... @classmethod - def hasConflictingAltSet(cls, altsets: list): ... + def hasConflictingAltSet(cls, altsets: list[set[int]]): ... @classmethod - def allSubsetsEqual(cls, altsets: list): ... + def allSubsetsEqual(cls, altsets: list[set[int]]): ... @classmethod - def getUniqueAlt(cls, altsets: list): ... + def getUniqueAlt(cls, altsets: list[set[int]]): ... @classmethod - def getAlts(cls, altsets: list): ... + def getAlts(cls, altsets: list[set[int]]): ... @classmethod def getConflictingAltSubsets(cls, configs: ATNConfigSet): ... @classmethod @@ -37,4 +37,4 @@ class PredictionMode(Enum): @classmethod def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ... @classmethod - def getSingleViableAlt(cls, altsets: list): ... + def getSingleViableAlt(cls, altsets: list[set[int]]): ... 
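The `PredictionMode` helpers retyped above are internal to the ATN simulator, but the enum itself is user-facing: the common way it is exercised is two-stage (SLL-then-LL) parsing. A sketch under the same assumption of hypothetical generated `MyGrammarLexer`/`MyGrammarParser` classes with an entry rule `startRule`:

from antlr4 import CommonTokenStream, InputStream
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.error.ErrorStrategy import BailErrorStrategy, DefaultErrorStrategy
from antlr4.error.Errors import ParseCancellationException

from MyGrammarLexer import MyGrammarLexer    # hypothetical
from MyGrammarParser import MyGrammarParser  # hypothetical

stream = CommonTokenStream(MyGrammarLexer(InputStream("1 + 2")))
parser = MyGrammarParser(stream)
parser._interp.predictionMode = PredictionMode.SLL   # fast first pass
parser._errHandler = BailErrorStrategy()
try:
    tree = parser.startRule()                        # hypothetical entry rule
except ParseCancellationException:
    stream.seek(0)
    parser.reset()
    parser._errHandler = DefaultErrorStrategy()
    parser._interp.predictionMode = PredictionMode.LL  # full-context retry
    tree = parser.startRule()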
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi index 672dae091eab..0cbd255d52e3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi @@ -10,7 +10,7 @@ class SemanticContext: def andContext(a: SemanticContext, b: SemanticContext): ... def orContext(a: SemanticContext, b: SemanticContext): ... -def filterPrecedencePredicates(collection: set): ... +def filterPrecedencePredicates(collection: set[SemanticContext]): ... class EmptySemanticContext(SemanticContext): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi index a88a08d900a5..5898365dfc98 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi @@ -17,5 +17,5 @@ class DFA: @property def states(self): ... def sortedStates(self): ... - def toString(self, literalNames: list = ..., symbolicNames: list = ...): ... + def toString(self, literalNames: list[str] = ..., symbolicNames: list[str] = ...): ... def toLexerString(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi index d8cd4e444217..b2f934c75201 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi @@ -8,7 +8,7 @@ class DFASerializer: dfa: Incomplete literalNames: Incomplete symbolicNames: Incomplete - def __init__(self, dfa: DFA, literalNames: list = ..., symbolicNames: list = ...) -> None: ... + def __init__(self, dfa: DFA, literalNames: list[str] = ..., symbolicNames: list[str] = ...) -> None: ... def getEdgeLabel(self, i: int): ... def getStateString(self, s: DFAState): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index 374384ed651e..75be49895ff4 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -8,13 +8,13 @@ class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete def __init__(self, exactOnly: bool = ...) -> None: ... def reportAmbiguity( - self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set, configs: ATNConfigSet + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet ): ... def reportAttemptingFullContext( - self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set, configs: ATNConfigSet + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet ): ... def reportContextSensitivity( self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet ): ... def getDecisionDescription(self, recognizer, dfa: DFA): ... - def getConflictingAlts(self, reportedAlts: set, configs: ATNConfigSet): ... + def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet): ... 
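`DiagnosticErrorListener` is the part of this API users typically touch directly; the `set[int]` values for `ambigAlts`/`conflictingAlts` above are the alternative numbers it reports. A short sketch, again with hypothetical generated classes:

from antlr4 import CommonTokenStream, InputStream
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener

from MyGrammarLexer import MyGrammarLexer    # hypothetical
from MyGrammarParser import MyGrammarParser  # hypothetical

parser = MyGrammarParser(CommonTokenStream(MyGrammarLexer(InputStream("a b c"))))
parser.removeErrorListeners()                        # drop the default console listener
parser.addErrorListener(DiagnosticErrorListener())   # report ambiguities during parsing
parser._interp.predictionMode = PredictionMode.LL_EXACT_AMBIG_DETECTION
tree = parser.startRule()                            # hypothetical entry rule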
diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi index 5e5646a56de5..6b7ea7128d93 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi @@ -50,7 +50,7 @@ class DefaultErrorStrategy(ErrorStrategy): def getTokenErrorDisplay(self, t: Token): ... def escapeWSAndQuote(self, s: str): ... def getErrorRecoverySet(self, recognizer: Parser): ... - def consumeUntil(self, recognizer: Parser, set_: set): ... + def consumeUntil(self, recognizer: Parser, set_: set[int]): ... class BailErrorStrategy(DefaultErrorStrategy): def recover(self, recognizer: Parser, e: RecognitionException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi index 781fc9dfd335..04da8384b73b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi @@ -8,7 +8,7 @@ class ParseTreeMatch: pattern: Incomplete labels: Incomplete mismatchedNode: Incomplete - def __init__(self, tree: ParseTree, pattern: ParseTreePattern, labels: dict, mismatchedNode: ParseTree) -> None: ... + def __init__(self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree) -> None: ... def get(self, label: str): ... def getAll(self, label: str): ... def succeeded(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi index 4364b3b11272..392d7be9b31b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi @@ -37,7 +37,7 @@ class ParseTreePatternMatcher: def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... def compileTreePattern(self, pattern: str, patternRuleIndex: int): ... - def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict): ... + def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]): ... def map(self, labels, label, tree) -> None: ... def getRuleTagToken(self, tree: ParseTree): ... def tokenize(self, pattern: str): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi index 4e9e97ad506c..5b5ffa4813be 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi @@ -14,9 +14,9 @@ Parser: Incomplete class Trees: @classmethod - def toStringTree(cls, t: Tree, ruleNames: list = ..., recog: Parser = ...): ... + def toStringTree(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...): ... @classmethod - def getNodeText(cls, t: Tree, ruleNames: list = ..., recog: Parser = ...): ... + def getNodeText(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...): ... @classmethod def getChildren(cls, t: Tree): ... 
@classmethod From b4b9f0641d9b01dbdcd5c22b9dbac1c5e5cc5823 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Dec 2023 20:34:42 +0000 Subject: [PATCH 07/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- .../antlr4/ParserInterpreter.pyi | 4 ++- .../antlr4/PredictionContext.pyi | 29 +++++++++++++++---- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 4 ++- .../antlr4/atn/LexerATNSimulator.pyi | 2 +- .../antlr4/atn/ParserATNSimulator.pyi | 13 +++++++-- .../antlr4/tree/ParseTreeMatch.pyi | 4 ++- 6 files changed, 45 insertions(+), 11 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi index d538e97ec02d..4405ed5dfa6e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi @@ -25,7 +25,9 @@ class ParserInterpreter(Parser): decisionToDFA: Incomplete sharedContextCache: Incomplete pushRecursionContextStates: Incomplete - def __init__(self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream) -> None: ... + def __init__( + self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream + ) -> None: ... state: Incomplete def parse(self, startRuleIndex: int): ... def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 4d3fc93d6817..82e3fb779336 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -57,10 +57,29 @@ class ArrayPredictionContext(PredictionContext): def __hash__(self): ... def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...): ... -def merge(a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... -def mergeSingletons(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... +def merge( + a: PredictionContext, + b: PredictionContext, + rootIsWildcard: bool, + mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], +): ... +def mergeSingletons( + a: SingletonPredictionContext, + b: SingletonPredictionContext, + rootIsWildcard: bool, + mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], +): ... def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... -def mergeArrays(a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext]): ... +def mergeArrays( + a: ArrayPredictionContext, + b: ArrayPredictionContext, + rootIsWildcard: bool, + mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], +): ... def combineCommonParents(parents: list[PredictionContext]): ... -def getCachedPredictionContext(context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext]): ... -def getAllContextNodes(context: PredictionContext, nodes: list[Incomplete] = ..., visited: dict[PredictionContext, PredictionContext] = ...): ... 
+def getCachedPredictionContext( + context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] +): ... +def getAllContextNodes( + context: PredictionContext, nodes: list[Incomplete] = ..., visited: dict[PredictionContext, PredictionContext] = ... +): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index bcc788b07051..c49d4c944dc8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -33,7 +33,9 @@ class CommonToken(Token): tokenIndex: int line: Incomplete column: Incomplete - def __init__(self, source: tuple[Incomplete] = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ...) -> None: ... + def __init__( + self, source: tuple[Incomplete] = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ... + ) -> None: ... def clone(self): ... @property def text(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index 7563cdcb9d3d..6d0e1156c1c0 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -1,6 +1,5 @@ from _typeshed import Incomplete -from antlr4.dfa.DFA import DFA from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNConfig import LexerATNConfig as LexerATNConfig from antlr4.atn.ATNConfigSet import ATNConfigSet as ATNConfigSet, OrderedATNConfigSet as OrderedATNConfigSet @@ -8,6 +7,7 @@ from antlr4.atn.ATNSimulator import ATNSimulator as ATNSimulator from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState from antlr4.atn.LexerActionExecutor import LexerActionExecutor as LexerActionExecutor from antlr4.atn.Transition import Transition as Transition +from antlr4.dfa.DFA import DFA from antlr4.dfa.DFAState import DFAState as DFAState from antlr4.error.Errors import ( LexerNoViableAltException as LexerNoViableAltException, diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index 3adde212c30a..824700785e60 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -42,7 +42,9 @@ class ParserATNSimulator(ATNSimulator): decisionToDFA: Incomplete predictionMode: Incomplete mergeCache: Incomplete - def __init__(self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... + def __init__( + self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache + ) -> None: ... def reset(self) -> None: ... def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... @@ -120,5 +122,12 @@ class ParserATNSimulator(ATNSimulator): ): ... def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... def reportAmbiguity( - self, dfa: DFA, D: DFAState, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[Incomplete], configs: ATNConfigSet + self, + dfa: DFA, + D: DFAState, + startIndex: int, + stopIndex: int, + exact: bool, + ambigAlts: set[Incomplete], + configs: ATNConfigSet, ): ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi index 04da8384b73b..23e09c3dd752 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi @@ -8,7 +8,9 @@ class ParseTreeMatch: pattern: Incomplete labels: Incomplete mismatchedNode: Incomplete - def __init__(self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree) -> None: ... + def __init__( + self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree + ) -> None: ... def get(self, label: str): ... def getAll(self, label: str): ... def succeeded(self): ... From cda790f8b27bae5fa57ff7bcbe456fe50f9b8c57 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Sat, 23 Dec 2023 07:39:39 -0600 Subject: [PATCH 08/34] Fixed strict annotations --- .../antlr4/BufferedTokenStream.pyi | 18 ++--- .../antlr4/CommonTokenFactory.pyi | 4 +- .../antlr4/CommonTokenStream.pyi | 8 +- .../antlr4/FileStream.pyi | 2 +- .../antlr4/InputStream.pyi | 16 ++-- .../antlr4/IntervalSet.pyi | 26 +++---- .../antlr4/LL1Analyzer.pyi | 4 +- stubs/antlr4-python3-runtime/antlr4/Lexer.pyi | 50 ++++++------ .../antlr4/ListTokenSource.pyi | 10 +-- .../antlr4-python3-runtime/antlr4/Parser.pyi | 78 +++++++++---------- .../antlr4/ParserInterpreter.pyi | 10 +-- .../antlr4/ParserRuleContext.pyi | 26 +++---- .../antlr4/PredictionContext.pyi | 58 +++++++------- .../antlr4/Recognizer.pyi | 28 +++---- .../antlr4/RuleContext.pyi | 26 +++---- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 14 ++-- .../antlr4/TokenStreamRewriter.pyi | 56 ++++++------- stubs/antlr4-python3-runtime/antlr4/Utils.pyi | 6 +- .../antlr4-python3-runtime/antlr4/_pygrun.pyi | 4 +- .../antlr4-python3-runtime/antlr4/atn/ATN.pyi | 16 ++-- .../antlr4/atn/ATNConfig.pyi | 18 ++--- .../antlr4/atn/ATNConfigSet.pyi | 28 +++---- .../antlr4/atn/ATNDeserializationOptions.pyi | 2 +- .../antlr4/atn/ATNDeserializer.pyi | 38 ++++----- .../antlr4/atn/ATNSimulator.pyi | 2 +- .../antlr4/atn/ATNState.pyi | 10 +-- .../antlr4/atn/ATNType.pyi | 4 +- .../antlr4/atn/LexerATNSimulator.pyi | 38 ++++----- .../antlr4/atn/LexerAction.pyi | 46 +++++------ .../antlr4/atn/LexerActionExecutor.pyi | 10 +-- .../antlr4/atn/ParserATNSimulator.pyi | 78 +++++++++---------- .../antlr4/atn/PredictionMode.pyi | 30 +++---- .../antlr4/atn/SemanticContext.pyi | 42 +++++----- .../antlr4/atn/Transition.pyi | 30 +++---- .../antlr4-python3-runtime/antlr4/dfa/DFA.pyi | 14 ++-- .../antlr4/dfa/DFASerializer.pyi | 6 +- .../antlr4/dfa/DFAState.pyi | 6 +- .../antlr4/error/DiagnosticErrorListener.pyi | 16 ++-- .../antlr4/error/ErrorListener.pyi | 20 ++--- .../antlr4/error/ErrorStrategy.pyi | 62 +++++++-------- .../antlr4/error/Errors.pyi | 4 +- .../antlr4/tree/ParseTreeMatch.pyi | 6 +- .../antlr4/tree/ParseTreePattern.pyi | 6 +- .../antlr4/tree/ParseTreePatternMatcher.pyi | 22 +++--- .../antlr4/tree/RuleTagToken.pyi | 2 +- .../antlr4/tree/TokenTagToken.pyi | 2 +- .../antlr4/tree/Tree.pyi | 46 +++++------ .../antlr4/tree/Trees.pyi | 16 ++-- .../antlr4/xpath/XPath.pyi | 20 ++--- .../antlr4/xpath/XPathLexer.pyi | 6 +- 50 files changed, 549 insertions(+), 541 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index 1d8b708fc1db..ab91944347f8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi 
+++ b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -25,15 +25,15 @@ class BufferedTokenStream(TokenStream): def LA(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... def LT(self, k: int) -> Token | None: ... - def adjustSeekIndex(self, i: int): ... + def adjustSeekIndex(self, i: int) -> Incomplete: ... def lazyInit(self) -> None: ... def setup(self) -> None: ... - def setTokenSource(self, tokenSource: Lexer): ... - def nextTokenOnChannel(self, i: int, channel: int): ... - def previousTokenOnChannel(self, i: int, channel: int): ... - def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...): ... - def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...): ... - def filterForChannel(self, left: int, right: int, channel: int): ... - def getSourceName(self): ... - def getText(self, start: int = ..., stop: int = ...): ... + def setTokenSource(self, tokenSource: Lexer) -> Incomplete: ... + def nextTokenOnChannel(self, i: int, channel: int) -> Incomplete: ... + def previousTokenOnChannel(self, i: int, channel: int) -> Incomplete: ... + def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...) -> Incomplete: ... + def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...) -> Incomplete: ... + def filterForChannel(self, left: int, right: int, channel: int) -> Incomplete: ... + def getSourceName(self) -> Incomplete: ... + def getText(self, start: int = ..., stop: int = ...) -> Incomplete: ... def fill(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index ddf59824325f..d7e83bd94db9 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -8,5 +8,5 @@ class CommonTokenFactory(TokenFactory): DEFAULT: Incomplete copyText: Incomplete def __init__(self, copyText: bool = ...) -> None: ... - def create(self, source, type: int, text: str, channel: int, start: int, stop: int, line: int, column: int): ... - def createThin(self, type: int, text: str): ... + def create(self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int) -> Incomplete: ... + def createThin(self, type: int, text: str) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi index d6fcc01674d7..9d0023fd6e58 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi @@ -7,7 +7,7 @@ from antlr4.Token import Token as Token class CommonTokenStream(BufferedTokenStream): channel: Incomplete def __init__(self, lexer: Lexer, channel: int = ...) -> None: ... - def adjustSeekIndex(self, i: int): ... - def LB(self, k: int): ... - def LT(self, k: int): ... - def getNumberOfOnChannelTokens(self): ... + def adjustSeekIndex(self, i: int) -> int: ... + def LB(self, k: int) -> Token | None: ... + def LT(self, k: int) -> Token | None: ... + def getNumberOfOnChannelTokens(self) -> int: ... 
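A small usage sketch for the `CommonTokenStream` members retyped above (`LT`/`LB` returning `Token | None`, `getNumberOfOnChannelTokens` returning `int`); the generated lexer is hypothetical as before:

from antlr4 import CommonTokenStream, InputStream
from antlr4.Token import Token

from MyGrammarLexer import MyGrammarLexer  # hypothetical generated lexer

stream = CommonTokenStream(MyGrammarLexer(InputStream("1 + 2")))
stream.fill()                       # buffer every token up to EOF
current = stream.LT(1)              # Token | None per the stub
if current is not None and current.type != Token.EOF:
    print(current.text, stream.getNumberOfOnChannelTokens())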
diff --git a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi index 5b25bbaac9b8..6bb90731b9f6 100644 --- a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi @@ -5,4 +5,4 @@ from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): fileName: Incomplete def __init__(self, fileName: str, encoding: str = ..., errors: str = ...) -> None: ... - def readDataFrom(self, fileName: str, encoding: str, errors: str = ...): ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = ...) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi index 9fca584455a9..affc47edd680 100644 --- a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi @@ -7,14 +7,14 @@ class InputStream: strdata: Incomplete def __init__(self, data: str) -> None: ... @property - def index(self): ... + def index(self) -> Incomplete: ... @property - def size(self): ... + def size(self) -> Incomplete: ... def reset(self) -> None: ... def consume(self) -> None: ... - def LA(self, offset: int): ... - def LT(self, offset: int): ... - def mark(self): ... - def release(self, marker: int): ... - def seek(self, _index: int): ... - def getText(self, start: int, stop: int): ... + def LA(self, offset: int) -> Incomplete: ... + def LT(self, offset: int) -> Incomplete: ... + def mark(self) -> Incomplete: ... + def release(self, marker: int) -> Incomplete: ... + def seek(self, _index: int) -> Incomplete: ... + def getText(self, start: int, stop: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi index 35a8d8e7f7e4..eaa83d4c8251 100644 --- a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi @@ -3,19 +3,19 @@ from _typeshed import Incomplete from antlr4.Token import Token as Token class IntervalSet: - intervals: Incomplete + intervals: list[range] | None readonly: bool def __init__(self) -> None: ... - def __iter__(self): ... - def __getitem__(self, item): ... - def addOne(self, v: int): ... - def addRange(self, v: range): ... - def addSet(self, other: IntervalSet): ... - def reduce(self, k: int): ... - def complement(self, start, stop): ... - def __contains__(self, item) -> bool: ... + def __iter__(self) -> Incomplete: ... + def __getitem__(self, item: Incomplete) -> Incomplete: ... + def addOne(self, v: int) -> Incomplete: ... + def addRange(self, v: range) -> Incomplete: ... + def addSet(self, other: IntervalSet) -> Incomplete: ... + def reduce(self, k: int) -> Incomplete: ... + def complement(self, start: int, stop: int) -> Incomplete: ... + def __contains__(self, item: Incomplete) -> bool: ... def __len__(self) -> int: ... - def removeRange(self, v) -> None: ... - def removeOne(self, v) -> None: ... - def toString(self, literalNames: list[str], symbolicNames: list[str]): ... - def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int): ... + def removeRange(self, v: Incomplete) -> None: ... + def removeOne(self, v: Incomplete) -> None: ... + def toString(self, literalNames: list[str], symbolicNames: list[str]) -> Incomplete: ... + def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int) -> Incomplete: ... 
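`IntervalSet` stores token-type ranges as Python `range` objects, which is what the `list[range] | None` attribute and the range-typed parameters above capture. A minimal sketch using only runtime calls shown in the stub:

from antlr4.IntervalSet import IntervalSet

s = IntervalSet()
s.addOne(5)                  # a single token type
s.addRange(range(10, 15))    # token types 10..14, stored as a Python range
print(7 in s, 12 in s)       # False True
print(len(s))                # 6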
diff --git a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi index 65ed79263996..db26b45bf437 100644 --- a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi @@ -22,5 +22,5 @@ class LL1Analyzer: HIT_PRED: Incomplete atn: Incomplete def __init__(self, atn: ATN) -> None: ... - def getDecisionLookahead(self, s: ATNState): ... - def LOOK(self, s: ATNState, stopState: ATNState = ..., ctx: RuleContext = ...): ... + def getDecisionLookahead(self, s: ATNState) -> Incomplete: ... + def LOOK(self, s: ATNState, stopState: ATNState = ..., ctx: RuleContext = ...) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi b/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi index 41a63716ffff..7bde890442d8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi @@ -24,41 +24,41 @@ class Lexer(Recognizer, TokenSource): MAX_CHAR_VALUE: int def __init__(self, input: InputStream, output: TextIO = ...) -> None: ... def reset(self) -> None: ... - def nextToken(self): ... + def nextToken(self) -> Incomplete: ... def skip(self) -> None: ... def more(self) -> None: ... - def mode(self, m: int): ... - def pushMode(self, m: int): ... - def popMode(self): ... + def mode(self, m: int) -> Incomplete: ... + def pushMode(self, m: int) -> Incomplete: ... + def popMode(self) -> Incomplete: ... @property - def inputStream(self): ... + def inputStream(self) -> Incomplete: ... @inputStream.setter - def inputStream(self, input: InputStream): ... + def inputStream(self, input: InputStream) -> Incomplete: ... @property - def sourceName(self): ... - def emitToken(self, token: Token): ... - def emit(self): ... - def emitEOF(self): ... + def sourceName(self) -> Incomplete: ... + def emitToken(self, token: Token) -> Incomplete: ... + def emit(self) -> Incomplete: ... + def emitEOF(self) -> Incomplete: ... @property - def type(self): ... + def type(self) -> Incomplete: ... @type.setter - def type(self, type: int): ... + def type(self, type: int) -> Incomplete: ... @property - def line(self): ... + def line(self) -> Incomplete: ... @line.setter - def line(self, line: int): ... + def line(self, line: int) -> Incomplete: ... @property - def column(self): ... + def column(self) -> Incomplete: ... @column.setter - def column(self, column: int): ... - def getCharIndex(self): ... + def column(self, column: int) -> Incomplete: ... + def getCharIndex(self) -> Incomplete: ... @property - def text(self): ... + def text(self) -> Incomplete: ... @text.setter - def text(self, txt: str): ... - def getAllTokens(self): ... - def notifyListeners(self, e: LexerNoViableAltException): ... - def getErrorDisplay(self, s: str): ... - def getErrorDisplayForChar(self, c: str): ... - def getCharErrorDisplay(self, c: str): ... - def recover(self, re: RecognitionException): ... + def text(self, txt: str) -> Incomplete: ... + def getAllTokens(self) -> Incomplete: ... + def notifyListeners(self, e: LexerNoViableAltException) -> Incomplete: ... + def getErrorDisplay(self, s: str) -> Incomplete: ... + def getErrorDisplayForChar(self, c: str) -> Incomplete: ... + def getCharErrorDisplay(self, c: str) -> Incomplete: ... + def recover(self, re: RecognitionException) -> Incomplete: ... 
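Most of the `Lexer` members retyped above are observed through the tokens it produces rather than called directly. A tiny loop, once more assuming a hypothetical generated lexer:

from antlr4 import InputStream
from antlr4.Token import Token

from MyGrammarLexer import MyGrammarLexer  # hypothetical generated lexer

lexer = MyGrammarLexer(InputStream("1 + 2"))
tok = lexer.nextToken()
while tok.type != Token.EOF:
    print(tok.line, tok.column, tok.type, tok.text)
    tok = lexer.nextToken()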
diff --git a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi index 0bef64f6f0ca..de3dc02e6a07 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi @@ -11,9 +11,9 @@ class ListTokenSource(TokenSource): eofToken: Incomplete def __init__(self, tokens: list[Token], sourceName: str = ...) -> None: ... @property - def column(self): ... - def nextToken(self): ... + def column(self) -> Incomplete: ... + def nextToken(self) -> Incomplete: ... @property - def line(self): ... - def getInputStream(self): ... - def getSourceName(self): ... + def line(self) -> Incomplete: ... + def getInputStream(self) -> Incomplete: ... + def getSourceName(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index 0f7d95c1a481..f64b4f9a053e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -20,55 +20,55 @@ from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as Parse from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode class TraceListener(ParseTreeListener): - def __init__(self, parser) -> None: ... - def enterEveryRule(self, ctx) -> None: ... - def visitTerminal(self, node) -> None: ... - def visitErrorNode(self, node) -> None: ... - def exitEveryRule(self, ctx) -> None: ... + def __init__(self, parser: Incomplete) -> None: ... + def enterEveryRule(self, ctx: Incomplete) -> None: ... + def visitTerminal(self, node: Incomplete) -> None: ... + def visitErrorNode(self, node: Incomplete) -> None: ... + def exitEveryRule(self, ctx: Incomplete) -> None: ... class Parser(Recognizer): bypassAltsAtnCache: Incomplete buildParseTrees: bool def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ... def reset(self) -> None: ... - def match(self, ttype: int): ... - def matchWildcard(self): ... - def getParseListeners(self): ... - def addParseListener(self, listener: ParseTreeListener): ... - def removeParseListener(self, listener: ParseTreeListener): ... + def match(self, ttype: int) -> Incomplete: ... + def matchWildcard(self) -> Incomplete: ... + def getParseListeners(self) -> Incomplete: ... + def addParseListener(self, listener: ParseTreeListener) -> Incomplete: ... + def removeParseListener(self, listener: ParseTreeListener) -> Incomplete: ... def removeParseListeners(self) -> None: ... def triggerEnterRuleEvent(self) -> None: ... def triggerExitRuleEvent(self) -> None: ... - def getNumberOfSyntaxErrors(self): ... - def getTokenFactory(self): ... - def setTokenFactory(self, factory: TokenFactory): ... - def getATNWithBypassAlts(self): ... - def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer = ...): ... - def getInputStream(self): ... - def setInputStream(self, input: InputStream): ... - def getTokenStream(self): ... - def setTokenStream(self, input: TokenStream): ... - def getCurrentToken(self): ... - def notifyErrorListeners(self, msg: str, offendingToken: Token = ..., e: RecognitionException = ...): ... - def consume(self): ... + def getNumberOfSyntaxErrors(self) -> Incomplete: ... + def getTokenFactory(self) -> Incomplete: ... + def setTokenFactory(self, factory: TokenFactory) -> Incomplete: ... + def getATNWithBypassAlts(self) -> Incomplete: ... 
+ def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer = ...) -> Incomplete: ... + def getInputStream(self) -> Incomplete: ... + def setInputStream(self, input: InputStream) -> Incomplete: ... + def getTokenStream(self) -> Incomplete: ... + def setTokenStream(self, input: TokenStream) -> Incomplete: ... + def getCurrentToken(self) -> Incomplete: ... + def notifyErrorListeners(self, msg: str, offendingToken: Token = ..., e: RecognitionException = ...) -> Incomplete: ... + def consume(self) -> Incomplete: ... def addContextToParseTree(self) -> None: ... state: Incomplete - def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... + def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> Incomplete: ... def exitRule(self) -> None: ... - def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ... - def getPrecedence(self): ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... - def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... - def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ... - def getInvokingContext(self, ruleIndex: int): ... - def precpred(self, localctx: RuleContext, precedence: int): ... - def inContext(self, context: str): ... - def isExpectedToken(self, symbol: int): ... - def getExpectedTokens(self): ... - def getExpectedTokensWithinCurrentRule(self): ... - def getRuleIndex(self, ruleName: str): ... - def getRuleInvocationStack(self, p: RuleContext = ...): ... - def getDFAStrings(self): ... + def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int) -> Incomplete: ... + def getPrecedence(self) -> Incomplete: ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> Incomplete: ... + def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> Incomplete: ... + def unrollRecursionContexts(self, parentCtx: ParserRuleContext) -> Incomplete: ... + def getInvokingContext(self, ruleIndex: int) -> Incomplete: ... + def precpred(self, localctx: RuleContext, precedence: int) -> Incomplete: ... + def inContext(self, context: str) -> Incomplete: ... + def isExpectedToken(self, symbol: int) -> Incomplete: ... + def getExpectedTokens(self) -> Incomplete: ... + def getExpectedTokensWithinCurrentRule(self) -> Incomplete: ... + def getRuleIndex(self, ruleName: str) -> Incomplete: ... + def getRuleInvocationStack(self, p: RuleContext = ...) -> Incomplete: ... + def getDFAStrings(self) -> Incomplete: ... def dumpDFA(self) -> None: ... - def getSourceName(self): ... - def setTrace(self, trace: bool): ... + def getSourceName(self) -> Incomplete: ... + def setTrace(self, trace: bool) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi index 4405ed5dfa6e..43635239ea8a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi @@ -29,8 +29,8 @@ class ParserInterpreter(Parser): self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream ) -> None: ... state: Incomplete - def parse(self, startRuleIndex: int): ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... - def getATNState(self): ... - def visitState(self, p: ATNState): ... 
- def visitRuleStopState(self, p: ATNState): ... + def parse(self, startRuleIndex: int) -> Incomplete: ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> Incomplete: ... + def getATNState(self) -> Incomplete: ... + def visitState(self, p: ATNState) -> Incomplete: ... + def visitRuleStopState(self, p: ATNState) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index dba11d605bd6..c4dac4bd9065 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -20,21 +20,21 @@ class ParserRuleContext(RuleContext): def __init__(self, parent: ParserRuleContext = ..., invokingStateNumber: int = ...) -> None: ... parentCtx: Incomplete invokingState: Incomplete - def copyFrom(self, ctx: ParserRuleContext): ... - def enterRule(self, listener: ParseTreeListener): ... - def exitRule(self, listener: ParseTreeListener): ... - def addChild(self, child: ParseTree): ... + def copyFrom(self, ctx: ParserRuleContext) -> Incomplete: ... + def enterRule(self, listener: ParseTreeListener) -> Incomplete: ... + def exitRule(self, listener: ParseTreeListener) -> Incomplete: ... + def addChild(self, child: ParseTree) -> Incomplete: ... def removeLastChild(self) -> None: ... - def addTokenNode(self, token: Token): ... - def addErrorNode(self, badToken: Token): ... - def getChild(self, i: int, ttype: type = ...): ... + def addTokenNode(self, token: Token) -> Incomplete: ... + def addErrorNode(self, badToken: Token) -> Incomplete: ... + def getChild(self, i: int, ttype: type = ...) -> Incomplete: ... def getChildren(self, predicate: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... - def getToken(self, ttype: int, i: int): ... - def getTokens(self, ttype: int): ... - def getTypedRuleContext(self, ctxType: type, i: int): ... - def getTypedRuleContexts(self, ctxType: type): ... - def getChildCount(self): ... - def getSourceInterval(self): ... + def getToken(self, ttype: int, i: int) -> Incomplete: ... + def getTokens(self, ttype: int) -> Incomplete: ... + def getTypedRuleContext(self, ctxType: type, i: int) -> Incomplete: ... + def getTypedRuleContexts(self, ctxType: type) -> Incomplete: ... + def getChildCount(self) -> Incomplete: ... + def getSourceInterval(self) -> Incomplete: ... class InterpreterRuleContext(ParserRuleContext): ruleIndex: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 82e3fb779336..b41e7d06d853 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -12,74 +12,74 @@ class PredictionContext: cachedHashCode: Incomplete def __init__(self, cachedHashCode: int) -> None: ... def __len__(self) -> int: ... - def isEmpty(self): ... - def hasEmptyPath(self): ... - def getReturnState(self, index: int): ... - def __hash__(self): ... + def isEmpty(self) -> Incomplete: ... + def hasEmptyPath(self) -> Incomplete: ... + def getReturnState(self, index: int) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... -def calculateHashCode(parent: PredictionContext, returnState: int): ... -def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]): ... +def calculateHashCode(parent: PredictionContext, returnState: int) -> Incomplete: ... 
+def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]) -> Incomplete: ... class PredictionContextCache: cache: Incomplete def __init__(self) -> None: ... - def add(self, ctx: PredictionContext): ... - def get(self, ctx: PredictionContext): ... + def add(self, ctx: PredictionContext) -> Incomplete: ... + def get(self, ctx: PredictionContext) -> Incomplete: ... def __len__(self) -> int: ... class SingletonPredictionContext(PredictionContext): @staticmethod - def create(parent: PredictionContext, returnState: int): ... + def create(parent: PredictionContext, returnState: int) -> Incomplete: ... parentCtx: Incomplete returnState: Incomplete def __init__(self, parent: PredictionContext, returnState: int) -> None: ... def __len__(self) -> int: ... - def getParent(self, index: int): ... - def getReturnState(self, index: int): ... - def __eq__(self, other): ... - def __hash__(self): ... + def getParent(self, index: int) -> Incomplete: ... + def getReturnState(self, index: int) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... class EmptyPredictionContext(SingletonPredictionContext): def __init__(self) -> None: ... - def isEmpty(self): ... - def __eq__(self, other): ... - def __hash__(self): ... + def isEmpty(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... class ArrayPredictionContext(PredictionContext): parents: Incomplete returnStates: Incomplete def __init__(self, parents: list[PredictionContext], returnStates: list[int]) -> None: ... - def isEmpty(self): ... + def isEmpty(self) -> Incomplete: ... def __len__(self) -> int: ... - def getParent(self, index: int): ... - def getReturnState(self, index: int): ... - def __eq__(self, other): ... - def __hash__(self): ... + def getParent(self, index: int) -> Incomplete: ... + def getReturnState(self, index: int) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... -def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...): ... +def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...) -> Incomplete: ... def merge( a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -): ... +) -> Incomplete: ... def mergeSingletons( a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -): ... -def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... +) -> Incomplete: ... +def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool) -> Incomplete: ... def mergeArrays( a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -): ... -def combineCommonParents(parents: list[PredictionContext]): ... +) -> Incomplete: ... +def combineCommonParents(parents: list[PredictionContext]) -> Incomplete: ... def getCachedPredictionContext( context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] -): ... +) -> Incomplete: ... def getAllContextNodes( context: PredictionContext, nodes: list[Incomplete] = ..., visited: dict[PredictionContext, PredictionContext] = ... -): ... +) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi index c5e882a19b31..5d757e2be6b7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi @@ -10,20 +10,20 @@ class Recognizer: tokenTypeMapCache: Incomplete ruleIndexMapCache: Incomplete def __init__(self) -> None: ... - def extractVersion(self, version): ... - def checkVersion(self, toolVersion) -> None: ... - def addErrorListener(self, listener) -> None: ... - def removeErrorListener(self, listener) -> None: ... + def extractVersion(self, version: Incomplete) -> Incomplete: ... + def checkVersion(self, toolVersion: Incomplete) -> None: ... + def addErrorListener(self, listener: Incomplete) -> None: ... + def removeErrorListener(self, listener: Incomplete) -> None: ... def removeErrorListeners(self) -> None: ... - def getTokenTypeMap(self): ... - def getRuleIndexMap(self): ... - def getTokenType(self, tokenName: str): ... - def getErrorHeader(self, e: RecognitionException): ... - def getTokenErrorDisplay(self, t: Token): ... - def getErrorListenerDispatch(self): ... - def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... - def precpred(self, localctx: RuleContext, precedence: int): ... + def getTokenTypeMap(self) -> Incomplete: ... + def getRuleIndexMap(self) -> Incomplete: ... + def getTokenType(self, tokenName: str) -> Incomplete: ... + def getErrorHeader(self, e: RecognitionException) -> Incomplete: ... + def getTokenErrorDisplay(self, t: Token) -> Incomplete: ... + def getErrorListenerDispatch(self) -> Incomplete: ... + def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> Incomplete: ... + def precpred(self, localctx: RuleContext, precedence: int) -> Incomplete: ... @property - def state(self): ... + def state(self) -> Incomplete: ... @state.setter - def state(self, atnState: int): ... + def state(self, atnState: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index bf61f017d51a..0645a2eeddfa 100644 --- a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -11,18 +11,18 @@ class RuleContext(RuleNode): parentCtx: Incomplete invokingState: Incomplete def __init__(self, parent: RuleContext = ..., invokingState: int = ...) -> None: ... - def depth(self): ... - def isEmpty(self): ... - def getSourceInterval(self): ... - def getRuleContext(self): ... - def getPayload(self): ... - def getText(self): ... - def getRuleIndex(self): ... - def getAltNumber(self): ... - def setAltNumber(self, altNumber: int): ... - def getChild(self, i: int): ... - def getChildCount(self): ... + def depth(self) -> Incomplete: ... + def isEmpty(self) -> Incomplete: ... + def getSourceInterval(self) -> Incomplete: ... + def getRuleContext(self) -> Incomplete: ... + def getPayload(self) -> Incomplete: ... + def getText(self) -> Incomplete: ... + def getRuleIndex(self) -> Incomplete: ... + def getAltNumber(self) -> Incomplete: ... + def setAltNumber(self, altNumber: int) -> Incomplete: ... + def getChild(self, i: int) -> Incomplete: ... + def getChildCount(self) -> Incomplete: ... def getChildren(self) -> Generator[Incomplete, None, None]: ... - def accept(self, visitor: ParseTreeVisitor): ... - def toStringTree(self, ruleNames: list[Incomplete] = ..., recog: Parser = ...): ... 
+ def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... + def toStringTree(self, ruleNames: list[Incomplete] = ..., recog: Parser = ...) -> Incomplete: ... def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index c49d4c944dc8..eba0be29ba3e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -17,11 +17,11 @@ class Token: column: Incomplete def __init__(self) -> None: ... @property - def text(self): ... + def text(self) -> Incomplete: ... @text.setter - def text(self, text: str): ... - def getTokenSource(self): ... - def getInputStream(self): ... + def text(self, text: str) -> Incomplete: ... + def getTokenSource(self) -> Incomplete: ... + def getInputStream(self) -> Incomplete: ... class CommonToken(Token): EMPTY_SOURCE: Incomplete @@ -36,8 +36,8 @@ class CommonToken(Token): def __init__( self, source: tuple[Incomplete] = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ... ) -> None: ... - def clone(self): ... + def clone(self) -> Incomplete: ... @property - def text(self): ... + def text(self) -> Incomplete: ... @text.setter - def text(self, text: str): ... + def text(self, text: str) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index c6f6eacd4c2e..3fc9aff6495e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -10,44 +10,44 @@ class TokenStreamRewriter: tokens: Incomplete programs: Incomplete lastRewriteTokenIndexes: Incomplete - def __init__(self, tokens) -> None: ... - def getTokenStream(self): ... - def rollback(self, instruction_index, program_name) -> None: ... - def deleteProgram(self, program_name=...) -> None: ... - def insertAfterToken(self, token, text, program_name=...) -> None: ... - def insertAfter(self, index, text, program_name=...) -> None: ... - def insertBeforeIndex(self, index, text) -> None: ... - def insertBeforeToken(self, token, text, program_name=...) -> None: ... - def insertBefore(self, program_name, index, text) -> None: ... - def replaceIndex(self, index, text) -> None: ... - def replaceRange(self, from_idx, to_idx, text) -> None: ... - def replaceSingleToken(self, token, text) -> None: ... - def replaceRangeTokens(self, from_token, to_token, text, program_name=...) -> None: ... - def replace(self, program_name, from_idx, to_idx, text) -> None: ... - def deleteToken(self, token) -> None: ... - def deleteIndex(self, index) -> None: ... - def delete(self, program_name, from_idx, to_idx) -> None: ... - def lastRewriteTokenIndex(self, program_name=...): ... - def setLastRewriteTokenIndex(self, program_name, i) -> None: ... - def getProgram(self, program_name): ... - def getDefaultText(self): ... - def getText(self, program_name, start: int, stop: int): ... + def __init__(self, tokens: Incomplete) -> None: ... + def getTokenStream(self) -> Incomplete: ... + def rollback(self, instruction_index: Incomplete, program_name: Incomplete) -> None: ... + def deleteProgram(self, program_name: Incomplete = ...) -> None: ... + def insertAfterToken(self, token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... + def insertAfter(self, index: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... 
+ def insertBeforeIndex(self, index: Incomplete, text: Incomplete) -> None: ... + def insertBeforeToken(self, token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... + def insertBefore(self, program_name: Incomplete, index: Incomplete, text: Incomplete) -> None: ... + def replaceIndex(self, index: Incomplete, text: Incomplete) -> None: ... + def replaceRange(self, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... + def replaceSingleToken(self, token: Incomplete, text: Incomplete) -> None: ... + def replaceRangeTokens(self, from_token: Incomplete, to_token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... + def replace(self, program_name: Incomplete, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... + def deleteToken(self, token: Incomplete) -> None: ... + def deleteIndex(self, index: Incomplete) -> None: ... + def delete(self, program_name: Incomplete, from_idx: Incomplete, to_idx: Incomplete) -> None: ... + def lastRewriteTokenIndex(self, program_name: Incomplete = ...) -> Incomplete: ... + def setLastRewriteTokenIndex(self, program_name: Incomplete, i: Incomplete) -> None: ... + def getProgram(self, program_name: Incomplete) -> Incomplete: ... + def getDefaultText(self) -> Incomplete: ... + def getText(self, program_name: Incomplete, start: int, stop: int) -> Incomplete: ... class RewriteOperation: tokens: Incomplete index: Incomplete text: Incomplete instructionIndex: int - def __init__(self, tokens, index, text: str = ...) -> None: ... - def execute(self, buf): ... + def __init__(self, tokens: Incomplete, index: Incomplete, text: str = ...) -> None: ... + def execute(self, buf: Incomplete) -> Incomplete: ... class InsertBeforeOp(RewriteOperation): - def __init__(self, tokens, index, text: str = ...) -> None: ... - def execute(self, buf): ... + def __init__(self, tokens: Incomplete, index: Incomplete, text: str = ...) -> None: ... + def execute(self, buf: Incomplete) -> Incomplete: ... class InsertAfterOp(InsertBeforeOp): ... class ReplaceOp(RewriteOperation): last_index: Incomplete - def __init__(self, from_idx, to_idx, tokens, text) -> None: ... - def execute(self, buf): ... + def __init__(self, from_idx: Incomplete, to_idx: Incomplete, tokens: Incomplete, text: Incomplete) -> None: ... + def execute(self, buf: Incomplete) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi index 42f2c4261b11..87c146271c03 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi @@ -1,2 +1,4 @@ -def str_list(val): ... -def escapeWhitespace(s: str, escapeSpaces: bool): ... +from _typeshed import Incomplete + +def str_list(val: Incomplete) -> str: ... +def escapeWhitespace(s: str, escapeSpaces: bool) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi index c45632a1e4d1..3cb81e4ceed8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi @@ -1,4 +1,6 @@ + +from _typeshed import Incomplete from antlr4 import * -def beautify_lisp_string(in_string): ... +def beautify_lisp_string(in_string: Incomplete) -> Incomplete: ... def main() -> None: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi index e65d631decd5..b5a76ea6bb2f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi @@ -19,11 +19,11 @@ class ATN: lexerActions: Incomplete modeToStartState: Incomplete def __init__(self, grammarType: ATNType, maxTokenType: int) -> None: ... - def nextTokensInContext(self, s: ATNState, ctx: RuleContext): ... - def nextTokensNoContext(self, s: ATNState): ... - def nextTokens(self, s: ATNState, ctx: RuleContext = ...): ... - def addState(self, state: ATNState): ... - def removeState(self, state: ATNState): ... - def defineDecisionState(self, s: DecisionState): ... - def getDecisionState(self, decision: int): ... - def getExpectedTokens(self, stateNumber: int, ctx: RuleContext): ... + def nextTokensInContext(self, s: ATNState, ctx: RuleContext) -> Incomplete: ... + def nextTokensNoContext(self, s: ATNState) -> Incomplete: ... + def nextTokens(self, s: ATNState, ctx: RuleContext = ...) -> Incomplete: ... + def addState(self, state: ATNState) -> Incomplete: ... + def removeState(self, state: ATNState) -> Incomplete: ... + def defineDecisionState(self, s: DecisionState) -> Incomplete: ... + def getDecisionState(self, decision: int) -> Incomplete: ... + def getExpectedTokens(self, stateNumber: int, ctx: RuleContext) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi index 70d3281d00d3..df9b16d0edb9 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi @@ -20,10 +20,10 @@ class ATNConfig: semantic: SemanticContext = ..., config: ATNConfig = ..., ) -> None: ... - def __eq__(self, other): ... - def __hash__(self): ... - def hashCodeForConfigSet(self): ... - def equalsForConfigSet(self, other): ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def hashCodeForConfigSet(self) -> Incomplete: ... + def equalsForConfigSet(self, other: Incomplete) -> Incomplete: ... class LexerATNConfig(ATNConfig): lexerActionExecutor: Incomplete @@ -37,8 +37,8 @@ class LexerATNConfig(ATNConfig): lexerActionExecutor: LexerActionExecutor = ..., config: LexerATNConfig = ..., ) -> None: ... - def __hash__(self): ... - def __eq__(self, other): ... - def hashCodeForConfigSet(self): ... - def equalsForConfigSet(self, other): ... - def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState): ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def hashCodeForConfigSet(self) -> Incomplete: ... + def equalsForConfigSet(self, other: Incomplete) -> Incomplete: ... + def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi index fc58fd8c8b8e..60f500b77ae2 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi @@ -23,22 +23,22 @@ class ATNConfigSet: dipsIntoOuterContext: bool cachedHashCode: int def __init__(self, fullCtx: bool = ...) -> None: ... - def __iter__(self): ... - def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...): ... - def getOrAdd(self, config: ATNConfig): ... - def getStates(self): ... 
- def getPredicates(self): ... - def get(self, i: int): ... - def optimizeConfigs(self, interpreter: ATNSimulator): ... - def addAll(self, coll: list[Incomplete]): ... - def __eq__(self, other): ... - def __hash__(self): ... - def hashConfigs(self): ... + def __iter__(self) -> Incomplete: ... + def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...) -> Incomplete: ... + def getOrAdd(self, config: ATNConfig) -> Incomplete: ... + def getStates(self) -> Incomplete: ... + def getPredicates(self) -> Incomplete: ... + def get(self, i: int) -> Incomplete: ... + def optimizeConfigs(self, interpreter: ATNSimulator) -> Incomplete: ... + def addAll(self, coll: list[Incomplete]) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def hashConfigs(self) -> Incomplete: ... def __len__(self) -> int: ... - def isEmpty(self): ... - def __contains__(self, config) -> bool: ... + def isEmpty(self) -> Incomplete: ... + def __contains__(self, config: Incomplete) -> bool: ... def clear(self) -> None: ... - def setReadonly(self, readonly: bool): ... + def setReadonly(self, readonly: bool) -> Incomplete: ... class OrderedATNConfigSet(ATNConfigSet): def __init__(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi index 0a5111d60c79..b041fc0efc41 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi @@ -6,4 +6,4 @@ class ATNDeserializationOptions: verifyATN: Incomplete generateRuleBypassTransitions: Incomplete def __init__(self, copyFrom: ATNDeserializationOptions = ...) -> None: ... - def __setattr__(self, key, value) -> None: ... + def __setattr__(self, key: Incomplete, value: Incomplete) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index 5d8b28604ee6..9da95395edc9 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -16,27 +16,27 @@ class ATNDeserializer: def __init__(self, options: ATNDeserializationOptions = ...) -> None: ... data: Incomplete pos: int - def deserialize(self, data: list[int]): ... + def deserialize(self, data: list[int]) -> Incomplete: ... def checkVersion(self) -> None: ... - def readATN(self): ... - def readStates(self, atn: ATN): ... - def readRules(self, atn: ATN): ... - def readModes(self, atn: ATN): ... - def readSets(self, atn: ATN, sets: list[Incomplete]): ... - def readEdges(self, atn: ATN, sets: list[Incomplete]): ... - def readDecisions(self, atn: ATN): ... - def readLexerActions(self, atn: ATN): ... - def generateRuleBypassTransitions(self, atn: ATN): ... - def generateRuleBypassTransition(self, atn: ATN, idx: int): ... - def stateIsEndStateFor(self, state: ATNState, idx: int): ... - def markPrecedenceDecisions(self, atn: ATN): ... - def verifyATN(self, atn: ATN): ... - def checkCondition(self, condition: bool, message: Incomplete | None = ...): ... - def readInt(self): ... + def readATN(self) -> Incomplete: ... + def readStates(self, atn: ATN) -> Incomplete: ... + def readRules(self, atn: ATN) -> Incomplete: ... + def readModes(self, atn: ATN) -> Incomplete: ... + def readSets(self, atn: ATN, sets: list[Incomplete]) -> Incomplete: ... 
+ def readEdges(self, atn: ATN, sets: list[Incomplete]) -> Incomplete: ... + def readDecisions(self, atn: ATN) -> Incomplete: ... + def readLexerActions(self, atn: ATN) -> Incomplete: ... + def generateRuleBypassTransitions(self, atn: ATN) -> Incomplete: ... + def generateRuleBypassTransition(self, atn: ATN, idx: int) -> Incomplete: ... + def stateIsEndStateFor(self, state: ATNState, idx: int) -> Incomplete: ... + def markPrecedenceDecisions(self, atn: ATN) -> Incomplete: ... + def verifyATN(self, atn: ATN) -> Incomplete: ... + def checkCondition(self, condition: bool, message: Incomplete | None = ...) -> Incomplete: ... + def readInt(self) -> Incomplete: ... edgeFactories: Incomplete - def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]): ... + def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]) -> Incomplete: ... stateFactories: Incomplete - def stateFactory(self, type: int, ruleIndex: int): ... + def stateFactory(self, type: int, ruleIndex: int) -> Incomplete: ... CHANNEL: int CUSTOM: int MODE: int @@ -46,4 +46,4 @@ class ATNDeserializer: SKIP: int TYPE: int actionFactories: Incomplete - def lexerActionFactory(self, type: int, data1: int, data2: int): ... + def lexerActionFactory(self, type: int, data1: int, data2: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi index 11fbfe7b0705..57c34e3404c1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi @@ -14,4 +14,4 @@ class ATNSimulator: atn: Incomplete sharedContextCache: Incomplete def __init__(self, atn: ATN, sharedContextCache: PredictionContextCache) -> None: ... - def getCachedContext(self, context: PredictionContext): ... + def getCachedContext(self, context: PredictionContext) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi index d24e1365e824..32b16a22e6ef 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi @@ -28,11 +28,11 @@ class ATNState: transitions: Incomplete nextTokenWithinRule: Incomplete def __init__(self) -> None: ... - def __hash__(self): ... - def __eq__(self, other): ... - def onlyHasEpsilonTransitions(self): ... - def isNonGreedyExitState(self): ... - def addTransition(self, trans: Transition, index: int = ...): ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def onlyHasEpsilonTransitions(self) -> Incomplete: ... + def isNonGreedyExitState(self) -> Incomplete: ... + def addTransition(self, trans: Transition, index: int = ...) -> Incomplete: ... class BasicState(ATNState): stateType: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi index 888deeb2be1e..4322de176e7d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi @@ -1,7 +1,9 @@ + +from _typeshed import Incomplete from enum import IntEnum class ATNType(IntEnum): LEXER: int PARSER: int @classmethod - def fromOrdinal(cls, i: int): ... + def fromOrdinal(cls, i: int) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index 6d0e1156c1c0..60c66048eb9d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -47,20 +47,20 @@ class LexerATNSimulator(ATNSimulator): MAX_CHAR_VALUE: Incomplete prevAccept: Incomplete def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... - def copyState(self, simulator: LexerATNSimulator): ... - def match(self, input: InputStream, mode: int): ... + def copyState(self, simulator: LexerATNSimulator) -> Incomplete: ... + def match(self, input: InputStream, mode: int) -> Incomplete: ... def reset(self) -> None: ... - def matchATN(self, input: InputStream): ... - def execATN(self, input: InputStream, ds0: DFAState): ... - def getExistingTargetState(self, s: DFAState, t: int): ... - def computeTargetState(self, input: InputStream, s: DFAState, t: int): ... - def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int): ... - def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int): ... + def matchATN(self, input: InputStream) -> Incomplete: ... + def execATN(self, input: InputStream, ds0: DFAState) -> Incomplete: ... + def getExistingTargetState(self, s: DFAState, t: int) -> Incomplete: ... + def computeTargetState(self, input: InputStream, s: DFAState, t: int) -> Incomplete: ... + def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int) -> Incomplete: ... + def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int) -> Incomplete: ... def accept( self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int - ): ... - def getReachableTarget(self, trans: Transition, t: int): ... - def computeStartState(self, input: InputStream, p: ATNState): ... + ) -> Incomplete: ... + def getReachableTarget(self, trans: Transition, t: int) -> Incomplete: ... + def computeStartState(self, input: InputStream, p: ATNState) -> Incomplete: ... def closure( self, input: InputStream, @@ -69,7 +69,7 @@ class LexerATNSimulator(ATNSimulator): currentAltReachedAcceptState: bool, speculative: bool, treatEofAsEpsilon: bool, - ): ... + ) -> Incomplete: ... def getEpsilonTarget( self, input: InputStream, @@ -78,12 +78,12 @@ class LexerATNSimulator(ATNSimulator): configs: ATNConfigSet, speculative: bool, treatEofAsEpsilon: bool, - ): ... - def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool): ... - def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState): ... + ) -> Incomplete: ... + def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool) -> Incomplete: ... + def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState) -> Incomplete: ... def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState = ..., cfgs: ATNConfigSet = ...) -> DFAState: ... def addDFAState(self, configs: ATNConfigSet) -> DFAState: ... - def getDFA(self, mode: int): ... - def getText(self, input: InputStream): ... - def consume(self, input: InputStream): ... - def getTokenName(self, t: int): ... + def getDFA(self, mode: int) -> Incomplete: ... 
+ def getText(self, input: InputStream) -> Incomplete: ... + def consume(self, input: InputStream) -> Incomplete: ... + def getTokenName(self, t: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi index 0309d3549f50..2446757724fa 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi @@ -17,66 +17,66 @@ class LexerAction: actionType: Incomplete isPositionDependent: bool def __init__(self, action: LexerActionType) -> None: ... - def __hash__(self): ... - def __eq__(self, other): ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerSkipAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer): ... + def execute(self, lexer: Lexer) -> Incomplete: ... class LexerTypeAction(LexerAction): type: Incomplete def __init__(self, type: int) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerPushModeAction(LexerAction): mode: Incomplete def __init__(self, mode: int) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerPopModeAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer): ... + def execute(self, lexer: Lexer) -> Incomplete: ... class LexerMoreAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer): ... + def execute(self, lexer: Lexer) -> Incomplete: ... class LexerModeAction(LexerAction): mode: Incomplete def __init__(self, mode: int) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerCustomAction(LexerAction): ruleIndex: Incomplete actionIndex: Incomplete isPositionDependent: bool def __init__(self, ruleIndex: int, actionIndex: int) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerChannelAction(LexerAction): channel: Incomplete def __init__(self, channel: int) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class LexerIndexedCustomAction(LexerAction): offset: Incomplete action: Incomplete isPositionDependent: bool def __init__(self, offset: int, action: LexerAction) -> None: ... - def execute(self, lexer: Lexer): ... - def __hash__(self): ... - def __eq__(self, other): ... + def execute(self, lexer: Lexer) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi index e2226687ae29..5ed06d2718bf 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi @@ -10,8 +10,8 @@ class LexerActionExecutor: hashCode: Incomplete def __init__(self, lexerActions: list[LexerAction] = ...) -> None: ... @staticmethod - def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... - def fixOffsetBeforeMatch(self, offset: int): ... - def execute(self, lexer: Lexer, input: InputStream, startIndex: int): ... - def __hash__(self): ... - def __eq__(self, other): ... + def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction) -> Incomplete: ... + def fixOffsetBeforeMatch(self, offset: int) -> Incomplete: ... + def execute(self, lexer: Lexer, input: InputStream, startIndex: int) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index 824700785e60..ca93094d6923 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -46,25 +46,25 @@ class ParserATNSimulator(ATNSimulator): self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache ) -> None: ... def reset(self) -> None: ... - def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... - def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... - def getExistingTargetState(self, previousD: DFAState, t: int): ... - def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ... - def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ... + def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext) -> Incomplete: ... + def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext) -> Incomplete: ... + def getExistingTargetState(self, previousD: DFAState, t: int) -> Incomplete: ... + def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int) -> Incomplete: ... + def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState) -> Incomplete: ... def execATNWithFullContext( self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext - ): ... - def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool): ... - def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool): ... - def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ... - def applyPrecedenceFilter(self, configs: ATNConfigSet): ... - def getReachableTarget(self, trans: Transition, ttype: int): ... - def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int): ... - def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]): ... - def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... - def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... 
- def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... - def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool): ... + ) -> Incomplete: ... + def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool) -> Incomplete: ... + def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool) -> Incomplete: ... + def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool) -> Incomplete: ... + def applyPrecedenceFilter(self, configs: ATNConfigSet) -> Incomplete: ... + def getReachableTarget(self, trans: Transition, ttype: int) -> Incomplete: ... + def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int) -> Incomplete: ... + def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]) -> Incomplete: ... + def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext) -> Incomplete: ... + def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet) -> Incomplete: ... + def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext) -> Incomplete: ... + def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool) -> Incomplete: ... def closure( self, config: ATNConfig, @@ -73,7 +73,7 @@ class ParserATNSimulator(ATNSimulator): collectPredicates: bool, fullCtx: bool, treatEofAsEpsilon: bool, - ): ... + ) -> Incomplete: ... def closureCheckingStopState( self, config: ATNConfig, @@ -83,7 +83,7 @@ class ParserATNSimulator(ATNSimulator): fullCtx: bool, depth: int, treatEofAsEpsilon: bool, - ): ... + ) -> Incomplete: ... def closure_( self, config: ATNConfig, @@ -93,34 +93,34 @@ class ParserATNSimulator(ATNSimulator): fullCtx: bool, depth: int, treatEofAsEpsilon: bool, - ): ... - def canDropLoopEntryEdgeInLeftRecursiveRule(self, config): ... - def getRuleName(self, index: int): ... + ) -> Incomplete: ... + def canDropLoopEntryEdgeInLeftRecursiveRule(self, config: Incomplete) -> Incomplete: ... + def getRuleName(self, index: int) -> Incomplete: ... epsilonTargetMethods: Incomplete def getEpsilonTarget( self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool - ): ... - def actionTransition(self, config: ATNConfig, t: ActionTransition): ... + ) -> Incomplete: ... + def actionTransition(self, config: ATNConfig, t: ActionTransition) -> Incomplete: ... def precedenceTransition( self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool - ): ... + ) -> Incomplete: ... def predTransition( self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool - ): ... - def ruleTransition(self, config: ATNConfig, t: RuleTransition): ... - def getConflictingAlts(self, configs: ATNConfigSet): ... - def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet): ... - def getTokenName(self, t: int): ... - def getLookaheadName(self, input: TokenStream): ... - def dumpDeadEndConfigs(self, nvae: NoViableAltException): ... - def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int): ... - def getUniqueAlt(self, configs: ATNConfigSet): ... - def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... 
- def addDFAState(self, dfa: DFA, D: DFAState): ... + ) -> Incomplete: ... + def ruleTransition(self, config: ATNConfig, t: RuleTransition) -> Incomplete: ... + def getConflictingAlts(self, configs: ATNConfigSet) -> Incomplete: ... + def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet) -> Incomplete: ... + def getTokenName(self, t: int) -> Incomplete: ... + def getLookaheadName(self, input: TokenStream) -> Incomplete: ... + def dumpDeadEndConfigs(self, nvae: NoViableAltException) -> Incomplete: ... + def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int) -> Incomplete: ... + def getUniqueAlt(self, configs: ATNConfigSet) -> Incomplete: ... + def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState) -> Incomplete: ... + def addDFAState(self, dfa: DFA, D: DFAState) -> Incomplete: ... def reportAttemptingFullContext( self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int - ): ... - def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... + ) -> Incomplete: ... + def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int) -> Incomplete: ... def reportAmbiguity( self, dfa: DFA, @@ -130,4 +130,4 @@ class ParserATNSimulator(ATNSimulator): exact: bool, ambigAlts: set[Incomplete], configs: ATNConfigSet, - ): ... + ) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 09a04cc5b11a..b302a42b2cfe 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -1,3 +1,5 @@ + +from _typeshed import Incomplete from enum import Enum from antlr4.atn.ATN import ATN as ATN @@ -11,30 +13,30 @@ class PredictionMode(Enum): LL: int LL_EXACT_AMBIG_DETECTION: int @classmethod - def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet): ... + def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def hasConfigInRuleStopState(cls, configs: ATNConfigSet): ... + def hasConfigInRuleStopState(cls, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ... + def allConfigsInRuleStopStates(cls, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]): ... + def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def allSubsetsConflict(cls, altsets: list[set[int]]): ... + def allSubsetsConflict(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def hasNonConflictingAltSet(cls, altsets: list[set[int]]): ... + def hasNonConflictingAltSet(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def hasConflictingAltSet(cls, altsets: list[set[int]]): ... + def hasConflictingAltSet(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def allSubsetsEqual(cls, altsets: list[set[int]]): ... + def allSubsetsEqual(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def getUniqueAlt(cls, altsets: list[set[int]]): ... + def getUniqueAlt(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def getAlts(cls, altsets: list[set[int]]): ... 
+ def getAlts(cls, altsets: list[set[int]]) -> Incomplete: ... @classmethod - def getConflictingAltSubsets(cls, configs: ATNConfigSet): ... + def getConflictingAltSubsets(cls, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def getStateToAltMap(cls, configs: ATNConfigSet): ... + def getStateToAltMap(cls, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ... + def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet) -> Incomplete: ... @classmethod - def getSingleViableAlt(cls, altsets: list[set[int]]): ... + def getSingleViableAlt(cls, altsets: list[set[int]]) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi index 0cbd255d52e3..c9c1dbdc1728 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi @@ -5,12 +5,12 @@ from antlr4.RuleContext import RuleContext as RuleContext class SemanticContext: NONE: Incomplete - def eval(self, parser: Recognizer, outerContext: RuleContext): ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... -def andContext(a: SemanticContext, b: SemanticContext): ... -def orContext(a: SemanticContext, b: SemanticContext): ... -def filterPrecedencePredicates(collection: set[SemanticContext]): ... +def andContext(a: SemanticContext, b: SemanticContext) -> Incomplete: ... +def orContext(a: SemanticContext, b: SemanticContext) -> Incomplete: ... +def filterPrecedencePredicates(collection: set[SemanticContext]) -> Incomplete: ... class EmptySemanticContext(SemanticContext): ... @@ -19,31 +19,31 @@ class Predicate(SemanticContext): predIndex: Incomplete isCtxDependent: Incomplete def __init__(self, ruleIndex: int = ..., predIndex: int = ..., isCtxDependent: bool = ...) -> None: ... - def eval(self, parser: Recognizer, outerContext: RuleContext): ... - def __hash__(self): ... - def __eq__(self, other): ... + def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class PrecedencePredicate(SemanticContext): precedence: Incomplete def __init__(self, precedence: int = ...) -> None: ... - def eval(self, parser: Recognizer, outerContext: RuleContext): ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... - def __lt__(self, other): ... - def __hash__(self): ... - def __eq__(self, other): ... + def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def __lt__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... class AND(SemanticContext): opnds: Incomplete def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... - def __eq__(self, other): ... - def __hash__(self): ... - def eval(self, parser: Recognizer, outerContext: RuleContext): ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... 
+ def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... class OR(SemanticContext): opnds: Incomplete def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... - def __eq__(self, other): ... - def __hash__(self): ... - def eval(self, parser: Recognizer, outerContext: RuleContext): ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi index 79f11e6d695d..9a1928dbe7df 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi @@ -28,8 +28,8 @@ class AtomTransition(Transition): label: Incomplete serializationType: Incomplete def __init__(self, target: ATNState, label: int) -> None: ... - def makeLabel(self): ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def makeLabel(self) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class RuleTransition(Transition): ruleIndex: Incomplete @@ -38,14 +38,14 @@ class RuleTransition(Transition): serializationType: Incomplete isEpsilon: bool def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState: ATNState) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class EpsilonTransition(Transition): serializationType: Incomplete isEpsilon: bool outermostPrecedenceReturn: Incomplete - def __init__(self, target, outermostPrecedenceReturn: int = ...) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def __init__(self, target: Incomplete, outermostPrecedenceReturn: int = ...) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class RangeTransition(Transition): serializationType: Incomplete @@ -53,8 +53,8 @@ class RangeTransition(Transition): stop: Incomplete label: Incomplete def __init__(self, target: ATNState, start: int, stop: int) -> None: ... - def makeLabel(self): ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def makeLabel(self) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class AbstractPredicateTransition(Transition): def __init__(self, target: ATNState) -> None: ... @@ -66,8 +66,8 @@ class PredicateTransition(AbstractPredicateTransition): isCtxDependent: Incomplete isEpsilon: bool def __init__(self, target: ATNState, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... - def getPredicate(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def getPredicate(self) -> Incomplete: ... 
class ActionTransition(Transition): serializationType: Incomplete @@ -76,28 +76,28 @@ class ActionTransition(Transition): isCtxDependent: Incomplete isEpsilon: bool def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = ..., isCtxDependent: bool = ...) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class SetTransition(Transition): serializationType: Incomplete label: Incomplete def __init__(self, target: ATNState, set: IntervalSet) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class NotSetTransition(SetTransition): serializationType: Incomplete def __init__(self, target: ATNState, set: IntervalSet) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class WildcardTransition(Transition): serializationType: Incomplete def __init__(self, target: ATNState) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... class PrecedencePredicateTransition(AbstractPredicateTransition): serializationType: Incomplete precedence: Incomplete isEpsilon: bool def __init__(self, target: ATNState, precedence: int) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... - def getPredicate(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def getPredicate(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi index 5898365dfc98..e76157b644c7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi @@ -11,11 +11,11 @@ class DFA: s0: Incomplete precedenceDfa: bool def __init__(self, atnStartState: DecisionState, decision: int = ...) -> None: ... - def getPrecedenceStartState(self, precedence: int): ... - def setPrecedenceStartState(self, precedence: int, startState: DFAState): ... - def setPrecedenceDfa(self, precedenceDfa: bool): ... + def getPrecedenceStartState(self, precedence: int) -> Incomplete: ... + def setPrecedenceStartState(self, precedence: int, startState: DFAState) -> Incomplete: ... + def setPrecedenceDfa(self, precedenceDfa: bool) -> Incomplete: ... @property - def states(self): ... - def sortedStates(self): ... - def toString(self, literalNames: list[str] = ..., symbolicNames: list[str] = ...): ... - def toLexerString(self): ... + def states(self) -> Incomplete: ... + def sortedStates(self) -> Incomplete: ... + def toString(self, literalNames: list[str] = ..., symbolicNames: list[str] = ...) -> Incomplete: ... + def toLexerString(self) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi index b2f934c75201..e9e6b441d978 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi @@ -9,9 +9,9 @@ class DFASerializer: literalNames: Incomplete symbolicNames: Incomplete def __init__(self, dfa: DFA, literalNames: list[str] = ..., symbolicNames: list[str] = ...) -> None: ... - def getEdgeLabel(self, i: int): ... - def getStateString(self, s: DFAState): ... + def getEdgeLabel(self, i: int) -> Incomplete: ... + def getStateString(self, s: DFAState) -> Incomplete: ... class LexerDFASerializer(DFASerializer): def __init__(self, dfa: DFA) -> None: ... - def getEdgeLabel(self, i: int): ... + def getEdgeLabel(self, i: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi index 752bbd6929a0..955ec701eb9a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi @@ -18,6 +18,6 @@ class DFAState: requiresFullContext: bool predicates: Incomplete def __init__(self, stateNumber: int = ..., configs: ATNConfigSet = ...) -> None: ... - def getAltSet(self): ... - def __hash__(self): ... - def __eq__(self, other): ... + def getAltSet(self) -> Incomplete: ... + def __hash__(self) -> Incomplete: ... + def __eq__(self, other: Incomplete) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index 75be49895ff4..20f0db5a509f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -8,13 +8,13 @@ class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete def __init__(self, exactOnly: bool = ...) -> None: ... def reportAmbiguity( - self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet - ): ... + self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet + ) -> Incomplete: ... def reportAttemptingFullContext( - self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet - ): ... + self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet + ) -> Incomplete: ... def reportContextSensitivity( - self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet - ): ... - def getDecisionDescription(self, recognizer, dfa: DFA): ... - def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet): ... + self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet + ) -> Incomplete: ... + def getDecisionDescription(self, recognizer: Incomplete, dfa: DFA) -> Incomplete: ... + def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi index 34482bde9131..b198d86663b6 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi @@ -1,19 +1,19 @@ from _typeshed import Incomplete class ErrorListener: - def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... - def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... - def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... - def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... + def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... + def reportAmbiguity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, exact: Incomplete, ambigAlts: Incomplete, configs: Incomplete) -> None: ... + def reportAttemptingFullContext(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, conflictingAlts: Incomplete, configs: Incomplete) -> None: ... + def reportContextSensitivity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, prediction: Incomplete, configs: Incomplete) -> None: ... class ConsoleErrorListener(ErrorListener): INSTANCE: Incomplete - def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... class ProxyErrorListener(ErrorListener): delegates: Incomplete - def __init__(self, delegates) -> None: ... - def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... - def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... - def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... - def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... + def __init__(self, delegates: Incomplete) -> None: ... + def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... + def reportAmbiguity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, exact: Incomplete, ambigAlts: Incomplete, configs: Incomplete) -> None: ... + def reportAttemptingFullContext(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, conflictingAlts: Incomplete, configs: Incomplete) -> None: ... + def reportContextSensitivity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, prediction: Incomplete, configs: Incomplete) -> None: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi index 6b7ea7128d93..470603ec55a7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi @@ -14,12 +14,12 @@ from antlr4.Token import Token as Token Parser: Incomplete class ErrorStrategy: - def reset(self, recognizer: Parser): ... - def recoverInline(self, recognizer: Parser): ... - def recover(self, recognizer: Parser, e: RecognitionException): ... - def sync(self, recognizer: Parser): ... - def inErrorRecoveryMode(self, recognizer: Parser): ... - def reportError(self, recognizer: Parser, e: RecognitionException): ... + def reset(self, recognizer: Parser) -> Incomplete: ... + def recoverInline(self, recognizer: Parser) -> Incomplete: ... + def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... + def sync(self, recognizer: Parser) -> Incomplete: ... + def inErrorRecoveryMode(self, recognizer: Parser) -> Incomplete: ... + def reportError(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... class DefaultErrorStrategy(ErrorStrategy): errorRecoveryMode: bool @@ -28,31 +28,31 @@ class DefaultErrorStrategy(ErrorStrategy): nextTokensContext: Incomplete nextTokenState: int def __init__(self) -> None: ... - def reset(self, recognizer: Parser): ... - def beginErrorCondition(self, recognizer: Parser): ... - def inErrorRecoveryMode(self, recognizer: Parser): ... - def endErrorCondition(self, recognizer: Parser): ... - def reportMatch(self, recognizer: Parser): ... - def reportError(self, recognizer: Parser, e: RecognitionException): ... - def recover(self, recognizer: Parser, e: RecognitionException): ... + def reset(self, recognizer: Parser) -> Incomplete: ... + def beginErrorCondition(self, recognizer: Parser) -> Incomplete: ... + def inErrorRecoveryMode(self, recognizer: Parser) -> Incomplete: ... + def endErrorCondition(self, recognizer: Parser) -> Incomplete: ... + def reportMatch(self, recognizer: Parser) -> Incomplete: ... + def reportError(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... + def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... nextTokensState: Incomplete - def sync(self, recognizer: Parser): ... - def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException): ... - def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException): ... - def reportFailedPredicate(self, recognizer, e) -> None: ... - def reportUnwantedToken(self, recognizer: Parser): ... - def reportMissingToken(self, recognizer: Parser): ... - def recoverInline(self, recognizer: Parser): ... - def singleTokenInsertion(self, recognizer: Parser): ... - def singleTokenDeletion(self, recognizer: Parser): ... - def getMissingSymbol(self, recognizer: Parser): ... - def getExpectedTokens(self, recognizer: Parser): ... - def getTokenErrorDisplay(self, t: Token): ... - def escapeWSAndQuote(self, s: str): ... - def getErrorRecoverySet(self, recognizer: Parser): ... - def consumeUntil(self, recognizer: Parser, set_: set[int]): ... + def sync(self, recognizer: Parser) -> Incomplete: ... + def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException) -> Incomplete: ... + def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException) -> Incomplete: ... + def reportFailedPredicate(self, recognizer: Incomplete, e: Incomplete) -> None: ... 
+ def reportUnwantedToken(self, recognizer: Parser) -> Incomplete: ... + def reportMissingToken(self, recognizer: Parser) -> Incomplete: ... + def recoverInline(self, recognizer: Parser) -> Incomplete: ... + def singleTokenInsertion(self, recognizer: Parser) -> Incomplete: ... + def singleTokenDeletion(self, recognizer: Parser) -> Incomplete: ... + def getMissingSymbol(self, recognizer: Parser) -> Incomplete: ... + def getExpectedTokens(self, recognizer: Parser) -> Incomplete: ... + def getTokenErrorDisplay(self, t: Token) -> Incomplete: ... + def escapeWSAndQuote(self, s: str) -> Incomplete: ... + def getErrorRecoverySet(self, recognizer: Parser) -> Incomplete: ... + def consumeUntil(self, recognizer: Parser, set_: set[int]) -> Incomplete: ... class BailErrorStrategy(DefaultErrorStrategy): - def recover(self, recognizer: Parser, e: RecognitionException): ... - def recoverInline(self, recognizer: Parser): ... - def sync(self, recognizer: Parser): ... + def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... + def recoverInline(self, recognizer: Parser) -> Incomplete: ... + def sync(self, recognizer: Parser) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index 7d7a1aa511c0..860cef8871f1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -32,7 +32,7 @@ class RecognitionException(Exception): def __init__( self, message: str = ..., recognizer: Recognizer = ..., input: InputStream = ..., ctx: ParserRulecontext = ... ) -> None: ... - def getExpectedTokens(self): ... + def getExpectedTokens(self) -> Incomplete: ... class LexerNoViableAltException(RecognitionException): startIndex: Incomplete @@ -64,6 +64,6 @@ class FailedPredicateException(RecognitionException): predicate: Incomplete offendingToken: Incomplete def __init__(self, recognizer: Parser, predicate: str = ..., message: str = ...) -> None: ... - def formatMessage(self, predicate: str, message: str): ... + def formatMessage(self, predicate: str, message: str) -> Incomplete: ... class ParseCancellationException(CancellationException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi index 23e09c3dd752..da9a5b6a851f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi @@ -11,6 +11,6 @@ class ParseTreeMatch: def __init__( self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree ) -> None: ... - def get(self, label: str): ... - def getAll(self, label: str): ... - def succeeded(self): ... + def get(self, label: str) -> Incomplete: ... + def getAll(self, label: str) -> Incomplete: ... + def succeeded(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi index d50da5c95c09..c685195ebd3f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi @@ -10,6 +10,6 @@ class ParseTreePattern: pattern: Incomplete patternTree: Incomplete def __init__(self, matcher: ParseTreePatternMatcher, pattern: str, patternRuleIndex: int, patternTree: ParseTree) -> None: ... - def match(self, tree: ParseTree): ... - def matches(self, tree: ParseTree): ... 
- def findAll(self, tree: ParseTree, xpath: str): ... + def match(self, tree: ParseTree) -> Incomplete: ... + def matches(self, tree: ParseTree) -> Incomplete: ... + def findAll(self, tree: ParseTree, xpath: str) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi index 392d7be9b31b..84148c78f3a1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi @@ -31,14 +31,14 @@ class ParseTreePatternMatcher: stop: str escape: str def __init__(self, lexer: Lexer, parser: Parser) -> None: ... - def setDelimiters(self, start: str, stop: str, escapeLeft: str): ... - def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... - def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... - def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... - def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... - def compileTreePattern(self, pattern: str, patternRuleIndex: int): ... - def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]): ... - def map(self, labels, label, tree) -> None: ... - def getRuleTagToken(self, tree: ParseTree): ... - def tokenize(self, pattern: str): ... - def split(self, pattern: str): ... + def setDelimiters(self, start: str, stop: str, escapeLeft: str) -> Incomplete: ... + def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int) -> Incomplete: ... + def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern) -> Incomplete: ... + def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int) -> Incomplete: ... + def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern) -> Incomplete: ... + def compileTreePattern(self, pattern: str, patternRuleIndex: int) -> Incomplete: ... + def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]) -> Incomplete: ... + def map(self, labels: Incomplete, label: Incomplete, tree: Incomplete) -> None: ... + def getRuleTagToken(self, tree: ParseTree) -> Incomplete: ... + def tokenize(self, pattern: str) -> Incomplete: ... + def split(self, pattern: str) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi index 4e3e5a2c673f..c4bf5b594dd7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi @@ -14,4 +14,4 @@ class RuleTagToken(Token): label: Incomplete ruleName: Incomplete def __init__(self, ruleName: str, bypassTokenType: int, label: str = ...) -> None: ... - def getText(self): ... + def getText(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi index c8ba32d8dd99..f49b1901405b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi @@ -6,4 +6,4 @@ class TokenTagToken(CommonToken): tokenName: Incomplete label: Incomplete def __init__(self, tokenName: str, type: int, label: str = ...) -> None: ... - def getText(self): ... + def getText(self) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi index d056082b2531..429388d0597a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi @@ -12,42 +12,42 @@ class TerminalNode(ParseTree): ... class ErrorNode(TerminalNode): ... class ParseTreeVisitor: - def visit(self, tree): ... - def visitChildren(self, node): ... - def visitTerminal(self, node): ... - def visitErrorNode(self, node): ... + def visit(self, tree: Incomplete) -> Incomplete: ... + def visitChildren(self, node: Incomplete) -> Incomplete: ... + def visitTerminal(self, node: Incomplete) -> Incomplete: ... + def visitErrorNode(self, node: Incomplete) -> Incomplete: ... def defaultResult(self) -> None: ... - def aggregateResult(self, aggregate, nextResult): ... - def shouldVisitNextChild(self, node, currentResult): ... + def aggregateResult(self, aggregate: Incomplete, nextResult: Incomplete) -> Incomplete: ... + def shouldVisitNextChild(self, node: Incomplete, currentResult: Incomplete) -> Incomplete: ... ParserRuleContext: Incomplete class ParseTreeListener: - def visitTerminal(self, node: TerminalNode): ... - def visitErrorNode(self, node: ErrorNode): ... - def enterEveryRule(self, ctx: ParserRuleContext): ... - def exitEveryRule(self, ctx: ParserRuleContext): ... + def visitTerminal(self, node: TerminalNode) -> Incomplete: ... + def visitErrorNode(self, node: ErrorNode) -> Incomplete: ... + def enterEveryRule(self, ctx: ParserRuleContext) -> Incomplete: ... + def exitEveryRule(self, ctx: ParserRuleContext) -> Incomplete: ... class TerminalNodeImpl(TerminalNode): parentCtx: Incomplete symbol: Incomplete def __init__(self, symbol: Token) -> None: ... - def __setattr__(self, key, value) -> None: ... - def getChild(self, i: int): ... - def getSymbol(self): ... - def getParent(self): ... - def getPayload(self): ... - def getSourceInterval(self): ... - def getChildCount(self): ... - def accept(self, visitor: ParseTreeVisitor): ... - def getText(self): ... + def __setattr__(self, key: Incomplete, value: Incomplete) -> None: ... + def getChild(self, i: int) -> Incomplete: ... + def getSymbol(self) -> Incomplete: ... + def getParent(self) -> Incomplete: ... + def getPayload(self) -> Incomplete: ... + def getSourceInterval(self) -> Incomplete: ... + def getChildCount(self) -> Incomplete: ... + def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... + def getText(self) -> Incomplete: ... class ErrorNodeImpl(TerminalNodeImpl, ErrorNode): def __init__(self, token: Token) -> None: ... - def accept(self, visitor: ParseTreeVisitor): ... + def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... class ParseTreeWalker: DEFAULT: Incomplete - def walk(self, listener: ParseTreeListener, t: ParseTree): ... - def enterRule(self, listener: ParseTreeListener, r: RuleNode): ... - def exitRule(self, listener: ParseTreeListener, r: RuleNode): ... + def walk(self, listener: ParseTreeListener, t: ParseTree) -> Incomplete: ... + def enterRule(self, listener: ParseTreeListener, r: RuleNode) -> Incomplete: ... + def exitRule(self, listener: ParseTreeListener, r: RuleNode) -> Incomplete: ... 
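The Tree.pyi hunk above covers the listener/walker surface these stubs describe. A minimal sketch against those signatures, assuming the runtime package is installed; `PrintingListener` is an invented name, and the walk call is left commented out because `tree` would have to come from a generated parser:

    from antlr4.tree.Tree import ParseTreeListener, ParseTreeWalker, TerminalNode

    class PrintingListener(ParseTreeListener):
        def enterEveryRule(self, ctx) -> None:
            print("enter", type(ctx).__name__)

        def visitTerminal(self, node: TerminalNode) -> None:
            print("token", node.getText())

    # ParseTreeWalker.DEFAULT.walk(PrintingListener(), tree)  # `tree` produced by a generated parser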
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi index 5b5ffa4813be..835f35d5af4d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi @@ -14,18 +14,18 @@ Parser: Incomplete class Trees: @classmethod - def toStringTree(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...): ... + def toStringTree(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...) -> Incomplete: ... @classmethod - def getNodeText(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...): ... + def getNodeText(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...) -> Incomplete: ... @classmethod - def getChildren(cls, t: Tree): ... + def getChildren(cls, t: Tree) -> Incomplete: ... @classmethod - def getAncestors(cls, t: Tree): ... + def getAncestors(cls, t: Tree) -> Incomplete: ... @classmethod - def findAllTokenNodes(cls, t: ParseTree, ttype: int): ... + def findAllTokenNodes(cls, t: ParseTree, ttype: int) -> Incomplete: ... @classmethod - def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int): ... + def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int) -> Incomplete: ... @classmethod - def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool): ... + def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool) -> Incomplete: ... @classmethod - def descendants(cls, t: ParseTree): ... + def descendants(cls, t: ParseTree) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi index 67f0448bf3ed..4306522c0991 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi @@ -28,11 +28,11 @@ class XPath: path: Incomplete elements: Incomplete def __init__(self, parser: Parser, path: str) -> None: ... - def split(self, path: str): ... - def getXPathElement(self, wordToken: Token, anywhere: bool): ... + def split(self, path: str) -> Incomplete: ... + def getXPathElement(self, wordToken: Token, anywhere: bool) -> Incomplete: ... @staticmethod - def findAll(tree: ParseTree, xpath: str, parser: Parser): ... - def evaluate(self, t: ParseTree): ... + def findAll(tree: ParseTree, xpath: str, parser: Parser) -> Incomplete: ... + def evaluate(self, t: ParseTree) -> Incomplete: ... class XPathElement: nodeName: Incomplete @@ -42,27 +42,27 @@ class XPathElement: class XPathRuleAnywhereElement(XPathElement): ruleIndex: Incomplete def __init__(self, ruleName: str, ruleIndex: int) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... class XPathRuleElement(XPathElement): ruleIndex: Incomplete def __init__(self, ruleName: str, ruleIndex: int) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... class XPathTokenAnywhereElement(XPathElement): tokenType: Incomplete def __init__(self, ruleName: str, tokenType: int) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... class XPathTokenElement(XPathElement): tokenType: Incomplete def __init__(self, ruleName: str, tokenType: int) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... class XPathWildcardAnywhereElement(XPathElement): def __init__(self) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... 
class XPathWildcardElement(XPathElement): def __init__(self) -> None: ... - def evaluate(self, t: ParseTree): ... + def evaluate(self, t: ParseTree) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index 6ff4c371e1b4..1d607314bd65 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -4,7 +4,7 @@ from typing import TextIO from antlr4 import * -def serializedATN(): ... +def serializedATN() -> Incomplete: ... class XPathLexer(Lexer): atn: Incomplete @@ -24,6 +24,6 @@ class XPathLexer(Lexer): ruleNames: Incomplete grammarFileName: str def __init__(self, input: Incomplete | None = ..., output: TextIO = ...) -> None: ... - def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... + def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> Incomplete: ... type: Incomplete - def ID_action(self, localctx: RuleContext, actionIndex: int): ... + def ID_action(self, localctx: RuleContext, actionIndex: int) -> Incomplete: ... From bc92839971c18674621f087239b8ed11c63ae304 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 23 Dec 2023 13:40:21 +0000 Subject: [PATCH 09/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- .../antlr4/CommonTokenFactory.pyi | 4 +- .../antlr4/TokenStreamRewriter.pyi | 4 +- .../antlr4-python3-runtime/antlr4/_pygrun.pyi | 2 +- .../antlr4/atn/ATNDeserializer.pyi | 4 +- .../antlr4/atn/ATNType.pyi | 1 - .../antlr4/atn/ParserATNSimulator.pyi | 20 +++- .../antlr4/atn/PredictionMode.pyi | 1 - .../antlr4/error/DiagnosticErrorListener.pyi | 9 +- .../antlr4/error/ErrorListener.pyi | 92 +++++++++++++++++-- 9 files changed, 116 insertions(+), 21 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index d7e83bd94db9..529ca4a84065 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -8,5 +8,7 @@ class CommonTokenFactory(TokenFactory): DEFAULT: Incomplete copyText: Incomplete def __init__(self, copyText: bool = ...) -> None: ... - def create(self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int) -> Incomplete: ... + def create( + self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int + ) -> Incomplete: ... def createThin(self, type: int, text: str) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index 3fc9aff6495e..49469219172d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -22,7 +22,9 @@ class TokenStreamRewriter: def replaceIndex(self, index: Incomplete, text: Incomplete) -> None: ... def replaceRange(self, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... def replaceSingleToken(self, token: Incomplete, text: Incomplete) -> None: ... - def replaceRangeTokens(self, from_token: Incomplete, to_token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... 
+ def replaceRangeTokens( + self, from_token: Incomplete, to_token: Incomplete, text: Incomplete, program_name: Incomplete = ... + ) -> None: ... def replace(self, program_name: Incomplete, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... def deleteToken(self, token: Incomplete) -> None: ... def deleteIndex(self, index: Incomplete) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi index 3cb81e4ceed8..f96a012835bc 100644 --- a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi @@ -1,5 +1,5 @@ - from _typeshed import Incomplete + from antlr4 import * def beautify_lisp_string(in_string: Incomplete) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index 9da95395edc9..e9974e8da195 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -34,7 +34,9 @@ class ATNDeserializer: def checkCondition(self, condition: bool, message: Incomplete | None = ...) -> Incomplete: ... def readInt(self) -> Incomplete: ... edgeFactories: Incomplete - def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]) -> Incomplete: ... + def edgeFactory( + self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete] + ) -> Incomplete: ... stateFactories: Incomplete def stateFactory(self, type: int, ruleIndex: int) -> Incomplete: ... CHANNEL: int diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi index 4322de176e7d..4d3ea3d48948 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi @@ -1,4 +1,3 @@ - from _typeshed import Incomplete from enum import IntEnum diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index ca93094d6923..d4e1db588af3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -47,7 +47,9 @@ class ParserATNSimulator(ATNSimulator): ) -> None: ... def reset(self) -> None: ... def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext) -> Incomplete: ... - def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext) -> Incomplete: ... + def execATN( + self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext + ) -> Incomplete: ... def getExistingTargetState(self, previousD: DFAState, t: int) -> Incomplete: ... def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int) -> Incomplete: ... def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState) -> Incomplete: ... @@ -61,10 +63,14 @@ class ParserATNSimulator(ATNSimulator): def getReachableTarget(self, trans: Transition, ttype: int) -> Incomplete: ... def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int) -> Incomplete: ... def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]) -> Incomplete: ... 
- def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext) -> Incomplete: ... + def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule( + self, configs: ATNConfigSet, outerContext: ParserRuleContext + ) -> Incomplete: ... def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet) -> Incomplete: ... def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext) -> Incomplete: ... - def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool) -> Incomplete: ... + def evalSemanticContext( + self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool + ) -> Incomplete: ... def closure( self, config: ATNConfig, @@ -113,14 +119,18 @@ class ParserATNSimulator(ATNSimulator): def getTokenName(self, t: int) -> Incomplete: ... def getLookaheadName(self, input: TokenStream) -> Incomplete: ... def dumpDeadEndConfigs(self, nvae: NoViableAltException) -> Incomplete: ... - def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int) -> Incomplete: ... + def noViableAlt( + self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int + ) -> Incomplete: ... def getUniqueAlt(self, configs: ATNConfigSet) -> Incomplete: ... def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState) -> Incomplete: ... def addDFAState(self, dfa: DFA, D: DFAState) -> Incomplete: ... def reportAttemptingFullContext( self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int ) -> Incomplete: ... - def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int) -> Incomplete: ... + def reportContextSensitivity( + self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int + ) -> Incomplete: ... def reportAmbiguity( self, dfa: DFA, diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index b302a42b2cfe..92a86d4c5f45 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -1,4 +1,3 @@ - from _typeshed import Incomplete from enum import Enum diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index 20f0db5a509f..8109a8ea1ce1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -8,7 +8,14 @@ class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete def __init__(self, exactOnly: bool = ...) -> None: ... def reportAmbiguity( - self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet + self, + recognizer: Incomplete, + dfa: DFA, + startIndex: int, + stopIndex: int, + exact: bool, + ambigAlts: set[int], + configs: ATNConfigSet, ) -> Incomplete: ... 
def reportAttemptingFullContext( self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi index b198d86663b6..9984ec5f5d9e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi @@ -1,19 +1,93 @@ from _typeshed import Incomplete class ErrorListener: - def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... - def reportAmbiguity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, exact: Incomplete, ambigAlts: Incomplete, configs: Incomplete) -> None: ... - def reportAttemptingFullContext(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, conflictingAlts: Incomplete, configs: Incomplete) -> None: ... - def reportContextSensitivity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, prediction: Incomplete, configs: Incomplete) -> None: ... + def syntaxError( + self, + recognizer: Incomplete, + offendingSymbol: Incomplete, + line: Incomplete, + column: Incomplete, + msg: Incomplete, + e: Incomplete, + ) -> None: ... + def reportAmbiguity( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + exact: Incomplete, + ambigAlts: Incomplete, + configs: Incomplete, + ) -> None: ... + def reportAttemptingFullContext( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + conflictingAlts: Incomplete, + configs: Incomplete, + ) -> None: ... + def reportContextSensitivity( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + prediction: Incomplete, + configs: Incomplete, + ) -> None: ... class ConsoleErrorListener(ErrorListener): INSTANCE: Incomplete - def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... + def syntaxError( + self, + recognizer: Incomplete, + offendingSymbol: Incomplete, + line: Incomplete, + column: Incomplete, + msg: Incomplete, + e: Incomplete, + ) -> None: ... class ProxyErrorListener(ErrorListener): delegates: Incomplete def __init__(self, delegates: Incomplete) -> None: ... - def syntaxError(self, recognizer: Incomplete, offendingSymbol: Incomplete, line: Incomplete, column: Incomplete, msg: Incomplete, e: Incomplete) -> None: ... - def reportAmbiguity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, exact: Incomplete, ambigAlts: Incomplete, configs: Incomplete) -> None: ... - def reportAttemptingFullContext(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, conflictingAlts: Incomplete, configs: Incomplete) -> None: ... - def reportContextSensitivity(self, recognizer: Incomplete, dfa: Incomplete, startIndex: Incomplete, stopIndex: Incomplete, prediction: Incomplete, configs: Incomplete) -> None: ... + def syntaxError( + self, + recognizer: Incomplete, + offendingSymbol: Incomplete, + line: Incomplete, + column: Incomplete, + msg: Incomplete, + e: Incomplete, + ) -> None: ... 
+ def reportAmbiguity( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + exact: Incomplete, + ambigAlts: Incomplete, + configs: Incomplete, + ) -> None: ... + def reportAttemptingFullContext( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + conflictingAlts: Incomplete, + configs: Incomplete, + ) -> None: ... + def reportContextSensitivity( + self, + recognizer: Incomplete, + dfa: Incomplete, + startIndex: Incomplete, + stopIndex: Incomplete, + prediction: Incomplete, + configs: Incomplete, + ) -> None: ... From ae8c1ee6a02f8c5de633664e5fede575abfda5b1 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Sun, 24 Dec 2023 09:52:07 -0600 Subject: [PATCH 10/34] =?UTF-8?q?Added=20=E2=80=99=20|=20None=20=3D=20None?= =?UTF-8?q?=E2=80=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../antlr4/BufferedTokenStream.pyi | 4 ++-- .../antlr4/InputStream.pyi | 1 + .../antlr4/LL1Analyzer.pyi | 2 +- .../antlr4/ListTokenSource.pyi | 2 +- stubs/antlr4-python3-runtime/antlr4/Parser.pyi | 6 +++--- .../antlr4/ParserRuleContext.pyi | 4 ++-- .../antlr4/PredictionContext.pyi | 4 ++-- .../antlr4/RuleContext.pyi | 4 ++-- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 2 +- .../antlr4-python3-runtime/antlr4/atn/ATN.pyi | 2 +- .../antlr4/atn/ATNConfig.pyi | 18 +++++++++--------- .../antlr4/atn/ATNDeserializationOptions.pyi | 2 +- .../antlr4/atn/ATNDeserializer.pyi | 2 +- .../antlr4/atn/LexerATNSimulator.pyi | 2 +- .../antlr4-python3-runtime/antlr4/dfa/DFA.pyi | 2 +- .../antlr4/dfa/DFASerializer.pyi | 2 +- .../antlr4/error/Errors.pyi | 14 +++++++------- .../antlr4/tree/Chunk.pyi | 2 +- .../antlr4/tree/RuleTagToken.pyi | 2 +- .../antlr4/tree/TokenTagToken.pyi | 2 +- .../antlr4/tree/Trees.pyi | 4 ++-- .../antlr4/xpath/XPathLexer.pyi | 2 +- 22 files changed, 43 insertions(+), 42 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index ab91944347f8..2938c22a8183 100644 --- a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -21,7 +21,7 @@ class BufferedTokenStream(TokenStream): def consume(self) -> None: ... def sync(self, i: int) -> bool: ... def fetch(self, n: int) -> int: ... - def getTokens(self, start: int, stop: int, types: set[int] = ...) -> list[Token]: ... + def getTokens(self, start: int, stop: int, types: set[int] | None = None) -> list[Token]: ... def LA(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... def LT(self, k: int) -> Token | None: ... @@ -35,5 +35,5 @@ class BufferedTokenStream(TokenStream): def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...) -> Incomplete: ... def filterForChannel(self, left: int, right: int, channel: int) -> Incomplete: ... def getSourceName(self) -> Incomplete: ... - def getText(self, start: int = ..., stop: int = ...) -> Incomplete: ... + def getText(self, start: int | None = None, stop: int | None = None) -> Incomplete: ... def fill(self) -> None: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi index affc47edd680..2c7b564e02e3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi @@ -5,6 +5,7 @@ from antlr4.Token import Token as Token class InputStream: name: str strdata: Incomplete + data: Incomplete def __init__(self, data: str) -> None: ... @property def index(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi index db26b45bf437..1f2bbfe820f5 100644 --- a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi @@ -23,4 +23,4 @@ class LL1Analyzer: atn: Incomplete def __init__(self, atn: ATN) -> None: ... def getDecisionLookahead(self, s: ATNState) -> Incomplete: ... - def LOOK(self, s: ATNState, stopState: ATNState = ..., ctx: RuleContext = ...) -> Incomplete: ... + def LOOK(self, s: ATNState, stopState: ATNState | None = None, ctx: RuleContext | None = None) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi index de3dc02e6a07..92ae08a2e78e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi @@ -9,7 +9,7 @@ class ListTokenSource(TokenSource): sourceName: Incomplete pos: int eofToken: Incomplete - def __init__(self, tokens: list[Token], sourceName: str = ...) -> None: ... + def __init__(self, tokens: list[Token], sourceName: str | None = None) -> None: ... @property def column(self) -> Incomplete: ... def nextToken(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index f64b4f9a053e..1a363507fa68 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -43,13 +43,13 @@ class Parser(Recognizer): def getTokenFactory(self) -> Incomplete: ... def setTokenFactory(self, factory: TokenFactory) -> Incomplete: ... def getATNWithBypassAlts(self) -> Incomplete: ... - def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer = ...) -> Incomplete: ... + def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None) -> Incomplete: ... def getInputStream(self) -> Incomplete: ... def setInputStream(self, input: InputStream) -> Incomplete: ... def getTokenStream(self) -> Incomplete: ... def setTokenStream(self, input: TokenStream) -> Incomplete: ... def getCurrentToken(self) -> Incomplete: ... - def notifyErrorListeners(self, msg: str, offendingToken: Token = ..., e: RecognitionException = ...) -> Incomplete: ... + def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None) -> Incomplete: ... def consume(self) -> Incomplete: ... def addContextToParseTree(self) -> None: ... state: Incomplete @@ -67,7 +67,7 @@ class Parser(Recognizer): def getExpectedTokens(self) -> Incomplete: ... def getExpectedTokensWithinCurrentRule(self) -> Incomplete: ... def getRuleIndex(self, ruleName: str) -> Incomplete: ... - def getRuleInvocationStack(self, p: RuleContext = ...) -> Incomplete: ... + def getRuleInvocationStack(self, p: RuleContext | None = None) -> Incomplete: ... def getDFAStrings(self) -> Incomplete: ... def dumpDFA(self) -> None: ... 
def getSourceName(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index c4dac4bd9065..6926c89eb534 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -17,7 +17,7 @@ class ParserRuleContext(RuleContext): start: Incomplete stop: Incomplete exception: Incomplete - def __init__(self, parent: ParserRuleContext = ..., invokingStateNumber: int = ...) -> None: ... + def __init__(self, parent: ParserRuleContext | None = None, invokingStateNumber: int | None = None) -> None: ... parentCtx: Incomplete invokingState: Incomplete def copyFrom(self, ctx: ParserRuleContext) -> Incomplete: ... @@ -27,7 +27,7 @@ class ParserRuleContext(RuleContext): def removeLastChild(self) -> None: ... def addTokenNode(self, token: Token) -> Incomplete: ... def addErrorNode(self, badToken: Token) -> Incomplete: ... - def getChild(self, i: int, ttype: type = ...) -> Incomplete: ... + def getChild(self, i: int, ttype: type | None = None) -> Incomplete: ... def getChildren(self, predicate: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... def getToken(self, ttype: int, i: int) -> Incomplete: ... def getTokens(self, ttype: int) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index b41e7d06d853..0c51a3776781 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -56,7 +56,7 @@ class ArrayPredictionContext(PredictionContext): def __eq__(self, other: Incomplete) -> Incomplete: ... def __hash__(self) -> Incomplete: ... -def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext = ...) -> Incomplete: ... +def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext | None = None) -> Incomplete: ... def merge( a: PredictionContext, b: PredictionContext, @@ -81,5 +81,5 @@ def getCachedPredictionContext( context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] ) -> Incomplete: ... def getAllContextNodes( - context: PredictionContext, nodes: list[Incomplete] = ..., visited: dict[PredictionContext, PredictionContext] = ... + context: PredictionContext, nodes: list[Incomplete] | None = None, visited: dict[PredictionContext, PredictionContext] | None = None ) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index 0645a2eeddfa..7abb5833c10d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -10,7 +10,7 @@ class RuleContext(RuleNode): EMPTY: Incomplete parentCtx: Incomplete invokingState: Incomplete - def __init__(self, parent: RuleContext = ..., invokingState: int = ...) -> None: ... + def __init__(self, parent: RuleContext | None = None, invokingState: int = ...) -> None: ... def depth(self) -> Incomplete: ... def isEmpty(self) -> Incomplete: ... def getSourceInterval(self) -> Incomplete: ... @@ -24,5 +24,5 @@ class RuleContext(RuleNode): def getChildCount(self) -> Incomplete: ... def getChildren(self) -> Generator[Incomplete, None, None]: ... def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... - def toStringTree(self, ruleNames: list[Incomplete] = ..., recog: Parser = ...) 
-> Incomplete: ... + def toStringTree(self, ruleNames: list[Incomplete] | None = None, recog: Parser | None = None) -> Incomplete: ... def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index eba0be29ba3e..6f179f6fc7fc 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -34,7 +34,7 @@ class CommonToken(Token): line: Incomplete column: Incomplete def __init__( - self, source: tuple[Incomplete] = ..., type: int = ..., channel: int = ..., start: int = ..., stop: int = ... + self, source: tuple[Incomplete, Incomplete] = ..., type: int | None = None, channel: int = ..., start: int = ..., stop: int = ... ) -> None: ... def clone(self) -> Incomplete: ... @property diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi index b5a76ea6bb2f..abb946de44dd 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi @@ -21,7 +21,7 @@ class ATN: def __init__(self, grammarType: ATNType, maxTokenType: int) -> None: ... def nextTokensInContext(self, s: ATNState, ctx: RuleContext) -> Incomplete: ... def nextTokensNoContext(self, s: ATNState) -> Incomplete: ... - def nextTokens(self, s: ATNState, ctx: RuleContext = ...) -> Incomplete: ... + def nextTokens(self, s: ATNState, ctx: RuleContext | None = None) -> Incomplete: ... def addState(self, state: ATNState) -> Incomplete: ... def removeState(self, state: ATNState) -> Incomplete: ... def defineDecisionState(self, s: DecisionState) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi index df9b16d0edb9..00bef2fbef81 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi @@ -14,11 +14,11 @@ class ATNConfig: precedenceFilterSuppressed: Incomplete def __init__( self, - state: ATNState = ..., - alt: int = ..., - context: PredictionContext = ..., - semantic: SemanticContext = ..., - config: ATNConfig = ..., + state: ATNState | None = None, + alt: int | None = None, + context: PredictionContext | None = None, + semantic: SemanticContext | None = None, + config: ATNConfig | None = None, ) -> None: ... def __eq__(self, other: Incomplete) -> Incomplete: ... def __hash__(self) -> Incomplete: ... @@ -31,11 +31,11 @@ class LexerATNConfig(ATNConfig): def __init__( self, state: ATNState, - alt: int = ..., - context: PredictionContext = ..., + alt: int | None = None, + context: PredictionContext | None = None, semantic: SemanticContext = ..., - lexerActionExecutor: LexerActionExecutor = ..., - config: LexerATNConfig = ..., + lexerActionExecutor: LexerActionExecutor | None = None, + config: LexerATNConfig | None = None, ) -> None: ... def __hash__(self) -> Incomplete: ... def __eq__(self, other: Incomplete) -> Incomplete: ... 
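The `| None = None` rewrites in this commit (for example `LL1Analyzer.LOOK` and the `ATNConfig` constructor above) spell out that the runtime defaults really are `None`, instead of hiding them behind `= ...`. A small library-agnostic sketch of why that matters to callers; `lookup_old` and `lookup_new` are invented names written in .pyi-style signature form:

    from __future__ import annotations

    def lookup_old(key: str, default: str = ...) -> str: ...  # old stub style: a default exists, but its type/value is unclear
    def lookup_new(key: str, default: str | None = None) -> str | None: ...  # new style: passing None is explicitly allowed

    # A checker now accepts lookup_new("k", None) and knows the result may be None;
    # with lookup_old both facts had to be guessed from the implementation.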
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi index b041fc0efc41..737a4bfeb004 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi @@ -5,5 +5,5 @@ class ATNDeserializationOptions: readonly: bool verifyATN: Incomplete generateRuleBypassTransitions: Incomplete - def __init__(self, copyFrom: ATNDeserializationOptions = ...) -> None: ... + def __init__(self, copyFrom: ATNDeserializationOptions | None = None) -> None: ... def __setattr__(self, key: Incomplete, value: Incomplete) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index e9974e8da195..c8bd1234d221 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -13,7 +13,7 @@ SERIALIZED_VERSION: int class ATNDeserializer: deserializationOptions: Incomplete - def __init__(self, options: ATNDeserializationOptions = ...) -> None: ... + def __init__(self, options: ATNDeserializationOptions | None = None) -> None: ... data: Incomplete pos: int def deserialize(self, data: list[int]) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index 60c66048eb9d..75de75cfd131 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -81,7 +81,7 @@ class LexerATNSimulator(ATNSimulator): ) -> Incomplete: ... def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool) -> Incomplete: ... def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState) -> Incomplete: ... - def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState = ..., cfgs: ATNConfigSet = ...) -> DFAState: ... + def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState | None = None, cfgs: ATNConfigSet | None = None) -> DFAState: ... def addDFAState(self, configs: ATNConfigSet) -> DFAState: ... def getDFA(self, mode: int) -> Incomplete: ... def getText(self, input: InputStream) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi index e76157b644c7..f2fcfef071d7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi @@ -17,5 +17,5 @@ class DFA: @property def states(self) -> Incomplete: ... def sortedStates(self) -> Incomplete: ... - def toString(self, literalNames: list[str] = ..., symbolicNames: list[str] = ...) -> Incomplete: ... + def toString(self, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> Incomplete: ... def toLexerString(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi index e9e6b441d978..e7cda2037c92 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi @@ -8,7 +8,7 @@ class DFASerializer: dfa: Incomplete literalNames: Incomplete symbolicNames: Incomplete - def __init__(self, dfa: DFA, literalNames: list[str] = ..., symbolicNames: list[str] = ...) -> None: ... 
+ def __init__(self, dfa: DFA, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> None: ... def getEdgeLabel(self, i: int) -> Incomplete: ... def getStateString(self, s: DFAState) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index 860cef8871f1..2e1546be2fdd 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -30,7 +30,7 @@ class RecognitionException(Exception): offendingToken: Incomplete offendingState: int def __init__( - self, message: str = ..., recognizer: Recognizer = ..., input: InputStream = ..., ctx: ParserRulecontext = ... + self, message: str | None = None, recognizer: Recognizer | None = None, input: InputStream | None = None, ctx: ParserRulecontext | None = None ) -> None: ... def getExpectedTokens(self) -> Incomplete: ... @@ -47,11 +47,11 @@ class NoViableAltException(RecognitionException): def __init__( self, recognizer: Parser, - input: TokenStream = ..., - startToken: Token = ..., - offendingToken: Token = ..., - deadEndConfigs: ATNConfigSet = ..., - ctx: ParserRuleContext = ..., + input: TokenStream | None = None, + startToken: Token | None = None, + offendingToken: Token | None = None, + deadEndConfigs: ATNConfigSet | None = None, + ctx: ParserRuleContext | None = None, ) -> None: ... class InputMismatchException(RecognitionException): @@ -63,7 +63,7 @@ class FailedPredicateException(RecognitionException): predicateIndex: Incomplete predicate: Incomplete offendingToken: Incomplete - def __init__(self, recognizer: Parser, predicate: str = ..., message: str = ...) -> None: ... + def __init__(self, recognizer: Parser, predicate: str | None = None, message: str | None = None) -> None: ... def formatMessage(self, predicate: str, message: str) -> Incomplete: ... class ParseCancellationException(CancellationException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi index e494274ac701..a8f1b30f938f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Chunk.pyi @@ -5,7 +5,7 @@ class Chunk: ... class TagChunk(Chunk): tag: Incomplete label: Incomplete - def __init__(self, tag: str, label: str = ...) -> None: ... + def __init__(self, tag: str, label: str | None = None) -> None: ... class TextChunk(Chunk): text: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi index c4bf5b594dd7..ab51c9a94b7c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi @@ -13,5 +13,5 @@ class RuleTagToken(Token): column: int label: Incomplete ruleName: Incomplete - def __init__(self, ruleName: str, bypassTokenType: int, label: str = ...) -> None: ... + def __init__(self, ruleName: str, bypassTokenType: int, label: str | None = None) -> None: ... def getText(self) -> Incomplete: ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi index f49b1901405b..f6f91298b954 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi @@ -5,5 +5,5 @@ from antlr4.Token import CommonToken as CommonToken class TokenTagToken(CommonToken): tokenName: Incomplete label: Incomplete - def __init__(self, tokenName: str, type: int, label: str = ...) -> None: ... + def __init__(self, tokenName: str, type: int, label: str | None = None) -> None: ... def getText(self) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi index 835f35d5af4d..76a81babce36 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi @@ -14,9 +14,9 @@ Parser: Incomplete class Trees: @classmethod - def toStringTree(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...) -> Incomplete: ... + def toStringTree(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> Incomplete: ... @classmethod - def getNodeText(cls, t: Tree, ruleNames: list[str] = ..., recog: Parser = ...) -> Incomplete: ... + def getNodeText(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> Incomplete: ... @classmethod def getChildren(cls, t: Tree) -> Incomplete: ... @classmethod diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index 1d607314bd65..dc03efa81bfd 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -23,7 +23,7 @@ class XPathLexer(Lexer): symbolicNames: Incomplete ruleNames: Incomplete grammarFileName: str - def __init__(self, input: Incomplete | None = ..., output: TextIO = ...) -> None: ... + def __init__(self, input: Incomplete | None = None, output: TextIO = ...) -> None: ... def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> Incomplete: ... type: Incomplete def ID_action(self, localctx: RuleContext, actionIndex: int) -> Incomplete: ... From a9d0273a02e777ecb9af2e720193393aa1ca2970 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 24 Dec 2023 15:52:48 +0000 Subject: [PATCH 11/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- stubs/antlr4-python3-runtime/antlr4/Parser.pyi | 4 +++- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 4 +++- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 7 ++++++- stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi | 6 +++++- 4 files changed, 17 insertions(+), 4 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index 1a363507fa68..cae244a25f9e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -49,7 +49,9 @@ class Parser(Recognizer): def getTokenStream(self) -> Incomplete: ... def setTokenStream(self, input: TokenStream) -> Incomplete: ... def getCurrentToken(self) -> Incomplete: ... - def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None) -> Incomplete: ... 
+ def notifyErrorListeners( + self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None + ) -> Incomplete: ... def consume(self) -> Incomplete: ... def addContextToParseTree(self) -> None: ... state: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 0c51a3776781..b8b04af4877f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -81,5 +81,7 @@ def getCachedPredictionContext( context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] ) -> Incomplete: ... def getAllContextNodes( - context: PredictionContext, nodes: list[Incomplete] | None = None, visited: dict[PredictionContext, PredictionContext] | None = None + context: PredictionContext, + nodes: list[Incomplete] | None = None, + visited: dict[PredictionContext, PredictionContext] | None = None, ) -> Incomplete: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index 6f179f6fc7fc..ab4801fe9162 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -34,7 +34,12 @@ class CommonToken(Token): line: Incomplete column: Incomplete def __init__( - self, source: tuple[Incomplete, Incomplete] = ..., type: int | None = None, channel: int = ..., start: int = ..., stop: int = ... + self, + source: tuple[Incomplete, Incomplete] = ..., + type: int | None = None, + channel: int = ..., + start: int = ..., + stop: int = ..., ) -> None: ... def clone(self) -> Incomplete: ... @property diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index 2e1546be2fdd..d453b19cd224 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -30,7 +30,11 @@ class RecognitionException(Exception): offendingToken: Incomplete offendingState: int def __init__( - self, message: str | None = None, recognizer: Recognizer | None = None, input: InputStream | None = None, ctx: ParserRulecontext | None = None + self, + message: str | None = None, + recognizer: Recognizer | None = None, + input: InputStream | None = None, + ctx: ParserRulecontext | None = None, ) -> None: ... def getExpectedTokens(self) -> Incomplete: ... 
From b19f2f2b972d7a24d27508708b03343d366c5324 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Sun, 24 Dec 2023 15:12:21 -0600 Subject: [PATCH 12/34] No re-export --- stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi | 2 +- stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi | 2 +- stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index c8bd1234d221..153daa731808 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from io import StringIO as StringIO +from io import StringIO from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi index 4306522c0991..25ac3fc9872b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from io import StringIO as StringIO +from io import StringIO from antlr4 import ( DFA as DFA, diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index dc03efa81bfd..21b9f1212f63 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -1,5 +1,5 @@ from _typeshed import Incomplete -from io import StringIO as StringIO +from io import StringIO from typing import TextIO from antlr4 import * From 50f8ddbb5ca4acdcd5b261606e265e70f093147a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 24 Dec 2023 21:13:00 +0000 Subject: [PATCH 13/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi | 1 - stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi | 1 - stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi | 1 - 3 files changed, 3 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index 153daa731808..809da83a4722 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -1,5 +1,4 @@ from _typeshed import Incomplete -from io import StringIO from antlr4.atn.ATN import ATN as ATN from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi index 25ac3fc9872b..496b702270c3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi @@ -1,5 +1,4 @@ from _typeshed import Incomplete -from io import StringIO from antlr4 import ( DFA as DFA, diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index 21b9f1212f63..b09ffe0a3e27 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -1,5 +1,4 @@ from 
_typeshed import Incomplete -from io import StringIO from typing import TextIO from antlr4 import * From 9f184a131c90cbcaefcdce41aadcb2e0cb41a9cd Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Sun, 24 Dec 2023 18:21:23 -0600 Subject: [PATCH 14/34] Update stubs/antlr4-python3-runtime/METADATA.toml Co-authored-by: Alex Waygood --- stubs/antlr4-python3-runtime/METADATA.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/antlr4-python3-runtime/METADATA.toml b/stubs/antlr4-python3-runtime/METADATA.toml index 9f1f20021b62..db215adfe920 100644 --- a/stubs/antlr4-python3-runtime/METADATA.toml +++ b/stubs/antlr4-python3-runtime/METADATA.toml @@ -2,4 +2,5 @@ version = "4.13.*" upstream_repository = "https://github.com/antlr/antlr4" [tool.stubtest] +ignore_missing_stub = true platforms = ["linux", "win32"] From 2f19d4fe3fc4609dea9edae51817ac21617d99ad Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Thu, 28 Dec 2023 12:43:01 +0000 Subject: [PATCH 15/34] Add antlr4 to the excludelist for stricter pyright settings --- pyrightconfig.stricter.json | 1 + 1 file changed, 1 insertion(+) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 85d904e72d17..7f85e397c621 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -23,6 +23,7 @@ "stdlib/xml/dom/minidom.pyi", "stdlib/xml/dom/pulldom.pyi", "stdlib/xml/sax", + "stubs/antlr4", "stubs/aws-xray-sdk", "stubs/beautifulsoup4", "stubs/bleach", From 4a290241d46d2e6d3f3dcfe25288b80f9e9f1a8a Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Thu, 28 Dec 2023 12:49:38 +0000 Subject: [PATCH 16/34] Codemod away the undesirable `Incomplete` annotations --- .../antlr4/BufferedTokenStream.pyi | 18 +-- .../antlr4/CommonTokenFactory.pyi | 4 +- .../antlr4/FileStream.pyi | 2 +- .../antlr4/InputStream.pyi | 16 +-- .../antlr4/IntervalSet.pyi | 24 ++-- .../antlr4/LL1Analyzer.pyi | 4 +- stubs/antlr4-python3-runtime/antlr4/Lexer.pyi | 50 ++++---- .../antlr4/ListTokenSource.pyi | 10 +- .../antlr4-python3-runtime/antlr4/Parser.pyi | 78 ++++++------ .../antlr4/ParserInterpreter.pyi | 10 +- .../antlr4/ParserRuleContext.pyi | 26 ++-- .../antlr4/PredictionContext.pyi | 58 ++++----- .../antlr4/Recognizer.pyi | 28 ++--- .../antlr4/RuleContext.pyi | 26 ++-- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 14 +-- .../antlr4/TokenStreamRewriter.pyi | 54 ++++----- stubs/antlr4-python3-runtime/antlr4/Utils.pyi | 4 +- .../antlr4-python3-runtime/antlr4/_pygrun.pyi | 2 +- .../antlr4-python3-runtime/antlr4/atn/ATN.pyi | 16 +-- .../antlr4/atn/ATNConfig.pyi | 18 +-- .../antlr4/atn/ATNConfigSet.pyi | 28 ++--- .../antlr4/atn/ATNDeserializationOptions.pyi | 2 +- .../antlr4/atn/ATNDeserializer.pyi | 38 +++--- .../antlr4/atn/ATNSimulator.pyi | 2 +- .../antlr4/atn/ATNState.pyi | 10 +- .../antlr4/atn/ATNType.pyi | 2 +- .../antlr4/atn/LexerATNSimulator.pyi | 38 +++--- .../antlr4/atn/LexerAction.pyi | 46 +++---- .../antlr4/atn/LexerActionExecutor.pyi | 10 +- .../antlr4/atn/ParserATNSimulator.pyi | 78 ++++++------ .../antlr4/atn/PredictionMode.pyi | 28 ++--- .../antlr4/atn/SemanticContext.pyi | 42 +++---- .../antlr4/atn/Transition.pyi | 30 ++--- .../antlr4-python3-runtime/antlr4/dfa/DFA.pyi | 14 +-- .../antlr4/dfa/DFASerializer.pyi | 6 +- .../antlr4/dfa/DFAState.pyi | 6 +- .../antlr4/error/DiagnosticErrorListener.pyi | 16 +-- .../antlr4/error/ErrorListener.pyi | 114 +++++++++--------- .../antlr4/error/ErrorStrategy.pyi | 62 +++++----- .../antlr4/error/Errors.pyi | 4 +- .../antlr4/tree/ParseTreeMatch.pyi | 6 +- .../antlr4/tree/ParseTreePattern.pyi | 6 
+- .../antlr4/tree/ParseTreePatternMatcher.pyi | 22 ++-- .../antlr4/tree/RuleTagToken.pyi | 2 +- .../antlr4/tree/TokenTagToken.pyi | 2 +- .../antlr4/tree/Tree.pyi | 46 +++---- .../antlr4/tree/Trees.pyi | 16 +-- .../antlr4/xpath/XPath.pyi | 20 +-- .../antlr4/xpath/XPathLexer.pyi | 6 +- 49 files changed, 582 insertions(+), 582 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index 2938c22a8183..b34d5d204f54 100644 --- a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -25,15 +25,15 @@ class BufferedTokenStream(TokenStream): def LA(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... def LT(self, k: int) -> Token | None: ... - def adjustSeekIndex(self, i: int) -> Incomplete: ... + def adjustSeekIndex(self, i: int): ... def lazyInit(self) -> None: ... def setup(self) -> None: ... - def setTokenSource(self, tokenSource: Lexer) -> Incomplete: ... - def nextTokenOnChannel(self, i: int, channel: int) -> Incomplete: ... - def previousTokenOnChannel(self, i: int, channel: int) -> Incomplete: ... - def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...) -> Incomplete: ... - def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...) -> Incomplete: ... - def filterForChannel(self, left: int, right: int, channel: int) -> Incomplete: ... - def getSourceName(self) -> Incomplete: ... - def getText(self, start: int | None = None, stop: int | None = None) -> Incomplete: ... + def setTokenSource(self, tokenSource: Lexer): ... + def nextTokenOnChannel(self, i: int, channel: int): ... + def previousTokenOnChannel(self, i: int, channel: int): ... + def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...): ... + def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...): ... + def filterForChannel(self, left: int, right: int, channel: int): ... + def getSourceName(self): ... + def getText(self, start: int | None = None, stop: int | None = None): ... def fill(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index 529ca4a84065..bacc13173696 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -10,5 +10,5 @@ class CommonTokenFactory(TokenFactory): def __init__(self, copyText: bool = ...) -> None: ... def create( self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int - ) -> Incomplete: ... - def createThin(self, type: int, text: str) -> Incomplete: ... + ): ... + def createThin(self, type: int, text: str): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi index 6bb90731b9f6..5b25bbaac9b8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi @@ -5,4 +5,4 @@ from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): fileName: Incomplete def __init__(self, fileName: str, encoding: str = ..., errors: str = ...) -> None: ... - def readDataFrom(self, fileName: str, encoding: str, errors: str = ...) -> Incomplete: ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = ...): ... 
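Together with the preceding commits (dropping the `X as X` re-export aliases so that the then-unused `StringIO` imports could be removed by pre-commit, setting `ignore_missing_stub = true` for stubtest, and excluding the package from pyright's stricter settings), this commit codemods away the explicit `Incomplete` annotations: `-> Incomplete` returns and bare `Incomplete` parameters are left unannotated, while informative annotations such as `int`, `str | None`, and `-> None` are kept. A minimal sketch of the pattern these hunks apply, using method names taken from the hunks but a hypothetical class:

    # Hypothetical stub (.pyi) excerpt before the codemod:
    from _typeshed import Incomplete

    class ExampleStreamBefore:
        def getSourceName(self) -> Incomplete: ...
        def getText(self, start: int | None = None, stop: int | None = None) -> Incomplete: ...
        def rollback(self, instruction_index: Incomplete, program_name: Incomplete) -> None: ...

    # The same excerpt after the codemod: unknown return types and untyped parameters are
    # simply left unannotated, and the annotations that carry real information stay.
    class ExampleStreamAfter:
        def getSourceName(self): ...
        def getText(self, start: int | None = None, stop: int | None = None): ...
        def rollback(self, instruction_index, program_name) -> None: ...

The hunks below apply this same transformation file by file.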
diff --git a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi index 2c7b564e02e3..d42ed870429c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/InputStream.pyi @@ -8,14 +8,14 @@ class InputStream: data: Incomplete def __init__(self, data: str) -> None: ... @property - def index(self) -> Incomplete: ... + def index(self): ... @property - def size(self) -> Incomplete: ... + def size(self): ... def reset(self) -> None: ... def consume(self) -> None: ... - def LA(self, offset: int) -> Incomplete: ... - def LT(self, offset: int) -> Incomplete: ... - def mark(self) -> Incomplete: ... - def release(self, marker: int) -> Incomplete: ... - def seek(self, _index: int) -> Incomplete: ... - def getText(self, start: int, stop: int) -> Incomplete: ... + def LA(self, offset: int): ... + def LT(self, offset: int): ... + def mark(self): ... + def release(self, marker: int): ... + def seek(self, _index: int): ... + def getText(self, start: int, stop: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi index eaa83d4c8251..2653e17ee2ab 100644 --- a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi @@ -6,16 +6,16 @@ class IntervalSet: intervals: list[range] | None readonly: bool def __init__(self) -> None: ... - def __iter__(self) -> Incomplete: ... - def __getitem__(self, item: Incomplete) -> Incomplete: ... - def addOne(self, v: int) -> Incomplete: ... - def addRange(self, v: range) -> Incomplete: ... - def addSet(self, other: IntervalSet) -> Incomplete: ... - def reduce(self, k: int) -> Incomplete: ... - def complement(self, start: int, stop: int) -> Incomplete: ... - def __contains__(self, item: Incomplete) -> bool: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def addOne(self, v: int): ... + def addRange(self, v: range): ... + def addSet(self, other: IntervalSet): ... + def reduce(self, k: int): ... + def complement(self, start: int, stop: int): ... + def __contains__(self, item) -> bool: ... def __len__(self) -> int: ... - def removeRange(self, v: Incomplete) -> None: ... - def removeOne(self, v: Incomplete) -> None: ... - def toString(self, literalNames: list[str], symbolicNames: list[str]) -> Incomplete: ... - def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int) -> Incomplete: ... + def removeRange(self, v) -> None: ... + def removeOne(self, v) -> None: ... + def toString(self, literalNames: list[str], symbolicNames: list[str]): ... + def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi index 1f2bbfe820f5..1963fd468b85 100644 --- a/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi @@ -22,5 +22,5 @@ class LL1Analyzer: HIT_PRED: Incomplete atn: Incomplete def __init__(self, atn: ATN) -> None: ... - def getDecisionLookahead(self, s: ATNState) -> Incomplete: ... - def LOOK(self, s: ATNState, stopState: ATNState | None = None, ctx: RuleContext | None = None) -> Incomplete: ... + def getDecisionLookahead(self, s: ATNState): ... + def LOOK(self, s: ATNState, stopState: ATNState | None = None, ctx: RuleContext | None = None): ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi b/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi index 7bde890442d8..41a63716ffff 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Lexer.pyi @@ -24,41 +24,41 @@ class Lexer(Recognizer, TokenSource): MAX_CHAR_VALUE: int def __init__(self, input: InputStream, output: TextIO = ...) -> None: ... def reset(self) -> None: ... - def nextToken(self) -> Incomplete: ... + def nextToken(self): ... def skip(self) -> None: ... def more(self) -> None: ... - def mode(self, m: int) -> Incomplete: ... - def pushMode(self, m: int) -> Incomplete: ... - def popMode(self) -> Incomplete: ... + def mode(self, m: int): ... + def pushMode(self, m: int): ... + def popMode(self): ... @property - def inputStream(self) -> Incomplete: ... + def inputStream(self): ... @inputStream.setter - def inputStream(self, input: InputStream) -> Incomplete: ... + def inputStream(self, input: InputStream): ... @property - def sourceName(self) -> Incomplete: ... - def emitToken(self, token: Token) -> Incomplete: ... - def emit(self) -> Incomplete: ... - def emitEOF(self) -> Incomplete: ... + def sourceName(self): ... + def emitToken(self, token: Token): ... + def emit(self): ... + def emitEOF(self): ... @property - def type(self) -> Incomplete: ... + def type(self): ... @type.setter - def type(self, type: int) -> Incomplete: ... + def type(self, type: int): ... @property - def line(self) -> Incomplete: ... + def line(self): ... @line.setter - def line(self, line: int) -> Incomplete: ... + def line(self, line: int): ... @property - def column(self) -> Incomplete: ... + def column(self): ... @column.setter - def column(self, column: int) -> Incomplete: ... - def getCharIndex(self) -> Incomplete: ... + def column(self, column: int): ... + def getCharIndex(self): ... @property - def text(self) -> Incomplete: ... + def text(self): ... @text.setter - def text(self, txt: str) -> Incomplete: ... - def getAllTokens(self) -> Incomplete: ... - def notifyListeners(self, e: LexerNoViableAltException) -> Incomplete: ... - def getErrorDisplay(self, s: str) -> Incomplete: ... - def getErrorDisplayForChar(self, c: str) -> Incomplete: ... - def getCharErrorDisplay(self, c: str) -> Incomplete: ... - def recover(self, re: RecognitionException) -> Incomplete: ... + def text(self, txt: str): ... + def getAllTokens(self): ... + def notifyListeners(self, e: LexerNoViableAltException): ... + def getErrorDisplay(self, s: str): ... + def getErrorDisplayForChar(self, c: str): ... + def getCharErrorDisplay(self, c: str): ... + def recover(self, re: RecognitionException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi index 92ae08a2e78e..935f1b738024 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi @@ -11,9 +11,9 @@ class ListTokenSource(TokenSource): eofToken: Incomplete def __init__(self, tokens: list[Token], sourceName: str | None = None) -> None: ... @property - def column(self) -> Incomplete: ... - def nextToken(self) -> Incomplete: ... + def column(self): ... + def nextToken(self): ... @property - def line(self) -> Incomplete: ... - def getInputStream(self) -> Incomplete: ... - def getSourceName(self) -> Incomplete: ... + def line(self): ... + def getInputStream(self): ... + def getSourceName(self): ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index cae244a25f9e..037b37980734 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -20,57 +20,57 @@ from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as Parse from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode class TraceListener(ParseTreeListener): - def __init__(self, parser: Incomplete) -> None: ... - def enterEveryRule(self, ctx: Incomplete) -> None: ... - def visitTerminal(self, node: Incomplete) -> None: ... - def visitErrorNode(self, node: Incomplete) -> None: ... - def exitEveryRule(self, ctx: Incomplete) -> None: ... + def __init__(self, parser) -> None: ... + def enterEveryRule(self, ctx) -> None: ... + def visitTerminal(self, node) -> None: ... + def visitErrorNode(self, node) -> None: ... + def exitEveryRule(self, ctx) -> None: ... class Parser(Recognizer): bypassAltsAtnCache: Incomplete buildParseTrees: bool def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ... def reset(self) -> None: ... - def match(self, ttype: int) -> Incomplete: ... - def matchWildcard(self) -> Incomplete: ... - def getParseListeners(self) -> Incomplete: ... - def addParseListener(self, listener: ParseTreeListener) -> Incomplete: ... - def removeParseListener(self, listener: ParseTreeListener) -> Incomplete: ... + def match(self, ttype: int): ... + def matchWildcard(self): ... + def getParseListeners(self): ... + def addParseListener(self, listener: ParseTreeListener): ... + def removeParseListener(self, listener: ParseTreeListener): ... def removeParseListeners(self) -> None: ... def triggerEnterRuleEvent(self) -> None: ... def triggerExitRuleEvent(self) -> None: ... - def getNumberOfSyntaxErrors(self) -> Incomplete: ... - def getTokenFactory(self) -> Incomplete: ... - def setTokenFactory(self, factory: TokenFactory) -> Incomplete: ... - def getATNWithBypassAlts(self) -> Incomplete: ... - def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None) -> Incomplete: ... - def getInputStream(self) -> Incomplete: ... - def setInputStream(self, input: InputStream) -> Incomplete: ... - def getTokenStream(self) -> Incomplete: ... - def setTokenStream(self, input: TokenStream) -> Incomplete: ... - def getCurrentToken(self) -> Incomplete: ... + def getNumberOfSyntaxErrors(self): ... + def getTokenFactory(self): ... + def setTokenFactory(self, factory: TokenFactory): ... + def getATNWithBypassAlts(self): ... + def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ... + def getInputStream(self): ... + def setInputStream(self, input: InputStream): ... + def getTokenStream(self): ... + def setTokenStream(self, input: TokenStream): ... + def getCurrentToken(self): ... def notifyErrorListeners( self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None - ) -> Incomplete: ... - def consume(self) -> Incomplete: ... + ): ... + def consume(self): ... def addContextToParseTree(self) -> None: ... state: Incomplete - def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> Incomplete: ... + def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... def exitRule(self) -> None: ... - def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int) -> Incomplete: ... 
- def getPrecedence(self) -> Incomplete: ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> Incomplete: ... - def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int) -> Incomplete: ... - def unrollRecursionContexts(self, parentCtx: ParserRuleContext) -> Incomplete: ... - def getInvokingContext(self, ruleIndex: int) -> Incomplete: ... - def precpred(self, localctx: RuleContext, precedence: int) -> Incomplete: ... - def inContext(self, context: str) -> Incomplete: ... - def isExpectedToken(self, symbol: int) -> Incomplete: ... - def getExpectedTokens(self) -> Incomplete: ... - def getExpectedTokensWithinCurrentRule(self) -> Incomplete: ... - def getRuleIndex(self, ruleName: str) -> Incomplete: ... - def getRuleInvocationStack(self, p: RuleContext | None = None) -> Incomplete: ... - def getDFAStrings(self) -> Incomplete: ... + def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ... + def getPrecedence(self): ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... + def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ... + def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ... + def getInvokingContext(self, ruleIndex: int): ... + def precpred(self, localctx: RuleContext, precedence: int): ... + def inContext(self, context: str): ... + def isExpectedToken(self, symbol: int): ... + def getExpectedTokens(self): ... + def getExpectedTokensWithinCurrentRule(self): ... + def getRuleIndex(self, ruleName: str): ... + def getRuleInvocationStack(self, p: RuleContext | None = None): ... + def getDFAStrings(self): ... def dumpDFA(self) -> None: ... - def getSourceName(self) -> Incomplete: ... - def setTrace(self, trace: bool) -> Incomplete: ... + def getSourceName(self): ... + def setTrace(self, trace: bool): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi index 43635239ea8a..4405ed5dfa6e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi @@ -29,8 +29,8 @@ class ParserInterpreter(Parser): self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream ) -> None: ... state: Incomplete - def parse(self, startRuleIndex: int) -> Incomplete: ... - def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int) -> Incomplete: ... - def getATNState(self) -> Incomplete: ... - def visitState(self, p: ATNState) -> Incomplete: ... - def visitRuleStopState(self, p: ATNState) -> Incomplete: ... + def parse(self, startRuleIndex: int): ... + def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ... + def getATNState(self): ... + def visitState(self, p: ATNState): ... + def visitRuleStopState(self, p: ATNState): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index 6926c89eb534..038de21261c9 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -20,21 +20,21 @@ class ParserRuleContext(RuleContext): def __init__(self, parent: ParserRuleContext | None = None, invokingStateNumber: int | None = None) -> None: ... 
parentCtx: Incomplete invokingState: Incomplete - def copyFrom(self, ctx: ParserRuleContext) -> Incomplete: ... - def enterRule(self, listener: ParseTreeListener) -> Incomplete: ... - def exitRule(self, listener: ParseTreeListener) -> Incomplete: ... - def addChild(self, child: ParseTree) -> Incomplete: ... + def copyFrom(self, ctx: ParserRuleContext): ... + def enterRule(self, listener: ParseTreeListener): ... + def exitRule(self, listener: ParseTreeListener): ... + def addChild(self, child: ParseTree): ... def removeLastChild(self) -> None: ... - def addTokenNode(self, token: Token) -> Incomplete: ... - def addErrorNode(self, badToken: Token) -> Incomplete: ... - def getChild(self, i: int, ttype: type | None = None) -> Incomplete: ... + def addTokenNode(self, token: Token): ... + def addErrorNode(self, badToken: Token): ... + def getChild(self, i: int, ttype: type | None = None): ... def getChildren(self, predicate: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... - def getToken(self, ttype: int, i: int) -> Incomplete: ... - def getTokens(self, ttype: int) -> Incomplete: ... - def getTypedRuleContext(self, ctxType: type, i: int) -> Incomplete: ... - def getTypedRuleContexts(self, ctxType: type) -> Incomplete: ... - def getChildCount(self) -> Incomplete: ... - def getSourceInterval(self) -> Incomplete: ... + def getToken(self, ttype: int, i: int): ... + def getTokens(self, ttype: int): ... + def getTypedRuleContext(self, ctxType: type, i: int): ... + def getTypedRuleContexts(self, ctxType: type): ... + def getChildCount(self): ... + def getSourceInterval(self): ... class InterpreterRuleContext(ParserRuleContext): ruleIndex: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index b8b04af4877f..255a682a3a16 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -12,76 +12,76 @@ class PredictionContext: cachedHashCode: Incomplete def __init__(self, cachedHashCode: int) -> None: ... def __len__(self) -> int: ... - def isEmpty(self) -> Incomplete: ... - def hasEmptyPath(self) -> Incomplete: ... - def getReturnState(self, index: int) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... + def isEmpty(self): ... + def hasEmptyPath(self): ... + def getReturnState(self, index: int): ... + def __hash__(self): ... -def calculateHashCode(parent: PredictionContext, returnState: int) -> Incomplete: ... -def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]) -> Incomplete: ... +def calculateHashCode(parent: PredictionContext, returnState: int): ... +def calculateListsHashCode(parents: list[PredictionContext], returnStates: list[int]): ... class PredictionContextCache: cache: Incomplete def __init__(self) -> None: ... - def add(self, ctx: PredictionContext) -> Incomplete: ... - def get(self, ctx: PredictionContext) -> Incomplete: ... + def add(self, ctx: PredictionContext): ... + def get(self, ctx: PredictionContext): ... def __len__(self) -> int: ... class SingletonPredictionContext(PredictionContext): @staticmethod - def create(parent: PredictionContext, returnState: int) -> Incomplete: ... + def create(parent: PredictionContext, returnState: int): ... parentCtx: Incomplete returnState: Incomplete def __init__(self, parent: PredictionContext, returnState: int) -> None: ... def __len__(self) -> int: ... - def getParent(self, index: int) -> Incomplete: ... 
- def getReturnState(self, index: int) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... + def getParent(self, index: int): ... + def getReturnState(self, index: int): ... + def __eq__(self, other): ... + def __hash__(self): ... class EmptyPredictionContext(SingletonPredictionContext): def __init__(self) -> None: ... - def isEmpty(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... + def isEmpty(self): ... + def __eq__(self, other): ... + def __hash__(self): ... class ArrayPredictionContext(PredictionContext): parents: Incomplete returnStates: Incomplete def __init__(self, parents: list[PredictionContext], returnStates: list[int]) -> None: ... - def isEmpty(self) -> Incomplete: ... + def isEmpty(self): ... def __len__(self) -> int: ... - def getParent(self, index: int) -> Incomplete: ... - def getReturnState(self, index: int) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... + def getParent(self, index: int): ... + def getReturnState(self, index: int): ... + def __eq__(self, other): ... + def __hash__(self): ... -def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext | None = None) -> Incomplete: ... +def PredictionContextFromRuleContext(atn: ATN, outerContext: RuleContext | None = None): ... def merge( a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -) -> Incomplete: ... +): ... def mergeSingletons( a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -) -> Incomplete: ... -def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool) -> Incomplete: ... +): ... +def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... def mergeArrays( a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], -) -> Incomplete: ... -def combineCommonParents(parents: list[PredictionContext]) -> Incomplete: ... +): ... +def combineCommonParents(parents: list[PredictionContext]): ... def getCachedPredictionContext( context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] -) -> Incomplete: ... +): ... def getAllContextNodes( context: PredictionContext, nodes: list[Incomplete] | None = None, visited: dict[PredictionContext, PredictionContext] | None = None, -) -> Incomplete: ... +): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi index 5d757e2be6b7..c5e882a19b31 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi @@ -10,20 +10,20 @@ class Recognizer: tokenTypeMapCache: Incomplete ruleIndexMapCache: Incomplete def __init__(self) -> None: ... - def extractVersion(self, version: Incomplete) -> Incomplete: ... - def checkVersion(self, toolVersion: Incomplete) -> None: ... - def addErrorListener(self, listener: Incomplete) -> None: ... - def removeErrorListener(self, listener: Incomplete) -> None: ... + def extractVersion(self, version): ... + def checkVersion(self, toolVersion) -> None: ... + def addErrorListener(self, listener) -> None: ... 
+ def removeErrorListener(self, listener) -> None: ... def removeErrorListeners(self) -> None: ... - def getTokenTypeMap(self) -> Incomplete: ... - def getRuleIndexMap(self) -> Incomplete: ... - def getTokenType(self, tokenName: str) -> Incomplete: ... - def getErrorHeader(self, e: RecognitionException) -> Incomplete: ... - def getTokenErrorDisplay(self, t: Token) -> Incomplete: ... - def getErrorListenerDispatch(self) -> Incomplete: ... - def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> Incomplete: ... - def precpred(self, localctx: RuleContext, precedence: int) -> Incomplete: ... + def getTokenTypeMap(self): ... + def getRuleIndexMap(self): ... + def getTokenType(self, tokenName: str): ... + def getErrorHeader(self, e: RecognitionException): ... + def getTokenErrorDisplay(self, t: Token): ... + def getErrorListenerDispatch(self): ... + def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... + def precpred(self, localctx: RuleContext, precedence: int): ... @property - def state(self) -> Incomplete: ... + def state(self): ... @state.setter - def state(self, atnState: int) -> Incomplete: ... + def state(self, atnState: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index 7abb5833c10d..17d4efaa5e3e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -11,18 +11,18 @@ class RuleContext(RuleNode): parentCtx: Incomplete invokingState: Incomplete def __init__(self, parent: RuleContext | None = None, invokingState: int = ...) -> None: ... - def depth(self) -> Incomplete: ... - def isEmpty(self) -> Incomplete: ... - def getSourceInterval(self) -> Incomplete: ... - def getRuleContext(self) -> Incomplete: ... - def getPayload(self) -> Incomplete: ... - def getText(self) -> Incomplete: ... - def getRuleIndex(self) -> Incomplete: ... - def getAltNumber(self) -> Incomplete: ... - def setAltNumber(self, altNumber: int) -> Incomplete: ... - def getChild(self, i: int) -> Incomplete: ... - def getChildCount(self) -> Incomplete: ... + def depth(self): ... + def isEmpty(self): ... + def getSourceInterval(self): ... + def getRuleContext(self): ... + def getPayload(self): ... + def getText(self): ... + def getRuleIndex(self): ... + def getAltNumber(self): ... + def setAltNumber(self, altNumber: int): ... + def getChild(self, i: int): ... + def getChildCount(self): ... def getChildren(self) -> Generator[Incomplete, None, None]: ... - def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... - def toStringTree(self, ruleNames: list[Incomplete] | None = None, recog: Parser | None = None) -> Incomplete: ... + def accept(self, visitor: ParseTreeVisitor): ... + def toStringTree(self, ruleNames: list[Incomplete] | None = None, recog: Parser | None = None): ... def toString(self, ruleNames: list[Incomplete], stop: RuleContext) -> str: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index ab4801fe9162..e3cfa875d437 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -17,11 +17,11 @@ class Token: column: Incomplete def __init__(self) -> None: ... @property - def text(self) -> Incomplete: ... + def text(self): ... @text.setter - def text(self, text: str) -> Incomplete: ... - def getTokenSource(self) -> Incomplete: ... - def getInputStream(self) -> Incomplete: ... 
+ def text(self, text: str): ... + def getTokenSource(self): ... + def getInputStream(self): ... class CommonToken(Token): EMPTY_SOURCE: Incomplete @@ -41,8 +41,8 @@ class CommonToken(Token): start: int = ..., stop: int = ..., ) -> None: ... - def clone(self) -> Incomplete: ... + def clone(self): ... @property - def text(self) -> Incomplete: ... + def text(self): ... @text.setter - def text(self, text: str) -> Incomplete: ... + def text(self, text: str): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index 49469219172d..87bc381128b6 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -10,46 +10,46 @@ class TokenStreamRewriter: tokens: Incomplete programs: Incomplete lastRewriteTokenIndexes: Incomplete - def __init__(self, tokens: Incomplete) -> None: ... - def getTokenStream(self) -> Incomplete: ... - def rollback(self, instruction_index: Incomplete, program_name: Incomplete) -> None: ... + def __init__(self, tokens) -> None: ... + def getTokenStream(self): ... + def rollback(self, instruction_index, program_name) -> None: ... def deleteProgram(self, program_name: Incomplete = ...) -> None: ... - def insertAfterToken(self, token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... - def insertAfter(self, index: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... - def insertBeforeIndex(self, index: Incomplete, text: Incomplete) -> None: ... - def insertBeforeToken(self, token: Incomplete, text: Incomplete, program_name: Incomplete = ...) -> None: ... - def insertBefore(self, program_name: Incomplete, index: Incomplete, text: Incomplete) -> None: ... - def replaceIndex(self, index: Incomplete, text: Incomplete) -> None: ... - def replaceRange(self, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... - def replaceSingleToken(self, token: Incomplete, text: Incomplete) -> None: ... + def insertAfterToken(self, token, text, program_name: Incomplete = ...) -> None: ... + def insertAfter(self, index, text, program_name: Incomplete = ...) -> None: ... + def insertBeforeIndex(self, index, text) -> None: ... + def insertBeforeToken(self, token, text, program_name: Incomplete = ...) -> None: ... + def insertBefore(self, program_name, index, text) -> None: ... + def replaceIndex(self, index, text) -> None: ... + def replaceRange(self, from_idx, to_idx, text) -> None: ... + def replaceSingleToken(self, token, text) -> None: ... def replaceRangeTokens( - self, from_token: Incomplete, to_token: Incomplete, text: Incomplete, program_name: Incomplete = ... + self, from_token, to_token, text, program_name: Incomplete = ... ) -> None: ... - def replace(self, program_name: Incomplete, from_idx: Incomplete, to_idx: Incomplete, text: Incomplete) -> None: ... - def deleteToken(self, token: Incomplete) -> None: ... - def deleteIndex(self, index: Incomplete) -> None: ... - def delete(self, program_name: Incomplete, from_idx: Incomplete, to_idx: Incomplete) -> None: ... - def lastRewriteTokenIndex(self, program_name: Incomplete = ...) -> Incomplete: ... - def setLastRewriteTokenIndex(self, program_name: Incomplete, i: Incomplete) -> None: ... - def getProgram(self, program_name: Incomplete) -> Incomplete: ... - def getDefaultText(self) -> Incomplete: ... - def getText(self, program_name: Incomplete, start: int, stop: int) -> Incomplete: ... 
+ def replace(self, program_name, from_idx, to_idx, text) -> None: ... + def deleteToken(self, token) -> None: ... + def deleteIndex(self, index) -> None: ... + def delete(self, program_name, from_idx, to_idx) -> None: ... + def lastRewriteTokenIndex(self, program_name: Incomplete = ...): ... + def setLastRewriteTokenIndex(self, program_name, i) -> None: ... + def getProgram(self, program_name): ... + def getDefaultText(self): ... + def getText(self, program_name, start: int, stop: int): ... class RewriteOperation: tokens: Incomplete index: Incomplete text: Incomplete instructionIndex: int - def __init__(self, tokens: Incomplete, index: Incomplete, text: str = ...) -> None: ... - def execute(self, buf: Incomplete) -> Incomplete: ... + def __init__(self, tokens, index, text: str = ...) -> None: ... + def execute(self, buf): ... class InsertBeforeOp(RewriteOperation): - def __init__(self, tokens: Incomplete, index: Incomplete, text: str = ...) -> None: ... - def execute(self, buf: Incomplete) -> Incomplete: ... + def __init__(self, tokens, index, text: str = ...) -> None: ... + def execute(self, buf): ... class InsertAfterOp(InsertBeforeOp): ... class ReplaceOp(RewriteOperation): last_index: Incomplete - def __init__(self, from_idx: Incomplete, to_idx: Incomplete, tokens: Incomplete, text: Incomplete) -> None: ... - def execute(self, buf: Incomplete) -> Incomplete: ... + def __init__(self, from_idx, to_idx, tokens, text) -> None: ... + def execute(self, buf): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi index 87c146271c03..f9c25e1591a2 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi @@ -1,4 +1,4 @@ from _typeshed import Incomplete -def str_list(val: Incomplete) -> str: ... -def escapeWhitespace(s: str, escapeSpaces: bool) -> Incomplete: ... +def str_list(val) -> str: ... +def escapeWhitespace(s: str, escapeSpaces: bool): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi index f96a012835bc..9ff24621acd5 100644 --- a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi @@ -2,5 +2,5 @@ from _typeshed import Incomplete from antlr4 import * -def beautify_lisp_string(in_string: Incomplete) -> Incomplete: ... +def beautify_lisp_string(in_string): ... def main() -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi index abb946de44dd..cdef3f41715d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATN.pyi @@ -19,11 +19,11 @@ class ATN: lexerActions: Incomplete modeToStartState: Incomplete def __init__(self, grammarType: ATNType, maxTokenType: int) -> None: ... - def nextTokensInContext(self, s: ATNState, ctx: RuleContext) -> Incomplete: ... - def nextTokensNoContext(self, s: ATNState) -> Incomplete: ... - def nextTokens(self, s: ATNState, ctx: RuleContext | None = None) -> Incomplete: ... - def addState(self, state: ATNState) -> Incomplete: ... - def removeState(self, state: ATNState) -> Incomplete: ... - def defineDecisionState(self, s: DecisionState) -> Incomplete: ... - def getDecisionState(self, decision: int) -> Incomplete: ... - def getExpectedTokens(self, stateNumber: int, ctx: RuleContext) -> Incomplete: ... + def nextTokensInContext(self, s: ATNState, ctx: RuleContext): ... + def nextTokensNoContext(self, s: ATNState): ... 
+ def nextTokens(self, s: ATNState, ctx: RuleContext | None = None): ... + def addState(self, state: ATNState): ... + def removeState(self, state: ATNState): ... + def defineDecisionState(self, s: DecisionState): ... + def getDecisionState(self, decision: int): ... + def getExpectedTokens(self, stateNumber: int, ctx: RuleContext): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi index 00bef2fbef81..99018332d164 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfig.pyi @@ -20,10 +20,10 @@ class ATNConfig: semantic: SemanticContext | None = None, config: ATNConfig | None = None, ) -> None: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def hashCodeForConfigSet(self) -> Incomplete: ... - def equalsForConfigSet(self, other: Incomplete) -> Incomplete: ... + def __eq__(self, other): ... + def __hash__(self): ... + def hashCodeForConfigSet(self): ... + def equalsForConfigSet(self, other): ... class LexerATNConfig(ATNConfig): lexerActionExecutor: Incomplete @@ -37,8 +37,8 @@ class LexerATNConfig(ATNConfig): lexerActionExecutor: LexerActionExecutor | None = None, config: LexerATNConfig | None = None, ) -> None: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def hashCodeForConfigSet(self) -> Incomplete: ... - def equalsForConfigSet(self, other: Incomplete) -> Incomplete: ... - def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState) -> Incomplete: ... + def __hash__(self): ... + def __eq__(self, other): ... + def hashCodeForConfigSet(self): ... + def equalsForConfigSet(self, other): ... + def checkNonGreedyDecision(self, source: LexerATNConfig, target: ATNState): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi index 60f500b77ae2..fc58fd8c8b8e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi @@ -23,22 +23,22 @@ class ATNConfigSet: dipsIntoOuterContext: bool cachedHashCode: int def __init__(self, fullCtx: bool = ...) -> None: ... - def __iter__(self) -> Incomplete: ... - def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...) -> Incomplete: ... - def getOrAdd(self, config: ATNConfig) -> Incomplete: ... - def getStates(self) -> Incomplete: ... - def getPredicates(self) -> Incomplete: ... - def get(self, i: int) -> Incomplete: ... - def optimizeConfigs(self, interpreter: ATNSimulator) -> Incomplete: ... - def addAll(self, coll: list[Incomplete]) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def hashConfigs(self) -> Incomplete: ... + def __iter__(self): ... + def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...): ... + def getOrAdd(self, config: ATNConfig): ... + def getStates(self): ... + def getPredicates(self): ... + def get(self, i: int): ... + def optimizeConfigs(self, interpreter: ATNSimulator): ... + def addAll(self, coll: list[Incomplete]): ... + def __eq__(self, other): ... + def __hash__(self): ... + def hashConfigs(self): ... def __len__(self) -> int: ... - def isEmpty(self) -> Incomplete: ... - def __contains__(self, config: Incomplete) -> bool: ... + def isEmpty(self): ... + def __contains__(self, config) -> bool: ... def clear(self) -> None: ... 
- def setReadonly(self, readonly: bool) -> Incomplete: ... + def setReadonly(self, readonly: bool): ... class OrderedATNConfigSet(ATNConfigSet): def __init__(self) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi index 737a4bfeb004..e1834444b474 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializationOptions.pyi @@ -6,4 +6,4 @@ class ATNDeserializationOptions: verifyATN: Incomplete generateRuleBypassTransitions: Incomplete def __init__(self, copyFrom: ATNDeserializationOptions | None = None) -> None: ... - def __setattr__(self, key: Incomplete, value: Incomplete) -> None: ... + def __setattr__(self, key, value) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index 809da83a4722..029563f11482 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -15,29 +15,29 @@ class ATNDeserializer: def __init__(self, options: ATNDeserializationOptions | None = None) -> None: ... data: Incomplete pos: int - def deserialize(self, data: list[int]) -> Incomplete: ... + def deserialize(self, data: list[int]): ... def checkVersion(self) -> None: ... - def readATN(self) -> Incomplete: ... - def readStates(self, atn: ATN) -> Incomplete: ... - def readRules(self, atn: ATN) -> Incomplete: ... - def readModes(self, atn: ATN) -> Incomplete: ... - def readSets(self, atn: ATN, sets: list[Incomplete]) -> Incomplete: ... - def readEdges(self, atn: ATN, sets: list[Incomplete]) -> Incomplete: ... - def readDecisions(self, atn: ATN) -> Incomplete: ... - def readLexerActions(self, atn: ATN) -> Incomplete: ... - def generateRuleBypassTransitions(self, atn: ATN) -> Incomplete: ... - def generateRuleBypassTransition(self, atn: ATN, idx: int) -> Incomplete: ... - def stateIsEndStateFor(self, state: ATNState, idx: int) -> Incomplete: ... - def markPrecedenceDecisions(self, atn: ATN) -> Incomplete: ... - def verifyATN(self, atn: ATN) -> Incomplete: ... - def checkCondition(self, condition: bool, message: Incomplete | None = ...) -> Incomplete: ... - def readInt(self) -> Incomplete: ... + def readATN(self): ... + def readStates(self, atn: ATN): ... + def readRules(self, atn: ATN): ... + def readModes(self, atn: ATN): ... + def readSets(self, atn: ATN, sets: list[Incomplete]): ... + def readEdges(self, atn: ATN, sets: list[Incomplete]): ... + def readDecisions(self, atn: ATN): ... + def readLexerActions(self, atn: ATN): ... + def generateRuleBypassTransitions(self, atn: ATN): ... + def generateRuleBypassTransition(self, atn: ATN, idx: int): ... + def stateIsEndStateFor(self, state: ATNState, idx: int): ... + def markPrecedenceDecisions(self, atn: ATN): ... + def verifyATN(self, atn: ATN): ... + def checkCondition(self, condition: bool, message: Incomplete | None = ...): ... + def readInt(self): ... edgeFactories: Incomplete def edgeFactory( self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete] - ) -> Incomplete: ... + ): ... stateFactories: Incomplete - def stateFactory(self, type: int, ruleIndex: int) -> Incomplete: ... + def stateFactory(self, type: int, ruleIndex: int): ... 
CHANNEL: int CUSTOM: int MODE: int @@ -47,4 +47,4 @@ class ATNDeserializer: SKIP: int TYPE: int actionFactories: Incomplete - def lexerActionFactory(self, type: int, data1: int, data2: int) -> Incomplete: ... + def lexerActionFactory(self, type: int, data1: int, data2: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi index 57c34e3404c1..11fbfe7b0705 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNSimulator.pyi @@ -14,4 +14,4 @@ class ATNSimulator: atn: Incomplete sharedContextCache: Incomplete def __init__(self, atn: ATN, sharedContextCache: PredictionContextCache) -> None: ... - def getCachedContext(self, context: PredictionContext) -> Incomplete: ... + def getCachedContext(self, context: PredictionContext): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi index 32b16a22e6ef..d24e1365e824 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi @@ -28,11 +28,11 @@ class ATNState: transitions: Incomplete nextTokenWithinRule: Incomplete def __init__(self) -> None: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def onlyHasEpsilonTransitions(self) -> Incomplete: ... - def isNonGreedyExitState(self) -> Incomplete: ... - def addTransition(self, trans: Transition, index: int = ...) -> Incomplete: ... + def __hash__(self): ... + def __eq__(self, other): ... + def onlyHasEpsilonTransitions(self): ... + def isNonGreedyExitState(self): ... + def addTransition(self, trans: Transition, index: int = ...): ... class BasicState(ATNState): stateType: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi index 4d3ea3d48948..bec1f1bb056e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi @@ -5,4 +5,4 @@ class ATNType(IntEnum): LEXER: int PARSER: int @classmethod - def fromOrdinal(cls, i: int) -> Incomplete: ... + def fromOrdinal(cls, i: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index 75de75cfd131..9b965255050c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -47,20 +47,20 @@ class LexerATNSimulator(ATNSimulator): MAX_CHAR_VALUE: Incomplete prevAccept: Incomplete def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... - def copyState(self, simulator: LexerATNSimulator) -> Incomplete: ... - def match(self, input: InputStream, mode: int) -> Incomplete: ... + def copyState(self, simulator: LexerATNSimulator): ... + def match(self, input: InputStream, mode: int): ... def reset(self) -> None: ... - def matchATN(self, input: InputStream) -> Incomplete: ... - def execATN(self, input: InputStream, ds0: DFAState) -> Incomplete: ... - def getExistingTargetState(self, s: DFAState, t: int) -> Incomplete: ... - def computeTargetState(self, input: InputStream, s: DFAState, t: int) -> Incomplete: ... - def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int) -> Incomplete: ... 
- def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int) -> Incomplete: ... + def matchATN(self, input: InputStream): ... + def execATN(self, input: InputStream, ds0: DFAState): ... + def getExistingTargetState(self, s: DFAState, t: int): ... + def computeTargetState(self, input: InputStream, s: DFAState, t: int): ... + def failOrAccept(self, prevAccept: SimState, input: InputStream, reach: ATNConfigSet, t: int): ... + def getReachableConfigSet(self, input: InputStream, closure: ATNConfigSet, reach: ATNConfigSet, t: int): ... def accept( self, input: InputStream, lexerActionExecutor: LexerActionExecutor, startIndex: int, index: int, line: int, charPos: int - ) -> Incomplete: ... - def getReachableTarget(self, trans: Transition, t: int) -> Incomplete: ... - def computeStartState(self, input: InputStream, p: ATNState) -> Incomplete: ... + ): ... + def getReachableTarget(self, trans: Transition, t: int): ... + def computeStartState(self, input: InputStream, p: ATNState): ... def closure( self, input: InputStream, @@ -69,7 +69,7 @@ class LexerATNSimulator(ATNSimulator): currentAltReachedAcceptState: bool, speculative: bool, treatEofAsEpsilon: bool, - ) -> Incomplete: ... + ): ... def getEpsilonTarget( self, input: InputStream, @@ -78,12 +78,12 @@ class LexerATNSimulator(ATNSimulator): configs: ATNConfigSet, speculative: bool, treatEofAsEpsilon: bool, - ) -> Incomplete: ... - def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool) -> Incomplete: ... - def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState) -> Incomplete: ... + ): ... + def evaluatePredicate(self, input: InputStream, ruleIndex: int, predIndex: int, speculative: bool): ... + def captureSimState(self, settings: SimState, input: InputStream, dfaState: DFAState): ... def addDFAEdge(self, from_: DFAState, tk: int, to: DFAState | None = None, cfgs: ATNConfigSet | None = None) -> DFAState: ... def addDFAState(self, configs: ATNConfigSet) -> DFAState: ... - def getDFA(self, mode: int) -> Incomplete: ... - def getText(self, input: InputStream) -> Incomplete: ... - def consume(self, input: InputStream) -> Incomplete: ... - def getTokenName(self, t: int) -> Incomplete: ... + def getDFA(self, mode: int): ... + def getText(self, input: InputStream): ... + def consume(self, input: InputStream): ... + def getTokenName(self, t: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi index 2446757724fa..0309d3549f50 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerAction.pyi @@ -17,66 +17,66 @@ class LexerAction: actionType: Incomplete isPositionDependent: bool def __init__(self, action: LexerActionType) -> None: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def __hash__(self): ... + def __eq__(self, other): ... class LexerSkipAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... + def execute(self, lexer: Lexer): ... class LexerTypeAction(LexerAction): type: Incomplete def __init__(self, type: int) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... 
+ def __eq__(self, other): ... class LexerPushModeAction(LexerAction): mode: Incomplete def __init__(self, mode: int) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... class LexerPopModeAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... + def execute(self, lexer: Lexer): ... class LexerMoreAction(LexerAction): INSTANCE: Incomplete def __init__(self) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... + def execute(self, lexer: Lexer): ... class LexerModeAction(LexerAction): mode: Incomplete def __init__(self, mode: int) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... class LexerCustomAction(LexerAction): ruleIndex: Incomplete actionIndex: Incomplete isPositionDependent: bool def __init__(self, ruleIndex: int, actionIndex: int) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... class LexerChannelAction(LexerAction): channel: Incomplete def __init__(self, channel: int) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... class LexerIndexedCustomAction(LexerAction): offset: Incomplete action: Incomplete isPositionDependent: bool def __init__(self, offset: int, action: LexerAction) -> None: ... - def execute(self, lexer: Lexer) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def execute(self, lexer: Lexer): ... + def __hash__(self): ... + def __eq__(self, other): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi index 5ed06d2718bf..e2226687ae29 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi @@ -10,8 +10,8 @@ class LexerActionExecutor: hashCode: Incomplete def __init__(self, lexerActions: list[LexerAction] = ...) -> None: ... @staticmethod - def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction) -> Incomplete: ... - def fixOffsetBeforeMatch(self, offset: int) -> Incomplete: ... - def execute(self, lexer: Lexer, input: InputStream, startIndex: int) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... + def fixOffsetBeforeMatch(self, offset: int): ... + def execute(self, lexer: Lexer, input: InputStream, startIndex: int): ... + def __hash__(self): ... + def __eq__(self, other): ... 
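For client code the codemod is largely cosmetic: a return left unannotated in a stub is treated as implicitly `Any`/`Incomplete` by type checkers, much as the explicit `-> Incomplete` annotation was, while the parameter annotations that remain are still enforced. A rough sketch of the effect, assuming a checker that follows typeshed's convention for unannotated stub returns and Python 3.11+ for `typing.reveal_type`; the surrounding code is illustrative only:

    from typing import reveal_type

    from antlr4.atn.LexerActionExecutor import LexerActionExecutor

    executor = LexerActionExecutor()
    # The stub above declares "def fixOffsetBeforeMatch(self, offset: int): ...", so the
    # argument is still checked as an int, while the result is revealed as Any/Unknown.
    reveal_type(executor.fixOffsetBeforeMatch(0))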
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index d4e1db588af3..f663efeafc38 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -46,31 +46,31 @@ class ParserATNSimulator(ATNSimulator): self, parser: Parser, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache ) -> None: ... def reset(self) -> None: ... - def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext) -> Incomplete: ... + def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... def execATN( self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext - ) -> Incomplete: ... - def getExistingTargetState(self, previousD: DFAState, t: int) -> Incomplete: ... - def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int) -> Incomplete: ... - def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState) -> Incomplete: ... + ): ... + def getExistingTargetState(self, previousD: DFAState, t: int): ... + def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ... + def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ... def execATNWithFullContext( self, dfa: DFA, D: DFAState, s0: ATNConfigSet, input: TokenStream, startIndex: int, outerContext: ParserRuleContext - ) -> Incomplete: ... - def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool) -> Incomplete: ... - def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool) -> Incomplete: ... - def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool) -> Incomplete: ... - def applyPrecedenceFilter(self, configs: ATNConfigSet) -> Incomplete: ... - def getReachableTarget(self, trans: Transition, ttype: int) -> Incomplete: ... - def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int) -> Incomplete: ... - def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]) -> Incomplete: ... + ): ... + def computeReachSet(self, closure: ATNConfigSet, t: int, fullCtx: bool): ... + def removeAllConfigsNotInRuleStopState(self, configs: ATNConfigSet, lookToEndOfRule: bool): ... + def computeStartState(self, p: ATNState, ctx: RuleContext, fullCtx: bool): ... + def applyPrecedenceFilter(self, configs: ATNConfigSet): ... + def getReachableTarget(self, trans: Transition, ttype: int): ... + def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int): ... + def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]): ... def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule( self, configs: ATNConfigSet, outerContext: ParserRuleContext - ) -> Incomplete: ... - def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet) -> Incomplete: ... - def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext) -> Incomplete: ... + ): ... + def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... + def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... def evalSemanticContext( self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool - ) -> Incomplete: ... + ): ... 
def closure( self, config: ATNConfig, @@ -79,7 +79,7 @@ class ParserATNSimulator(ATNSimulator): collectPredicates: bool, fullCtx: bool, treatEofAsEpsilon: bool, - ) -> Incomplete: ... + ): ... def closureCheckingStopState( self, config: ATNConfig, @@ -89,7 +89,7 @@ class ParserATNSimulator(ATNSimulator): fullCtx: bool, depth: int, treatEofAsEpsilon: bool, - ) -> Incomplete: ... + ): ... def closure_( self, config: ATNConfig, @@ -99,38 +99,38 @@ class ParserATNSimulator(ATNSimulator): fullCtx: bool, depth: int, treatEofAsEpsilon: bool, - ) -> Incomplete: ... - def canDropLoopEntryEdgeInLeftRecursiveRule(self, config: Incomplete) -> Incomplete: ... - def getRuleName(self, index: int) -> Incomplete: ... + ): ... + def canDropLoopEntryEdgeInLeftRecursiveRule(self, config): ... + def getRuleName(self, index: int): ... epsilonTargetMethods: Incomplete def getEpsilonTarget( self, config: ATNConfig, t: Transition, collectPredicates: bool, inContext: bool, fullCtx: bool, treatEofAsEpsilon: bool - ) -> Incomplete: ... - def actionTransition(self, config: ATNConfig, t: ActionTransition) -> Incomplete: ... + ): ... + def actionTransition(self, config: ATNConfig, t: ActionTransition): ... def precedenceTransition( self, config: ATNConfig, pt: PrecedencePredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool - ) -> Incomplete: ... + ): ... def predTransition( self, config: ATNConfig, pt: PredicateTransition, collectPredicates: bool, inContext: bool, fullCtx: bool - ) -> Incomplete: ... - def ruleTransition(self, config: ATNConfig, t: RuleTransition) -> Incomplete: ... - def getConflictingAlts(self, configs: ATNConfigSet) -> Incomplete: ... - def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet) -> Incomplete: ... - def getTokenName(self, t: int) -> Incomplete: ... - def getLookaheadName(self, input: TokenStream) -> Incomplete: ... - def dumpDeadEndConfigs(self, nvae: NoViableAltException) -> Incomplete: ... + ): ... + def ruleTransition(self, config: ATNConfig, t: RuleTransition): ... + def getConflictingAlts(self, configs: ATNConfigSet): ... + def getConflictingAltsOrUniqueAlt(self, configs: ATNConfigSet): ... + def getTokenName(self, t: int): ... + def getLookaheadName(self, input: TokenStream): ... + def dumpDeadEndConfigs(self, nvae: NoViableAltException): ... def noViableAlt( self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int - ) -> Incomplete: ... - def getUniqueAlt(self, configs: ATNConfigSet) -> Incomplete: ... - def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState) -> Incomplete: ... - def addDFAState(self, dfa: DFA, D: DFAState) -> Incomplete: ... + ): ... + def getUniqueAlt(self, configs: ATNConfigSet): ... + def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... + def addDFAState(self, dfa: DFA, D: DFAState): ... def reportAttemptingFullContext( self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int - ) -> Incomplete: ... + ): ... def reportContextSensitivity( self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int - ) -> Incomplete: ... + ): ... def reportAmbiguity( self, dfa: DFA, @@ -140,4 +140,4 @@ class ParserATNSimulator(ATNSimulator): exact: bool, ambigAlts: set[Incomplete], configs: ATNConfigSet, - ) -> Incomplete: ... + ): ... 
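ParserATNSimulator is the largest block of these mechanical edits, so a caller-side sketch is a useful sanity check that nothing changes for users of the stubs. The snippet below is hypothetical (the function predict and the argument names tokens and ctx are illustrative, not from the patch); it should type-check the same before and after, because the affected methods still produce Any:

from typing import Any, reveal_type  # reveal_type is in typing on 3.11+; otherwise typing_extensions

from antlr4.atn.ParserATNSimulator import ParserATNSimulator

def predict(sim: ParserATNSimulator, tokens: Any, decision: int, ctx: Any) -> int:
    alt = sim.adaptivePredict(tokens, decision, ctx)
    reveal_type(alt)  # mypy: Revealed type is "Any" -- same result as with "-> Incomplete"
    return alt        # Any stays assignable to int, so downstream annotations keep passing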
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 92a86d4c5f45..675328d3cbf4 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -12,30 +12,30 @@ class PredictionMode(Enum): LL: int LL_EXACT_AMBIG_DETECTION: int @classmethod - def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet) -> Incomplete: ... + def hasSLLConflictTerminatingPrediction(cls, mode: PredictionMode, configs: ATNConfigSet): ... @classmethod - def hasConfigInRuleStopState(cls, configs: ATNConfigSet) -> Incomplete: ... + def hasConfigInRuleStopState(cls, configs: ATNConfigSet): ... @classmethod - def allConfigsInRuleStopStates(cls, configs: ATNConfigSet) -> Incomplete: ... + def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ... @classmethod - def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]) -> Incomplete: ... + def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]): ... @classmethod - def allSubsetsConflict(cls, altsets: list[set[int]]) -> Incomplete: ... + def allSubsetsConflict(cls, altsets: list[set[int]]): ... @classmethod - def hasNonConflictingAltSet(cls, altsets: list[set[int]]) -> Incomplete: ... + def hasNonConflictingAltSet(cls, altsets: list[set[int]]): ... @classmethod - def hasConflictingAltSet(cls, altsets: list[set[int]]) -> Incomplete: ... + def hasConflictingAltSet(cls, altsets: list[set[int]]): ... @classmethod - def allSubsetsEqual(cls, altsets: list[set[int]]) -> Incomplete: ... + def allSubsetsEqual(cls, altsets: list[set[int]]): ... @classmethod - def getUniqueAlt(cls, altsets: list[set[int]]) -> Incomplete: ... + def getUniqueAlt(cls, altsets: list[set[int]]): ... @classmethod - def getAlts(cls, altsets: list[set[int]]) -> Incomplete: ... + def getAlts(cls, altsets: list[set[int]]): ... @classmethod - def getConflictingAltSubsets(cls, configs: ATNConfigSet) -> Incomplete: ... + def getConflictingAltSubsets(cls, configs: ATNConfigSet): ... @classmethod - def getStateToAltMap(cls, configs: ATNConfigSet) -> Incomplete: ... + def getStateToAltMap(cls, configs: ATNConfigSet): ... @classmethod - def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet) -> Incomplete: ... + def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ... @classmethod - def getSingleViableAlt(cls, altsets: list[set[int]]) -> Incomplete: ... + def getSingleViableAlt(cls, altsets: list[set[int]]): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi index c9c1dbdc1728..0cbd255d52e3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi @@ -5,12 +5,12 @@ from antlr4.RuleContext import RuleContext as RuleContext class SemanticContext: NONE: Incomplete - def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... -def andContext(a: SemanticContext, b: SemanticContext) -> Incomplete: ... -def orContext(a: SemanticContext, b: SemanticContext) -> Incomplete: ... -def filterPrecedencePredicates(collection: set[SemanticContext]) -> Incomplete: ... 
+def andContext(a: SemanticContext, b: SemanticContext): ... +def orContext(a: SemanticContext, b: SemanticContext): ... +def filterPrecedencePredicates(collection: set[SemanticContext]): ... class EmptySemanticContext(SemanticContext): ... @@ -19,31 +19,31 @@ class Predicate(SemanticContext): predIndex: Incomplete isCtxDependent: Incomplete def __init__(self, ruleIndex: int = ..., predIndex: int = ..., isCtxDependent: bool = ...) -> None: ... - def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def __hash__(self): ... + def __eq__(self, other): ... class PrecedencePredicate(SemanticContext): precedence: Incomplete def __init__(self, precedence: int = ...) -> None: ... - def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def __lt__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... + def __lt__(self, other): ... + def __hash__(self): ... + def __eq__(self, other): ... class AND(SemanticContext): opnds: Incomplete def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def __eq__(self, other): ... + def __hash__(self): ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... class OR(SemanticContext): opnds: Incomplete def __init__(self, a: SemanticContext, b: SemanticContext) -> None: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def eval(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... - def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext) -> Incomplete: ... + def __eq__(self, other): ... + def __hash__(self): ... + def eval(self, parser: Recognizer, outerContext: RuleContext): ... + def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi index 9a1928dbe7df..79f11e6d695d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi @@ -28,8 +28,8 @@ class AtomTransition(Transition): label: Incomplete serializationType: Incomplete def __init__(self, target: ATNState, label: int) -> None: ... - def makeLabel(self) -> Incomplete: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def makeLabel(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... 
class RuleTransition(Transition): ruleIndex: Incomplete @@ -38,14 +38,14 @@ class RuleTransition(Transition): serializationType: Incomplete isEpsilon: bool def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState: ATNState) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class EpsilonTransition(Transition): serializationType: Incomplete isEpsilon: bool outermostPrecedenceReturn: Incomplete - def __init__(self, target: Incomplete, outermostPrecedenceReturn: int = ...) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def __init__(self, target, outermostPrecedenceReturn: int = ...) -> None: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class RangeTransition(Transition): serializationType: Incomplete @@ -53,8 +53,8 @@ class RangeTransition(Transition): stop: Incomplete label: Incomplete def __init__(self, target: ATNState, start: int, stop: int) -> None: ... - def makeLabel(self) -> Incomplete: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def makeLabel(self): ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class AbstractPredicateTransition(Transition): def __init__(self, target: ATNState) -> None: ... @@ -66,8 +66,8 @@ class PredicateTransition(AbstractPredicateTransition): isCtxDependent: Incomplete isEpsilon: bool def __init__(self, target: ATNState, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... - def getPredicate(self) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def getPredicate(self): ... class ActionTransition(Transition): serializationType: Incomplete @@ -76,28 +76,28 @@ class ActionTransition(Transition): isCtxDependent: Incomplete isEpsilon: bool def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = ..., isCtxDependent: bool = ...) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class SetTransition(Transition): serializationType: Incomplete label: Incomplete def __init__(self, target: ATNState, set: IntervalSet) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class NotSetTransition(SetTransition): serializationType: Incomplete def __init__(self, target: ATNState, set: IntervalSet) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class WildcardTransition(Transition): serializationType: Incomplete def __init__(self, target: ATNState) -> None: ... - def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class PrecedencePredicateTransition(AbstractPredicateTransition): serializationType: Incomplete precedence: Incomplete isEpsilon: bool def __init__(self, target: ATNState, precedence: int) -> None: ... 
- def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int) -> Incomplete: ... - def getPredicate(self) -> Incomplete: ... + def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... + def getPredicate(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi index f2fcfef071d7..02c54121d632 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi @@ -11,11 +11,11 @@ class DFA: s0: Incomplete precedenceDfa: bool def __init__(self, atnStartState: DecisionState, decision: int = ...) -> None: ... - def getPrecedenceStartState(self, precedence: int) -> Incomplete: ... - def setPrecedenceStartState(self, precedence: int, startState: DFAState) -> Incomplete: ... - def setPrecedenceDfa(self, precedenceDfa: bool) -> Incomplete: ... + def getPrecedenceStartState(self, precedence: int): ... + def setPrecedenceStartState(self, precedence: int, startState: DFAState): ... + def setPrecedenceDfa(self, precedenceDfa: bool): ... @property - def states(self) -> Incomplete: ... - def sortedStates(self) -> Incomplete: ... - def toString(self, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> Incomplete: ... - def toLexerString(self) -> Incomplete: ... + def states(self): ... + def sortedStates(self): ... + def toString(self, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None): ... + def toLexerString(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi index e7cda2037c92..3271670ff5ac 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi @@ -9,9 +9,9 @@ class DFASerializer: literalNames: Incomplete symbolicNames: Incomplete def __init__(self, dfa: DFA, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> None: ... - def getEdgeLabel(self, i: int) -> Incomplete: ... - def getStateString(self, s: DFAState) -> Incomplete: ... + def getEdgeLabel(self, i: int): ... + def getStateString(self, s: DFAState): ... class LexerDFASerializer(DFASerializer): def __init__(self, dfa: DFA) -> None: ... - def getEdgeLabel(self, i: int) -> Incomplete: ... + def getEdgeLabel(self, i: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi index 955ec701eb9a..752bbd6929a0 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi @@ -18,6 +18,6 @@ class DFAState: requiresFullContext: bool predicates: Incomplete def __init__(self, stateNumber: int = ..., configs: ATNConfigSet = ...) -> None: ... - def getAltSet(self) -> Incomplete: ... - def __hash__(self) -> Incomplete: ... - def __eq__(self, other: Incomplete) -> Incomplete: ... + def getAltSet(self): ... + def __hash__(self): ... + def __eq__(self, other): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index 8109a8ea1ce1..dc9966c7564a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -9,19 +9,19 @@ class DiagnosticErrorListener(ErrorListener): def __init__(self, exactOnly: bool = ...) -> None: ... 
def reportAmbiguity( self, - recognizer: Incomplete, + recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet, - ) -> Incomplete: ... + ): ... def reportAttemptingFullContext( - self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet - ) -> Incomplete: ... + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet + ): ... def reportContextSensitivity( - self, recognizer: Incomplete, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet - ) -> Incomplete: ... - def getDecisionDescription(self, recognizer: Incomplete, dfa: DFA) -> Incomplete: ... - def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet) -> Incomplete: ... + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, prediction: int, configs: ATNConfigSet + ): ... + def getDecisionDescription(self, recognizer, dfa: DFA): ... + def getConflictingAlts(self, reportedAlts: set[int], configs: ATNConfigSet): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi index 9984ec5f5d9e..266a6ae345f5 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi @@ -3,91 +3,91 @@ from _typeshed import Incomplete class ErrorListener: def syntaxError( self, - recognizer: Incomplete, - offendingSymbol: Incomplete, - line: Incomplete, - column: Incomplete, - msg: Incomplete, - e: Incomplete, + recognizer, + offendingSymbol, + line, + column, + msg, + e, ) -> None: ... def reportAmbiguity( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - exact: Incomplete, - ambigAlts: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + exact, + ambigAlts, + configs, ) -> None: ... def reportAttemptingFullContext( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - conflictingAlts: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + conflictingAlts, + configs, ) -> None: ... def reportContextSensitivity( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - prediction: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + prediction, + configs, ) -> None: ... class ConsoleErrorListener(ErrorListener): INSTANCE: Incomplete def syntaxError( self, - recognizer: Incomplete, - offendingSymbol: Incomplete, - line: Incomplete, - column: Incomplete, - msg: Incomplete, - e: Incomplete, + recognizer, + offendingSymbol, + line, + column, + msg, + e, ) -> None: ... class ProxyErrorListener(ErrorListener): delegates: Incomplete - def __init__(self, delegates: Incomplete) -> None: ... + def __init__(self, delegates) -> None: ... def syntaxError( self, - recognizer: Incomplete, - offendingSymbol: Incomplete, - line: Incomplete, - column: Incomplete, - msg: Incomplete, - e: Incomplete, + recognizer, + offendingSymbol, + line, + column, + msg, + e, ) -> None: ... def reportAmbiguity( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - exact: Incomplete, - ambigAlts: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + exact, + ambigAlts, + configs, ) -> None: ... 
def reportAttemptingFullContext( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - conflictingAlts: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + conflictingAlts, + configs, ) -> None: ... def reportContextSensitivity( self, - recognizer: Incomplete, - dfa: Incomplete, - startIndex: Incomplete, - stopIndex: Incomplete, - prediction: Incomplete, - configs: Incomplete, + recognizer, + dfa, + startIndex, + stopIndex, + prediction, + configs, ) -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi index 470603ec55a7..6b7ea7128d93 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi @@ -14,12 +14,12 @@ from antlr4.Token import Token as Token Parser: Incomplete class ErrorStrategy: - def reset(self, recognizer: Parser) -> Incomplete: ... - def recoverInline(self, recognizer: Parser) -> Incomplete: ... - def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... - def sync(self, recognizer: Parser) -> Incomplete: ... - def inErrorRecoveryMode(self, recognizer: Parser) -> Incomplete: ... - def reportError(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... + def reset(self, recognizer: Parser): ... + def recoverInline(self, recognizer: Parser): ... + def recover(self, recognizer: Parser, e: RecognitionException): ... + def sync(self, recognizer: Parser): ... + def inErrorRecoveryMode(self, recognizer: Parser): ... + def reportError(self, recognizer: Parser, e: RecognitionException): ... class DefaultErrorStrategy(ErrorStrategy): errorRecoveryMode: bool @@ -28,31 +28,31 @@ class DefaultErrorStrategy(ErrorStrategy): nextTokensContext: Incomplete nextTokenState: int def __init__(self) -> None: ... - def reset(self, recognizer: Parser) -> Incomplete: ... - def beginErrorCondition(self, recognizer: Parser) -> Incomplete: ... - def inErrorRecoveryMode(self, recognizer: Parser) -> Incomplete: ... - def endErrorCondition(self, recognizer: Parser) -> Incomplete: ... - def reportMatch(self, recognizer: Parser) -> Incomplete: ... - def reportError(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... - def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... + def reset(self, recognizer: Parser): ... + def beginErrorCondition(self, recognizer: Parser): ... + def inErrorRecoveryMode(self, recognizer: Parser): ... + def endErrorCondition(self, recognizer: Parser): ... + def reportMatch(self, recognizer: Parser): ... + def reportError(self, recognizer: Parser, e: RecognitionException): ... + def recover(self, recognizer: Parser, e: RecognitionException): ... nextTokensState: Incomplete - def sync(self, recognizer: Parser) -> Incomplete: ... - def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException) -> Incomplete: ... - def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException) -> Incomplete: ... - def reportFailedPredicate(self, recognizer: Incomplete, e: Incomplete) -> None: ... - def reportUnwantedToken(self, recognizer: Parser) -> Incomplete: ... - def reportMissingToken(self, recognizer: Parser) -> Incomplete: ... - def recoverInline(self, recognizer: Parser) -> Incomplete: ... - def singleTokenInsertion(self, recognizer: Parser) -> Incomplete: ... - def singleTokenDeletion(self, recognizer: Parser) -> Incomplete: ... 
- def getMissingSymbol(self, recognizer: Parser) -> Incomplete: ... - def getExpectedTokens(self, recognizer: Parser) -> Incomplete: ... - def getTokenErrorDisplay(self, t: Token) -> Incomplete: ... - def escapeWSAndQuote(self, s: str) -> Incomplete: ... - def getErrorRecoverySet(self, recognizer: Parser) -> Incomplete: ... - def consumeUntil(self, recognizer: Parser, set_: set[int]) -> Incomplete: ... + def sync(self, recognizer: Parser): ... + def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException): ... + def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException): ... + def reportFailedPredicate(self, recognizer, e) -> None: ... + def reportUnwantedToken(self, recognizer: Parser): ... + def reportMissingToken(self, recognizer: Parser): ... + def recoverInline(self, recognizer: Parser): ... + def singleTokenInsertion(self, recognizer: Parser): ... + def singleTokenDeletion(self, recognizer: Parser): ... + def getMissingSymbol(self, recognizer: Parser): ... + def getExpectedTokens(self, recognizer: Parser): ... + def getTokenErrorDisplay(self, t: Token): ... + def escapeWSAndQuote(self, s: str): ... + def getErrorRecoverySet(self, recognizer: Parser): ... + def consumeUntil(self, recognizer: Parser, set_: set[int]): ... class BailErrorStrategy(DefaultErrorStrategy): - def recover(self, recognizer: Parser, e: RecognitionException) -> Incomplete: ... - def recoverInline(self, recognizer: Parser) -> Incomplete: ... - def sync(self, recognizer: Parser) -> Incomplete: ... + def recover(self, recognizer: Parser, e: RecognitionException): ... + def recoverInline(self, recognizer: Parser): ... + def sync(self, recognizer: Parser): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index d453b19cd224..bb6293d3e19e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -36,7 +36,7 @@ class RecognitionException(Exception): input: InputStream | None = None, ctx: ParserRulecontext | None = None, ) -> None: ... - def getExpectedTokens(self) -> Incomplete: ... + def getExpectedTokens(self): ... class LexerNoViableAltException(RecognitionException): startIndex: Incomplete @@ -68,6 +68,6 @@ class FailedPredicateException(RecognitionException): predicate: Incomplete offendingToken: Incomplete def __init__(self, recognizer: Parser, predicate: str | None = None, message: str | None = None) -> None: ... - def formatMessage(self, predicate: str, message: str) -> Incomplete: ... + def formatMessage(self, predicate: str, message: str): ... class ParseCancellationException(CancellationException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi index da9a5b6a851f..23e09c3dd752 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreeMatch.pyi @@ -11,6 +11,6 @@ class ParseTreeMatch: def __init__( self, tree: ParseTree, pattern: ParseTreePattern, labels: dict[str, list[ParseTree]], mismatchedNode: ParseTree ) -> None: ... - def get(self, label: str) -> Incomplete: ... - def getAll(self, label: str) -> Incomplete: ... - def succeeded(self) -> Incomplete: ... + def get(self, label: str): ... + def getAll(self, label: str): ... + def succeeded(self): ... 
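The listener and strategy hunks above also drop ": Incomplete" from parameters such as recognizer, leaving them unannotated (implicitly Any), which is equivalent from a checker's point of view. A hedged downstream example (the subclass and its attribute are made up, not part of the patch) showing that a user-defined listener still overrides cleanly under these stubs:

from antlr4.error.ErrorListener import ErrorListener

class CollectingErrorListener(ErrorListener):
    """Hypothetical downstream listener; not part of this patch."""

    def __init__(self) -> None:
        self.messages: list[str] = []

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None:
        # The base method's parameters are unannotated in the stub (implicit Any),
        # so this override -- with or without its own annotations -- is accepted by mypy.
        self.messages.append(f"line {line}:{column} {msg}")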
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi index c685195ebd3f..d50da5c95c09 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePattern.pyi @@ -10,6 +10,6 @@ class ParseTreePattern: pattern: Incomplete patternTree: Incomplete def __init__(self, matcher: ParseTreePatternMatcher, pattern: str, patternRuleIndex: int, patternTree: ParseTree) -> None: ... - def match(self, tree: ParseTree) -> Incomplete: ... - def matches(self, tree: ParseTree) -> Incomplete: ... - def findAll(self, tree: ParseTree, xpath: str) -> Incomplete: ... + def match(self, tree: ParseTree): ... + def matches(self, tree: ParseTree): ... + def findAll(self, tree: ParseTree, xpath: str): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi index 84148c78f3a1..392d7be9b31b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/ParseTreePatternMatcher.pyi @@ -31,14 +31,14 @@ class ParseTreePatternMatcher: stop: str escape: str def __init__(self, lexer: Lexer, parser: Parser) -> None: ... - def setDelimiters(self, start: str, stop: str, escapeLeft: str) -> Incomplete: ... - def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int) -> Incomplete: ... - def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern) -> Incomplete: ... - def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int) -> Incomplete: ... - def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern) -> Incomplete: ... - def compileTreePattern(self, pattern: str, patternRuleIndex: int) -> Incomplete: ... - def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]) -> Incomplete: ... - def map(self, labels: Incomplete, label: Incomplete, tree: Incomplete) -> None: ... - def getRuleTagToken(self, tree: ParseTree) -> Incomplete: ... - def tokenize(self, pattern: str) -> Incomplete: ... - def split(self, pattern: str) -> Incomplete: ... + def setDelimiters(self, start: str, stop: str, escapeLeft: str): ... + def matchesRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... + def matchesPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... + def matchRuleIndex(self, tree: ParseTree, pattern: str, patternRuleIndex: int): ... + def matchPattern(self, tree: ParseTree, pattern: ParseTreePattern): ... + def compileTreePattern(self, pattern: str, patternRuleIndex: int): ... + def matchImpl(self, tree: ParseTree, patternTree: ParseTree, labels: dict[str, list[ParseTree]]): ... + def map(self, labels, label, tree) -> None: ... + def getRuleTagToken(self, tree: ParseTree): ... + def tokenize(self, pattern: str): ... + def split(self, pattern: str): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi index ab51c9a94b7c..12358ca0b9d8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/RuleTagToken.pyi @@ -14,4 +14,4 @@ class RuleTagToken(Token): label: Incomplete ruleName: Incomplete def __init__(self, ruleName: str, bypassTokenType: int, label: str | None = None) -> None: ... - def getText(self) -> Incomplete: ... + def getText(self): ... 
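With the tree-pattern stubs above (ParseTreePattern, ParseTreePatternMatcher, RuleTagToken) leaving return types implicit, results such as getText() come back as Any, so callers that want precise types should narrow them on their side. A small usage sketch under that assumption (argument values are illustrative and the parameter names simply follow the stub signature shown above):

from antlr4.tree.RuleTagToken import RuleTagToken

# Hypothetical values; keyword names mirror the stubbed __init__ above.
token = RuleTagToken("expr", bypassTokenType=1, label="e")

text = token.getText()       # typed as Any under these stubs
label_text: str = str(text)  # narrow explicitly where user code needs a str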
diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi index f6f91298b954..ff76088ca935 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/TokenTagToken.pyi @@ -6,4 +6,4 @@ class TokenTagToken(CommonToken): tokenName: Incomplete label: Incomplete def __init__(self, tokenName: str, type: int, label: str | None = None) -> None: ... - def getText(self) -> Incomplete: ... + def getText(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi index 429388d0597a..d056082b2531 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi @@ -12,42 +12,42 @@ class TerminalNode(ParseTree): ... class ErrorNode(TerminalNode): ... class ParseTreeVisitor: - def visit(self, tree: Incomplete) -> Incomplete: ... - def visitChildren(self, node: Incomplete) -> Incomplete: ... - def visitTerminal(self, node: Incomplete) -> Incomplete: ... - def visitErrorNode(self, node: Incomplete) -> Incomplete: ... + def visit(self, tree): ... + def visitChildren(self, node): ... + def visitTerminal(self, node): ... + def visitErrorNode(self, node): ... def defaultResult(self) -> None: ... - def aggregateResult(self, aggregate: Incomplete, nextResult: Incomplete) -> Incomplete: ... - def shouldVisitNextChild(self, node: Incomplete, currentResult: Incomplete) -> Incomplete: ... + def aggregateResult(self, aggregate, nextResult): ... + def shouldVisitNextChild(self, node, currentResult): ... ParserRuleContext: Incomplete class ParseTreeListener: - def visitTerminal(self, node: TerminalNode) -> Incomplete: ... - def visitErrorNode(self, node: ErrorNode) -> Incomplete: ... - def enterEveryRule(self, ctx: ParserRuleContext) -> Incomplete: ... - def exitEveryRule(self, ctx: ParserRuleContext) -> Incomplete: ... + def visitTerminal(self, node: TerminalNode): ... + def visitErrorNode(self, node: ErrorNode): ... + def enterEveryRule(self, ctx: ParserRuleContext): ... + def exitEveryRule(self, ctx: ParserRuleContext): ... class TerminalNodeImpl(TerminalNode): parentCtx: Incomplete symbol: Incomplete def __init__(self, symbol: Token) -> None: ... - def __setattr__(self, key: Incomplete, value: Incomplete) -> None: ... - def getChild(self, i: int) -> Incomplete: ... - def getSymbol(self) -> Incomplete: ... - def getParent(self) -> Incomplete: ... - def getPayload(self) -> Incomplete: ... - def getSourceInterval(self) -> Incomplete: ... - def getChildCount(self) -> Incomplete: ... - def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... - def getText(self) -> Incomplete: ... + def __setattr__(self, key, value) -> None: ... + def getChild(self, i: int): ... + def getSymbol(self): ... + def getParent(self): ... + def getPayload(self): ... + def getSourceInterval(self): ... + def getChildCount(self): ... + def accept(self, visitor: ParseTreeVisitor): ... + def getText(self): ... class ErrorNodeImpl(TerminalNodeImpl, ErrorNode): def __init__(self, token: Token) -> None: ... - def accept(self, visitor: ParseTreeVisitor) -> Incomplete: ... + def accept(self, visitor: ParseTreeVisitor): ... class ParseTreeWalker: DEFAULT: Incomplete - def walk(self, listener: ParseTreeListener, t: ParseTree) -> Incomplete: ... - def enterRule(self, listener: ParseTreeListener, r: RuleNode) -> Incomplete: ... 
- def exitRule(self, listener: ParseTreeListener, r: RuleNode) -> Incomplete: ... + def walk(self, listener: ParseTreeListener, t: ParseTree): ... + def enterRule(self, listener: ParseTreeListener, r: RuleNode): ... + def exitRule(self, listener: ParseTreeListener, r: RuleNode): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi index 76a81babce36..896a4f9da254 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Trees.pyi @@ -14,18 +14,18 @@ Parser: Incomplete class Trees: @classmethod - def toStringTree(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> Incomplete: ... + def toStringTree(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None): ... @classmethod - def getNodeText(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None) -> Incomplete: ... + def getNodeText(cls, t: Tree, ruleNames: list[str] | None = None, recog: Parser | None = None): ... @classmethod - def getChildren(cls, t: Tree) -> Incomplete: ... + def getChildren(cls, t: Tree): ... @classmethod - def getAncestors(cls, t: Tree) -> Incomplete: ... + def getAncestors(cls, t: Tree): ... @classmethod - def findAllTokenNodes(cls, t: ParseTree, ttype: int) -> Incomplete: ... + def findAllTokenNodes(cls, t: ParseTree, ttype: int): ... @classmethod - def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int) -> Incomplete: ... + def findAllRuleNodes(cls, t: ParseTree, ruleIndex: int): ... @classmethod - def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool) -> Incomplete: ... + def findAllNodes(cls, t: ParseTree, index: int, findTokens: bool): ... @classmethod - def descendants(cls, t: ParseTree) -> Incomplete: ... + def descendants(cls, t: ParseTree): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi index 496b702270c3..47ede0ad6bae 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPath.pyi @@ -27,11 +27,11 @@ class XPath: path: Incomplete elements: Incomplete def __init__(self, parser: Parser, path: str) -> None: ... - def split(self, path: str) -> Incomplete: ... - def getXPathElement(self, wordToken: Token, anywhere: bool) -> Incomplete: ... + def split(self, path: str): ... + def getXPathElement(self, wordToken: Token, anywhere: bool): ... @staticmethod - def findAll(tree: ParseTree, xpath: str, parser: Parser) -> Incomplete: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def findAll(tree: ParseTree, xpath: str, parser: Parser): ... + def evaluate(self, t: ParseTree): ... class XPathElement: nodeName: Incomplete @@ -41,27 +41,27 @@ class XPathElement: class XPathRuleAnywhereElement(XPathElement): ruleIndex: Incomplete def __init__(self, ruleName: str, ruleIndex: int) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... class XPathRuleElement(XPathElement): ruleIndex: Incomplete def __init__(self, ruleName: str, ruleIndex: int) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... class XPathTokenAnywhereElement(XPathElement): tokenType: Incomplete def __init__(self, ruleName: str, tokenType: int) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... 
class XPathTokenElement(XPathElement): tokenType: Incomplete def __init__(self, ruleName: str, tokenType: int) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... class XPathWildcardAnywhereElement(XPathElement): def __init__(self) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... class XPathWildcardElement(XPathElement): def __init__(self) -> None: ... - def evaluate(self, t: ParseTree) -> Incomplete: ... + def evaluate(self, t: ParseTree): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi index b09ffe0a3e27..a6b67a6d5329 100644 --- a/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/xpath/XPathLexer.pyi @@ -3,7 +3,7 @@ from typing import TextIO from antlr4 import * -def serializedATN() -> Incomplete: ... +def serializedATN(): ... class XPathLexer(Lexer): atn: Incomplete @@ -23,6 +23,6 @@ class XPathLexer(Lexer): ruleNames: Incomplete grammarFileName: str def __init__(self, input: Incomplete | None = None, output: TextIO = ...) -> None: ... - def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int) -> Incomplete: ... + def action(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... type: Incomplete - def ID_action(self, localctx: RuleContext, actionIndex: int) -> Incomplete: ... + def ID_action(self, localctx: RuleContext, actionIndex: int): ... From 6b4e82640c9da7dc0a68dd3dc2fd1b6b55e9132a Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Thu, 28 Dec 2023 12:51:53 +0000 Subject: [PATCH 17/34] Run stubdefaulter --- .../antlr4/BufferedTokenStream.pyi | 4 ++-- .../antlr4/CommonTokenFactory.pyi | 2 +- .../antlr4/CommonTokenStream.pyi | 2 +- .../antlr4-python3-runtime/antlr4/FileStream.pyi | 4 ++-- .../antlr4/ParserRuleContext.pyi | 2 +- .../antlr4/RuleContext.pyi | 2 +- .../antlr4/StdinStream.pyi | 2 +- stubs/antlr4-python3-runtime/antlr4/Token.pyi | 8 ++++---- .../antlr4/TokenStreamRewriter.pyi | 16 ++++++++-------- .../antlr4/atn/ATNConfigSet.pyi | 4 ++-- .../antlr4/atn/ATNDeserializer.pyi | 2 +- .../antlr4/atn/ATNState.pyi | 2 +- .../antlr4/atn/LexerActionExecutor.pyi | 2 +- .../antlr4/atn/SemanticContext.pyi | 4 ++-- .../antlr4/atn/Transition.pyi | 4 ++-- stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi | 2 +- .../antlr4/dfa/DFAState.pyi | 2 +- .../antlr4/error/DiagnosticErrorListener.pyi | 2 +- 18 files changed, 33 insertions(+), 33 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi index b34d5d204f54..bf21e75e68a6 100644 --- a/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi @@ -31,8 +31,8 @@ class BufferedTokenStream(TokenStream): def setTokenSource(self, tokenSource: Lexer): ... def nextTokenOnChannel(self, i: int, channel: int): ... def previousTokenOnChannel(self, i: int, channel: int): ... - def getHiddenTokensToRight(self, tokenIndex: int, channel: int = ...): ... - def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = ...): ... + def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ... + def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ... def filterForChannel(self, left: int, right: int, channel: int): ... def getSourceName(self): ... 
def getText(self, start: int | None = None, stop: int | None = None): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index bacc13173696..a425807dde20 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -7,7 +7,7 @@ class TokenFactory: ... class CommonTokenFactory(TokenFactory): DEFAULT: Incomplete copyText: Incomplete - def __init__(self, copyText: bool = ...) -> None: ... + def __init__(self, copyText: bool = False) -> None: ... def create( self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int ): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi index 9d0023fd6e58..34abe2d04e8c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi @@ -6,7 +6,7 @@ from antlr4.Token import Token as Token class CommonTokenStream(BufferedTokenStream): channel: Incomplete - def __init__(self, lexer: Lexer, channel: int = ...) -> None: ... + def __init__(self, lexer: Lexer, channel: int = 0) -> None: ... def adjustSeekIndex(self, i: int) -> int: ... def LB(self, k: int) -> Token | None: ... def LT(self, k: int) -> Token | None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi index 5b25bbaac9b8..8c515b467990 100644 --- a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi @@ -4,5 +4,5 @@ from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): fileName: Incomplete - def __init__(self, fileName: str, encoding: str = ..., errors: str = ...) -> None: ... - def readDataFrom(self, fileName: str, encoding: str, errors: str = ...): ... + def __init__(self, fileName: str, encoding: str = 'ascii', errors: str = 'strict') -> None: ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = 'strict'): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi index 038de21261c9..2a78146a481a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/ParserRuleContext.pyi @@ -28,7 +28,7 @@ class ParserRuleContext(RuleContext): def addTokenNode(self, token: Token): ... def addErrorNode(self, badToken: Token): ... def getChild(self, i: int, ttype: type | None = None): ... - def getChildren(self, predicate: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + def getChildren(self, predicate: Incomplete | None = None) -> Generator[Incomplete, None, None]: ... def getToken(self, ttype: int, i: int): ... def getTokens(self, ttype: int): ... def getTypedRuleContext(self, ctxType: type, i: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi index 17d4efaa5e3e..178d903f0dfe 100644 --- a/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/RuleContext.pyi @@ -10,7 +10,7 @@ class RuleContext(RuleNode): EMPTY: Incomplete parentCtx: Incomplete invokingState: Incomplete - def __init__(self, parent: RuleContext | None = None, invokingState: int = ...) -> None: ... 
+ def __init__(self, parent: RuleContext | None = None, invokingState: int = -1) -> None: ... def depth(self): ... def isEmpty(self): ... def getSourceInterval(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi b/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi index 2adec17b9328..29e94b2a803c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi @@ -1,4 +1,4 @@ from antlr4.InputStream import InputStream as InputStream class StdinStream(InputStream): - def __init__(self, encoding: str = ..., errors: str = ...) -> None: ... + def __init__(self, encoding: str = 'ascii', errors: str = 'strict') -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Token.pyi b/stubs/antlr4-python3-runtime/antlr4/Token.pyi index e3cfa875d437..8f93ed2564b1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Token.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Token.pyi @@ -35,11 +35,11 @@ class CommonToken(Token): column: Incomplete def __init__( self, - source: tuple[Incomplete, Incomplete] = ..., + source: tuple[Incomplete, Incomplete] = (None, None), type: int | None = None, - channel: int = ..., - start: int = ..., - stop: int = ..., + channel: int = 0, + start: int = -1, + stop: int = -1, ) -> None: ... def clone(self): ... @property diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index 87bc381128b6..fa2f9faca548 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -13,23 +13,23 @@ class TokenStreamRewriter: def __init__(self, tokens) -> None: ... def getTokenStream(self): ... def rollback(self, instruction_index, program_name) -> None: ... - def deleteProgram(self, program_name: Incomplete = ...) -> None: ... - def insertAfterToken(self, token, text, program_name: Incomplete = ...) -> None: ... - def insertAfter(self, index, text, program_name: Incomplete = ...) -> None: ... + def deleteProgram(self, program_name: Incomplete = 'default') -> None: ... + def insertAfterToken(self, token, text, program_name: Incomplete = 'default') -> None: ... + def insertAfter(self, index, text, program_name: Incomplete = 'default') -> None: ... def insertBeforeIndex(self, index, text) -> None: ... - def insertBeforeToken(self, token, text, program_name: Incomplete = ...) -> None: ... + def insertBeforeToken(self, token, text, program_name: Incomplete = 'default') -> None: ... def insertBefore(self, program_name, index, text) -> None: ... def replaceIndex(self, index, text) -> None: ... def replaceRange(self, from_idx, to_idx, text) -> None: ... def replaceSingleToken(self, token, text) -> None: ... def replaceRangeTokens( - self, from_token, to_token, text, program_name: Incomplete = ... + self, from_token, to_token, text, program_name: Incomplete = 'default' ) -> None: ... def replace(self, program_name, from_idx, to_idx, text) -> None: ... def deleteToken(self, token) -> None: ... def deleteIndex(self, index) -> None: ... def delete(self, program_name, from_idx, to_idx) -> None: ... - def lastRewriteTokenIndex(self, program_name: Incomplete = ...): ... + def lastRewriteTokenIndex(self, program_name: Incomplete = 'default'): ... def setLastRewriteTokenIndex(self, program_name, i) -> None: ... def getProgram(self, program_name): ... def getDefaultText(self): ... 
@@ -40,11 +40,11 @@ class TokenStreamRewriter: index: Incomplete text: Incomplete instructionIndex: int - def __init__(self, tokens, index, text: str = ...) -> None: ... + def __init__(self, tokens, index, text: str = '') -> None: ... def execute(self, buf): ... class InsertBeforeOp(RewriteOperation): - def __init__(self, tokens, index, text: str = ...) -> None: ... + def __init__(self, tokens, index, text: str = '') -> None: ... def execute(self, buf): ... class InsertAfterOp(InsertBeforeOp): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi index fc58fd8c8b8e..3e20e2da2e62 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNConfigSet.pyi @@ -22,9 +22,9 @@ class ATNConfigSet: hasSemanticContext: bool dipsIntoOuterContext: bool cachedHashCode: int - def __init__(self, fullCtx: bool = ...) -> None: ... + def __init__(self, fullCtx: bool = True) -> None: ... def __iter__(self): ... - def add(self, config: ATNConfig, mergeCache: Incomplete | None = ...): ... + def add(self, config: ATNConfig, mergeCache: Incomplete | None = None): ... def getOrAdd(self, config: ATNConfig): ... def getStates(self): ... def getPredicates(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index 029563f11482..a871147996cf 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -30,7 +30,7 @@ class ATNDeserializer: def stateIsEndStateFor(self, state: ATNState, idx: int): ... def markPrecedenceDecisions(self, atn: ATN): ... def verifyATN(self, atn: ATN): ... - def checkCondition(self, condition: bool, message: Incomplete | None = ...): ... + def checkCondition(self, condition: bool, message: Incomplete | None = None): ... def readInt(self): ... edgeFactories: Incomplete def edgeFactory( diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi index d24e1365e824..79b7e4818b3a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNState.pyi @@ -32,7 +32,7 @@ class ATNState: def __eq__(self, other): ... def onlyHasEpsilonTransitions(self): ... def isNonGreedyExitState(self): ... - def addTransition(self, trans: Transition, index: int = ...): ... + def addTransition(self, trans: Transition, index: int = -1): ... class BasicState(ATNState): stateType: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi index e2226687ae29..bfecb498beab 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi @@ -8,7 +8,7 @@ Lexer: Incomplete class LexerActionExecutor: lexerActions: Incomplete hashCode: Incomplete - def __init__(self, lexerActions: list[LexerAction] = ...) -> None: ... + def __init__(self, lexerActions: list[LexerAction] = []) -> None: ... @staticmethod def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... def fixOffsetBeforeMatch(self, offset: int): ... 
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi index 0cbd255d52e3..7fb59cd568f0 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/SemanticContext.pyi @@ -18,14 +18,14 @@ class Predicate(SemanticContext): ruleIndex: Incomplete predIndex: Incomplete isCtxDependent: Incomplete - def __init__(self, ruleIndex: int = ..., predIndex: int = ..., isCtxDependent: bool = ...) -> None: ... + def __init__(self, ruleIndex: int = -1, predIndex: int = -1, isCtxDependent: bool = False) -> None: ... def eval(self, parser: Recognizer, outerContext: RuleContext): ... def __hash__(self): ... def __eq__(self, other): ... class PrecedencePredicate(SemanticContext): precedence: Incomplete - def __init__(self, precedence: int = ...) -> None: ... + def __init__(self, precedence: int = 0) -> None: ... def eval(self, parser: Recognizer, outerContext: RuleContext): ... def evalPrecedence(self, parser: Recognizer, outerContext: RuleContext): ... def __lt__(self, other): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi index 79f11e6d695d..1f50f4e5eade 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi @@ -44,7 +44,7 @@ class EpsilonTransition(Transition): serializationType: Incomplete isEpsilon: bool outermostPrecedenceReturn: Incomplete - def __init__(self, target, outermostPrecedenceReturn: int = ...) -> None: ... + def __init__(self, target, outermostPrecedenceReturn: int = -1) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class RangeTransition(Transition): @@ -75,7 +75,7 @@ class ActionTransition(Transition): actionIndex: Incomplete isCtxDependent: Incomplete isEpsilon: bool - def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = ..., isCtxDependent: bool = ...) -> None: ... + def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = -1, isCtxDependent: bool = False) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class SetTransition(Transition): diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi index 02c54121d632..86abf9a72b44 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFA.pyi @@ -10,7 +10,7 @@ class DFA: decision: Incomplete s0: Incomplete precedenceDfa: bool - def __init__(self, atnStartState: DecisionState, decision: int = ...) -> None: ... + def __init__(self, atnStartState: DecisionState, decision: int = 0) -> None: ... def getPrecedenceStartState(self, precedence: int): ... def setPrecedenceStartState(self, precedence: int, startState: DFAState): ... def setPrecedenceDfa(self, precedenceDfa: bool): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi index 752bbd6929a0..d70d1a38c7b2 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFAState.pyi @@ -17,7 +17,7 @@ class DFAState: lexerActionExecutor: Incomplete requiresFullContext: bool predicates: Incomplete - def __init__(self, stateNumber: int = ..., configs: ATNConfigSet = ...) -> None: ... + def __init__(self, stateNumber: int = -1, configs: ATNConfigSet = ...) -> None: ... 
def getAltSet(self): ... def __hash__(self): ... def __eq__(self, other): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index dc9966c7564a..1c68ad2c2b3d 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -6,7 +6,7 @@ from antlr4.error.ErrorListener import ErrorListener as ErrorListener class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete - def __init__(self, exactOnly: bool = ...) -> None: ... + def __init__(self, exactOnly: bool = True) -> None: ... def reportAmbiguity( self, recognizer, From 8b9085f4254966259cd9e0824bf9e60c675089b4 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Thu, 28 Dec 2023 12:52:19 +0000 Subject: [PATCH 18/34] Run linters --- .../antlr4/FileStream.pyi | 4 +- .../antlr4/IntervalSet.pyi | 1 - .../antlr4-python3-runtime/antlr4/Parser.pyi | 4 +- .../antlr4/StdinStream.pyi | 2 +- .../antlr4/TokenStreamRewriter.pyi | 18 ++-- stubs/antlr4-python3-runtime/antlr4/Utils.pyi | 1 - .../antlr4-python3-runtime/antlr4/_pygrun.pyi | 1 - .../antlr4/atn/ATNDeserializer.pyi | 4 +- .../antlr4/atn/ATNType.pyi | 1 - .../antlr4/atn/ParserATNSimulator.pyi | 20 +--- .../antlr4/atn/PredictionMode.pyi | 1 - .../antlr4/error/DiagnosticErrorListener.pyi | 9 +- .../antlr4/error/ErrorListener.pyi | 92 ++----------------- 13 files changed, 28 insertions(+), 130 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi index 8c515b467990..c55094a91456 100644 --- a/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/FileStream.pyi @@ -4,5 +4,5 @@ from antlr4.InputStream import InputStream as InputStream class FileStream(InputStream): fileName: Incomplete - def __init__(self, fileName: str, encoding: str = 'ascii', errors: str = 'strict') -> None: ... - def readDataFrom(self, fileName: str, encoding: str, errors: str = 'strict'): ... + def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ... + def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi index 2653e17ee2ab..1111a1e14724 100644 --- a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from antlr4.Token import Token as Token diff --git a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi index 037b37980734..2a89284c5a68 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Parser.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Parser.pyi @@ -49,9 +49,7 @@ class Parser(Recognizer): def getTokenStream(self): ... def setTokenStream(self, input: TokenStream): ... def getCurrentToken(self): ... - def notifyErrorListeners( - self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None - ): ... + def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ... def consume(self): ... def addContextToParseTree(self) -> None: ... 
state: Incomplete diff --git a/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi b/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi index 29e94b2a803c..54d3522b9a03 100644 --- a/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/StdinStream.pyi @@ -1,4 +1,4 @@ from antlr4.InputStream import InputStream as InputStream class StdinStream(InputStream): - def __init__(self, encoding: str = 'ascii', errors: str = 'strict') -> None: ... + def __init__(self, encoding: str = "ascii", errors: str = "strict") -> None: ... diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index fa2f9faca548..89f9dee33eca 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -13,23 +13,21 @@ class TokenStreamRewriter: def __init__(self, tokens) -> None: ... def getTokenStream(self): ... def rollback(self, instruction_index, program_name) -> None: ... - def deleteProgram(self, program_name: Incomplete = 'default') -> None: ... - def insertAfterToken(self, token, text, program_name: Incomplete = 'default') -> None: ... - def insertAfter(self, index, text, program_name: Incomplete = 'default') -> None: ... + def deleteProgram(self, program_name: Incomplete = "default") -> None: ... + def insertAfterToken(self, token, text, program_name: Incomplete = "default") -> None: ... + def insertAfter(self, index, text, program_name: Incomplete = "default") -> None: ... def insertBeforeIndex(self, index, text) -> None: ... - def insertBeforeToken(self, token, text, program_name: Incomplete = 'default') -> None: ... + def insertBeforeToken(self, token, text, program_name: Incomplete = "default") -> None: ... def insertBefore(self, program_name, index, text) -> None: ... def replaceIndex(self, index, text) -> None: ... def replaceRange(self, from_idx, to_idx, text) -> None: ... def replaceSingleToken(self, token, text) -> None: ... - def replaceRangeTokens( - self, from_token, to_token, text, program_name: Incomplete = 'default' - ) -> None: ... + def replaceRangeTokens(self, from_token, to_token, text, program_name: Incomplete = "default") -> None: ... def replace(self, program_name, from_idx, to_idx, text) -> None: ... def deleteToken(self, token) -> None: ... def deleteIndex(self, index) -> None: ... def delete(self, program_name, from_idx, to_idx) -> None: ... - def lastRewriteTokenIndex(self, program_name: Incomplete = 'default'): ... + def lastRewriteTokenIndex(self, program_name: Incomplete = "default"): ... def setLastRewriteTokenIndex(self, program_name, i) -> None: ... def getProgram(self, program_name): ... def getDefaultText(self): ... @@ -40,11 +38,11 @@ class TokenStreamRewriter: index: Incomplete text: Incomplete instructionIndex: int - def __init__(self, tokens, index, text: str = '') -> None: ... + def __init__(self, tokens, index, text: str = "") -> None: ... def execute(self, buf): ... class InsertBeforeOp(RewriteOperation): - def __init__(self, tokens, index, text: str = '') -> None: ... + def __init__(self, tokens, index, text: str = "") -> None: ... def execute(self, buf): ... class InsertAfterOp(InsertBeforeOp): ... 
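[Editor's sketch, not part of the patch] The TokenStreamRewriter hunks above only reflow long signatures and normalize string quotes, but they are a convenient point to sanity-check the stubbed API against real usage. The snippet below is illustrative only: it assumes the antlr4-python3-runtime package itself is installed (XPathLexer ships with the runtime, so no generated grammar is needed) and that BufferedTokenStream.fill() behaves as in the upstream runtime.

    from antlr4.CommonTokenStream import CommonTokenStream
    from antlr4.InputStream import InputStream
    from antlr4.TokenStreamRewriter import TokenStreamRewriter
    from antlr4.xpath.XPathLexer import XPathLexer

    # Lex a small XPath expression and buffer every token.
    tokens = CommonTokenStream(XPathLexer(InputStream("//expr/ID")))
    tokens.fill()

    # Queue an edit against the default rewrite program and render the result.
    rewriter = TokenStreamRewriter(tokens)
    rewriter.insertBeforeIndex(0, "! ")
    print(rewriter.getDefaultText())  # should print the original text with "! " prepended

A quick script like this doubles as a smoke test that the stubbed signatures accept the same calls the runtime does.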
diff --git a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi index f9c25e1591a2..d6286d3634be 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete def str_list(val) -> str: ... def escapeWhitespace(s: str, escapeSpaces: bool): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi index 9ff24621acd5..00fae50ae935 100644 --- a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from antlr4 import * diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi index a871147996cf..fbb06ad67f97 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNDeserializer.pyi @@ -33,9 +33,7 @@ class ATNDeserializer: def checkCondition(self, condition: bool, message: Incomplete | None = None): ... def readInt(self): ... edgeFactories: Incomplete - def edgeFactory( - self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete] - ): ... + def edgeFactory(self, atn: ATN, type: int, src: int, trg: int, arg1: int, arg2: int, arg3: int, sets: list[Incomplete]): ... stateFactories: Incomplete def stateFactory(self, type: int, ruleIndex: int): ... CHANNEL: int diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi index bec1f1bb056e..888deeb2be1e 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ATNType.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from enum import IntEnum class ATNType(IntEnum): diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index f663efeafc38..824700785e60 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -47,9 +47,7 @@ class ParserATNSimulator(ATNSimulator): ) -> None: ... def reset(self) -> None: ... def adaptivePredict(self, input: TokenStream, decision: int, outerContext: ParserRuleContext): ... - def execATN( - self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext - ): ... + def execATN(self, dfa: DFA, s0: DFAState, input: TokenStream, startIndex: int, outerContext: ParserRuleContext): ... def getExistingTargetState(self, previousD: DFAState, t: int): ... def computeTargetState(self, dfa: DFA, previousD: DFAState, t: int): ... def predicateDFAState(self, dfaState: DFAState, decisionState: DecisionState): ... @@ -63,14 +61,10 @@ class ParserATNSimulator(ATNSimulator): def getReachableTarget(self, trans: Transition, ttype: int): ... def getPredsForAmbigAlts(self, ambigAlts: set[int], configs: ATNConfigSet, nalts: int): ... def getPredicatePredictions(self, ambigAlts: set[int], altToPred: list[int]): ... - def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule( - self, configs: ATNConfigSet, outerContext: ParserRuleContext - ): ... + def getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... def getAltThatFinishedDecisionEntryRule(self, configs: ATNConfigSet): ... 
def splitAccordingToSemanticValidity(self, configs: ATNConfigSet, outerContext: ParserRuleContext): ... - def evalSemanticContext( - self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool - ): ... + def evalSemanticContext(self, predPredictions: list[Incomplete], outerContext: ParserRuleContext, complete: bool): ... def closure( self, config: ATNConfig, @@ -119,18 +113,14 @@ class ParserATNSimulator(ATNSimulator): def getTokenName(self, t: int): ... def getLookaheadName(self, input: TokenStream): ... def dumpDeadEndConfigs(self, nvae: NoViableAltException): ... - def noViableAlt( - self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int - ): ... + def noViableAlt(self, input: TokenStream, outerContext: ParserRuleContext, configs: ATNConfigSet, startIndex: int): ... def getUniqueAlt(self, configs: ATNConfigSet): ... def addDFAEdge(self, dfa: DFA, from_: DFAState, t: int, to: DFAState): ... def addDFAState(self, dfa: DFA, D: DFAState): ... def reportAttemptingFullContext( self, dfa: DFA, conflictingAlts: set[Incomplete], configs: ATNConfigSet, startIndex: int, stopIndex: int ): ... - def reportContextSensitivity( - self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int - ): ... + def reportContextSensitivity(self, dfa: DFA, prediction: int, configs: ATNConfigSet, startIndex: int, stopIndex: int): ... def reportAmbiguity( self, dfa: DFA, diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 675328d3cbf4..09a04cc5b11a 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -1,4 +1,3 @@ -from _typeshed import Incomplete from enum import Enum from antlr4.atn.ATN import ATN as ATN diff --git a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi index 1c68ad2c2b3d..750c04100245 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/DiagnosticErrorListener.pyi @@ -8,14 +8,7 @@ class DiagnosticErrorListener(ErrorListener): exactOnly: Incomplete def __init__(self, exactOnly: bool = True) -> None: ... def reportAmbiguity( - self, - recognizer, - dfa: DFA, - startIndex: int, - stopIndex: int, - exact: bool, - ambigAlts: set[int], - configs: ATNConfigSet, + self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, exact: bool, ambigAlts: set[int], configs: ATNConfigSet ): ... def reportAttemptingFullContext( self, recognizer, dfa: DFA, startIndex: int, stopIndex: int, conflictingAlts: set[int], configs: ATNConfigSet diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi index 266a6ae345f5..34482bde9131 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorListener.pyi @@ -1,93 +1,19 @@ from _typeshed import Incomplete class ErrorListener: - def syntaxError( - self, - recognizer, - offendingSymbol, - line, - column, - msg, - e, - ) -> None: ... - def reportAmbiguity( - self, - recognizer, - dfa, - startIndex, - stopIndex, - exact, - ambigAlts, - configs, - ) -> None: ... - def reportAttemptingFullContext( - self, - recognizer, - dfa, - startIndex, - stopIndex, - conflictingAlts, - configs, - ) -> None: ... 
- def reportContextSensitivity( - self, - recognizer, - dfa, - startIndex, - stopIndex, - prediction, - configs, - ) -> None: ... + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... + def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... + def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... class ConsoleErrorListener(ErrorListener): INSTANCE: Incomplete - def syntaxError( - self, - recognizer, - offendingSymbol, - line, - column, - msg, - e, - ) -> None: ... + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... class ProxyErrorListener(ErrorListener): delegates: Incomplete def __init__(self, delegates) -> None: ... - def syntaxError( - self, - recognizer, - offendingSymbol, - line, - column, - msg, - e, - ) -> None: ... - def reportAmbiguity( - self, - recognizer, - dfa, - startIndex, - stopIndex, - exact, - ambigAlts, - configs, - ) -> None: ... - def reportAttemptingFullContext( - self, - recognizer, - dfa, - startIndex, - stopIndex, - conflictingAlts, - configs, - ) -> None: ... - def reportContextSensitivity( - self, - recognizer, - dfa, - startIndex, - stopIndex, - prediction, - configs, - ) -> None: ... + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e) -> None: ... + def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) -> None: ... + def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) -> None: ... + def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs) -> None: ... From d92896b0664329117f37222052a337d77f999612 Mon Sep 17 00:00:00 2001 From: AlexWaygood Date: Thu, 28 Dec 2023 12:55:46 +0000 Subject: [PATCH 19/34] More codemodding --- stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi | 1 - .../antlr4/TokenStreamRewriter.pyi | 12 ++++++------ stubs/antlr4-python3-runtime/antlr4/Utils.pyi | 1 - stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi | 1 - 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi index 1111a1e14724..6a6d56fdf83b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi @@ -1,4 +1,3 @@ - from antlr4.Token import Token as Token class IntervalSet: diff --git a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi index 89f9dee33eca..92aa2a6ccc69 100644 --- a/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/TokenStreamRewriter.pyi @@ -13,21 +13,21 @@ class TokenStreamRewriter: def __init__(self, tokens) -> None: ... def getTokenStream(self): ... def rollback(self, instruction_index, program_name) -> None: ... - def deleteProgram(self, program_name: Incomplete = "default") -> None: ... - def insertAfterToken(self, token, text, program_name: Incomplete = "default") -> None: ... - def insertAfter(self, index, text, program_name: Incomplete = "default") -> None: ... + def deleteProgram(self, program_name="default") -> None: ... + def insertAfterToken(self, token, text, program_name="default") -> None: ... 
+ def insertAfter(self, index, text, program_name="default") -> None: ... def insertBeforeIndex(self, index, text) -> None: ... - def insertBeforeToken(self, token, text, program_name: Incomplete = "default") -> None: ... + def insertBeforeToken(self, token, text, program_name="default") -> None: ... def insertBefore(self, program_name, index, text) -> None: ... def replaceIndex(self, index, text) -> None: ... def replaceRange(self, from_idx, to_idx, text) -> None: ... def replaceSingleToken(self, token, text) -> None: ... - def replaceRangeTokens(self, from_token, to_token, text, program_name: Incomplete = "default") -> None: ... + def replaceRangeTokens(self, from_token, to_token, text, program_name="default") -> None: ... def replace(self, program_name, from_idx, to_idx, text) -> None: ... def deleteToken(self, token) -> None: ... def deleteIndex(self, index) -> None: ... def delete(self, program_name, from_idx, to_idx) -> None: ... - def lastRewriteTokenIndex(self, program_name: Incomplete = "default"): ... + def lastRewriteTokenIndex(self, program_name="default"): ... def setLastRewriteTokenIndex(self, program_name, i) -> None: ... def getProgram(self, program_name): ... def getDefaultText(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi index d6286d3634be..6c0130df5642 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Utils.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Utils.pyi @@ -1,3 +1,2 @@ - def str_list(val) -> str: ... def escapeWhitespace(s: str, escapeSpaces: bool): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi index 00fae50ae935..c45632a1e4d1 100644 --- a/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/_pygrun.pyi @@ -1,4 +1,3 @@ - from antlr4 import * def beautify_lisp_string(in_string): ... From 1add9afa0098d9b918a63a93bc2c58f64f44351b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 28 Dec 2023 12:58:44 +0000 Subject: [PATCH 20/34] Update pyrightconfig.stricter.json --- pyrightconfig.stricter.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json index 7f85e397c621..3c69595a7eed 100644 --- a/pyrightconfig.stricter.json +++ b/pyrightconfig.stricter.json @@ -23,7 +23,7 @@ "stdlib/xml/dom/minidom.pyi", "stdlib/xml/dom/pulldom.pyi", "stdlib/xml/sax", - "stubs/antlr4", + "stubs/antlr4-python3-runtime", "stubs/aws-xray-sdk", "stubs/beautifulsoup4", "stubs/bleach", From bb6e3c8cac56b8f58c0755e9038fe29f6ebcc603 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Thu, 28 Dec 2023 08:54:02 -0500 Subject: [PATCH 21/34] Update stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi Co-authored-by: Alex Waygood --- stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi index a425807dde20..5887446c9d92 100644 --- a/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi @@ -9,6 +9,6 @@ class CommonTokenFactory(TokenFactory): copyText: Incomplete def __init__(self, copyText: bool = False) -> None: ... 
def create( - self, source: tuple[Incomplete], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int + self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int ): ... def createThin(self, type: int, text: str): ... From 2dd61a65052766edf023aa88383490689c3cf634 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Thu, 28 Dec 2023 10:52:52 -0500 Subject: [PATCH 22/34] =?UTF-8?q?Removed=20=E2=80=9Cremoved=E2=80=9D=20cla?= =?UTF-8?q?sses?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../antlr4/Recognizer.pyi | 4 +- .../antlr4/atn/LexerATNSimulator.pyi | 4 +- .../antlr4/atn/LexerActionExecutor.pyi | 4 +- .../antlr4/atn/Transition.pyi | 28 +++++---- .../antlr4/error/ErrorStrategy.pyi | 58 +++++++++---------- .../antlr4/error/Errors.pyi | 27 +++------ .../antlr4/tree/Tree.pyi | 6 +- 7 files changed, 55 insertions(+), 76 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi index c5e882a19b31..b671ec91105f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/Recognizer.pyi @@ -4,8 +4,6 @@ from antlr4.error.ErrorListener import ConsoleErrorListener as ConsoleErrorListe from antlr4.RuleContext import RuleContext as RuleContext from antlr4.Token import Token as Token -RecognitionException: Incomplete - class Recognizer: tokenTypeMapCache: Incomplete ruleIndexMapCache: Incomplete @@ -18,7 +16,7 @@ class Recognizer: def getTokenTypeMap(self): ... def getRuleIndexMap(self): ... def getTokenType(self, tokenName: str): ... - def getErrorHeader(self, e: RecognitionException): ... + def getErrorHeader(self, e): ... def getTokenErrorDisplay(self, t: Token): ... def getErrorListenerDispatch(self): ... def sempred(self, localctx: RuleContext, ruleIndex: int, actionIndex: int): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi index 9b965255050c..f7bd7e42de2b 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerATNSimulator.pyi @@ -29,8 +29,6 @@ class SimState: dfaState: Incomplete def reset(self) -> None: ... -Lexer: Incomplete - class LexerATNSimulator(ATNSimulator): debug: bool dfa_debug: bool @@ -46,7 +44,7 @@ class LexerATNSimulator(ATNSimulator): DEFAULT_MODE: Incomplete MAX_CHAR_VALUE: Incomplete prevAccept: Incomplete - def __init__(self, recog: Lexer, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... + def __init__(self, recog, atn: ATN, decisionToDFA: list[DFA], sharedContextCache: PredictionContextCache) -> None: ... def copyState(self, simulator: LexerATNSimulator): ... def match(self, input: InputStream, mode: int): ... def reset(self) -> None: ... 
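[Editor's sketch, not part of the patch] A few hunks up, PATCH 21 tightens the source parameter of CommonTokenFactory.create from tuple[Incomplete] to tuple[Incomplete, ...]. For readers skimming the diff, a minimal reminder of what that spelling change means in the type system (pure typing illustration, unrelated to antlr4 itself):

    # tuple[X] means "a tuple of exactly one X"; tuple[X, ...] means a
    # homogeneous tuple of X of any length, which loosely covers the
    # (token source, input stream) pair the runtime passes here.
    one: tuple[int] = (1,)          # only a 1-tuple satisfies this annotation
    many: tuple[int, ...] = (1, 2)  # any length is fine, including ()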
diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi index bfecb498beab..2cfa6d63c2de 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/LexerActionExecutor.pyi @@ -3,8 +3,6 @@ from _typeshed import Incomplete from antlr4.atn.LexerAction import LexerAction as LexerAction, LexerIndexedCustomAction as LexerIndexedCustomAction from antlr4.InputStream import InputStream as InputStream -Lexer: Incomplete - class LexerActionExecutor: lexerActions: Incomplete hashCode: Incomplete @@ -12,6 +10,6 @@ class LexerActionExecutor: @staticmethod def append(lexerActionExecutor: LexerActionExecutor, lexerAction: LexerAction): ... def fixOffsetBeforeMatch(self, offset: int): ... - def execute(self, lexer: Lexer, input: InputStream, startIndex: int): ... + def execute(self, lexer, input: InputStream, startIndex: int): ... def __hash__(self): ... def __eq__(self, other): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi index 1f50f4e5eade..9e2a8a57bcde 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/Transition.pyi @@ -1,9 +1,7 @@ from _typeshed import Incomplete -from antlr4.atn.ATNState import * -from antlr4.atn.SemanticContext import PrecedencePredicate as PrecedencePredicate, Predicate as Predicate -from antlr4.IntervalSet import IntervalSet as IntervalSet -from antlr4.Token import Token as Token +from antlr4.atn.ATNState import RuleStartState +from antlr4.IntervalSet import IntervalSet class Transition: EPSILON: int @@ -21,13 +19,13 @@ class Transition: target: Incomplete isEpsilon: bool label: Incomplete - def __init__(self, target: ATNState) -> None: ... + def __init__(self, target) -> None: ... class AtomTransition(Transition): label_: Incomplete label: Incomplete serializationType: Incomplete - def __init__(self, target: ATNState, label: int) -> None: ... + def __init__(self, target, label: int) -> None: ... def makeLabel(self): ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... @@ -37,7 +35,7 @@ class RuleTransition(Transition): followState: Incomplete serializationType: Incomplete isEpsilon: bool - def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState: ATNState) -> None: ... + def __init__(self, ruleStart: RuleStartState, ruleIndex: int, precedence: int, followState) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class EpsilonTransition(Transition): @@ -52,12 +50,12 @@ class RangeTransition(Transition): start: Incomplete stop: Incomplete label: Incomplete - def __init__(self, target: ATNState, start: int, stop: int) -> None: ... + def __init__(self, target, start: int, stop: int) -> None: ... def makeLabel(self): ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class AbstractPredicateTransition(Transition): - def __init__(self, target: ATNState) -> None: ... + def __init__(self, target) -> None: ... class PredicateTransition(AbstractPredicateTransition): serializationType: Incomplete @@ -65,7 +63,7 @@ class PredicateTransition(AbstractPredicateTransition): predIndex: Incomplete isCtxDependent: Incomplete isEpsilon: bool - def __init__(self, target: ATNState, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ... 
+ def __init__(self, target, ruleIndex: int, predIndex: int, isCtxDependent: bool) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... def getPredicate(self): ... @@ -75,29 +73,29 @@ class ActionTransition(Transition): actionIndex: Incomplete isCtxDependent: Incomplete isEpsilon: bool - def __init__(self, target: ATNState, ruleIndex: int, actionIndex: int = -1, isCtxDependent: bool = False) -> None: ... + def __init__(self, target, ruleIndex: int, actionIndex: int = -1, isCtxDependent: bool = False) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class SetTransition(Transition): serializationType: Incomplete label: Incomplete - def __init__(self, target: ATNState, set: IntervalSet) -> None: ... + def __init__(self, target, set: IntervalSet) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class NotSetTransition(SetTransition): serializationType: Incomplete - def __init__(self, target: ATNState, set: IntervalSet) -> None: ... + def __init__(self, target, set: IntervalSet) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class WildcardTransition(Transition): serializationType: Incomplete - def __init__(self, target: ATNState) -> None: ... + def __init__(self, target) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... class PrecedencePredicateTransition(AbstractPredicateTransition): serializationType: Incomplete precedence: Incomplete isEpsilon: bool - def __init__(self, target: ATNState, precedence: int) -> None: ... + def __init__(self, target, precedence: int) -> None: ... def matches(self, symbol: int, minVocabSymbol: int, maxVocabSymbol: int): ... def getPredicate(self): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi index 6b7ea7128d93..cde0e754c0d8 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/ErrorStrategy.pyi @@ -11,15 +11,13 @@ from antlr4.error.Errors import ( from antlr4.IntervalSet import IntervalSet as IntervalSet from antlr4.Token import Token as Token -Parser: Incomplete - class ErrorStrategy: - def reset(self, recognizer: Parser): ... - def recoverInline(self, recognizer: Parser): ... - def recover(self, recognizer: Parser, e: RecognitionException): ... - def sync(self, recognizer: Parser): ... - def inErrorRecoveryMode(self, recognizer: Parser): ... - def reportError(self, recognizer: Parser, e: RecognitionException): ... + def reset(self, recognizer): ... + def recoverInline(self, recognizer): ... + def recover(self, recognizer, e: RecognitionException): ... + def sync(self, recognizer): ... + def inErrorRecoveryMode(self, recognizer): ... + def reportError(self, recognizer, e: RecognitionException): ... class DefaultErrorStrategy(ErrorStrategy): errorRecoveryMode: bool @@ -28,31 +26,31 @@ class DefaultErrorStrategy(ErrorStrategy): nextTokensContext: Incomplete nextTokenState: int def __init__(self) -> None: ... - def reset(self, recognizer: Parser): ... - def beginErrorCondition(self, recognizer: Parser): ... - def inErrorRecoveryMode(self, recognizer: Parser): ... - def endErrorCondition(self, recognizer: Parser): ... - def reportMatch(self, recognizer: Parser): ... - def reportError(self, recognizer: Parser, e: RecognitionException): ... - def recover(self, recognizer: Parser, e: RecognitionException): ... 
+ def reset(self, recognizer): ... + def beginErrorCondition(self, recognizer): ... + def inErrorRecoveryMode(self, recognizer): ... + def endErrorCondition(self, recognizer): ... + def reportMatch(self, recognizer): ... + def reportError(self, recognizer, e: RecognitionException): ... + def recover(self, recognizer, e: RecognitionException): ... nextTokensState: Incomplete - def sync(self, recognizer: Parser): ... - def reportNoViableAlternative(self, recognizer: Parser, e: NoViableAltException): ... - def reportInputMismatch(self, recognizer: Parser, e: InputMismatchException): ... + def sync(self, recognizer): ... + def reportNoViableAlternative(self, recognizer, e: NoViableAltException): ... + def reportInputMismatch(self, recognizer, e: InputMismatchException): ... def reportFailedPredicate(self, recognizer, e) -> None: ... - def reportUnwantedToken(self, recognizer: Parser): ... - def reportMissingToken(self, recognizer: Parser): ... - def recoverInline(self, recognizer: Parser): ... - def singleTokenInsertion(self, recognizer: Parser): ... - def singleTokenDeletion(self, recognizer: Parser): ... - def getMissingSymbol(self, recognizer: Parser): ... - def getExpectedTokens(self, recognizer: Parser): ... + def reportUnwantedToken(self, recognizer): ... + def reportMissingToken(self, recognizer): ... + def recoverInline(self, recognizer): ... + def singleTokenInsertion(self, recognizer): ... + def singleTokenDeletion(self, recognizer): ... + def getMissingSymbol(self, recognizer): ... + def getExpectedTokens(self, recognizer): ... def getTokenErrorDisplay(self, t: Token): ... def escapeWSAndQuote(self, s: str): ... - def getErrorRecoverySet(self, recognizer: Parser): ... - def consumeUntil(self, recognizer: Parser, set_: set[int]): ... + def getErrorRecoverySet(self, recognizer): ... + def consumeUntil(self, recognizer, set_: set[int]): ... class BailErrorStrategy(DefaultErrorStrategy): - def recover(self, recognizer: Parser, e: RecognitionException): ... - def recoverInline(self, recognizer: Parser): ... - def sync(self, recognizer: Parser): ... + def recover(self, recognizer, e: RecognitionException): ... + def recoverInline(self, recognizer): ... + def sync(self, recognizer): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi index bb6293d3e19e..e61dabafd2b7 100644 --- a/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/error/Errors.pyi @@ -4,15 +4,6 @@ from antlr4.InputStream import InputStream as InputStream from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext from antlr4.Recognizer import Recognizer as Recognizer -Token: Incomplete -Parser: Incomplete -Lexer: Incomplete -TokenStream: Incomplete -ATNConfigSet: Incomplete -ParserRulecontext: Incomplete -PredicateTransition: Incomplete -BufferedTokenStream: Incomplete - class UnsupportedOperationException(Exception): def __init__(self, msg: str) -> None: ... @@ -34,7 +25,7 @@ class RecognitionException(Exception): message: str | None = None, recognizer: Recognizer | None = None, input: InputStream | None = None, - ctx: ParserRulecontext | None = None, + ctx: Incomplete | None = None, ) -> None: ... def getExpectedTokens(self): ... @@ -42,7 +33,7 @@ class LexerNoViableAltException(RecognitionException): startIndex: Incomplete deadEndConfigs: Incomplete message: str - def __init__(self, lexer: Lexer, input: InputStream, startIndex: int, deadEndConfigs: ATNConfigSet) -> None: ... 
+ def __init__(self, lexer, input: InputStream, startIndex: int, deadEndConfigs) -> None: ... class NoViableAltException(RecognitionException): deadEndConfigs: Incomplete @@ -50,24 +41,24 @@ class NoViableAltException(RecognitionException): offendingToken: Incomplete def __init__( self, - recognizer: Parser, - input: TokenStream | None = None, - startToken: Token | None = None, - offendingToken: Token | None = None, - deadEndConfigs: ATNConfigSet | None = None, + recognizer: Incomplete, + input: Incomplete | None = None, + startToken: Incomplete | None = None, + offendingToken: Incomplete | None = None, + deadEndConfigs: Incomplete | None = None, ctx: ParserRuleContext | None = None, ) -> None: ... class InputMismatchException(RecognitionException): offendingToken: Incomplete - def __init__(self, recognizer: Parser) -> None: ... + def __init__(self, recognizer) -> None: ... class FailedPredicateException(RecognitionException): ruleIndex: Incomplete predicateIndex: Incomplete predicate: Incomplete offendingToken: Incomplete - def __init__(self, recognizer: Parser, predicate: str | None = None, message: str | None = None) -> None: ... + def __init__(self, recognizer, predicate: str | None = None, message: str | None = None) -> None: ... def formatMessage(self, predicate: str, message: str): ... class ParseCancellationException(CancellationException): ... diff --git a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi index d056082b2531..8ec130b45133 100644 --- a/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/tree/Tree.pyi @@ -20,13 +20,11 @@ class ParseTreeVisitor: def aggregateResult(self, aggregate, nextResult): ... def shouldVisitNextChild(self, node, currentResult): ... -ParserRuleContext: Incomplete - class ParseTreeListener: def visitTerminal(self, node: TerminalNode): ... def visitErrorNode(self, node: ErrorNode): ... - def enterEveryRule(self, ctx: ParserRuleContext): ... - def exitEveryRule(self, ctx: ParserRuleContext): ... + def enterEveryRule(self, ctx): ... + def exitEveryRule(self, ctx): ... class TerminalNodeImpl(TerminalNode): parentCtx: Incomplete From eb23c597bda17841470789758a23b735f98c7158 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 08:59:28 -0500 Subject: [PATCH 23/34] Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi Co-authored-by: Sebastian Rittau --- .../antlr4/atn/PredictionMode.pyi | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 09a04cc5b11a..4812203e59cc 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -17,19 +17,19 @@ class PredictionMode(Enum): @classmethod def allConfigsInRuleStopStates(cls, configs: ATNConfigSet): ... @classmethod - def resolvesToJustOneViableAlt(cls, altsets: list[set[int]]): ... + def resolvesToJustOneViableAlt(cls, altsets: Sequence[set[int]]): ... @classmethod - def allSubsetsConflict(cls, altsets: list[set[int]]): ... + def allSubsetsConflict(cls, altsets: Sequence[set[int]]): ... @classmethod - def hasNonConflictingAltSet(cls, altsets: list[set[int]]): ... + def hasNonConflictingAltSet(cls, altsets: Sequence[set[int]]): ... @classmethod - def hasConflictingAltSet(cls, altsets: list[set[int]]): ... 
+ def hasConflictingAltSet(cls, altsets: Sequence[set[int]]): ... @classmethod - def allSubsetsEqual(cls, altsets: list[set[int]]): ... + def allSubsetsEqual(cls, altsets: Sequence[set[int]]): ... @classmethod - def getUniqueAlt(cls, altsets: list[set[int]]): ... + def getUniqueAlt(cls, altsets: Sequence[set[int]]): ... @classmethod - def getAlts(cls, altsets: list[set[int]]): ... + def getAlts(cls, altsets: Sequence[set[int]]): ... @classmethod def getConflictingAltSubsets(cls, configs: ATNConfigSet): ... @classmethod From aa7462889e36fb8b873fce9337b55bd6b641e058 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:00:07 -0500 Subject: [PATCH 24/34] Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 255a682a3a16..f074b629b09c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -61,7 +61,7 @@ def merge( a: PredictionContext, b: PredictionContext, rootIsWildcard: bool, - mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], + mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None, ): ... def mergeSingletons( a: SingletonPredictionContext, From 6fdd3bfd68ff2b496f79fe5694856afe4883dabc Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:00:25 -0500 Subject: [PATCH 25/34] Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index f074b629b09c..afef79b47f1c 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -76,7 +76,7 @@ def mergeArrays( rootIsWildcard: bool, mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], ): ... -def combineCommonParents(parents: list[PredictionContext]): ... +def combineCommonParents(parents: SupportsLenAndGetItem[PredictionContext]): ... def getCachedPredictionContext( context: PredictionContext, contextCache: PredictionContextCache, visited: dict[PredictionContext, PredictionContext] ): ... From fd85b9af4d5488203682889a3c6341097402b814 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:00:39 -0500 Subject: [PATCH 26/34] Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index afef79b47f1c..ddc9e401bfa3 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -67,7 +67,7 @@ def mergeSingletons( a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool, - mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], + mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None, ): ... 
def mergeRoot(a: SingletonPredictionContext, b: SingletonPredictionContext, rootIsWildcard: bool): ... def mergeArrays( From e7cf59aee87ee52acf984d77069e4eb1c3df7336 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:00:50 -0500 Subject: [PATCH 27/34] Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index ddc9e401bfa3..65281b7c74c0 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -74,7 +74,7 @@ def mergeArrays( a: ArrayPredictionContext, b: ArrayPredictionContext, rootIsWildcard: bool, - mergeCache: dict[tuple[Incomplete], SingletonPredictionContext], + mergeCache: dict[tuple[Incomplete, Incomplete], SingletonPredictionContext] | None, ): ... def combineCommonParents(parents: SupportsLenAndGetItem[PredictionContext]): ... def getCachedPredictionContext( From 8d82f89c0b79ed45933c149dd76660a600daed50 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:01:03 -0500 Subject: [PATCH 28/34] Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 65281b7c74c0..2d4e232ed0ef 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -1,4 +1,5 @@ -from _typeshed import Incomplete +from collections.abc import MutableMapping +from _typeshed import Incomplete, SupportsLenAndGetItem from antlr4.atn.ATN import ATN as ATN from antlr4.error.Errors import IllegalStateException as IllegalStateException From d12e60ae78e7c81238038fada779e9cea50d5415 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:02:45 +0000 Subject: [PATCH 29/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi | 1 - 1 file changed, 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi index 2d4e232ed0ef..93fe47f89eb5 100644 --- a/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi @@ -1,4 +1,3 @@ -from collections.abc import MutableMapping from _typeshed import Incomplete, SupportsLenAndGetItem from antlr4.atn.ATN import ATN as ATN From 74e3243bb9548103b49715f5d4586da738980dba Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:08:04 -0500 Subject: [PATCH 30/34] Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 4812203e59cc..40c9f5e8e11f 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ 
b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -37,4 +37,4 @@ class PredictionMode(Enum): @classmethod def hasStateAssociatedWithOneAlt(cls, configs: ATNConfigSet): ... @classmethod - def getSingleViableAlt(cls, altsets: list[set[int]]): ... + def getSingleViableAlt(cls, altsets: Sequence[set[int]]): ... From c45194a69df3b46a8d8c004748102186f3006b57 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:08:19 -0500 Subject: [PATCH 31/34] Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi index 40c9f5e8e11f..4597bfbf0482 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi @@ -1,3 +1,4 @@ +from collections.abc import Sequence from enum import Enum from antlr4.atn.ATN import ATN as ATN From f3bd9391a82d52f018462e50cba12855cdb4e2c4 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:09:33 -0500 Subject: [PATCH 32/34] Update stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi index 3271670ff5ac..6d9aac86d3d2 100644 --- a/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi @@ -6,8 +6,8 @@ from antlr4.Utils import str_list as str_list class DFASerializer: dfa: Incomplete - literalNames: Incomplete - symbolicNames: Incomplete + literalNames: list[str] | None + symbolicNames: list[str] | None def __init__(self, dfa: DFA, literalNames: list[str] | None = None, symbolicNames: list[str] | None = None) -> None: ... def getEdgeLabel(self, i: int): ... def getStateString(self, s: DFAState): ... 
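[Editor's sketch, not part of the patch] PATCH 23 and PATCH 30-31 above relax several PredictionMode classmethods from list[set[int]] to Sequence[set[int]] and import Sequence from collections.abc. A short, self-contained illustration of why that widening helps callers; this is not the runtime's code, just the same annotation pattern on a toy function:

    from collections.abc import Sequence

    # A read-only parameter typed as Sequence accepts lists, tuples, and other
    # sequences alike; list[set[int]] would have admitted lists only.
    def all_subsets_equal(altsets: Sequence[set[int]]) -> bool:
        return all(s == altsets[0] for s in altsets[1:]) if altsets else True

    print(all_subsets_equal([{1, 2}, {2, 1}]))  # list argument -> True
    print(all_subsets_equal(({1}, {1}, {2})))   # tuple also type-checks -> False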
From 840e81c3ce1eeb1714383794e2faefa78a741d48 Mon Sep 17 00:00:00 2001 From: Kevin Nowaczyk Date: Mon, 15 Jan 2024 09:27:14 -0500 Subject: [PATCH 33/34] Update stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi Co-authored-by: Sebastian Rittau --- stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi | 1 + 1 file changed, 1 insertion(+) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index 824700785e60..7bfdb761a902 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -1,3 +1,4 @@ +from collections.abc import Iterable from _typeshed import Incomplete from antlr4 import DFA as DFA From 05eabd266fee2ddc735ff23bcf5083cca5e89267 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 14:28:29 +0000 Subject: [PATCH 34/34] [pre-commit.ci] auto fixes from pre-commit.com hooks --- stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi | 1 - 1 file changed, 1 deletion(-) diff --git a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi index 7bfdb761a902..824700785e60 100644 --- a/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi +++ b/stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi @@ -1,4 +1,3 @@ -from collections.abc import Iterable from _typeshed import Incomplete from antlr4 import DFA as DFA
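[Editor's sketch, not part of the patch] The series closes with PATCH 33-34 adding and then, via pre-commit, dropping an unused Iterable import. Looking back at the mergeCache parameters that gained a dict[...] | None annotation earlier in the series, the shape being encoded is the usual optional-cache idiom: callers may pass a shared dict or pass None and accept recomputation. A hedged, stand-alone sketch of that idiom (not the runtime's implementation; every name below is invented for illustration):

    from __future__ import annotations

    # An optional memoization cache, mirroring the dict[...] | None annotations:
    # pass a dict to share results across calls, or omit it to recompute.
    def merge_demo(a: str, b: str, cache: dict[tuple[str, str], str] | None = None) -> str:
        if cache is not None and (a, b) in cache:
            return cache[(a, b)]
        result = f"[{a} {b}]"
        if cache is not None:
            cache[(a, b)] = result
        return result

    shared: dict[tuple[str, str], str] = {}
    print(merge_demo("$", "x", shared))  # computed once, stored in shared
    print(merge_demo("$", "x", shared))  # served from the cache on the second call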