Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit. Hold Shift + click to select a range.
463513e
Add files via upload
Beakerboy Dec 20, 2023
9c2af91
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 20, 2023
e857e1b
Create METADATA.toml
Beakerboy Dec 20, 2023
6e188af
moved files
Dec 20, 2023
2904e29
Merge pull request #2 from Beakerboy/move-files
Beakerboy Dec 20, 2023
328b1c9
changed directory name (#3)
Beakerboy Dec 20, 2023
76873d4
Fixed Basic Errors
Beakerboy Dec 21, 2023
b4b9f06
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 21, 2023
cda790f
Fixed strict annotations
Beakerboy Dec 23, 2023
bc92839
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 23, 2023
ae8c1ee
Added ’ | None = None’
Beakerboy Dec 24, 2023
a9d0273
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 24, 2023
b19f2f2
No re-export
Beakerboy Dec 24, 2023
50f8ddb
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 24, 2023
9f184a1
Update stubs/antlr4-python3-runtime/METADATA.toml
Beakerboy Dec 25, 2023
2f19d4f
Add antlr4 to the excludelist for stricter pyright settings
AlexWaygood Dec 28, 2023
4a29024
Codemod away the undesirable `Incomplete` annotations
AlexWaygood Dec 28, 2023
6b4e826
Run stubdefaulter
AlexWaygood Dec 28, 2023
8b9085f
Run linters
AlexWaygood Dec 28, 2023
d92896b
More codemodding
AlexWaygood Dec 28, 2023
1add9af
Update pyrightconfig.stricter.json
AlexWaygood Dec 28, 2023
bb6e3c8
Update stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi
Beakerboy Dec 28, 2023
2dd61a6
Removed “removed” classes
Beakerboy Dec 28, 2023
2407193
Merge branch 'main' into antlr4
Beakerboy Dec 28, 2023
eb23c59
Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi
Beakerboy Jan 15, 2024
aa74628
Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi
Beakerboy Jan 15, 2024
6fdd3bf
Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi
Beakerboy Jan 15, 2024
fd85b9a
Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi
Beakerboy Jan 15, 2024
e7cf59a
Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi
Beakerboy Jan 15, 2024
8d82f89
Update stubs/antlr4-python3-runtime/antlr4/PredictionContext.pyi
Beakerboy Jan 15, 2024
d12e60a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 15, 2024
74e3243
Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi
Beakerboy Jan 15, 2024
c45194a
Update stubs/antlr4-python3-runtime/antlr4/atn/PredictionMode.pyi
Beakerboy Jan 15, 2024
f3bd939
Update stubs/antlr4-python3-runtime/antlr4/dfa/DFASerializer.pyi
Beakerboy Jan 15, 2024
7ae2fe4
Merge branch 'main' into antlr4
Beakerboy Jan 15, 2024
840e81c
Update stubs/antlr4-python3-runtime/antlr4/atn/ParserATNSimulator.pyi
Beakerboy Jan 15, 2024
05eabd2
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jan 15, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pyrightconfig.stricter.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
"stdlib/xml/dom/minidom.pyi",
"stdlib/xml/dom/pulldom.pyi",
"stdlib/xml/sax",
"stubs/antlr4-python3-runtime",
"stubs/aws-xray-sdk",
"stubs/beautifulsoup4",
"stubs/bleach",
Expand Down
6 changes: 6 additions & 0 deletions stubs/antlr4-python3-runtime/METADATA.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
version = "4.13.*"
upstream_repository = "https://github.com/antlr/antlr4"

[tool.stubtest]
ignore_missing_stub = true
platforms = ["linux", "win32"]
39 changes: 39 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/BufferedTokenStream.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from _typeshed import Incomplete

from antlr4.error.Errors import IllegalStateException as IllegalStateException
from antlr4.Token import Token as Token

# `Lexer` is declared as Incomplete rather than imported — presumably to
# break a circular import with antlr4.Lexer; it is used only in annotations
# below. TODO(review): confirm against the runtime module layout.
Lexer: Incomplete

# Empty placeholder for the runtime TokenStream base class.
class TokenStream: ...

# Stub for antlr4's BufferedTokenStream: a TokenStream that buffers tokens
# pulled from its source. Bodies are intentionally empty (.pyi stub); the
# attribute/return types mirror the runtime implementation.
class BufferedTokenStream(TokenStream):
    tokenSource: Incomplete
    tokens: Incomplete
    index: int
    fetchedEOF: bool
    def __init__(self, tokenSource: Lexer) -> None: ...
    def mark(self) -> int: ...
    def release(self, marker: int) -> None: ...
    def reset(self) -> None: ...
    def seek(self, index: int) -> None: ...
    def get(self, index: int) -> Token: ...
    def consume(self) -> None: ...
    def sync(self, i: int) -> bool: ...
    def fetch(self, n: int) -> int: ...
    def getTokens(self, start: int, stop: int, types: set[int] | None = None) -> list[Token]: ...
    def LA(self, i: int) -> int: ...
    def LB(self, k: int) -> Token | None: ...
    def LT(self, k: int) -> Token | None: ...
    # `-> int` added for consistency with the CommonTokenStream override,
    # which declares `def adjustSeekIndex(self, i: int) -> int`.
    def adjustSeekIndex(self, i: int) -> int: ...
    def lazyInit(self) -> None: ...
    def setup(self) -> None: ...
    def setTokenSource(self, tokenSource: Lexer): ...
    # NOTE(review): the remaining methods are left unannotated (typeshed
    # "Incomplete" style); return types should be filled in from the runtime.
    def nextTokenOnChannel(self, i: int, channel: int): ...
    def previousTokenOnChannel(self, i: int, channel: int): ...
    def getHiddenTokensToRight(self, tokenIndex: int, channel: int = -1): ...
    def getHiddenTokensToLeft(self, tokenIndex: int, channel: int = -1): ...
    def filterForChannel(self, left: int, right: int, channel: int): ...
    def getSourceName(self): ...
    def getText(self, start: int | None = None, stop: int | None = None): ...
    def fill(self) -> None: ...
14 changes: 14 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/CommonTokenFactory.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from _typeshed import Incomplete

from antlr4.Token import CommonToken as CommonToken

# Empty placeholder for the runtime TokenFactory base class.
class TokenFactory: ...

# Stub for the default token factory that produces CommonToken instances.
class CommonTokenFactory(TokenFactory):
    DEFAULT: Incomplete  # presumably the shared default factory instance — TODO confirm
    copyText: Incomplete
    def __init__(self, copyText: bool = False) -> None: ...
    # NOTE(review): return types unannotated; both presumably return CommonToken — confirm against runtime.
    def create(
        self, source: tuple[Incomplete, ...], type: int, text: str, channel: int, start: int, stop: int, line: int, column: int
    ): ...
    def createThin(self, type: int, text: str): ...
13 changes: 13 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/CommonTokenStream.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from _typeshed import Incomplete

from antlr4.BufferedTokenStream import BufferedTokenStream as BufferedTokenStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.Token import Token as Token

# Stub for the channel-filtering token stream built on BufferedTokenStream.
class CommonTokenStream(BufferedTokenStream):
    channel: Incomplete
    def __init__(self, lexer: Lexer, channel: int = 0) -> None: ...
    def adjustSeekIndex(self, i: int) -> int: ...
    def LB(self, k: int) -> Token | None: ...
    def LT(self, k: int) -> Token | None: ...
    def getNumberOfOnChannelTokens(self) -> int: ...
8 changes: 8 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/FileStream.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from _typeshed import Incomplete

from antlr4.InputStream import InputStream as InputStream

# Stub for an InputStream whose data comes from a file named at construction.
class FileStream(InputStream):
    fileName: Incomplete
    def __init__(self, fileName: str, encoding: str = "ascii", errors: str = "strict") -> None: ...
    # NOTE(review): return type unannotated; presumably returns the decoded
    # file contents as str — confirm against runtime.
    def readDataFrom(self, fileName: str, encoding: str, errors: str = "strict"): ...
21 changes: 21 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/InputStream.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from _typeshed import Incomplete

from antlr4.Token import Token as Token

# Stub for the character-stream wrapper over an in-memory string.
class InputStream:
    name: str
    strdata: Incomplete
    data: Incomplete
    def __init__(self, data: str) -> None: ...
    # NOTE(review): the properties and most methods below are unannotated;
    # index/size presumably return int — confirm against runtime.
    @property
    def index(self): ...
    @property
    def size(self): ...
    def reset(self) -> None: ...
    def consume(self) -> None: ...
    def LA(self, offset: int): ...
    def LT(self, offset: int): ...
    def mark(self): ...
    def release(self, marker: int): ...
    def seek(self, _index: int): ...
    def getText(self, start: int, stop: int): ...
19 changes: 19 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/IntervalSet.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from antlr4.Token import Token as Token

# Stub for a set of integer intervals, stored as a list of range objects
# (or None, per the declared attribute type).
class IntervalSet:
    intervals: list[range] | None
    readonly: bool
    def __init__(self) -> None: ...
    def __iter__(self): ...
    def __getitem__(self, item): ...
    def addOne(self, v: int): ...
    def addRange(self, v: range): ...
    def addSet(self, other: IntervalSet): ...
    def reduce(self, k: int): ...
    def complement(self, start: int, stop: int): ...
    def __contains__(self, item) -> bool: ...
    def __len__(self) -> int: ...
    # NOTE(review): `v` is unannotated in the two remove methods; presumably
    # int and range respectively, mirroring addOne/addRange — confirm.
    def removeRange(self, v) -> None: ...
    def removeOne(self, v) -> None: ...
    def toString(self, literalNames: list[str], symbolicNames: list[str]): ...
    def elementName(self, literalNames: list[str], symbolicNames: list[str], a: int): ...
26 changes: 26 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/LL1Analyzer.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from _typeshed import Incomplete

from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNConfig import ATNConfig as ATNConfig
from antlr4.atn.ATNState import ATNState as ATNState, RuleStopState as RuleStopState
from antlr4.atn.Transition import (
AbstractPredicateTransition as AbstractPredicateTransition,
NotSetTransition as NotSetTransition,
RuleTransition as RuleTransition,
WildcardTransition as WildcardTransition,
)
from antlr4.IntervalSet import IntervalSet as IntervalSet
from antlr4.PredictionContext import (
PredictionContext as PredictionContext,
PredictionContextFromRuleContext as PredictionContextFromRuleContext,
SingletonPredictionContext as SingletonPredictionContext,
)
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token

# Stub for the LL(1) lookahead analyzer operating over an ATN.
class LL1Analyzer:
    HIT_PRED: Incomplete
    atn: Incomplete
    def __init__(self, atn: ATN) -> None: ...
    def getDecisionLookahead(self, s: ATNState): ...
    def LOOK(self, s: ATNState, stopState: ATNState | None = None, ctx: RuleContext | None = None): ...
64 changes: 64 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/Lexer.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
from _typeshed import Incomplete
from typing import TextIO

from antlr4.atn.LexerATNSimulator import LexerATNSimulator as LexerATNSimulator
from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory
from antlr4.error.Errors import (
IllegalStateException as IllegalStateException,
LexerNoViableAltException as LexerNoViableAltException,
RecognitionException as RecognitionException,
)
from antlr4.InputStream import InputStream as InputStream
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.Token import Token as Token

# Empty placeholder for the runtime TokenSource base class.
class TokenSource: ...

# Stub for the base Lexer: a Recognizer that acts as a TokenSource over an
# InputStream. Bodies are intentionally empty (.pyi stub).
class Lexer(Recognizer, TokenSource):
    DEFAULT_MODE: int
    MORE: int
    SKIP: int
    DEFAULT_TOKEN_CHANNEL: Incomplete
    HIDDEN: Incomplete
    MIN_CHAR_VALUE: int
    MAX_CHAR_VALUE: int
    def __init__(self, input: InputStream, output: TextIO = ...) -> None: ...
    def reset(self) -> None: ...
    # NOTE(review): nextToken presumably returns Token — confirm against runtime.
    def nextToken(self): ...
    def skip(self) -> None: ...
    def more(self) -> None: ...
    def mode(self, m: int): ...
    def pushMode(self, m: int): ...
    def popMode(self): ...
    # Read/write properties mirroring the runtime getters/setters; the
    # getters are left unannotated (typeshed "Incomplete" style).
    @property
    def inputStream(self): ...
    @inputStream.setter
    def inputStream(self, input: InputStream): ...
    @property
    def sourceName(self): ...
    def emitToken(self, token: Token): ...
    def emit(self): ...
    def emitEOF(self): ...
    @property
    def type(self): ...
    @type.setter
    def type(self, type: int): ...
    @property
    def line(self): ...
    @line.setter
    def line(self, line: int): ...
    @property
    def column(self): ...
    @column.setter
    def column(self, column: int): ...
    def getCharIndex(self): ...
    @property
    def text(self): ...
    @text.setter
    def text(self, txt: str): ...
    def getAllTokens(self): ...
    def notifyListeners(self, e: LexerNoViableAltException): ...
    def getErrorDisplay(self, s: str): ...
    def getErrorDisplayForChar(self, c: str): ...
    def getCharErrorDisplay(self, c: str): ...
    def recover(self, re: RecognitionException): ...
19 changes: 19 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/ListTokenSource.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from _typeshed import Incomplete

from antlr4.CommonTokenFactory import CommonTokenFactory as CommonTokenFactory
from antlr4.Lexer import TokenSource as TokenSource
from antlr4.Token import Token as Token

# Stub for a TokenSource backed by a pre-built list of tokens.
class ListTokenSource(TokenSource):
    tokens: Incomplete
    sourceName: Incomplete
    pos: int
    eofToken: Incomplete
    def __init__(self, tokens: list[Token], sourceName: str | None = None) -> None: ...
    @property
    def column(self): ...
    # NOTE(review): nextToken presumably returns Token — confirm against runtime.
    def nextToken(self): ...
    @property
    def line(self): ...
    def getInputStream(self): ...
    def getSourceName(self): ...
74 changes: 74 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/Parser.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
from _typeshed import Incomplete
from typing import TextIO

from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions as ATNDeserializationOptions
from antlr4.atn.ATNDeserializer import ATNDeserializer as ATNDeserializer
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.CommonTokenFactory import TokenFactory as TokenFactory
from antlr4.error.Errors import (
RecognitionException as RecognitionException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.error.ErrorStrategy import DefaultErrorStrategy as DefaultErrorStrategy
from antlr4.InputStream import InputStream as InputStream
from antlr4.Lexer import Lexer as Lexer
from antlr4.ParserRuleContext import ParserRuleContext as ParserRuleContext
from antlr4.Recognizer import Recognizer as Recognizer
from antlr4.RuleContext import RuleContext as RuleContext
from antlr4.Token import Token as Token
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher as ParseTreePatternMatcher
from antlr4.tree.Tree import ErrorNode as ErrorNode, ParseTreeListener as ParseTreeListener, TerminalNode as TerminalNode

# Stub for the parse-tree listener used by Parser.setTrace; all parameters
# are unannotated in this stub.
class TraceListener(ParseTreeListener):
    def __init__(self, parser) -> None: ...
    def enterEveryRule(self, ctx) -> None: ...
    def visitTerminal(self, node) -> None: ...
    def visitErrorNode(self, node) -> None: ...
    def exitEveryRule(self, ctx) -> None: ...

# Stub for the base recursive-descent Parser (a Recognizer over a
# TokenStream). Bodies are intentionally empty (.pyi stub); many return
# types are left unannotated in typeshed "Incomplete" style.
class Parser(Recognizer):
    bypassAltsAtnCache: Incomplete
    buildParseTrees: bool
    def __init__(self, input: TokenStream, output: TextIO = ...) -> None: ...
    def reset(self) -> None: ...
    def match(self, ttype: int): ...
    def matchWildcard(self): ...
    def getParseListeners(self): ...
    def addParseListener(self, listener: ParseTreeListener): ...
    def removeParseListener(self, listener: ParseTreeListener): ...
    def removeParseListeners(self) -> None: ...
    def triggerEnterRuleEvent(self) -> None: ...
    def triggerExitRuleEvent(self) -> None: ...
    def getNumberOfSyntaxErrors(self): ...
    def getTokenFactory(self): ...
    def setTokenFactory(self, factory: TokenFactory): ...
    def getATNWithBypassAlts(self): ...
    def compileParseTreePattern(self, pattern: str, patternRuleIndex: int, lexer: Lexer | None = None): ...
    def getInputStream(self): ...
    def setInputStream(self, input: InputStream): ...
    def getTokenStream(self): ...
    def setTokenStream(self, input: TokenStream): ...
    def getCurrentToken(self): ...
    def notifyErrorListeners(self, msg: str, offendingToken: Token | None = None, e: RecognitionException | None = None): ...
    def consume(self): ...
    def addContextToParseTree(self) -> None: ...
    # `state` is declared here, mid-class, mirroring where the runtime
    # assigns it (inside rule entry/exit methods).
    state: Incomplete
    def enterRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
    def exitRule(self) -> None: ...
    def enterOuterAlt(self, localctx: ParserRuleContext, altNum: int): ...
    def getPrecedence(self): ...
    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
    def pushNewRecursionContext(self, localctx: ParserRuleContext, state: int, ruleIndex: int): ...
    def unrollRecursionContexts(self, parentCtx: ParserRuleContext): ...
    def getInvokingContext(self, ruleIndex: int): ...
    def precpred(self, localctx: RuleContext, precedence: int): ...
    def inContext(self, context: str): ...
    def isExpectedToken(self, symbol: int): ...
    def getExpectedTokens(self): ...
    def getExpectedTokensWithinCurrentRule(self): ...
    def getRuleIndex(self, ruleName: str): ...
    def getRuleInvocationStack(self, p: RuleContext | None = None): ...
    def getDFAStrings(self): ...
    def dumpDFA(self) -> None: ...
    def getSourceName(self): ...
    def setTrace(self, trace: bool): ...
36 changes: 36 additions & 0 deletions stubs/antlr4-python3-runtime/antlr4/ParserInterpreter.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
from _typeshed import Incomplete

from antlr4.atn.ATN import ATN as ATN
from antlr4.atn.ATNState import ATNState as ATNState, LoopEndState as LoopEndState, StarLoopEntryState as StarLoopEntryState
from antlr4.atn.ParserATNSimulator import ParserATNSimulator as ParserATNSimulator
from antlr4.atn.Transition import Transition as Transition
from antlr4.BufferedTokenStream import TokenStream as TokenStream
from antlr4.dfa.DFA import DFA as DFA
from antlr4.error.Errors import (
FailedPredicateException as FailedPredicateException,
RecognitionException as RecognitionException,
UnsupportedOperationException as UnsupportedOperationException,
)
from antlr4.Lexer import Lexer as Lexer
from antlr4.Parser import Parser as Parser
from antlr4.ParserRuleContext import InterpreterRuleContext as InterpreterRuleContext, ParserRuleContext as ParserRuleContext
from antlr4.PredictionContext import PredictionContextCache as PredictionContextCache
from antlr4.Token import Token as Token

# Stub for a Parser that interprets a grammar's ATN directly instead of
# using generated parser code.
class ParserInterpreter(Parser):
    grammarFileName: Incomplete
    atn: Incomplete
    tokenNames: Incomplete
    ruleNames: Incomplete
    decisionToDFA: Incomplete
    sharedContextCache: Incomplete
    pushRecursionContextStates: Incomplete
    def __init__(
        self, grammarFileName: str, tokenNames: list[str], ruleNames: list[str], atn: ATN, input: TokenStream
    ) -> None: ...
    # Re-declared mid-class, matching the same pattern in the Parser stub.
    state: Incomplete
    def parse(self, startRuleIndex: int): ...
    def enterRecursionRule(self, localctx: ParserRuleContext, state: int, ruleIndex: int, precedence: int): ...
    def getATNState(self): ...
    def visitState(self, p: ATNState): ...
    def visitRuleStopState(self, p: ATNState): ...
Loading