From b9bb0a7479e92fcfd83c5c7182e1972021ca9ecb Mon Sep 17 00:00:00 2001
From: Daniel Dotsenko
Date: Sat, 7 Nov 2020 19:04:28 -0800
Subject: [PATCH 01/32] removed unused boilerplate

---
 python/Makefile | 22 ----------------------
 python/setup.py |  2 +-
 python/tox.ini  |  1 -
 3 files changed, 1 insertion(+), 24 deletions(-)
 delete mode 100644 python/Makefile

diff --git a/python/Makefile b/python/Makefile
deleted file mode 100644
index ab925f6..0000000
--- a/python/Makefile
+++ /dev/null
@@ -1,22 +0,0 @@
-install:
-	pip install .[dev]
-
-pre-commit-install:
-	python -m pre_commit install
-
-lint:
-	pre-commit run --all-files
-
-tests:
-	python -m pytest tests
-
-black:
-	pre-commit run black
-
-flake8:
-	pre-commit run flake8
-
-isort:
-	pre-commit run isort
-
-.PHONY: install pre-commit-install lint tests black flake8 isort
diff --git a/python/setup.py b/python/setup.py
index ad48d86..eb8f7a0 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -24,6 +24,6 @@
         "Programming Language :: Python :: 3.8",
     ],
     install_requires=['antlr4-python3-runtime==4.8'],
-    extras_require={"tests": ["pytest>=5.3.5"], "dev": ["pytest>=5.3.5", "pre-commit>=2.1.1"]},
+    extras_require={"tests": ["pytest>=5.3.5"]},
     include_package_data=True,
 )
diff --git a/python/tox.ini b/python/tox.ini
index 6b2bcd5..20fcb18 100644
--- a/python/tox.ini
+++ b/python/tox.ini
@@ -1,5 +1,4 @@
 [pytest]
-strict = true
 addopts = -ra
 testpaths = tests
 filterwarnings =

From 5cf265cce6d63c22379dc935fb32baa482adfc9b Mon Sep 17 00:00:00 2001
From: Daniel Dotsenko
Date: Sat, 7 Nov 2020 19:06:18 -0800
Subject: [PATCH 02/32] local test runner

---
 Makefile                               | 40 ++++++++++++++++++++++----
 docker/Dockerfile-python-tests         | 27 +++++++++++++++++
 python/requirements-tests.txt          |  5 ++++
 python/requirements.txt                |  4 +++
 python/tests/antlr_tel/grammar_test.py |  3 ++
 5 files changed, 74 insertions(+), 5 deletions(-)
 create mode 100644 docker/Dockerfile-python-tests
 create mode 100644 python/requirements-tests.txt
 create mode 100644 python/requirements.txt

diff --git a/Makefile b/Makefile
index 6789a9c..cbc21da 100644
--- a/Makefile
+++ b/Makefile
@@ -9,28 +9,58 @@ VENDOR_NAME:=panoramic
 IMAGE_NAME:=tel-grammar
 JAVA_IMAGE_NAME_FULL?=$(VENDOR_NAME)/java-$(IMAGE_NAME)
 PYTHON_IMAGE_NAME_FULL?=$(VENDOR_NAME)/python-$(IMAGE_NAME)
+PYTHON_IMAGE_TESTS_NAME_FULL?=$(VENDOR_NAME)/python-$(IMAGE_NAME)-tests
 WORKDIR=/usr/src/app
 
 image-java:
 	docker build \
-	--pull \
+		--pull \
 		-t $(JAVA_IMAGE_NAME_FULL):latest \
 		-f docker/Dockerfile-java .
 
 image-python:
 	docker build \
-	--pull \
+		--pull \
 		-t $(PYTHON_IMAGE_NAME_FULL):latest \
 		-f docker/Dockerfile-python .
 
 PHONY: image-java image-python
 
-test: image-python
-	docker run --rm ${PYTHON_IMAGE_NAME_FULL}:latest python -m pytest tests
+_TEST_IMAGE_MARKER:=/tmp/.$(VENDOR_NAME)-$(IMAGE_NAME)-testrunner-done
+$(_TEST_IMAGE_MARKER): python/requirements.txt python/requirements-tests.txt
+	docker build \
+		-t $(PYTHON_IMAGE_TESTS_NAME_FULL) \
+		-f docker/Dockerfile-python-tests .
+	touch $(_TEST_IMAGE_MARKER)
+
+test-dev: $(_TEST_IMAGE_MARKER)
+	docker run -it --rm \
+		-v $(PWD)/python:$(WORKDIR) \
+		--workdir ${WORKDIR} \
+		$(PYTHON_IMAGE_TESTS_NAME_FULL) \
+		pytest -s tests/
+
+# see shipping/Jenkinsfile and keep in sync
+test:
+	docker run -it --rm \
+		-v $(PWD):$(WORKDIR) \
+		--workdir ${WORKDIR} \
+		python:3.7 \
+		bash -c "pip install --upgrade tox && tox -e py37 -c python/tox.ini"
+	docker run -it --rm \
+		-v $(PWD):$(WORKDIR) \
+		--workdir ${WORKDIR} \
+		python:3.8 \
+		bash -c "pip install --upgrade tox && tox -e py38 -c python/tox.ini"
+	docker run -it --rm \
+		-v $(PWD):$(WORKDIR) \
+		--workdir ${WORKDIR} \
+		python:3.9 \
+		bash -c "pip install --upgrade tox && tox -e py39 -c python/tox.ini"
 
-.PHONY: test
+.PHONY: test test-dev
 
 build-code-python:
diff --git a/docker/Dockerfile-python-tests b/docker/Dockerfile-python-tests
new file mode 100644
index 0000000..c3ba066
--- /dev/null
+++ b/docker/Dockerfile-python-tests
@@ -0,0 +1,27 @@
+ARG PYTHON_VERSION=3.8
+FROM python:${PYTHON_VERSION} as baseimage
+
+ARG WORKDIR=/usr/src/app
+WORKDIR $WORKDIR
+
+ARG PYTHONUSERBASE=/usr/src/lib
+
+# PYTHONUNBUFFERED: Force stdin, stdout and stderr to be totally unbuffered. (equivalent to `python -u`)
+# PYTHONHASHSEED: Enable hash randomization (equivalent to `python -R`)
+# PYTHONDONTWRITEBYTECODE: Do not write byte files to disk, since we maintain it as readonly. (equivalent to `python -B`)
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONHASHSEED=random \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUSERBASE=$PYTHONUSERBASE \
+    PATH="${PYTHONUSERBASE}/bin:${PATH}"
+
+# Setup PYTHONUSERBASE directory
+# we allow running / managing these folders by non-root users. Thus need chmod
+RUN set -ex; \
+    mkdir -p $PYTHONUSERBASE && chmod 777 ${PYTHONUSERBASE}; \
+    mkdir -p $WORKDIR && chmod 777 ${WORKDIR}
+
+COPY python/requirements.txt python/requirements-tests.txt ./
+RUN pip install \
+    -r requirements.txt \
+    -r requirements-tests.txt
diff --git a/python/requirements-tests.txt b/python/requirements-tests.txt
new file mode 100644
index 0000000..b6df1dd
--- /dev/null
+++ b/python/requirements-tests.txt
@@ -0,0 +1,5 @@
+# file used as alternative to setup.py
+# to avoid installing tel_grammar package in development
+# If you change setup.py requirements, change this too, please.
+-r requirements.txt
+pytest>=5.3.5
diff --git a/python/requirements.txt b/python/requirements.txt
new file mode 100644
index 0000000..503c349
--- /dev/null
+++ b/python/requirements.txt
@@ -0,0 +1,4 @@
+# file used as alternative to setup.py
+# to avoid installing tel_grammar package in development
+# If you change setup.py requirements, change this too, please.
+antlr4-python3-runtime==4.8
diff --git a/python/tests/antlr_tel/grammar_test.py b/python/tests/antlr_tel/grammar_test.py
index 8607a37..a9c255d 100644
--- a/python/tests/antlr_tel/grammar_test.py
+++ b/python/tests/antlr_tel/grammar_test.py
@@ -1,7 +1,10 @@
+import os
+import sys
 import pytest
 
 from antlr4 import CommonTokenStream, InputStream
 
+sys.path.append('./src')
 from tel_grammar.antlr.TelLexer import TelLexer
 from tel_grammar.antlr.TelParser import TelParser
 from tel_grammar.antlr.TelVisitor import TelVisitor

From 6a1c306f658386e62d1970187301c87b3147559e Mon Sep 17 00:00:00 2001
From: Daniel Dotsenko
Date: Sat, 7 Nov 2020 19:13:12 -0800
Subject: [PATCH 03/32] Adam's taxon_expr > taxon patch

---
 grammar/Tel.g4 | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/grammar/Tel.g4 b/grammar/Tel.g4
index 77b47b0..136e50d 100644
--- a/grammar/Tel.g4
+++ b/grammar/Tel.g4
@@ -31,13 +31,11 @@
 MULT : '*';
 DIV : '/';
 OPTIONAL_TAXON_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional.
-
 WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines
 
 // auxiliarly rules
 fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; // matches functions
-taxon: WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; // matches a taxon slug
-taxon_expr: OPTIONAL_TAXON_OPERATOR?taxon ; // taxon slug with optional taxon prefix operator
+taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; // matches a taxon slug
 
 // final rules
 parse: expr EOF; // main rule for parsing
@@ -52,11 +50,11 @@ expr
 ;
 
 atom
-: L_BRACKET expr R_BRACKET #bracketExpr
+: L_BRACKET expr R_BRACKET #bracketExpr
 | (INT | REAL) #numberAtom
 | fn #fnExpr
 | (TRUE | FALSE) #booleanAtom
-| taxon_expr #taxonSlugAtom
+| taxon #taxonSlugAtom
 | SINGLE_QUOTED_ELEMENT #singleQuotedAtom
 | STRING_CONSTANT #stringConstantAtom
 ;

From 7570f3909d752ea1715bcce48b134b47a75b87c8 Mon Sep 17 00:00:00 2001
From: Daniel Dotsenko
Date: Sun, 8 Nov 2020 15:59:56 -0800
Subject: [PATCH 04/32] harden WORD - not starting with number, push up literals higher in rank in ATOM

---
 grammar/Tel.g4 | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/grammar/Tel.g4 b/grammar/Tel.g4
index 136e50d..ac7dc2a 100644
--- a/grammar/Tel.g4
+++ b/grammar/Tel.g4
@@ -7,7 +7,7 @@
 FALSE : 'false' | 'FALSE'; // false
 NOT : 'not' | 'NOT';
 KW_IS : 'is' | 'IS';
 KW_NULL : 'null' | 'NULL';
-WORD : [a-zA-Z0-9_.]+; // one word (either part of slug or fn name)
+WORD : [a-zA-Z_][a-zA-Z_0-9$]*; // one word (either part of slug or fn name)
 STRING_CONSTANT : '"' ( '\\"' | ~'"' )* '"' ; // string constant. Not greedy, and supports \ to escape " char.
 SINGLE_QUOTED_ELEMENT: '\'' ( '\\\'' | ~'\'' )* '\'' ; // string element surrounded by single quotes. Not greedy, and supports \ to escape ' char.
@@ -50,11 +50,11 @@ expr ; atom -: L_BRACKET expr R_BRACKET #bracketExpr +: L_BRACKET expr R_BRACKET #bracketExpr | (INT | REAL) #numberAtom -| fn #fnExpr | (TRUE | FALSE) #booleanAtom -| taxon #taxonSlugAtom | SINGLE_QUOTED_ELEMENT #singleQuotedAtom | STRING_CONSTANT #stringConstantAtom +| fn #fnExpr +| taxon #taxonSlugAtom ; From 1edb2d31e41892d0a64bcf87f4bb325759656d39 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 8 Nov 2020 16:01:35 -0800 Subject: [PATCH 05/32] reorder dependencies with higher-order structures higher in file --- grammar/Tel.g4 | 55 +++++++++++++++++++++++++------------------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/grammar/Tel.g4 b/grammar/Tel.g4 index ac7dc2a..07d0526 100644 --- a/grammar/Tel.g4 +++ b/grammar/Tel.g4 @@ -1,5 +1,32 @@ grammar Tel; +parse: expr EOF; // main rule for parsing + +expr +: NOT expr #notExpr +| expr op=(MULT | DIV) expr #multiplicationExpr +| expr op=(PLUS | MINUS) expr #additiveExpr +| expr op=(OR | AND | EQ | NEQ | GT | LT | GTEQ | LTEQ) expr #logicalExpr +| expr KW_IS NOT? KW_NULL #nullTestExpr +| atom #atomExpr +; + +atom +: L_BRACKET expr R_BRACKET #bracketExpr +| (INT | REAL) #numberAtom +| (TRUE | FALSE) #booleanAtom +| SINGLE_QUOTED_ELEMENT #singleQuotedAtom +| STRING_CONSTANT #stringConstantAtom +| fn #fnExpr +| taxon #taxonSlugAtom +; + +// auxiliarly rules +fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; // matches functions +taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; // matches a taxon slug + +WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines + INT : '-'? [0-9]+ ; // integer REAL : '-'? [0-9]+ '.' [0-9]+ ; // integer TRUE : 'true' | 'TRUE'; // true @@ -30,31 +57,3 @@ MINUS : '-'; MULT : '*'; DIV : '/'; OPTIONAL_TAXON_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. - -WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines - -// auxiliarly rules -fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; // matches functions -taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; // matches a taxon slug - -// final rules -parse: expr EOF; // main rule for parsing - -expr -: NOT expr #notExpr -| expr op=(MULT | DIV) expr #multiplicationExpr -| expr op=(PLUS | MINUS) expr #additiveExpr -| expr op=(OR | AND | EQ | NEQ | GT | LT | GTEQ | LTEQ) expr #logicalExpr -| expr KW_IS NOT? KW_NULL #nullTestExpr -| atom #atomExpr -; - -atom -: L_BRACKET expr R_BRACKET #bracketExpr -| (INT | REAL) #numberAtom -| (TRUE | FALSE) #booleanAtom -| SINGLE_QUOTED_ELEMENT #singleQuotedAtom -| STRING_CONSTANT #stringConstantAtom -| fn #fnExpr -| taxon #taxonSlugAtom -; From e5aab07653166f5a605a571f09d07461546bff29 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 8 Nov 2020 16:11:00 -0800 Subject: [PATCH 06/32] allow mixed case in keywords: True == tRUE --- grammar/Tel.g4 | 52 ++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 12 deletions(-) diff --git a/grammar/Tel.g4 b/grammar/Tel.g4 index 07d0526..b78d2d4 100644 --- a/grammar/Tel.g4 +++ b/grammar/Tel.g4 @@ -21,20 +21,18 @@ atom | taxon #taxonSlugAtom ; -// auxiliarly rules -fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; // matches functions -taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; // matches a taxon slug +fn : WORD L_BRACKET expr? 
(FN_PARAMETER_DELIMITER expr)* R_BRACKET ; +taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; -WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines +INT : '-'? DIGIT+ ; // integer +REAL : '-'? DIGIT+ '.' DIGIT+ ; // integer +TRUE : T R U E; +FALSE : F A L S E; +NOT : N O T; +KW_IS : I S; +KW_NULL : N U L L; +WORD : [a-zA-Z_][a-zA-Z_0-9$.]*; // one word (either part of slug or fn name). must start with non-digit -INT : '-'? [0-9]+ ; // integer -REAL : '-'? [0-9]+ '.' [0-9]+ ; // integer -TRUE : 'true' | 'TRUE'; // true -FALSE : 'false' | 'FALSE'; // false -NOT : 'not' | 'NOT'; -KW_IS : 'is' | 'IS'; -KW_NULL : 'null' | 'NULL'; -WORD : [a-zA-Z_][a-zA-Z_0-9$]*; // one word (either part of slug or fn name) STRING_CONSTANT : '"' ( '\\"' | ~'"' )* '"' ; // string constant. Not greedy, and supports \ to escape " char. SINGLE_QUOTED_ELEMENT: '\'' ( '\\\'' | ~'\'' )* '\'' ; // string element surrounded by single quotes. Not greedy, and supports \ to escape ' char. @@ -57,3 +55,33 @@ MINUS : '-'; MULT : '*'; DIV : '/'; OPTIONAL_TAXON_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. + +WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines + +fragment DIGIT : [0-9]; +fragment A : [aA]; +fragment B : [bB]; +fragment C : [cC]; +fragment D : [dD]; +fragment E : [eE]; +fragment F : [fF]; +fragment G : [gG]; +fragment H : [hH]; +fragment I : [iI]; +fragment J : [jJ]; +fragment K : [kK]; +fragment L : [lL]; +fragment M : [mM]; +fragment N : [nN]; +fragment O : [oO]; +fragment P : [pP]; +fragment Q : [qQ]; +fragment R : [rR]; +fragment S : [sS]; +fragment T : [tT]; +fragment U : [uU]; +fragment V : [vV]; +fragment W : [wW]; +fragment X : [xX]; +fragment Y : [yY]; +fragment Z : [zZ]; From edacb529a2816680af9bc2291e5984222c8d633e Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 8 Nov 2020 18:54:24 -0800 Subject: [PATCH 07/32] split Lexer and Parser into separate files --- Makefile | 55 +++++++++++++++++++++++---------- README.md | 20 ++++++------ docker/Dockerfile-antlr | 12 +++++++ grammar/.gitignore | 3 ++ grammar/{Tel.g4 => TelLexer.g4} | 31 +++---------------- grammar/TelParser.g4 | 29 +++++++++++++++++ 6 files changed, 99 insertions(+), 51 deletions(-) create mode 100644 docker/Dockerfile-antlr create mode 100644 grammar/.gitignore rename grammar/{Tel.g4 => TelLexer.g4} (57%) create mode 100644 grammar/TelParser.g4 diff --git a/Makefile b/Makefile index cbc21da..87e3ffb 100644 --- a/Makefile +++ b/Makefile @@ -62,19 +62,42 @@ test: .PHONY: test test-dev - -build-code-python: - docker run \ - -v $(PWD):$(WORKDIR) \ - --workdir ${WORKDIR} \ - --rm ${JAVA_IMAGE_NAME_FULL}:latest \ - java -Xmx500M -cp '/usr/local/lib/antlr-4.8-complete.jar:$$CLASSPATH' org.antlr.v4.Tool -visitor -Dlanguage=Python3 -o python/src/tel_grammar/antlr -Xexact-output-dir grammar/Tel.g4 - -build-code-js: - docker run \ - -v $(PWD):$(WORKDIR) \ - --workdir ${WORKDIR} \ - --rm ${JAVA_IMAGE_NAME_FULL}:latest \ - java -Xmx500M -cp '/usr/local/lib/antlr-4.8-complete.jar:$$CLASSPATH' org.antlr.v4.Tool -visitor -Dlanguage=JavaScript -o js-temp/ -Xexact-output-dir grammar/Tel.g4 - -.PHONY: build-code-python +image-antlr: + DOCKER_BUILDKIT=1 docker build \ + -t antlr \ + -f docker/Dockerfile-antlr . 
+
+# https://github.com/antlr/antlr4/issues/2335
+# solves "cannot find token file" error
+grammar/TelLexer.tokens: grammar/TelLexer.g4
+	docker run --rm \
+		-v $(PWD):/mnt \
+		antlr \
+		-o ./ \
+		grammar/TelLexer.g4
+
+build-code-python: grammar/TelLexer.tokens grammar/TelParser.g4 # image-antlr
+	docker run --rm \
+		-v $(PWD):/mnt \
+		antlr \
+		-visitor \
+		-Dlanguage=Python3 \
+		-Xexact-output-dir \
+		-o python/src/tel_grammar/antlr \
+		grammar/TelLexer.g4 \
+		grammar/TelParser.g4
+
+build-code-js: grammar/TelLexer.tokens grammar/TelParser.g4 # image-antlr
+	docker run --rm \
+		-v $(PWD):/mnt \
+		antlr \
+		-visitor \
+		-Dlanguage=JavaScript \
+		-Xexact-output-dir \
+		-o js-temp/ \
+		grammar/TelLexer.g4 \
+		grammar/TelParser.g4
+
+build-code: build-code-python build-code-js
+
+.PHONY: image.antlr build-code-python build-code-js build-code
diff --git a/README.md b/README.md
index ddb100f..acd5214 100644
--- a/README.md
+++ b/README.md
@@ -25,25 +25,27 @@ It can generate following components in both python and JavaScript to handle par
 Current documentation on the language is available [here](https://diesel-service.operamprod.com/documentation#taxon-expression-language-tel).
 
-## How to use it
+## Local Development
 
-### `make image-java`
+### `make image-antlr`
 
-It builds local docker image to run ANTLR commands. You need to run this command before you may run ANTLR-related make commands.
-
-### `make image-python`
-
-It builds local docker image to run python tests. This image is used to run tests on the current grammar.
+It builds local docker image to run ANTLR commands.
+You need to run this command before you can run ANTLR-related make commands.
 
 ### `make build-code-python`
 
 It generates all components in python language
-
 ### `make build-code-js`
 
 It generates all components in JavaScript language
 
+### `make test-dev`
+
+Runs tests on the current version of grammar in quick mode.
+Reuses pre-built python image (3.8) to mount local python code and tests and run them.
+
 ### `make test`
 
-Runs tests on the current version of grammar.
+Runs the same tests as above, but against multiple supported python versions, using TOX config.
+(Takes much longer to run because each python image is built from scratch each time.)
diff --git a/docker/Dockerfile-antlr b/docker/Dockerfile-antlr
new file mode 100644
index 0000000..b841207
--- /dev/null
+++ b/docker/Dockerfile-antlr
@@ -0,0 +1,12 @@
+FROM java:8
+
+ENV ANTLR_VERSION=4.8
+ENV CLASSPATH .:/antlr-${ANTLR_VERSION}-complete.jar:$CLASSPATH
+
+ADD http://www.antlr.org/download/antlr-${ANTLR_VERSION}-complete.jar /usr/bin/
+RUN chmod +r /usr/bin/antlr-${ANTLR_VERSION}-complete.jar \
+    && ln /usr/bin/antlr-${ANTLR_VERSION}-complete.jar /usr/bin/antlr.jar
+
+WORKDIR /mnt
+
+ENTRYPOINT ["java", "-jar", "/usr/bin/antlr.jar"]
diff --git a/grammar/.gitignore b/grammar/.gitignore
new file mode 100644
index 0000000..88224d5
--- /dev/null
+++ b/grammar/.gitignore
@@ -0,0 +1,3 @@
+*.*
+!*.g4
+!.gitignore
diff --git a/grammar/Tel.g4 b/grammar/TelLexer.g4
similarity index 57%
rename from grammar/Tel.g4
rename to grammar/TelLexer.g4
index b78d2d4..e9a5cbf 100644
--- a/grammar/Tel.g4
+++ b/grammar/TelLexer.g4
@@ -1,28 +1,4 @@
-grammar Tel;
-
-parse: expr EOF; // main rule for parsing
-
-expr
-: NOT expr #notExpr
-| expr op=(MULT | DIV) expr #multiplicationExpr
-| expr op=(PLUS | MINUS) expr #additiveExpr
-| expr op=(OR | AND | EQ | NEQ | GT | LT | GTEQ | LTEQ) expr #logicalExpr
-| expr KW_IS NOT?
KW_NULL #nullTestExpr -| atom #atomExpr -; - -atom -: L_BRACKET expr R_BRACKET #bracketExpr -| (INT | REAL) #numberAtom -| (TRUE | FALSE) #booleanAtom -| SINGLE_QUOTED_ELEMENT #singleQuotedAtom -| STRING_CONSTANT #stringConstantAtom -| fn #fnExpr -| taxon #taxonSlugAtom -; - -fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; -taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; +lexer grammar TelLexer; INT : '-'? DIGIT+ ; // integer REAL : '-'? DIGIT+ '.' DIGIT+ ; // integer @@ -56,7 +32,10 @@ MULT : '*'; DIV : '/'; OPTIONAL_TAXON_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. -WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines +// support SQL, JavaScript and Python style syntax for single-line comment +SINGLE_LINE_COMMENT : ('--'|'//'|'#') ~[\r\n]* -> channel(HIDDEN) ; + +WS : [ \t\r\n]+ -> channel(HIDDEN) ; // skip spaces, tabs, newlines fragment DIGIT : [0-9]; fragment A : [aA]; diff --git a/grammar/TelParser.g4 b/grammar/TelParser.g4 new file mode 100644 index 0000000..61948da --- /dev/null +++ b/grammar/TelParser.g4 @@ -0,0 +1,29 @@ +parser grammar TelParser; + +options { + tokenVocab = TelLexer; +} + +parse: expr EOF; // main rule for parsing + +expr +: NOT expr #notExpr +| expr op=(MULT | DIV) expr #multiplicationExpr +| expr op=(PLUS | MINUS) expr #additiveExpr +| expr op=(OR | AND | EQ | NEQ | GT | LT | GTEQ | LTEQ) expr #logicalExpr +| expr KW_IS NOT? KW_NULL #nullTestExpr +| atom #atomExpr +; + +atom +: L_BRACKET expr R_BRACKET #bracketExpr +| (INT | REAL) #numberAtom +| (TRUE | FALSE) #booleanAtom +| SINGLE_QUOTED_ELEMENT #singleQuotedAtom +| STRING_CONSTANT #stringConstantAtom +| fn #fnExpr +| taxon #taxonSlugAtom +; + +fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; +taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; From 47efd5480807a78e1f8d8f2e7da099100fe3dc9e Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 8 Nov 2020 19:21:59 -0800 Subject: [PATCH 08/32] Py and JS code adapted / rerendered with new split Lexer, MODULE AND CLASS NAMES CHANGED. 
Note, `TelVisitor` is now `TelParserVisitor` and `TelListener` is now `TelParserListener` --- js-temp/TelLexer.js | 324 ++++--- js-temp/TelListener.js | 168 ---- js-temp/TelParser.js | 879 ++++++++---------- js-temp/TelParserListener.js | 159 ++++ .../{TelVisitor.js => TelParserVisitor.js} | 76 +- python/src/tel_grammar/antlr/TelLexer.py | 210 +++-- python/src/tel_grammar/antlr/TelParser.py | 596 ++++++------ .../{TelListener.py => TelParserListener.py} | 81 +- .../{TelVisitor.py => TelParserVisitor.py} | 45 +- python/tests/antlr_tel/grammar_test.py | 2 +- 10 files changed, 1257 insertions(+), 1283 deletions(-) delete mode 100644 js-temp/TelListener.js create mode 100644 js-temp/TelParserListener.js rename js-temp/{TelVisitor.js => TelParserVisitor.js} (57%) rename python/src/tel_grammar/antlr/{TelListener.py => TelParserListener.py} (92%) rename python/src/tel_grammar/antlr/{TelVisitor.py => TelParserVisitor.py} (92%) diff --git a/js-temp/TelLexer.js b/js-temp/TelLexer.js index 0d70409..0697657 100644 --- a/js-temp/TelLexer.js +++ b/js-temp/TelLexer.js @@ -1,11 +1,11 @@ -// Generated from grammar/Tel.g4 by ANTLR 4.8 +// Generated from grammar/TelLexer.g4 by ANTLR 4.8 // jshint ignore: start var antlr4 = require('antlr4/index'); var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0002\u001f\u00cd\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u0002 \u0135\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", @@ -13,129 +13,189 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "\u0004\u0014\t\u0014\u0004\u0015\t\u0015\u0004\u0016\t\u0016\u0004\u0017", "\t\u0017\u0004\u0018\t\u0018\u0004\u0019\t\u0019\u0004\u001a\t\u001a", "\u0004\u001b\t\u001b\u0004\u001c\t\u001c\u0004\u001d\t\u001d\u0004\u001e", - "\t\u001e\u0003\u0002\u0005\u0002?\n\u0002\u0003\u0002\u0006\u0002B\n", - "\u0002\r\u0002\u000e\u0002C\u0003\u0003\u0005\u0003G\n\u0003\u0003\u0003", - "\u0006\u0003J\n\u0003\r\u0003\u000e\u0003K\u0003\u0003\u0003\u0003\u0006", - "\u0003P\n\u0003\r\u0003\u000e\u0003Q\u0003\u0004\u0003\u0004\u0003\u0004", - "\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0005\u0004", - "\\\n\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005", - "\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0005\u0005", - "h\n\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006", - "\u0003\u0006\u0005\u0006p\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007", - "\u0003\u0007\u0005\u0007v\n\u0007\u0003\b\u0003\b\u0003\b\u0003\b\u0003", - "\b\u0003\b\u0003\b\u0003\b\u0005\b\u0080\n\b\u0003\t\u0006\t\u0083\n", - "\t\r\t\u000e\t\u0084\u0003\n\u0003\n\u0003\n\u0003\n\u0007\n\u008b\n", - "\n\f\n\u000e\n\u008e\u000b\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b", - "\u0003\u000b\u0003\u000b\u0007\u000b\u0096\n\u000b\f\u000b\u000e\u000b", - "\u0099\u000b\u000b\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\r\u0003", - "\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010\u0003", - "\u0010\u0003\u0011\u0003\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003", - "\u0012\u0003\u0013\u0003\u0013\u0003\u0013\u0003\u0014\u0003\u0014\u0003", - "\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003\u0016\u0003\u0017\u0003", - "\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0018\u0003\u0019\u0003", - 
"\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c\u0003", - "\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0006\u001e\u00c8\n\u001e", - "\r\u001e\u000e\u001e\u00c9\u0003\u001e\u0003\u001e\u0002\u0002\u001f", - "\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b\u000f\t", - "\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b\u000f\u001d\u0010", - "\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+\u0017-\u0018/\u0019", - "1\u001a3\u001b5\u001c7\u001d9\u001e;\u001f\u0003\u0002\u0007\u0003\u0002", - "2;\u0007\u0002002;C\\aac|\u0003\u0002$$\u0003\u0002))\u0005\u0002\u000b", - "\f\u000f\u000f\"\"\u0002\u00dc\u0002\u0003\u0003\u0002\u0002\u0002\u0002", - "\u0005\u0003\u0002\u0002\u0002\u0002\u0007\u0003\u0002\u0002\u0002\u0002", - "\t\u0003\u0002\u0002\u0002\u0002\u000b\u0003\u0002\u0002\u0002\u0002", - "\r\u0003\u0002\u0002\u0002\u0002\u000f\u0003\u0002\u0002\u0002\u0002", - "\u0011\u0003\u0002\u0002\u0002\u0002\u0013\u0003\u0002\u0002\u0002\u0002", - "\u0015\u0003\u0002\u0002\u0002\u0002\u0017\u0003\u0002\u0002\u0002\u0002", - "\u0019\u0003\u0002\u0002\u0002\u0002\u001b\u0003\u0002\u0002\u0002\u0002", - "\u001d\u0003\u0002\u0002\u0002\u0002\u001f\u0003\u0002\u0002\u0002\u0002", - "!\u0003\u0002\u0002\u0002\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003", - "\u0002\u0002\u0002\u0002\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002", - "\u0002\u0002\u0002+\u0003\u0002\u0002\u0002\u0002-\u0003\u0002\u0002", - "\u0002\u0002/\u0003\u0002\u0002\u0002\u00021\u0003\u0002\u0002\u0002", - "\u00023\u0003\u0002\u0002\u0002\u00025\u0003\u0002\u0002\u0002\u0002", - "7\u0003\u0002\u0002\u0002\u00029\u0003\u0002\u0002\u0002\u0002;\u0003", - "\u0002\u0002\u0002\u0003>\u0003\u0002\u0002\u0002\u0005F\u0003\u0002", - "\u0002\u0002\u0007[\u0003\u0002\u0002\u0002\tg\u0003\u0002\u0002\u0002", - "\u000bo\u0003\u0002\u0002\u0002\ru\u0003\u0002\u0002\u0002\u000f\u007f", - "\u0003\u0002\u0002\u0002\u0011\u0082\u0003\u0002\u0002\u0002\u0013\u0086", - "\u0003\u0002\u0002\u0002\u0015\u0091\u0003\u0002\u0002\u0002\u0017\u009c", - "\u0003\u0002\u0002\u0002\u0019\u009e\u0003\u0002\u0002\u0002\u001b\u00a0", - "\u0003\u0002\u0002\u0002\u001d\u00a2\u0003\u0002\u0002\u0002\u001f\u00a4", - "\u0003\u0002\u0002\u0002!\u00a6\u0003\u0002\u0002\u0002#\u00a9\u0003", - "\u0002\u0002\u0002%\u00ac\u0003\u0002\u0002\u0002\'\u00af\u0003\u0002", - "\u0002\u0002)\u00b2\u0003\u0002\u0002\u0002+\u00b4\u0003\u0002\u0002", - "\u0002-\u00b6\u0003\u0002\u0002\u0002/\u00b9\u0003\u0002\u0002\u0002", - "1\u00bc\u0003\u0002\u0002\u00023\u00be\u0003\u0002\u0002\u00025\u00c0", - "\u0003\u0002\u0002\u00027\u00c2\u0003\u0002\u0002\u00029\u00c4\u0003", - "\u0002\u0002\u0002;\u00c7\u0003\u0002\u0002\u0002=?\u0007/\u0002\u0002", - ">=\u0003\u0002\u0002\u0002>?\u0003\u0002\u0002\u0002?A\u0003\u0002\u0002", - "\u0002@B\t\u0002\u0002\u0002A@\u0003\u0002\u0002\u0002BC\u0003\u0002", - "\u0002\u0002CA\u0003\u0002\u0002\u0002CD\u0003\u0002\u0002\u0002D\u0004", - "\u0003\u0002\u0002\u0002EG\u0007/\u0002\u0002FE\u0003\u0002\u0002\u0002", - "FG\u0003\u0002\u0002\u0002GI\u0003\u0002\u0002\u0002HJ\t\u0002\u0002", - "\u0002IH\u0003\u0002\u0002\u0002JK\u0003\u0002\u0002\u0002KI\u0003\u0002", - "\u0002\u0002KL\u0003\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002MO\u0007", - "0\u0002\u0002NP\t\u0002\u0002\u0002ON\u0003\u0002\u0002\u0002PQ\u0003", - "\u0002\u0002\u0002QO\u0003\u0002\u0002\u0002QR\u0003\u0002\u0002\u0002", - "R\u0006\u0003\u0002\u0002\u0002ST\u0007v\u0002\u0002TU\u0007t\u0002", - 
"\u0002UV\u0007w\u0002\u0002V\\\u0007g\u0002\u0002WX\u0007V\u0002\u0002", - "XY\u0007T\u0002\u0002YZ\u0007W\u0002\u0002Z\\\u0007G\u0002\u0002[S\u0003", - "\u0002\u0002\u0002[W\u0003\u0002\u0002\u0002\\\b\u0003\u0002\u0002\u0002", - "]^\u0007h\u0002\u0002^_\u0007c\u0002\u0002_`\u0007n\u0002\u0002`a\u0007", - "u\u0002\u0002ah\u0007g\u0002\u0002bc\u0007H\u0002\u0002cd\u0007C\u0002", - "\u0002de\u0007N\u0002\u0002ef\u0007U\u0002\u0002fh\u0007G\u0002\u0002", - "g]\u0003\u0002\u0002\u0002gb\u0003\u0002\u0002\u0002h\n\u0003\u0002", - "\u0002\u0002ij\u0007p\u0002\u0002jk\u0007q\u0002\u0002kp\u0007v\u0002", - "\u0002lm\u0007P\u0002\u0002mn\u0007Q\u0002\u0002np\u0007V\u0002\u0002", - "oi\u0003\u0002\u0002\u0002ol\u0003\u0002\u0002\u0002p\f\u0003\u0002", - "\u0002\u0002qr\u0007k\u0002\u0002rv\u0007u\u0002\u0002st\u0007K\u0002", - "\u0002tv\u0007U\u0002\u0002uq\u0003\u0002\u0002\u0002us\u0003\u0002", - "\u0002\u0002v\u000e\u0003\u0002\u0002\u0002wx\u0007p\u0002\u0002xy\u0007", - "w\u0002\u0002yz\u0007n\u0002\u0002z\u0080\u0007n\u0002\u0002{|\u0007", - "P\u0002\u0002|}\u0007W\u0002\u0002}~\u0007N\u0002\u0002~\u0080\u0007", - "N\u0002\u0002\u007fw\u0003\u0002\u0002\u0002\u007f{\u0003\u0002\u0002", - "\u0002\u0080\u0010\u0003\u0002\u0002\u0002\u0081\u0083\t\u0003\u0002", - "\u0002\u0082\u0081\u0003\u0002\u0002\u0002\u0083\u0084\u0003\u0002\u0002", - "\u0002\u0084\u0082\u0003\u0002\u0002\u0002\u0084\u0085\u0003\u0002\u0002", - "\u0002\u0085\u0012\u0003\u0002\u0002\u0002\u0086\u008c\u0007$\u0002", - "\u0002\u0087\u0088\u0007^\u0002\u0002\u0088\u008b\u0007$\u0002\u0002", - "\u0089\u008b\n\u0004\u0002\u0002\u008a\u0087\u0003\u0002\u0002\u0002", - "\u008a\u0089\u0003\u0002\u0002\u0002\u008b\u008e\u0003\u0002\u0002\u0002", - "\u008c\u008a\u0003\u0002\u0002\u0002\u008c\u008d\u0003\u0002\u0002\u0002", - "\u008d\u008f\u0003\u0002\u0002\u0002\u008e\u008c\u0003\u0002\u0002\u0002", - "\u008f\u0090\u0007$\u0002\u0002\u0090\u0014\u0003\u0002\u0002\u0002", - "\u0091\u0097\u0007)\u0002\u0002\u0092\u0093\u0007^\u0002\u0002\u0093", - "\u0096\u0007)\u0002\u0002\u0094\u0096\n\u0005\u0002\u0002\u0095\u0092", - "\u0003\u0002\u0002\u0002\u0095\u0094\u0003\u0002\u0002\u0002\u0096\u0099", - "\u0003\u0002\u0002\u0002\u0097\u0095\u0003\u0002\u0002\u0002\u0097\u0098", - "\u0003\u0002\u0002\u0002\u0098\u009a\u0003\u0002\u0002\u0002\u0099\u0097", - "\u0003\u0002\u0002\u0002\u009a\u009b\u0007)\u0002\u0002\u009b\u0016", - "\u0003\u0002\u0002\u0002\u009c\u009d\u0007*\u0002\u0002\u009d\u0018", - "\u0003\u0002\u0002\u0002\u009e\u009f\u0007+\u0002\u0002\u009f\u001a", - "\u0003\u0002\u0002\u0002\u00a0\u00a1\u0007~\u0002\u0002\u00a1\u001c", - "\u0003\u0002\u0002\u0002\u00a2\u00a3\u0007<\u0002\u0002\u00a3\u001e", - "\u0003\u0002\u0002\u0002\u00a4\u00a5\u0007.\u0002\u0002\u00a5 \u0003", - "\u0002\u0002\u0002\u00a6\u00a7\u0007~\u0002\u0002\u00a7\u00a8\u0007", - "~\u0002\u0002\u00a8\"\u0003\u0002\u0002\u0002\u00a9\u00aa\u0007(\u0002", - "\u0002\u00aa\u00ab\u0007(\u0002\u0002\u00ab$\u0003\u0002\u0002\u0002", - "\u00ac\u00ad\u0007?\u0002\u0002\u00ad\u00ae\u0007?\u0002\u0002\u00ae", - "&\u0003\u0002\u0002\u0002\u00af\u00b0\u0007#\u0002\u0002\u00b0\u00b1", - "\u0007?\u0002\u0002\u00b1(\u0003\u0002\u0002\u0002\u00b2\u00b3\u0007", - "@\u0002\u0002\u00b3*\u0003\u0002\u0002\u0002\u00b4\u00b5\u0007>\u0002", - "\u0002\u00b5,\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007@\u0002\u0002", - "\u00b7\u00b8\u0007?\u0002\u0002\u00b8.\u0003\u0002\u0002\u0002\u00b9", - "\u00ba\u0007>\u0002\u0002\u00ba\u00bb\u0007?\u0002\u0002\u00bb0\u0003", - 
"\u0002\u0002\u0002\u00bc\u00bd\u0007-\u0002\u0002\u00bd2\u0003\u0002", - "\u0002\u0002\u00be\u00bf\u0007/\u0002\u0002\u00bf4\u0003\u0002\u0002", - "\u0002\u00c0\u00c1\u0007,\u0002\u0002\u00c16\u0003\u0002\u0002\u0002", - "\u00c2\u00c3\u00071\u0002\u0002\u00c38\u0003\u0002\u0002\u0002\u00c4", - "\u00c5\u0007A\u0002\u0002\u00c5:\u0003\u0002\u0002\u0002\u00c6\u00c8", - "\t\u0006\u0002\u0002\u00c7\u00c6\u0003\u0002\u0002\u0002\u00c8\u00c9", - "\u0003\u0002\u0002\u0002\u00c9\u00c7\u0003\u0002\u0002\u0002\u00c9\u00ca", - "\u0003\u0002\u0002\u0002\u00ca\u00cb\u0003\u0002\u0002\u0002\u00cb\u00cc", - "\b\u001e\u0002\u0002\u00cc<\u0003\u0002\u0002\u0002\u0013\u0002>CFK", - "Q[gou\u007f\u0084\u008a\u008c\u0095\u0097\u00c9\u0003\b\u0002\u0002"].join(""); + "\t\u001e\u0004\u001f\t\u001f\u0004 \t \u0004!\t!\u0004\"\t\"\u0004#", + "\t#\u0004$\t$\u0004%\t%\u0004&\t&\u0004\'\t\'\u0004(\t(\u0004)\t)\u0004", + "*\t*\u0004+\t+\u0004,\t,\u0004-\t-\u0004.\t.\u0004/\t/\u00040\t0\u0004", + "1\t1\u00042\t2\u00043\t3\u00044\t4\u00045\t5\u00046\t6\u00047\t7\u0004", + "8\t8\u00049\t9\u0004:\t:\u0003\u0002\u0005\u0002w\n\u0002\u0003\u0002", + "\u0006\u0002z\n\u0002\r\u0002\u000e\u0002{\u0003\u0003\u0005\u0003\u007f", + "\n\u0003\u0003\u0003\u0006\u0003\u0082\n\u0003\r\u0003\u000e\u0003\u0083", + "\u0003\u0003\u0003\u0003\u0006\u0003\u0088\n\u0003\r\u0003\u000e\u0003", + "\u0089\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003", + "\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003", + "\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003", + "\u0007\u0003\b\u0003\b\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0007", + "\t\u00a5\n\t\f\t\u000e\t\u00a8\u000b\t\u0003\n\u0003\n\u0003\n\u0003", + "\n\u0007\n\u00ae\n\n\f\n\u000e\n\u00b1\u000b\n\u0003\n\u0003\n\u0003", + "\u000b\u0003\u000b\u0003\u000b\u0003\u000b\u0007\u000b\u00b9\n\u000b", + "\f\u000b\u000e\u000b\u00bc\u000b\u000b\u0003\u000b\u0003\u000b\u0003", + "\f\u0003\f\u0003\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003", + "\u000f\u0003\u0010\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0011\u0003", + "\u0012\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003\u0013\u0003", + "\u0014\u0003\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003", + "\u0016\u0003\u0017\u0003\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003", + "\u0018\u0003\u0019\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003", + "\u001b\u0003\u001c\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003", + "\u001e\u0003\u001e\u0003\u001e\u0003\u001e\u0005\u001e\u00ef\n\u001e", + "\u0003\u001e\u0007\u001e\u00f2\n\u001e\f\u001e\u000e\u001e\u00f5\u000b", + "\u001e\u0003\u001e\u0003\u001e\u0003\u001f\u0006\u001f\u00fa\n\u001f", + "\r\u001f\u000e\u001f\u00fb\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003", + "!\u0003!\u0003\"\u0003\"\u0003#\u0003#\u0003$\u0003$\u0003%\u0003%\u0003", + "&\u0003&\u0003\'\u0003\'\u0003(\u0003(\u0003)\u0003)\u0003*\u0003*\u0003", + "+\u0003+\u0003,\u0003,\u0003-\u0003-\u0003.\u0003.\u0003/\u0003/\u0003", + "0\u00030\u00031\u00031\u00032\u00032\u00033\u00033\u00034\u00034\u0003", + "5\u00035\u00036\u00036\u00037\u00037\u00038\u00038\u00039\u00039\u0003", + ":\u0003:\u0002\u0002;\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b", + "\u0007\r\b\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b", + "\u000f\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+", + "\u0017-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?\u0002", + 
"A\u0002C\u0002E\u0002G\u0002I\u0002K\u0002M\u0002O\u0002Q\u0002S\u0002", + "U\u0002W\u0002Y\u0002[\u0002]\u0002_\u0002a\u0002c\u0002e\u0002g\u0002", + "i\u0002k\u0002m\u0002o\u0002q\u0002s\u0002\u0003\u0002#\u0005\u0002", + "C\\aac|\b\u0002&&002;C\\aac|\u0003\u0002$$\u0003\u0002))\u0004\u0002", + "\f\f\u000f\u000f\u0005\u0002\u000b\f\u000f\u000f\"\"\u0003\u00022;\u0004", + "\u0002CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002", + "GGgg\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKk", + "k\u0004\u0002LLll\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004", + "\u0002PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002", + "TTtt\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXx", + "x\u0004\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||", + "\u0002\u0127\u0002\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002", + "\u0002\u0002\u0002\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002", + "\u0002\u0002\u0002\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002", + "\u0002\u0002\u0002\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002", + "\u0002\u0002\u0002\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002", + "\u0002\u0002\u0002\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002", + "\u0002\u0002\u0002\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002", + "\u0002\u0002\u0002\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002", + "\u0002\u0002\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002", + "\u0002\u0002\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002", + "\u0002+\u0003\u0002\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002", + "/\u0003\u0002\u0002\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003", + "\u0002\u0002\u0002\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002", + "\u0002\u0002\u00029\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002", + "\u0002\u0002=\u0003\u0002\u0002\u0002\u0003v\u0003\u0002\u0002\u0002", + "\u0005~\u0003\u0002\u0002\u0002\u0007\u008b\u0003\u0002\u0002\u0002", + "\t\u0090\u0003\u0002\u0002\u0002\u000b\u0096\u0003\u0002\u0002\u0002", + "\r\u009a\u0003\u0002\u0002\u0002\u000f\u009d\u0003\u0002\u0002\u0002", + "\u0011\u00a2\u0003\u0002\u0002\u0002\u0013\u00a9\u0003\u0002\u0002\u0002", + "\u0015\u00b4\u0003\u0002\u0002\u0002\u0017\u00bf\u0003\u0002\u0002\u0002", + "\u0019\u00c1\u0003\u0002\u0002\u0002\u001b\u00c3\u0003\u0002\u0002\u0002", + "\u001d\u00c5\u0003\u0002\u0002\u0002\u001f\u00c7\u0003\u0002\u0002\u0002", + "!\u00c9\u0003\u0002\u0002\u0002#\u00cc\u0003\u0002\u0002\u0002%\u00cf", + "\u0003\u0002\u0002\u0002\'\u00d2\u0003\u0002\u0002\u0002)\u00d5\u0003", + "\u0002\u0002\u0002+\u00d7\u0003\u0002\u0002\u0002-\u00d9\u0003\u0002", + "\u0002\u0002/\u00dc\u0003\u0002\u0002\u00021\u00df\u0003\u0002\u0002", + "\u00023\u00e1\u0003\u0002\u0002\u00025\u00e3\u0003\u0002\u0002\u0002", + "7\u00e5\u0003\u0002\u0002\u00029\u00e7\u0003\u0002\u0002\u0002;\u00ee", + "\u0003\u0002\u0002\u0002=\u00f9\u0003\u0002\u0002\u0002?\u00ff\u0003", + "\u0002\u0002\u0002A\u0101\u0003\u0002\u0002\u0002C\u0103\u0003\u0002", + "\u0002\u0002E\u0105\u0003\u0002\u0002\u0002G\u0107\u0003\u0002\u0002", + "\u0002I\u0109\u0003\u0002\u0002\u0002K\u010b\u0003\u0002\u0002\u0002", + "M\u010d\u0003\u0002\u0002\u0002O\u010f\u0003\u0002\u0002\u0002Q\u0111", + "\u0003\u0002\u0002\u0002S\u0113\u0003\u0002\u0002\u0002U\u0115\u0003", + "\u0002\u0002\u0002W\u0117\u0003\u0002\u0002\u0002Y\u0119\u0003\u0002", + "\u0002\u0002[\u011b\u0003\u0002\u0002\u0002]\u011d\u0003\u0002\u0002", + 
"\u0002_\u011f\u0003\u0002\u0002\u0002a\u0121\u0003\u0002\u0002\u0002", + "c\u0123\u0003\u0002\u0002\u0002e\u0125\u0003\u0002\u0002\u0002g\u0127", + "\u0003\u0002\u0002\u0002i\u0129\u0003\u0002\u0002\u0002k\u012b\u0003", + "\u0002\u0002\u0002m\u012d\u0003\u0002\u0002\u0002o\u012f\u0003\u0002", + "\u0002\u0002q\u0131\u0003\u0002\u0002\u0002s\u0133\u0003\u0002\u0002", + "\u0002uw\u0007/\u0002\u0002vu\u0003\u0002\u0002\u0002vw\u0003\u0002", + "\u0002\u0002wy\u0003\u0002\u0002\u0002xz\u0005? \u0002yx\u0003\u0002", + "\u0002\u0002z{\u0003\u0002\u0002\u0002{y\u0003\u0002\u0002\u0002{|\u0003", + "\u0002\u0002\u0002|\u0004\u0003\u0002\u0002\u0002}\u007f\u0007/\u0002", + "\u0002~}\u0003\u0002\u0002\u0002~\u007f\u0003\u0002\u0002\u0002\u007f", + "\u0081\u0003\u0002\u0002\u0002\u0080\u0082\u0005? \u0002\u0081\u0080", + "\u0003\u0002\u0002\u0002\u0082\u0083\u0003\u0002\u0002\u0002\u0083\u0081", + "\u0003\u0002\u0002\u0002\u0083\u0084\u0003\u0002\u0002\u0002\u0084\u0085", + "\u0003\u0002\u0002\u0002\u0085\u0087\u00070\u0002\u0002\u0086\u0088", + "\u0005? \u0002\u0087\u0086\u0003\u0002\u0002\u0002\u0088\u0089\u0003", + "\u0002\u0002\u0002\u0089\u0087\u0003\u0002\u0002\u0002\u0089\u008a\u0003", + "\u0002\u0002\u0002\u008a\u0006\u0003\u0002\u0002\u0002\u008b\u008c\u0005", + "g4\u0002\u008c\u008d\u0005c2\u0002\u008d\u008e\u0005i5\u0002\u008e\u008f", + "\u0005I%\u0002\u008f\b\u0003\u0002\u0002\u0002\u0090\u0091\u0005K&\u0002", + "\u0091\u0092\u0005A!\u0002\u0092\u0093\u0005W,\u0002\u0093\u0094\u0005", + "e3\u0002\u0094\u0095\u0005I%\u0002\u0095\n\u0003\u0002\u0002\u0002\u0096", + "\u0097\u0005[.\u0002\u0097\u0098\u0005]/\u0002\u0098\u0099\u0005g4\u0002", + "\u0099\f\u0003\u0002\u0002\u0002\u009a\u009b\u0005Q)\u0002\u009b\u009c", + "\u0005e3\u0002\u009c\u000e\u0003\u0002\u0002\u0002\u009d\u009e\u0005", + "[.\u0002\u009e\u009f\u0005i5\u0002\u009f\u00a0\u0005W,\u0002\u00a0\u00a1", + "\u0005W,\u0002\u00a1\u0010\u0003\u0002\u0002\u0002\u00a2\u00a6\t\u0002", + "\u0002\u0002\u00a3\u00a5\t\u0003\u0002\u0002\u00a4\u00a3\u0003\u0002", + "\u0002\u0002\u00a5\u00a8\u0003\u0002\u0002\u0002\u00a6\u00a4\u0003\u0002", + "\u0002\u0002\u00a6\u00a7\u0003\u0002\u0002\u0002\u00a7\u0012\u0003\u0002", + "\u0002\u0002\u00a8\u00a6\u0003\u0002\u0002\u0002\u00a9\u00af\u0007$", + "\u0002\u0002\u00aa\u00ab\u0007^\u0002\u0002\u00ab\u00ae\u0007$\u0002", + "\u0002\u00ac\u00ae\n\u0004\u0002\u0002\u00ad\u00aa\u0003\u0002\u0002", + "\u0002\u00ad\u00ac\u0003\u0002\u0002\u0002\u00ae\u00b1\u0003\u0002\u0002", + "\u0002\u00af\u00ad\u0003\u0002\u0002\u0002\u00af\u00b0\u0003\u0002\u0002", + "\u0002\u00b0\u00b2\u0003\u0002\u0002\u0002\u00b1\u00af\u0003\u0002\u0002", + "\u0002\u00b2\u00b3\u0007$\u0002\u0002\u00b3\u0014\u0003\u0002\u0002", + "\u0002\u00b4\u00ba\u0007)\u0002\u0002\u00b5\u00b6\u0007^\u0002\u0002", + "\u00b6\u00b9\u0007)\u0002\u0002\u00b7\u00b9\n\u0005\u0002\u0002\u00b8", + "\u00b5\u0003\u0002\u0002\u0002\u00b8\u00b7\u0003\u0002\u0002\u0002\u00b9", + "\u00bc\u0003\u0002\u0002\u0002\u00ba\u00b8\u0003\u0002\u0002\u0002\u00ba", + "\u00bb\u0003\u0002\u0002\u0002\u00bb\u00bd\u0003\u0002\u0002\u0002\u00bc", + "\u00ba\u0003\u0002\u0002\u0002\u00bd\u00be\u0007)\u0002\u0002\u00be", + "\u0016\u0003\u0002\u0002\u0002\u00bf\u00c0\u0007*\u0002\u0002\u00c0", + "\u0018\u0003\u0002\u0002\u0002\u00c1\u00c2\u0007+\u0002\u0002\u00c2", + "\u001a\u0003\u0002\u0002\u0002\u00c3\u00c4\u0007~\u0002\u0002\u00c4", + "\u001c\u0003\u0002\u0002\u0002\u00c5\u00c6\u0007<\u0002\u0002\u00c6", + "\u001e\u0003\u0002\u0002\u0002\u00c7\u00c8\u0007.\u0002\u0002\u00c8", + " 
\u0003\u0002\u0002\u0002\u00c9\u00ca\u0007~\u0002\u0002\u00ca\u00cb", + "\u0007~\u0002\u0002\u00cb\"\u0003\u0002\u0002\u0002\u00cc\u00cd\u0007", + "(\u0002\u0002\u00cd\u00ce\u0007(\u0002\u0002\u00ce$\u0003\u0002\u0002", + "\u0002\u00cf\u00d0\u0007?\u0002\u0002\u00d0\u00d1\u0007?\u0002\u0002", + "\u00d1&\u0003\u0002\u0002\u0002\u00d2\u00d3\u0007#\u0002\u0002\u00d3", + "\u00d4\u0007?\u0002\u0002\u00d4(\u0003\u0002\u0002\u0002\u00d5\u00d6", + "\u0007@\u0002\u0002\u00d6*\u0003\u0002\u0002\u0002\u00d7\u00d8\u0007", + ">\u0002\u0002\u00d8,\u0003\u0002\u0002\u0002\u00d9\u00da\u0007@\u0002", + "\u0002\u00da\u00db\u0007?\u0002\u0002\u00db.\u0003\u0002\u0002\u0002", + "\u00dc\u00dd\u0007>\u0002\u0002\u00dd\u00de\u0007?\u0002\u0002\u00de", + "0\u0003\u0002\u0002\u0002\u00df\u00e0\u0007-\u0002\u0002\u00e02\u0003", + "\u0002\u0002\u0002\u00e1\u00e2\u0007/\u0002\u0002\u00e24\u0003\u0002", + "\u0002\u0002\u00e3\u00e4\u0007,\u0002\u0002\u00e46\u0003\u0002\u0002", + "\u0002\u00e5\u00e6\u00071\u0002\u0002\u00e68\u0003\u0002\u0002\u0002", + "\u00e7\u00e8\u0007A\u0002\u0002\u00e8:\u0003\u0002\u0002\u0002\u00e9", + "\u00ea\u0007/\u0002\u0002\u00ea\u00ef\u0007/\u0002\u0002\u00eb\u00ec", + "\u00071\u0002\u0002\u00ec\u00ef\u00071\u0002\u0002\u00ed\u00ef\u0007", + "%\u0002\u0002\u00ee\u00e9\u0003\u0002\u0002\u0002\u00ee\u00eb\u0003", + "\u0002\u0002\u0002\u00ee\u00ed\u0003\u0002\u0002\u0002\u00ef\u00f3\u0003", + "\u0002\u0002\u0002\u00f0\u00f2\n\u0006\u0002\u0002\u00f1\u00f0\u0003", + "\u0002\u0002\u0002\u00f2\u00f5\u0003\u0002\u0002\u0002\u00f3\u00f1\u0003", + "\u0002\u0002\u0002\u00f3\u00f4\u0003\u0002\u0002\u0002\u00f4\u00f6\u0003", + "\u0002\u0002\u0002\u00f5\u00f3\u0003\u0002\u0002\u0002\u00f6\u00f7\b", + "\u001e\u0002\u0002\u00f7<\u0003\u0002\u0002\u0002\u00f8\u00fa\t\u0007", + "\u0002\u0002\u00f9\u00f8\u0003\u0002\u0002\u0002\u00fa\u00fb\u0003\u0002", + "\u0002\u0002\u00fb\u00f9\u0003\u0002\u0002\u0002\u00fb\u00fc\u0003\u0002", + "\u0002\u0002\u00fc\u00fd\u0003\u0002\u0002\u0002\u00fd\u00fe\b\u001f", + "\u0002\u0002\u00fe>\u0003\u0002\u0002\u0002\u00ff\u0100\t\b\u0002\u0002", + "\u0100@\u0003\u0002\u0002\u0002\u0101\u0102\t\t\u0002\u0002\u0102B\u0003", + "\u0002\u0002\u0002\u0103\u0104\t\n\u0002\u0002\u0104D\u0003\u0002\u0002", + "\u0002\u0105\u0106\t\u000b\u0002\u0002\u0106F\u0003\u0002\u0002\u0002", + "\u0107\u0108\t\f\u0002\u0002\u0108H\u0003\u0002\u0002\u0002\u0109\u010a", + "\t\r\u0002\u0002\u010aJ\u0003\u0002\u0002\u0002\u010b\u010c\t\u000e", + "\u0002\u0002\u010cL\u0003\u0002\u0002\u0002\u010d\u010e\t\u000f\u0002", + "\u0002\u010eN\u0003\u0002\u0002\u0002\u010f\u0110\t\u0010\u0002\u0002", + "\u0110P\u0003\u0002\u0002\u0002\u0111\u0112\t\u0011\u0002\u0002\u0112", + "R\u0003\u0002\u0002\u0002\u0113\u0114\t\u0012\u0002\u0002\u0114T\u0003", + "\u0002\u0002\u0002\u0115\u0116\t\u0013\u0002\u0002\u0116V\u0003\u0002", + "\u0002\u0002\u0117\u0118\t\u0014\u0002\u0002\u0118X\u0003\u0002\u0002", + "\u0002\u0119\u011a\t\u0015\u0002\u0002\u011aZ\u0003\u0002\u0002\u0002", + "\u011b\u011c\t\u0016\u0002\u0002\u011c\\\u0003\u0002\u0002\u0002\u011d", + "\u011e\t\u0017\u0002\u0002\u011e^\u0003\u0002\u0002\u0002\u011f\u0120", + "\t\u0018\u0002\u0002\u0120`\u0003\u0002\u0002\u0002\u0121\u0122\t\u0019", + "\u0002\u0002\u0122b\u0003\u0002\u0002\u0002\u0123\u0124\t\u001a\u0002", + "\u0002\u0124d\u0003\u0002\u0002\u0002\u0125\u0126\t\u001b\u0002\u0002", + "\u0126f\u0003\u0002\u0002\u0002\u0127\u0128\t\u001c\u0002\u0002\u0128", + "h\u0003\u0002\u0002\u0002\u0129\u012a\t\u001d\u0002\u0002\u012aj\u0003", + 
"\u0002\u0002\u0002\u012b\u012c\t\u001e\u0002\u0002\u012cl\u0003\u0002", + "\u0002\u0002\u012d\u012e\t\u001f\u0002\u0002\u012en\u0003\u0002\u0002", + "\u0002\u012f\u0130\t \u0002\u0002\u0130p\u0003\u0002\u0002\u0002\u0131", + "\u0132\t!\u0002\u0002\u0132r\u0003\u0002\u0002\u0002\u0133\u0134\t\"", + "\u0002\u0002\u0134t\u0003\u0002\u0002\u0002\u0010\u0002v{~\u0083\u0089", + "\u00a6\u00ad\u00af\u00b8\u00ba\u00ee\u00f3\u00fb\u0003\u0002\u0003\u0002"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -186,7 +246,8 @@ TelLexer.MINUS = 25; TelLexer.MULT = 26; TelLexer.DIV = 27; TelLexer.OPTIONAL_TAXON_OPERATOR = 28; -TelLexer.WS = 29; +TelLexer.SINGLE_LINE_COMMENT = 29; +TelLexer.WS = 30; TelLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; @@ -206,7 +267,8 @@ TelLexer.prototype.symbolicNames = [ null, "INT", "REAL", "TRUE", "FALSE", "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", - "DIV", "OPTIONAL_TAXON_OPERATOR", "WS" ]; + "DIV", "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", + "WS" ]; TelLexer.prototype.ruleNames = [ "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", "KW_NULL", "WORD", "STRING_CONSTANT", @@ -215,9 +277,13 @@ TelLexer.prototype.ruleNames = [ "INT", "REAL", "TRUE", "FALSE", "NOT", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "WS" ]; + "SINGLE_LINE_COMMENT", "WS", "DIGIT", "A", + "B", "C", "D", "E", "F", "G", "H", "I", + "J", "K", "L", "M", "N", "O", "P", "Q", + "R", "S", "T", "U", "V", "W", "X", "Y", + "Z" ]; -TelLexer.prototype.grammarFileName = "Tel.g4"; +TelLexer.prototype.grammarFileName = "TelLexer.g4"; exports.TelLexer = TelLexer; diff --git a/js-temp/TelListener.js b/js-temp/TelListener.js deleted file mode 100644 index 23a3904..0000000 --- a/js-temp/TelListener.js +++ /dev/null @@ -1,168 +0,0 @@ -// Generated from grammar/Tel.g4 by ANTLR 4.8 -// jshint ignore: start -var antlr4 = require('antlr4/index'); - -// This class defines a complete listener for a parse tree produced by TelParser. -function TelListener() { - antlr4.tree.ParseTreeListener.call(this); - return this; -} - -TelListener.prototype = Object.create(antlr4.tree.ParseTreeListener.prototype); -TelListener.prototype.constructor = TelListener; - -// Enter a parse tree produced by TelParser#fn. -TelListener.prototype.enterFn = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#fn. -TelListener.prototype.exitFn = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#taxon. -TelListener.prototype.enterTaxon = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#taxon. -TelListener.prototype.exitTaxon = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#taxon_expr. -TelListener.prototype.enterTaxon_expr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#taxon_expr. -TelListener.prototype.exitTaxon_expr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#parse. -TelListener.prototype.enterParse = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#parse. -TelListener.prototype.exitParse = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#nullTestExpr. -TelListener.prototype.enterNullTestExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#nullTestExpr. 
-TelListener.prototype.exitNullTestExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#notExpr. -TelListener.prototype.enterNotExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#notExpr. -TelListener.prototype.exitNotExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#logicalExpr. -TelListener.prototype.enterLogicalExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#logicalExpr. -TelListener.prototype.exitLogicalExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#multiplicationExpr. -TelListener.prototype.enterMultiplicationExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#multiplicationExpr. -TelListener.prototype.exitMultiplicationExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#atomExpr. -TelListener.prototype.enterAtomExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#atomExpr. -TelListener.prototype.exitAtomExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#additiveExpr. -TelListener.prototype.enterAdditiveExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#additiveExpr. -TelListener.prototype.exitAdditiveExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#bracketExpr. -TelListener.prototype.enterBracketExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#bracketExpr. -TelListener.prototype.exitBracketExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#numberAtom. -TelListener.prototype.enterNumberAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#numberAtom. -TelListener.prototype.exitNumberAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#fnExpr. -TelListener.prototype.enterFnExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#fnExpr. -TelListener.prototype.exitFnExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#booleanAtom. -TelListener.prototype.enterBooleanAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#booleanAtom. -TelListener.prototype.exitBooleanAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#taxonSlugAtom. -TelListener.prototype.enterTaxonSlugAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#taxonSlugAtom. -TelListener.prototype.exitTaxonSlugAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#singleQuotedAtom. -TelListener.prototype.enterSingleQuotedAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#singleQuotedAtom. -TelListener.prototype.exitSingleQuotedAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#stringConstantAtom. -TelListener.prototype.enterStringConstantAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#stringConstantAtom. 
-TelListener.prototype.exitStringConstantAtom = function(ctx) { -}; - - - -exports.TelListener = TelListener; \ No newline at end of file diff --git a/js-temp/TelParser.js b/js-temp/TelParser.js index 73e6910..5bb34a0 100644 --- a/js-temp/TelParser.js +++ b/js-temp/TelParser.js @@ -1,66 +1,65 @@ -// Generated from grammar/Tel.g4 by ANTLR 4.8 +// Generated from grammar/TelParser.g4 by ANTLR 4.8 // jshint ignore: start var antlr4 = require('antlr4/index'); -var TelListener = require('./TelListener').TelListener; -var TelVisitor = require('./TelVisitor').TelVisitor; +var TelParserListener = require('./TelParserListener').TelParserListener; +var TelParserVisitor = require('./TelParserVisitor').TelParserVisitor; -var grammarFileName = "Tel.g4"; +var grammarFileName = "TelParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0003\u001fT\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", - "\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0003", - "\u0002\u0003\u0002\u0003\u0002\u0005\u0002\u0012\n\u0002\u0003\u0002", - "\u0003\u0002\u0007\u0002\u0016\n\u0002\f\u0002\u000e\u0002\u0019\u000b", - "\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003\u0003\u0005", - "\u0003 \n\u0003\u0003\u0003\u0003\u0003\u0005\u0003$\n\u0003\u0003\u0004", - "\u0005\u0004\'\n\u0004\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005", - "\u0003\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006", - "2\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006", - "\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0006", - "\u0003\u0006\u0005\u0006@\n\u0006\u0003\u0006\u0007\u0006C\n\u0006\f", - "\u0006\u000e\u0006F\u000b\u0006\u0003\u0007\u0003\u0007\u0003\u0007", - "\u0003\u0007\u0003\u0007\u0003\u0007\u0003\u0007\u0003\u0007\u0003\u0007", - "\u0003\u0007\u0005\u0007R\n\u0007\u0003\u0007\u0002\u0003\n\b\u0002", - "\u0004\u0006\b\n\f\u0002\u0007\u0003\u0002\u001c\u001d\u0003\u0002\u001a", - "\u001b\u0003\u0002\u0012\u0019\u0003\u0002\u0003\u0004\u0003\u0002\u0005", - "\u0006\u0002^\u0002\u000e\u0003\u0002\u0002\u0002\u0004\u001c\u0003", - "\u0002\u0002\u0002\u0006&\u0003\u0002\u0002\u0002\b*\u0003\u0002\u0002", - "\u0002\n1\u0003\u0002\u0002\u0002\fQ\u0003\u0002\u0002\u0002\u000e\u000f", - "\u0007\n\u0002\u0002\u000f\u0011\u0007\r\u0002\u0002\u0010\u0012\u0005", - "\n\u0006\u0002\u0011\u0010\u0003\u0002\u0002\u0002\u0011\u0012\u0003", - "\u0002\u0002\u0002\u0012\u0017\u0003\u0002\u0002\u0002\u0013\u0014\u0007", - "\u0011\u0002\u0002\u0014\u0016\u0005\n\u0006\u0002\u0015\u0013\u0003", - "\u0002\u0002\u0002\u0016\u0019\u0003\u0002\u0002\u0002\u0017\u0015\u0003", - "\u0002\u0002\u0002\u0017\u0018\u0003\u0002\u0002\u0002\u0018\u001a\u0003", - "\u0002\u0002\u0002\u0019\u0017\u0003\u0002\u0002\u0002\u001a\u001b\u0007", - "\u000e\u0002\u0002\u001b\u0003\u0003\u0002\u0002\u0002\u001c\u001f\u0007", - "\n\u0002\u0002\u001d\u001e\u0007\u000f\u0002\u0002\u001e \u0007\n\u0002", - "\u0002\u001f\u001d\u0003\u0002\u0002\u0002\u001f \u0003\u0002\u0002", - "\u0002 #\u0003\u0002\u0002\u0002!\"\u0007\u0010\u0002\u0002\"$\u0007", - "\n\u0002\u0002#!\u0003\u0002\u0002\u0002#$\u0003\u0002\u0002\u0002$", - "\u0005\u0003\u0002\u0002\u0002%\'\u0007\u001e\u0002\u0002&%\u0003\u0002", - "\u0002\u0002&\'\u0003\u0002\u0002\u0002\'(\u0003\u0002\u0002\u0002(", - ")\u0005\u0004\u0003\u0002)\u0007\u0003\u0002\u0002\u0002*+\u0005\n\u0006", - "\u0002+,\u0007\u0002\u0002\u0003,\t\u0003\u0002\u0002\u0002-.\b\u0006", - 
"\u0001\u0002./\u0007\u0007\u0002\u0002/2\u0005\n\u0006\b02\u0005\f\u0007", - "\u00021-\u0003\u0002\u0002\u000210\u0003\u0002\u0002\u00022D\u0003\u0002", - "\u0002\u000234\f\u0007\u0002\u000245\t\u0002\u0002\u00025C\u0005\n\u0006", - "\b67\f\u0006\u0002\u000278\t\u0003\u0002\u00028C\u0005\n\u0006\u0007", - "9:\f\u0005\u0002\u0002:;\t\u0004\u0002\u0002;C\u0005\n\u0006\u0006<", - "=\f\u0004\u0002\u0002=?\u0007\b\u0002\u0002>@\u0007\u0007\u0002\u0002", - "?>\u0003\u0002\u0002\u0002?@\u0003\u0002\u0002\u0002@A\u0003\u0002\u0002", - "\u0002AC\u0007\t\u0002\u0002B3\u0003\u0002\u0002\u0002B6\u0003\u0002", - "\u0002\u0002B9\u0003\u0002\u0002\u0002B<\u0003\u0002\u0002\u0002CF\u0003", - "\u0002\u0002\u0002DB\u0003\u0002\u0002\u0002DE\u0003\u0002\u0002\u0002", - "E\u000b\u0003\u0002\u0002\u0002FD\u0003\u0002\u0002\u0002GH\u0007\r", - "\u0002\u0002HI\u0005\n\u0006\u0002IJ\u0007\u000e\u0002\u0002JR\u0003", - "\u0002\u0002\u0002KR\t\u0005\u0002\u0002LR\u0005\u0002\u0002\u0002M", - "R\t\u0006\u0002\u0002NR\u0005\u0006\u0004\u0002OR\u0007\f\u0002\u0002", - "PR\u0007\u000b\u0002\u0002QG\u0003\u0002\u0002\u0002QK\u0003\u0002\u0002", - "\u0002QL\u0003\u0002\u0002\u0002QM\u0003\u0002\u0002\u0002QN\u0003\u0002", - "\u0002\u0002QO\u0003\u0002\u0002\u0002QP\u0003\u0002\u0002\u0002R\r", - "\u0003\u0002\u0002\u0002\f\u0011\u0017\u001f#&1?BDQ"].join(""); + "\u0003 P\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", + "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0003\u0002\u0003\u0002\u0003", + "\u0002\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0005\u0003\u0014", + "\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0005\u0003\"\n\u0003\u0003\u0003\u0007\u0003%\n\u0003", + "\f\u0003\u000e\u0003(\u000b\u0003\u0003\u0004\u0003\u0004\u0003\u0004", + "\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004", + "\u0003\u0004\u0005\u00044\n\u0004\u0003\u0005\u0003\u0005\u0003\u0005", + "\u0005\u00059\n\u0005\u0003\u0005\u0003\u0005\u0007\u0005=\n\u0005\f", + "\u0005\u000e\u0005@\u000b\u0005\u0003\u0005\u0003\u0005\u0003\u0006", + "\u0005\u0006E\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006", + "J\n\u0006\u0003\u0006\u0003\u0006\u0005\u0006N\n\u0006\u0003\u0006\u0002", + "\u0003\u0004\u0007\u0002\u0004\u0006\b\n\u0002\u0007\u0003\u0002\u001c", + "\u001d\u0003\u0002\u001a\u001b\u0003\u0002\u0012\u0019\u0003\u0002\u0003", + "\u0004\u0003\u0002\u0005\u0006\u0002[\u0002\f\u0003\u0002\u0002\u0002", + "\u0004\u0013\u0003\u0002\u0002\u0002\u00063\u0003\u0002\u0002\u0002", + "\b5\u0003\u0002\u0002\u0002\nD\u0003\u0002\u0002\u0002\f\r\u0005\u0004", + "\u0003\u0002\r\u000e\u0007\u0002\u0002\u0003\u000e\u0003\u0003\u0002", + "\u0002\u0002\u000f\u0010\b\u0003\u0001\u0002\u0010\u0011\u0007\u0007", + "\u0002\u0002\u0011\u0014\u0005\u0004\u0003\b\u0012\u0014\u0005\u0006", + "\u0004\u0002\u0013\u000f\u0003\u0002\u0002\u0002\u0013\u0012\u0003\u0002", + "\u0002\u0002\u0014&\u0003\u0002\u0002\u0002\u0015\u0016\f\u0007\u0002", + "\u0002\u0016\u0017\t\u0002\u0002\u0002\u0017%\u0005\u0004\u0003\b\u0018", + "\u0019\f\u0006\u0002\u0002\u0019\u001a\t\u0003\u0002\u0002\u001a%\u0005", + "\u0004\u0003\u0007\u001b\u001c\f\u0005\u0002\u0002\u001c\u001d\t\u0004", + "\u0002\u0002\u001d%\u0005\u0004\u0003\u0006\u001e\u001f\f\u0004\u0002", + "\u0002\u001f!\u0007\b\u0002\u0002 \"\u0007\u0007\u0002\u0002! 
\u0003", + "\u0002\u0002\u0002!\"\u0003\u0002\u0002\u0002\"#\u0003\u0002\u0002\u0002", + "#%\u0007\t\u0002\u0002$\u0015\u0003\u0002\u0002\u0002$\u0018\u0003\u0002", + "\u0002\u0002$\u001b\u0003\u0002\u0002\u0002$\u001e\u0003\u0002\u0002", + "\u0002%(\u0003\u0002\u0002\u0002&$\u0003\u0002\u0002\u0002&\'\u0003", + "\u0002\u0002\u0002\'\u0005\u0003\u0002\u0002\u0002(&\u0003\u0002\u0002", + "\u0002)*\u0007\r\u0002\u0002*+\u0005\u0004\u0003\u0002+,\u0007\u000e", + "\u0002\u0002,4\u0003\u0002\u0002\u0002-4\t\u0005\u0002\u0002.4\t\u0006", + "\u0002\u0002/4\u0007\f\u0002\u000204\u0007\u000b\u0002\u000214\u0005", + "\b\u0005\u000224\u0005\n\u0006\u00023)\u0003\u0002\u0002\u00023-\u0003", + "\u0002\u0002\u00023.\u0003\u0002\u0002\u00023/\u0003\u0002\u0002\u0002", + "30\u0003\u0002\u0002\u000231\u0003\u0002\u0002\u000232\u0003\u0002\u0002", + "\u00024\u0007\u0003\u0002\u0002\u000256\u0007\n\u0002\u000268\u0007", + "\r\u0002\u000279\u0005\u0004\u0003\u000287\u0003\u0002\u0002\u00028", + "9\u0003\u0002\u0002\u00029>\u0003\u0002\u0002\u0002:;\u0007\u0011\u0002", + "\u0002;=\u0005\u0004\u0003\u0002<:\u0003\u0002\u0002\u0002=@\u0003\u0002", + "\u0002\u0002><\u0003\u0002\u0002\u0002>?\u0003\u0002\u0002\u0002?A\u0003", + "\u0002\u0002\u0002@>\u0003\u0002\u0002\u0002AB\u0007\u000e\u0002\u0002", + "B\t\u0003\u0002\u0002\u0002CE\u0007\u001e\u0002\u0002DC\u0003\u0002", + "\u0002\u0002DE\u0003\u0002\u0002\u0002EF\u0003\u0002\u0002\u0002FI\u0007", + "\n\u0002\u0002GH\u0007\u000f\u0002\u0002HJ\u0007\n\u0002\u0002IG\u0003", + "\u0002\u0002\u0002IJ\u0003\u0002\u0002\u0002JM\u0003\u0002\u0002\u0002", + "KL\u0007\u0010\u0002\u0002LN\u0007\n\u0002\u0002MK\u0003\u0002\u0002", + "\u0002MN\u0003\u0002\u0002\u0002N\u000b\u0003\u0002\u0002\u0002\f\u0013", + "!$&38>DIM"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -80,9 +79,9 @@ var symbolicNames = [ null, "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "WS" ]; + "SINGLE_LINE_COMMENT", "WS" ]; -var ruleNames = [ "fn", "taxon", "taxon_expr", "parse", "expr", "atom" ]; +var ruleNames = [ "parse", "expr", "atom", "fn", "taxon" ]; function TelParser (input) { antlr4.Parser.call(this, input); @@ -131,323 +130,14 @@ TelParser.MINUS = 25; TelParser.MULT = 26; TelParser.DIV = 27; TelParser.OPTIONAL_TAXON_OPERATOR = 28; -TelParser.WS = 29; +TelParser.SINGLE_LINE_COMMENT = 29; +TelParser.WS = 30; -TelParser.RULE_fn = 0; -TelParser.RULE_taxon = 1; -TelParser.RULE_taxon_expr = 2; -TelParser.RULE_parse = 3; -TelParser.RULE_expr = 4; -TelParser.RULE_atom = 5; - - -function FnContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = TelParser.RULE_fn; - return this; -} - -FnContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -FnContext.prototype.constructor = FnContext; - -FnContext.prototype.WORD = function() { - return this.getToken(TelParser.WORD, 0); -}; - -FnContext.prototype.L_BRACKET = function() { - return this.getToken(TelParser.L_BRACKET, 0); -}; - -FnContext.prototype.R_BRACKET = function() { - return this.getToken(TelParser.R_BRACKET, 0); -}; - -FnContext.prototype.expr = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - 
return this.getTypedRuleContexts(ExprContext); - } else { - return this.getTypedRuleContext(ExprContext,i); - } -}; - -FnContext.prototype.FN_PARAMETER_DELIMITER = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(TelParser.FN_PARAMETER_DELIMITER); - } else { - return this.getToken(TelParser.FN_PARAMETER_DELIMITER, i); - } -}; - - -FnContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { - listener.enterFn(this); - } -}; - -FnContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { - listener.exitFn(this); - } -}; - -FnContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { - return visitor.visitFn(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -TelParser.FnContext = FnContext; - -TelParser.prototype.fn = function() { - - var localctx = new FnContext(this, this._ctx, this.state); - this.enterRule(localctx, 0, TelParser.RULE_fn); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 12; - this.match(TelParser.WORD); - this.state = 13; - this.match(TelParser.L_BRACKET); - this.state = 15; - this._errHandler.sync(this); - _la = this._input.LA(1); - if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) !== 0)) { - this.state = 14; - this.expr(0); - } - - this.state = 21; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===TelParser.FN_PARAMETER_DELIMITER) { - this.state = 17; - this.match(TelParser.FN_PARAMETER_DELIMITER); - this.state = 18; - this.expr(0); - this.state = 23; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 24; - this.match(TelParser.R_BRACKET); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function TaxonContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = TelParser.RULE_taxon; - return this; -} - -TaxonContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -TaxonContext.prototype.constructor = TaxonContext; - -TaxonContext.prototype.WORD = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(TelParser.WORD); - } else { - return this.getToken(TelParser.WORD, i); - } -}; - - -TaxonContext.prototype.TAXON_NAMESPACE_DELIMITER = function() { - return this.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0); -}; - -TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { - return this.getToken(TelParser.TAXON_TAG_DELIMITER, 0); -}; - -TaxonContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { - listener.enterTaxon(this); - } -}; - -TaxonContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { - listener.exitTaxon(this); - } -}; - -TaxonContext.prototype.accept = function(visitor) { - if ( 
visitor instanceof TelVisitor ) { - return visitor.visitTaxon(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -TelParser.TaxonContext = TaxonContext; - -TelParser.prototype.taxon = function() { - - var localctx = new TaxonContext(this, this._ctx, this.state); - this.enterRule(localctx, 2, TelParser.RULE_taxon); - try { - this.enterOuterAlt(localctx, 1); - this.state = 26; - this.match(TelParser.WORD); - this.state = 29; - this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,2,this._ctx); - if(la_===1) { - this.state = 27; - this.match(TelParser.TAXON_NAMESPACE_DELIMITER); - this.state = 28; - this.match(TelParser.WORD); - - } - this.state = 33; - this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,3,this._ctx); - if(la_===1) { - this.state = 31; - this.match(TelParser.TAXON_TAG_DELIMITER); - this.state = 32; - this.match(TelParser.WORD); - - } - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function Taxon_exprContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = TelParser.RULE_taxon_expr; - return this; -} - -Taxon_exprContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -Taxon_exprContext.prototype.constructor = Taxon_exprContext; - -Taxon_exprContext.prototype.taxon = function() { - return this.getTypedRuleContext(TaxonContext,0); -}; - -Taxon_exprContext.prototype.OPTIONAL_TAXON_OPERATOR = function() { - return this.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0); -}; - -Taxon_exprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { - listener.enterTaxon_expr(this); - } -}; - -Taxon_exprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { - listener.exitTaxon_expr(this); - } -}; - -Taxon_exprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { - return visitor.visitTaxon_expr(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -TelParser.Taxon_exprContext = Taxon_exprContext; - -TelParser.prototype.taxon_expr = function() { - - var localctx = new Taxon_exprContext(this, this._ctx, this.state); - this.enterRule(localctx, 4, TelParser.RULE_taxon_expr); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 36; - this._errHandler.sync(this); - _la = this._input.LA(1); - if(_la===TelParser.OPTIONAL_TAXON_OPERATOR) { - this.state = 35; - this.match(TelParser.OPTIONAL_TAXON_OPERATOR); - } - - this.state = 38; - this.taxon(); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; +TelParser.RULE_parse = 0; +TelParser.RULE_expr = 1; +TelParser.RULE_atom = 2; +TelParser.RULE_fn = 3; +TelParser.RULE_taxon = 4; function ParseContext(parser, parent, invokingState) { @@ -475,19 +165,19 @@ ParseContext.prototype.EOF = function() { }; ParseContext.prototype.enterRule = function(listener) { - 
if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterParse(this); } }; ParseContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitParse(this); } }; ParseContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitParse(this); } else { return visitor.visitChildren(this); @@ -502,12 +192,12 @@ TelParser.ParseContext = ParseContext; TelParser.prototype.parse = function() { var localctx = new ParseContext(this, this._ctx, this.state); - this.enterRule(localctx, 6, TelParser.RULE_parse); + this.enterRule(localctx, 0, TelParser.RULE_parse); try { this.enterOuterAlt(localctx, 1); - this.state = 40; + this.state = 10; this.expr(0); - this.state = 41; + this.state = 11; this.match(TelParser.EOF); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -573,19 +263,19 @@ NullTestExprContext.prototype.NOT = function() { return this.getToken(TelParser.NOT, 0); }; NullTestExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterNullTestExpr(this); } }; NullTestExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitNullTestExpr(this); } }; NullTestExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitNullTestExpr(this); } else { return visitor.visitChildren(this); @@ -612,19 +302,19 @@ NotExprContext.prototype.expr = function() { return this.getTypedRuleContext(ExprContext,0); }; NotExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterNotExpr(this); } }; NotExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitNotExpr(this); } }; NotExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitNotExpr(this); } else { return visitor.visitChildren(this); @@ -687,19 +377,19 @@ LogicalExprContext.prototype.LTEQ = function() { return this.getToken(TelParser.LTEQ, 0); }; LogicalExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterLogicalExpr(this); } }; LogicalExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitLogicalExpr(this); } }; LogicalExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitLogicalExpr(this); } else { return visitor.visitChildren(this); @@ -738,19 +428,19 @@ MultiplicationExprContext.prototype.DIV = function() { return this.getToken(TelParser.DIV, 0); }; MultiplicationExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterMultiplicationExpr(this); } }; MultiplicationExprContext.prototype.exitRule = function(listener) { - if(listener 
instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitMultiplicationExpr(this); } }; MultiplicationExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitMultiplicationExpr(this); } else { return visitor.visitChildren(this); @@ -773,19 +463,19 @@ AtomExprContext.prototype.atom = function() { return this.getTypedRuleContext(AtomContext,0); }; AtomExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterAtomExpr(this); } }; AtomExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitAtomExpr(this); } }; AtomExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitAtomExpr(this); } else { return visitor.visitChildren(this); @@ -824,19 +514,19 @@ AdditiveExprContext.prototype.MINUS = function() { return this.getToken(TelParser.MINUS, 0); }; AdditiveExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterAdditiveExpr(this); } }; AdditiveExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitAdditiveExpr(this); } }; AdditiveExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitAdditiveExpr(this); } else { return visitor.visitChildren(this); @@ -853,12 +543,12 @@ TelParser.prototype.expr = function(_p) { var _parentState = this.state; var localctx = new ExprContext(this, this._ctx, _parentState); var _prevctx = localctx; - var _startState = 8; - this.enterRecursionRule(localctx, 8, TelParser.RULE_expr, _p); + var _startState = 2; + this.enterRecursionRule(localctx, 2, TelParser.RULE_expr, _p); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 47; + this.state = 17; this._errHandler.sync(this); switch(this._input.LA(1)) { case TelParser.NOT: @@ -866,9 +556,9 @@ TelParser.prototype.expr = function(_p) { this._ctx = localctx; _prevctx = localctx; - this.state = 44; + this.state = 14; this.match(TelParser.NOT); - this.state = 45; + this.state = 15; this.expr(6); break; case TelParser.INT: @@ -883,34 +573,34 @@ TelParser.prototype.expr = function(_p) { localctx = new AtomExprContext(this, localctx); this._ctx = localctx; _prevctx = localctx; - this.state = 46; + this.state = 16; this.atom(); break; default: throw new antlr4.error.NoViableAltException(this); } this._ctx.stop = this._input.LT(-1); - this.state = 66; + this.state = 36; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,8,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,3,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { if(this._parseListeners!==null) { this.triggerExitRuleEvent(); } _prevctx = localctx; - this.state = 64; + this.state = 34; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,7,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,2,this._ctx); switch(la_) { case 1: localctx = new MultiplicationExprContext(this, new ExprContext(this, 
_parentctx, _parentState)); this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 49; + this.state = 19; if (!( this.precpred(this._ctx, 5))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); } - this.state = 50; + this.state = 20; localctx.op = this._input.LT(1); _la = this._input.LA(1); if(!(_la===TelParser.MULT || _la===TelParser.DIV)) { @@ -920,18 +610,18 @@ TelParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 51; + this.state = 21; this.expr(6); break; case 2: localctx = new AdditiveExprContext(this, new ExprContext(this, _parentctx, _parentState)); this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 52; + this.state = 22; if (!( this.precpred(this._ctx, 4))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 4)"); } - this.state = 53; + this.state = 23; localctx.op = this._input.LT(1); _la = this._input.LA(1); if(!(_la===TelParser.PLUS || _la===TelParser.MINUS)) { @@ -941,18 +631,18 @@ TelParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 54; + this.state = 24; this.expr(5); break; case 3: localctx = new LogicalExprContext(this, new ExprContext(this, _parentctx, _parentState)); this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 55; + this.state = 25; if (!( this.precpred(this._ctx, 3))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 3)"); } - this.state = 56; + this.state = 26; localctx.op = this._input.LT(1); _la = this._input.LA(1); if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.OR) | (1 << TelParser.AND) | (1 << TelParser.EQ) | (1 << TelParser.NEQ) | (1 << TelParser.GT) | (1 << TelParser.LT) | (1 << TelParser.GTEQ) | (1 << TelParser.LTEQ))) !== 0))) { @@ -962,36 +652,36 @@ TelParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 57; + this.state = 27; this.expr(4); break; case 4: localctx = new NullTestExprContext(this, new ExprContext(this, _parentctx, _parentState)); this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 58; + this.state = 28; if (!( this.precpred(this._ctx, 2))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 2)"); } - this.state = 59; + this.state = 29; this.match(TelParser.KW_IS); - this.state = 61; + this.state = 31; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===TelParser.NOT) { - this.state = 60; + this.state = 30; this.match(TelParser.NOT); } - this.state = 63; + this.state = 33; this.match(TelParser.KW_NULL); break; } } - this.state = 68; + this.state = 38; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,8,this._ctx); + _alt = this._interp.adaptivePredict(this._input,3,this._ctx); } } catch( error) { @@ -1047,19 +737,19 @@ FnExprContext.prototype.fn = function() { return this.getTypedRuleContext(FnContext,0); }; FnExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterFnExpr(this); } }; FnExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitFnExpr(this); } }; FnExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { 
+ if ( visitor instanceof TelParserVisitor ) { return visitor.visitFnExpr(this); } else { return visitor.visitChildren(this); @@ -1078,23 +768,23 @@ TaxonSlugAtomContext.prototype.constructor = TaxonSlugAtomContext; TelParser.TaxonSlugAtomContext = TaxonSlugAtomContext; -TaxonSlugAtomContext.prototype.taxon_expr = function() { - return this.getTypedRuleContext(Taxon_exprContext,0); +TaxonSlugAtomContext.prototype.taxon = function() { + return this.getTypedRuleContext(TaxonContext,0); }; TaxonSlugAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterTaxonSlugAtom(this); } }; TaxonSlugAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitTaxonSlugAtom(this); } }; TaxonSlugAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitTaxonSlugAtom(this); } else { return visitor.visitChildren(this); @@ -1121,19 +811,19 @@ BooleanAtomContext.prototype.FALSE = function() { return this.getToken(TelParser.FALSE, 0); }; BooleanAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterBooleanAtom(this); } }; BooleanAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitBooleanAtom(this); } }; BooleanAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitBooleanAtom(this); } else { return visitor.visitChildren(this); @@ -1164,19 +854,19 @@ BracketExprContext.prototype.R_BRACKET = function() { return this.getToken(TelParser.R_BRACKET, 0); }; BracketExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterBracketExpr(this); } }; BracketExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitBracketExpr(this); } }; BracketExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitBracketExpr(this); } else { return visitor.visitChildren(this); @@ -1199,19 +889,19 @@ SingleQuotedAtomContext.prototype.SINGLE_QUOTED_ELEMENT = function() { return this.getToken(TelParser.SINGLE_QUOTED_ELEMENT, 0); }; SingleQuotedAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterSingleQuotedAtom(this); } }; SingleQuotedAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitSingleQuotedAtom(this); } }; SingleQuotedAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitSingleQuotedAtom(this); } else { return visitor.visitChildren(this); @@ -1238,19 +928,19 @@ NumberAtomContext.prototype.REAL = function() { return this.getToken(TelParser.REAL, 0); }; NumberAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof 
TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterNumberAtom(this); } }; NumberAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitNumberAtom(this); } }; NumberAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitNumberAtom(this); } else { return visitor.visitChildren(this); @@ -1273,19 +963,19 @@ StringConstantAtomContext.prototype.STRING_CONSTANT = function() { return this.getToken(TelParser.STRING_CONSTANT, 0); }; StringConstantAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.enterStringConstantAtom(this); } }; StringConstantAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelListener ) { + if(listener instanceof TelParserListener ) { listener.exitStringConstantAtom(this); } }; StringConstantAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelVisitor ) { + if ( visitor instanceof TelParserVisitor ) { return visitor.visitStringConstantAtom(this); } else { return visitor.visitChildren(this); @@ -1299,28 +989,28 @@ TelParser.AtomContext = AtomContext; TelParser.prototype.atom = function() { var localctx = new AtomContext(this, this._ctx, this.state); - this.enterRule(localctx, 10, TelParser.RULE_atom); + this.enterRule(localctx, 4, TelParser.RULE_atom); var _la = 0; // Token type try { - this.state = 79; + this.state = 49; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,4,this._ctx); switch(la_) { case 1: localctx = new BracketExprContext(this, localctx); this.enterOuterAlt(localctx, 1); - this.state = 69; + this.state = 39; this.match(TelParser.L_BRACKET); - this.state = 70; + this.state = 40; this.expr(0); - this.state = 71; + this.state = 41; this.match(TelParser.R_BRACKET); break; case 2: localctx = new NumberAtomContext(this, localctx); this.enterOuterAlt(localctx, 2); - this.state = 73; + this.state = 43; _la = this._input.LA(1); if(!(_la===TelParser.INT || _la===TelParser.REAL)) { this._errHandler.recoverInline(this); @@ -1332,16 +1022,9 @@ TelParser.prototype.atom = function() { break; case 3: - localctx = new FnExprContext(this, localctx); - this.enterOuterAlt(localctx, 3); - this.state = 74; - this.fn(); - break; - - case 4: localctx = new BooleanAtomContext(this, localctx); - this.enterOuterAlt(localctx, 4); - this.state = 75; + this.enterOuterAlt(localctx, 3); + this.state = 44; _la = this._input.LA(1); if(!(_la===TelParser.TRUE || _la===TelParser.FALSE)) { this._errHandler.recoverInline(this); @@ -1352,25 +1035,32 @@ TelParser.prototype.atom = function() { } break; + case 4: + localctx = new SingleQuotedAtomContext(this, localctx); + this.enterOuterAlt(localctx, 4); + this.state = 45; + this.match(TelParser.SINGLE_QUOTED_ELEMENT); + break; + case 5: - localctx = new TaxonSlugAtomContext(this, localctx); + localctx = new StringConstantAtomContext(this, localctx); this.enterOuterAlt(localctx, 5); - this.state = 76; - this.taxon_expr(); + this.state = 46; + this.match(TelParser.STRING_CONSTANT); break; case 6: - localctx = new SingleQuotedAtomContext(this, localctx); + localctx = new FnExprContext(this, localctx); this.enterOuterAlt(localctx, 6); - this.state = 77; - 
this.match(TelParser.SINGLE_QUOTED_ELEMENT); + this.state = 47; + this.fn(); break; case 7: - localctx = new StringConstantAtomContext(this, localctx); + localctx = new TaxonSlugAtomContext(this, localctx); this.enterOuterAlt(localctx, 7); - this.state = 78; - this.match(TelParser.STRING_CONSTANT); + this.state = 48; + this.taxon(); break; } @@ -1389,9 +1079,250 @@ TelParser.prototype.atom = function() { }; +function FnContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = TelParser.RULE_fn; + return this; +} + +FnContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +FnContext.prototype.constructor = FnContext; + +FnContext.prototype.WORD = function() { + return this.getToken(TelParser.WORD, 0); +}; + +FnContext.prototype.L_BRACKET = function() { + return this.getToken(TelParser.L_BRACKET, 0); +}; + +FnContext.prototype.R_BRACKET = function() { + return this.getToken(TelParser.R_BRACKET, 0); +}; + +FnContext.prototype.expr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(ExprContext); + } else { + return this.getTypedRuleContext(ExprContext,i); + } +}; + +FnContext.prototype.FN_PARAMETER_DELIMITER = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(TelParser.FN_PARAMETER_DELIMITER); + } else { + return this.getToken(TelParser.FN_PARAMETER_DELIMITER, i); + } +}; + + +FnContext.prototype.enterRule = function(listener) { + if(listener instanceof TelParserListener ) { + listener.enterFn(this); + } +}; + +FnContext.prototype.exitRule = function(listener) { + if(listener instanceof TelParserListener ) { + listener.exitFn(this); + } +}; + +FnContext.prototype.accept = function(visitor) { + if ( visitor instanceof TelParserVisitor ) { + return visitor.visitFn(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +TelParser.FnContext = FnContext; + +TelParser.prototype.fn = function() { + + var localctx = new FnContext(this, this._ctx, this.state); + this.enterRule(localctx, 6, TelParser.RULE_fn); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 51; + this.match(TelParser.WORD); + this.state = 52; + this.match(TelParser.L_BRACKET); + this.state = 54; + this._errHandler.sync(this); + _la = this._input.LA(1); + if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) !== 0)) { + this.state = 53; + this.expr(0); + } + + this.state = 60; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===TelParser.FN_PARAMETER_DELIMITER) { + this.state = 56; + this.match(TelParser.FN_PARAMETER_DELIMITER); + this.state = 57; + this.expr(0); + this.state = 62; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + this.state = 63; + this.match(TelParser.R_BRACKET); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + 
+function TaxonContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = TelParser.RULE_taxon; + return this; +} + +TaxonContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +TaxonContext.prototype.constructor = TaxonContext; + +TaxonContext.prototype.WORD = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(TelParser.WORD); + } else { + return this.getToken(TelParser.WORD, i); + } +}; + + +TaxonContext.prototype.OPTIONAL_TAXON_OPERATOR = function() { + return this.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0); +}; + +TaxonContext.prototype.TAXON_NAMESPACE_DELIMITER = function() { + return this.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0); +}; + +TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { + return this.getToken(TelParser.TAXON_TAG_DELIMITER, 0); +}; + +TaxonContext.prototype.enterRule = function(listener) { + if(listener instanceof TelParserListener ) { + listener.enterTaxon(this); + } +}; + +TaxonContext.prototype.exitRule = function(listener) { + if(listener instanceof TelParserListener ) { + listener.exitTaxon(this); + } +}; + +TaxonContext.prototype.accept = function(visitor) { + if ( visitor instanceof TelParserVisitor ) { + return visitor.visitTaxon(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +TelParser.TaxonContext = TaxonContext; + +TelParser.prototype.taxon = function() { + + var localctx = new TaxonContext(this, this._ctx, this.state); + this.enterRule(localctx, 8, TelParser.RULE_taxon); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 66; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===TelParser.OPTIONAL_TAXON_OPERATOR) { + this.state = 65; + this.match(TelParser.OPTIONAL_TAXON_OPERATOR); + } + + this.state = 68; + this.match(TelParser.WORD); + this.state = 71; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,8,this._ctx); + if(la_===1) { + this.state = 69; + this.match(TelParser.TAXON_NAMESPACE_DELIMITER); + this.state = 70; + this.match(TelParser.WORD); + + } + this.state = 75; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); + if(la_===1) { + this.state = 73; + this.match(TelParser.TAXON_TAG_DELIMITER); + this.state = 74; + this.match(TelParser.WORD); + + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + TelParser.prototype.sempred = function(localctx, ruleIndex, predIndex) { switch(ruleIndex) { - case 4: + case 1: return this.expr_sempred(localctx, predIndex); default: throw "No predicate with index:" + ruleIndex; diff --git a/js-temp/TelParserListener.js b/js-temp/TelParserListener.js new file mode 100644 index 0000000..a1ff30f --- /dev/null +++ b/js-temp/TelParserListener.js @@ -0,0 +1,159 @@ +// Generated from grammar/TelParser.g4 by ANTLR 4.8 +// jshint ignore: start +var antlr4 = require('antlr4/index'); + +// This class defines a complete listener for a parse tree produced by TelParser. 
+function TelParserListener() { + antlr4.tree.ParseTreeListener.call(this); + return this; +} + +TelParserListener.prototype = Object.create(antlr4.tree.ParseTreeListener.prototype); +TelParserListener.prototype.constructor = TelParserListener; + +// Enter a parse tree produced by TelParser#parse. +TelParserListener.prototype.enterParse = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#parse. +TelParserListener.prototype.exitParse = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#nullTestExpr. +TelParserListener.prototype.enterNullTestExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#nullTestExpr. +TelParserListener.prototype.exitNullTestExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#notExpr. +TelParserListener.prototype.enterNotExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#notExpr. +TelParserListener.prototype.exitNotExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#logicalExpr. +TelParserListener.prototype.enterLogicalExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#logicalExpr. +TelParserListener.prototype.exitLogicalExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#multiplicationExpr. +TelParserListener.prototype.enterMultiplicationExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#multiplicationExpr. +TelParserListener.prototype.exitMultiplicationExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#atomExpr. +TelParserListener.prototype.enterAtomExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#atomExpr. +TelParserListener.prototype.exitAtomExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#additiveExpr. +TelParserListener.prototype.enterAdditiveExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#additiveExpr. +TelParserListener.prototype.exitAdditiveExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#bracketExpr. +TelParserListener.prototype.enterBracketExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#bracketExpr. +TelParserListener.prototype.exitBracketExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#numberAtom. +TelParserListener.prototype.enterNumberAtom = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#numberAtom. +TelParserListener.prototype.exitNumberAtom = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#booleanAtom. +TelParserListener.prototype.enterBooleanAtom = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#booleanAtom. +TelParserListener.prototype.exitBooleanAtom = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#singleQuotedAtom. +TelParserListener.prototype.enterSingleQuotedAtom = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#singleQuotedAtom. +TelParserListener.prototype.exitSingleQuotedAtom = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#stringConstantAtom. +TelParserListener.prototype.enterStringConstantAtom = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#stringConstantAtom. +TelParserListener.prototype.exitStringConstantAtom = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#fnExpr. +TelParserListener.prototype.enterFnExpr = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#fnExpr. 
+TelParserListener.prototype.exitFnExpr = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#taxonSlugAtom. +TelParserListener.prototype.enterTaxonSlugAtom = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#taxonSlugAtom. +TelParserListener.prototype.exitTaxonSlugAtom = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#fn. +TelParserListener.prototype.enterFn = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#fn. +TelParserListener.prototype.exitFn = function(ctx) { +}; + + +// Enter a parse tree produced by TelParser#taxon. +TelParserListener.prototype.enterTaxon = function(ctx) { +}; + +// Exit a parse tree produced by TelParser#taxon. +TelParserListener.prototype.exitTaxon = function(ctx) { +}; + + + +exports.TelParserListener = TelParserListener; \ No newline at end of file diff --git a/js-temp/TelVisitor.js b/js-temp/TelParserVisitor.js similarity index 57% rename from js-temp/TelVisitor.js rename to js-temp/TelParserVisitor.js index 0e2495a..a32da69 100644 --- a/js-temp/TelVisitor.js +++ b/js-temp/TelParserVisitor.js @@ -1,118 +1,112 @@ -// Generated from grammar/Tel.g4 by ANTLR 4.8 +// Generated from grammar/TelParser.g4 by ANTLR 4.8 // jshint ignore: start var antlr4 = require('antlr4/index'); // This class defines a complete generic visitor for a parse tree produced by TelParser. -function TelVisitor() { +function TelParserVisitor() { antlr4.tree.ParseTreeVisitor.call(this); return this; } -TelVisitor.prototype = Object.create(antlr4.tree.ParseTreeVisitor.prototype); -TelVisitor.prototype.constructor = TelVisitor; - -// Visit a parse tree produced by TelParser#fn. -TelVisitor.prototype.visitFn = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#taxon. -TelVisitor.prototype.visitTaxon = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#taxon_expr. -TelVisitor.prototype.visitTaxon_expr = function(ctx) { - return this.visitChildren(ctx); -}; - +TelParserVisitor.prototype = Object.create(antlr4.tree.ParseTreeVisitor.prototype); +TelParserVisitor.prototype.constructor = TelParserVisitor; // Visit a parse tree produced by TelParser#parse. -TelVisitor.prototype.visitParse = function(ctx) { +TelParserVisitor.prototype.visitParse = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#nullTestExpr. -TelVisitor.prototype.visitNullTestExpr = function(ctx) { +TelParserVisitor.prototype.visitNullTestExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#notExpr. -TelVisitor.prototype.visitNotExpr = function(ctx) { +TelParserVisitor.prototype.visitNotExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#logicalExpr. -TelVisitor.prototype.visitLogicalExpr = function(ctx) { +TelParserVisitor.prototype.visitLogicalExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#multiplicationExpr. -TelVisitor.prototype.visitMultiplicationExpr = function(ctx) { +TelParserVisitor.prototype.visitMultiplicationExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#atomExpr. -TelVisitor.prototype.visitAtomExpr = function(ctx) { +TelParserVisitor.prototype.visitAtomExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#additiveExpr. 
-TelVisitor.prototype.visitAdditiveExpr = function(ctx) { +TelParserVisitor.prototype.visitAdditiveExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#bracketExpr. -TelVisitor.prototype.visitBracketExpr = function(ctx) { +TelParserVisitor.prototype.visitBracketExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#numberAtom. -TelVisitor.prototype.visitNumberAtom = function(ctx) { +TelParserVisitor.prototype.visitNumberAtom = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#fnExpr. -TelVisitor.prototype.visitFnExpr = function(ctx) { +// Visit a parse tree produced by TelParser#booleanAtom. +TelParserVisitor.prototype.visitBooleanAtom = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#booleanAtom. -TelVisitor.prototype.visitBooleanAtom = function(ctx) { +// Visit a parse tree produced by TelParser#singleQuotedAtom. +TelParserVisitor.prototype.visitSingleQuotedAtom = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by TelParser#stringConstantAtom. +TelParserVisitor.prototype.visitStringConstantAtom = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by TelParser#fnExpr. +TelParserVisitor.prototype.visitFnExpr = function(ctx) { return this.visitChildren(ctx); }; // Visit a parse tree produced by TelParser#taxonSlugAtom. -TelVisitor.prototype.visitTaxonSlugAtom = function(ctx) { +TelParserVisitor.prototype.visitTaxonSlugAtom = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#singleQuotedAtom. -TelVisitor.prototype.visitSingleQuotedAtom = function(ctx) { +// Visit a parse tree produced by TelParser#fn. +TelParserVisitor.prototype.visitFn = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#stringConstantAtom. -TelVisitor.prototype.visitStringConstantAtom = function(ctx) { +// Visit a parse tree produced by TelParser#taxon. 
+TelParserVisitor.prototype.visitTaxon = function(ctx) { return this.visitChildren(ctx); }; -exports.TelVisitor = TelVisitor; \ No newline at end of file +exports.TelParserVisitor = TelParserVisitor; \ No newline at end of file diff --git a/python/src/tel_grammar/antlr/TelLexer.py b/python/src/tel_grammar/antlr/TelLexer.py index 78f4fd9..c513bfd 100644 --- a/python/src/tel_grammar/antlr/TelLexer.py +++ b/python/src/tel_grammar/antlr/TelLexer.py @@ -1,4 +1,4 @@ -# Generated from grammar/Tel.g4 by ANTLR 4.8 +# Generated from grammar/TelLexer.g4 by ANTLR 4.8 from antlr4 import * from io import StringIO from typing.io import TextIO @@ -8,88 +8,137 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\37") - buf.write("\u00cd\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2 ") + buf.write("\u0135\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") - buf.write("\t\36\3\2\5\2?\n\2\3\2\6\2B\n\2\r\2\16\2C\3\3\5\3G\n\3") - buf.write("\3\3\6\3J\n\3\r\3\16\3K\3\3\3\3\6\3P\n\3\r\3\16\3Q\3\4") - buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\\\n\4\3\5\3\5\3\5\3\5") - buf.write("\3\5\3\5\3\5\3\5\3\5\3\5\5\5h\n\5\3\6\3\6\3\6\3\6\3\6") - buf.write("\3\6\5\6p\n\6\3\7\3\7\3\7\3\7\5\7v\n\7\3\b\3\b\3\b\3\b") - buf.write("\3\b\3\b\3\b\3\b\5\b\u0080\n\b\3\t\6\t\u0083\n\t\r\t\16") - buf.write("\t\u0084\3\n\3\n\3\n\3\n\7\n\u008b\n\n\f\n\16\n\u008e") - buf.write("\13\n\3\n\3\n\3\13\3\13\3\13\3\13\7\13\u0096\n\13\f\13") - buf.write("\16\13\u0099\13\13\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16") - buf.write("\3\17\3\17\3\20\3\20\3\21\3\21\3\21\3\22\3\22\3\22\3\23") - buf.write("\3\23\3\23\3\24\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27") - buf.write("\3\27\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34") - buf.write("\3\34\3\35\3\35\3\36\6\36\u00c8\n\36\r\36\16\36\u00c9") - buf.write("\3\36\3\36\2\2\37\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23") - buf.write("\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25") - buf.write(")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37\3\2\7") - buf.write("\3\2\62;\7\2\60\60\62;C\\aac|\3\2$$\3\2))\5\2\13\f\17") - buf.write("\17\"\"\2\u00dc\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2") + buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") + buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") + buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") + buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") + buf.write("\3\2\5\2w\n\2\3\2\6\2z\n\2\r\2\16\2{\3\3\5\3\177\n\3\3") + buf.write("\3\6\3\u0082\n\3\r\3\16\3\u0083\3\3\3\3\6\3\u0088\n\3") + buf.write("\r\3\16\3\u0089\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3") + buf.write("\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b") + buf.write("\3\t\3\t\7\t\u00a5\n\t\f\t\16\t\u00a8\13\t\3\n\3\n\3\n") + buf.write("\3\n\7\n\u00ae\n\n\f\n\16\n\u00b1\13\n\3\n\3\n\3\13\3") + buf.write("\13\3\13\3\13\7\13\u00b9\n\13\f\13\16\13\u00bc\13\13\3") + buf.write("\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20") + buf.write("\3\21\3\21\3\21\3\22\3\22\3\22\3\23\3\23\3\23\3\24\3\24") + buf.write("\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\30") + 
buf.write("\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36") + buf.write("\3\36\3\36\3\36\3\36\5\36\u00ef\n\36\3\36\7\36\u00f2\n") + buf.write("\36\f\36\16\36\u00f5\13\36\3\36\3\36\3\37\6\37\u00fa\n") + buf.write("\37\r\37\16\37\u00fb\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3#") + buf.write("\3#\3$\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+") + buf.write("\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62") + buf.write("\3\63\3\63\3\64\3\64\3\65\3\65\3\66\3\66\3\67\3\67\38") + buf.write("\38\39\39\3:\3:\2\2;\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21") + buf.write("\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24") + buf.write("\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37") + buf.write("= ?\2A\2C\2E\2G\2I\2K\2M\2O\2Q\2S\2U\2W\2Y\2[\2]\2_\2") + buf.write("a\2c\2e\2g\2i\2k\2m\2o\2q\2s\2\3\2#\5\2C\\aac|\b\2&&\60") + buf.write("\60\62;C\\aac|\3\2$$\3\2))\4\2\f\f\17\17\5\2\13\f\17\17") + buf.write("\"\"\3\2\62;\4\2CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4") + buf.write("\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNn") + buf.write("n\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2") + buf.write("UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4") + buf.write("\2\\\\||\2\u0127\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2") buf.write("\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21") buf.write("\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3") buf.write("\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2") buf.write("\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2") buf.write("\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2") - buf.write("\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\3>\3\2") - buf.write("\2\2\5F\3\2\2\2\7[\3\2\2\2\tg\3\2\2\2\13o\3\2\2\2\ru\3") - buf.write("\2\2\2\17\177\3\2\2\2\21\u0082\3\2\2\2\23\u0086\3\2\2") - buf.write("\2\25\u0091\3\2\2\2\27\u009c\3\2\2\2\31\u009e\3\2\2\2") - buf.write("\33\u00a0\3\2\2\2\35\u00a2\3\2\2\2\37\u00a4\3\2\2\2!\u00a6") - buf.write("\3\2\2\2#\u00a9\3\2\2\2%\u00ac\3\2\2\2\'\u00af\3\2\2\2") - buf.write(")\u00b2\3\2\2\2+\u00b4\3\2\2\2-\u00b6\3\2\2\2/\u00b9\3") - buf.write("\2\2\2\61\u00bc\3\2\2\2\63\u00be\3\2\2\2\65\u00c0\3\2") - buf.write("\2\2\67\u00c2\3\2\2\29\u00c4\3\2\2\2;\u00c7\3\2\2\2=?") - buf.write("\7/\2\2>=\3\2\2\2>?\3\2\2\2?A\3\2\2\2@B\t\2\2\2A@\3\2") - buf.write("\2\2BC\3\2\2\2CA\3\2\2\2CD\3\2\2\2D\4\3\2\2\2EG\7/\2\2") - buf.write("FE\3\2\2\2FG\3\2\2\2GI\3\2\2\2HJ\t\2\2\2IH\3\2\2\2JK\3") - buf.write("\2\2\2KI\3\2\2\2KL\3\2\2\2LM\3\2\2\2MO\7\60\2\2NP\t\2") - buf.write("\2\2ON\3\2\2\2PQ\3\2\2\2QO\3\2\2\2QR\3\2\2\2R\6\3\2\2") - buf.write("\2ST\7v\2\2TU\7t\2\2UV\7w\2\2V\\\7g\2\2WX\7V\2\2XY\7T") - buf.write("\2\2YZ\7W\2\2Z\\\7G\2\2[S\3\2\2\2[W\3\2\2\2\\\b\3\2\2") - buf.write("\2]^\7h\2\2^_\7c\2\2_`\7n\2\2`a\7u\2\2ah\7g\2\2bc\7H\2") - buf.write("\2cd\7C\2\2de\7N\2\2ef\7U\2\2fh\7G\2\2g]\3\2\2\2gb\3\2") - buf.write("\2\2h\n\3\2\2\2ij\7p\2\2jk\7q\2\2kp\7v\2\2lm\7P\2\2mn") - buf.write("\7Q\2\2np\7V\2\2oi\3\2\2\2ol\3\2\2\2p\f\3\2\2\2qr\7k\2") - buf.write("\2rv\7u\2\2st\7K\2\2tv\7U\2\2uq\3\2\2\2us\3\2\2\2v\16") - buf.write("\3\2\2\2wx\7p\2\2xy\7w\2\2yz\7n\2\2z\u0080\7n\2\2{|\7") - buf.write("P\2\2|}\7W\2\2}~\7N\2\2~\u0080\7N\2\2\177w\3\2\2\2\177") - buf.write("{\3\2\2\2\u0080\20\3\2\2\2\u0081\u0083\t\3\2\2\u0082\u0081") - buf.write("\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0082\3\2\2\2\u0084") - buf.write("\u0085\3\2\2\2\u0085\22\3\2\2\2\u0086\u008c\7$\2\2\u0087") - buf.write("\u0088\7^\2\2\u0088\u008b\7$\2\2\u0089\u008b\n\4\2\2\u008a") - buf.write("\u0087\3\2\2\2\u008a\u0089\3\2\2\2\u008b\u008e\3\2\2\2") - 
buf.write("\u008c\u008a\3\2\2\2\u008c\u008d\3\2\2\2\u008d\u008f\3") - buf.write("\2\2\2\u008e\u008c\3\2\2\2\u008f\u0090\7$\2\2\u0090\24") - buf.write("\3\2\2\2\u0091\u0097\7)\2\2\u0092\u0093\7^\2\2\u0093\u0096") - buf.write("\7)\2\2\u0094\u0096\n\5\2\2\u0095\u0092\3\2\2\2\u0095") - buf.write("\u0094\3\2\2\2\u0096\u0099\3\2\2\2\u0097\u0095\3\2\2\2") - buf.write("\u0097\u0098\3\2\2\2\u0098\u009a\3\2\2\2\u0099\u0097\3") - buf.write("\2\2\2\u009a\u009b\7)\2\2\u009b\26\3\2\2\2\u009c\u009d") - buf.write("\7*\2\2\u009d\30\3\2\2\2\u009e\u009f\7+\2\2\u009f\32\3") - buf.write("\2\2\2\u00a0\u00a1\7~\2\2\u00a1\34\3\2\2\2\u00a2\u00a3") - buf.write("\7<\2\2\u00a3\36\3\2\2\2\u00a4\u00a5\7.\2\2\u00a5 \3\2") - buf.write("\2\2\u00a6\u00a7\7~\2\2\u00a7\u00a8\7~\2\2\u00a8\"\3\2") - buf.write("\2\2\u00a9\u00aa\7(\2\2\u00aa\u00ab\7(\2\2\u00ab$\3\2") - buf.write("\2\2\u00ac\u00ad\7?\2\2\u00ad\u00ae\7?\2\2\u00ae&\3\2") - buf.write("\2\2\u00af\u00b0\7#\2\2\u00b0\u00b1\7?\2\2\u00b1(\3\2") - buf.write("\2\2\u00b2\u00b3\7@\2\2\u00b3*\3\2\2\2\u00b4\u00b5\7>") - buf.write("\2\2\u00b5,\3\2\2\2\u00b6\u00b7\7@\2\2\u00b7\u00b8\7?") - buf.write("\2\2\u00b8.\3\2\2\2\u00b9\u00ba\7>\2\2\u00ba\u00bb\7?") - buf.write("\2\2\u00bb\60\3\2\2\2\u00bc\u00bd\7-\2\2\u00bd\62\3\2") - buf.write("\2\2\u00be\u00bf\7/\2\2\u00bf\64\3\2\2\2\u00c0\u00c1\7") - buf.write(",\2\2\u00c1\66\3\2\2\2\u00c2\u00c3\7\61\2\2\u00c38\3\2") - buf.write("\2\2\u00c4\u00c5\7A\2\2\u00c5:\3\2\2\2\u00c6\u00c8\t\6") - buf.write("\2\2\u00c7\u00c6\3\2\2\2\u00c8\u00c9\3\2\2\2\u00c9\u00c7") - buf.write("\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb") - buf.write("\u00cc\b\36\2\2\u00cc<\3\2\2\2\23\2>CFKQ[gou\177\u0084") - buf.write("\u008a\u008c\u0095\u0097\u00c9\3\b\2\2") + buf.write("\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2") + buf.write("\2\2\3v\3\2\2\2\5~\3\2\2\2\7\u008b\3\2\2\2\t\u0090\3\2") + buf.write("\2\2\13\u0096\3\2\2\2\r\u009a\3\2\2\2\17\u009d\3\2\2\2") + buf.write("\21\u00a2\3\2\2\2\23\u00a9\3\2\2\2\25\u00b4\3\2\2\2\27") + buf.write("\u00bf\3\2\2\2\31\u00c1\3\2\2\2\33\u00c3\3\2\2\2\35\u00c5") + buf.write("\3\2\2\2\37\u00c7\3\2\2\2!\u00c9\3\2\2\2#\u00cc\3\2\2") + buf.write("\2%\u00cf\3\2\2\2\'\u00d2\3\2\2\2)\u00d5\3\2\2\2+\u00d7") + buf.write("\3\2\2\2-\u00d9\3\2\2\2/\u00dc\3\2\2\2\61\u00df\3\2\2") + buf.write("\2\63\u00e1\3\2\2\2\65\u00e3\3\2\2\2\67\u00e5\3\2\2\2") + buf.write("9\u00e7\3\2\2\2;\u00ee\3\2\2\2=\u00f9\3\2\2\2?\u00ff\3") + buf.write("\2\2\2A\u0101\3\2\2\2C\u0103\3\2\2\2E\u0105\3\2\2\2G\u0107") + buf.write("\3\2\2\2I\u0109\3\2\2\2K\u010b\3\2\2\2M\u010d\3\2\2\2") + buf.write("O\u010f\3\2\2\2Q\u0111\3\2\2\2S\u0113\3\2\2\2U\u0115\3") + buf.write("\2\2\2W\u0117\3\2\2\2Y\u0119\3\2\2\2[\u011b\3\2\2\2]\u011d") + buf.write("\3\2\2\2_\u011f\3\2\2\2a\u0121\3\2\2\2c\u0123\3\2\2\2") + buf.write("e\u0125\3\2\2\2g\u0127\3\2\2\2i\u0129\3\2\2\2k\u012b\3") + buf.write("\2\2\2m\u012d\3\2\2\2o\u012f\3\2\2\2q\u0131\3\2\2\2s\u0133") + buf.write("\3\2\2\2uw\7/\2\2vu\3\2\2\2vw\3\2\2\2wy\3\2\2\2xz\5? ") + buf.write("\2yx\3\2\2\2z{\3\2\2\2{y\3\2\2\2{|\3\2\2\2|\4\3\2\2\2") + buf.write("}\177\7/\2\2~}\3\2\2\2~\177\3\2\2\2\177\u0081\3\2\2\2") + buf.write("\u0080\u0082\5? \2\u0081\u0080\3\2\2\2\u0082\u0083\3\2") + buf.write("\2\2\u0083\u0081\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0085") + buf.write("\3\2\2\2\u0085\u0087\7\60\2\2\u0086\u0088\5? 
\2\u0087") + buf.write("\u0086\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u0087\3\2\2\2") + buf.write("\u0089\u008a\3\2\2\2\u008a\6\3\2\2\2\u008b\u008c\5g\64") + buf.write("\2\u008c\u008d\5c\62\2\u008d\u008e\5i\65\2\u008e\u008f") + buf.write("\5I%\2\u008f\b\3\2\2\2\u0090\u0091\5K&\2\u0091\u0092\5") + buf.write("A!\2\u0092\u0093\5W,\2\u0093\u0094\5e\63\2\u0094\u0095") + buf.write("\5I%\2\u0095\n\3\2\2\2\u0096\u0097\5[.\2\u0097\u0098\5") + buf.write("]/\2\u0098\u0099\5g\64\2\u0099\f\3\2\2\2\u009a\u009b\5") + buf.write("Q)\2\u009b\u009c\5e\63\2\u009c\16\3\2\2\2\u009d\u009e") + buf.write("\5[.\2\u009e\u009f\5i\65\2\u009f\u00a0\5W,\2\u00a0\u00a1") + buf.write("\5W,\2\u00a1\20\3\2\2\2\u00a2\u00a6\t\2\2\2\u00a3\u00a5") + buf.write("\t\3\2\2\u00a4\u00a3\3\2\2\2\u00a5\u00a8\3\2\2\2\u00a6") + buf.write("\u00a4\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\22\3\2\2\2\u00a8") + buf.write("\u00a6\3\2\2\2\u00a9\u00af\7$\2\2\u00aa\u00ab\7^\2\2\u00ab") + buf.write("\u00ae\7$\2\2\u00ac\u00ae\n\4\2\2\u00ad\u00aa\3\2\2\2") + buf.write("\u00ad\u00ac\3\2\2\2\u00ae\u00b1\3\2\2\2\u00af\u00ad\3") + buf.write("\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b2\3\2\2\2\u00b1\u00af") + buf.write("\3\2\2\2\u00b2\u00b3\7$\2\2\u00b3\24\3\2\2\2\u00b4\u00ba") + buf.write("\7)\2\2\u00b5\u00b6\7^\2\2\u00b6\u00b9\7)\2\2\u00b7\u00b9") + buf.write("\n\5\2\2\u00b8\u00b5\3\2\2\2\u00b8\u00b7\3\2\2\2\u00b9") + buf.write("\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2") + buf.write("\u00bb\u00bd\3\2\2\2\u00bc\u00ba\3\2\2\2\u00bd\u00be\7") + buf.write(")\2\2\u00be\26\3\2\2\2\u00bf\u00c0\7*\2\2\u00c0\30\3\2") + buf.write("\2\2\u00c1\u00c2\7+\2\2\u00c2\32\3\2\2\2\u00c3\u00c4\7") + buf.write("~\2\2\u00c4\34\3\2\2\2\u00c5\u00c6\7<\2\2\u00c6\36\3\2") + buf.write("\2\2\u00c7\u00c8\7.\2\2\u00c8 \3\2\2\2\u00c9\u00ca\7~") + buf.write("\2\2\u00ca\u00cb\7~\2\2\u00cb\"\3\2\2\2\u00cc\u00cd\7") + buf.write("(\2\2\u00cd\u00ce\7(\2\2\u00ce$\3\2\2\2\u00cf\u00d0\7") + buf.write("?\2\2\u00d0\u00d1\7?\2\2\u00d1&\3\2\2\2\u00d2\u00d3\7") + buf.write("#\2\2\u00d3\u00d4\7?\2\2\u00d4(\3\2\2\2\u00d5\u00d6\7") + buf.write("@\2\2\u00d6*\3\2\2\2\u00d7\u00d8\7>\2\2\u00d8,\3\2\2\2") + buf.write("\u00d9\u00da\7@\2\2\u00da\u00db\7?\2\2\u00db.\3\2\2\2") + buf.write("\u00dc\u00dd\7>\2\2\u00dd\u00de\7?\2\2\u00de\60\3\2\2") + buf.write("\2\u00df\u00e0\7-\2\2\u00e0\62\3\2\2\2\u00e1\u00e2\7/") + buf.write("\2\2\u00e2\64\3\2\2\2\u00e3\u00e4\7,\2\2\u00e4\66\3\2") + buf.write("\2\2\u00e5\u00e6\7\61\2\2\u00e68\3\2\2\2\u00e7\u00e8\7") + buf.write("A\2\2\u00e8:\3\2\2\2\u00e9\u00ea\7/\2\2\u00ea\u00ef\7") + buf.write("/\2\2\u00eb\u00ec\7\61\2\2\u00ec\u00ef\7\61\2\2\u00ed") + buf.write("\u00ef\7%\2\2\u00ee\u00e9\3\2\2\2\u00ee\u00eb\3\2\2\2") + buf.write("\u00ee\u00ed\3\2\2\2\u00ef\u00f3\3\2\2\2\u00f0\u00f2\n") + buf.write("\6\2\2\u00f1\u00f0\3\2\2\2\u00f2\u00f5\3\2\2\2\u00f3\u00f1") + buf.write("\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f6\3\2\2\2\u00f5") + buf.write("\u00f3\3\2\2\2\u00f6\u00f7\b\36\2\2\u00f7<\3\2\2\2\u00f8") + buf.write("\u00fa\t\7\2\2\u00f9\u00f8\3\2\2\2\u00fa\u00fb\3\2\2\2") + buf.write("\u00fb\u00f9\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00fd\3") + buf.write("\2\2\2\u00fd\u00fe\b\37\2\2\u00fe>\3\2\2\2\u00ff\u0100") + buf.write("\t\b\2\2\u0100@\3\2\2\2\u0101\u0102\t\t\2\2\u0102B\3\2") + buf.write("\2\2\u0103\u0104\t\n\2\2\u0104D\3\2\2\2\u0105\u0106\t") + buf.write("\13\2\2\u0106F\3\2\2\2\u0107\u0108\t\f\2\2\u0108H\3\2") + buf.write("\2\2\u0109\u010a\t\r\2\2\u010aJ\3\2\2\2\u010b\u010c\t") + buf.write("\16\2\2\u010cL\3\2\2\2\u010d\u010e\t\17\2\2\u010eN\3\2") + 
buf.write("\2\2\u010f\u0110\t\20\2\2\u0110P\3\2\2\2\u0111\u0112\t") + buf.write("\21\2\2\u0112R\3\2\2\2\u0113\u0114\t\22\2\2\u0114T\3\2") + buf.write("\2\2\u0115\u0116\t\23\2\2\u0116V\3\2\2\2\u0117\u0118\t") + buf.write("\24\2\2\u0118X\3\2\2\2\u0119\u011a\t\25\2\2\u011aZ\3\2") + buf.write("\2\2\u011b\u011c\t\26\2\2\u011c\\\3\2\2\2\u011d\u011e") + buf.write("\t\27\2\2\u011e^\3\2\2\2\u011f\u0120\t\30\2\2\u0120`\3") + buf.write("\2\2\2\u0121\u0122\t\31\2\2\u0122b\3\2\2\2\u0123\u0124") + buf.write("\t\32\2\2\u0124d\3\2\2\2\u0125\u0126\t\33\2\2\u0126f\3") + buf.write("\2\2\2\u0127\u0128\t\34\2\2\u0128h\3\2\2\2\u0129\u012a") + buf.write("\t\35\2\2\u012aj\3\2\2\2\u012b\u012c\t\36\2\2\u012cl\3") + buf.write("\2\2\2\u012d\u012e\t\37\2\2\u012en\3\2\2\2\u012f\u0130") + buf.write("\t \2\2\u0130p\3\2\2\2\u0131\u0132\t!\2\2\u0132r\3\2\2") + buf.write("\2\u0133\u0134\t\"\2\2\u0134t\3\2\2\2\20\2v{~\u0083\u0089") + buf.write("\u00a6\u00ad\u00af\u00b8\u00ba\u00ee\u00f3\u00fb\3\2\3") + buf.write("\2") return buf.getvalue() @@ -127,7 +176,8 @@ class TelLexer(Lexer): MULT = 26 DIV = 27 OPTIONAL_TAXON_OPERATOR = 28 - WS = 29 + SINGLE_LINE_COMMENT = 29 + WS = 30 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -142,16 +192,20 @@ class TelLexer(Lexer): "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", "L_BRACKET", "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", - "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", "WS" ] + "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", + "WS" ] ruleNames = [ "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", "KW_NULL", "WORD", "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", "L_BRACKET", "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", "DIV", - "OPTIONAL_TAXON_OPERATOR", "WS" ] + "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", "WS", + "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", "I", + "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", + "U", "V", "W", "X", "Y", "Z" ] - grammarFileName = "Tel.g4" + grammarFileName = "TelLexer.g4" def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) diff --git a/python/src/tel_grammar/antlr/TelParser.py b/python/src/tel_grammar/antlr/TelParser.py index 2a5230a..855e9b3 100644 --- a/python/src/tel_grammar/antlr/TelParser.py +++ b/python/src/tel_grammar/antlr/TelParser.py @@ -1,4 +1,4 @@ -# Generated from grammar/Tel.g4 by ANTLR 4.8 +# Generated from grammar/TelParser.g4 by ANTLR 4.8 # encoding: utf-8 from antlr4 import * from io import StringIO @@ -11,42 +11,41 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\37") - buf.write("T\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\3\2") - buf.write("\3\2\3\2\5\2\22\n\2\3\2\3\2\7\2\26\n\2\f\2\16\2\31\13") - buf.write("\2\3\2\3\2\3\3\3\3\3\3\5\3 \n\3\3\3\3\3\5\3$\n\3\3\4\5") - buf.write("\4\'\n\4\3\4\3\4\3\5\3\5\3\5\3\6\3\6\3\6\3\6\5\6\62\n") - buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6") - buf.write("@\n\6\3\6\7\6C\n\6\f\6\16\6F\13\6\3\7\3\7\3\7\3\7\3\7") - buf.write("\3\7\3\7\3\7\3\7\3\7\5\7R\n\7\3\7\2\3\n\b\2\4\6\b\n\f") - buf.write("\2\7\3\2\34\35\3\2\32\33\3\2\22\31\3\2\3\4\3\2\5\6\2^") - buf.write("\2\16\3\2\2\2\4\34\3\2\2\2\6&\3\2\2\2\b*\3\2\2\2\n\61") - buf.write("\3\2\2\2\fQ\3\2\2\2\16\17\7\n\2\2\17\21\7\r\2\2\20\22") - 
buf.write("\5\n\6\2\21\20\3\2\2\2\21\22\3\2\2\2\22\27\3\2\2\2\23") - buf.write("\24\7\21\2\2\24\26\5\n\6\2\25\23\3\2\2\2\26\31\3\2\2\2") - buf.write("\27\25\3\2\2\2\27\30\3\2\2\2\30\32\3\2\2\2\31\27\3\2\2") - buf.write("\2\32\33\7\16\2\2\33\3\3\2\2\2\34\37\7\n\2\2\35\36\7\17") - buf.write("\2\2\36 \7\n\2\2\37\35\3\2\2\2\37 \3\2\2\2 #\3\2\2\2!") - buf.write("\"\7\20\2\2\"$\7\n\2\2#!\3\2\2\2#$\3\2\2\2$\5\3\2\2\2") - buf.write("%\'\7\36\2\2&%\3\2\2\2&\'\3\2\2\2\'(\3\2\2\2()\5\4\3\2") - buf.write(")\7\3\2\2\2*+\5\n\6\2+,\7\2\2\3,\t\3\2\2\2-.\b\6\1\2.") - buf.write("/\7\7\2\2/\62\5\n\6\b\60\62\5\f\7\2\61-\3\2\2\2\61\60") - buf.write("\3\2\2\2\62D\3\2\2\2\63\64\f\7\2\2\64\65\t\2\2\2\65C\5") - buf.write("\n\6\b\66\67\f\6\2\2\678\t\3\2\28C\5\n\6\79:\f\5\2\2:") - buf.write(";\t\4\2\2;C\5\n\6\6<=\f\4\2\2=?\7\b\2\2>@\7\7\2\2?>\3") - buf.write("\2\2\2?@\3\2\2\2@A\3\2\2\2AC\7\t\2\2B\63\3\2\2\2B\66\3") - buf.write("\2\2\2B9\3\2\2\2B<\3\2\2\2CF\3\2\2\2DB\3\2\2\2DE\3\2\2") - buf.write("\2E\13\3\2\2\2FD\3\2\2\2GH\7\r\2\2HI\5\n\6\2IJ\7\16\2") - buf.write("\2JR\3\2\2\2KR\t\5\2\2LR\5\2\2\2MR\t\6\2\2NR\5\6\4\2O") - buf.write("R\7\f\2\2PR\7\13\2\2QG\3\2\2\2QK\3\2\2\2QL\3\2\2\2QM\3") - buf.write("\2\2\2QN\3\2\2\2QO\3\2\2\2QP\3\2\2\2R\r\3\2\2\2\f\21\27") - buf.write("\37#&\61?BDQ") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3 ") + buf.write("P\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\3\2\3\2\3\2") + buf.write("\3\3\3\3\3\3\3\3\5\3\24\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\5\3\"\n\3\3\3\7\3%\n\3\f\3\16\3") + buf.write("(\13\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\64") + buf.write("\n\4\3\5\3\5\3\5\5\59\n\5\3\5\3\5\7\5=\n\5\f\5\16\5@\13") + buf.write("\5\3\5\3\5\3\6\5\6E\n\6\3\6\3\6\3\6\5\6J\n\6\3\6\3\6\5") + buf.write("\6N\n\6\3\6\2\3\4\7\2\4\6\b\n\2\7\3\2\34\35\3\2\32\33") + buf.write("\3\2\22\31\3\2\3\4\3\2\5\6\2[\2\f\3\2\2\2\4\23\3\2\2\2") + buf.write("\6\63\3\2\2\2\b\65\3\2\2\2\nD\3\2\2\2\f\r\5\4\3\2\r\16") + buf.write("\7\2\2\3\16\3\3\2\2\2\17\20\b\3\1\2\20\21\7\7\2\2\21\24") + buf.write("\5\4\3\b\22\24\5\6\4\2\23\17\3\2\2\2\23\22\3\2\2\2\24") + buf.write("&\3\2\2\2\25\26\f\7\2\2\26\27\t\2\2\2\27%\5\4\3\b\30\31") + buf.write("\f\6\2\2\31\32\t\3\2\2\32%\5\4\3\7\33\34\f\5\2\2\34\35") + buf.write("\t\4\2\2\35%\5\4\3\6\36\37\f\4\2\2\37!\7\b\2\2 \"\7\7") + buf.write("\2\2! 
\3\2\2\2!\"\3\2\2\2\"#\3\2\2\2#%\7\t\2\2$\25\3\2") + buf.write("\2\2$\30\3\2\2\2$\33\3\2\2\2$\36\3\2\2\2%(\3\2\2\2&$\3") + buf.write("\2\2\2&\'\3\2\2\2\'\5\3\2\2\2(&\3\2\2\2)*\7\r\2\2*+\5") + buf.write("\4\3\2+,\7\16\2\2,\64\3\2\2\2-\64\t\5\2\2.\64\t\6\2\2") + buf.write("/\64\7\f\2\2\60\64\7\13\2\2\61\64\5\b\5\2\62\64\5\n\6") + buf.write("\2\63)\3\2\2\2\63-\3\2\2\2\63.\3\2\2\2\63/\3\2\2\2\63") + buf.write("\60\3\2\2\2\63\61\3\2\2\2\63\62\3\2\2\2\64\7\3\2\2\2\65") + buf.write("\66\7\n\2\2\668\7\r\2\2\679\5\4\3\28\67\3\2\2\289\3\2") + buf.write("\2\29>\3\2\2\2:;\7\21\2\2;=\5\4\3\2<:\3\2\2\2=@\3\2\2") + buf.write("\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2\2AB\7\16\2\2") + buf.write("B\t\3\2\2\2CE\7\36\2\2DC\3\2\2\2DE\3\2\2\2EF\3\2\2\2F") + buf.write("I\7\n\2\2GH\7\17\2\2HJ\7\n\2\2IG\3\2\2\2IJ\3\2\2\2JM\3") + buf.write("\2\2\2KL\7\20\2\2LN\7\n\2\2MK\3\2\2\2MN\3\2\2\2N\13\3") + buf.write("\2\2\2\f\23!$&\638>DIM") return buf.getvalue() class TelParser ( Parser ): - grammarFileName = "Tel.g4" + grammarFileName = "TelParser.g4" atn = ATNDeserializer().deserialize(serializedATN()) @@ -67,16 +66,15 @@ class TelParser ( Parser ): "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "WS" ] + "SINGLE_LINE_COMMENT", "WS" ] - RULE_fn = 0 - RULE_taxon = 1 - RULE_taxon_expr = 2 - RULE_parse = 3 - RULE_expr = 4 - RULE_atom = 5 + RULE_parse = 0 + RULE_expr = 1 + RULE_atom = 2 + RULE_fn = 3 + RULE_taxon = 4 - ruleNames = [ "fn", "taxon", "taxon_expr", "parse", "expr", "atom" ] + ruleNames = [ "parse", "expr", "atom", "fn", "taxon" ] EOF = Token.EOF INT=1 @@ -107,7 +105,8 @@ class TelParser ( Parser ): MULT=26 DIV=27 OPTIONAL_TAXON_OPERATOR=28 - WS=29 + SINGLE_LINE_COMMENT=29 + WS=30 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -118,230 +117,6 @@ def __init__(self, input:TokenStream, output:TextIO = sys.stdout): - class FnContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def WORD(self): - return self.getToken(TelParser.WORD, 0) - - def L_BRACKET(self): - return self.getToken(TelParser.L_BRACKET, 0) - - def R_BRACKET(self): - return self.getToken(TelParser.R_BRACKET, 0) - - def expr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(TelParser.ExprContext) - else: - return self.getTypedRuleContext(TelParser.ExprContext,i) - - - def FN_PARAMETER_DELIMITER(self, i:int=None): - if i is None: - return self.getTokens(TelParser.FN_PARAMETER_DELIMITER) - else: - return self.getToken(TelParser.FN_PARAMETER_DELIMITER, i) - - def getRuleIndex(self): - return TelParser.RULE_fn - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterFn" ): - listener.enterFn(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitFn" ): - listener.exitFn(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitFn" ): - return visitor.visitFn(self) - else: - return visitor.visitChildren(self) - - - - - def fn(self): - - localctx = TelParser.FnContext(self, self._ctx, self.state) - self.enterRule(localctx, 0, self.RULE_fn) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 12 - self.match(TelParser.WORD) - self.state = 13 - self.match(TelParser.L_BRACKET) - self.state = 15 - self._errHandler.sync(self) - _la = 
self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) != 0): - self.state = 14 - self.expr(0) - - - self.state = 21 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==TelParser.FN_PARAMETER_DELIMITER: - self.state = 17 - self.match(TelParser.FN_PARAMETER_DELIMITER) - self.state = 18 - self.expr(0) - self.state = 23 - self._errHandler.sync(self) - _la = self._input.LA(1) - - self.state = 24 - self.match(TelParser.R_BRACKET) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class TaxonContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def WORD(self, i:int=None): - if i is None: - return self.getTokens(TelParser.WORD) - else: - return self.getToken(TelParser.WORD, i) - - def TAXON_NAMESPACE_DELIMITER(self): - return self.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0) - - def TAXON_TAG_DELIMITER(self): - return self.getToken(TelParser.TAXON_TAG_DELIMITER, 0) - - def getRuleIndex(self): - return TelParser.RULE_taxon - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTaxon" ): - listener.enterTaxon(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTaxon" ): - listener.exitTaxon(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTaxon" ): - return visitor.visitTaxon(self) - else: - return visitor.visitChildren(self) - - - - - def taxon(self): - - localctx = TelParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 2, self.RULE_taxon) - try: - self.enterOuterAlt(localctx, 1) - self.state = 26 - self.match(TelParser.WORD) - self.state = 29 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,2,self._ctx) - if la_ == 1: - self.state = 27 - self.match(TelParser.TAXON_NAMESPACE_DELIMITER) - self.state = 28 - self.match(TelParser.WORD) - - - self.state = 33 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,3,self._ctx) - if la_ == 1: - self.state = 31 - self.match(TelParser.TAXON_TAG_DELIMITER) - self.state = 32 - self.match(TelParser.WORD) - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class Taxon_exprContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def taxon(self): - return self.getTypedRuleContext(TelParser.TaxonContext,0) - - - def OPTIONAL_TAXON_OPERATOR(self): - return self.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0) - - def getRuleIndex(self): - return TelParser.RULE_taxon_expr - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTaxon_expr" ): - listener.enterTaxon_expr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTaxon_expr" ): - listener.exitTaxon_expr(self) - - def accept(self, 
visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTaxon_expr" ): - return visitor.visitTaxon_expr(self) - else: - return visitor.visitChildren(self) - - - - - def taxon_expr(self): - - localctx = TelParser.Taxon_exprContext(self, self._ctx, self.state) - self.enterRule(localctx, 4, self.RULE_taxon_expr) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 36 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==TelParser.OPTIONAL_TAXON_OPERATOR: - self.state = 35 - self.match(TelParser.OPTIONAL_TAXON_OPERATOR) - - - self.state = 38 - self.taxon() - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - class ParseContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -378,12 +153,12 @@ def accept(self, visitor:ParseTreeVisitor): def parse(self): localctx = TelParser.ParseContext(self, self._ctx, self.state) - self.enterRule(localctx, 6, self.RULE_parse) + self.enterRule(localctx, 0, self.RULE_parse) try: self.enterOuterAlt(localctx, 1) - self.state = 40 + self.state = 10 self.expr(0) - self.state = 41 + self.state = 11 self.match(TelParser.EOF) except RecognitionException as re: localctx.exception = re @@ -609,12 +384,12 @@ def expr(self, _p:int=0): _parentState = self.state localctx = TelParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 8 - self.enterRecursionRule(localctx, 8, self.RULE_expr, _p) + _startState = 2 + self.enterRecursionRule(localctx, 2, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 47 + self.state = 17 self._errHandler.sync(self) token = self._input.LA(1) if token in [TelParser.NOT]: @@ -622,41 +397,41 @@ def expr(self, _p:int=0): self._ctx = localctx _prevctx = localctx - self.state = 44 + self.state = 14 self.match(TelParser.NOT) - self.state = 45 + self.state = 15 self.expr(6) pass elif token in [TelParser.INT, TelParser.REAL, TelParser.TRUE, TelParser.FALSE, TelParser.WORD, TelParser.STRING_CONSTANT, TelParser.SINGLE_QUOTED_ELEMENT, TelParser.L_BRACKET, TelParser.OPTIONAL_TAXON_OPERATOR]: localctx = TelParser.AtomExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 46 + self.state = 16 self.atom() pass else: raise NoViableAltException(self) self._ctx.stop = self._input.LT(-1) - self.state = 66 + self.state = 36 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 64 + self.state = 34 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,7,self._ctx) + la_ = self._interp.adaptivePredict(self._input,2,self._ctx) if la_ == 1: localctx = TelParser.MultiplicationExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 49 + self.state = 19 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 50 + self.state = 20 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not(_la==TelParser.MULT or _la==TelParser.DIV): @@ 
-664,18 +439,18 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 51 + self.state = 21 self.expr(6) pass elif la_ == 2: localctx = TelParser.AdditiveExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 52 + self.state = 22 if not self.precpred(self._ctx, 4): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 53 + self.state = 23 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not(_la==TelParser.PLUS or _la==TelParser.MINUS): @@ -683,18 +458,18 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 54 + self.state = 24 self.expr(5) pass elif la_ == 3: localctx = TelParser.LogicalExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 55 + self.state = 25 if not self.precpred(self._ctx, 3): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 56 + self.state = 26 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TelParser.OR) | (1 << TelParser.AND) | (1 << TelParser.EQ) | (1 << TelParser.NEQ) | (1 << TelParser.GT) | (1 << TelParser.LT) | (1 << TelParser.GTEQ) | (1 << TelParser.LTEQ))) != 0)): @@ -702,35 +477,35 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 57 + self.state = 27 self.expr(4) pass elif la_ == 4: localctx = TelParser.NullTestExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 58 + self.state = 28 if not self.precpred(self._ctx, 2): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 59 + self.state = 29 self.match(TelParser.KW_IS) - self.state = 61 + self.state = 31 self._errHandler.sync(self) _la = self._input.LA(1) if _la==TelParser.NOT: - self.state = 60 + self.state = 30 self.match(TelParser.NOT) - self.state = 63 + self.state = 33 self.match(TelParser.KW_NULL) pass - self.state = 68 + self.state = 38 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) except RecognitionException as re: localctx.exception = re @@ -788,8 +563,8 @@ def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomCo super().__init__(parser) self.copyFrom(ctx) - def taxon_expr(self): - return self.getTypedRuleContext(TelParser.Taxon_exprContext,0) + def taxon(self): + return self.getTypedRuleContext(TelParser.TaxonContext,0) def enterRule(self, listener:ParseTreeListener): @@ -940,27 +715,27 @@ def accept(self, visitor:ParseTreeVisitor): def atom(self): localctx = TelParser.AtomContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_atom) + self.enterRule(localctx, 4, self.RULE_atom) self._la = 0 # Token type try: - self.state = 79 + self.state = 49 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,9,self._ctx) + la_ = self._interp.adaptivePredict(self._input,4,self._ctx) if la_ == 1: localctx = TelParser.BracketExprContext(self, localctx) 
self.enterOuterAlt(localctx, 1) - self.state = 69 + self.state = 39 self.match(TelParser.L_BRACKET) - self.state = 70 + self.state = 40 self.expr(0) - self.state = 71 + self.state = 41 self.match(TelParser.R_BRACKET) pass elif la_ == 2: localctx = TelParser.NumberAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 73 + self.state = 43 _la = self._input.LA(1) if not(_la==TelParser.INT or _la==TelParser.REAL): self._errHandler.recoverInline(self) @@ -970,16 +745,9 @@ def atom(self): pass elif la_ == 3: - localctx = TelParser.FnExprContext(self, localctx) - self.enterOuterAlt(localctx, 3) - self.state = 74 - self.fn() - pass - - elif la_ == 4: localctx = TelParser.BooleanAtomContext(self, localctx) - self.enterOuterAlt(localctx, 4) - self.state = 75 + self.enterOuterAlt(localctx, 3) + self.state = 44 _la = self._input.LA(1) if not(_la==TelParser.TRUE or _la==TelParser.FALSE): self._errHandler.recoverInline(self) @@ -988,25 +756,32 @@ def atom(self): self.consume() pass + elif la_ == 4: + localctx = TelParser.SingleQuotedAtomContext(self, localctx) + self.enterOuterAlt(localctx, 4) + self.state = 45 + self.match(TelParser.SINGLE_QUOTED_ELEMENT) + pass + elif la_ == 5: - localctx = TelParser.TaxonSlugAtomContext(self, localctx) + localctx = TelParser.StringConstantAtomContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 76 - self.taxon_expr() + self.state = 46 + self.match(TelParser.STRING_CONSTANT) pass elif la_ == 6: - localctx = TelParser.SingleQuotedAtomContext(self, localctx) + localctx = TelParser.FnExprContext(self, localctx) self.enterOuterAlt(localctx, 6) - self.state = 77 - self.match(TelParser.SINGLE_QUOTED_ELEMENT) + self.state = 47 + self.fn() pass elif la_ == 7: - localctx = TelParser.StringConstantAtomContext(self, localctx) + localctx = TelParser.TaxonSlugAtomContext(self, localctx) self.enterOuterAlt(localctx, 7) - self.state = 78 - self.match(TelParser.STRING_CONSTANT) + self.state = 48 + self.taxon() pass @@ -1019,11 +794,188 @@ def atom(self): return localctx + class FnContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def WORD(self): + return self.getToken(TelParser.WORD, 0) + + def L_BRACKET(self): + return self.getToken(TelParser.L_BRACKET, 0) + + def R_BRACKET(self): + return self.getToken(TelParser.R_BRACKET, 0) + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(TelParser.ExprContext) + else: + return self.getTypedRuleContext(TelParser.ExprContext,i) + + + def FN_PARAMETER_DELIMITER(self, i:int=None): + if i is None: + return self.getTokens(TelParser.FN_PARAMETER_DELIMITER) + else: + return self.getToken(TelParser.FN_PARAMETER_DELIMITER, i) + + def getRuleIndex(self): + return TelParser.RULE_fn + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterFn" ): + listener.enterFn(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitFn" ): + listener.exitFn(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitFn" ): + return visitor.visitFn(self) + else: + return visitor.visitChildren(self) + + + + + def fn(self): + + localctx = TelParser.FnContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_fn) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 51 + self.match(TelParser.WORD) + self.state = 52 + self.match(TelParser.L_BRACKET) + 
self.state = 54 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) != 0): + self.state = 53 + self.expr(0) + + + self.state = 60 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==TelParser.FN_PARAMETER_DELIMITER: + self.state = 56 + self.match(TelParser.FN_PARAMETER_DELIMITER) + self.state = 57 + self.expr(0) + self.state = 62 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 63 + self.match(TelParser.R_BRACKET) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class TaxonContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def WORD(self, i:int=None): + if i is None: + return self.getTokens(TelParser.WORD) + else: + return self.getToken(TelParser.WORD, i) + + def OPTIONAL_TAXON_OPERATOR(self): + return self.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0) + + def TAXON_NAMESPACE_DELIMITER(self): + return self.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0) + + def TAXON_TAG_DELIMITER(self): + return self.getToken(TelParser.TAXON_TAG_DELIMITER, 0) + + def getRuleIndex(self): + return TelParser.RULE_taxon + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTaxon" ): + listener.enterTaxon(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTaxon" ): + listener.exitTaxon(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTaxon" ): + return visitor.visitTaxon(self) + else: + return visitor.visitChildren(self) + + + + + def taxon(self): + + localctx = TelParser.TaxonContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_taxon) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 66 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==TelParser.OPTIONAL_TAXON_OPERATOR: + self.state = 65 + self.match(TelParser.OPTIONAL_TAXON_OPERATOR) + + + self.state = 68 + self.match(TelParser.WORD) + self.state = 71 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,8,self._ctx) + if la_ == 1: + self.state = 69 + self.match(TelParser.TAXON_NAMESPACE_DELIMITER) + self.state = 70 + self.match(TelParser.WORD) + + + self.state = 75 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,9,self._ctx) + if la_ == 1: + self.state = 73 + self.match(TelParser.TAXON_TAG_DELIMITER) + self.state = 74 + self.match(TelParser.WORD) + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[4] = self.expr_sempred + self._predicates[1] = self.expr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git 
a/python/src/tel_grammar/antlr/TelListener.py b/python/src/tel_grammar/antlr/TelParserListener.py similarity index 92% rename from python/src/tel_grammar/antlr/TelListener.py rename to python/src/tel_grammar/antlr/TelParserListener.py index 2ddec0c..5b1119a 100644 --- a/python/src/tel_grammar/antlr/TelListener.py +++ b/python/src/tel_grammar/antlr/TelParserListener.py @@ -1,4 +1,4 @@ -# Generated from grammar/Tel.g4 by ANTLR 4.8 +# Generated from grammar/TelParser.g4 by ANTLR 4.8 from antlr4 import * if __name__ is not None and "." in __name__: from .TelParser import TelParser @@ -6,34 +6,7 @@ from TelParser import TelParser # This class defines a complete listener for a parse tree produced by TelParser. -class TelListener(ParseTreeListener): - - # Enter a parse tree produced by TelParser#fn. - def enterFn(self, ctx:TelParser.FnContext): - pass - - # Exit a parse tree produced by TelParser#fn. - def exitFn(self, ctx:TelParser.FnContext): - pass - - - # Enter a parse tree produced by TelParser#taxon. - def enterTaxon(self, ctx:TelParser.TaxonContext): - pass - - # Exit a parse tree produced by TelParser#taxon. - def exitTaxon(self, ctx:TelParser.TaxonContext): - pass - - - # Enter a parse tree produced by TelParser#taxon_expr. - def enterTaxon_expr(self, ctx:TelParser.Taxon_exprContext): - pass - - # Exit a parse tree produced by TelParser#taxon_expr. - def exitTaxon_expr(self, ctx:TelParser.Taxon_exprContext): - pass - +class TelParserListener(ParseTreeListener): # Enter a parse tree produced by TelParser#parse. def enterParse(self, ctx:TelParser.ParseContext): @@ -116,21 +89,39 @@ def exitNumberAtom(self, ctx:TelParser.NumberAtomContext): pass - # Enter a parse tree produced by TelParser#fnExpr. - def enterFnExpr(self, ctx:TelParser.FnExprContext): + # Enter a parse tree produced by TelParser#booleanAtom. + def enterBooleanAtom(self, ctx:TelParser.BooleanAtomContext): pass - # Exit a parse tree produced by TelParser#fnExpr. - def exitFnExpr(self, ctx:TelParser.FnExprContext): + # Exit a parse tree produced by TelParser#booleanAtom. + def exitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): pass - # Enter a parse tree produced by TelParser#booleanAtom. - def enterBooleanAtom(self, ctx:TelParser.BooleanAtomContext): + # Enter a parse tree produced by TelParser#singleQuotedAtom. + def enterSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): pass - # Exit a parse tree produced by TelParser#booleanAtom. - def exitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): + # Exit a parse tree produced by TelParser#singleQuotedAtom. + def exitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): + pass + + + # Enter a parse tree produced by TelParser#stringConstantAtom. + def enterStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + pass + + # Exit a parse tree produced by TelParser#stringConstantAtom. + def exitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + pass + + + # Enter a parse tree produced by TelParser#fnExpr. + def enterFnExpr(self, ctx:TelParser.FnExprContext): + pass + + # Exit a parse tree produced by TelParser#fnExpr. + def exitFnExpr(self, ctx:TelParser.FnExprContext): pass @@ -143,21 +134,21 @@ def exitTaxonSlugAtom(self, ctx:TelParser.TaxonSlugAtomContext): pass - # Enter a parse tree produced by TelParser#singleQuotedAtom. - def enterSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): + # Enter a parse tree produced by TelParser#fn. 
+ def enterFn(self, ctx:TelParser.FnContext): pass - # Exit a parse tree produced by TelParser#singleQuotedAtom. - def exitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): + # Exit a parse tree produced by TelParser#fn. + def exitFn(self, ctx:TelParser.FnContext): pass - # Enter a parse tree produced by TelParser#stringConstantAtom. - def enterStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + # Enter a parse tree produced by TelParser#taxon. + def enterTaxon(self, ctx:TelParser.TaxonContext): pass - # Exit a parse tree produced by TelParser#stringConstantAtom. - def exitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + # Exit a parse tree produced by TelParser#taxon. + def exitTaxon(self, ctx:TelParser.TaxonContext): pass diff --git a/python/src/tel_grammar/antlr/TelVisitor.py b/python/src/tel_grammar/antlr/TelParserVisitor.py similarity index 92% rename from python/src/tel_grammar/antlr/TelVisitor.py rename to python/src/tel_grammar/antlr/TelParserVisitor.py index 5bdd2ba..cdce04d 100644 --- a/python/src/tel_grammar/antlr/TelVisitor.py +++ b/python/src/tel_grammar/antlr/TelParserVisitor.py @@ -1,4 +1,4 @@ -# Generated from grammar/Tel.g4 by ANTLR 4.8 +# Generated from grammar/TelParser.g4 by ANTLR 4.8 from antlr4 import * if __name__ is not None and "." in __name__: from .TelParser import TelParser @@ -7,22 +7,7 @@ # This class defines a complete generic visitor for a parse tree produced by TelParser. -class TelVisitor(ParseTreeVisitor): - - # Visit a parse tree produced by TelParser#fn. - def visitFn(self, ctx:TelParser.FnContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#taxon. - def visitTaxon(self, ctx:TelParser.TaxonContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#taxon_expr. - def visitTaxon_expr(self, ctx:TelParser.Taxon_exprContext): - return self.visitChildren(ctx) - +class TelParserVisitor(ParseTreeVisitor): # Visit a parse tree produced by TelParser#parse. def visitParse(self, ctx:TelParser.ParseContext): @@ -69,13 +54,23 @@ def visitNumberAtom(self, ctx:TelParser.NumberAtomContext): return self.visitChildren(ctx) - # Visit a parse tree produced by TelParser#fnExpr. - def visitFnExpr(self, ctx:TelParser.FnExprContext): + # Visit a parse tree produced by TelParser#booleanAtom. + def visitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): return self.visitChildren(ctx) - # Visit a parse tree produced by TelParser#booleanAtom. - def visitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): + # Visit a parse tree produced by TelParser#singleQuotedAtom. + def visitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TelParser#stringConstantAtom. + def visitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by TelParser#fnExpr. + def visitFnExpr(self, ctx:TelParser.FnExprContext): return self.visitChildren(ctx) @@ -84,13 +79,13 @@ def visitTaxonSlugAtom(self, ctx:TelParser.TaxonSlugAtomContext): return self.visitChildren(ctx) - # Visit a parse tree produced by TelParser#singleQuotedAtom. - def visitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): + # Visit a parse tree produced by TelParser#fn. + def visitFn(self, ctx:TelParser.FnContext): return self.visitChildren(ctx) - # Visit a parse tree produced by TelParser#stringConstantAtom. 
- def visitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): + # Visit a parse tree produced by TelParser#taxon. + def visitTaxon(self, ctx:TelParser.TaxonContext): return self.visitChildren(ctx) diff --git a/python/tests/antlr_tel/grammar_test.py b/python/tests/antlr_tel/grammar_test.py index a9c255d..a9cc7fb 100644 --- a/python/tests/antlr_tel/grammar_test.py +++ b/python/tests/antlr_tel/grammar_test.py @@ -7,7 +7,7 @@ sys.path.append('./src') from tel_grammar.antlr.TelLexer import TelLexer from tel_grammar.antlr.TelParser import TelParser -from tel_grammar.antlr.TelVisitor import TelVisitor +from tel_grammar.antlr.TelParserVisitor import TelParserVisitor as TelVisitor class AssertTelVisitor(TelVisitor): From fe85ebf40fc937e908ae719a7637ce253330af42 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Mon, 9 Nov 2020 03:57:25 -0800 Subject: [PATCH 09/32] dump out Tel parser. Embed into PqlParser and expose as own entry point --- Makefile | 18 +++---- grammar/PqlLexer.g4 | 118 +++++++++++++++++++++++++++++++++++++++++++ grammar/PqlParser.g4 | 89 ++++++++++++++++++++++++++++++++ grammar/TelLexer.g4 | 66 ------------------------ grammar/TelParser.g4 | 29 ----------- 5 files changed, 216 insertions(+), 104 deletions(-) create mode 100644 grammar/PqlLexer.g4 create mode 100644 grammar/PqlParser.g4 delete mode 100644 grammar/TelLexer.g4 delete mode 100644 grammar/TelParser.g4 diff --git a/Makefile b/Makefile index 87e3ffb..493b108 100644 --- a/Makefile +++ b/Makefile @@ -69,25 +69,25 @@ image-antlr: # https://github.com/antlr/antlr4/issues/2335 # solves "cannot find token file" error -grammar/TelLexer.tokens: grammar/TelLexer.g4 +grammar/PqlLexer.tokens: grammar/PqlLexer.g4 docker run --rm \ -v $(PWD):/mnt \ antlr \ -o ./ \ - grammar/TelLexer.g4 + grammar/PqlLexer.g4 -build-code-python: grammar/TelLexer.tokens grammar/TelParser.g4 # image-antlr +build-code-python: grammar/PqlLexer.tokens grammar/PqlParser.g4# image-antlr docker run --rm \ -v $(PWD):/mnt \ antlr \ -visitor \ -Dlanguage=Python3 \ -Xexact-output-dir \ - -o python/src/tel_grammar/antlr \ - grammar/TelLexer.g4 \ - grammar/TelParser.g4 + -o python/src/pql_grammar/antlr \ + grammar/PqlLexer.g4 \ + grammar/PqlParser.g4 -build-code-js: grammar/TelLexer.tokens grammar/TelParser.g4 # image-antlr +build-code-js: grammar/PqlLexer.tokens grammar/PqlParser.g4 # image-antlr docker run --rm \ -v $(PWD):/mnt \ antlr \ @@ -95,8 +95,8 @@ build-code-js: grammar/TelLexer.tokens grammar/TelParser.g4 # image-antlr -Dlanguage=JavaScript \ -Xexact-output-dir \ -o js-temp/ \ - grammar/TelLexer.g4 \ - grammar/TelParser.g4 + grammar/PqlLexer.g4 \ + grammar/PqlParser.g4 build-code: build-code-python build-code-js diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 new file mode 100644 index 0000000..099a33a --- /dev/null +++ b/grammar/PqlLexer.g4 @@ -0,0 +1,118 @@ +lexer grammar PqlLexer; + +TAXON_TAG_DELIMITER: ':'; +TAXON_OPTIONAL_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. 
+ +// SQL-compatible (except for some TEL-isms): + +AND : '&&'; // TEL +EQ : '=='; +GT_EQ : '>='; +LT_EQ : '<='; +NOT_EQ1 : '!='; +NOT_EQ2 : '<>'; +OR : '||'; // TEL +SHIFT_LEFT : '<<'; +SHIFT_RIGHT : '>>'; + + +AMP : '&'; +ASSIGN : '='; +CLOSE_PAREN : ')'; +COMMA : ','; +DOT : '.'; +FORWARD_SLASH : '/'; +GT : '>'; +LT : '<'; +MINUS : '-'; +MOD : '%'; +OPEN_PAREN : '('; +PIPE : '|'; +PLUS : '+'; +SCOL : ';'; +STAR : '*'; +TILDE : '~'; +UNDER: '_'; + +// SQL keywords we adapt: +K_AND : A N D; +K_ASC : A S C; +K_BY : B Y; +K_DESC : D E S C; +K_FALSE : F A L S E; +K_IS : I S; +K_ISNULL : I S N U L L; +K_LIKE : L I K E; +K_LIMIT : L I M I T; +K_NOT : N O T; +K_NOTNULL : N O T N U L L; +K_NULL : N U L L; +K_OR : O R; +K_ORDER : O R D E R; +K_SELECT : S E L E C T; +K_TRUE : T R U E; +K_WHERE : W H E R E; + +NUMERIC_LITERAL + : DIGIT+ ( '.' DIGIT* )? ( E [-+]? DIGIT+ )? + | '.' DIGIT+ ( E [-+]? DIGIT+ )? + ; + +// Note, use of TEL escaping variant, +// escaping is NOT SQL style "double-char": +// TODO: allow both in TEL to avoid translation headaches +DOUBLE_QUOTED_STRING: DOUBLE_QUOTED_STRING_TEL ; +DOUBLE_QUOTED_STRING_TEL : '"' ( '\\"' | ~'"' )* '"' ; +DOUBLE_QUOTED_STRING_SQL : '"' ( '""' | ~'"' )* '"' ; + +// Note, use of TEL escaping variant, +// Note, escaping is NOT SQL style "double-char": +// TODO: allow both in TEL to avoid translation headaches +SINGLE_QUOTED_STRING: SINGLE_QUOTED_STRING_TEL ; +SINGLE_QUOTED_STRING_TEL: '\'' ( '\\\'' | ~'\'' )* '\'' ; +SINGLE_QUOTED_STRING_SQL: '\'' ( '\'\'' | ~'\'' )* '\'' ; + +SINGLE_LINE_COMMENT + : ('--'|'//'|'#') ~[\r\n]* -> channel(HIDDEN) + ; + +MULTILINE_COMMENT + : '/*' .*? ( '*/' | EOF ) -> channel(HIDDEN) + ; + +SPACES + : [ \u000B\t\r\n] -> channel(HIDDEN) + ; + +WORD + : [a-zA-Z_][a-zA-Z_0-9]* + ; + +fragment DIGIT : [0-9]; + +fragment A : [aA]; +fragment B : [bB]; +fragment C : [cC]; +fragment D : [dD]; +fragment E : [eE]; +fragment F : [fF]; +fragment G : [gG]; +fragment H : [hH]; +fragment I : [iI]; +fragment J : [jJ]; +fragment K : [kK]; +fragment L : [lL]; +fragment M : [mM]; +fragment N : [nN]; +fragment O : [oO]; +fragment P : [pP]; +fragment Q : [qQ]; +fragment R : [rR]; +fragment S : [sS]; +fragment T : [tT]; +fragment U : [uU]; +fragment V : [vV]; +fragment W : [wW]; +fragment X : [xX]; +fragment Y : [yY]; +fragment Z : [zZ]; diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 new file mode 100644 index 0000000..00a6344 --- /dev/null +++ b/grammar/PqlParser.g4 @@ -0,0 +1,89 @@ +/* +SQL-inspired "Pano Query Language" syntax +Subset of SQL Select statement with just 2 clauses supported: + - select plethora of taxons and TEL expressions combination + - where clause supporting plethora of taxons and TEL expressions logical comparisons + +Subset of https://github.com/panoramichq/entity-tree-sql-service/blob/master/src/sql/SQLSelect.g4 +*/ + +parser grammar PqlParser; + +options { + tokenVocab = PqlLexer; +} + +// we have 2 entry points: +// parse Tel expression: +parseTel: expr EOF ; +// parse PQL statements with TEL inside: +parsePql : ( sqlStmtList )* EOF ; + +sqlStmtList + : ';'* sqlStmt ( ';'+ sqlStmt )* ';'* + ; + +// this is where you add more statement types, like SET and other top-level SQL statements +sqlStmt + : selectStmt + ; + +selectStmt + : K_SELECT columns + ( whereClause )? + ( orderByClause )? + ( limitClause )? + ; + +columns: expr ( COMMA expr )* ; + +whereClause + : K_WHERE expr + ; + +orderByClause + : K_ORDER K_BY orderExpr ( COMMA orderExpr )* + ; + +orderExpr + : expr ( K_ASC | K_DESC )? 
+ ; + +limitClause + : K_LIMIT limit=expr // ( ( K_OFFSET | COMMA ) expr )? + ; + +expr + : unary_operator=( MINUS | PLUS | K_NOT ) right=expr + | left=expr operator=( STAR | FORWARD_SLASH | MOD ) right=expr + | left=expr operator=( PLUS | MINUS ) right=expr + | left=expr operator=( LT | LT_EQ | GT | GT_EQ ) right=expr + | left=expr operator=( ASSIGN | EQ | NOT_EQ1 | NOT_EQ2 | K_IS ) right=expr +// | left=expr is_negated=K_NOT? operator=( K_LIKE | K_BETWEEN ) right=expr +// | left=expr is_negated=K_NOT? operator=K_IN '(' ( right=expr ( ',' right=expr )* )? ')' + | left=expr operator=( K_AND | AND ) right=expr + | left=expr operator=( K_OR | OR ) right=expr + | OPEN_PAREN inner=expr CLOSE_PAREN + | literalValue + | function_name=identifierMultipart OPEN_PAREN ( expr ( COMMA expr )* )? CLOSE_PAREN + | taxon + ; + +// TODO: TAXON_TAG_DELIMITER is being killed off. Remove when we migrate out of taxon tags. +taxon: + TAXON_OPTIONAL_OPERATOR? + ( namespace=identifierMultipart PIPE )? + slug=identifierMultipart + ( TAXON_TAG_DELIMITER tag=identifierMultipart )? + ; + +identifierMultipart: WORD ( DOT WORD )* ; + +literalValue + : NUMERIC_LITERAL + | DOUBLE_QUOTED_STRING + | SINGLE_QUOTED_STRING + | K_NULL + | K_TRUE + | K_FALSE + ; diff --git a/grammar/TelLexer.g4 b/grammar/TelLexer.g4 deleted file mode 100644 index e9a5cbf..0000000 --- a/grammar/TelLexer.g4 +++ /dev/null @@ -1,66 +0,0 @@ -lexer grammar TelLexer; - -INT : '-'? DIGIT+ ; // integer -REAL : '-'? DIGIT+ '.' DIGIT+ ; // integer -TRUE : T R U E; -FALSE : F A L S E; -NOT : N O T; -KW_IS : I S; -KW_NULL : N U L L; -WORD : [a-zA-Z_][a-zA-Z_0-9$.]*; // one word (either part of slug or fn name). must start with non-digit - -STRING_CONSTANT : '"' ( '\\"' | ~'"' )* '"' ; // string constant. Not greedy, and supports \ to escape " char. -SINGLE_QUOTED_ELEMENT: '\'' ( '\\\'' | ~'\'' )* '\'' ; // string element surrounded by single quotes. Not greedy, and supports \ to escape ' char. - -L_BRACKET: '('; -R_BRACKET: ')'; -TAXON_NAMESPACE_DELIMITER: '|'; -TAXON_TAG_DELIMITER: ':'; -FN_PARAMETER_DELIMITER: ','; -// OPERATORS -OR : '||'; -AND : '&&'; -EQ : '=='; -NEQ : '!='; -GT : '>'; -LT : '<'; -GTEQ : '>='; -LTEQ : '<='; -PLUS : '+'; -MINUS : '-'; -MULT : '*'; -DIV : '/'; -OPTIONAL_TAXON_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. 
- -// support SQL, JavaScript and Python style syntax for single-line comment -SINGLE_LINE_COMMENT : ('--'|'//'|'#') ~[\r\n]* -> channel(HIDDEN) ; - -WS : [ \t\r\n]+ -> channel(HIDDEN) ; // skip spaces, tabs, newlines - -fragment DIGIT : [0-9]; -fragment A : [aA]; -fragment B : [bB]; -fragment C : [cC]; -fragment D : [dD]; -fragment E : [eE]; -fragment F : [fF]; -fragment G : [gG]; -fragment H : [hH]; -fragment I : [iI]; -fragment J : [jJ]; -fragment K : [kK]; -fragment L : [lL]; -fragment M : [mM]; -fragment N : [nN]; -fragment O : [oO]; -fragment P : [pP]; -fragment Q : [qQ]; -fragment R : [rR]; -fragment S : [sS]; -fragment T : [tT]; -fragment U : [uU]; -fragment V : [vV]; -fragment W : [wW]; -fragment X : [xX]; -fragment Y : [yY]; -fragment Z : [zZ]; diff --git a/grammar/TelParser.g4 b/grammar/TelParser.g4 deleted file mode 100644 index 61948da..0000000 --- a/grammar/TelParser.g4 +++ /dev/null @@ -1,29 +0,0 @@ -parser grammar TelParser; - -options { - tokenVocab = TelLexer; -} - -parse: expr EOF; // main rule for parsing - -expr -: NOT expr #notExpr -| expr op=(MULT | DIV) expr #multiplicationExpr -| expr op=(PLUS | MINUS) expr #additiveExpr -| expr op=(OR | AND | EQ | NEQ | GT | LT | GTEQ | LTEQ) expr #logicalExpr -| expr KW_IS NOT? KW_NULL #nullTestExpr -| atom #atomExpr -; - -atom -: L_BRACKET expr R_BRACKET #bracketExpr -| (INT | REAL) #numberAtom -| (TRUE | FALSE) #booleanAtom -| SINGLE_QUOTED_ELEMENT #singleQuotedAtom -| STRING_CONSTANT #stringConstantAtom -| fn #fnExpr -| taxon #taxonSlugAtom -; - -fn : WORD L_BRACKET expr? (FN_PARAMETER_DELIMITER expr)* R_BRACKET ; -taxon: OPTIONAL_TAXON_OPERATOR? WORD (TAXON_NAMESPACE_DELIMITER WORD)? (TAXON_TAG_DELIMITER WORD)? ; From 849e936792456558d2f2b59629f470482001419e Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Mon, 9 Nov 2020 04:00:31 -0800 Subject: [PATCH 10/32] PQLParser example + test --- js-temp/PqlLexer.js | 452 ++++ js-temp/PqlParser.js | 1933 +++++++++++++++++ js-temp/PqlParserListener.js | 141 ++ js-temp/PqlParserVisitor.js | 100 + js-temp/TelLexer.js | 658 ++++-- js-temp/TelParser.js | 1476 ++++++------- js-temp/TelParserListener.js | 129 +- js-temp/TelParserVisitor.js | 78 +- .../{tel_grammar => pql_grammar}/__init__.py | 0 python/src/pql_grammar/antlr/PqlLexer.py | 349 +++ python/src/pql_grammar/antlr/PqlParser.py | 1482 +++++++++++++ .../pql_grammar/antlr/PqlParserListener.py | 138 ++ .../src/pql_grammar/antlr/PqlParserVisitor.py | 83 + .../antlr/__init__.py | 0 python/src/pql_grammar/operators.py | 421 ++++ python/src/tel_grammar/antlr/TelLexer.py | 217 -- python/src/tel_grammar/antlr/TelParser.py | 1004 --------- .../tel_grammar/antlr/TelParserListener.py | 156 -- .../src/tel_grammar/antlr/TelParserVisitor.py | 93 - python/tests/pql/pql_test.py | 306 +++ .../tests/{antlr_tel => tel}/grammar_test.py | 35 +- 21 files changed, 6589 insertions(+), 2662 deletions(-) create mode 100644 js-temp/PqlLexer.js create mode 100644 js-temp/PqlParser.js create mode 100644 js-temp/PqlParserListener.js create mode 100644 js-temp/PqlParserVisitor.js rename python/src/{tel_grammar => pql_grammar}/__init__.py (100%) create mode 100644 python/src/pql_grammar/antlr/PqlLexer.py create mode 100644 python/src/pql_grammar/antlr/PqlParser.py create mode 100644 python/src/pql_grammar/antlr/PqlParserListener.py create mode 100644 python/src/pql_grammar/antlr/PqlParserVisitor.py rename python/src/{tel_grammar => pql_grammar}/antlr/__init__.py (100%) create mode 100644 python/src/pql_grammar/operators.py delete mode 100644 
python/src/tel_grammar/antlr/TelLexer.py delete mode 100644 python/src/tel_grammar/antlr/TelParser.py delete mode 100644 python/src/tel_grammar/antlr/TelParserListener.py delete mode 100644 python/src/tel_grammar/antlr/TelParserVisitor.py create mode 100644 python/tests/pql/pql_test.py rename python/tests/{antlr_tel => tel}/grammar_test.py (71%) diff --git a/js-temp/PqlLexer.js b/js-temp/PqlLexer.js new file mode 100644 index 0000000..bf254b5 --- /dev/null +++ b/js-temp/PqlLexer.js @@ -0,0 +1,452 @@ +// Generated from grammar/PqlLexer.g4 by ANTLR 4.8 +// jshint ignore: start +var antlr4 = require('antlr4/index'); + + + +var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", + "\u0002:\u01fd\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", + "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", + "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", + "\t\u0010\u0004\u0011\t\u0011\u0004\u0012\t\u0012\u0004\u0013\t\u0013", + "\u0004\u0014\t\u0014\u0004\u0015\t\u0015\u0004\u0016\t\u0016\u0004\u0017", + "\t\u0017\u0004\u0018\t\u0018\u0004\u0019\t\u0019\u0004\u001a\t\u001a", + "\u0004\u001b\t\u001b\u0004\u001c\t\u001c\u0004\u001d\t\u001d\u0004\u001e", + "\t\u001e\u0004\u001f\t\u001f\u0004 \t \u0004!\t!\u0004\"\t\"\u0004#", + "\t#\u0004$\t$\u0004%\t%\u0004&\t&\u0004\'\t\'\u0004(\t(\u0004)\t)\u0004", + "*\t*\u0004+\t+\u0004,\t,\u0004-\t-\u0004.\t.\u0004/\t/\u00040\t0\u0004", + "1\t1\u00042\t2\u00043\t3\u00044\t4\u00045\t5\u00046\t6\u00047\t7\u0004", + "8\t8\u00049\t9\u0004:\t:\u0004;\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004", + "?\t?\u0004@\t@\u0004A\tA\u0004B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004", + "F\tF\u0004G\tG\u0004H\tH\u0004I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004", + "M\tM\u0004N\tN\u0004O\tO\u0004P\tP\u0004Q\tQ\u0004R\tR\u0004S\tS\u0004", + "T\tT\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003\u0004\u0003", + "\u0004\u0003\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0006\u0003", + "\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0003\b\u0003", + "\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\u0003\u000b", + "\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\f\u0003\r\u0003\r\u0003", + "\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010\u0003\u0010\u0003", + "\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003", + "\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003\u0016\u0003", + "\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019\u0003\u0019\u0003", + "\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c\u0003\u001c\u0003", + "\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001e\u0003\u001e\u0003", + "\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003 \u0003", + "!\u0003!\u0003!\u0003!\u0003!\u0003\"\u0003\"\u0003\"\u0003\"\u0003", + "\"\u0003\"\u0003#\u0003#\u0003#\u0003$\u0003$\u0003$\u0003$\u0003$\u0003", + "$\u0003$\u0003%\u0003%\u0003%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003", + "&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'\u0003(\u0003(\u0003", + "(\u0003(\u0003(\u0003(\u0003(\u0003(\u0003)\u0003)\u0003)\u0003)\u0003", + ")\u0003*\u0003*\u0003*\u0003+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003", + ",\u0003,\u0003,\u0003,\u0003,\u0003,\u0003,\u0003-\u0003-\u0003-\u0003", + "-\u0003-\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003/\u0006/\u0143", + "\n/\r/\u000e/\u0144\u0003/\u0003/\u0007/\u0149\n/\f/\u000e/\u014c\u000b", + 
"/\u0005/\u014e\n/\u0003/\u0003/\u0005/\u0152\n/\u0003/\u0006/\u0155", + "\n/\r/\u000e/\u0156\u0005/\u0159\n/\u0003/\u0003/\u0006/\u015d\n/\r", + "/\u000e/\u015e\u0003/\u0003/\u0005/\u0163\n/\u0003/\u0006/\u0166\n/", + "\r/\u000e/\u0167\u0005/\u016a\n/\u0005/\u016c\n/\u00030\u00030\u0003", + "1\u00031\u00031\u00031\u00071\u0174\n1\f1\u000e1\u0177\u000b1\u0003", + "1\u00031\u00032\u00032\u00032\u00032\u00072\u017f\n2\f2\u000e2\u0182", + "\u000b2\u00032\u00032\u00033\u00033\u00034\u00034\u00034\u00034\u0007", + "4\u018c\n4\f4\u000e4\u018f\u000b4\u00034\u00034\u00035\u00035\u0003", + "5\u00035\u00075\u0197\n5\f5\u000e5\u019a\u000b5\u00035\u00035\u0003", + "6\u00036\u00036\u00036\u00036\u00056\u01a3\n6\u00036\u00076\u01a6\n", + "6\f6\u000e6\u01a9\u000b6\u00036\u00036\u00037\u00037\u00037\u00037\u0007", + "7\u01b1\n7\f7\u000e7\u01b4\u000b7\u00037\u00037\u00037\u00057\u01b9", + "\n7\u00037\u00037\u00038\u00038\u00038\u00038\u00039\u00039\u00079\u01c3", + "\n9\f9\u000e9\u01c6\u000b9\u0003:\u0003:\u0003;\u0003;\u0003<\u0003", + "<\u0003=\u0003=\u0003>\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003", + "A\u0003B\u0003B\u0003C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003", + "F\u0003G\u0003G\u0003H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003", + "K\u0003L\u0003L\u0003M\u0003M\u0003N\u0003N\u0003O\u0003O\u0003P\u0003", + "P\u0003Q\u0003Q\u0003R\u0003R\u0003S\u0003S\u0003T\u0003T\u0003\u01b2", + "\u0002U\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b", + "\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b\u000f", + "\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+\u0017", + "-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!A\"C#E$", + "G%I&K\'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7m8o9q:s\u0002u\u0002w\u0002y", + "\u0002{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002\u0085\u0002", + "\u0087\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002\u0091\u0002", + "\u0093\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002\u009d\u0002", + "\u009f\u0002\u00a1\u0002\u00a3\u0002\u00a5\u0002\u00a7\u0002\u0003\u0002", + "$\u0004\u0002--//\u0003\u0002$$\u0003\u0002))\u0004\u0002\f\f\u000f", + "\u000f\u0005\u0002\u000b\r\u000f\u000f\"\"\u0005\u0002C\\aac|\u0006", + "\u00022;C\\aac|\u0003\u00022;\u0004\u0002CCcc\u0004\u0002DDdd\u0004", + "\u0002EEee\u0004\u0002FFff\u0004\u0002GGgg\u0004\u0002HHhh\u0004\u0002", + "IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004\u0002LLll\u0004\u0002MMm", + "m\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002PPpp\u0004\u0002QQqq\u0004", + "\u0002RRrr\u0004\u0002SSss\u0004\u0002TTtt\u0004\u0002UUuu\u0004\u0002", + "VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004\u0002YYyy\u0004\u0002ZZz", + "z\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01fa\u0002\u0003\u0003\u0002", + "\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002\u0002\u0007\u0003\u0002", + "\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002\u0002\u000b\u0003\u0002", + "\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002\u0002\u000f\u0003\u0002", + "\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002\u0002\u0013\u0003\u0002", + "\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002\u0002\u0017\u0003\u0002", + "\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002\u0002\u001b\u0003\u0002", + "\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002\u0002\u001f\u0003\u0002", + "\u0002\u0002\u0002!\u0003\u0002\u0002\u0002\u0002#\u0003\u0002\u0002", + "\u0002\u0002%\u0003\u0002\u0002\u0002\u0002\'\u0003\u0002\u0002\u0002", + "\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003\u0002\u0002\u0002\u0002", + 
"-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002\u0002\u0002\u00021\u0003", + "\u0002\u0002\u0002\u00023\u0003\u0002\u0002\u0002\u00025\u0003\u0002", + "\u0002\u0002\u00027\u0003\u0002\u0002\u0002\u00029\u0003\u0002\u0002", + "\u0002\u0002;\u0003\u0002\u0002\u0002\u0002=\u0003\u0002\u0002\u0002", + "\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003\u0002\u0002\u0002\u0002", + "C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002\u0002\u0002\u0002G\u0003", + "\u0002\u0002\u0002\u0002I\u0003\u0002\u0002\u0002\u0002K\u0003\u0002", + "\u0002\u0002\u0002M\u0003\u0002\u0002\u0002\u0002O\u0003\u0002\u0002", + "\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002S\u0003\u0002\u0002\u0002", + "\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003\u0002\u0002\u0002\u0002", + "Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002\u0002\u0002\u0002]\u0003", + "\u0002\u0002\u0002\u0002_\u0003\u0002\u0002\u0002\u0002a\u0003\u0002", + "\u0002\u0002\u0002c\u0003\u0002\u0002\u0002\u0002e\u0003\u0002\u0002", + "\u0002\u0002g\u0003\u0002\u0002\u0002\u0002i\u0003\u0002\u0002\u0002", + "\u0002k\u0003\u0002\u0002\u0002\u0002m\u0003\u0002\u0002\u0002\u0002", + "o\u0003\u0002\u0002\u0002\u0002q\u0003\u0002\u0002\u0002\u0003\u00a9", + "\u0003\u0002\u0002\u0002\u0005\u00ab\u0003\u0002\u0002\u0002\u0007\u00ad", + "\u0003\u0002\u0002\u0002\t\u00b0\u0003\u0002\u0002\u0002\u000b\u00b3", + "\u0003\u0002\u0002\u0002\r\u00b6\u0003\u0002\u0002\u0002\u000f\u00b9", + "\u0003\u0002\u0002\u0002\u0011\u00bc\u0003\u0002\u0002\u0002\u0013\u00bf", + "\u0003\u0002\u0002\u0002\u0015\u00c2\u0003\u0002\u0002\u0002\u0017\u00c5", + "\u0003\u0002\u0002\u0002\u0019\u00c8\u0003\u0002\u0002\u0002\u001b\u00ca", + "\u0003\u0002\u0002\u0002\u001d\u00cc\u0003\u0002\u0002\u0002\u001f\u00ce", + "\u0003\u0002\u0002\u0002!\u00d0\u0003\u0002\u0002\u0002#\u00d2\u0003", + "\u0002\u0002\u0002%\u00d4\u0003\u0002\u0002\u0002\'\u00d6\u0003\u0002", + "\u0002\u0002)\u00d8\u0003\u0002\u0002\u0002+\u00da\u0003\u0002\u0002", + "\u0002-\u00dc\u0003\u0002\u0002\u0002/\u00de\u0003\u0002\u0002\u0002", + "1\u00e0\u0003\u0002\u0002\u00023\u00e2\u0003\u0002\u0002\u00025\u00e4", + "\u0003\u0002\u0002\u00027\u00e6\u0003\u0002\u0002\u00029\u00e8\u0003", + "\u0002\u0002\u0002;\u00ea\u0003\u0002\u0002\u0002=\u00ee\u0003\u0002", + "\u0002\u0002?\u00f2\u0003\u0002\u0002\u0002A\u00f5\u0003\u0002\u0002", + "\u0002C\u00fa\u0003\u0002\u0002\u0002E\u0100\u0003\u0002\u0002\u0002", + "G\u0103\u0003\u0002\u0002\u0002I\u010a\u0003\u0002\u0002\u0002K\u010f", + "\u0003\u0002\u0002\u0002M\u0115\u0003\u0002\u0002\u0002O\u0119\u0003", + "\u0002\u0002\u0002Q\u0121\u0003\u0002\u0002\u0002S\u0126\u0003\u0002", + "\u0002\u0002U\u0129\u0003\u0002\u0002\u0002W\u012f\u0003\u0002\u0002", + "\u0002Y\u0136\u0003\u0002\u0002\u0002[\u013b\u0003\u0002\u0002\u0002", + "]\u016b\u0003\u0002\u0002\u0002_\u016d\u0003\u0002\u0002\u0002a\u016f", + "\u0003\u0002\u0002\u0002c\u017a\u0003\u0002\u0002\u0002e\u0185\u0003", + "\u0002\u0002\u0002g\u0187\u0003\u0002\u0002\u0002i\u0192\u0003\u0002", + "\u0002\u0002k\u01a2\u0003\u0002\u0002\u0002m\u01ac\u0003\u0002\u0002", + "\u0002o\u01bc\u0003\u0002\u0002\u0002q\u01c0\u0003\u0002\u0002\u0002", + "s\u01c7\u0003\u0002\u0002\u0002u\u01c9\u0003\u0002\u0002\u0002w\u01cb", + "\u0003\u0002\u0002\u0002y\u01cd\u0003\u0002\u0002\u0002{\u01cf\u0003", + "\u0002\u0002\u0002}\u01d1\u0003\u0002\u0002\u0002\u007f\u01d3\u0003", + "\u0002\u0002\u0002\u0081\u01d5\u0003\u0002\u0002\u0002\u0083\u01d7\u0003", + "\u0002\u0002\u0002\u0085\u01d9\u0003\u0002\u0002\u0002\u0087\u01db\u0003", + 
"\u0002\u0002\u0002\u0089\u01dd\u0003\u0002\u0002\u0002\u008b\u01df\u0003", + "\u0002\u0002\u0002\u008d\u01e1\u0003\u0002\u0002\u0002\u008f\u01e3\u0003", + "\u0002\u0002\u0002\u0091\u01e5\u0003\u0002\u0002\u0002\u0093\u01e7\u0003", + "\u0002\u0002\u0002\u0095\u01e9\u0003\u0002\u0002\u0002\u0097\u01eb\u0003", + "\u0002\u0002\u0002\u0099\u01ed\u0003\u0002\u0002\u0002\u009b\u01ef\u0003", + "\u0002\u0002\u0002\u009d\u01f1\u0003\u0002\u0002\u0002\u009f\u01f3\u0003", + "\u0002\u0002\u0002\u00a1\u01f5\u0003\u0002\u0002\u0002\u00a3\u01f7\u0003", + "\u0002\u0002\u0002\u00a5\u01f9\u0003\u0002\u0002\u0002\u00a7\u01fb\u0003", + "\u0002\u0002\u0002\u00a9\u00aa\u0007<\u0002\u0002\u00aa\u0004\u0003", + "\u0002\u0002\u0002\u00ab\u00ac\u0007A\u0002\u0002\u00ac\u0006\u0003", + "\u0002\u0002\u0002\u00ad\u00ae\u0007(\u0002\u0002\u00ae\u00af\u0007", + "(\u0002\u0002\u00af\b\u0003\u0002\u0002\u0002\u00b0\u00b1\u0007?\u0002", + "\u0002\u00b1\u00b2\u0007?\u0002\u0002\u00b2\n\u0003\u0002\u0002\u0002", + "\u00b3\u00b4\u0007@\u0002\u0002\u00b4\u00b5\u0007?\u0002\u0002\u00b5", + "\f\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007>\u0002\u0002\u00b7\u00b8", + "\u0007?\u0002\u0002\u00b8\u000e\u0003\u0002\u0002\u0002\u00b9\u00ba", + "\u0007#\u0002\u0002\u00ba\u00bb\u0007?\u0002\u0002\u00bb\u0010\u0003", + "\u0002\u0002\u0002\u00bc\u00bd\u0007>\u0002\u0002\u00bd\u00be\u0007", + "@\u0002\u0002\u00be\u0012\u0003\u0002\u0002\u0002\u00bf\u00c0\u0007", + "~\u0002\u0002\u00c0\u00c1\u0007~\u0002\u0002\u00c1\u0014\u0003\u0002", + "\u0002\u0002\u00c2\u00c3\u0007>\u0002\u0002\u00c3\u00c4\u0007>\u0002", + "\u0002\u00c4\u0016\u0003\u0002\u0002\u0002\u00c5\u00c6\u0007@\u0002", + "\u0002\u00c6\u00c7\u0007@\u0002\u0002\u00c7\u0018\u0003\u0002\u0002", + "\u0002\u00c8\u00c9\u0007(\u0002\u0002\u00c9\u001a\u0003\u0002\u0002", + "\u0002\u00ca\u00cb\u0007?\u0002\u0002\u00cb\u001c\u0003\u0002\u0002", + "\u0002\u00cc\u00cd\u0007+\u0002\u0002\u00cd\u001e\u0003\u0002\u0002", + "\u0002\u00ce\u00cf\u0007.\u0002\u0002\u00cf \u0003\u0002\u0002\u0002", + "\u00d0\u00d1\u00070\u0002\u0002\u00d1\"\u0003\u0002\u0002\u0002\u00d2", + "\u00d3\u00071\u0002\u0002\u00d3$\u0003\u0002\u0002\u0002\u00d4\u00d5", + "\u0007@\u0002\u0002\u00d5&\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", + ">\u0002\u0002\u00d7(\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007/\u0002", + "\u0002\u00d9*\u0003\u0002\u0002\u0002\u00da\u00db\u0007\'\u0002\u0002", + "\u00db,\u0003\u0002\u0002\u0002\u00dc\u00dd\u0007*\u0002\u0002\u00dd", + ".\u0003\u0002\u0002\u0002\u00de\u00df\u0007~\u0002\u0002\u00df0\u0003", + "\u0002\u0002\u0002\u00e0\u00e1\u0007-\u0002\u0002\u00e12\u0003\u0002", + "\u0002\u0002\u00e2\u00e3\u0007=\u0002\u0002\u00e34\u0003\u0002\u0002", + "\u0002\u00e4\u00e5\u0007,\u0002\u0002\u00e56\u0003\u0002\u0002\u0002", + "\u00e6\u00e7\u0007\u0080\u0002\u0002\u00e78\u0003\u0002\u0002\u0002", + "\u00e8\u00e9\u0007a\u0002\u0002\u00e9:\u0003\u0002\u0002\u0002\u00ea", + "\u00eb\u0005u;\u0002\u00eb\u00ec\u0005\u008fH\u0002\u00ec\u00ed\u0005", + "{>\u0002\u00ed<\u0003\u0002\u0002\u0002\u00ee\u00ef\u0005u;\u0002\u00ef", + "\u00f0\u0005\u0099M\u0002\u00f0\u00f1\u0005y=\u0002\u00f1>\u0003\u0002", + "\u0002\u0002\u00f2\u00f3\u0005w<\u0002\u00f3\u00f4\u0005\u00a5S\u0002", + "\u00f4@\u0003\u0002\u0002\u0002\u00f5\u00f6\u0005{>\u0002\u00f6\u00f7", + "\u0005}?\u0002\u00f7\u00f8\u0005\u0099M\u0002\u00f8\u00f9\u0005y=\u0002", + "\u00f9B\u0003\u0002\u0002\u0002\u00fa\u00fb\u0005\u007f@\u0002\u00fb", + "\u00fc\u0005u;\u0002\u00fc\u00fd\u0005\u008bF\u0002\u00fd\u00fe\u0005", + 
"\u0099M\u0002\u00fe\u00ff\u0005}?\u0002\u00ffD\u0003\u0002\u0002\u0002", + "\u0100\u0101\u0005\u0085C\u0002\u0101\u0102\u0005\u0099M\u0002\u0102", + "F\u0003\u0002\u0002\u0002\u0103\u0104\u0005\u0085C\u0002\u0104\u0105", + "\u0005\u0099M\u0002\u0105\u0106\u0005\u008fH\u0002\u0106\u0107\u0005", + "\u009dO\u0002\u0107\u0108\u0005\u008bF\u0002\u0108\u0109\u0005\u008b", + "F\u0002\u0109H\u0003\u0002\u0002\u0002\u010a\u010b\u0005\u008bF\u0002", + "\u010b\u010c\u0005\u0085C\u0002\u010c\u010d\u0005\u0089E\u0002\u010d", + "\u010e\u0005}?\u0002\u010eJ\u0003\u0002\u0002\u0002\u010f\u0110\u0005", + "\u008bF\u0002\u0110\u0111\u0005\u0085C\u0002\u0111\u0112\u0005\u008d", + "G\u0002\u0112\u0113\u0005\u0085C\u0002\u0113\u0114\u0005\u009bN\u0002", + "\u0114L\u0003\u0002\u0002\u0002\u0115\u0116\u0005\u008fH\u0002\u0116", + "\u0117\u0005\u0091I\u0002\u0117\u0118\u0005\u009bN\u0002\u0118N\u0003", + "\u0002\u0002\u0002\u0119\u011a\u0005\u008fH\u0002\u011a\u011b\u0005", + "\u0091I\u0002\u011b\u011c\u0005\u009bN\u0002\u011c\u011d\u0005\u008f", + "H\u0002\u011d\u011e\u0005\u009dO\u0002\u011e\u011f\u0005\u008bF\u0002", + "\u011f\u0120\u0005\u008bF\u0002\u0120P\u0003\u0002\u0002\u0002\u0121", + "\u0122\u0005\u008fH\u0002\u0122\u0123\u0005\u009dO\u0002\u0123\u0124", + "\u0005\u008bF\u0002\u0124\u0125\u0005\u008bF\u0002\u0125R\u0003\u0002", + "\u0002\u0002\u0126\u0127\u0005\u0091I\u0002\u0127\u0128\u0005\u0097", + "L\u0002\u0128T\u0003\u0002\u0002\u0002\u0129\u012a\u0005\u0091I\u0002", + "\u012a\u012b\u0005\u0097L\u0002\u012b\u012c\u0005{>\u0002\u012c\u012d", + "\u0005}?\u0002\u012d\u012e\u0005\u0097L\u0002\u012eV\u0003\u0002\u0002", + "\u0002\u012f\u0130\u0005\u0099M\u0002\u0130\u0131\u0005}?\u0002\u0131", + "\u0132\u0005\u008bF\u0002\u0132\u0133\u0005}?\u0002\u0133\u0134\u0005", + "y=\u0002\u0134\u0135\u0005\u009bN\u0002\u0135X\u0003\u0002\u0002\u0002", + "\u0136\u0137\u0005\u009bN\u0002\u0137\u0138\u0005\u0097L\u0002\u0138", + "\u0139\u0005\u009dO\u0002\u0139\u013a\u0005}?\u0002\u013aZ\u0003\u0002", + "\u0002\u0002\u013b\u013c\u0005\u00a1Q\u0002\u013c\u013d\u0005\u0083", + "B\u0002\u013d\u013e\u0005}?\u0002\u013e\u013f\u0005\u0097L\u0002\u013f", + "\u0140\u0005}?\u0002\u0140\\\u0003\u0002\u0002\u0002\u0141\u0143\u0005", + "s:\u0002\u0142\u0141\u0003\u0002\u0002\u0002\u0143\u0144\u0003\u0002", + "\u0002\u0002\u0144\u0142\u0003\u0002\u0002\u0002\u0144\u0145\u0003\u0002", + "\u0002\u0002\u0145\u014d\u0003\u0002\u0002\u0002\u0146\u014a\u00070", + "\u0002\u0002\u0147\u0149\u0005s:\u0002\u0148\u0147\u0003\u0002\u0002", + "\u0002\u0149\u014c\u0003\u0002\u0002\u0002\u014a\u0148\u0003\u0002\u0002", + "\u0002\u014a\u014b\u0003\u0002\u0002\u0002\u014b\u014e\u0003\u0002\u0002", + "\u0002\u014c\u014a\u0003\u0002\u0002\u0002\u014d\u0146\u0003\u0002\u0002", + "\u0002\u014d\u014e\u0003\u0002\u0002\u0002\u014e\u0158\u0003\u0002\u0002", + "\u0002\u014f\u0151\u0005}?\u0002\u0150\u0152\t\u0002\u0002\u0002\u0151", + "\u0150\u0003\u0002\u0002\u0002\u0151\u0152\u0003\u0002\u0002\u0002\u0152", + "\u0154\u0003\u0002\u0002\u0002\u0153\u0155\u0005s:\u0002\u0154\u0153", + "\u0003\u0002\u0002\u0002\u0155\u0156\u0003\u0002\u0002\u0002\u0156\u0154", + "\u0003\u0002\u0002\u0002\u0156\u0157\u0003\u0002\u0002\u0002\u0157\u0159", + "\u0003\u0002\u0002\u0002\u0158\u014f\u0003\u0002\u0002\u0002\u0158\u0159", + "\u0003\u0002\u0002\u0002\u0159\u016c\u0003\u0002\u0002\u0002\u015a\u015c", + "\u00070\u0002\u0002\u015b\u015d\u0005s:\u0002\u015c\u015b\u0003\u0002", + "\u0002\u0002\u015d\u015e\u0003\u0002\u0002\u0002\u015e\u015c\u0003\u0002", + 
"\u0002\u0002\u015e\u015f\u0003\u0002\u0002\u0002\u015f\u0169\u0003\u0002", + "\u0002\u0002\u0160\u0162\u0005}?\u0002\u0161\u0163\t\u0002\u0002\u0002", + "\u0162\u0161\u0003\u0002\u0002\u0002\u0162\u0163\u0003\u0002\u0002\u0002", + "\u0163\u0165\u0003\u0002\u0002\u0002\u0164\u0166\u0005s:\u0002\u0165", + "\u0164\u0003\u0002\u0002\u0002\u0166\u0167\u0003\u0002\u0002\u0002\u0167", + "\u0165\u0003\u0002\u0002\u0002\u0167\u0168\u0003\u0002\u0002\u0002\u0168", + "\u016a\u0003\u0002\u0002\u0002\u0169\u0160\u0003\u0002\u0002\u0002\u0169", + "\u016a\u0003\u0002\u0002\u0002\u016a\u016c\u0003\u0002\u0002\u0002\u016b", + "\u0142\u0003\u0002\u0002\u0002\u016b\u015a\u0003\u0002\u0002\u0002\u016c", + "^\u0003\u0002\u0002\u0002\u016d\u016e\u0005a1\u0002\u016e`\u0003\u0002", + "\u0002\u0002\u016f\u0175\u0007$\u0002\u0002\u0170\u0171\u0007^\u0002", + "\u0002\u0171\u0174\u0007$\u0002\u0002\u0172\u0174\n\u0003\u0002\u0002", + "\u0173\u0170\u0003\u0002\u0002\u0002\u0173\u0172\u0003\u0002\u0002\u0002", + "\u0174\u0177\u0003\u0002\u0002\u0002\u0175\u0173\u0003\u0002\u0002\u0002", + "\u0175\u0176\u0003\u0002\u0002\u0002\u0176\u0178\u0003\u0002\u0002\u0002", + "\u0177\u0175\u0003\u0002\u0002\u0002\u0178\u0179\u0007$\u0002\u0002", + "\u0179b\u0003\u0002\u0002\u0002\u017a\u0180\u0007$\u0002\u0002\u017b", + "\u017c\u0007$\u0002\u0002\u017c\u017f\u0007$\u0002\u0002\u017d\u017f", + "\n\u0003\u0002\u0002\u017e\u017b\u0003\u0002\u0002\u0002\u017e\u017d", + "\u0003\u0002\u0002\u0002\u017f\u0182\u0003\u0002\u0002\u0002\u0180\u017e", + "\u0003\u0002\u0002\u0002\u0180\u0181\u0003\u0002\u0002\u0002\u0181\u0183", + "\u0003\u0002\u0002\u0002\u0182\u0180\u0003\u0002\u0002\u0002\u0183\u0184", + "\u0007$\u0002\u0002\u0184d\u0003\u0002\u0002\u0002\u0185\u0186\u0005", + "g4\u0002\u0186f\u0003\u0002\u0002\u0002\u0187\u018d\u0007)\u0002\u0002", + "\u0188\u0189\u0007^\u0002\u0002\u0189\u018c\u0007)\u0002\u0002\u018a", + "\u018c\n\u0004\u0002\u0002\u018b\u0188\u0003\u0002\u0002\u0002\u018b", + "\u018a\u0003\u0002\u0002\u0002\u018c\u018f\u0003\u0002\u0002\u0002\u018d", + "\u018b\u0003\u0002\u0002\u0002\u018d\u018e\u0003\u0002\u0002\u0002\u018e", + "\u0190\u0003\u0002\u0002\u0002\u018f\u018d\u0003\u0002\u0002\u0002\u0190", + "\u0191\u0007)\u0002\u0002\u0191h\u0003\u0002\u0002\u0002\u0192\u0198", + "\u0007)\u0002\u0002\u0193\u0194\u0007)\u0002\u0002\u0194\u0197\u0007", + ")\u0002\u0002\u0195\u0197\n\u0004\u0002\u0002\u0196\u0193\u0003\u0002", + "\u0002\u0002\u0196\u0195\u0003\u0002\u0002\u0002\u0197\u019a\u0003\u0002", + "\u0002\u0002\u0198\u0196\u0003\u0002\u0002\u0002\u0198\u0199\u0003\u0002", + "\u0002\u0002\u0199\u019b\u0003\u0002\u0002\u0002\u019a\u0198\u0003\u0002", + "\u0002\u0002\u019b\u019c\u0007)\u0002\u0002\u019cj\u0003\u0002\u0002", + "\u0002\u019d\u019e\u0007/\u0002\u0002\u019e\u01a3\u0007/\u0002\u0002", + "\u019f\u01a0\u00071\u0002\u0002\u01a0\u01a3\u00071\u0002\u0002\u01a1", + "\u01a3\u0007%\u0002\u0002\u01a2\u019d\u0003\u0002\u0002\u0002\u01a2", + "\u019f\u0003\u0002\u0002\u0002\u01a2\u01a1\u0003\u0002\u0002\u0002\u01a3", + "\u01a7\u0003\u0002\u0002\u0002\u01a4\u01a6\n\u0005\u0002\u0002\u01a5", + "\u01a4\u0003\u0002\u0002\u0002\u01a6\u01a9\u0003\u0002\u0002\u0002\u01a7", + "\u01a5\u0003\u0002\u0002\u0002\u01a7\u01a8\u0003\u0002\u0002\u0002\u01a8", + "\u01aa\u0003\u0002\u0002\u0002\u01a9\u01a7\u0003\u0002\u0002\u0002\u01aa", + "\u01ab\b6\u0002\u0002\u01abl\u0003\u0002\u0002\u0002\u01ac\u01ad\u0007", + "1\u0002\u0002\u01ad\u01ae\u0007,\u0002\u0002\u01ae\u01b2\u0003\u0002", + 
"\u0002\u0002\u01af\u01b1\u000b\u0002\u0002\u0002\u01b0\u01af\u0003\u0002", + "\u0002\u0002\u01b1\u01b4\u0003\u0002\u0002\u0002\u01b2\u01b3\u0003\u0002", + "\u0002\u0002\u01b2\u01b0\u0003\u0002\u0002\u0002\u01b3\u01b8\u0003\u0002", + "\u0002\u0002\u01b4\u01b2\u0003\u0002\u0002\u0002\u01b5\u01b6\u0007,", + "\u0002\u0002\u01b6\u01b9\u00071\u0002\u0002\u01b7\u01b9\u0007\u0002", + "\u0002\u0003\u01b8\u01b5\u0003\u0002\u0002\u0002\u01b8\u01b7\u0003\u0002", + "\u0002\u0002\u01b9\u01ba\u0003\u0002\u0002\u0002\u01ba\u01bb\b7\u0002", + "\u0002\u01bbn\u0003\u0002\u0002\u0002\u01bc\u01bd\t\u0006\u0002\u0002", + "\u01bd\u01be\u0003\u0002\u0002\u0002\u01be\u01bf\b8\u0002\u0002\u01bf", + "p\u0003\u0002\u0002\u0002\u01c0\u01c4\t\u0007\u0002\u0002\u01c1\u01c3", + "\t\b\u0002\u0002\u01c2\u01c1\u0003\u0002\u0002\u0002\u01c3\u01c6\u0003", + "\u0002\u0002\u0002\u01c4\u01c2\u0003\u0002\u0002\u0002\u01c4\u01c5\u0003", + "\u0002\u0002\u0002\u01c5r\u0003\u0002\u0002\u0002\u01c6\u01c4\u0003", + "\u0002\u0002\u0002\u01c7\u01c8\t\t\u0002\u0002\u01c8t\u0003\u0002\u0002", + "\u0002\u01c9\u01ca\t\n\u0002\u0002\u01cav\u0003\u0002\u0002\u0002\u01cb", + "\u01cc\t\u000b\u0002\u0002\u01ccx\u0003\u0002\u0002\u0002\u01cd\u01ce", + "\t\f\u0002\u0002\u01cez\u0003\u0002\u0002\u0002\u01cf\u01d0\t\r\u0002", + "\u0002\u01d0|\u0003\u0002\u0002\u0002\u01d1\u01d2\t\u000e\u0002\u0002", + "\u01d2~\u0003\u0002\u0002\u0002\u01d3\u01d4\t\u000f\u0002\u0002\u01d4", + "\u0080\u0003\u0002\u0002\u0002\u01d5\u01d6\t\u0010\u0002\u0002\u01d6", + "\u0082\u0003\u0002\u0002\u0002\u01d7\u01d8\t\u0011\u0002\u0002\u01d8", + "\u0084\u0003\u0002\u0002\u0002\u01d9\u01da\t\u0012\u0002\u0002\u01da", + "\u0086\u0003\u0002\u0002\u0002\u01db\u01dc\t\u0013\u0002\u0002\u01dc", + "\u0088\u0003\u0002\u0002\u0002\u01dd\u01de\t\u0014\u0002\u0002\u01de", + "\u008a\u0003\u0002\u0002\u0002\u01df\u01e0\t\u0015\u0002\u0002\u01e0", + "\u008c\u0003\u0002\u0002\u0002\u01e1\u01e2\t\u0016\u0002\u0002\u01e2", + "\u008e\u0003\u0002\u0002\u0002\u01e3\u01e4\t\u0017\u0002\u0002\u01e4", + "\u0090\u0003\u0002\u0002\u0002\u01e5\u01e6\t\u0018\u0002\u0002\u01e6", + "\u0092\u0003\u0002\u0002\u0002\u01e7\u01e8\t\u0019\u0002\u0002\u01e8", + "\u0094\u0003\u0002\u0002\u0002\u01e9\u01ea\t\u001a\u0002\u0002\u01ea", + "\u0096\u0003\u0002\u0002\u0002\u01eb\u01ec\t\u001b\u0002\u0002\u01ec", + "\u0098\u0003\u0002\u0002\u0002\u01ed\u01ee\t\u001c\u0002\u0002\u01ee", + "\u009a\u0003\u0002\u0002\u0002\u01ef\u01f0\t\u001d\u0002\u0002\u01f0", + "\u009c\u0003\u0002\u0002\u0002\u01f1\u01f2\t\u001e\u0002\u0002\u01f2", + "\u009e\u0003\u0002\u0002\u0002\u01f3\u01f4\t\u001f\u0002\u0002\u01f4", + "\u00a0\u0003\u0002\u0002\u0002\u01f5\u01f6\t \u0002\u0002\u01f6\u00a2", + "\u0003\u0002\u0002\u0002\u01f7\u01f8\t!\u0002\u0002\u01f8\u00a4\u0003", + "\u0002\u0002\u0002\u01f9\u01fa\t\"\u0002\u0002\u01fa\u00a6\u0003\u0002", + "\u0002\u0002\u01fb\u01fc\t#\u0002\u0002\u01fc\u00a8\u0003\u0002\u0002", + "\u0002\u001b\u0002\u0144\u014a\u014d\u0151\u0156\u0158\u015e\u0162\u0167", + "\u0169\u016b\u0173\u0175\u017e\u0180\u018b\u018d\u0196\u0198\u01a2\u01a7", + "\u01b2\u01b8\u01c4\u0003\u0002\u0003\u0002"].join(""); + + +var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); + +var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new antlr4.dfa.DFA(ds, index); }); + +function PqlLexer(input) { + antlr4.Lexer.call(this, input); + this._interp = new antlr4.atn.LexerATNSimulator(this, atn, decisionsToDFA, new antlr4.PredictionContextCache()); + return this; +} + +PqlLexer.prototype = 
Object.create(antlr4.Lexer.prototype); +PqlLexer.prototype.constructor = PqlLexer; + +Object.defineProperty(PqlLexer.prototype, "atn", { + get : function() { + return atn; + } +}); + +PqlLexer.EOF = antlr4.Token.EOF; +PqlLexer.TAXON_TAG_DELIMITER = 1; +PqlLexer.TAXON_OPTIONAL_OPERATOR = 2; +PqlLexer.AND = 3; +PqlLexer.EQ = 4; +PqlLexer.GT_EQ = 5; +PqlLexer.LT_EQ = 6; +PqlLexer.NOT_EQ1 = 7; +PqlLexer.NOT_EQ2 = 8; +PqlLexer.OR = 9; +PqlLexer.SHIFT_LEFT = 10; +PqlLexer.SHIFT_RIGHT = 11; +PqlLexer.AMP = 12; +PqlLexer.ASSIGN = 13; +PqlLexer.CLOSE_PAREN = 14; +PqlLexer.COMMA = 15; +PqlLexer.DOT = 16; +PqlLexer.FORWARD_SLASH = 17; +PqlLexer.GT = 18; +PqlLexer.LT = 19; +PqlLexer.MINUS = 20; +PqlLexer.MOD = 21; +PqlLexer.OPEN_PAREN = 22; +PqlLexer.PIPE = 23; +PqlLexer.PLUS = 24; +PqlLexer.SCOL = 25; +PqlLexer.STAR = 26; +PqlLexer.TILDE = 27; +PqlLexer.UNDER = 28; +PqlLexer.K_AND = 29; +PqlLexer.K_ASC = 30; +PqlLexer.K_BY = 31; +PqlLexer.K_DESC = 32; +PqlLexer.K_FALSE = 33; +PqlLexer.K_IS = 34; +PqlLexer.K_ISNULL = 35; +PqlLexer.K_LIKE = 36; +PqlLexer.K_LIMIT = 37; +PqlLexer.K_NOT = 38; +PqlLexer.K_NOTNULL = 39; +PqlLexer.K_NULL = 40; +PqlLexer.K_OR = 41; +PqlLexer.K_ORDER = 42; +PqlLexer.K_SELECT = 43; +PqlLexer.K_TRUE = 44; +PqlLexer.K_WHERE = 45; +PqlLexer.NUMERIC_LITERAL = 46; +PqlLexer.DOUBLE_QUOTED_STRING = 47; +PqlLexer.DOUBLE_QUOTED_STRING_TEL = 48; +PqlLexer.DOUBLE_QUOTED_STRING_SQL = 49; +PqlLexer.SINGLE_QUOTED_STRING = 50; +PqlLexer.SINGLE_QUOTED_STRING_TEL = 51; +PqlLexer.SINGLE_QUOTED_STRING_SQL = 52; +PqlLexer.SINGLE_LINE_COMMENT = 53; +PqlLexer.MULTILINE_COMMENT = 54; +PqlLexer.SPACES = 55; +PqlLexer.WORD = 56; + +PqlLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; + +PqlLexer.prototype.modeNames = [ "DEFAULT_MODE" ]; + +PqlLexer.prototype.literalNames = [ null, "':'", "'?'", "'&&'", "'=='", + "'>='", "'<='", "'!='", "'<>'", "'||'", + "'<<'", "'>>'", "'&'", "'='", "')'", + "','", "'.'", "'/'", "'>'", "'<'", "'-'", + "'%'", "'('", "'|'", "'+'", "';'", "'*'", + "'~'", "'_'" ]; + +PqlLexer.prototype.symbolicNames = [ null, "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", + "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", + "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", + "DOT", "FORWARD_SLASH", "GT", "LT", + "MINUS", "MOD", "OPEN_PAREN", "PIPE", + "PLUS", "SCOL", "STAR", "TILDE", "UNDER", + "K_AND", "K_ASC", "K_BY", "K_DESC", + "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", + "K_OR", "K_ORDER", "K_SELECT", "K_TRUE", + "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ]; + +PqlLexer.prototype.ruleNames = [ "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", + "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", + "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", + "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", + "MOD", "OPEN_PAREN", "PIPE", "PLUS", "SCOL", + "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", + "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_ORDER", "K_SELECT", + "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", 
"MULTILINE_COMMENT", + "SPACES", "WORD", "DIGIT", "A", "B", "C", + "D", "E", "F", "G", "H", "I", "J", "K", + "L", "M", "N", "O", "P", "Q", "R", "S", + "T", "U", "V", "W", "X", "Y", "Z" ]; + +PqlLexer.prototype.grammarFileName = "PqlLexer.g4"; + + +exports.PqlLexer = PqlLexer; + diff --git a/js-temp/PqlParser.js b/js-temp/PqlParser.js new file mode 100644 index 0000000..ff20d0e --- /dev/null +++ b/js-temp/PqlParser.js @@ -0,0 +1,1933 @@ +// Generated from grammar/PqlParser.g4 by ANTLR 4.8 +// jshint ignore: start +var antlr4 = require('antlr4/index'); +var PqlParserListener = require('./PqlParserListener').PqlParserListener; +var PqlParserVisitor = require('./PqlParserVisitor').PqlParserVisitor; + +var grammarFileName = "PqlParser.g4"; + + +var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", + "\u0003:\u00b2\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", + "\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004", + "\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004\f\t\f\u0004", + "\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0003\u0002\u0003\u0002", + "\u0003\u0002\u0003\u0003\u0007\u0003#\n\u0003\f\u0003\u000e\u0003&\u000b", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0004\u0007\u0004+\n\u0004\f\u0004", + "\u000e\u0004.\u000b\u0004\u0003\u0004\u0003\u0004\u0006\u00042\n\u0004", + "\r\u0004\u000e\u00043\u0003\u0004\u0007\u00047\n\u0004\f\u0004\u000e", + "\u0004:\u000b\u0004\u0003\u0004\u0007\u0004=\n\u0004\f\u0004\u000e\u0004", + "@\u000b\u0004\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006\u0003", + "\u0006\u0005\u0006G\n\u0006\u0003\u0006\u0005\u0006J\n\u0006\u0003\u0006", + "\u0005\u0006M\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0007\u0007", + "R\n\u0007\f\u0007\u000e\u0007U\u000b\u0007\u0003\b\u0003\b\u0003\b\u0003", + "\t\u0003\t\u0003\t\u0003\t\u0003\t\u0007\t_\n\t\f\t\u000e\tb\u000b\t", + "\u0003\n\u0003\n\u0005\nf\n\n\u0003\u000b\u0003\u000b\u0003\u000b\u0003", + "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003", + "\f\u0003\f\u0003\f\u0003\f\u0007\fx\n\f\f\f\u000e\f{\u000b\f\u0005\f", + "}\n\f\u0003\f\u0003\f\u0003\f\u0005\f\u0082\n\f\u0003\f\u0003\f\u0003", + "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003", + "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0007\f\u0096\n\f", + "\f\f\u000e\f\u0099\u000b\f\u0003\r\u0005\r\u009c\n\r\u0003\r\u0003\r", + "\u0003\r\u0005\r\u00a1\n\r\u0003\r\u0003\r\u0003\r\u0005\r\u00a6\n\r", + "\u0003\u000e\u0003\u000e\u0003\u000e\u0007\u000e\u00ab\n\u000e\f\u000e", + "\u000e\u000e\u00ae\u000b\u000e\u0003\u000f\u0003\u000f\u0003\u000f\u0002", + "\u0003\u0016\u0010\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016", + "\u0018\u001a\u001c\u0002\u000b\u0004\u0002 \"\"\u0005\u0002\u0016\u0016", + "\u001a\u001a((\u0005\u0002\u0013\u0013\u0017\u0017\u001c\u001c\u0004", + "\u0002\u0016\u0016\u001a\u001a\u0004\u0002\u0007\b\u0014\u0015\u0006", + "\u0002\u0006\u0006\t\n\u000f\u000f$$\u0004\u0002\u0005\u0005\u001f\u001f", + "\u0004\u0002\u000b\u000b++\u0007\u0002##**..0144\u0002\u00be\u0002\u001e", + "\u0003\u0002\u0002\u0002\u0004$\u0003\u0002\u0002\u0002\u0006,\u0003", + "\u0002\u0002\u0002\bA\u0003\u0002\u0002\u0002\nC\u0003\u0002\u0002\u0002", + "\fN\u0003\u0002\u0002\u0002\u000eV\u0003\u0002\u0002\u0002\u0010Y\u0003", + "\u0002\u0002\u0002\u0012c\u0003\u0002\u0002\u0002\u0014g\u0003\u0002", + "\u0002\u0002\u0016\u0081\u0003\u0002\u0002\u0002\u0018\u009b\u0003\u0002", + 
"\u0002\u0002\u001a\u00a7\u0003\u0002\u0002\u0002\u001c\u00af\u0003\u0002", + "\u0002\u0002\u001e\u001f\u0005\u0016\f\u0002\u001f \u0007\u0002\u0002", + "\u0003 \u0003\u0003\u0002\u0002\u0002!#\u0005\u0006\u0004\u0002\"!\u0003", + "\u0002\u0002\u0002#&\u0003\u0002\u0002\u0002$\"\u0003\u0002\u0002\u0002", + "$%\u0003\u0002\u0002\u0002%\'\u0003\u0002\u0002\u0002&$\u0003\u0002", + "\u0002\u0002\'(\u0007\u0002\u0002\u0003(\u0005\u0003\u0002\u0002\u0002", + ")+\u0007\u001b\u0002\u0002*)\u0003\u0002\u0002\u0002+.\u0003\u0002\u0002", + "\u0002,*\u0003\u0002\u0002\u0002,-\u0003\u0002\u0002\u0002-/\u0003\u0002", + "\u0002\u0002.,\u0003\u0002\u0002\u0002/8\u0005\b\u0005\u000202\u0007", + "\u001b\u0002\u000210\u0003\u0002\u0002\u000223\u0003\u0002\u0002\u0002", + "31\u0003\u0002\u0002\u000234\u0003\u0002\u0002\u000245\u0003\u0002\u0002", + "\u000257\u0005\b\u0005\u000261\u0003\u0002\u0002\u00027:\u0003\u0002", + "\u0002\u000286\u0003\u0002\u0002\u000289\u0003\u0002\u0002\u00029>\u0003", + "\u0002\u0002\u0002:8\u0003\u0002\u0002\u0002;=\u0007\u001b\u0002\u0002", + "<;\u0003\u0002\u0002\u0002=@\u0003\u0002\u0002\u0002><\u0003\u0002\u0002", + "\u0002>?\u0003\u0002\u0002\u0002?\u0007\u0003\u0002\u0002\u0002@>\u0003", + "\u0002\u0002\u0002AB\u0005\n\u0006\u0002B\t\u0003\u0002\u0002\u0002", + "CD\u0007-\u0002\u0002DF\u0005\f\u0007\u0002EG\u0005\u000e\b\u0002FE", + "\u0003\u0002\u0002\u0002FG\u0003\u0002\u0002\u0002GI\u0003\u0002\u0002", + "\u0002HJ\u0005\u0010\t\u0002IH\u0003\u0002\u0002\u0002IJ\u0003\u0002", + "\u0002\u0002JL\u0003\u0002\u0002\u0002KM\u0005\u0014\u000b\u0002LK\u0003", + "\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002M\u000b\u0003\u0002\u0002", + "\u0002NS\u0005\u0016\f\u0002OP\u0007\u0011\u0002\u0002PR\u0005\u0016", + "\f\u0002QO\u0003\u0002\u0002\u0002RU\u0003\u0002\u0002\u0002SQ\u0003", + "\u0002\u0002\u0002ST\u0003\u0002\u0002\u0002T\r\u0003\u0002\u0002\u0002", + "US\u0003\u0002\u0002\u0002VW\u0007/\u0002\u0002WX\u0005\u0016\f\u0002", + "X\u000f\u0003\u0002\u0002\u0002YZ\u0007,\u0002\u0002Z[\u0007!\u0002", + "\u0002[`\u0005\u0012\n\u0002\\]\u0007\u0011\u0002\u0002]_\u0005\u0012", + "\n\u0002^\\\u0003\u0002\u0002\u0002_b\u0003\u0002\u0002\u0002`^\u0003", + "\u0002\u0002\u0002`a\u0003\u0002\u0002\u0002a\u0011\u0003\u0002\u0002", + "\u0002b`\u0003\u0002\u0002\u0002ce\u0005\u0016\f\u0002df\t\u0002\u0002", + "\u0002ed\u0003\u0002\u0002\u0002ef\u0003\u0002\u0002\u0002f\u0013\u0003", + "\u0002\u0002\u0002gh\u0007\'\u0002\u0002hi\u0005\u0016\f\u0002i\u0015", + "\u0003\u0002\u0002\u0002jk\b\f\u0001\u0002kl\t\u0003\u0002\u0002l\u0082", + "\u0005\u0016\f\rmn\u0007\u0018\u0002\u0002no\u0005\u0016\f\u0002op\u0007", + "\u0010\u0002\u0002p\u0082\u0003\u0002\u0002\u0002q\u0082\u0005\u001c", + "\u000f\u0002rs\u0005\u001a\u000e\u0002s|\u0007\u0018\u0002\u0002ty\u0005", + "\u0016\f\u0002uv\u0007\u0011\u0002\u0002vx\u0005\u0016\f\u0002wu\u0003", + "\u0002\u0002\u0002x{\u0003\u0002\u0002\u0002yw\u0003\u0002\u0002\u0002", + "yz\u0003\u0002\u0002\u0002z}\u0003\u0002\u0002\u0002{y\u0003\u0002\u0002", + "\u0002|t\u0003\u0002\u0002\u0002|}\u0003\u0002\u0002\u0002}~\u0003\u0002", + "\u0002\u0002~\u007f\u0007\u0010\u0002\u0002\u007f\u0082\u0003\u0002", + "\u0002\u0002\u0080\u0082\u0005\u0018\r\u0002\u0081j\u0003\u0002\u0002", + "\u0002\u0081m\u0003\u0002\u0002\u0002\u0081q\u0003\u0002\u0002\u0002", + "\u0081r\u0003\u0002\u0002\u0002\u0081\u0080\u0003\u0002\u0002\u0002", + "\u0082\u0097\u0003\u0002\u0002\u0002\u0083\u0084\f\f\u0002\u0002\u0084", + 
"\u0085\t\u0004\u0002\u0002\u0085\u0096\u0005\u0016\f\r\u0086\u0087\f", + "\u000b\u0002\u0002\u0087\u0088\t\u0005\u0002\u0002\u0088\u0096\u0005", + "\u0016\f\f\u0089\u008a\f\n\u0002\u0002\u008a\u008b\t\u0006\u0002\u0002", + "\u008b\u0096\u0005\u0016\f\u000b\u008c\u008d\f\t\u0002\u0002\u008d\u008e", + "\t\u0007\u0002\u0002\u008e\u0096\u0005\u0016\f\n\u008f\u0090\f\b\u0002", + "\u0002\u0090\u0091\t\b\u0002\u0002\u0091\u0096\u0005\u0016\f\t\u0092", + "\u0093\f\u0007\u0002\u0002\u0093\u0094\t\t\u0002\u0002\u0094\u0096\u0005", + "\u0016\f\b\u0095\u0083\u0003\u0002\u0002\u0002\u0095\u0086\u0003\u0002", + "\u0002\u0002\u0095\u0089\u0003\u0002\u0002\u0002\u0095\u008c\u0003\u0002", + "\u0002\u0002\u0095\u008f\u0003\u0002\u0002\u0002\u0095\u0092\u0003\u0002", + "\u0002\u0002\u0096\u0099\u0003\u0002\u0002\u0002\u0097\u0095\u0003\u0002", + "\u0002\u0002\u0097\u0098\u0003\u0002\u0002\u0002\u0098\u0017\u0003\u0002", + "\u0002\u0002\u0099\u0097\u0003\u0002\u0002\u0002\u009a\u009c\u0007\u0004", + "\u0002\u0002\u009b\u009a\u0003\u0002\u0002\u0002\u009b\u009c\u0003\u0002", + "\u0002\u0002\u009c\u00a0\u0003\u0002\u0002\u0002\u009d\u009e\u0005\u001a", + "\u000e\u0002\u009e\u009f\u0007\u0019\u0002\u0002\u009f\u00a1\u0003\u0002", + "\u0002\u0002\u00a0\u009d\u0003\u0002\u0002\u0002\u00a0\u00a1\u0003\u0002", + "\u0002\u0002\u00a1\u00a2\u0003\u0002\u0002\u0002\u00a2\u00a5\u0005\u001a", + "\u000e\u0002\u00a3\u00a4\u0007\u0003\u0002\u0002\u00a4\u00a6\u0005\u001a", + "\u000e\u0002\u00a5\u00a3\u0003\u0002\u0002\u0002\u00a5\u00a6\u0003\u0002", + "\u0002\u0002\u00a6\u0019\u0003\u0002\u0002\u0002\u00a7\u00ac\u0007:", + "\u0002\u0002\u00a8\u00a9\u0007\u0012\u0002\u0002\u00a9\u00ab\u0007:", + "\u0002\u0002\u00aa\u00a8\u0003\u0002\u0002\u0002\u00ab\u00ae\u0003\u0002", + "\u0002\u0002\u00ac\u00aa\u0003\u0002\u0002\u0002\u00ac\u00ad\u0003\u0002", + "\u0002\u0002\u00ad\u001b\u0003\u0002\u0002\u0002\u00ae\u00ac\u0003\u0002", + "\u0002\u0002\u00af\u00b0\t\n\u0002\u0002\u00b0\u001d\u0003\u0002\u0002", + "\u0002\u0016$,38>FILS`ey|\u0081\u0095\u0097\u009b\u00a0\u00a5\u00ac"].join(""); + + +var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); + +var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new antlr4.dfa.DFA(ds, index); }); + +var sharedContextCache = new antlr4.PredictionContextCache(); + +var literalNames = [ null, "':'", "'?'", "'&&'", "'=='", "'>='", "'<='", + "'!='", "'<>'", "'||'", "'<<'", "'>>'", "'&'", "'='", + "')'", "','", "'.'", "'/'", "'>'", "'<'", "'-'", "'%'", + "'('", "'|'", "'+'", "';'", "'*'", "'~'", "'_'" ]; + +var symbolicNames = [ null, "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", + "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", + "CLOSE_PAREN", "COMMA", "DOT", "FORWARD_SLASH", "GT", + "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", + "WORD" ]; + +var ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", + "columns", "whereClause", "orderByClause", "orderExpr", + "limitClause", "expr", "taxon", "identifierMultipart", + 
"literalValue" ]; + +function PqlParser (input) { + antlr4.Parser.call(this, input); + this._interp = new antlr4.atn.ParserATNSimulator(this, atn, decisionsToDFA, sharedContextCache); + this.ruleNames = ruleNames; + this.literalNames = literalNames; + this.symbolicNames = symbolicNames; + return this; +} + +PqlParser.prototype = Object.create(antlr4.Parser.prototype); +PqlParser.prototype.constructor = PqlParser; + +Object.defineProperty(PqlParser.prototype, "atn", { + get : function() { + return atn; + } +}); + +PqlParser.EOF = antlr4.Token.EOF; +PqlParser.TAXON_TAG_DELIMITER = 1; +PqlParser.TAXON_OPTIONAL_OPERATOR = 2; +PqlParser.AND = 3; +PqlParser.EQ = 4; +PqlParser.GT_EQ = 5; +PqlParser.LT_EQ = 6; +PqlParser.NOT_EQ1 = 7; +PqlParser.NOT_EQ2 = 8; +PqlParser.OR = 9; +PqlParser.SHIFT_LEFT = 10; +PqlParser.SHIFT_RIGHT = 11; +PqlParser.AMP = 12; +PqlParser.ASSIGN = 13; +PqlParser.CLOSE_PAREN = 14; +PqlParser.COMMA = 15; +PqlParser.DOT = 16; +PqlParser.FORWARD_SLASH = 17; +PqlParser.GT = 18; +PqlParser.LT = 19; +PqlParser.MINUS = 20; +PqlParser.MOD = 21; +PqlParser.OPEN_PAREN = 22; +PqlParser.PIPE = 23; +PqlParser.PLUS = 24; +PqlParser.SCOL = 25; +PqlParser.STAR = 26; +PqlParser.TILDE = 27; +PqlParser.UNDER = 28; +PqlParser.K_AND = 29; +PqlParser.K_ASC = 30; +PqlParser.K_BY = 31; +PqlParser.K_DESC = 32; +PqlParser.K_FALSE = 33; +PqlParser.K_IS = 34; +PqlParser.K_ISNULL = 35; +PqlParser.K_LIKE = 36; +PqlParser.K_LIMIT = 37; +PqlParser.K_NOT = 38; +PqlParser.K_NOTNULL = 39; +PqlParser.K_NULL = 40; +PqlParser.K_OR = 41; +PqlParser.K_ORDER = 42; +PqlParser.K_SELECT = 43; +PqlParser.K_TRUE = 44; +PqlParser.K_WHERE = 45; +PqlParser.NUMERIC_LITERAL = 46; +PqlParser.DOUBLE_QUOTED_STRING = 47; +PqlParser.DOUBLE_QUOTED_STRING_TEL = 48; +PqlParser.DOUBLE_QUOTED_STRING_SQL = 49; +PqlParser.SINGLE_QUOTED_STRING = 50; +PqlParser.SINGLE_QUOTED_STRING_TEL = 51; +PqlParser.SINGLE_QUOTED_STRING_SQL = 52; +PqlParser.SINGLE_LINE_COMMENT = 53; +PqlParser.MULTILINE_COMMENT = 54; +PqlParser.SPACES = 55; +PqlParser.WORD = 56; + +PqlParser.RULE_parseTel = 0; +PqlParser.RULE_parsePql = 1; +PqlParser.RULE_sqlStmtList = 2; +PqlParser.RULE_sqlStmt = 3; +PqlParser.RULE_selectStmt = 4; +PqlParser.RULE_columns = 5; +PqlParser.RULE_whereClause = 6; +PqlParser.RULE_orderByClause = 7; +PqlParser.RULE_orderExpr = 8; +PqlParser.RULE_limitClause = 9; +PqlParser.RULE_expr = 10; +PqlParser.RULE_taxon = 11; +PqlParser.RULE_identifierMultipart = 12; +PqlParser.RULE_literalValue = 13; + + +function ParseTelContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_parseTel; + return this; +} + +ParseTelContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ParseTelContext.prototype.constructor = ParseTelContext; + +ParseTelContext.prototype.expr = function() { + return this.getTypedRuleContext(ExprContext,0); +}; + +ParseTelContext.prototype.EOF = function() { + return this.getToken(PqlParser.EOF, 0); +}; + +ParseTelContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterParseTel(this); + } +}; + +ParseTelContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitParseTel(this); + } +}; + +ParseTelContext.prototype.accept = function(visitor) { + if ( visitor instanceof 
PqlParserVisitor ) { + return visitor.visitParseTel(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.ParseTelContext = ParseTelContext; + +PqlParser.prototype.parseTel = function() { + + var localctx = new ParseTelContext(this, this._ctx, this.state); + this.enterRule(localctx, 0, PqlParser.RULE_parseTel); + try { + this.enterOuterAlt(localctx, 1); + this.state = 28; + this.expr(0); + this.state = 29; + this.match(PqlParser.EOF); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function ParsePqlContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_parsePql; + return this; +} + +ParsePqlContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ParsePqlContext.prototype.constructor = ParsePqlContext; + +ParsePqlContext.prototype.EOF = function() { + return this.getToken(PqlParser.EOF, 0); +}; + +ParsePqlContext.prototype.sqlStmtList = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(SqlStmtListContext); + } else { + return this.getTypedRuleContext(SqlStmtListContext,i); + } +}; + +ParsePqlContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterParsePql(this); + } +}; + +ParsePqlContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitParsePql(this); + } +}; + +ParsePqlContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitParsePql(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.ParsePqlContext = ParsePqlContext; + +PqlParser.prototype.parsePql = function() { + + var localctx = new ParsePqlContext(this, this._ctx, this.state); + this.enterRule(localctx, 2, PqlParser.RULE_parsePql); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 34; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.SCOL || _la===PqlParser.K_SELECT) { + this.state = 31; + this.sqlStmtList(); + this.state = 36; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + this.state = 37; + this.match(PqlParser.EOF); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function SqlStmtListContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_sqlStmtList; + return this; +} + +SqlStmtListContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +SqlStmtListContext.prototype.constructor = SqlStmtListContext; + +SqlStmtListContext.prototype.sqlStmt = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return 
this.getTypedRuleContexts(SqlStmtContext); + } else { + return this.getTypedRuleContext(SqlStmtContext,i); + } +}; + +SqlStmtListContext.prototype.SCOL = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.SCOL); + } else { + return this.getToken(PqlParser.SCOL, i); + } +}; + + +SqlStmtListContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterSqlStmtList(this); + } +}; + +SqlStmtListContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitSqlStmtList(this); + } +}; + +SqlStmtListContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitSqlStmtList(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.SqlStmtListContext = SqlStmtListContext; + +PqlParser.prototype.sqlStmtList = function() { + + var localctx = new SqlStmtListContext(this, this._ctx, this.state); + this.enterRule(localctx, 4, PqlParser.RULE_sqlStmtList); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 42; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.SCOL) { + this.state = 39; + this.match(PqlParser.SCOL); + this.state = 44; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + this.state = 45; + this.sqlStmt(); + this.state = 54; + this._errHandler.sync(this); + var _alt = this._interp.adaptivePredict(this._input,3,this._ctx) + while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { + if(_alt===1) { + this.state = 47; + this._errHandler.sync(this); + _la = this._input.LA(1); + do { + this.state = 46; + this.match(PqlParser.SCOL); + this.state = 49; + this._errHandler.sync(this); + _la = this._input.LA(1); + } while(_la===PqlParser.SCOL); + this.state = 51; + this.sqlStmt(); + } + this.state = 56; + this._errHandler.sync(this); + _alt = this._interp.adaptivePredict(this._input,3,this._ctx); + } + + this.state = 60; + this._errHandler.sync(this); + var _alt = this._interp.adaptivePredict(this._input,4,this._ctx) + while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { + if(_alt===1) { + this.state = 57; + this.match(PqlParser.SCOL); + } + this.state = 62; + this._errHandler.sync(this); + _alt = this._interp.adaptivePredict(this._input,4,this._ctx); + } + + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function SqlStmtContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_sqlStmt; + return this; +} + +SqlStmtContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +SqlStmtContext.prototype.constructor = SqlStmtContext; + +SqlStmtContext.prototype.selectStmt = function() { + return this.getTypedRuleContext(SelectStmtContext,0); +}; + +SqlStmtContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterSqlStmt(this); + } +}; + +SqlStmtContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitSqlStmt(this); + } +}; + 
+SqlStmtContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitSqlStmt(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.SqlStmtContext = SqlStmtContext; + +PqlParser.prototype.sqlStmt = function() { + + var localctx = new SqlStmtContext(this, this._ctx, this.state); + this.enterRule(localctx, 6, PqlParser.RULE_sqlStmt); + try { + this.enterOuterAlt(localctx, 1); + this.state = 63; + this.selectStmt(); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function SelectStmtContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_selectStmt; + return this; +} + +SelectStmtContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +SelectStmtContext.prototype.constructor = SelectStmtContext; + +SelectStmtContext.prototype.K_SELECT = function() { + return this.getToken(PqlParser.K_SELECT, 0); +}; + +SelectStmtContext.prototype.columns = function() { + return this.getTypedRuleContext(ColumnsContext,0); +}; + +SelectStmtContext.prototype.whereClause = function() { + return this.getTypedRuleContext(WhereClauseContext,0); +}; + +SelectStmtContext.prototype.orderByClause = function() { + return this.getTypedRuleContext(OrderByClauseContext,0); +}; + +SelectStmtContext.prototype.limitClause = function() { + return this.getTypedRuleContext(LimitClauseContext,0); +}; + +SelectStmtContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterSelectStmt(this); + } +}; + +SelectStmtContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitSelectStmt(this); + } +}; + +SelectStmtContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitSelectStmt(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.SelectStmtContext = SelectStmtContext; + +PqlParser.prototype.selectStmt = function() { + + var localctx = new SelectStmtContext(this, this._ctx, this.state); + this.enterRule(localctx, 8, PqlParser.RULE_selectStmt); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 65; + this.match(PqlParser.K_SELECT); + this.state = 66; + this.columns(); + this.state = 68; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_WHERE) { + this.state = 67; + this.whereClause(); + } + + this.state = 71; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_ORDER) { + this.state = 70; + this.orderByClause(); + } + + this.state = 74; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_LIMIT) { + this.state = 73; + this.limitClause(); + } + + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function ColumnsContext(parser, parent, invokingState) { + 
if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_columns; + return this; +} + +ColumnsContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ColumnsContext.prototype.constructor = ColumnsContext; + +ColumnsContext.prototype.expr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(ExprContext); + } else { + return this.getTypedRuleContext(ExprContext,i); + } +}; + +ColumnsContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.COMMA); + } else { + return this.getToken(PqlParser.COMMA, i); + } +}; + + +ColumnsContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterColumns(this); + } +}; + +ColumnsContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitColumns(this); + } +}; + +ColumnsContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitColumns(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.ColumnsContext = ColumnsContext; + +PqlParser.prototype.columns = function() { + + var localctx = new ColumnsContext(this, this._ctx, this.state); + this.enterRule(localctx, 10, PqlParser.RULE_columns); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 76; + this.expr(0); + this.state = 81; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.COMMA) { + this.state = 77; + this.match(PqlParser.COMMA); + this.state = 78; + this.expr(0); + this.state = 83; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function WhereClauseContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_whereClause; + return this; +} + +WhereClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +WhereClauseContext.prototype.constructor = WhereClauseContext; + +WhereClauseContext.prototype.K_WHERE = function() { + return this.getToken(PqlParser.K_WHERE, 0); +}; + +WhereClauseContext.prototype.expr = function() { + return this.getTypedRuleContext(ExprContext,0); +}; + +WhereClauseContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterWhereClause(this); + } +}; + +WhereClauseContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitWhereClause(this); + } +}; + +WhereClauseContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitWhereClause(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.WhereClauseContext = WhereClauseContext; + +PqlParser.prototype.whereClause = function() { + + var 
localctx = new WhereClauseContext(this, this._ctx, this.state); + this.enterRule(localctx, 12, PqlParser.RULE_whereClause); + try { + this.enterOuterAlt(localctx, 1); + this.state = 84; + this.match(PqlParser.K_WHERE); + this.state = 85; + this.expr(0); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function OrderByClauseContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_orderByClause; + return this; +} + +OrderByClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +OrderByClauseContext.prototype.constructor = OrderByClauseContext; + +OrderByClauseContext.prototype.K_ORDER = function() { + return this.getToken(PqlParser.K_ORDER, 0); +}; + +OrderByClauseContext.prototype.K_BY = function() { + return this.getToken(PqlParser.K_BY, 0); +}; + +OrderByClauseContext.prototype.orderExpr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(OrderExprContext); + } else { + return this.getTypedRuleContext(OrderExprContext,i); + } +}; + +OrderByClauseContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.COMMA); + } else { + return this.getToken(PqlParser.COMMA, i); + } +}; + + +OrderByClauseContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterOrderByClause(this); + } +}; + +OrderByClauseContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitOrderByClause(this); + } +}; + +OrderByClauseContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitOrderByClause(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.OrderByClauseContext = OrderByClauseContext; + +PqlParser.prototype.orderByClause = function() { + + var localctx = new OrderByClauseContext(this, this._ctx, this.state); + this.enterRule(localctx, 14, PqlParser.RULE_orderByClause); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 87; + this.match(PqlParser.K_ORDER); + this.state = 88; + this.match(PqlParser.K_BY); + this.state = 89; + this.orderExpr(); + this.state = 94; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.COMMA) { + this.state = 90; + this.match(PqlParser.COMMA); + this.state = 91; + this.orderExpr(); + this.state = 96; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function OrderExprContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = 
PqlParser.RULE_orderExpr; + return this; +} + +OrderExprContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +OrderExprContext.prototype.constructor = OrderExprContext; + +OrderExprContext.prototype.expr = function() { + return this.getTypedRuleContext(ExprContext,0); +}; + +OrderExprContext.prototype.K_ASC = function() { + return this.getToken(PqlParser.K_ASC, 0); +}; + +OrderExprContext.prototype.K_DESC = function() { + return this.getToken(PqlParser.K_DESC, 0); +}; + +OrderExprContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterOrderExpr(this); + } +}; + +OrderExprContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitOrderExpr(this); + } +}; + +OrderExprContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitOrderExpr(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.OrderExprContext = OrderExprContext; + +PqlParser.prototype.orderExpr = function() { + + var localctx = new OrderExprContext(this, this._ctx, this.state); + this.enterRule(localctx, 16, PqlParser.RULE_orderExpr); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 97; + this.expr(0); + this.state = 99; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_ASC || _la===PqlParser.K_DESC) { + this.state = 98; + _la = this._input.LA(1); + if(!(_la===PqlParser.K_ASC || _la===PqlParser.K_DESC)) { + this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + } + + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function LimitClauseContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_limitClause; + this.limit = null; // ExprContext + return this; +} + +LimitClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +LimitClauseContext.prototype.constructor = LimitClauseContext; + +LimitClauseContext.prototype.K_LIMIT = function() { + return this.getToken(PqlParser.K_LIMIT, 0); +}; + +LimitClauseContext.prototype.expr = function() { + return this.getTypedRuleContext(ExprContext,0); +}; + +LimitClauseContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterLimitClause(this); + } +}; + +LimitClauseContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitLimitClause(this); + } +}; + +LimitClauseContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitLimitClause(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.LimitClauseContext = LimitClauseContext; + +PqlParser.prototype.limitClause = function() { + + var localctx = new LimitClauseContext(this, this._ctx, this.state); + this.enterRule(localctx, 18, PqlParser.RULE_limitClause); + try { + this.enterOuterAlt(localctx, 1); + this.state = 101; + this.match(PqlParser.K_LIMIT); + 
this.state = 102; + localctx.limit = this.expr(0); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function ExprContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_expr; + this.left = null; // ExprContext + this.unary_operator = null; // Token + this.right = null; // ExprContext + this.inner = null; // ExprContext + this.function_name = null; // IdentifierMultipartContext + this.operator = null; // Token + return this; +} + +ExprContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ExprContext.prototype.constructor = ExprContext; + +ExprContext.prototype.expr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(ExprContext); + } else { + return this.getTypedRuleContext(ExprContext,i); + } +}; + +ExprContext.prototype.MINUS = function() { + return this.getToken(PqlParser.MINUS, 0); +}; + +ExprContext.prototype.PLUS = function() { + return this.getToken(PqlParser.PLUS, 0); +}; + +ExprContext.prototype.K_NOT = function() { + return this.getToken(PqlParser.K_NOT, 0); +}; + +ExprContext.prototype.OPEN_PAREN = function() { + return this.getToken(PqlParser.OPEN_PAREN, 0); +}; + +ExprContext.prototype.CLOSE_PAREN = function() { + return this.getToken(PqlParser.CLOSE_PAREN, 0); +}; + +ExprContext.prototype.literalValue = function() { + return this.getTypedRuleContext(LiteralValueContext,0); +}; + +ExprContext.prototype.identifierMultipart = function() { + return this.getTypedRuleContext(IdentifierMultipartContext,0); +}; + +ExprContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.COMMA); + } else { + return this.getToken(PqlParser.COMMA, i); + } +}; + + +ExprContext.prototype.taxon = function() { + return this.getTypedRuleContext(TaxonContext,0); +}; + +ExprContext.prototype.STAR = function() { + return this.getToken(PqlParser.STAR, 0); +}; + +ExprContext.prototype.FORWARD_SLASH = function() { + return this.getToken(PqlParser.FORWARD_SLASH, 0); +}; + +ExprContext.prototype.MOD = function() { + return this.getToken(PqlParser.MOD, 0); +}; + +ExprContext.prototype.LT = function() { + return this.getToken(PqlParser.LT, 0); +}; + +ExprContext.prototype.LT_EQ = function() { + return this.getToken(PqlParser.LT_EQ, 0); +}; + +ExprContext.prototype.GT = function() { + return this.getToken(PqlParser.GT, 0); +}; + +ExprContext.prototype.GT_EQ = function() { + return this.getToken(PqlParser.GT_EQ, 0); +}; + +ExprContext.prototype.ASSIGN = function() { + return this.getToken(PqlParser.ASSIGN, 0); +}; + +ExprContext.prototype.EQ = function() { + return this.getToken(PqlParser.EQ, 0); +}; + +ExprContext.prototype.NOT_EQ1 = function() { + return this.getToken(PqlParser.NOT_EQ1, 0); +}; + +ExprContext.prototype.NOT_EQ2 = function() { + return this.getToken(PqlParser.NOT_EQ2, 0); +}; + +ExprContext.prototype.K_IS = function() { + return this.getToken(PqlParser.K_IS, 0); +}; + +ExprContext.prototype.K_AND = function() { + return this.getToken(PqlParser.K_AND, 0); +}; + +ExprContext.prototype.AND = function() { + 
return this.getToken(PqlParser.AND, 0); +}; + +ExprContext.prototype.K_OR = function() { + return this.getToken(PqlParser.K_OR, 0); +}; + +ExprContext.prototype.OR = function() { + return this.getToken(PqlParser.OR, 0); +}; + +ExprContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterExpr(this); + } +}; + +ExprContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitExpr(this); + } +}; + +ExprContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitExpr(this); + } else { + return visitor.visitChildren(this); + } +}; + + + +PqlParser.prototype.expr = function(_p) { + if(_p===undefined) { + _p = 0; + } + var _parentctx = this._ctx; + var _parentState = this.state; + var localctx = new ExprContext(this, this._ctx, _parentState); + var _prevctx = localctx; + var _startState = 20; + this.enterRecursionRule(localctx, 20, PqlParser.RULE_expr, _p); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 127; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,13,this._ctx); + switch(la_) { + case 1: + this.state = 105; + localctx.unary_operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(((((_la - 20)) & ~0x1f) == 0 && ((1 << (_la - 20)) & ((1 << (PqlParser.MINUS - 20)) | (1 << (PqlParser.PLUS - 20)) | (1 << (PqlParser.K_NOT - 20)))) !== 0))) { + localctx.unary_operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 106; + localctx.right = this.expr(11); + break; + + case 2: + this.state = 107; + this.match(PqlParser.OPEN_PAREN); + this.state = 108; + localctx.inner = this.expr(0); + this.state = 109; + this.match(PqlParser.CLOSE_PAREN); + break; + + case 3: + this.state = 111; + this.literalValue(); + break; + + case 4: + this.state = 112; + localctx.function_name = this.identifierMultipart(); + this.state = 113; + this.match(PqlParser.OPEN_PAREN); + this.state = 122; + this._errHandler.sync(this); + _la = this._input.LA(1); + if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.TAXON_OPTIONAL_OPERATOR) | (1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS))) !== 0) || ((((_la - 33)) & ~0x1f) == 0 && ((1 << (_la - 33)) & ((1 << (PqlParser.K_FALSE - 33)) | (1 << (PqlParser.K_NOT - 33)) | (1 << (PqlParser.K_NULL - 33)) | (1 << (PqlParser.K_TRUE - 33)) | (1 << (PqlParser.NUMERIC_LITERAL - 33)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 33)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 33)) | (1 << (PqlParser.WORD - 33)))) !== 0)) { + this.state = 114; + this.expr(0); + this.state = 119; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.COMMA) { + this.state = 115; + this.match(PqlParser.COMMA); + this.state = 116; + this.expr(0); + this.state = 121; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } + + this.state = 124; + this.match(PqlParser.CLOSE_PAREN); + break; + + case 5: + this.state = 126; + this.taxon(); + break; + + } + this._ctx.stop = this._input.LT(-1); + this.state = 149; + this._errHandler.sync(this); + var _alt = this._interp.adaptivePredict(this._input,15,this._ctx) + while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { + if(_alt===1) { + if(this._parseListeners!==null) { + this.triggerExitRuleEvent(); + } + _prevctx = localctx; + this.state = 147; + this._errHandler.sync(this); + 
var la_ = this._interp.adaptivePredict(this._input,14,this._ctx); + switch(la_) { + case 1: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 129; + if (!( this.precpred(this._ctx, 10))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); + } + this.state = 130; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 131; + localctx.right = this.expr(11); + break; + + case 2: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 132; + if (!( this.precpred(this._ctx, 9))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 9)"); + } + this.state = 133; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(_la===PqlParser.MINUS || _la===PqlParser.PLUS)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 134; + localctx.right = this.expr(10); + break; + + case 3: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 135; + if (!( this.precpred(this._ctx, 8))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); + } + this.state = 136; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 137; + localctx.right = this.expr(9); + break; + + case 4: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 138; + if (!( this.precpred(this._ctx, 7))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); + } + this.state = 139; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(((((_la - 4)) & ~0x1f) == 0 && ((1 << (_la - 4)) & ((1 << (PqlParser.EQ - 4)) | (1 << (PqlParser.NOT_EQ1 - 4)) | (1 << (PqlParser.NOT_EQ2 - 4)) | (1 << (PqlParser.ASSIGN - 4)) | (1 << (PqlParser.K_IS - 4)))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 140; + localctx.right = this.expr(8); + break; + + case 5: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 141; + if (!( this.precpred(this._ctx, 6))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); + } + this.state = 142; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + 
if(!(_la===PqlParser.AND || _la===PqlParser.K_AND)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 143; + localctx.right = this.expr(7); + break; + + case 6: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 144; + if (!( this.precpred(this._ctx, 5))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); + } + this.state = 145; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(_la===PqlParser.OR || _la===PqlParser.K_OR)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 146; + localctx.right = this.expr(6); + break; + + } + } + this.state = 151; + this._errHandler.sync(this); + _alt = this._interp.adaptivePredict(this._input,15,this._ctx); + } + + } catch( error) { + if(error instanceof antlr4.error.RecognitionException) { + localctx.exception = error; + this._errHandler.reportError(this, error); + this._errHandler.recover(this, error); + } else { + throw error; + } + } finally { + this.unrollRecursionContexts(_parentctx) + } + return localctx; +}; + + +function TaxonContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_taxon; + this.namespace = null; // IdentifierMultipartContext + this.slug = null; // IdentifierMultipartContext + this.tag = null; // IdentifierMultipartContext + return this; +} + +TaxonContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +TaxonContext.prototype.constructor = TaxonContext; + +TaxonContext.prototype.identifierMultipart = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(IdentifierMultipartContext); + } else { + return this.getTypedRuleContext(IdentifierMultipartContext,i); + } +}; + +TaxonContext.prototype.TAXON_OPTIONAL_OPERATOR = function() { + return this.getToken(PqlParser.TAXON_OPTIONAL_OPERATOR, 0); +}; + +TaxonContext.prototype.PIPE = function() { + return this.getToken(PqlParser.PIPE, 0); +}; + +TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { + return this.getToken(PqlParser.TAXON_TAG_DELIMITER, 0); +}; + +TaxonContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterTaxon(this); + } +}; + +TaxonContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitTaxon(this); + } +}; + +TaxonContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitTaxon(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.TaxonContext = TaxonContext; + +PqlParser.prototype.taxon = function() { + + var localctx = new TaxonContext(this, this._ctx, this.state); + this.enterRule(localctx, 22, PqlParser.RULE_taxon); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 153; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.TAXON_OPTIONAL_OPERATOR) { + this.state = 152; + this.match(PqlParser.TAXON_OPTIONAL_OPERATOR); + } + 
+ this.state = 158; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,17,this._ctx); + if(la_===1) { + this.state = 155; + localctx.namespace = this.identifierMultipart(); + this.state = 156; + this.match(PqlParser.PIPE); + + } + this.state = 160; + localctx.slug = this.identifierMultipart(); + this.state = 163; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,18,this._ctx); + if(la_===1) { + this.state = 161; + this.match(PqlParser.TAXON_TAG_DELIMITER); + this.state = 162; + localctx.tag = this.identifierMultipart(); + + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function IdentifierMultipartContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_identifierMultipart; + return this; +} + +IdentifierMultipartContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +IdentifierMultipartContext.prototype.constructor = IdentifierMultipartContext; + +IdentifierMultipartContext.prototype.WORD = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.WORD); + } else { + return this.getToken(PqlParser.WORD, i); + } +}; + + +IdentifierMultipartContext.prototype.DOT = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.DOT); + } else { + return this.getToken(PqlParser.DOT, i); + } +}; + + +IdentifierMultipartContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterIdentifierMultipart(this); + } +}; + +IdentifierMultipartContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitIdentifierMultipart(this); + } +}; + +IdentifierMultipartContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitIdentifierMultipart(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.IdentifierMultipartContext = IdentifierMultipartContext; + +PqlParser.prototype.identifierMultipart = function() { + + var localctx = new IdentifierMultipartContext(this, this._ctx, this.state); + this.enterRule(localctx, 24, PqlParser.RULE_identifierMultipart); + try { + this.enterOuterAlt(localctx, 1); + this.state = 165; + this.match(PqlParser.WORD); + this.state = 170; + this._errHandler.sync(this); + var _alt = this._interp.adaptivePredict(this._input,19,this._ctx) + while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { + if(_alt===1) { + this.state = 166; + this.match(PqlParser.DOT); + this.state = 167; + this.match(PqlParser.WORD); + } + this.state = 172; + this._errHandler.sync(this); + _alt = this._interp.adaptivePredict(this._input,19,this._ctx); + } + + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function LiteralValueContext(parser, parent, invokingState) { + 
if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_literalValue; + return this; +} + +LiteralValueContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +LiteralValueContext.prototype.constructor = LiteralValueContext; + +LiteralValueContext.prototype.NUMERIC_LITERAL = function() { + return this.getToken(PqlParser.NUMERIC_LITERAL, 0); +}; + +LiteralValueContext.prototype.DOUBLE_QUOTED_STRING = function() { + return this.getToken(PqlParser.DOUBLE_QUOTED_STRING, 0); +}; + +LiteralValueContext.prototype.SINGLE_QUOTED_STRING = function() { + return this.getToken(PqlParser.SINGLE_QUOTED_STRING, 0); +}; + +LiteralValueContext.prototype.K_NULL = function() { + return this.getToken(PqlParser.K_NULL, 0); +}; + +LiteralValueContext.prototype.K_TRUE = function() { + return this.getToken(PqlParser.K_TRUE, 0); +}; + +LiteralValueContext.prototype.K_FALSE = function() { + return this.getToken(PqlParser.K_FALSE, 0); +}; + +LiteralValueContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterLiteralValue(this); + } +}; + +LiteralValueContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitLiteralValue(this); + } +}; + +LiteralValueContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitLiteralValue(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.LiteralValueContext = LiteralValueContext; + +PqlParser.prototype.literalValue = function() { + + var localctx = new LiteralValueContext(this, this._ctx, this.state); + this.enterRule(localctx, 26, PqlParser.RULE_literalValue); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 173; + _la = this._input.LA(1); + if(!(((((_la - 33)) & ~0x1f) == 0 && ((1 << (_la - 33)) & ((1 << (PqlParser.K_FALSE - 33)) | (1 << (PqlParser.K_NULL - 33)) | (1 << (PqlParser.K_TRUE - 33)) | (1 << (PqlParser.NUMERIC_LITERAL - 33)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 33)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 33)))) !== 0))) { + this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +PqlParser.prototype.sempred = function(localctx, ruleIndex, predIndex) { + switch(ruleIndex) { + case 10: + return this.expr_sempred(localctx, predIndex); + default: + throw "No predicate with index:" + ruleIndex; + } +}; + +PqlParser.prototype.expr_sempred = function(localctx, predIndex) { + switch(predIndex) { + case 0: + return this.precpred(this._ctx, 10); + case 1: + return this.precpred(this._ctx, 9); + case 2: + return this.precpred(this._ctx, 8); + case 3: + return this.precpred(this._ctx, 7); + case 4: + return this.precpred(this._ctx, 6); + case 5: + return this.precpred(this._ctx, 5); + default: + throw "No predicate with index:" + predIndex; + } +}; + + +exports.PqlParser = PqlParser; diff --git a/js-temp/PqlParserListener.js b/js-temp/PqlParserListener.js new file mode 100644 index 0000000..bd2ae90 --- /dev/null +++ 
b/js-temp/PqlParserListener.js @@ -0,0 +1,141 @@ +// Generated from grammar/PqlParser.g4 by ANTLR 4.8 +// jshint ignore: start +var antlr4 = require('antlr4/index'); + +// This class defines a complete listener for a parse tree produced by PqlParser. +function PqlParserListener() { + antlr4.tree.ParseTreeListener.call(this); + return this; +} + +PqlParserListener.prototype = Object.create(antlr4.tree.ParseTreeListener.prototype); +PqlParserListener.prototype.constructor = PqlParserListener; + +// Enter a parse tree produced by PqlParser#parseTel. +PqlParserListener.prototype.enterParseTel = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#parseTel. +PqlParserListener.prototype.exitParseTel = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#parsePql. +PqlParserListener.prototype.enterParsePql = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#parsePql. +PqlParserListener.prototype.exitParsePql = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#sqlStmtList. +PqlParserListener.prototype.enterSqlStmtList = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#sqlStmtList. +PqlParserListener.prototype.exitSqlStmtList = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#sqlStmt. +PqlParserListener.prototype.enterSqlStmt = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#sqlStmt. +PqlParserListener.prototype.exitSqlStmt = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#selectStmt. +PqlParserListener.prototype.enterSelectStmt = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#selectStmt. +PqlParserListener.prototype.exitSelectStmt = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#columns. +PqlParserListener.prototype.enterColumns = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#columns. +PqlParserListener.prototype.exitColumns = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#whereClause. +PqlParserListener.prototype.enterWhereClause = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#whereClause. +PqlParserListener.prototype.exitWhereClause = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#orderByClause. +PqlParserListener.prototype.enterOrderByClause = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#orderByClause. +PqlParserListener.prototype.exitOrderByClause = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#orderExpr. +PqlParserListener.prototype.enterOrderExpr = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#orderExpr. +PqlParserListener.prototype.exitOrderExpr = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#limitClause. +PqlParserListener.prototype.enterLimitClause = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#limitClause. +PqlParserListener.prototype.exitLimitClause = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#expr. +PqlParserListener.prototype.enterExpr = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#expr. +PqlParserListener.prototype.exitExpr = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#taxon. +PqlParserListener.prototype.enterTaxon = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#taxon. +PqlParserListener.prototype.exitTaxon = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#identifierMultipart. 
+PqlParserListener.prototype.enterIdentifierMultipart = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#identifierMultipart. +PqlParserListener.prototype.exitIdentifierMultipart = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#literalValue. +PqlParserListener.prototype.enterLiteralValue = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#literalValue. +PqlParserListener.prototype.exitLiteralValue = function(ctx) { +}; + + + +exports.PqlParserListener = PqlParserListener; \ No newline at end of file diff --git a/js-temp/PqlParserVisitor.js b/js-temp/PqlParserVisitor.js new file mode 100644 index 0000000..6970d4e --- /dev/null +++ b/js-temp/PqlParserVisitor.js @@ -0,0 +1,100 @@ +// Generated from grammar/PqlParser.g4 by ANTLR 4.8 +// jshint ignore: start +var antlr4 = require('antlr4/index'); + +// This class defines a complete generic visitor for a parse tree produced by PqlParser. + +function PqlParserVisitor() { + antlr4.tree.ParseTreeVisitor.call(this); + return this; +} + +PqlParserVisitor.prototype = Object.create(antlr4.tree.ParseTreeVisitor.prototype); +PqlParserVisitor.prototype.constructor = PqlParserVisitor; + +// Visit a parse tree produced by PqlParser#parseTel. +PqlParserVisitor.prototype.visitParseTel = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#parsePql. +PqlParserVisitor.prototype.visitParsePql = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#sqlStmtList. +PqlParserVisitor.prototype.visitSqlStmtList = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#sqlStmt. +PqlParserVisitor.prototype.visitSqlStmt = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#selectStmt. +PqlParserVisitor.prototype.visitSelectStmt = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#columns. +PqlParserVisitor.prototype.visitColumns = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#whereClause. +PqlParserVisitor.prototype.visitWhereClause = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#orderByClause. +PqlParserVisitor.prototype.visitOrderByClause = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#orderExpr. +PqlParserVisitor.prototype.visitOrderExpr = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#limitClause. +PqlParserVisitor.prototype.visitLimitClause = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#expr. +PqlParserVisitor.prototype.visitExpr = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#taxon. +PqlParserVisitor.prototype.visitTaxon = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#identifierMultipart. +PqlParserVisitor.prototype.visitIdentifierMultipart = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#literalValue. 
+PqlParserVisitor.prototype.visitLiteralValue = function(ctx) { + return this.visitChildren(ctx); +}; + + + +exports.PqlParserVisitor = PqlParserVisitor; \ No newline at end of file diff --git a/js-temp/TelLexer.js b/js-temp/TelLexer.js index 0697657..4a897ce 100644 --- a/js-temp/TelLexer.js +++ b/js-temp/TelLexer.js @@ -5,7 +5,7 @@ var antlr4 = require('antlr4/index'); var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0002 \u0135\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u0002>\u0210\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", @@ -17,185 +17,320 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "\t#\u0004$\t$\u0004%\t%\u0004&\t&\u0004\'\t\'\u0004(\t(\u0004)\t)\u0004", "*\t*\u0004+\t+\u0004,\t,\u0004-\t-\u0004.\t.\u0004/\t/\u00040\t0\u0004", "1\t1\u00042\t2\u00043\t3\u00044\t4\u00045\t5\u00046\t6\u00047\t7\u0004", - "8\t8\u00049\t9\u0004:\t:\u0003\u0002\u0005\u0002w\n\u0002\u0003\u0002", - "\u0006\u0002z\n\u0002\r\u0002\u000e\u0002{\u0003\u0003\u0005\u0003\u007f", - "\n\u0003\u0003\u0003\u0006\u0003\u0082\n\u0003\r\u0003\u000e\u0003\u0083", - "\u0003\u0003\u0003\u0003\u0006\u0003\u0088\n\u0003\r\u0003\u000e\u0003", - "\u0089\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003", - "\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0005\u0003", - "\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003", - "\u0007\u0003\b\u0003\b\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0007", - "\t\u00a5\n\t\f\t\u000e\t\u00a8\u000b\t\u0003\n\u0003\n\u0003\n\u0003", - "\n\u0007\n\u00ae\n\n\f\n\u000e\n\u00b1\u000b\n\u0003\n\u0003\n\u0003", - "\u000b\u0003\u000b\u0003\u000b\u0003\u000b\u0007\u000b\u00b9\n\u000b", - "\f\u000b\u000e\u000b\u00bc\u000b\u000b\u0003\u000b\u0003\u000b\u0003", - "\f\u0003\f\u0003\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003", - "\u000f\u0003\u0010\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0011\u0003", - "\u0012\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003\u0013\u0003", - "\u0014\u0003\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003", - "\u0016\u0003\u0017\u0003\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003", - "\u0018\u0003\u0019\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003", - "\u001b\u0003\u001c\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003", - "\u001e\u0003\u001e\u0003\u001e\u0003\u001e\u0005\u001e\u00ef\n\u001e", - "\u0003\u001e\u0007\u001e\u00f2\n\u001e\f\u001e\u000e\u001e\u00f5\u000b", - "\u001e\u0003\u001e\u0003\u001e\u0003\u001f\u0006\u001f\u00fa\n\u001f", - "\r\u001f\u000e\u001f\u00fb\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003", - "!\u0003!\u0003\"\u0003\"\u0003#\u0003#\u0003$\u0003$\u0003%\u0003%\u0003", - "&\u0003&\u0003\'\u0003\'\u0003(\u0003(\u0003)\u0003)\u0003*\u0003*\u0003", - "+\u0003+\u0003,\u0003,\u0003-\u0003-\u0003.\u0003.\u0003/\u0003/\u0003", - "0\u00030\u00031\u00031\u00032\u00032\u00033\u00033\u00034\u00034\u0003", - "5\u00035\u00036\u00036\u00037\u00037\u00038\u00038\u00039\u00039\u0003", - ":\u0003:\u0002\u0002;\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b", - "\u0007\r\b\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b", - "\u000f\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+", - 
"\u0017-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?\u0002", - "A\u0002C\u0002E\u0002G\u0002I\u0002K\u0002M\u0002O\u0002Q\u0002S\u0002", - "U\u0002W\u0002Y\u0002[\u0002]\u0002_\u0002a\u0002c\u0002e\u0002g\u0002", - "i\u0002k\u0002m\u0002o\u0002q\u0002s\u0002\u0003\u0002#\u0005\u0002", - "C\\aac|\b\u0002&&002;C\\aac|\u0003\u0002$$\u0003\u0002))\u0004\u0002", - "\f\f\u000f\u000f\u0005\u0002\u000b\f\u000f\u000f\"\"\u0003\u00022;\u0004", - "\u0002CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002", - "GGgg\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKk", - "k\u0004\u0002LLll\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004", - "\u0002PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002", - "TTtt\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXx", - "x\u0004\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||", - "\u0002\u0127\u0002\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002", - "\u0002\u0002\u0002\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002", - "\u0002\u0002\u0002\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002", - "\u0002\u0002\u0002\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002", - "\u0002\u0002\u0002\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002", - "\u0002\u0002\u0002\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002", - "\u0002\u0002\u0002\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002", - "\u0002\u0002\u0002\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002", - "\u0002\u0002\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002", - "\u0002\u0002\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002", - "\u0002+\u0003\u0002\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002", - "/\u0003\u0002\u0002\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003", - "\u0002\u0002\u0002\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002", - "\u0002\u0002\u00029\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002", - "\u0002\u0002=\u0003\u0002\u0002\u0002\u0003v\u0003\u0002\u0002\u0002", - "\u0005~\u0003\u0002\u0002\u0002\u0007\u008b\u0003\u0002\u0002\u0002", - "\t\u0090\u0003\u0002\u0002\u0002\u000b\u0096\u0003\u0002\u0002\u0002", - "\r\u009a\u0003\u0002\u0002\u0002\u000f\u009d\u0003\u0002\u0002\u0002", - "\u0011\u00a2\u0003\u0002\u0002\u0002\u0013\u00a9\u0003\u0002\u0002\u0002", - "\u0015\u00b4\u0003\u0002\u0002\u0002\u0017\u00bf\u0003\u0002\u0002\u0002", - "\u0019\u00c1\u0003\u0002\u0002\u0002\u001b\u00c3\u0003\u0002\u0002\u0002", - "\u001d\u00c5\u0003\u0002\u0002\u0002\u001f\u00c7\u0003\u0002\u0002\u0002", - "!\u00c9\u0003\u0002\u0002\u0002#\u00cc\u0003\u0002\u0002\u0002%\u00cf", - "\u0003\u0002\u0002\u0002\'\u00d2\u0003\u0002\u0002\u0002)\u00d5\u0003", - "\u0002\u0002\u0002+\u00d7\u0003\u0002\u0002\u0002-\u00d9\u0003\u0002", - "\u0002\u0002/\u00dc\u0003\u0002\u0002\u00021\u00df\u0003\u0002\u0002", - "\u00023\u00e1\u0003\u0002\u0002\u00025\u00e3\u0003\u0002\u0002\u0002", - "7\u00e5\u0003\u0002\u0002\u00029\u00e7\u0003\u0002\u0002\u0002;\u00ee", - "\u0003\u0002\u0002\u0002=\u00f9\u0003\u0002\u0002\u0002?\u00ff\u0003", - "\u0002\u0002\u0002A\u0101\u0003\u0002\u0002\u0002C\u0103\u0003\u0002", - "\u0002\u0002E\u0105\u0003\u0002\u0002\u0002G\u0107\u0003\u0002\u0002", - "\u0002I\u0109\u0003\u0002\u0002\u0002K\u010b\u0003\u0002\u0002\u0002", - "M\u010d\u0003\u0002\u0002\u0002O\u010f\u0003\u0002\u0002\u0002Q\u0111", - "\u0003\u0002\u0002\u0002S\u0113\u0003\u0002\u0002\u0002U\u0115\u0003", - "\u0002\u0002\u0002W\u0117\u0003\u0002\u0002\u0002Y\u0119\u0003\u0002", - 
"\u0002\u0002[\u011b\u0003\u0002\u0002\u0002]\u011d\u0003\u0002\u0002", - "\u0002_\u011f\u0003\u0002\u0002\u0002a\u0121\u0003\u0002\u0002\u0002", - "c\u0123\u0003\u0002\u0002\u0002e\u0125\u0003\u0002\u0002\u0002g\u0127", - "\u0003\u0002\u0002\u0002i\u0129\u0003\u0002\u0002\u0002k\u012b\u0003", - "\u0002\u0002\u0002m\u012d\u0003\u0002\u0002\u0002o\u012f\u0003\u0002", - "\u0002\u0002q\u0131\u0003\u0002\u0002\u0002s\u0133\u0003\u0002\u0002", - "\u0002uw\u0007/\u0002\u0002vu\u0003\u0002\u0002\u0002vw\u0003\u0002", - "\u0002\u0002wy\u0003\u0002\u0002\u0002xz\u0005? \u0002yx\u0003\u0002", - "\u0002\u0002z{\u0003\u0002\u0002\u0002{y\u0003\u0002\u0002\u0002{|\u0003", - "\u0002\u0002\u0002|\u0004\u0003\u0002\u0002\u0002}\u007f\u0007/\u0002", - "\u0002~}\u0003\u0002\u0002\u0002~\u007f\u0003\u0002\u0002\u0002\u007f", - "\u0081\u0003\u0002\u0002\u0002\u0080\u0082\u0005? \u0002\u0081\u0080", - "\u0003\u0002\u0002\u0002\u0082\u0083\u0003\u0002\u0002\u0002\u0083\u0081", - "\u0003\u0002\u0002\u0002\u0083\u0084\u0003\u0002\u0002\u0002\u0084\u0085", - "\u0003\u0002\u0002\u0002\u0085\u0087\u00070\u0002\u0002\u0086\u0088", - "\u0005? \u0002\u0087\u0086\u0003\u0002\u0002\u0002\u0088\u0089\u0003", - "\u0002\u0002\u0002\u0089\u0087\u0003\u0002\u0002\u0002\u0089\u008a\u0003", - "\u0002\u0002\u0002\u008a\u0006\u0003\u0002\u0002\u0002\u008b\u008c\u0005", - "g4\u0002\u008c\u008d\u0005c2\u0002\u008d\u008e\u0005i5\u0002\u008e\u008f", - "\u0005I%\u0002\u008f\b\u0003\u0002\u0002\u0002\u0090\u0091\u0005K&\u0002", - "\u0091\u0092\u0005A!\u0002\u0092\u0093\u0005W,\u0002\u0093\u0094\u0005", - "e3\u0002\u0094\u0095\u0005I%\u0002\u0095\n\u0003\u0002\u0002\u0002\u0096", - "\u0097\u0005[.\u0002\u0097\u0098\u0005]/\u0002\u0098\u0099\u0005g4\u0002", - "\u0099\f\u0003\u0002\u0002\u0002\u009a\u009b\u0005Q)\u0002\u009b\u009c", - "\u0005e3\u0002\u009c\u000e\u0003\u0002\u0002\u0002\u009d\u009e\u0005", - "[.\u0002\u009e\u009f\u0005i5\u0002\u009f\u00a0\u0005W,\u0002\u00a0\u00a1", - "\u0005W,\u0002\u00a1\u0010\u0003\u0002\u0002\u0002\u00a2\u00a6\t\u0002", - "\u0002\u0002\u00a3\u00a5\t\u0003\u0002\u0002\u00a4\u00a3\u0003\u0002", - "\u0002\u0002\u00a5\u00a8\u0003\u0002\u0002\u0002\u00a6\u00a4\u0003\u0002", - "\u0002\u0002\u00a6\u00a7\u0003\u0002\u0002\u0002\u00a7\u0012\u0003\u0002", - "\u0002\u0002\u00a8\u00a6\u0003\u0002\u0002\u0002\u00a9\u00af\u0007$", - "\u0002\u0002\u00aa\u00ab\u0007^\u0002\u0002\u00ab\u00ae\u0007$\u0002", - "\u0002\u00ac\u00ae\n\u0004\u0002\u0002\u00ad\u00aa\u0003\u0002\u0002", - "\u0002\u00ad\u00ac\u0003\u0002\u0002\u0002\u00ae\u00b1\u0003\u0002\u0002", - "\u0002\u00af\u00ad\u0003\u0002\u0002\u0002\u00af\u00b0\u0003\u0002\u0002", - "\u0002\u00b0\u00b2\u0003\u0002\u0002\u0002\u00b1\u00af\u0003\u0002\u0002", - "\u0002\u00b2\u00b3\u0007$\u0002\u0002\u00b3\u0014\u0003\u0002\u0002", - "\u0002\u00b4\u00ba\u0007)\u0002\u0002\u00b5\u00b6\u0007^\u0002\u0002", - "\u00b6\u00b9\u0007)\u0002\u0002\u00b7\u00b9\n\u0005\u0002\u0002\u00b8", - "\u00b5\u0003\u0002\u0002\u0002\u00b8\u00b7\u0003\u0002\u0002\u0002\u00b9", - "\u00bc\u0003\u0002\u0002\u0002\u00ba\u00b8\u0003\u0002\u0002\u0002\u00ba", - "\u00bb\u0003\u0002\u0002\u0002\u00bb\u00bd\u0003\u0002\u0002\u0002\u00bc", - "\u00ba\u0003\u0002\u0002\u0002\u00bd\u00be\u0007)\u0002\u0002\u00be", - "\u0016\u0003\u0002\u0002\u0002\u00bf\u00c0\u0007*\u0002\u0002\u00c0", - "\u0018\u0003\u0002\u0002\u0002\u00c1\u00c2\u0007+\u0002\u0002\u00c2", - "\u001a\u0003\u0002\u0002\u0002\u00c3\u00c4\u0007~\u0002\u0002\u00c4", - "\u001c\u0003\u0002\u0002\u0002\u00c5\u00c6\u0007<\u0002\u0002\u00c6", - 
"\u001e\u0003\u0002\u0002\u0002\u00c7\u00c8\u0007.\u0002\u0002\u00c8", - " \u0003\u0002\u0002\u0002\u00c9\u00ca\u0007~\u0002\u0002\u00ca\u00cb", - "\u0007~\u0002\u0002\u00cb\"\u0003\u0002\u0002\u0002\u00cc\u00cd\u0007", - "(\u0002\u0002\u00cd\u00ce\u0007(\u0002\u0002\u00ce$\u0003\u0002\u0002", - "\u0002\u00cf\u00d0\u0007?\u0002\u0002\u00d0\u00d1\u0007?\u0002\u0002", - "\u00d1&\u0003\u0002\u0002\u0002\u00d2\u00d3\u0007#\u0002\u0002\u00d3", - "\u00d4\u0007?\u0002\u0002\u00d4(\u0003\u0002\u0002\u0002\u00d5\u00d6", - "\u0007@\u0002\u0002\u00d6*\u0003\u0002\u0002\u0002\u00d7\u00d8\u0007", - ">\u0002\u0002\u00d8,\u0003\u0002\u0002\u0002\u00d9\u00da\u0007@\u0002", - "\u0002\u00da\u00db\u0007?\u0002\u0002\u00db.\u0003\u0002\u0002\u0002", - "\u00dc\u00dd\u0007>\u0002\u0002\u00dd\u00de\u0007?\u0002\u0002\u00de", - "0\u0003\u0002\u0002\u0002\u00df\u00e0\u0007-\u0002\u0002\u00e02\u0003", - "\u0002\u0002\u0002\u00e1\u00e2\u0007/\u0002\u0002\u00e24\u0003\u0002", - "\u0002\u0002\u00e3\u00e4\u0007,\u0002\u0002\u00e46\u0003\u0002\u0002", - "\u0002\u00e5\u00e6\u00071\u0002\u0002\u00e68\u0003\u0002\u0002\u0002", - "\u00e7\u00e8\u0007A\u0002\u0002\u00e8:\u0003\u0002\u0002\u0002\u00e9", - "\u00ea\u0007/\u0002\u0002\u00ea\u00ef\u0007/\u0002\u0002\u00eb\u00ec", - "\u00071\u0002\u0002\u00ec\u00ef\u00071\u0002\u0002\u00ed\u00ef\u0007", - "%\u0002\u0002\u00ee\u00e9\u0003\u0002\u0002\u0002\u00ee\u00eb\u0003", - "\u0002\u0002\u0002\u00ee\u00ed\u0003\u0002\u0002\u0002\u00ef\u00f3\u0003", - "\u0002\u0002\u0002\u00f0\u00f2\n\u0006\u0002\u0002\u00f1\u00f0\u0003", - "\u0002\u0002\u0002\u00f2\u00f5\u0003\u0002\u0002\u0002\u00f3\u00f1\u0003", - "\u0002\u0002\u0002\u00f3\u00f4\u0003\u0002\u0002\u0002\u00f4\u00f6\u0003", - "\u0002\u0002\u0002\u00f5\u00f3\u0003\u0002\u0002\u0002\u00f6\u00f7\b", - "\u001e\u0002\u0002\u00f7<\u0003\u0002\u0002\u0002\u00f8\u00fa\t\u0007", - "\u0002\u0002\u00f9\u00f8\u0003\u0002\u0002\u0002\u00fa\u00fb\u0003\u0002", - "\u0002\u0002\u00fb\u00f9\u0003\u0002\u0002\u0002\u00fb\u00fc\u0003\u0002", - "\u0002\u0002\u00fc\u00fd\u0003\u0002\u0002\u0002\u00fd\u00fe\b\u001f", - "\u0002\u0002\u00fe>\u0003\u0002\u0002\u0002\u00ff\u0100\t\b\u0002\u0002", - "\u0100@\u0003\u0002\u0002\u0002\u0101\u0102\t\t\u0002\u0002\u0102B\u0003", - "\u0002\u0002\u0002\u0103\u0104\t\n\u0002\u0002\u0104D\u0003\u0002\u0002", - "\u0002\u0105\u0106\t\u000b\u0002\u0002\u0106F\u0003\u0002\u0002\u0002", - "\u0107\u0108\t\f\u0002\u0002\u0108H\u0003\u0002\u0002\u0002\u0109\u010a", - "\t\r\u0002\u0002\u010aJ\u0003\u0002\u0002\u0002\u010b\u010c\t\u000e", - "\u0002\u0002\u010cL\u0003\u0002\u0002\u0002\u010d\u010e\t\u000f\u0002", - "\u0002\u010eN\u0003\u0002\u0002\u0002\u010f\u0110\t\u0010\u0002\u0002", - "\u0110P\u0003\u0002\u0002\u0002\u0111\u0112\t\u0011\u0002\u0002\u0112", - "R\u0003\u0002\u0002\u0002\u0113\u0114\t\u0012\u0002\u0002\u0114T\u0003", - "\u0002\u0002\u0002\u0115\u0116\t\u0013\u0002\u0002\u0116V\u0003\u0002", - "\u0002\u0002\u0117\u0118\t\u0014\u0002\u0002\u0118X\u0003\u0002\u0002", - "\u0002\u0119\u011a\t\u0015\u0002\u0002\u011aZ\u0003\u0002\u0002\u0002", - "\u011b\u011c\t\u0016\u0002\u0002\u011c\\\u0003\u0002\u0002\u0002\u011d", - "\u011e\t\u0017\u0002\u0002\u011e^\u0003\u0002\u0002\u0002\u011f\u0120", - "\t\u0018\u0002\u0002\u0120`\u0003\u0002\u0002\u0002\u0121\u0122\t\u0019", - "\u0002\u0002\u0122b\u0003\u0002\u0002\u0002\u0123\u0124\t\u001a\u0002", - "\u0002\u0124d\u0003\u0002\u0002\u0002\u0125\u0126\t\u001b\u0002\u0002", - "\u0126f\u0003\u0002\u0002\u0002\u0127\u0128\t\u001c\u0002\u0002\u0128", - 
"h\u0003\u0002\u0002\u0002\u0129\u012a\t\u001d\u0002\u0002\u012aj\u0003", - "\u0002\u0002\u0002\u012b\u012c\t\u001e\u0002\u0002\u012cl\u0003\u0002", - "\u0002\u0002\u012d\u012e\t\u001f\u0002\u0002\u012en\u0003\u0002\u0002", - "\u0002\u012f\u0130\t \u0002\u0002\u0130p\u0003\u0002\u0002\u0002\u0131", - "\u0132\t!\u0002\u0002\u0132r\u0003\u0002\u0002\u0002\u0133\u0134\t\"", - "\u0002\u0002\u0134t\u0003\u0002\u0002\u0002\u0010\u0002v{~\u0083\u0089", - "\u00a6\u00ad\u00af\u00b8\u00ba\u00ee\u00f3\u00fb\u0003\u0002\u0003\u0002"].join(""); + "8\t8\u00049\t9\u0004:\t:\u0004;\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004", + "?\t?\u0004@\t@\u0004A\tA\u0004B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004", + "F\tF\u0004G\tG\u0004H\tH\u0004I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004", + "M\tM\u0004N\tN\u0004O\tO\u0004P\tP\u0004Q\tQ\u0004R\tR\u0004S\tS\u0004", + "T\tT\u0004U\tU\u0004V\tV\u0004W\tW\u0004X\tX\u0003\u0002\u0003\u0002", + "\u0003\u0003\u0003\u0003\u0003\u0004\u0003\u0004\u0003\u0005\u0006\u0005", + "\u00b9\n\u0005\r\u0005\u000e\u0005\u00ba\u0003\u0006\u0003\u0006\u0003", + "\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0003\b\u0003\b\u0003\b\u0003", + "\t\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003", + "\u000b\u0003\f\u0003\f\u0003\f\u0003\r\u0003\r\u0003\r\u0003\u000e\u0003", + "\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010\u0003\u0010\u0003", + "\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003", + "\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003\u0016\u0003", + "\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019\u0003\u0019\u0003", + "\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c\u0003\u001c\u0003", + "\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001f\u0003\u001f\u0003", + " \u0003 \u0003!\u0003!\u0003\"\u0003\"\u0003\"\u0003\"\u0003#\u0003", + "#\u0003#\u0003#\u0003$\u0003$\u0003$\u0003%\u0003%\u0003%\u0003%\u0003", + "%\u0003&\u0003&\u0003&\u0003&\u0003&\u0003&\u0003\'\u0003\'\u0003\'", + "\u0003(\u0003(\u0003(\u0003(\u0003(\u0003(\u0003(\u0003)\u0003)\u0003", + ")\u0003)\u0003)\u0003*\u0003*\u0003*\u0003*\u0003*\u0003*\u0003+\u0003", + "+\u0003+\u0003+\u0003,\u0003,\u0003,\u0003,\u0003,\u0003,\u0003,\u0003", + ",\u0003-\u0003-\u0003-\u0003-\u0003-\u0003.\u0003.\u0003.\u0003/\u0003", + "/\u0003/\u0003/\u0003/\u0003/\u00030\u00030\u00030\u00030\u00030\u0003", + "0\u00030\u00031\u00031\u00031\u00031\u00031\u00032\u00032\u00032\u0003", + "2\u00032\u00032\u00033\u00063\u0156\n3\r3\u000e3\u0157\u00033\u0003", + "3\u00073\u015c\n3\f3\u000e3\u015f\u000b3\u00053\u0161\n3\u00033\u0003", + "3\u00053\u0165\n3\u00033\u00063\u0168\n3\r3\u000e3\u0169\u00053\u016c", + "\n3\u00033\u00033\u00063\u0170\n3\r3\u000e3\u0171\u00033\u00033\u0005", + "3\u0176\n3\u00033\u00063\u0179\n3\r3\u000e3\u017a\u00053\u017d\n3\u0005", + "3\u017f\n3\u00034\u00034\u00035\u00035\u00035\u00035\u00075\u0187\n", + "5\f5\u000e5\u018a\u000b5\u00035\u00035\u00036\u00036\u00036\u00036\u0007", + "6\u0192\n6\f6\u000e6\u0195\u000b6\u00036\u00036\u00037\u00037\u0003", + "8\u00038\u00038\u00038\u00078\u019f\n8\f8\u000e8\u01a2\u000b8\u0003", + "8\u00038\u00039\u00039\u00039\u00039\u00079\u01aa\n9\f9\u000e9\u01ad", + "\u000b9\u00039\u00039\u0003:\u0003:\u0003:\u0003:\u0003:\u0005:\u01b6", + "\n:\u0003:\u0007:\u01b9\n:\f:\u000e:\u01bc\u000b:\u0003:\u0003:\u0003", + ";\u0003;\u0003;\u0003;\u0007;\u01c4\n;\f;\u000e;\u01c7\u000b;\u0003", + ";\u0003;\u0003;\u0005;\u01cc\n;\u0003;\u0003;\u0003<\u0003<\u0003<\u0003", + 
"<\u0003=\u0003=\u0007=\u01d6\n=\f=\u000e=\u01d9\u000b=\u0003>\u0003", + ">\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003B\u0003B\u0003C\u0003", + "C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003G\u0003G\u0003H\u0003", + "H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003L\u0003L\u0003M\u0003", + "M\u0003N\u0003N\u0003O\u0003O\u0003P\u0003P\u0003Q\u0003Q\u0003R\u0003", + "R\u0003S\u0003S\u0003T\u0003T\u0003U\u0003U\u0003V\u0003V\u0003W\u0003", + "W\u0003X\u0003X\u0003\u01c5\u0002Y\u0003\u0003\u0005\u0004\u0007\u0005", + "\t\u0006\u000b\u0007\r\b\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r", + "\u0019\u000e\u001b\u000f\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014", + "\'\u0015)\u0016+\u0017-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e", + ";\u001f= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7m8o9q:s;u<", + "w=y>{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002\u0085\u0002\u0087", + "\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002\u0091\u0002\u0093", + "\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002\u009d\u0002\u009f", + "\u0002\u00a1\u0002\u00a3\u0002\u00a5\u0002\u00a7\u0002\u00a9\u0002\u00ab", + "\u0002\u00ad\u0002\u00af\u0002\u0003\u0002%\u0007\u0002002;C\\aac|\u0004", + "\u0002--//\u0003\u0002$$\u0003\u0002))\u0004\u0002\f\f\u000f\u000f\u0005", + "\u0002\u000b\r\u000f\u000f\"\"\u0005\u0002C\\aac|\u0006\u00022;C\\a", + "ac|\u0003\u00022;\u0004\u0002CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004", + "\u0002FFff\u0004\u0002GGgg\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002", + "JJjj\u0004\u0002KKkk\u0004\u0002LLll\u0004\u0002MMmm\u0004\u0002NNn", + "n\u0004\u0002OOoo\u0004\u0002PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004", + "\u0002SSss\u0004\u0002TTtt\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002", + "WWww\u0004\u0002XXxx\u0004\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{", + "{\u0004\u0002\\\\||\u0002\u020e\u0002\u0003\u0003\u0002\u0002\u0002", + "\u0002\u0005\u0003\u0002\u0002\u0002\u0002\u0007\u0003\u0002\u0002\u0002", + "\u0002\t\u0003\u0002\u0002\u0002\u0002\u000b\u0003\u0002\u0002\u0002", + "\u0002\r\u0003\u0002\u0002\u0002\u0002\u000f\u0003\u0002\u0002\u0002", + "\u0002\u0011\u0003\u0002\u0002\u0002\u0002\u0013\u0003\u0002\u0002\u0002", + "\u0002\u0015\u0003\u0002\u0002\u0002\u0002\u0017\u0003\u0002\u0002\u0002", + "\u0002\u0019\u0003\u0002\u0002\u0002\u0002\u001b\u0003\u0002\u0002\u0002", + "\u0002\u001d\u0003\u0002\u0002\u0002\u0002\u001f\u0003\u0002\u0002\u0002", + "\u0002!\u0003\u0002\u0002\u0002\u0002#\u0003\u0002\u0002\u0002\u0002", + "%\u0003\u0002\u0002\u0002\u0002\'\u0003\u0002\u0002\u0002\u0002)\u0003", + "\u0002\u0002\u0002\u0002+\u0003\u0002\u0002\u0002\u0002-\u0003\u0002", + "\u0002\u0002\u0002/\u0003\u0002\u0002\u0002\u00021\u0003\u0002\u0002", + "\u0002\u00023\u0003\u0002\u0002\u0002\u00025\u0003\u0002\u0002\u0002", + "\u00027\u0003\u0002\u0002\u0002\u00029\u0003\u0002\u0002\u0002\u0002", + ";\u0003\u0002\u0002\u0002\u0002=\u0003\u0002\u0002\u0002\u0002?\u0003", + "\u0002\u0002\u0002\u0002A\u0003\u0002\u0002\u0002\u0002C\u0003\u0002", + "\u0002\u0002\u0002E\u0003\u0002\u0002\u0002\u0002G\u0003\u0002\u0002", + "\u0002\u0002I\u0003\u0002\u0002\u0002\u0002K\u0003\u0002\u0002\u0002", + "\u0002M\u0003\u0002\u0002\u0002\u0002O\u0003\u0002\u0002\u0002\u0002", + "Q\u0003\u0002\u0002\u0002\u0002S\u0003\u0002\u0002\u0002\u0002U\u0003", + "\u0002\u0002\u0002\u0002W\u0003\u0002\u0002\u0002\u0002Y\u0003\u0002", + "\u0002\u0002\u0002[\u0003\u0002\u0002\u0002\u0002]\u0003\u0002\u0002", + "\u0002\u0002_\u0003\u0002\u0002\u0002\u0002a\u0003\u0002\u0002\u0002", + 
"\u0002c\u0003\u0002\u0002\u0002\u0002e\u0003\u0002\u0002\u0002\u0002", + "g\u0003\u0002\u0002\u0002\u0002i\u0003\u0002\u0002\u0002\u0002k\u0003", + "\u0002\u0002\u0002\u0002m\u0003\u0002\u0002\u0002\u0002o\u0003\u0002", + "\u0002\u0002\u0002q\u0003\u0002\u0002\u0002\u0002s\u0003\u0002\u0002", + "\u0002\u0002u\u0003\u0002\u0002\u0002\u0002w\u0003\u0002\u0002\u0002", + "\u0002y\u0003\u0002\u0002\u0002\u0003\u00b1\u0003\u0002\u0002\u0002", + "\u0005\u00b3\u0003\u0002\u0002\u0002\u0007\u00b5\u0003\u0002\u0002\u0002", + "\t\u00b8\u0003\u0002\u0002\u0002\u000b\u00bc\u0003\u0002\u0002\u0002", + "\r\u00bf\u0003\u0002\u0002\u0002\u000f\u00c2\u0003\u0002\u0002\u0002", + "\u0011\u00c5\u0003\u0002\u0002\u0002\u0013\u00c8\u0003\u0002\u0002\u0002", + "\u0015\u00cb\u0003\u0002\u0002\u0002\u0017\u00ce\u0003\u0002\u0002\u0002", + "\u0019\u00d1\u0003\u0002\u0002\u0002\u001b\u00d4\u0003\u0002\u0002\u0002", + "\u001d\u00d7\u0003\u0002\u0002\u0002\u001f\u00d9\u0003\u0002\u0002\u0002", + "!\u00db\u0003\u0002\u0002\u0002#\u00dd\u0003\u0002\u0002\u0002%\u00df", + "\u0003\u0002\u0002\u0002\'\u00e1\u0003\u0002\u0002\u0002)\u00e3\u0003", + "\u0002\u0002\u0002+\u00e5\u0003\u0002\u0002\u0002-\u00e7\u0003\u0002", + "\u0002\u0002/\u00e9\u0003\u0002\u0002\u00021\u00eb\u0003\u0002\u0002", + "\u00023\u00ed\u0003\u0002\u0002\u00025\u00ef\u0003\u0002\u0002\u0002", + "7\u00f1\u0003\u0002\u0002\u00029\u00f3\u0003\u0002\u0002\u0002;\u00f5", + "\u0003\u0002\u0002\u0002=\u00f7\u0003\u0002\u0002\u0002?\u00f9\u0003", + "\u0002\u0002\u0002A\u00fb\u0003\u0002\u0002\u0002C\u00fd\u0003\u0002", + "\u0002\u0002E\u0101\u0003\u0002\u0002\u0002G\u0105\u0003\u0002\u0002", + "\u0002I\u0108\u0003\u0002\u0002\u0002K\u010d\u0003\u0002\u0002\u0002", + "M\u0113\u0003\u0002\u0002\u0002O\u0116\u0003\u0002\u0002\u0002Q\u011d", + "\u0003\u0002\u0002\u0002S\u0122\u0003\u0002\u0002\u0002U\u0128\u0003", + "\u0002\u0002\u0002W\u012c\u0003\u0002\u0002\u0002Y\u0134\u0003\u0002", + "\u0002\u0002[\u0139\u0003\u0002\u0002\u0002]\u013c\u0003\u0002\u0002", + "\u0002_\u0142\u0003\u0002\u0002\u0002a\u0149\u0003\u0002\u0002\u0002", + "c\u014e\u0003\u0002\u0002\u0002e\u017e\u0003\u0002\u0002\u0002g\u0180", + "\u0003\u0002\u0002\u0002i\u0182\u0003\u0002\u0002\u0002k\u018d\u0003", + "\u0002\u0002\u0002m\u0198\u0003\u0002\u0002\u0002o\u019a\u0003\u0002", + "\u0002\u0002q\u01a5\u0003\u0002\u0002\u0002s\u01b5\u0003\u0002\u0002", + "\u0002u\u01bf\u0003\u0002\u0002\u0002w\u01cf\u0003\u0002\u0002\u0002", + "y\u01d3\u0003\u0002\u0002\u0002{\u01da\u0003\u0002\u0002\u0002}\u01dc", + "\u0003\u0002\u0002\u0002\u007f\u01de\u0003\u0002\u0002\u0002\u0081\u01e0", + "\u0003\u0002\u0002\u0002\u0083\u01e2\u0003\u0002\u0002\u0002\u0085\u01e4", + "\u0003\u0002\u0002\u0002\u0087\u01e6\u0003\u0002\u0002\u0002\u0089\u01e8", + "\u0003\u0002\u0002\u0002\u008b\u01ea\u0003\u0002\u0002\u0002\u008d\u01ec", + "\u0003\u0002\u0002\u0002\u008f\u01ee\u0003\u0002\u0002\u0002\u0091\u01f0", + "\u0003\u0002\u0002\u0002\u0093\u01f2\u0003\u0002\u0002\u0002\u0095\u01f4", + "\u0003\u0002\u0002\u0002\u0097\u01f6\u0003\u0002\u0002\u0002\u0099\u01f8", + "\u0003\u0002\u0002\u0002\u009b\u01fa\u0003\u0002\u0002\u0002\u009d\u01fc", + "\u0003\u0002\u0002\u0002\u009f\u01fe\u0003\u0002\u0002\u0002\u00a1\u0200", + "\u0003\u0002\u0002\u0002\u00a3\u0202\u0003\u0002\u0002\u0002\u00a5\u0204", + "\u0003\u0002\u0002\u0002\u00a7\u0206\u0003\u0002\u0002\u0002\u00a9\u0208", + "\u0003\u0002\u0002\u0002\u00ab\u020a\u0003\u0002\u0002\u0002\u00ad\u020c", + "\u0003\u0002\u0002\u0002\u00af\u020e\u0003\u0002\u0002\u0002\u00b1\u00b2", 
+ "\u00057\u001c\u0002\u00b2\u0004\u0003\u0002\u0002\u0002\u00b3\u00b4", + "\u0007<\u0002\u0002\u00b4\u0006\u0003\u0002\u0002\u0002\u00b5\u00b6", + "\u0007A\u0002\u0002\u00b6\b\u0003\u0002\u0002\u0002\u00b7\u00b9\t\u0002", + "\u0002\u0002\u00b8\u00b7\u0003\u0002\u0002\u0002\u00b9\u00ba\u0003\u0002", + "\u0002\u0002\u00ba\u00b8\u0003\u0002\u0002\u0002\u00ba\u00bb\u0003\u0002", + "\u0002\u0002\u00bb\n\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007(\u0002", + "\u0002\u00bd\u00be\u0007(\u0002\u0002\u00be\f\u0003\u0002\u0002\u0002", + "\u00bf\u00c0\u0007?\u0002\u0002\u00c0\u00c1\u0007?\u0002\u0002\u00c1", + "\u000e\u0003\u0002\u0002\u0002\u00c2\u00c3\u0007@\u0002\u0002\u00c3", + "\u00c4\u0007?\u0002\u0002\u00c4\u0010\u0003\u0002\u0002\u0002\u00c5", + "\u00c6\u0007>\u0002\u0002\u00c6\u00c7\u0007?\u0002\u0002\u00c7\u0012", + "\u0003\u0002\u0002\u0002\u00c8\u00c9\u0007#\u0002\u0002\u00c9\u00ca", + "\u0007?\u0002\u0002\u00ca\u0014\u0003\u0002\u0002\u0002\u00cb\u00cc", + "\u0007>\u0002\u0002\u00cc\u00cd\u0007@\u0002\u0002\u00cd\u0016\u0003", + "\u0002\u0002\u0002\u00ce\u00cf\u0007~\u0002\u0002\u00cf\u00d0\u0007", + "~\u0002\u0002\u00d0\u0018\u0003\u0002\u0002\u0002\u00d1\u00d2\u0007", + ">\u0002\u0002\u00d2\u00d3\u0007>\u0002\u0002\u00d3\u001a\u0003\u0002", + "\u0002\u0002\u00d4\u00d5\u0007@\u0002\u0002\u00d5\u00d6\u0007@\u0002", + "\u0002\u00d6\u001c\u0003\u0002\u0002\u0002\u00d7\u00d8\u0007(\u0002", + "\u0002\u00d8\u001e\u0003\u0002\u0002\u0002\u00d9\u00da\u0007?\u0002", + "\u0002\u00da \u0003\u0002\u0002\u0002\u00db\u00dc\u0007+\u0002\u0002", + "\u00dc\"\u0003\u0002\u0002\u0002\u00dd\u00de\u0007.\u0002\u0002\u00de", + "$\u0003\u0002\u0002\u0002\u00df\u00e0\u0005)\u0015\u0002\u00e0&\u0003", + "\u0002\u0002\u0002\u00e1\u00e2\u00070\u0002\u0002\u00e2(\u0003\u0002", + "\u0002\u0002\u00e3\u00e4\u00071\u0002\u0002\u00e4*\u0003\u0002\u0002", + "\u0002\u00e5\u00e6\u0007@\u0002\u0002\u00e6,\u0003\u0002\u0002\u0002", + "\u00e7\u00e8\u0007>\u0002\u0002\u00e8.\u0003\u0002\u0002\u0002\u00e9", + "\u00ea\u0007/\u0002\u0002\u00ea0\u0003\u0002\u0002\u0002\u00eb\u00ec", + "\u0007\'\u0002\u0002\u00ec2\u0003\u0002\u0002\u0002\u00ed\u00ee\u0005", + "=\u001f\u0002\u00ee4\u0003\u0002\u0002\u0002\u00ef\u00f0\u0007*\u0002", + "\u0002\u00f06\u0003\u0002\u0002\u0002\u00f1\u00f2\u0007~\u0002\u0002", + "\u00f28\u0003\u0002\u0002\u0002\u00f3\u00f4\u0007-\u0002\u0002\u00f4", + ":\u0003\u0002\u0002\u0002\u00f5\u00f6\u0007=\u0002\u0002\u00f6<\u0003", + "\u0002\u0002\u0002\u00f7\u00f8\u0007,\u0002\u0002\u00f8>\u0003\u0002", + "\u0002\u0002\u00f9\u00fa\u0007\u0080\u0002\u0002\u00fa@\u0003\u0002", + "\u0002\u0002\u00fb\u00fc\u0007a\u0002\u0002\u00fcB\u0003\u0002\u0002", + "\u0002\u00fd\u00fe\u0005}?\u0002\u00fe\u00ff\u0005\u0097L\u0002\u00ff", + "\u0100\u0005\u0083B\u0002\u0100D\u0003\u0002\u0002\u0002\u0101\u0102", + "\u0005}?\u0002\u0102\u0103\u0005\u00a1Q\u0002\u0103\u0104\u0005\u0081", + "A\u0002\u0104F\u0003\u0002\u0002\u0002\u0105\u0106\u0005\u007f@\u0002", + "\u0106\u0107\u0005\u00adW\u0002\u0107H\u0003\u0002\u0002\u0002\u0108", + "\u0109\u0005\u0083B\u0002\u0109\u010a\u0005\u0085C\u0002\u010a\u010b", + "\u0005\u00a1Q\u0002\u010b\u010c\u0005\u0081A\u0002\u010cJ\u0003\u0002", + "\u0002\u0002\u010d\u010e\u0005\u0087D\u0002\u010e\u010f\u0005}?\u0002", + "\u010f\u0110\u0005\u0093J\u0002\u0110\u0111\u0005\u00a1Q\u0002\u0111", + "\u0112\u0005\u0085C\u0002\u0112L\u0003\u0002\u0002\u0002\u0113\u0114", + "\u0005\u008dG\u0002\u0114\u0115\u0005\u00a1Q\u0002\u0115N\u0003\u0002", + 
"\u0002\u0002\u0116\u0117\u0005\u008dG\u0002\u0117\u0118\u0005\u00a1", + "Q\u0002\u0118\u0119\u0005\u0097L\u0002\u0119\u011a\u0005\u00a5S\u0002", + "\u011a\u011b\u0005\u0093J\u0002\u011b\u011c\u0005\u0093J\u0002\u011c", + "P\u0003\u0002\u0002\u0002\u011d\u011e\u0005\u0093J\u0002\u011e\u011f", + "\u0005\u008dG\u0002\u011f\u0120\u0005\u0091I\u0002\u0120\u0121\u0005", + "\u0085C\u0002\u0121R\u0003\u0002\u0002\u0002\u0122\u0123\u0005\u0093", + "J\u0002\u0123\u0124\u0005\u008dG\u0002\u0124\u0125\u0005\u0095K\u0002", + "\u0125\u0126\u0005\u008dG\u0002\u0126\u0127\u0005\u00a3R\u0002\u0127", + "T\u0003\u0002\u0002\u0002\u0128\u0129\u0005\u0097L\u0002\u0129\u012a", + "\u0005\u0099M\u0002\u012a\u012b\u0005\u00a3R\u0002\u012bV\u0003\u0002", + "\u0002\u0002\u012c\u012d\u0005\u0097L\u0002\u012d\u012e\u0005\u0099", + "M\u0002\u012e\u012f\u0005\u00a3R\u0002\u012f\u0130\u0005\u0097L\u0002", + "\u0130\u0131\u0005\u00a5S\u0002\u0131\u0132\u0005\u0093J\u0002\u0132", + "\u0133\u0005\u0093J\u0002\u0133X\u0003\u0002\u0002\u0002\u0134\u0135", + "\u0005\u0097L\u0002\u0135\u0136\u0005\u00a5S\u0002\u0136\u0137\u0005", + "\u0093J\u0002\u0137\u0138\u0005\u0093J\u0002\u0138Z\u0003\u0002\u0002", + "\u0002\u0139\u013a\u0005\u0099M\u0002\u013a\u013b\u0005\u009fP\u0002", + "\u013b\\\u0003\u0002\u0002\u0002\u013c\u013d\u0005\u0099M\u0002\u013d", + "\u013e\u0005\u009fP\u0002\u013e\u013f\u0005\u0083B\u0002\u013f\u0140", + "\u0005\u0085C\u0002\u0140\u0141\u0005\u009fP\u0002\u0141^\u0003\u0002", + "\u0002\u0002\u0142\u0143\u0005\u00a1Q\u0002\u0143\u0144\u0005\u0085", + "C\u0002\u0144\u0145\u0005\u0093J\u0002\u0145\u0146\u0005\u0085C\u0002", + "\u0146\u0147\u0005\u0081A\u0002\u0147\u0148\u0005\u00a3R\u0002\u0148", + "`\u0003\u0002\u0002\u0002\u0149\u014a\u0005\u00a3R\u0002\u014a\u014b", + "\u0005\u009fP\u0002\u014b\u014c\u0005\u00a5S\u0002\u014c\u014d\u0005", + "\u0085C\u0002\u014db\u0003\u0002\u0002\u0002\u014e\u014f\u0005\u00a9", + "U\u0002\u014f\u0150\u0005\u008bF\u0002\u0150\u0151\u0005\u0085C\u0002", + "\u0151\u0152\u0005\u009fP\u0002\u0152\u0153\u0005\u0085C\u0002\u0153", + "d\u0003\u0002\u0002\u0002\u0154\u0156\u0005{>\u0002\u0155\u0154\u0003", + "\u0002\u0002\u0002\u0156\u0157\u0003\u0002\u0002\u0002\u0157\u0155\u0003", + "\u0002\u0002\u0002\u0157\u0158\u0003\u0002\u0002\u0002\u0158\u0160\u0003", + "\u0002\u0002\u0002\u0159\u015d\u00070\u0002\u0002\u015a\u015c\u0005", + "{>\u0002\u015b\u015a\u0003\u0002\u0002\u0002\u015c\u015f\u0003\u0002", + "\u0002\u0002\u015d\u015b\u0003\u0002\u0002\u0002\u015d\u015e\u0003\u0002", + "\u0002\u0002\u015e\u0161\u0003\u0002\u0002\u0002\u015f\u015d\u0003\u0002", + "\u0002\u0002\u0160\u0159\u0003\u0002\u0002\u0002\u0160\u0161\u0003\u0002", + "\u0002\u0002\u0161\u016b\u0003\u0002\u0002\u0002\u0162\u0164\u0005\u0085", + "C\u0002\u0163\u0165\t\u0003\u0002\u0002\u0164\u0163\u0003\u0002\u0002", + "\u0002\u0164\u0165\u0003\u0002\u0002\u0002\u0165\u0167\u0003\u0002\u0002", + "\u0002\u0166\u0168\u0005{>\u0002\u0167\u0166\u0003\u0002\u0002\u0002", + "\u0168\u0169\u0003\u0002\u0002\u0002\u0169\u0167\u0003\u0002\u0002\u0002", + "\u0169\u016a\u0003\u0002\u0002\u0002\u016a\u016c\u0003\u0002\u0002\u0002", + "\u016b\u0162\u0003\u0002\u0002\u0002\u016b\u016c\u0003\u0002\u0002\u0002", + "\u016c\u017f\u0003\u0002\u0002\u0002\u016d\u016f\u00070\u0002\u0002", + "\u016e\u0170\u0005{>\u0002\u016f\u016e\u0003\u0002\u0002\u0002\u0170", + "\u0171\u0003\u0002\u0002\u0002\u0171\u016f\u0003\u0002\u0002\u0002\u0171", + "\u0172\u0003\u0002\u0002\u0002\u0172\u017c\u0003\u0002\u0002\u0002\u0173", + 
"\u0175\u0005\u0085C\u0002\u0174\u0176\t\u0003\u0002\u0002\u0175\u0174", + "\u0003\u0002\u0002\u0002\u0175\u0176\u0003\u0002\u0002\u0002\u0176\u0178", + "\u0003\u0002\u0002\u0002\u0177\u0179\u0005{>\u0002\u0178\u0177\u0003", + "\u0002\u0002\u0002\u0179\u017a\u0003\u0002\u0002\u0002\u017a\u0178\u0003", + "\u0002\u0002\u0002\u017a\u017b\u0003\u0002\u0002\u0002\u017b\u017d\u0003", + "\u0002\u0002\u0002\u017c\u0173\u0003\u0002\u0002\u0002\u017c\u017d\u0003", + "\u0002\u0002\u0002\u017d\u017f\u0003\u0002\u0002\u0002\u017e\u0155\u0003", + "\u0002\u0002\u0002\u017e\u016d\u0003\u0002\u0002\u0002\u017ff\u0003", + "\u0002\u0002\u0002\u0180\u0181\u0005i5\u0002\u0181h\u0003\u0002\u0002", + "\u0002\u0182\u0188\u0007$\u0002\u0002\u0183\u0184\u0007^\u0002\u0002", + "\u0184\u0187\u0007$\u0002\u0002\u0185\u0187\n\u0004\u0002\u0002\u0186", + "\u0183\u0003\u0002\u0002\u0002\u0186\u0185\u0003\u0002\u0002\u0002\u0187", + "\u018a\u0003\u0002\u0002\u0002\u0188\u0186\u0003\u0002\u0002\u0002\u0188", + "\u0189\u0003\u0002\u0002\u0002\u0189\u018b\u0003\u0002\u0002\u0002\u018a", + "\u0188\u0003\u0002\u0002\u0002\u018b\u018c\u0007$\u0002\u0002\u018c", + "j\u0003\u0002\u0002\u0002\u018d\u0193\u0007$\u0002\u0002\u018e\u018f", + "\u0007$\u0002\u0002\u018f\u0192\u0007$\u0002\u0002\u0190\u0192\n\u0004", + "\u0002\u0002\u0191\u018e\u0003\u0002\u0002\u0002\u0191\u0190\u0003\u0002", + "\u0002\u0002\u0192\u0195\u0003\u0002\u0002\u0002\u0193\u0191\u0003\u0002", + "\u0002\u0002\u0193\u0194\u0003\u0002\u0002\u0002\u0194\u0196\u0003\u0002", + "\u0002\u0002\u0195\u0193\u0003\u0002\u0002\u0002\u0196\u0197\u0007$", + "\u0002\u0002\u0197l\u0003\u0002\u0002\u0002\u0198\u0199\u0005o8\u0002", + "\u0199n\u0003\u0002\u0002\u0002\u019a\u01a0\u0007)\u0002\u0002\u019b", + "\u019c\u0007^\u0002\u0002\u019c\u019f\u0007)\u0002\u0002\u019d\u019f", + "\n\u0005\u0002\u0002\u019e\u019b\u0003\u0002\u0002\u0002\u019e\u019d", + "\u0003\u0002\u0002\u0002\u019f\u01a2\u0003\u0002\u0002\u0002\u01a0\u019e", + "\u0003\u0002\u0002\u0002\u01a0\u01a1\u0003\u0002\u0002\u0002\u01a1\u01a3", + "\u0003\u0002\u0002\u0002\u01a2\u01a0\u0003\u0002\u0002\u0002\u01a3\u01a4", + "\u0007)\u0002\u0002\u01a4p\u0003\u0002\u0002\u0002\u01a5\u01ab\u0007", + ")\u0002\u0002\u01a6\u01a7\u0007)\u0002\u0002\u01a7\u01aa\u0007)\u0002", + "\u0002\u01a8\u01aa\n\u0005\u0002\u0002\u01a9\u01a6\u0003\u0002\u0002", + "\u0002\u01a9\u01a8\u0003\u0002\u0002\u0002\u01aa\u01ad\u0003\u0002\u0002", + "\u0002\u01ab\u01a9\u0003\u0002\u0002\u0002\u01ab\u01ac\u0003\u0002\u0002", + "\u0002\u01ac\u01ae\u0003\u0002\u0002\u0002\u01ad\u01ab\u0003\u0002\u0002", + "\u0002\u01ae\u01af\u0007)\u0002\u0002\u01afr\u0003\u0002\u0002\u0002", + "\u01b0\u01b1\u0007/\u0002\u0002\u01b1\u01b6\u0007/\u0002\u0002\u01b2", + "\u01b3\u00071\u0002\u0002\u01b3\u01b6\u00071\u0002\u0002\u01b4\u01b6", + "\u0007%\u0002\u0002\u01b5\u01b0\u0003\u0002\u0002\u0002\u01b5\u01b2", + "\u0003\u0002\u0002\u0002\u01b5\u01b4\u0003\u0002\u0002\u0002\u01b6\u01ba", + "\u0003\u0002\u0002\u0002\u01b7\u01b9\n\u0006\u0002\u0002\u01b8\u01b7", + "\u0003\u0002\u0002\u0002\u01b9\u01bc\u0003\u0002\u0002\u0002\u01ba\u01b8", + "\u0003\u0002\u0002\u0002\u01ba\u01bb\u0003\u0002\u0002\u0002\u01bb\u01bd", + "\u0003\u0002\u0002\u0002\u01bc\u01ba\u0003\u0002\u0002\u0002\u01bd\u01be", + "\b:\u0002\u0002\u01bet\u0003\u0002\u0002\u0002\u01bf\u01c0\u00071\u0002", + "\u0002\u01c0\u01c1\u0007,\u0002\u0002\u01c1\u01c5\u0003\u0002\u0002", + "\u0002\u01c2\u01c4\u000b\u0002\u0002\u0002\u01c3\u01c2\u0003\u0002\u0002", + 
"\u0002\u01c4\u01c7\u0003\u0002\u0002\u0002\u01c5\u01c6\u0003\u0002\u0002", + "\u0002\u01c5\u01c3\u0003\u0002\u0002\u0002\u01c6\u01cb\u0003\u0002\u0002", + "\u0002\u01c7\u01c5\u0003\u0002\u0002\u0002\u01c8\u01c9\u0007,\u0002", + "\u0002\u01c9\u01cc\u00071\u0002\u0002\u01ca\u01cc\u0007\u0002\u0002", + "\u0003\u01cb\u01c8\u0003\u0002\u0002\u0002\u01cb\u01ca\u0003\u0002\u0002", + "\u0002\u01cc\u01cd\u0003\u0002\u0002\u0002\u01cd\u01ce\b;\u0002\u0002", + "\u01cev\u0003\u0002\u0002\u0002\u01cf\u01d0\t\u0007\u0002\u0002\u01d0", + "\u01d1\u0003\u0002\u0002\u0002\u01d1\u01d2\b<\u0002\u0002\u01d2x\u0003", + "\u0002\u0002\u0002\u01d3\u01d7\t\b\u0002\u0002\u01d4\u01d6\t\t\u0002", + "\u0002\u01d5\u01d4\u0003\u0002\u0002\u0002\u01d6\u01d9\u0003\u0002\u0002", + "\u0002\u01d7\u01d5\u0003\u0002\u0002\u0002\u01d7\u01d8\u0003\u0002\u0002", + "\u0002\u01d8z\u0003\u0002\u0002\u0002\u01d9\u01d7\u0003\u0002\u0002", + "\u0002\u01da\u01db\t\n\u0002\u0002\u01db|\u0003\u0002\u0002\u0002\u01dc", + "\u01dd\t\u000b\u0002\u0002\u01dd~\u0003\u0002\u0002\u0002\u01de\u01df", + "\t\f\u0002\u0002\u01df\u0080\u0003\u0002\u0002\u0002\u01e0\u01e1\t\r", + "\u0002\u0002\u01e1\u0082\u0003\u0002\u0002\u0002\u01e2\u01e3\t\u000e", + "\u0002\u0002\u01e3\u0084\u0003\u0002\u0002\u0002\u01e4\u01e5\t\u000f", + "\u0002\u0002\u01e5\u0086\u0003\u0002\u0002\u0002\u01e6\u01e7\t\u0010", + "\u0002\u0002\u01e7\u0088\u0003\u0002\u0002\u0002\u01e8\u01e9\t\u0011", + "\u0002\u0002\u01e9\u008a\u0003\u0002\u0002\u0002\u01ea\u01eb\t\u0012", + "\u0002\u0002\u01eb\u008c\u0003\u0002\u0002\u0002\u01ec\u01ed\t\u0013", + "\u0002\u0002\u01ed\u008e\u0003\u0002\u0002\u0002\u01ee\u01ef\t\u0014", + "\u0002\u0002\u01ef\u0090\u0003\u0002\u0002\u0002\u01f0\u01f1\t\u0015", + "\u0002\u0002\u01f1\u0092\u0003\u0002\u0002\u0002\u01f2\u01f3\t\u0016", + "\u0002\u0002\u01f3\u0094\u0003\u0002\u0002\u0002\u01f4\u01f5\t\u0017", + "\u0002\u0002\u01f5\u0096\u0003\u0002\u0002\u0002\u01f6\u01f7\t\u0018", + "\u0002\u0002\u01f7\u0098\u0003\u0002\u0002\u0002\u01f8\u01f9\t\u0019", + "\u0002\u0002\u01f9\u009a\u0003\u0002\u0002\u0002\u01fa\u01fb\t\u001a", + "\u0002\u0002\u01fb\u009c\u0003\u0002\u0002\u0002\u01fc\u01fd\t\u001b", + "\u0002\u0002\u01fd\u009e\u0003\u0002\u0002\u0002\u01fe\u01ff\t\u001c", + "\u0002\u0002\u01ff\u00a0\u0003\u0002\u0002\u0002\u0200\u0201\t\u001d", + "\u0002\u0002\u0201\u00a2\u0003\u0002\u0002\u0002\u0202\u0203\t\u001e", + "\u0002\u0002\u0203\u00a4\u0003\u0002\u0002\u0002\u0204\u0205\t\u001f", + "\u0002\u0002\u0205\u00a6\u0003\u0002\u0002\u0002\u0206\u0207\t \u0002", + "\u0002\u0207\u00a8\u0003\u0002\u0002\u0002\u0208\u0209\t!\u0002\u0002", + "\u0209\u00aa\u0003\u0002\u0002\u0002\u020a\u020b\t\"\u0002\u0002\u020b", + "\u00ac\u0003\u0002\u0002\u0002\u020c\u020d\t#\u0002\u0002\u020d\u00ae", + "\u0003\u0002\u0002\u0002\u020e\u020f\t$\u0002\u0002\u020f\u00b0\u0003", + "\u0002\u0002\u0002\u001c\u0002\u00ba\u0157\u015d\u0160\u0164\u0169\u016b", + "\u0171\u0175\u017a\u017c\u017e\u0186\u0188\u0191\u0193\u019e\u01a0\u01a9", + "\u01ab\u01b5\u01ba\u01c5\u01cb\u01d7\u0003\u0002\u0003\u0002"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -218,70 +353,119 @@ Object.defineProperty(TelLexer.prototype, "atn", { }); TelLexer.EOF = antlr4.Token.EOF; -TelLexer.INT = 1; -TelLexer.REAL = 2; -TelLexer.TRUE = 3; -TelLexer.FALSE = 4; -TelLexer.NOT = 5; -TelLexer.KW_IS = 6; -TelLexer.KW_NULL = 7; -TelLexer.WORD = 8; -TelLexer.STRING_CONSTANT = 9; -TelLexer.SINGLE_QUOTED_ELEMENT = 10; -TelLexer.L_BRACKET = 11; -TelLexer.R_BRACKET = 12; 
-TelLexer.TAXON_NAMESPACE_DELIMITER = 13; -TelLexer.TAXON_TAG_DELIMITER = 14; -TelLexer.FN_PARAMETER_DELIMITER = 15; -TelLexer.OR = 16; -TelLexer.AND = 17; -TelLexer.EQ = 18; -TelLexer.NEQ = 19; -TelLexer.GT = 20; -TelLexer.LT = 21; -TelLexer.GTEQ = 22; -TelLexer.LTEQ = 23; -TelLexer.PLUS = 24; -TelLexer.MINUS = 25; -TelLexer.MULT = 26; -TelLexer.DIV = 27; -TelLexer.OPTIONAL_TAXON_OPERATOR = 28; -TelLexer.SINGLE_LINE_COMMENT = 29; -TelLexer.WS = 30; +TelLexer.TAXON_NAMESPACE_DELIMITER = 1; +TelLexer.TAXON_TAG_DELIMITER = 2; +TelLexer.TAXON_OPTIONAL_OPERATOR = 3; +TelLexer.IDENTIFIER_TEL = 4; +TelLexer.AND = 5; +TelLexer.EQ = 6; +TelLexer.GT_EQ = 7; +TelLexer.LT_EQ = 8; +TelLexer.NOT_EQ1 = 9; +TelLexer.NOT_EQ2 = 10; +TelLexer.OR = 11; +TelLexer.SHIFT_LEFT = 12; +TelLexer.SHIFT_RIGHT = 13; +TelLexer.AMP = 14; +TelLexer.ASSIGN = 15; +TelLexer.CLOSE_PAREN = 16; +TelLexer.COMMA = 17; +TelLexer.DIV = 18; +TelLexer.DOT = 19; +TelLexer.FORWARD_SLASH = 20; +TelLexer.GT = 21; +TelLexer.LT = 22; +TelLexer.MINUS = 23; +TelLexer.MOD = 24; +TelLexer.MULT = 25; +TelLexer.OPEN_PAREN = 26; +TelLexer.PIPE = 27; +TelLexer.PLUS = 28; +TelLexer.SCOL = 29; +TelLexer.STAR = 30; +TelLexer.TILDE = 31; +TelLexer.UNDER = 32; +TelLexer.K_AND = 33; +TelLexer.K_ASC = 34; +TelLexer.K_BY = 35; +TelLexer.K_DESC = 36; +TelLexer.K_FALSE = 37; +TelLexer.K_IS = 38; +TelLexer.K_ISNULL = 39; +TelLexer.K_LIKE = 40; +TelLexer.K_LIMIT = 41; +TelLexer.K_NOT = 42; +TelLexer.K_NOTNULL = 43; +TelLexer.K_NULL = 44; +TelLexer.K_OR = 45; +TelLexer.K_ORDER = 46; +TelLexer.K_SELECT = 47; +TelLexer.K_TRUE = 48; +TelLexer.K_WHERE = 49; +TelLexer.NUMERIC_LITERAL = 50; +TelLexer.DOUBLE_QUOTED_STRING = 51; +TelLexer.DOUBLE_QUOTED_STRING_TEL = 52; +TelLexer.DOUBLE_QUOTED_STRING_SQL = 53; +TelLexer.SINGLE_QUOTED_STRING = 54; +TelLexer.SINGLE_QUOTED_STRING_TEL = 55; +TelLexer.SINGLE_QUOTED_STRING_SQL = 56; +TelLexer.SINGLE_LINE_COMMENT = 57; +TelLexer.MULTILINE_COMMENT = 58; +TelLexer.SPACES = 59; +TelLexer.IDENTIFIER = 60; TelLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; TelLexer.prototype.modeNames = [ "DEFAULT_MODE" ]; -TelLexer.prototype.literalNames = [ null, null, null, null, null, null, - null, null, null, null, null, "'('", - "')'", "'|'", "':'", "','", "'||'", - "'&&'", "'=='", "'!='", "'>'", "'<'", - "'>='", "'<='", "'+'", "'-'", "'*'", - "'/'", "'?'" ]; +TelLexer.prototype.literalNames = [ null, null, "':'", "'?'", null, "'&&'", + "'=='", "'>='", "'<='", "'!='", "'<>'", + "'||'", "'<<'", "'>>'", "'&'", "'='", + "')'", "','", null, "'.'", "'/'", "'>'", + "'<'", "'-'", "'%'", null, "'('", "'|'", + "'+'", "';'", "'*'", "'~'", "'_'" ]; -TelLexer.prototype.symbolicNames = [ null, "INT", "REAL", "TRUE", "FALSE", - "NOT", "KW_IS", "KW_NULL", "WORD", - "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", - "L_BRACKET", "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", - "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", - "OR", "AND", "EQ", "NEQ", "GT", "LT", - "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", - "DIV", "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", - "WS" ]; +TelLexer.prototype.symbolicNames = [ null, "TAXON_NAMESPACE_DELIMITER", + "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", + "IDENTIFIER_TEL", "AND", "EQ", "GT_EQ", + "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", + "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", + "ASSIGN", "CLOSE_PAREN", "COMMA", "DIV", + "DOT", "FORWARD_SLASH", "GT", "LT", + "MINUS", "MOD", "MULT", "OPEN_PAREN", + "PIPE", "PLUS", "SCOL", "STAR", "TILDE", + "UNDER", "K_AND", "K_ASC", "K_BY", + "K_DESC", "K_FALSE", "K_IS", 
"K_ISNULL", + "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_ORDER", "K_SELECT", + "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", + "SPACES", "IDENTIFIER" ]; -TelLexer.prototype.ruleNames = [ "INT", "REAL", "TRUE", "FALSE", "NOT", - "KW_IS", "KW_NULL", "WORD", "STRING_CONSTANT", - "SINGLE_QUOTED_ELEMENT", "L_BRACKET", "R_BRACKET", - "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", - "FN_PARAMETER_DELIMITER", "OR", "AND", - "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", - "PLUS", "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "SINGLE_LINE_COMMENT", "WS", "DIGIT", "A", - "B", "C", "D", "E", "F", "G", "H", "I", - "J", "K", "L", "M", "N", "O", "P", "Q", - "R", "S", "T", "U", "V", "W", "X", "Y", - "Z" ]; +TelLexer.prototype.ruleNames = [ "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", + "TAXON_OPTIONAL_OPERATOR", "IDENTIFIER_TEL", + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", + "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", + "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", + "DIV", "DOT", "FORWARD_SLASH", "GT", "LT", + "MINUS", "MOD", "MULT", "OPEN_PAREN", "PIPE", + "PLUS", "SCOL", "STAR", "TILDE", "UNDER", + "K_AND", "K_ASC", "K_BY", "K_DESC", "K_FALSE", + "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", + "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "IDENTIFIER", + "DIGIT", "A", "B", "C", "D", "E", "F", + "G", "H", "I", "J", "K", "L", "M", "N", + "O", "P", "Q", "R", "S", "T", "U", "V", + "W", "X", "Y", "Z" ]; TelLexer.prototype.grammarFileName = "TelLexer.g4"; diff --git a/js-temp/TelParser.js b/js-temp/TelParser.js index 5bb34a0..1d4dd04 100644 --- a/js-temp/TelParser.js +++ b/js-temp/TelParser.js @@ -8,58 +8,76 @@ var grammarFileName = "TelParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0003 P\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", - "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0003\u0002\u0003\u0002\u0003", - "\u0002\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0005\u0003\u0014", - "\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003>p\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", + "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004\b", + "\t\b\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0007", + "\u0003\u001c\n\u0003\f\u0003\u000e\u0003\u001f\u000b\u0003\u0005\u0003", + "!\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0005\u0003+\n\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0003\u0003\u0005\u0003\"\n\u0003\u0003\u0003\u0007\u0003%\n\u0003", - "\f\u0003\u000e\u0003(\u000b\u0003\u0003\u0004\u0003\u0004\u0003\u0004", - "\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0004", - "\u0003\u0004\u0005\u00044\n\u0004\u0003\u0005\u0003\u0005\u0003\u0005", - "\u0005\u00059\n\u0005\u0003\u0005\u0003\u0005\u0007\u0005=\n\u0005\f", - 
"\u0005\u000e\u0005@\u000b\u0005\u0003\u0005\u0003\u0005\u0003\u0006", - "\u0005\u0006E\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006", - "J\n\u0006\u0003\u0006\u0003\u0006\u0005\u0006N\n\u0006\u0003\u0006\u0002", - "\u0003\u0004\u0007\u0002\u0004\u0006\b\n\u0002\u0007\u0003\u0002\u001c", - "\u001d\u0003\u0002\u001a\u001b\u0003\u0002\u0012\u0019\u0003\u0002\u0003", - "\u0004\u0003\u0002\u0005\u0006\u0002[\u0002\f\u0003\u0002\u0002\u0002", - "\u0004\u0013\u0003\u0002\u0002\u0002\u00063\u0003\u0002\u0002\u0002", - "\b5\u0003\u0002\u0002\u0002\nD\u0003\u0002\u0002\u0002\f\r\u0005\u0004", - "\u0003\u0002\r\u000e\u0007\u0002\u0002\u0003\u000e\u0003\u0003\u0002", - "\u0002\u0002\u000f\u0010\b\u0003\u0001\u0002\u0010\u0011\u0007\u0007", - "\u0002\u0002\u0011\u0014\u0005\u0004\u0003\b\u0012\u0014\u0005\u0006", - "\u0004\u0002\u0013\u000f\u0003\u0002\u0002\u0002\u0013\u0012\u0003\u0002", - "\u0002\u0002\u0014&\u0003\u0002\u0002\u0002\u0015\u0016\f\u0007\u0002", - "\u0002\u0016\u0017\t\u0002\u0002\u0002\u0017%\u0005\u0004\u0003\b\u0018", - "\u0019\f\u0006\u0002\u0002\u0019\u001a\t\u0003\u0002\u0002\u001a%\u0005", - "\u0004\u0003\u0007\u001b\u001c\f\u0005\u0002\u0002\u001c\u001d\t\u0004", - "\u0002\u0002\u001d%\u0005\u0004\u0003\u0006\u001e\u001f\f\u0004\u0002", - "\u0002\u001f!\u0007\b\u0002\u0002 \"\u0007\u0007\u0002\u0002! \u0003", - "\u0002\u0002\u0002!\"\u0003\u0002\u0002\u0002\"#\u0003\u0002\u0002\u0002", - "#%\u0007\t\u0002\u0002$\u0015\u0003\u0002\u0002\u0002$\u0018\u0003\u0002", - "\u0002\u0002$\u001b\u0003\u0002\u0002\u0002$\u001e\u0003\u0002\u0002", - "\u0002%(\u0003\u0002\u0002\u0002&$\u0003\u0002\u0002\u0002&\'\u0003", - "\u0002\u0002\u0002\'\u0005\u0003\u0002\u0002\u0002(&\u0003\u0002\u0002", - "\u0002)*\u0007\r\u0002\u0002*+\u0005\u0004\u0003\u0002+,\u0007\u000e", - "\u0002\u0002,4\u0003\u0002\u0002\u0002-4\t\u0005\u0002\u0002.4\t\u0006", - "\u0002\u0002/4\u0007\f\u0002\u000204\u0007\u000b\u0002\u000214\u0005", - "\b\u0005\u000224\u0005\n\u0006\u00023)\u0003\u0002\u0002\u00023-\u0003", - "\u0002\u0002\u00023.\u0003\u0002\u0002\u00023/\u0003\u0002\u0002\u0002", - "30\u0003\u0002\u0002\u000231\u0003\u0002\u0002\u000232\u0003\u0002\u0002", - "\u00024\u0007\u0003\u0002\u0002\u000256\u0007\n\u0002\u000268\u0007", - "\r\u0002\u000279\u0005\u0004\u0003\u000287\u0003\u0002\u0002\u00028", - "9\u0003\u0002\u0002\u00029>\u0003\u0002\u0002\u0002:;\u0007\u0011\u0002", - "\u0002;=\u0005\u0004\u0003\u0002<:\u0003\u0002\u0002\u0002=@\u0003\u0002", - "\u0002\u0002><\u0003\u0002\u0002\u0002>?\u0003\u0002\u0002\u0002?A\u0003", - "\u0002\u0002\u0002@>\u0003\u0002\u0002\u0002AB\u0007\u000e\u0002\u0002", - "B\t\u0003\u0002\u0002\u0002CE\u0007\u001e\u0002\u0002DC\u0003\u0002", - "\u0002\u0002DE\u0003\u0002\u0002\u0002EF\u0003\u0002\u0002\u0002FI\u0007", - "\n\u0002\u0002GH\u0007\u000f\u0002\u0002HJ\u0007\n\u0002\u0002IG\u0003", - "\u0002\u0002\u0002IJ\u0003\u0002\u0002\u0002JM\u0003\u0002\u0002\u0002", - "KL\u0007\u0010\u0002\u0002LN\u0007\n\u0002\u0002MK\u0003\u0002\u0002", - "\u0002MN\u0003\u0002\u0002\u0002N\u000b\u0003\u0002\u0002\u0002\f\u0013", - "!$&38>DIM"].join(""); + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0005\u0003B\n\u0003\u0007\u0003D\n\u0003\f", + "\u0003\u000e\u0003G\u000b\u0003\u0003\u0004\u0005\u0004J\n\u0004\u0003", + "\u0004\u0003\u0004\u0005\u0004N\n\u0004\u0003\u0004\u0003\u0004\u0005", + 
"\u0004R\n\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0005\u0005W\n\u0005", + "\u0003\u0006\u0005\u0006Z\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006", + "\u0005\u0006_\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006", + "d\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0007\u0007i\n\u0007\f", + "\u0007\u000e\u0007l\u000b\u0007\u0003\b\u0003\b\u0003\b\u0002\u0003", + "\u0004\t\u0002\u0004\u0006\b\n\f\u000e\u0002\n\u0005\u0002\u0019\u0019", + "\u001e\u001e,,\u0004\u0002\u0014\u0014\u001a\u001b\u0004\u0002\u0019", + "\u0019\u001e\u001e\u0004\u0002\t\n\u0017\u0018\u0005\u0002\b\b\u000b", + "\f\u0011\u0011\u0004\u0002\u0007\u0007##\u0004\u0002\r\r//\u0007\u0002", + "\'\'..224588\u0002~\u0002\u0010\u0003\u0002\u0002\u0002\u0004*\u0003", + "\u0002\u0002\u0002\u0006Q\u0003\u0002\u0002\u0002\bV\u0003\u0002\u0002", + "\u0002\nY\u0003\u0002\u0002\u0002\fe\u0003\u0002\u0002\u0002\u000em", + "\u0003\u0002\u0002\u0002\u0010\u0011\u0005\u0004\u0003\u0002\u0011\u0012", + "\u0007\u0002\u0002\u0003\u0012\u0003\u0003\u0002\u0002\u0002\u0013\u0014", + "\b\u0003\u0001\u0002\u0014\u0015\t\u0002\u0002\u0002\u0015+\u0005\u0004", + "\u0003\u000e\u0016\u0017\u0005\f\u0007\u0002\u0017 \u0007\u001c\u0002", + "\u0002\u0018\u001d\u0005\u0004\u0003\u0002\u0019\u001a\u0007\u0013\u0002", + "\u0002\u001a\u001c\u0005\u0004\u0003\u0002\u001b\u0019\u0003\u0002\u0002", + "\u0002\u001c\u001f\u0003\u0002\u0002\u0002\u001d\u001b\u0003\u0002\u0002", + "\u0002\u001d\u001e\u0003\u0002\u0002\u0002\u001e!\u0003\u0002\u0002", + "\u0002\u001f\u001d\u0003\u0002\u0002\u0002 \u0018\u0003\u0002\u0002", + "\u0002 !\u0003\u0002\u0002\u0002!\"\u0003\u0002\u0002\u0002\"#\u0007", + "\u0012\u0002\u0002#+\u0003\u0002\u0002\u0002$%\u0007\u001c\u0002\u0002", + "%&\u0005\u0004\u0003\u0002&\'\u0007\u0012\u0002\u0002\'+\u0003\u0002", + "\u0002\u0002(+\u0005\n\u0006\u0002)+\u0005\u000e\b\u0002*\u0013\u0003", + "\u0002\u0002\u0002*\u0016\u0003\u0002\u0002\u0002*$\u0003\u0002\u0002", + "\u0002*(\u0003\u0002\u0002\u0002*)\u0003\u0002\u0002\u0002+E\u0003\u0002", + "\u0002\u0002,-\f\u000b\u0002\u0002-.\t\u0003\u0002\u0002.D\u0005\u0004", + "\u0003\f/0\f\n\u0002\u000201\t\u0004\u0002\u00021D\u0005\u0004\u0003", + "\u000b23\f\t\u0002\u000234\t\u0005\u0002\u00024D\u0005\u0004\u0003\n", + "56\f\b\u0002\u000267\t\u0006\u0002\u00027D\u0005\u0004\u0003\t89\f\u0007", + "\u0002\u00029:\t\u0007\u0002\u0002:D\u0005\u0004\u0003\b;<\f\u0006\u0002", + "\u0002<=\t\b\u0002\u0002=D\u0005\u0004\u0003\u0007>A\f\f\u0002\u0002", + "?B\u0005\b\u0005\u0002@B\u0005\u0006\u0004\u0002A?\u0003\u0002\u0002", + "\u0002A@\u0003\u0002\u0002\u0002BD\u0003\u0002\u0002\u0002C,\u0003\u0002", + "\u0002\u0002C/\u0003\u0002\u0002\u0002C2\u0003\u0002\u0002\u0002C5\u0003", + "\u0002\u0002\u0002C8\u0003\u0002\u0002\u0002C;\u0003\u0002\u0002\u0002", + "C>\u0003\u0002\u0002\u0002DG\u0003\u0002\u0002\u0002EC\u0003\u0002\u0002", + "\u0002EF\u0003\u0002\u0002\u0002F\u0005\u0003\u0002\u0002\u0002GE\u0003", + "\u0002\u0002\u0002HJ\u0007(\u0002\u0002IH\u0003\u0002\u0002\u0002IJ", + "\u0003\u0002\u0002\u0002JK\u0003\u0002\u0002\u0002KR\u0007-\u0002\u0002", + "LN\u0007(\u0002\u0002ML\u0003\u0002\u0002\u0002MN\u0003\u0002\u0002", + "\u0002NO\u0003\u0002\u0002\u0002OP\u0007,\u0002\u0002PR\u0007.\u0002", + "\u0002QI\u0003\u0002\u0002\u0002QM\u0003\u0002\u0002\u0002R\u0007\u0003", + "\u0002\u0002\u0002SW\u0007)\u0002\u0002TU\u0007(\u0002\u0002UW\u0007", + ".\u0002\u0002VS\u0003\u0002\u0002\u0002VT\u0003\u0002\u0002\u0002W\t", + "\u0003\u0002\u0002\u0002XZ\u0007\u0005\u0002\u0002YX\u0003\u0002\u0002", + 
"\u0002YZ\u0003\u0002\u0002\u0002Z^\u0003\u0002\u0002\u0002[\\\u0005", + "\f\u0007\u0002\\]\u0007\u0003\u0002\u0002]_\u0003\u0002\u0002\u0002", + "^[\u0003\u0002\u0002\u0002^_\u0003\u0002\u0002\u0002_`\u0003\u0002\u0002", + "\u0002`c\u0005\f\u0007\u0002ab\u0007\u0004\u0002\u0002bd\u0005\f\u0007", + "\u0002ca\u0003\u0002\u0002\u0002cd\u0003\u0002\u0002\u0002d\u000b\u0003", + "\u0002\u0002\u0002ej\u0007>\u0002\u0002fg\u0007\u0015\u0002\u0002gi", + "\u0007>\u0002\u0002hf\u0003\u0002\u0002\u0002il\u0003\u0002\u0002\u0002", + "jh\u0003\u0002\u0002\u0002jk\u0003\u0002\u0002\u0002k\r\u0003\u0002", + "\u0002\u0002lj\u0003\u0002\u0002\u0002mn\t\t\u0002\u0002n\u000f\u0003", + "\u0002\u0002\u0002\u0010\u001d *ACEIMQVY^cj"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -68,20 +86,30 @@ var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new a var sharedContextCache = new antlr4.PredictionContextCache(); -var literalNames = [ null, null, null, null, null, null, null, null, null, - null, null, "'('", "')'", "'|'", "':'", "','", "'||'", - "'&&'", "'=='", "'!='", "'>'", "'<'", "'>='", "'<='", - "'+'", "'-'", "'*'", "'/'", "'?'" ]; - -var symbolicNames = [ null, "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", - "KW_NULL", "WORD", "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", - "L_BRACKET", "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", - "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", - "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", - "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "SINGLE_LINE_COMMENT", "WS" ]; - -var ruleNames = [ "parse", "expr", "atom", "fn", "taxon" ]; +var literalNames = [ null, null, "':'", "'?'", null, "'&&'", "'=='", "'>='", + "'<='", "'!='", "'<>'", "'||'", "'<<'", "'>>'", "'&'", + "'='", "')'", "','", null, "'.'", "'/'", "'>'", "'<'", + "'-'", "'%'", null, "'('", "'|'", "'+'", "';'", "'*'", + "'~'", "'_'" ]; + +var symbolicNames = [ null, "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", + "TAXON_OPTIONAL_OPERATOR", "IDENTIFIER_TEL", "AND", + "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", + "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", + "COMMA", "DIV", "DOT", "FORWARD_SLASH", "GT", "LT", + "MINUS", "MOD", "MULT", "OPEN_PAREN", "PIPE", "PLUS", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", + "IDENTIFIER" ]; + +var ruleNames = [ "parse", "expr", "isNotNull", "isNull", "taxon", "identifierMultipart", + "literalValue" ]; function TelParser (input) { antlr4.Parser.call(this, input); @@ -102,42 +130,74 @@ Object.defineProperty(TelParser.prototype, "atn", { }); TelParser.EOF = antlr4.Token.EOF; -TelParser.INT = 1; -TelParser.REAL = 2; -TelParser.TRUE = 3; -TelParser.FALSE = 4; -TelParser.NOT = 5; -TelParser.KW_IS = 6; -TelParser.KW_NULL = 7; -TelParser.WORD = 8; -TelParser.STRING_CONSTANT = 9; -TelParser.SINGLE_QUOTED_ELEMENT = 10; -TelParser.L_BRACKET = 11; -TelParser.R_BRACKET = 12; -TelParser.TAXON_NAMESPACE_DELIMITER = 13; -TelParser.TAXON_TAG_DELIMITER = 14; -TelParser.FN_PARAMETER_DELIMITER = 15; -TelParser.OR = 16; -TelParser.AND = 17; -TelParser.EQ = 18; -TelParser.NEQ = 19; 
-TelParser.GT = 20; -TelParser.LT = 21; -TelParser.GTEQ = 22; -TelParser.LTEQ = 23; -TelParser.PLUS = 24; -TelParser.MINUS = 25; -TelParser.MULT = 26; -TelParser.DIV = 27; -TelParser.OPTIONAL_TAXON_OPERATOR = 28; -TelParser.SINGLE_LINE_COMMENT = 29; -TelParser.WS = 30; +TelParser.TAXON_NAMESPACE_DELIMITER = 1; +TelParser.TAXON_TAG_DELIMITER = 2; +TelParser.TAXON_OPTIONAL_OPERATOR = 3; +TelParser.IDENTIFIER_TEL = 4; +TelParser.AND = 5; +TelParser.EQ = 6; +TelParser.GT_EQ = 7; +TelParser.LT_EQ = 8; +TelParser.NOT_EQ1 = 9; +TelParser.NOT_EQ2 = 10; +TelParser.OR = 11; +TelParser.SHIFT_LEFT = 12; +TelParser.SHIFT_RIGHT = 13; +TelParser.AMP = 14; +TelParser.ASSIGN = 15; +TelParser.CLOSE_PAREN = 16; +TelParser.COMMA = 17; +TelParser.DIV = 18; +TelParser.DOT = 19; +TelParser.FORWARD_SLASH = 20; +TelParser.GT = 21; +TelParser.LT = 22; +TelParser.MINUS = 23; +TelParser.MOD = 24; +TelParser.MULT = 25; +TelParser.OPEN_PAREN = 26; +TelParser.PIPE = 27; +TelParser.PLUS = 28; +TelParser.SCOL = 29; +TelParser.STAR = 30; +TelParser.TILDE = 31; +TelParser.UNDER = 32; +TelParser.K_AND = 33; +TelParser.K_ASC = 34; +TelParser.K_BY = 35; +TelParser.K_DESC = 36; +TelParser.K_FALSE = 37; +TelParser.K_IS = 38; +TelParser.K_ISNULL = 39; +TelParser.K_LIKE = 40; +TelParser.K_LIMIT = 41; +TelParser.K_NOT = 42; +TelParser.K_NOTNULL = 43; +TelParser.K_NULL = 44; +TelParser.K_OR = 45; +TelParser.K_ORDER = 46; +TelParser.K_SELECT = 47; +TelParser.K_TRUE = 48; +TelParser.K_WHERE = 49; +TelParser.NUMERIC_LITERAL = 50; +TelParser.DOUBLE_QUOTED_STRING = 51; +TelParser.DOUBLE_QUOTED_STRING_TEL = 52; +TelParser.DOUBLE_QUOTED_STRING_SQL = 53; +TelParser.SINGLE_QUOTED_STRING = 54; +TelParser.SINGLE_QUOTED_STRING_TEL = 55; +TelParser.SINGLE_QUOTED_STRING_SQL = 56; +TelParser.SINGLE_LINE_COMMENT = 57; +TelParser.MULTILINE_COMMENT = 58; +TelParser.SPACES = 59; +TelParser.IDENTIFIER = 60; TelParser.RULE_parse = 0; TelParser.RULE_expr = 1; -TelParser.RULE_atom = 2; -TelParser.RULE_fn = 3; +TelParser.RULE_isNotNull = 2; +TelParser.RULE_isNull = 3; TelParser.RULE_taxon = 4; +TelParser.RULE_identifierMultipart = 5; +TelParser.RULE_literalValue = 6; function ParseContext(parser, parent, invokingState) { @@ -195,9 +255,9 @@ TelParser.prototype.parse = function() { this.enterRule(localctx, 0, TelParser.RULE_parse); try { this.enterOuterAlt(localctx, 1); - this.state = 10; + this.state = 14; this.expr(0); - this.state = 11; + this.state = 15; this.match(TelParser.EOF); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -224,117 +284,21 @@ function ExprContext(parser, parent, invokingState) { antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; this.ruleIndex = TelParser.RULE_expr; + this.left = null; // ExprContext + this.unary_operator = null; // Token + this.right = null; // ExprContext + this.function_name = null; // IdentifierMultipartContext + this.arguments = null; // ExprContext + this.operator = null; // Token + this.is_null = null; // IsNullContext + this.is_not_null = null; // IsNotNullContext return this; } ExprContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); ExprContext.prototype.constructor = ExprContext; - - -ExprContext.prototype.copyFrom = function(ctx) { - antlr4.ParserRuleContext.prototype.copyFrom.call(this, ctx); -}; - -function NullTestExprContext(parser, ctx) { - ExprContext.call(this, parser); - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} - -NullTestExprContext.prototype = Object.create(ExprContext.prototype); 
-NullTestExprContext.prototype.constructor = NullTestExprContext; - -TelParser.NullTestExprContext = NullTestExprContext; - -NullTestExprContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); -}; - -NullTestExprContext.prototype.KW_IS = function() { - return this.getToken(TelParser.KW_IS, 0); -}; - -NullTestExprContext.prototype.KW_NULL = function() { - return this.getToken(TelParser.KW_NULL, 0); -}; - -NullTestExprContext.prototype.NOT = function() { - return this.getToken(TelParser.NOT, 0); -}; -NullTestExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterNullTestExpr(this); - } -}; - -NullTestExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitNullTestExpr(this); - } -}; - -NullTestExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitNullTestExpr(this); - } else { - return visitor.visitChildren(this); - } -}; - - -function NotExprContext(parser, ctx) { - ExprContext.call(this, parser); - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} - -NotExprContext.prototype = Object.create(ExprContext.prototype); -NotExprContext.prototype.constructor = NotExprContext; - -TelParser.NotExprContext = NotExprContext; - -NotExprContext.prototype.NOT = function() { - return this.getToken(TelParser.NOT, 0); -}; - -NotExprContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); -}; -NotExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterNotExpr(this); - } -}; - -NotExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitNotExpr(this); - } -}; - -NotExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitNotExpr(this); - } else { - return visitor.visitChildren(this); - } -}; - - -function LogicalExprContext(parser, ctx) { - ExprContext.call(this, parser); - this.op = null; // Token; - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} - -LogicalExprContext.prototype = Object.create(ExprContext.prototype); -LogicalExprContext.prototype.constructor = LogicalExprContext; - -TelParser.LogicalExprContext = LogicalExprContext; - -LogicalExprContext.prototype.expr = function(i) { +ExprContext.prototype.expr = function(i) { if(i===undefined) { i = null; } @@ -345,189 +309,133 @@ LogicalExprContext.prototype.expr = function(i) { } }; -LogicalExprContext.prototype.OR = function() { - return this.getToken(TelParser.OR, 0); -}; - -LogicalExprContext.prototype.AND = function() { - return this.getToken(TelParser.AND, 0); -}; - -LogicalExprContext.prototype.EQ = function() { - return this.getToken(TelParser.EQ, 0); +ExprContext.prototype.MINUS = function() { + return this.getToken(TelParser.MINUS, 0); }; -LogicalExprContext.prototype.NEQ = function() { - return this.getToken(TelParser.NEQ, 0); +ExprContext.prototype.PLUS = function() { + return this.getToken(TelParser.PLUS, 0); }; -LogicalExprContext.prototype.GT = function() { - return this.getToken(TelParser.GT, 0); +ExprContext.prototype.K_NOT = function() { + return this.getToken(TelParser.K_NOT, 0); }; -LogicalExprContext.prototype.LT = function() { - return this.getToken(TelParser.LT, 0); +ExprContext.prototype.OPEN_PAREN = function() { + return this.getToken(TelParser.OPEN_PAREN, 0); }; 
-LogicalExprContext.prototype.GTEQ = function() { - return this.getToken(TelParser.GTEQ, 0); +ExprContext.prototype.CLOSE_PAREN = function() { + return this.getToken(TelParser.CLOSE_PAREN, 0); }; -LogicalExprContext.prototype.LTEQ = function() { - return this.getToken(TelParser.LTEQ, 0); -}; -LogicalExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterLogicalExpr(this); - } +ExprContext.prototype.identifierMultipart = function() { + return this.getTypedRuleContext(IdentifierMultipartContext,0); }; -LogicalExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitLogicalExpr(this); +ExprContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; } -}; - -LogicalExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitLogicalExpr(this); + if(i===null) { + return this.getTokens(TelParser.COMMA); } else { - return visitor.visitChildren(this); + return this.getToken(TelParser.COMMA, i); } }; -function MultiplicationExprContext(parser, ctx) { - ExprContext.call(this, parser); - this.op = null; // Token; - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} - -MultiplicationExprContext.prototype = Object.create(ExprContext.prototype); -MultiplicationExprContext.prototype.constructor = MultiplicationExprContext; - -TelParser.MultiplicationExprContext = MultiplicationExprContext; +ExprContext.prototype.taxon = function() { + return this.getTypedRuleContext(TaxonContext,0); +}; -MultiplicationExprContext.prototype.expr = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(ExprContext); - } else { - return this.getTypedRuleContext(ExprContext,i); - } +ExprContext.prototype.literalValue = function() { + return this.getTypedRuleContext(LiteralValueContext,0); }; -MultiplicationExprContext.prototype.MULT = function() { +ExprContext.prototype.MULT = function() { return this.getToken(TelParser.MULT, 0); }; -MultiplicationExprContext.prototype.DIV = function() { +ExprContext.prototype.DIV = function() { return this.getToken(TelParser.DIV, 0); }; -MultiplicationExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterMultiplicationExpr(this); - } -}; -MultiplicationExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitMultiplicationExpr(this); - } +ExprContext.prototype.MOD = function() { + return this.getToken(TelParser.MOD, 0); }; -MultiplicationExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitMultiplicationExpr(this); - } else { - return visitor.visitChildren(this); - } +ExprContext.prototype.LT = function() { + return this.getToken(TelParser.LT, 0); }; +ExprContext.prototype.LT_EQ = function() { + return this.getToken(TelParser.LT_EQ, 0); +}; -function AtomExprContext(parser, ctx) { - ExprContext.call(this, parser); - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} - -AtomExprContext.prototype = Object.create(ExprContext.prototype); -AtomExprContext.prototype.constructor = AtomExprContext; - -TelParser.AtomExprContext = AtomExprContext; +ExprContext.prototype.GT = function() { + return this.getToken(TelParser.GT, 0); +}; -AtomExprContext.prototype.atom = function() { - return this.getTypedRuleContext(AtomContext,0); 
+ExprContext.prototype.GT_EQ = function() { + return this.getToken(TelParser.GT_EQ, 0); }; -AtomExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterAtomExpr(this); - } + +ExprContext.prototype.ASSIGN = function() { + return this.getToken(TelParser.ASSIGN, 0); }; -AtomExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitAtomExpr(this); - } +ExprContext.prototype.EQ = function() { + return this.getToken(TelParser.EQ, 0); }; -AtomExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitAtomExpr(this); - } else { - return visitor.visitChildren(this); - } +ExprContext.prototype.NOT_EQ1 = function() { + return this.getToken(TelParser.NOT_EQ1, 0); }; +ExprContext.prototype.NOT_EQ2 = function() { + return this.getToken(TelParser.NOT_EQ2, 0); +}; -function AdditiveExprContext(parser, ctx) { - ExprContext.call(this, parser); - this.op = null; // Token; - ExprContext.prototype.copyFrom.call(this, ctx); - return this; -} +ExprContext.prototype.K_AND = function() { + return this.getToken(TelParser.K_AND, 0); +}; -AdditiveExprContext.prototype = Object.create(ExprContext.prototype); -AdditiveExprContext.prototype.constructor = AdditiveExprContext; +ExprContext.prototype.AND = function() { + return this.getToken(TelParser.AND, 0); +}; -TelParser.AdditiveExprContext = AdditiveExprContext; +ExprContext.prototype.K_OR = function() { + return this.getToken(TelParser.K_OR, 0); +}; -AdditiveExprContext.prototype.expr = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(ExprContext); - } else { - return this.getTypedRuleContext(ExprContext,i); - } +ExprContext.prototype.OR = function() { + return this.getToken(TelParser.OR, 0); }; -AdditiveExprContext.prototype.PLUS = function() { - return this.getToken(TelParser.PLUS, 0); +ExprContext.prototype.isNull = function() { + return this.getTypedRuleContext(IsNullContext,0); }; -AdditiveExprContext.prototype.MINUS = function() { - return this.getToken(TelParser.MINUS, 0); +ExprContext.prototype.isNotNull = function() { + return this.getTypedRuleContext(IsNotNullContext,0); }; -AdditiveExprContext.prototype.enterRule = function(listener) { + +ExprContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.enterAdditiveExpr(this); + listener.enterExpr(this); } }; -AdditiveExprContext.prototype.exitRule = function(listener) { +ExprContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitAdditiveExpr(this); + listener.exitExpr(this); } }; -AdditiveExprContext.prototype.accept = function(visitor) { +ExprContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitAdditiveExpr(this); + return visitor.visitExpr(this); } else { return visitor.visitChildren(this); } @@ -548,140 +456,250 @@ TelParser.prototype.expr = function(_p) { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 17; + this.state = 40; this._errHandler.sync(this); - switch(this._input.LA(1)) { - case TelParser.NOT: - localctx = new NotExprContext(this, localctx); - this._ctx = localctx; - _prevctx = localctx; - - this.state = 14; - this.match(TelParser.NOT); - this.state = 15; - this.expr(6); + var la_ = this._interp.adaptivePredict(this._input,2,this._ctx); + switch(la_) { + case 1: + 
this.state = 18; + localctx.unary_operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(((((_la - 23)) & ~0x1f) == 0 && ((1 << (_la - 23)) & ((1 << (TelParser.MINUS - 23)) | (1 << (TelParser.PLUS - 23)) | (1 << (TelParser.K_NOT - 23)))) !== 0))) { + localctx.unary_operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 19; + localctx.right = this.expr(12); + break; + + case 2: + this.state = 20; + localctx.function_name = this.identifierMultipart(); + this.state = 21; + this.match(TelParser.OPEN_PAREN); + this.state = 30; + this._errHandler.sync(this); + _la = this._input.LA(1); + if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.TAXON_OPTIONAL_OPERATOR) | (1 << TelParser.MINUS) | (1 << TelParser.OPEN_PAREN) | (1 << TelParser.PLUS))) !== 0) || ((((_la - 37)) & ~0x1f) == 0 && ((1 << (_la - 37)) & ((1 << (TelParser.K_FALSE - 37)) | (1 << (TelParser.K_NOT - 37)) | (1 << (TelParser.K_NULL - 37)) | (1 << (TelParser.K_TRUE - 37)) | (1 << (TelParser.NUMERIC_LITERAL - 37)) | (1 << (TelParser.DOUBLE_QUOTED_STRING - 37)) | (1 << (TelParser.SINGLE_QUOTED_STRING - 37)) | (1 << (TelParser.IDENTIFIER - 37)))) !== 0)) { + this.state = 22; + localctx.arguments = this.expr(0); + this.state = 27; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===TelParser.COMMA) { + this.state = 23; + this.match(TelParser.COMMA); + this.state = 24; + localctx.arguments = this.expr(0); + this.state = 29; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } + + this.state = 32; + this.match(TelParser.CLOSE_PAREN); break; - case TelParser.INT: - case TelParser.REAL: - case TelParser.TRUE: - case TelParser.FALSE: - case TelParser.WORD: - case TelParser.STRING_CONSTANT: - case TelParser.SINGLE_QUOTED_ELEMENT: - case TelParser.L_BRACKET: - case TelParser.OPTIONAL_TAXON_OPERATOR: - localctx = new AtomExprContext(this, localctx); - this._ctx = localctx; - _prevctx = localctx; - this.state = 16; - this.atom(); + + case 3: + this.state = 34; + this.match(TelParser.OPEN_PAREN); + this.state = 35; + this.expr(0); + this.state = 36; + this.match(TelParser.CLOSE_PAREN); break; - default: - throw new antlr4.error.NoViableAltException(this); + + case 4: + this.state = 38; + this.taxon(); + break; + + case 5: + this.state = 39; + this.literalValue(); + break; + } this._ctx.stop = this._input.LT(-1); - this.state = 36; + this.state = 67; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,3,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,5,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { if(this._parseListeners!==null) { this.triggerExitRuleEvent(); } _prevctx = localctx; - this.state = 34; + this.state = 65; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,2,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,4,this._ctx); switch(la_) { case 1: - localctx = new MultiplicationExprContext(this, new ExprContext(this, _parentctx, _parentState)); + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 19; - if (!( this.precpred(this._ctx, 5))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); + this.state = 42; + if (!( this.precpred(this._ctx, 9))) { + throw new antlr4.error.FailedPredicateException(this, 
"this.precpred(this._ctx, 9)"); } - this.state = 20; - localctx.op = this._input.LT(1); + this.state = 43; + localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!(_la===TelParser.MULT || _la===TelParser.DIV)) { - localctx.op = this._errHandler.recoverInline(this); + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.DIV) | (1 << TelParser.MOD) | (1 << TelParser.MULT))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); } else { this._errHandler.reportMatch(this); this.consume(); } - this.state = 21; - this.expr(6); + this.state = 44; + localctx.right = this.expr(10); break; case 2: - localctx = new AdditiveExprContext(this, new ExprContext(this, _parentctx, _parentState)); + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 22; - if (!( this.precpred(this._ctx, 4))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 4)"); + this.state = 45; + if (!( this.precpred(this._ctx, 8))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); } - this.state = 23; - localctx.op = this._input.LT(1); + this.state = 46; + localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!(_la===TelParser.PLUS || _la===TelParser.MINUS)) { - localctx.op = this._errHandler.recoverInline(this); + if(!(_la===TelParser.MINUS || _la===TelParser.PLUS)) { + localctx.operator = this._errHandler.recoverInline(this); } else { this._errHandler.reportMatch(this); this.consume(); } - this.state = 24; - this.expr(5); + this.state = 47; + localctx.right = this.expr(9); break; case 3: - localctx = new LogicalExprContext(this, new ExprContext(this, _parentctx, _parentState)); + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 25; - if (!( this.precpred(this._ctx, 3))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 3)"); + this.state = 48; + if (!( this.precpred(this._ctx, 7))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); } - this.state = 26; - localctx.op = this._input.LT(1); + this.state = 49; + localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.OR) | (1 << TelParser.AND) | (1 << TelParser.EQ) | (1 << TelParser.NEQ) | (1 << TelParser.GT) | (1 << TelParser.LT) | (1 << TelParser.GTEQ) | (1 << TelParser.LTEQ))) !== 0))) { - localctx.op = this._errHandler.recoverInline(this); + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.GT_EQ) | (1 << TelParser.LT_EQ) | (1 << TelParser.GT) | (1 << TelParser.LT))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); } else { this._errHandler.reportMatch(this); this.consume(); } - this.state = 27; - this.expr(4); + this.state = 50; + localctx.right = this.expr(8); break; case 4: - localctx = new NullTestExprContext(this, new ExprContext(this, _parentctx, _parentState)); + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); - this.state = 28; - if (!( this.precpred(this._ctx, 2))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 2)"); + this.state = 51; + if (!( this.precpred(this._ctx, 6))) { + 
throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); } - this.state = 29; - this.match(TelParser.KW_IS); - this.state = 31; - this._errHandler.sync(this); + this.state = 52; + localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(_la===TelParser.NOT) { - this.state = 30; - this.match(TelParser.NOT); + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.EQ) | (1 << TelParser.NOT_EQ1) | (1 << TelParser.NOT_EQ2) | (1 << TelParser.ASSIGN))) !== 0))) { + localctx.operator = this._errHandler.recoverInline(this); } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 53; + localctx.right = this.expr(7); + break; + + case 5: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); + this.state = 54; + if (!( this.precpred(this._ctx, 5))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); + } + this.state = 55; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(_la===TelParser.AND || _la===TelParser.K_AND)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 56; + localctx.right = this.expr(6); + break; - this.state = 33; - this.match(TelParser.KW_NULL); + case 6: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); + this.state = 57; + if (!( this.precpred(this._ctx, 4))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 4)"); + } + this.state = 58; + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(_la===TelParser.OR || _la===TelParser.K_OR)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } + this.state = 59; + localctx.right = this.expr(5); + break; + + case 7: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, TelParser.RULE_expr); + this.state = 60; + if (!( this.precpred(this._ctx, 10))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); + } + this.state = 63; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,3,this._ctx); + switch(la_) { + case 1: + this.state = 61; + localctx.is_null = this.isNull(); + break; + + case 2: + this.state = 62; + localctx.is_not_null = this.isNotNull(); + break; + + } break; } } - this.state = 38; + this.state = 69; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,3,this._ctx); + _alt = this._interp.adaptivePredict(this._input,5,this._ctx); } } catch( error) { @@ -699,7 +717,7 @@ TelParser.prototype.expr = function(_p) { }; -function AtomContext(parser, parent, invokingState) { +function IsNotNullContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; } @@ -708,275 +726,258 @@ function AtomContext(parser, parent, invokingState) { } antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; - this.ruleIndex = TelParser.RULE_atom; + this.ruleIndex = TelParser.RULE_isNotNull; return this; } -AtomContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -AtomContext.prototype.constructor = AtomContext; 
+IsNotNullContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +IsNotNullContext.prototype.constructor = IsNotNullContext; - - -AtomContext.prototype.copyFrom = function(ctx) { - antlr4.ParserRuleContext.prototype.copyFrom.call(this, ctx); +IsNotNullContext.prototype.K_NOTNULL = function() { + return this.getToken(TelParser.K_NOTNULL, 0); }; - -function FnExprContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); - return this; -} - -FnExprContext.prototype = Object.create(AtomContext.prototype); -FnExprContext.prototype.constructor = FnExprContext; - -TelParser.FnExprContext = FnExprContext; - -FnExprContext.prototype.fn = function() { - return this.getTypedRuleContext(FnContext,0); -}; -FnExprContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterFnExpr(this); - } +IsNotNullContext.prototype.K_IS = function() { + return this.getToken(TelParser.K_IS, 0); }; -FnExprContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitFnExpr(this); - } +IsNotNullContext.prototype.K_NOT = function() { + return this.getToken(TelParser.K_NOT, 0); }; -FnExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitFnExpr(this); - } else { - return visitor.visitChildren(this); - } +IsNotNullContext.prototype.K_NULL = function() { + return this.getToken(TelParser.K_NULL, 0); }; - -function TaxonSlugAtomContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); - return this; -} - -TaxonSlugAtomContext.prototype = Object.create(AtomContext.prototype); -TaxonSlugAtomContext.prototype.constructor = TaxonSlugAtomContext; - -TelParser.TaxonSlugAtomContext = TaxonSlugAtomContext; - -TaxonSlugAtomContext.prototype.taxon = function() { - return this.getTypedRuleContext(TaxonContext,0); -}; -TaxonSlugAtomContext.prototype.enterRule = function(listener) { +IsNotNullContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.enterTaxonSlugAtom(this); + listener.enterIsNotNull(this); } }; -TaxonSlugAtomContext.prototype.exitRule = function(listener) { +IsNotNullContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitTaxonSlugAtom(this); + listener.exitIsNotNull(this); } }; -TaxonSlugAtomContext.prototype.accept = function(visitor) { +IsNotNullContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitTaxonSlugAtom(this); + return visitor.visitIsNotNull(this); } else { return visitor.visitChildren(this); } }; -function BooleanAtomContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); - return this; -} -BooleanAtomContext.prototype = Object.create(AtomContext.prototype); -BooleanAtomContext.prototype.constructor = BooleanAtomContext; -TelParser.BooleanAtomContext = BooleanAtomContext; +TelParser.IsNotNullContext = IsNotNullContext; -BooleanAtomContext.prototype.TRUE = function() { - return this.getToken(TelParser.TRUE, 0); -}; +TelParser.prototype.isNotNull = function() { -BooleanAtomContext.prototype.FALSE = function() { - return this.getToken(TelParser.FALSE, 0); -}; -BooleanAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterBooleanAtom(this); - } -}; + var localctx 
= new IsNotNullContext(this, this._ctx, this.state); + this.enterRule(localctx, 4, TelParser.RULE_isNotNull); + var _la = 0; // Token type + try { + this.state = 79; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,8,this._ctx); + switch(la_) { + case 1: + this.enterOuterAlt(localctx, 1); + this.state = 71; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===TelParser.K_IS) { + this.state = 70; + this.match(TelParser.K_IS); + } -BooleanAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitBooleanAtom(this); - } -}; + this.state = 73; + this.match(TelParser.K_NOTNULL); + break; -BooleanAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitBooleanAtom(this); - } else { - return visitor.visitChildren(this); + case 2: + this.enterOuterAlt(localctx, 2); + this.state = 75; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===TelParser.K_IS) { + this.state = 74; + this.match(TelParser.K_IS); + } + + this.state = 77; + this.match(TelParser.K_NOT); + this.state = 78; + this.match(TelParser.K_NULL); + break; + + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); } + return localctx; }; -function BracketExprContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); +function IsNullContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = TelParser.RULE_isNull; return this; } -BracketExprContext.prototype = Object.create(AtomContext.prototype); -BracketExprContext.prototype.constructor = BracketExprContext; +IsNullContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +IsNullContext.prototype.constructor = IsNullContext; -TelParser.BracketExprContext = BracketExprContext; - -BracketExprContext.prototype.L_BRACKET = function() { - return this.getToken(TelParser.L_BRACKET, 0); +IsNullContext.prototype.K_ISNULL = function() { + return this.getToken(TelParser.K_ISNULL, 0); }; -BracketExprContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); +IsNullContext.prototype.K_IS = function() { + return this.getToken(TelParser.K_IS, 0); }; -BracketExprContext.prototype.R_BRACKET = function() { - return this.getToken(TelParser.R_BRACKET, 0); +IsNullContext.prototype.K_NULL = function() { + return this.getToken(TelParser.K_NULL, 0); }; -BracketExprContext.prototype.enterRule = function(listener) { + +IsNullContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.enterBracketExpr(this); + listener.enterIsNull(this); } }; -BracketExprContext.prototype.exitRule = function(listener) { +IsNullContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitBracketExpr(this); + listener.exitIsNull(this); } }; -BracketExprContext.prototype.accept = function(visitor) { +IsNullContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitBracketExpr(this); + return 
visitor.visitIsNull(this); } else { return visitor.visitChildren(this); } }; -function SingleQuotedAtomContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); - return this; -} - -SingleQuotedAtomContext.prototype = Object.create(AtomContext.prototype); -SingleQuotedAtomContext.prototype.constructor = SingleQuotedAtomContext; -TelParser.SingleQuotedAtomContext = SingleQuotedAtomContext; -SingleQuotedAtomContext.prototype.SINGLE_QUOTED_ELEMENT = function() { - return this.getToken(TelParser.SINGLE_QUOTED_ELEMENT, 0); -}; -SingleQuotedAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterSingleQuotedAtom(this); - } -}; +TelParser.IsNullContext = IsNullContext; -SingleQuotedAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitSingleQuotedAtom(this); - } -}; +TelParser.prototype.isNull = function() { -SingleQuotedAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitSingleQuotedAtom(this); - } else { - return visitor.visitChildren(this); + var localctx = new IsNullContext(this, this._ctx, this.state); + this.enterRule(localctx, 6, TelParser.RULE_isNull); + try { + this.state = 84; + this._errHandler.sync(this); + switch(this._input.LA(1)) { + case TelParser.K_ISNULL: + this.enterOuterAlt(localctx, 1); + this.state = 81; + this.match(TelParser.K_ISNULL); + break; + case TelParser.K_IS: + this.enterOuterAlt(localctx, 2); + this.state = 82; + this.match(TelParser.K_IS); + this.state = 83; + this.match(TelParser.K_NULL); + break; + default: + throw new antlr4.error.NoViableAltException(this); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); } + return localctx; }; -function NumberAtomContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); +function TaxonContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = TelParser.RULE_taxon; + this.namespace = null; // IdentifierMultipartContext + this.slug = null; // IdentifierMultipartContext + this.tag = null; // IdentifierMultipartContext return this; } -NumberAtomContext.prototype = Object.create(AtomContext.prototype); -NumberAtomContext.prototype.constructor = NumberAtomContext; - -TelParser.NumberAtomContext = NumberAtomContext; +TaxonContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +TaxonContext.prototype.constructor = TaxonContext; -NumberAtomContext.prototype.INT = function() { - return this.getToken(TelParser.INT, 0); +TaxonContext.prototype.identifierMultipart = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(IdentifierMultipartContext); + } else { + return this.getTypedRuleContext(IdentifierMultipartContext,i); + } }; -NumberAtomContext.prototype.REAL = function() { - return this.getToken(TelParser.REAL, 0); -}; -NumberAtomContext.prototype.enterRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.enterNumberAtom(this); - } 
+TaxonContext.prototype.TAXON_OPTIONAL_OPERATOR = function() { + return this.getToken(TelParser.TAXON_OPTIONAL_OPERATOR, 0); }; -NumberAtomContext.prototype.exitRule = function(listener) { - if(listener instanceof TelParserListener ) { - listener.exitNumberAtom(this); - } +TaxonContext.prototype.TAXON_NAMESPACE_DELIMITER = function() { + return this.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0); }; -NumberAtomContext.prototype.accept = function(visitor) { - if ( visitor instanceof TelParserVisitor ) { - return visitor.visitNumberAtom(this); - } else { - return visitor.visitChildren(this); - } +TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { + return this.getToken(TelParser.TAXON_TAG_DELIMITER, 0); }; - -function StringConstantAtomContext(parser, ctx) { - AtomContext.call(this, parser); - AtomContext.prototype.copyFrom.call(this, ctx); - return this; -} - -StringConstantAtomContext.prototype = Object.create(AtomContext.prototype); -StringConstantAtomContext.prototype.constructor = StringConstantAtomContext; - -TelParser.StringConstantAtomContext = StringConstantAtomContext; - -StringConstantAtomContext.prototype.STRING_CONSTANT = function() { - return this.getToken(TelParser.STRING_CONSTANT, 0); -}; -StringConstantAtomContext.prototype.enterRule = function(listener) { +TaxonContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.enterStringConstantAtom(this); + listener.enterTaxon(this); } }; -StringConstantAtomContext.prototype.exitRule = function(listener) { +TaxonContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitStringConstantAtom(this); + listener.exitTaxon(this); } }; -StringConstantAtomContext.prototype.accept = function(visitor) { +TaxonContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitStringConstantAtom(this); + return visitor.visitTaxon(this); } else { return visitor.visitChildren(this); } @@ -984,84 +985,44 @@ StringConstantAtomContext.prototype.accept = function(visitor) { -TelParser.AtomContext = AtomContext; -TelParser.prototype.atom = function() { +TelParser.TaxonContext = TaxonContext; + +TelParser.prototype.taxon = function() { - var localctx = new AtomContext(this, this._ctx, this.state); - this.enterRule(localctx, 4, TelParser.RULE_atom); + var localctx = new TaxonContext(this, this._ctx, this.state); + this.enterRule(localctx, 8, TelParser.RULE_taxon); var _la = 0; // Token type try { - this.state = 49; + this.enterOuterAlt(localctx, 1); + this.state = 87; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,4,this._ctx); - switch(la_) { - case 1: - localctx = new BracketExprContext(this, localctx); - this.enterOuterAlt(localctx, 1); - this.state = 39; - this.match(TelParser.L_BRACKET); - this.state = 40; - this.expr(0); - this.state = 41; - this.match(TelParser.R_BRACKET); - break; - - case 2: - localctx = new NumberAtomContext(this, localctx); - this.enterOuterAlt(localctx, 2); - this.state = 43; - _la = this._input.LA(1); - if(!(_la===TelParser.INT || _la===TelParser.REAL)) { - this._errHandler.recoverInline(this); - } - else { - this._errHandler.reportMatch(this); - this.consume(); - } - break; - - case 3: - localctx = new BooleanAtomContext(this, localctx); - this.enterOuterAlt(localctx, 3); - this.state = 44; - _la = this._input.LA(1); - if(!(_la===TelParser.TRUE || _la===TelParser.FALSE)) { - this._errHandler.recoverInline(this); - } - else { - 
this._errHandler.reportMatch(this); - this.consume(); - } - break; - - case 4: - localctx = new SingleQuotedAtomContext(this, localctx); - this.enterOuterAlt(localctx, 4); - this.state = 45; - this.match(TelParser.SINGLE_QUOTED_ELEMENT); - break; - - case 5: - localctx = new StringConstantAtomContext(this, localctx); - this.enterOuterAlt(localctx, 5); - this.state = 46; - this.match(TelParser.STRING_CONSTANT); - break; + _la = this._input.LA(1); + if(_la===TelParser.TAXON_OPTIONAL_OPERATOR) { + this.state = 86; + this.match(TelParser.TAXON_OPTIONAL_OPERATOR); + } - case 6: - localctx = new FnExprContext(this, localctx); - this.enterOuterAlt(localctx, 6); - this.state = 47; - this.fn(); - break; + this.state = 92; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,11,this._ctx); + if(la_===1) { + this.state = 89; + localctx.namespace = this.identifierMultipart(); + this.state = 90; + this.match(TelParser.TAXON_NAMESPACE_DELIMITER); - case 7: - localctx = new TaxonSlugAtomContext(this, localctx); - this.enterOuterAlt(localctx, 7); - this.state = 48; - this.taxon(); - break; + } + this.state = 94; + localctx.slug = this.identifierMultipart(); + this.state = 97; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,12,this._ctx); + if(la_===1) { + this.state = 95; + this.match(TelParser.TAXON_TAG_DELIMITER); + this.state = 96; + localctx.tag = this.identifierMultipart(); } } catch (re) { @@ -1079,7 +1040,7 @@ TelParser.prototype.atom = function() { }; -function FnContext(parser, parent, invokingState) { +function IdentifierMultipartContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; } @@ -1088,63 +1049,53 @@ function FnContext(parser, parent, invokingState) { } antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; - this.ruleIndex = TelParser.RULE_fn; + this.ruleIndex = TelParser.RULE_identifierMultipart; + this.parts = null; // Token return this; } -FnContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -FnContext.prototype.constructor = FnContext; - -FnContext.prototype.WORD = function() { - return this.getToken(TelParser.WORD, 0); -}; - -FnContext.prototype.L_BRACKET = function() { - return this.getToken(TelParser.L_BRACKET, 0); -}; - -FnContext.prototype.R_BRACKET = function() { - return this.getToken(TelParser.R_BRACKET, 0); -}; +IdentifierMultipartContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +IdentifierMultipartContext.prototype.constructor = IdentifierMultipartContext; -FnContext.prototype.expr = function(i) { - if(i===undefined) { - i = null; - } +IdentifierMultipartContext.prototype.IDENTIFIER = function(i) { + if(i===undefined) { + i = null; + } if(i===null) { - return this.getTypedRuleContexts(ExprContext); + return this.getTokens(TelParser.IDENTIFIER); } else { - return this.getTypedRuleContext(ExprContext,i); + return this.getToken(TelParser.IDENTIFIER, i); } }; -FnContext.prototype.FN_PARAMETER_DELIMITER = function(i) { + +IdentifierMultipartContext.prototype.DOT = function(i) { if(i===undefined) { i = null; } if(i===null) { - return this.getTokens(TelParser.FN_PARAMETER_DELIMITER); + return this.getTokens(TelParser.DOT); } else { - return this.getToken(TelParser.FN_PARAMETER_DELIMITER, i); + return this.getToken(TelParser.DOT, i); } }; -FnContext.prototype.enterRule = function(listener) { +IdentifierMultipartContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - 
listener.enterFn(this); + listener.enterIdentifierMultipart(this); } }; -FnContext.prototype.exitRule = function(listener) { +IdentifierMultipartContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitFn(this); + listener.exitIdentifierMultipart(this); } }; -FnContext.prototype.accept = function(visitor) { +IdentifierMultipartContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitFn(this); + return visitor.visitIdentifierMultipart(this); } else { return visitor.visitChildren(this); } @@ -1153,41 +1104,31 @@ FnContext.prototype.accept = function(visitor) { -TelParser.FnContext = FnContext; +TelParser.IdentifierMultipartContext = IdentifierMultipartContext; -TelParser.prototype.fn = function() { +TelParser.prototype.identifierMultipart = function() { - var localctx = new FnContext(this, this._ctx, this.state); - this.enterRule(localctx, 6, TelParser.RULE_fn); - var _la = 0; // Token type + var localctx = new IdentifierMultipartContext(this, this._ctx, this.state); + this.enterRule(localctx, 10, TelParser.RULE_identifierMultipart); try { this.enterOuterAlt(localctx, 1); - this.state = 51; - this.match(TelParser.WORD); - this.state = 52; - this.match(TelParser.L_BRACKET); - this.state = 54; - this._errHandler.sync(this); - _la = this._input.LA(1); - if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) !== 0)) { - this.state = 53; - this.expr(0); - } - - this.state = 60; + this.state = 99; + localctx.parts = this.match(TelParser.IDENTIFIER); + this.state = 104; this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===TelParser.FN_PARAMETER_DELIMITER) { - this.state = 56; - this.match(TelParser.FN_PARAMETER_DELIMITER); - this.state = 57; - this.expr(0); - this.state = 62; + var _alt = this._interp.adaptivePredict(this._input,13,this._ctx) + while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { + if(_alt===1) { + this.state = 100; + this.match(TelParser.DOT); + this.state = 101; + localctx.parts = this.match(TelParser.IDENTIFIER); + } + this.state = 106; this._errHandler.sync(this); - _la = this._input.LA(1); + _alt = this._interp.adaptivePredict(this._input,13,this._ctx); } - this.state = 63; - this.match(TelParser.R_BRACKET); + } catch (re) { if(re instanceof antlr4.error.RecognitionException) { localctx.exception = re; @@ -1203,7 +1144,7 @@ TelParser.prototype.fn = function() { }; -function TaxonContext(parser, parent, invokingState) { +function LiteralValueContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; } @@ -1212,52 +1153,52 @@ function TaxonContext(parser, parent, invokingState) { } antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; - this.ruleIndex = TelParser.RULE_taxon; + this.ruleIndex = TelParser.RULE_literalValue; return this; } -TaxonContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -TaxonContext.prototype.constructor = TaxonContext; +LiteralValueContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +LiteralValueContext.prototype.constructor = LiteralValueContext; -TaxonContext.prototype.WORD = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return 
this.getTokens(TelParser.WORD); - } else { - return this.getToken(TelParser.WORD, i); - } +LiteralValueContext.prototype.NUMERIC_LITERAL = function() { + return this.getToken(TelParser.NUMERIC_LITERAL, 0); }; +LiteralValueContext.prototype.DOUBLE_QUOTED_STRING = function() { + return this.getToken(TelParser.DOUBLE_QUOTED_STRING, 0); +}; -TaxonContext.prototype.OPTIONAL_TAXON_OPERATOR = function() { - return this.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0); +LiteralValueContext.prototype.SINGLE_QUOTED_STRING = function() { + return this.getToken(TelParser.SINGLE_QUOTED_STRING, 0); }; -TaxonContext.prototype.TAXON_NAMESPACE_DELIMITER = function() { - return this.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0); +LiteralValueContext.prototype.K_NULL = function() { + return this.getToken(TelParser.K_NULL, 0); }; -TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { - return this.getToken(TelParser.TAXON_TAG_DELIMITER, 0); +LiteralValueContext.prototype.K_TRUE = function() { + return this.getToken(TelParser.K_TRUE, 0); }; -TaxonContext.prototype.enterRule = function(listener) { +LiteralValueContext.prototype.K_FALSE = function() { + return this.getToken(TelParser.K_FALSE, 0); +}; + +LiteralValueContext.prototype.enterRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.enterTaxon(this); + listener.enterLiteralValue(this); } }; -TaxonContext.prototype.exitRule = function(listener) { +LiteralValueContext.prototype.exitRule = function(listener) { if(listener instanceof TelParserListener ) { - listener.exitTaxon(this); + listener.exitLiteralValue(this); } }; -TaxonContext.prototype.accept = function(visitor) { +LiteralValueContext.prototype.accept = function(visitor) { if ( visitor instanceof TelParserVisitor ) { - return visitor.visitTaxon(this); + return visitor.visitLiteralValue(this); } else { return visitor.visitChildren(this); } @@ -1266,44 +1207,23 @@ TaxonContext.prototype.accept = function(visitor) { -TelParser.TaxonContext = TaxonContext; +TelParser.LiteralValueContext = LiteralValueContext; -TelParser.prototype.taxon = function() { +TelParser.prototype.literalValue = function() { - var localctx = new TaxonContext(this, this._ctx, this.state); - this.enterRule(localctx, 8, TelParser.RULE_taxon); + var localctx = new LiteralValueContext(this, this._ctx, this.state); + this.enterRule(localctx, 12, TelParser.RULE_literalValue); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 66; - this._errHandler.sync(this); + this.state = 107; _la = this._input.LA(1); - if(_la===TelParser.OPTIONAL_TAXON_OPERATOR) { - this.state = 65; - this.match(TelParser.OPTIONAL_TAXON_OPERATOR); + if(!(((((_la - 37)) & ~0x1f) == 0 && ((1 << (_la - 37)) & ((1 << (TelParser.K_FALSE - 37)) | (1 << (TelParser.K_NULL - 37)) | (1 << (TelParser.K_TRUE - 37)) | (1 << (TelParser.NUMERIC_LITERAL - 37)) | (1 << (TelParser.DOUBLE_QUOTED_STRING - 37)) | (1 << (TelParser.SINGLE_QUOTED_STRING - 37)))) !== 0))) { + this._errHandler.recoverInline(this); } - - this.state = 68; - this.match(TelParser.WORD); - this.state = 71; - this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,8,this._ctx); - if(la_===1) { - this.state = 69; - this.match(TelParser.TAXON_NAMESPACE_DELIMITER); - this.state = 70; - this.match(TelParser.WORD); - - } - this.state = 75; - this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); - if(la_===1) { - this.state = 73; - this.match(TelParser.TAXON_TAG_DELIMITER); - this.state = 
74; - this.match(TelParser.WORD); - + else { + this._errHandler.reportMatch(this); + this.consume(); } } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -1332,13 +1252,19 @@ TelParser.prototype.sempred = function(localctx, ruleIndex, predIndex) { TelParser.prototype.expr_sempred = function(localctx, predIndex) { switch(predIndex) { case 0: - return this.precpred(this._ctx, 5); + return this.precpred(this._ctx, 9); case 1: - return this.precpred(this._ctx, 4); + return this.precpred(this._ctx, 8); case 2: - return this.precpred(this._ctx, 3); + return this.precpred(this._ctx, 7); case 3: - return this.precpred(this._ctx, 2); + return this.precpred(this._ctx, 6); + case 4: + return this.precpred(this._ctx, 5); + case 5: + return this.precpred(this._ctx, 4); + case 6: + return this.precpred(this._ctx, 10); default: throw "No predicate with index:" + predIndex; } diff --git a/js-temp/TelParserListener.js b/js-temp/TelParserListener.js index a1ff30f..df607dc 100644 --- a/js-temp/TelParserListener.js +++ b/js-temp/TelParserListener.js @@ -20,138 +20,57 @@ TelParserListener.prototype.exitParse = function(ctx) { }; -// Enter a parse tree produced by TelParser#nullTestExpr. -TelParserListener.prototype.enterNullTestExpr = function(ctx) { +// Enter a parse tree produced by TelParser#expr. +TelParserListener.prototype.enterExpr = function(ctx) { }; -// Exit a parse tree produced by TelParser#nullTestExpr. -TelParserListener.prototype.exitNullTestExpr = function(ctx) { +// Exit a parse tree produced by TelParser#expr. +TelParserListener.prototype.exitExpr = function(ctx) { }; -// Enter a parse tree produced by TelParser#notExpr. -TelParserListener.prototype.enterNotExpr = function(ctx) { +// Enter a parse tree produced by TelParser#isNotNull. +TelParserListener.prototype.enterIsNotNull = function(ctx) { }; -// Exit a parse tree produced by TelParser#notExpr. -TelParserListener.prototype.exitNotExpr = function(ctx) { +// Exit a parse tree produced by TelParser#isNotNull. +TelParserListener.prototype.exitIsNotNull = function(ctx) { }; -// Enter a parse tree produced by TelParser#logicalExpr. -TelParserListener.prototype.enterLogicalExpr = function(ctx) { +// Enter a parse tree produced by TelParser#isNull. +TelParserListener.prototype.enterIsNull = function(ctx) { }; -// Exit a parse tree produced by TelParser#logicalExpr. -TelParserListener.prototype.exitLogicalExpr = function(ctx) { +// Exit a parse tree produced by TelParser#isNull. +TelParserListener.prototype.exitIsNull = function(ctx) { }; -// Enter a parse tree produced by TelParser#multiplicationExpr. -TelParserListener.prototype.enterMultiplicationExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#multiplicationExpr. -TelParserListener.prototype.exitMultiplicationExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#atomExpr. -TelParserListener.prototype.enterAtomExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#atomExpr. -TelParserListener.prototype.exitAtomExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#additiveExpr. -TelParserListener.prototype.enterAdditiveExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#additiveExpr. -TelParserListener.prototype.exitAdditiveExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#bracketExpr. -TelParserListener.prototype.enterBracketExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#bracketExpr. 
-TelParserListener.prototype.exitBracketExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#numberAtom. -TelParserListener.prototype.enterNumberAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#numberAtom. -TelParserListener.prototype.exitNumberAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#booleanAtom. -TelParserListener.prototype.enterBooleanAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#booleanAtom. -TelParserListener.prototype.exitBooleanAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#singleQuotedAtom. -TelParserListener.prototype.enterSingleQuotedAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#singleQuotedAtom. -TelParserListener.prototype.exitSingleQuotedAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#stringConstantAtom. -TelParserListener.prototype.enterStringConstantAtom = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#stringConstantAtom. -TelParserListener.prototype.exitStringConstantAtom = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#fnExpr. -TelParserListener.prototype.enterFnExpr = function(ctx) { -}; - -// Exit a parse tree produced by TelParser#fnExpr. -TelParserListener.prototype.exitFnExpr = function(ctx) { -}; - - -// Enter a parse tree produced by TelParser#taxonSlugAtom. -TelParserListener.prototype.enterTaxonSlugAtom = function(ctx) { +// Enter a parse tree produced by TelParser#taxon. +TelParserListener.prototype.enterTaxon = function(ctx) { }; -// Exit a parse tree produced by TelParser#taxonSlugAtom. -TelParserListener.prototype.exitTaxonSlugAtom = function(ctx) { +// Exit a parse tree produced by TelParser#taxon. +TelParserListener.prototype.exitTaxon = function(ctx) { }; -// Enter a parse tree produced by TelParser#fn. -TelParserListener.prototype.enterFn = function(ctx) { +// Enter a parse tree produced by TelParser#identifierMultipart. +TelParserListener.prototype.enterIdentifierMultipart = function(ctx) { }; -// Exit a parse tree produced by TelParser#fn. -TelParserListener.prototype.exitFn = function(ctx) { +// Exit a parse tree produced by TelParser#identifierMultipart. +TelParserListener.prototype.exitIdentifierMultipart = function(ctx) { }; -// Enter a parse tree produced by TelParser#taxon. -TelParserListener.prototype.enterTaxon = function(ctx) { +// Enter a parse tree produced by TelParser#literalValue. +TelParserListener.prototype.enterLiteralValue = function(ctx) { }; -// Exit a parse tree produced by TelParser#taxon. -TelParserListener.prototype.exitTaxon = function(ctx) { +// Exit a parse tree produced by TelParser#literalValue. +TelParserListener.prototype.exitLiteralValue = function(ctx) { }; diff --git a/js-temp/TelParserVisitor.js b/js-temp/TelParserVisitor.js index a32da69..753430d 100644 --- a/js-temp/TelParserVisitor.js +++ b/js-temp/TelParserVisitor.js @@ -18,92 +18,38 @@ TelParserVisitor.prototype.visitParse = function(ctx) { }; -// Visit a parse tree produced by TelParser#nullTestExpr. -TelParserVisitor.prototype.visitNullTestExpr = function(ctx) { +// Visit a parse tree produced by TelParser#expr. +TelParserVisitor.prototype.visitExpr = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#notExpr. -TelParserVisitor.prototype.visitNotExpr = function(ctx) { +// Visit a parse tree produced by TelParser#isNotNull. 
+TelParserVisitor.prototype.visitIsNotNull = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#logicalExpr. -TelParserVisitor.prototype.visitLogicalExpr = function(ctx) { +// Visit a parse tree produced by TelParser#isNull. +TelParserVisitor.prototype.visitIsNull = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#multiplicationExpr. -TelParserVisitor.prototype.visitMultiplicationExpr = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#atomExpr. -TelParserVisitor.prototype.visitAtomExpr = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#additiveExpr. -TelParserVisitor.prototype.visitAdditiveExpr = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#bracketExpr. -TelParserVisitor.prototype.visitBracketExpr = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#numberAtom. -TelParserVisitor.prototype.visitNumberAtom = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#booleanAtom. -TelParserVisitor.prototype.visitBooleanAtom = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#singleQuotedAtom. -TelParserVisitor.prototype.visitSingleQuotedAtom = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#stringConstantAtom. -TelParserVisitor.prototype.visitStringConstantAtom = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#fnExpr. -TelParserVisitor.prototype.visitFnExpr = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by TelParser#taxonSlugAtom. -TelParserVisitor.prototype.visitTaxonSlugAtom = function(ctx) { +// Visit a parse tree produced by TelParser#taxon. +TelParserVisitor.prototype.visitTaxon = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#fn. -TelParserVisitor.prototype.visitFn = function(ctx) { +// Visit a parse tree produced by TelParser#identifierMultipart. +TelParserVisitor.prototype.visitIdentifierMultipart = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by TelParser#taxon. -TelParserVisitor.prototype.visitTaxon = function(ctx) { +// Visit a parse tree produced by TelParser#literalValue. 
+TelParserVisitor.prototype.visitLiteralValue = function(ctx) { return this.visitChildren(ctx); }; diff --git a/python/src/tel_grammar/__init__.py b/python/src/pql_grammar/__init__.py similarity index 100% rename from python/src/tel_grammar/__init__.py rename to python/src/pql_grammar/__init__.py diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py new file mode 100644 index 0000000..d7f4023 --- /dev/null +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -0,0 +1,349 @@ +# Generated from grammar/PqlLexer.g4 by ANTLR 4.8 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2:") + buf.write("\u01fd\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") + buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") + buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") + buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") + buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") + buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") + buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") + buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") + buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") + buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") + buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\3\2") + buf.write("\3\2\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3") + buf.write("\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3") + buf.write("\13\3\f\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20") + buf.write("\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26") + buf.write("\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33") + buf.write("\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37") + buf.write("\3\37\3 \3 \3 \3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"") + buf.write("\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3&\3&\3") + buf.write("&\3&\3&\3&\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3(\3)") + buf.write("\3)\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3") + buf.write(",\3,\3,\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3/\6/\u0143\n") + buf.write("/\r/\16/\u0144\3/\3/\7/\u0149\n/\f/\16/\u014c\13/\5/\u014e") + buf.write("\n/\3/\3/\5/\u0152\n/\3/\6/\u0155\n/\r/\16/\u0156\5/\u0159") + buf.write("\n/\3/\3/\6/\u015d\n/\r/\16/\u015e\3/\3/\5/\u0163\n/\3") + buf.write("/\6/\u0166\n/\r/\16/\u0167\5/\u016a\n/\5/\u016c\n/\3\60") + buf.write("\3\60\3\61\3\61\3\61\3\61\7\61\u0174\n\61\f\61\16\61\u0177") + buf.write("\13\61\3\61\3\61\3\62\3\62\3\62\3\62\7\62\u017f\n\62\f") + buf.write("\62\16\62\u0182\13\62\3\62\3\62\3\63\3\63\3\64\3\64\3") + buf.write("\64\3\64\7\64\u018c\n\64\f\64\16\64\u018f\13\64\3\64\3") + buf.write("\64\3\65\3\65\3\65\3\65\7\65\u0197\n\65\f\65\16\65\u019a") + buf.write("\13\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\5\66\u01a3\n") + buf.write("\66\3\66\7\66\u01a6\n\66\f\66\16\66\u01a9\13\66\3\66\3") + buf.write("\66\3\67\3\67\3\67\3\67\7\67\u01b1\n\67\f\67\16\67\u01b4") + buf.write("\13\67\3\67\3\67\3\67\5\67\u01b9\n\67\3\67\3\67\38\38") + buf.write("\38\38\39\39\79\u01c3\n9\f9\169\u01c6\139\3:\3:\3;\3;") + buf.write("\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3") + buf.write("D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3") + 
buf.write("M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3T\3T\3\u01b2\2") + buf.write("U\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31") + buf.write("\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31") + buf.write("\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O") + buf.write(")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s\2") + buf.write("u\2w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087\2") + buf.write("\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095") + buf.write("\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f\2\u00a1\2\u00a3") + buf.write("\2\u00a5\2\u00a7\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17") + buf.write("\17\5\2\13\r\17\17\"\"\5\2C\\aac|\6\2\62;C\\aac|\3\2\62") + buf.write(";\4\2CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2") + buf.write("IIii\4\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4") + buf.write("\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVv") + buf.write("v\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2") + buf.write("\u01fa\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2") + buf.write("\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2") + buf.write("\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33") + buf.write("\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2") + buf.write("\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2") + buf.write("\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2") + buf.write("\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2") + buf.write("\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3") + buf.write("\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S") + buf.write("\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2") + buf.write("]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2") + buf.write("\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2") + buf.write("\2\2q\3\2\2\2\3\u00a9\3\2\2\2\5\u00ab\3\2\2\2\7\u00ad") + buf.write("\3\2\2\2\t\u00b0\3\2\2\2\13\u00b3\3\2\2\2\r\u00b6\3\2") + buf.write("\2\2\17\u00b9\3\2\2\2\21\u00bc\3\2\2\2\23\u00bf\3\2\2") + buf.write("\2\25\u00c2\3\2\2\2\27\u00c5\3\2\2\2\31\u00c8\3\2\2\2") + buf.write("\33\u00ca\3\2\2\2\35\u00cc\3\2\2\2\37\u00ce\3\2\2\2!\u00d0") + buf.write("\3\2\2\2#\u00d2\3\2\2\2%\u00d4\3\2\2\2\'\u00d6\3\2\2\2") + buf.write(")\u00d8\3\2\2\2+\u00da\3\2\2\2-\u00dc\3\2\2\2/\u00de\3") + buf.write("\2\2\2\61\u00e0\3\2\2\2\63\u00e2\3\2\2\2\65\u00e4\3\2") + buf.write("\2\2\67\u00e6\3\2\2\29\u00e8\3\2\2\2;\u00ea\3\2\2\2=\u00ee") + buf.write("\3\2\2\2?\u00f2\3\2\2\2A\u00f5\3\2\2\2C\u00fa\3\2\2\2") + buf.write("E\u0100\3\2\2\2G\u0103\3\2\2\2I\u010a\3\2\2\2K\u010f\3") + buf.write("\2\2\2M\u0115\3\2\2\2O\u0119\3\2\2\2Q\u0121\3\2\2\2S\u0126") + buf.write("\3\2\2\2U\u0129\3\2\2\2W\u012f\3\2\2\2Y\u0136\3\2\2\2") + buf.write("[\u013b\3\2\2\2]\u016b\3\2\2\2_\u016d\3\2\2\2a\u016f\3") + buf.write("\2\2\2c\u017a\3\2\2\2e\u0185\3\2\2\2g\u0187\3\2\2\2i\u0192") + buf.write("\3\2\2\2k\u01a2\3\2\2\2m\u01ac\3\2\2\2o\u01bc\3\2\2\2") + buf.write("q\u01c0\3\2\2\2s\u01c7\3\2\2\2u\u01c9\3\2\2\2w\u01cb\3") + buf.write("\2\2\2y\u01cd\3\2\2\2{\u01cf\3\2\2\2}\u01d1\3\2\2\2\177") + buf.write("\u01d3\3\2\2\2\u0081\u01d5\3\2\2\2\u0083\u01d7\3\2\2\2") + buf.write("\u0085\u01d9\3\2\2\2\u0087\u01db\3\2\2\2\u0089\u01dd\3") + buf.write("\2\2\2\u008b\u01df\3\2\2\2\u008d\u01e1\3\2\2\2\u008f\u01e3") + buf.write("\3\2\2\2\u0091\u01e5\3\2\2\2\u0093\u01e7\3\2\2\2\u0095") + buf.write("\u01e9\3\2\2\2\u0097\u01eb\3\2\2\2\u0099\u01ed\3\2\2\2") + buf.write("\u009b\u01ef\3\2\2\2\u009d\u01f1\3\2\2\2\u009f\u01f3\3") + 
buf.write("\2\2\2\u00a1\u01f5\3\2\2\2\u00a3\u01f7\3\2\2\2\u00a5\u01f9") + buf.write("\3\2\2\2\u00a7\u01fb\3\2\2\2\u00a9\u00aa\7<\2\2\u00aa") + buf.write("\4\3\2\2\2\u00ab\u00ac\7A\2\2\u00ac\6\3\2\2\2\u00ad\u00ae") + buf.write("\7(\2\2\u00ae\u00af\7(\2\2\u00af\b\3\2\2\2\u00b0\u00b1") + buf.write("\7?\2\2\u00b1\u00b2\7?\2\2\u00b2\n\3\2\2\2\u00b3\u00b4") + buf.write("\7@\2\2\u00b4\u00b5\7?\2\2\u00b5\f\3\2\2\2\u00b6\u00b7") + buf.write("\7>\2\2\u00b7\u00b8\7?\2\2\u00b8\16\3\2\2\2\u00b9\u00ba") + buf.write("\7#\2\2\u00ba\u00bb\7?\2\2\u00bb\20\3\2\2\2\u00bc\u00bd") + buf.write("\7>\2\2\u00bd\u00be\7@\2\2\u00be\22\3\2\2\2\u00bf\u00c0") + buf.write("\7~\2\2\u00c0\u00c1\7~\2\2\u00c1\24\3\2\2\2\u00c2\u00c3") + buf.write("\7>\2\2\u00c3\u00c4\7>\2\2\u00c4\26\3\2\2\2\u00c5\u00c6") + buf.write("\7@\2\2\u00c6\u00c7\7@\2\2\u00c7\30\3\2\2\2\u00c8\u00c9") + buf.write("\7(\2\2\u00c9\32\3\2\2\2\u00ca\u00cb\7?\2\2\u00cb\34\3") + buf.write("\2\2\2\u00cc\u00cd\7+\2\2\u00cd\36\3\2\2\2\u00ce\u00cf") + buf.write("\7.\2\2\u00cf \3\2\2\2\u00d0\u00d1\7\60\2\2\u00d1\"\3") + buf.write("\2\2\2\u00d2\u00d3\7\61\2\2\u00d3$\3\2\2\2\u00d4\u00d5") + buf.write("\7@\2\2\u00d5&\3\2\2\2\u00d6\u00d7\7>\2\2\u00d7(\3\2\2") + buf.write("\2\u00d8\u00d9\7/\2\2\u00d9*\3\2\2\2\u00da\u00db\7\'\2") + buf.write("\2\u00db,\3\2\2\2\u00dc\u00dd\7*\2\2\u00dd.\3\2\2\2\u00de") + buf.write("\u00df\7~\2\2\u00df\60\3\2\2\2\u00e0\u00e1\7-\2\2\u00e1") + buf.write("\62\3\2\2\2\u00e2\u00e3\7=\2\2\u00e3\64\3\2\2\2\u00e4") + buf.write("\u00e5\7,\2\2\u00e5\66\3\2\2\2\u00e6\u00e7\7\u0080\2\2") + buf.write("\u00e78\3\2\2\2\u00e8\u00e9\7a\2\2\u00e9:\3\2\2\2\u00ea") + buf.write("\u00eb\5u;\2\u00eb\u00ec\5\u008fH\2\u00ec\u00ed\5{>\2") + buf.write("\u00ed<\3\2\2\2\u00ee\u00ef\5u;\2\u00ef\u00f0\5\u0099") + buf.write("M\2\u00f0\u00f1\5y=\2\u00f1>\3\2\2\2\u00f2\u00f3\5w<\2") + buf.write("\u00f3\u00f4\5\u00a5S\2\u00f4@\3\2\2\2\u00f5\u00f6\5{") + buf.write(">\2\u00f6\u00f7\5}?\2\u00f7\u00f8\5\u0099M\2\u00f8\u00f9") + buf.write("\5y=\2\u00f9B\3\2\2\2\u00fa\u00fb\5\177@\2\u00fb\u00fc") + buf.write("\5u;\2\u00fc\u00fd\5\u008bF\2\u00fd\u00fe\5\u0099M\2\u00fe") + buf.write("\u00ff\5}?\2\u00ffD\3\2\2\2\u0100\u0101\5\u0085C\2\u0101") + buf.write("\u0102\5\u0099M\2\u0102F\3\2\2\2\u0103\u0104\5\u0085C") + buf.write("\2\u0104\u0105\5\u0099M\2\u0105\u0106\5\u008fH\2\u0106") + buf.write("\u0107\5\u009dO\2\u0107\u0108\5\u008bF\2\u0108\u0109\5") + buf.write("\u008bF\2\u0109H\3\2\2\2\u010a\u010b\5\u008bF\2\u010b") + buf.write("\u010c\5\u0085C\2\u010c\u010d\5\u0089E\2\u010d\u010e\5") + buf.write("}?\2\u010eJ\3\2\2\2\u010f\u0110\5\u008bF\2\u0110\u0111") + buf.write("\5\u0085C\2\u0111\u0112\5\u008dG\2\u0112\u0113\5\u0085") + buf.write("C\2\u0113\u0114\5\u009bN\2\u0114L\3\2\2\2\u0115\u0116") + buf.write("\5\u008fH\2\u0116\u0117\5\u0091I\2\u0117\u0118\5\u009b") + buf.write("N\2\u0118N\3\2\2\2\u0119\u011a\5\u008fH\2\u011a\u011b") + buf.write("\5\u0091I\2\u011b\u011c\5\u009bN\2\u011c\u011d\5\u008f") + buf.write("H\2\u011d\u011e\5\u009dO\2\u011e\u011f\5\u008bF\2\u011f") + buf.write("\u0120\5\u008bF\2\u0120P\3\2\2\2\u0121\u0122\5\u008fH") + buf.write("\2\u0122\u0123\5\u009dO\2\u0123\u0124\5\u008bF\2\u0124") + buf.write("\u0125\5\u008bF\2\u0125R\3\2\2\2\u0126\u0127\5\u0091I") + buf.write("\2\u0127\u0128\5\u0097L\2\u0128T\3\2\2\2\u0129\u012a\5") + buf.write("\u0091I\2\u012a\u012b\5\u0097L\2\u012b\u012c\5{>\2\u012c") + buf.write("\u012d\5}?\2\u012d\u012e\5\u0097L\2\u012eV\3\2\2\2\u012f") + buf.write("\u0130\5\u0099M\2\u0130\u0131\5}?\2\u0131\u0132\5\u008b") + 
buf.write("F\2\u0132\u0133\5}?\2\u0133\u0134\5y=\2\u0134\u0135\5") + buf.write("\u009bN\2\u0135X\3\2\2\2\u0136\u0137\5\u009bN\2\u0137") + buf.write("\u0138\5\u0097L\2\u0138\u0139\5\u009dO\2\u0139\u013a\5") + buf.write("}?\2\u013aZ\3\2\2\2\u013b\u013c\5\u00a1Q\2\u013c\u013d") + buf.write("\5\u0083B\2\u013d\u013e\5}?\2\u013e\u013f\5\u0097L\2\u013f") + buf.write("\u0140\5}?\2\u0140\\\3\2\2\2\u0141\u0143\5s:\2\u0142\u0141") + buf.write("\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0142\3\2\2\2\u0144") + buf.write("\u0145\3\2\2\2\u0145\u014d\3\2\2\2\u0146\u014a\7\60\2") + buf.write("\2\u0147\u0149\5s:\2\u0148\u0147\3\2\2\2\u0149\u014c\3") + buf.write("\2\2\2\u014a\u0148\3\2\2\2\u014a\u014b\3\2\2\2\u014b\u014e") + buf.write("\3\2\2\2\u014c\u014a\3\2\2\2\u014d\u0146\3\2\2\2\u014d") + buf.write("\u014e\3\2\2\2\u014e\u0158\3\2\2\2\u014f\u0151\5}?\2\u0150") + buf.write("\u0152\t\2\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2\2\2") + buf.write("\u0152\u0154\3\2\2\2\u0153\u0155\5s:\2\u0154\u0153\3\2") + buf.write("\2\2\u0155\u0156\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157") + buf.write("\3\2\2\2\u0157\u0159\3\2\2\2\u0158\u014f\3\2\2\2\u0158") + buf.write("\u0159\3\2\2\2\u0159\u016c\3\2\2\2\u015a\u015c\7\60\2") + buf.write("\2\u015b\u015d\5s:\2\u015c\u015b\3\2\2\2\u015d\u015e\3") + buf.write("\2\2\2\u015e\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0169") + buf.write("\3\2\2\2\u0160\u0162\5}?\2\u0161\u0163\t\2\2\2\u0162\u0161") + buf.write("\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u0165\3\2\2\2\u0164") + buf.write("\u0166\5s:\2\u0165\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167") + buf.write("\u0165\3\2\2\2\u0167\u0168\3\2\2\2\u0168\u016a\3\2\2\2") + buf.write("\u0169\u0160\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016c\3") + buf.write("\2\2\2\u016b\u0142\3\2\2\2\u016b\u015a\3\2\2\2\u016c^") + buf.write("\3\2\2\2\u016d\u016e\5a\61\2\u016e`\3\2\2\2\u016f\u0175") + buf.write("\7$\2\2\u0170\u0171\7^\2\2\u0171\u0174\7$\2\2\u0172\u0174") + buf.write("\n\3\2\2\u0173\u0170\3\2\2\2\u0173\u0172\3\2\2\2\u0174") + buf.write("\u0177\3\2\2\2\u0175\u0173\3\2\2\2\u0175\u0176\3\2\2\2") + buf.write("\u0176\u0178\3\2\2\2\u0177\u0175\3\2\2\2\u0178\u0179\7") + buf.write("$\2\2\u0179b\3\2\2\2\u017a\u0180\7$\2\2\u017b\u017c\7") + buf.write("$\2\2\u017c\u017f\7$\2\2\u017d\u017f\n\3\2\2\u017e\u017b") + buf.write("\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2\2\2\u0180") + buf.write("\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0183\3\2\2\2") + buf.write("\u0182\u0180\3\2\2\2\u0183\u0184\7$\2\2\u0184d\3\2\2\2") + buf.write("\u0185\u0186\5g\64\2\u0186f\3\2\2\2\u0187\u018d\7)\2\2") + buf.write("\u0188\u0189\7^\2\2\u0189\u018c\7)\2\2\u018a\u018c\n\4") + buf.write("\2\2\u018b\u0188\3\2\2\2\u018b\u018a\3\2\2\2\u018c\u018f") + buf.write("\3\2\2\2\u018d\u018b\3\2\2\2\u018d\u018e\3\2\2\2\u018e") + buf.write("\u0190\3\2\2\2\u018f\u018d\3\2\2\2\u0190\u0191\7)\2\2") + buf.write("\u0191h\3\2\2\2\u0192\u0198\7)\2\2\u0193\u0194\7)\2\2") + buf.write("\u0194\u0197\7)\2\2\u0195\u0197\n\4\2\2\u0196\u0193\3") + buf.write("\2\2\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196") + buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\3\2\2\2\u019a") + buf.write("\u0198\3\2\2\2\u019b\u019c\7)\2\2\u019cj\3\2\2\2\u019d") + buf.write("\u019e\7/\2\2\u019e\u01a3\7/\2\2\u019f\u01a0\7\61\2\2") + buf.write("\u01a0\u01a3\7\61\2\2\u01a1\u01a3\7%\2\2\u01a2\u019d\3") + buf.write("\2\2\2\u01a2\u019f\3\2\2\2\u01a2\u01a1\3\2\2\2\u01a3\u01a7") + buf.write("\3\2\2\2\u01a4\u01a6\n\5\2\2\u01a5\u01a4\3\2\2\2\u01a6") + buf.write("\u01a9\3\2\2\2\u01a7\u01a5\3\2\2\2\u01a7\u01a8\3\2\2\2") + 
buf.write("\u01a8\u01aa\3\2\2\2\u01a9\u01a7\3\2\2\2\u01aa\u01ab\b") + buf.write("\66\2\2\u01abl\3\2\2\2\u01ac\u01ad\7\61\2\2\u01ad\u01ae") + buf.write("\7,\2\2\u01ae\u01b2\3\2\2\2\u01af\u01b1\13\2\2\2\u01b0") + buf.write("\u01af\3\2\2\2\u01b1\u01b4\3\2\2\2\u01b2\u01b3\3\2\2\2") + buf.write("\u01b2\u01b0\3\2\2\2\u01b3\u01b8\3\2\2\2\u01b4\u01b2\3") + buf.write("\2\2\2\u01b5\u01b6\7,\2\2\u01b6\u01b9\7\61\2\2\u01b7\u01b9") + buf.write("\7\2\2\3\u01b8\u01b5\3\2\2\2\u01b8\u01b7\3\2\2\2\u01b9") + buf.write("\u01ba\3\2\2\2\u01ba\u01bb\b\67\2\2\u01bbn\3\2\2\2\u01bc") + buf.write("\u01bd\t\6\2\2\u01bd\u01be\3\2\2\2\u01be\u01bf\b8\2\2") + buf.write("\u01bfp\3\2\2\2\u01c0\u01c4\t\7\2\2\u01c1\u01c3\t\b\2") + buf.write("\2\u01c2\u01c1\3\2\2\2\u01c3\u01c6\3\2\2\2\u01c4\u01c2") + buf.write("\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5r\3\2\2\2\u01c6\u01c4") + buf.write("\3\2\2\2\u01c7\u01c8\t\t\2\2\u01c8t\3\2\2\2\u01c9\u01ca") + buf.write("\t\n\2\2\u01cav\3\2\2\2\u01cb\u01cc\t\13\2\2\u01ccx\3") + buf.write("\2\2\2\u01cd\u01ce\t\f\2\2\u01cez\3\2\2\2\u01cf\u01d0") + buf.write("\t\r\2\2\u01d0|\3\2\2\2\u01d1\u01d2\t\16\2\2\u01d2~\3") + buf.write("\2\2\2\u01d3\u01d4\t\17\2\2\u01d4\u0080\3\2\2\2\u01d5") + buf.write("\u01d6\t\20\2\2\u01d6\u0082\3\2\2\2\u01d7\u01d8\t\21\2") + buf.write("\2\u01d8\u0084\3\2\2\2\u01d9\u01da\t\22\2\2\u01da\u0086") + buf.write("\3\2\2\2\u01db\u01dc\t\23\2\2\u01dc\u0088\3\2\2\2\u01dd") + buf.write("\u01de\t\24\2\2\u01de\u008a\3\2\2\2\u01df\u01e0\t\25\2") + buf.write("\2\u01e0\u008c\3\2\2\2\u01e1\u01e2\t\26\2\2\u01e2\u008e") + buf.write("\3\2\2\2\u01e3\u01e4\t\27\2\2\u01e4\u0090\3\2\2\2\u01e5") + buf.write("\u01e6\t\30\2\2\u01e6\u0092\3\2\2\2\u01e7\u01e8\t\31\2") + buf.write("\2\u01e8\u0094\3\2\2\2\u01e9\u01ea\t\32\2\2\u01ea\u0096") + buf.write("\3\2\2\2\u01eb\u01ec\t\33\2\2\u01ec\u0098\3\2\2\2\u01ed") + buf.write("\u01ee\t\34\2\2\u01ee\u009a\3\2\2\2\u01ef\u01f0\t\35\2") + buf.write("\2\u01f0\u009c\3\2\2\2\u01f1\u01f2\t\36\2\2\u01f2\u009e") + buf.write("\3\2\2\2\u01f3\u01f4\t\37\2\2\u01f4\u00a0\3\2\2\2\u01f5") + buf.write("\u01f6\t \2\2\u01f6\u00a2\3\2\2\2\u01f7\u01f8\t!\2\2\u01f8") + buf.write("\u00a4\3\2\2\2\u01f9\u01fa\t\"\2\2\u01fa\u00a6\3\2\2\2") + buf.write("\u01fb\u01fc\t#\2\2\u01fc\u00a8\3\2\2\2\33\2\u0144\u014a") + buf.write("\u014d\u0151\u0156\u0158\u015e\u0162\u0167\u0169\u016b") + buf.write("\u0173\u0175\u017e\u0180\u018b\u018d\u0196\u0198\u01a2") + buf.write("\u01a7\u01b2\u01b8\u01c4\3\2\3\2") + return buf.getvalue() + + +class PqlLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + TAXON_TAG_DELIMITER = 1 + TAXON_OPTIONAL_OPERATOR = 2 + AND = 3 + EQ = 4 + GT_EQ = 5 + LT_EQ = 6 + NOT_EQ1 = 7 + NOT_EQ2 = 8 + OR = 9 + SHIFT_LEFT = 10 + SHIFT_RIGHT = 11 + AMP = 12 + ASSIGN = 13 + CLOSE_PAREN = 14 + COMMA = 15 + DOT = 16 + FORWARD_SLASH = 17 + GT = 18 + LT = 19 + MINUS = 20 + MOD = 21 + OPEN_PAREN = 22 + PIPE = 23 + PLUS = 24 + SCOL = 25 + STAR = 26 + TILDE = 27 + UNDER = 28 + K_AND = 29 + K_ASC = 30 + K_BY = 31 + K_DESC = 32 + K_FALSE = 33 + K_IS = 34 + K_ISNULL = 35 + K_LIKE = 36 + K_LIMIT = 37 + K_NOT = 38 + K_NOTNULL = 39 + K_NULL = 40 + K_OR = 41 + K_ORDER = 42 + K_SELECT = 43 + K_TRUE = 44 + K_WHERE = 45 + NUMERIC_LITERAL = 46 + DOUBLE_QUOTED_STRING = 47 + DOUBLE_QUOTED_STRING_TEL = 48 + DOUBLE_QUOTED_STRING_SQL = 49 + SINGLE_QUOTED_STRING = 50 + SINGLE_QUOTED_STRING_TEL = 51 + SINGLE_QUOTED_STRING_SQL = 52 + SINGLE_LINE_COMMENT = 53 + MULTILINE_COMMENT = 54 + SPACES = 55 + WORD = 
56 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "':'", "'?'", "'&&'", "'=='", "'>='", "'<='", "'!='", "'<>'", + "'||'", "'<<'", "'>>'", "'&'", "'='", "')'", "','", "'.'", "'/'", + "'>'", "'<'", "'-'", "'%'", "'('", "'|'", "'+'", "';'", "'*'", + "'~'", "'_'" ] + + symbolicNames = [ "", + "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", "AND", "EQ", + "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", + "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", "DOT", + "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", + "PLUS", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", "K_SELECT", + "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ] + + ruleNames = [ "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", "AND", + "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", + "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", + "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", + "PIPE", "PLUS", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", + "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", + "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", + "WORD", "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", + "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", + "T", "U", "V", "W", "X", "Y", "Z" ] + + grammarFileName = "PqlLexer.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.8") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None + + diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py new file mode 100644 index 0000000..a5c7d16 --- /dev/null +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -0,0 +1,1482 @@ +# Generated from grammar/PqlParser.g4 by ANTLR 4.8 +# encoding: utf-8 +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3:") + buf.write("\u00b2\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") + buf.write("\t\16\4\17\t\17\3\2\3\2\3\2\3\3\7\3#\n\3\f\3\16\3&\13") + buf.write("\3\3\3\3\3\3\4\7\4+\n\4\f\4\16\4.\13\4\3\4\3\4\6\4\62") + buf.write("\n\4\r\4\16\4\63\3\4\7\4\67\n\4\f\4\16\4:\13\4\3\4\7\4") + buf.write("=\n\4\f\4\16\4@\13\4\3\5\3\5\3\6\3\6\3\6\5\6G\n\6\3\6") + buf.write("\5\6J\n\6\3\6\5\6M\n\6\3\7\3\7\3\7\7\7R\n\7\f\7\16\7U") + buf.write("\13\7\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\7\t_\n\t\f\t\16") + buf.write("\tb\13\t\3\n\3\n\5\nf\n\n\3\13\3\13\3\13\3\f\3\f\3\f\3") + 
buf.write("\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\7\fx\n\f\f\f\16") + buf.write("\f{\13\f\5\f}\n\f\3\f\3\f\3\f\5\f\u0082\n\f\3\f\3\f\3") + buf.write("\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f") + buf.write("\3\f\3\f\7\f\u0096\n\f\f\f\16\f\u0099\13\f\3\r\5\r\u009c") + buf.write("\n\r\3\r\3\r\3\r\5\r\u00a1\n\r\3\r\3\r\3\r\5\r\u00a6\n") + buf.write("\r\3\16\3\16\3\16\7\16\u00ab\n\16\f\16\16\16\u00ae\13") + buf.write("\16\3\17\3\17\3\17\2\3\26\20\2\4\6\b\n\f\16\20\22\24\26") + buf.write("\30\32\34\2\13\4\2 \"\"\5\2\26\26\32\32((\5\2\23\23\27") + buf.write("\27\34\34\4\2\26\26\32\32\4\2\7\b\24\25\6\2\6\6\t\n\17") + buf.write("\17$$\4\2\5\5\37\37\4\2\13\13++\7\2##**..\60\61\64\64") + buf.write("\2\u00be\2\36\3\2\2\2\4$\3\2\2\2\6,\3\2\2\2\bA\3\2\2\2") + buf.write("\nC\3\2\2\2\fN\3\2\2\2\16V\3\2\2\2\20Y\3\2\2\2\22c\3\2") + buf.write("\2\2\24g\3\2\2\2\26\u0081\3\2\2\2\30\u009b\3\2\2\2\32") + buf.write("\u00a7\3\2\2\2\34\u00af\3\2\2\2\36\37\5\26\f\2\37 \7\2") + buf.write("\2\3 \3\3\2\2\2!#\5\6\4\2\"!\3\2\2\2#&\3\2\2\2$\"\3\2") + buf.write("\2\2$%\3\2\2\2%\'\3\2\2\2&$\3\2\2\2\'(\7\2\2\3(\5\3\2") + buf.write("\2\2)+\7\33\2\2*)\3\2\2\2+.\3\2\2\2,*\3\2\2\2,-\3\2\2") + buf.write("\2-/\3\2\2\2.,\3\2\2\2/8\5\b\5\2\60\62\7\33\2\2\61\60") + buf.write("\3\2\2\2\62\63\3\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64") + buf.write("\65\3\2\2\2\65\67\5\b\5\2\66\61\3\2\2\2\67:\3\2\2\28\66") + buf.write("\3\2\2\289\3\2\2\29>\3\2\2\2:8\3\2\2\2;=\7\33\2\2<;\3") + buf.write("\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?\7\3\2\2\2@>\3\2") + buf.write("\2\2AB\5\n\6\2B\t\3\2\2\2CD\7-\2\2DF\5\f\7\2EG\5\16\b") + buf.write("\2FE\3\2\2\2FG\3\2\2\2GI\3\2\2\2HJ\5\20\t\2IH\3\2\2\2") + buf.write("IJ\3\2\2\2JL\3\2\2\2KM\5\24\13\2LK\3\2\2\2LM\3\2\2\2M") + buf.write("\13\3\2\2\2NS\5\26\f\2OP\7\21\2\2PR\5\26\f\2QO\3\2\2\2") + buf.write("RU\3\2\2\2SQ\3\2\2\2ST\3\2\2\2T\r\3\2\2\2US\3\2\2\2VW") + buf.write("\7/\2\2WX\5\26\f\2X\17\3\2\2\2YZ\7,\2\2Z[\7!\2\2[`\5\22") + buf.write("\n\2\\]\7\21\2\2]_\5\22\n\2^\\\3\2\2\2_b\3\2\2\2`^\3\2") + buf.write("\2\2`a\3\2\2\2a\21\3\2\2\2b`\3\2\2\2ce\5\26\f\2df\t\2") + buf.write("\2\2ed\3\2\2\2ef\3\2\2\2f\23\3\2\2\2gh\7\'\2\2hi\5\26") + buf.write("\f\2i\25\3\2\2\2jk\b\f\1\2kl\t\3\2\2l\u0082\5\26\f\rm") + buf.write("n\7\30\2\2no\5\26\f\2op\7\20\2\2p\u0082\3\2\2\2q\u0082") + buf.write("\5\34\17\2rs\5\32\16\2s|\7\30\2\2ty\5\26\f\2uv\7\21\2") + buf.write("\2vx\5\26\f\2wu\3\2\2\2x{\3\2\2\2yw\3\2\2\2yz\3\2\2\2") + buf.write("z}\3\2\2\2{y\3\2\2\2|t\3\2\2\2|}\3\2\2\2}~\3\2\2\2~\177") + buf.write("\7\20\2\2\177\u0082\3\2\2\2\u0080\u0082\5\30\r\2\u0081") + buf.write("j\3\2\2\2\u0081m\3\2\2\2\u0081q\3\2\2\2\u0081r\3\2\2\2") + buf.write("\u0081\u0080\3\2\2\2\u0082\u0097\3\2\2\2\u0083\u0084\f") + buf.write("\f\2\2\u0084\u0085\t\4\2\2\u0085\u0096\5\26\f\r\u0086") + buf.write("\u0087\f\13\2\2\u0087\u0088\t\5\2\2\u0088\u0096\5\26\f") + buf.write("\f\u0089\u008a\f\n\2\2\u008a\u008b\t\6\2\2\u008b\u0096") + buf.write("\5\26\f\13\u008c\u008d\f\t\2\2\u008d\u008e\t\7\2\2\u008e") + buf.write("\u0096\5\26\f\n\u008f\u0090\f\b\2\2\u0090\u0091\t\b\2") + buf.write("\2\u0091\u0096\5\26\f\t\u0092\u0093\f\7\2\2\u0093\u0094") + buf.write("\t\t\2\2\u0094\u0096\5\26\f\b\u0095\u0083\3\2\2\2\u0095") + buf.write("\u0086\3\2\2\2\u0095\u0089\3\2\2\2\u0095\u008c\3\2\2\2") + buf.write("\u0095\u008f\3\2\2\2\u0095\u0092\3\2\2\2\u0096\u0099\3") + buf.write("\2\2\2\u0097\u0095\3\2\2\2\u0097\u0098\3\2\2\2\u0098\27") + buf.write("\3\2\2\2\u0099\u0097\3\2\2\2\u009a\u009c\7\4\2\2\u009b") + 
buf.write("\u009a\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u00a0\3\2\2\2") + buf.write("\u009d\u009e\5\32\16\2\u009e\u009f\7\31\2\2\u009f\u00a1") + buf.write("\3\2\2\2\u00a0\u009d\3\2\2\2\u00a0\u00a1\3\2\2\2\u00a1") + buf.write("\u00a2\3\2\2\2\u00a2\u00a5\5\32\16\2\u00a3\u00a4\7\3\2") + buf.write("\2\u00a4\u00a6\5\32\16\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6") + buf.write("\3\2\2\2\u00a6\31\3\2\2\2\u00a7\u00ac\7:\2\2\u00a8\u00a9") + buf.write("\7\22\2\2\u00a9\u00ab\7:\2\2\u00aa\u00a8\3\2\2\2\u00ab") + buf.write("\u00ae\3\2\2\2\u00ac\u00aa\3\2\2\2\u00ac\u00ad\3\2\2\2") + buf.write("\u00ad\33\3\2\2\2\u00ae\u00ac\3\2\2\2\u00af\u00b0\t\n") + buf.write("\2\2\u00b0\35\3\2\2\2\26$,\638>FILS`ey|\u0081\u0095\u0097") + buf.write("\u009b\u00a0\u00a5\u00ac") + return buf.getvalue() + + +class PqlParser ( Parser ): + + grammarFileName = "PqlParser.g4" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "':'", "'?'", "'&&'", "'=='", "'>='", + "'<='", "'!='", "'<>'", "'||'", "'<<'", "'>>'", "'&'", + "'='", "')'", "','", "'.'", "'/'", "'>'", "'<'", "'-'", + "'%'", "'('", "'|'", "'+'", "';'", "'*'", "'~'", "'_'" ] + + symbolicNames = [ "", "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", + "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", + "CLOSE_PAREN", "COMMA", "DOT", "FORWARD_SLASH", "GT", + "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", + "WORD" ] + + RULE_parseTel = 0 + RULE_parsePql = 1 + RULE_sqlStmtList = 2 + RULE_sqlStmt = 3 + RULE_selectStmt = 4 + RULE_columns = 5 + RULE_whereClause = 6 + RULE_orderByClause = 7 + RULE_orderExpr = 8 + RULE_limitClause = 9 + RULE_expr = 10 + RULE_taxon = 11 + RULE_identifierMultipart = 12 + RULE_literalValue = 13 + + ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", + "columns", "whereClause", "orderByClause", "orderExpr", + "limitClause", "expr", "taxon", "identifierMultipart", + "literalValue" ] + + EOF = Token.EOF + TAXON_TAG_DELIMITER=1 + TAXON_OPTIONAL_OPERATOR=2 + AND=3 + EQ=4 + GT_EQ=5 + LT_EQ=6 + NOT_EQ1=7 + NOT_EQ2=8 + OR=9 + SHIFT_LEFT=10 + SHIFT_RIGHT=11 + AMP=12 + ASSIGN=13 + CLOSE_PAREN=14 + COMMA=15 + DOT=16 + FORWARD_SLASH=17 + GT=18 + LT=19 + MINUS=20 + MOD=21 + OPEN_PAREN=22 + PIPE=23 + PLUS=24 + SCOL=25 + STAR=26 + TILDE=27 + UNDER=28 + K_AND=29 + K_ASC=30 + K_BY=31 + K_DESC=32 + K_FALSE=33 + K_IS=34 + K_ISNULL=35 + K_LIKE=36 + K_LIMIT=37 + K_NOT=38 + K_NOTNULL=39 + K_NULL=40 + K_OR=41 + K_ORDER=42 + K_SELECT=43 + K_TRUE=44 + K_WHERE=45 + NUMERIC_LITERAL=46 + DOUBLE_QUOTED_STRING=47 + DOUBLE_QUOTED_STRING_TEL=48 + DOUBLE_QUOTED_STRING_SQL=49 + SINGLE_QUOTED_STRING=50 + SINGLE_QUOTED_STRING_TEL=51 + SINGLE_QUOTED_STRING_SQL=52 + SINGLE_LINE_COMMENT=53 + MULTILINE_COMMENT=54 + SPACES=55 + WORD=56 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.8") + self._interp = ParserATNSimulator(self, 
self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + + class ParseTelContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def EOF(self): + return self.getToken(PqlParser.EOF, 0) + + def getRuleIndex(self): + return PqlParser.RULE_parseTel + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterParseTel" ): + listener.enterParseTel(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitParseTel" ): + listener.exitParseTel(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitParseTel" ): + return visitor.visitParseTel(self) + else: + return visitor.visitChildren(self) + + + + + def parseTel(self): + + localctx = PqlParser.ParseTelContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_parseTel) + try: + self.enterOuterAlt(localctx, 1) + self.state = 28 + self.expr(0) + self.state = 29 + self.match(PqlParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ParsePqlContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def EOF(self): + return self.getToken(PqlParser.EOF, 0) + + def sqlStmtList(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.SqlStmtListContext) + else: + return self.getTypedRuleContext(PqlParser.SqlStmtListContext,i) + + + def getRuleIndex(self): + return PqlParser.RULE_parsePql + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterParsePql" ): + listener.enterParsePql(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitParsePql" ): + listener.exitParsePql(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitParsePql" ): + return visitor.visitParsePql(self) + else: + return visitor.visitChildren(self) + + + + + def parsePql(self): + + localctx = PqlParser.ParsePqlContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_parsePql) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 34 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.SCOL or _la==PqlParser.K_SELECT: + self.state = 31 + self.sqlStmtList() + self.state = 36 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 37 + self.match(PqlParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SqlStmtListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def sqlStmt(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.SqlStmtContext) + else: + return self.getTypedRuleContext(PqlParser.SqlStmtContext,i) + + + def SCOL(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.SCOL) + else: + return self.getToken(PqlParser.SCOL, i) + + def getRuleIndex(self): 
+ return PqlParser.RULE_sqlStmtList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSqlStmtList" ): + listener.enterSqlStmtList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSqlStmtList" ): + listener.exitSqlStmtList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitSqlStmtList" ): + return visitor.visitSqlStmtList(self) + else: + return visitor.visitChildren(self) + + + + + def sqlStmtList(self): + + localctx = PqlParser.SqlStmtListContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_sqlStmtList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 42 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.SCOL: + self.state = 39 + self.match(PqlParser.SCOL) + self.state = 44 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 45 + self.sqlStmt() + self.state = 54 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 47 + self._errHandler.sync(self) + _la = self._input.LA(1) + while True: + self.state = 46 + self.match(PqlParser.SCOL) + self.state = 49 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (_la==PqlParser.SCOL): + break + + self.state = 51 + self.sqlStmt() + self.state = 56 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,3,self._ctx) + + self.state = 60 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,4,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 57 + self.match(PqlParser.SCOL) + self.state = 62 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,4,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SqlStmtContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def selectStmt(self): + return self.getTypedRuleContext(PqlParser.SelectStmtContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_sqlStmt + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSqlStmt" ): + listener.enterSqlStmt(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSqlStmt" ): + listener.exitSqlStmt(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitSqlStmt" ): + return visitor.visitSqlStmt(self) + else: + return visitor.visitChildren(self) + + + + + def sqlStmt(self): + + localctx = PqlParser.SqlStmtContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_sqlStmt) + try: + self.enterOuterAlt(localctx, 1) + self.state = 63 + self.selectStmt() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SelectStmtContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def K_SELECT(self): + return self.getToken(PqlParser.K_SELECT, 0) + + def columns(self): + return 
self.getTypedRuleContext(PqlParser.ColumnsContext,0) + + + def whereClause(self): + return self.getTypedRuleContext(PqlParser.WhereClauseContext,0) + + + def orderByClause(self): + return self.getTypedRuleContext(PqlParser.OrderByClauseContext,0) + + + def limitClause(self): + return self.getTypedRuleContext(PqlParser.LimitClauseContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_selectStmt + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSelectStmt" ): + listener.enterSelectStmt(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSelectStmt" ): + listener.exitSelectStmt(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitSelectStmt" ): + return visitor.visitSelectStmt(self) + else: + return visitor.visitChildren(self) + + + + + def selectStmt(self): + + localctx = PqlParser.SelectStmtContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_selectStmt) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 65 + self.match(PqlParser.K_SELECT) + self.state = 66 + self.columns() + self.state = 68 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_WHERE: + self.state = 67 + self.whereClause() + + + self.state = 71 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_ORDER: + self.state = 70 + self.orderByClause() + + + self.state = 74 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_LIMIT: + self.state = 73 + self.limitClause() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ColumnsContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.ExprContext) + else: + return self.getTypedRuleContext(PqlParser.ExprContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def getRuleIndex(self): + return PqlParser.RULE_columns + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterColumns" ): + listener.enterColumns(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitColumns" ): + listener.exitColumns(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitColumns" ): + return visitor.visitColumns(self) + else: + return visitor.visitChildren(self) + + + + + def columns(self): + + localctx = PqlParser.ColumnsContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_columns) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 76 + self.expr(0) + self.state = 81 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 77 + self.match(PqlParser.COMMA) + self.state = 78 + self.expr(0) + self.state = 83 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class WhereClauseContext(ParserRuleContext): + + def __init__(self, parser, 
parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def K_WHERE(self): + return self.getToken(PqlParser.K_WHERE, 0) + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_whereClause + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterWhereClause" ): + listener.enterWhereClause(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitWhereClause" ): + listener.exitWhereClause(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitWhereClause" ): + return visitor.visitWhereClause(self) + else: + return visitor.visitChildren(self) + + + + + def whereClause(self): + + localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_whereClause) + try: + self.enterOuterAlt(localctx, 1) + self.state = 84 + self.match(PqlParser.K_WHERE) + self.state = 85 + self.expr(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class OrderByClauseContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def K_ORDER(self): + return self.getToken(PqlParser.K_ORDER, 0) + + def K_BY(self): + return self.getToken(PqlParser.K_BY, 0) + + def orderExpr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.OrderExprContext) + else: + return self.getTypedRuleContext(PqlParser.OrderExprContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def getRuleIndex(self): + return PqlParser.RULE_orderByClause + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterOrderByClause" ): + listener.enterOrderByClause(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitOrderByClause" ): + listener.exitOrderByClause(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitOrderByClause" ): + return visitor.visitOrderByClause(self) + else: + return visitor.visitChildren(self) + + + + + def orderByClause(self): + + localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_orderByClause) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 87 + self.match(PqlParser.K_ORDER) + self.state = 88 + self.match(PqlParser.K_BY) + self.state = 89 + self.orderExpr() + self.state = 94 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 90 + self.match(PqlParser.COMMA) + self.state = 91 + self.orderExpr() + self.state = 96 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class OrderExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def K_ASC(self): + 
return self.getToken(PqlParser.K_ASC, 0) + + def K_DESC(self): + return self.getToken(PqlParser.K_DESC, 0) + + def getRuleIndex(self): + return PqlParser.RULE_orderExpr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterOrderExpr" ): + listener.enterOrderExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitOrderExpr" ): + listener.exitOrderExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitOrderExpr" ): + return visitor.visitOrderExpr(self) + else: + return visitor.visitChildren(self) + + + + + def orderExpr(self): + + localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_orderExpr) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 97 + self.expr(0) + self.state = 99 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: + self.state = 98 + _la = self._input.LA(1) + if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class LimitClauseContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.limit = None # ExprContext + + def K_LIMIT(self): + return self.getToken(PqlParser.K_LIMIT, 0) + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_limitClause + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterLimitClause" ): + listener.enterLimitClause(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitLimitClause" ): + listener.exitLimitClause(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitLimitClause" ): + return visitor.visitLimitClause(self) + else: + return visitor.visitChildren(self) + + + + + def limitClause(self): + + localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_limitClause) + try: + self.enterOuterAlt(localctx, 1) + self.state = 101 + self.match(PqlParser.K_LIMIT) + self.state = 102 + localctx.limit = self.expr(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.left = None # ExprContext + self.unary_operator = None # Token + self.right = None # ExprContext + self.inner = None # ExprContext + self.function_name = None # IdentifierMultipartContext + self.operator = None # Token + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.ExprContext) + else: + return self.getTypedRuleContext(PqlParser.ExprContext,i) + + + def MINUS(self): + return self.getToken(PqlParser.MINUS, 0) + + def PLUS(self): + return self.getToken(PqlParser.PLUS, 0) + + def K_NOT(self): + return self.getToken(PqlParser.K_NOT, 0) + + def 
OPEN_PAREN(self): + return self.getToken(PqlParser.OPEN_PAREN, 0) + + def CLOSE_PAREN(self): + return self.getToken(PqlParser.CLOSE_PAREN, 0) + + def literalValue(self): + return self.getTypedRuleContext(PqlParser.LiteralValueContext,0) + + + def identifierMultipart(self): + return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def taxon(self): + return self.getTypedRuleContext(PqlParser.TaxonContext,0) + + + def STAR(self): + return self.getToken(PqlParser.STAR, 0) + + def FORWARD_SLASH(self): + return self.getToken(PqlParser.FORWARD_SLASH, 0) + + def MOD(self): + return self.getToken(PqlParser.MOD, 0) + + def LT(self): + return self.getToken(PqlParser.LT, 0) + + def LT_EQ(self): + return self.getToken(PqlParser.LT_EQ, 0) + + def GT(self): + return self.getToken(PqlParser.GT, 0) + + def GT_EQ(self): + return self.getToken(PqlParser.GT_EQ, 0) + + def ASSIGN(self): + return self.getToken(PqlParser.ASSIGN, 0) + + def EQ(self): + return self.getToken(PqlParser.EQ, 0) + + def NOT_EQ1(self): + return self.getToken(PqlParser.NOT_EQ1, 0) + + def NOT_EQ2(self): + return self.getToken(PqlParser.NOT_EQ2, 0) + + def K_IS(self): + return self.getToken(PqlParser.K_IS, 0) + + def K_AND(self): + return self.getToken(PqlParser.K_AND, 0) + + def AND(self): + return self.getToken(PqlParser.AND, 0) + + def K_OR(self): + return self.getToken(PqlParser.K_OR, 0) + + def OR(self): + return self.getToken(PqlParser.OR, 0) + + def getRuleIndex(self): + return PqlParser.RULE_expr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExpr" ): + listener.enterExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExpr" ): + listener.exitExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExpr" ): + return visitor.visitExpr(self) + else: + return visitor.visitChildren(self) + + + + def expr(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = PqlParser.ExprContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 20 + self.enterRecursionRule(localctx, 20, self.RULE_expr, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 127 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,13,self._ctx) + if la_ == 1: + self.state = 105 + localctx.unary_operator = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): + localctx.unary_operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 106 + localctx.right = self.expr(11) + pass + + elif la_ == 2: + self.state = 107 + self.match(PqlParser.OPEN_PAREN) + self.state = 108 + localctx.inner = self.expr(0) + self.state = 109 + self.match(PqlParser.CLOSE_PAREN) + pass + + elif la_ == 3: + self.state = 111 + self.literalValue() + pass + + elif la_ == 4: + self.state = 112 + localctx.function_name = self.identifierMultipart() + self.state = 113 + self.match(PqlParser.OPEN_PAREN) + self.state = 122 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.TAXON_OPTIONAL_OPERATOR) | (1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << 
PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): + self.state = 114 + self.expr(0) + self.state = 119 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 115 + self.match(PqlParser.COMMA) + self.state = 116 + self.expr(0) + self.state = 121 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 124 + self.match(PqlParser.CLOSE_PAREN) + pass + + elif la_ == 5: + self.state = 126 + self.taxon() + pass + + + self._ctx.stop = self._input.LT(-1) + self.state = 149 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,15,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + self.state = 147 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,14,self._ctx) + if la_ == 1: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 129 + if not self.precpred(self._ctx, 10): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") + self.state = 130 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 131 + localctx.right = self.expr(11) + pass + + elif la_ == 2: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 132 + if not self.precpred(self._ctx, 9): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") + self.state = 133 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 134 + localctx.right = self.expr(10) + pass + + elif la_ == 3: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 135 + if not self.precpred(self._ctx, 8): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") + self.state = 136 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 137 + localctx.right = self.expr(9) + pass + + elif la_ == 4: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 138 + if not 
self.precpred(self._ctx, 7): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") + self.state = 139 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 140 + localctx.right = self.expr(8) + pass + + elif la_ == 5: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 141 + if not self.precpred(self._ctx, 6): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") + self.state = 142 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==PqlParser.AND or _la==PqlParser.K_AND): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 143 + localctx.right = self.expr(7) + pass + + elif la_ == 6: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 144 + if not self.precpred(self._ctx, 5): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") + self.state = 145 + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==PqlParser.OR or _la==PqlParser.K_OR): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 146 + localctx.right = self.expr(6) + pass + + + self.state = 151 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,15,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class TaxonContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.namespace = None # IdentifierMultipartContext + self.slug = None # IdentifierMultipartContext + self.tag = None # IdentifierMultipartContext + + def identifierMultipart(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.IdentifierMultipartContext) + else: + return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,i) + + + def TAXON_OPTIONAL_OPERATOR(self): + return self.getToken(PqlParser.TAXON_OPTIONAL_OPERATOR, 0) + + def PIPE(self): + return self.getToken(PqlParser.PIPE, 0) + + def TAXON_TAG_DELIMITER(self): + return self.getToken(PqlParser.TAXON_TAG_DELIMITER, 0) + + def getRuleIndex(self): + return PqlParser.RULE_taxon + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTaxon" ): + listener.enterTaxon(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTaxon" ): + listener.exitTaxon(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTaxon" ): + return visitor.visitTaxon(self) + else: + 
return visitor.visitChildren(self) + + + + + def taxon(self): + + localctx = PqlParser.TaxonContext(self, self._ctx, self.state) + self.enterRule(localctx, 22, self.RULE_taxon) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 153 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.TAXON_OPTIONAL_OPERATOR: + self.state = 152 + self.match(PqlParser.TAXON_OPTIONAL_OPERATOR) + + + self.state = 158 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,17,self._ctx) + if la_ == 1: + self.state = 155 + localctx.namespace = self.identifierMultipart() + self.state = 156 + self.match(PqlParser.PIPE) + + + self.state = 160 + localctx.slug = self.identifierMultipart() + self.state = 163 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,18,self._ctx) + if la_ == 1: + self.state = 161 + self.match(PqlParser.TAXON_TAG_DELIMITER) + self.state = 162 + localctx.tag = self.identifierMultipart() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class IdentifierMultipartContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def WORD(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.WORD) + else: + return self.getToken(PqlParser.WORD, i) + + def DOT(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.DOT) + else: + return self.getToken(PqlParser.DOT, i) + + def getRuleIndex(self): + return PqlParser.RULE_identifierMultipart + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterIdentifierMultipart" ): + listener.enterIdentifierMultipart(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitIdentifierMultipart" ): + listener.exitIdentifierMultipart(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitIdentifierMultipart" ): + return visitor.visitIdentifierMultipart(self) + else: + return visitor.visitChildren(self) + + + + + def identifierMultipart(self): + + localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) + self.enterRule(localctx, 24, self.RULE_identifierMultipart) + try: + self.enterOuterAlt(localctx, 1) + self.state = 165 + self.match(PqlParser.WORD) + self.state = 170 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + self.state = 166 + self.match(PqlParser.DOT) + self.state = 167 + self.match(PqlParser.WORD) + self.state = 172 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class LiteralValueContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def NUMERIC_LITERAL(self): + return self.getToken(PqlParser.NUMERIC_LITERAL, 0) + + def DOUBLE_QUOTED_STRING(self): + return self.getToken(PqlParser.DOUBLE_QUOTED_STRING, 0) + + def SINGLE_QUOTED_STRING(self): + return 
self.getToken(PqlParser.SINGLE_QUOTED_STRING, 0) + + def K_NULL(self): + return self.getToken(PqlParser.K_NULL, 0) + + def K_TRUE(self): + return self.getToken(PqlParser.K_TRUE, 0) + + def K_FALSE(self): + return self.getToken(PqlParser.K_FALSE, 0) + + def getRuleIndex(self): + return PqlParser.RULE_literalValue + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterLiteralValue" ): + listener.enterLiteralValue(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitLiteralValue" ): + listener.exitLiteralValue(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitLiteralValue" ): + return visitor.visitLiteralValue(self) + else: + return visitor.visitChildren(self) + + + + + def literalValue(self): + + localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) + self.enterRule(localctx, 26, self.RULE_literalValue) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 173 + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): + if self._predicates == None: + self._predicates = dict() + self._predicates[10] = self.expr_sempred + pred = self._predicates.get(ruleIndex, None) + if pred is None: + raise Exception("No predicate with index:" + str(ruleIndex)) + else: + return pred(localctx, predIndex) + + def expr_sempred(self, localctx:ExprContext, predIndex:int): + if predIndex == 0: + return self.precpred(self._ctx, 10) + + + if predIndex == 1: + return self.precpred(self._ctx, 9) + + + if predIndex == 2: + return self.precpred(self._ctx, 8) + + + if predIndex == 3: + return self.precpred(self._ctx, 7) + + + if predIndex == 4: + return self.precpred(self._ctx, 6) + + + if predIndex == 5: + return self.precpred(self._ctx, 5) + + + + + diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py new file mode 100644 index 0000000..72789a0 --- /dev/null +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -0,0 +1,138 @@ +# Generated from grammar/PqlParser.g4 by ANTLR 4.8 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .PqlParser import PqlParser +else: + from PqlParser import PqlParser + +# This class defines a complete listener for a parse tree produced by PqlParser. +class PqlParserListener(ParseTreeListener): + + # Enter a parse tree produced by PqlParser#parseTel. + def enterParseTel(self, ctx:PqlParser.ParseTelContext): + pass + + # Exit a parse tree produced by PqlParser#parseTel. + def exitParseTel(self, ctx:PqlParser.ParseTelContext): + pass + + + # Enter a parse tree produced by PqlParser#parsePql. + def enterParsePql(self, ctx:PqlParser.ParsePqlContext): + pass + + # Exit a parse tree produced by PqlParser#parsePql. + def exitParsePql(self, ctx:PqlParser.ParsePqlContext): + pass + + + # Enter a parse tree produced by PqlParser#sqlStmtList. 
+ def enterSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): + pass + + # Exit a parse tree produced by PqlParser#sqlStmtList. + def exitSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): + pass + + + # Enter a parse tree produced by PqlParser#sqlStmt. + def enterSqlStmt(self, ctx:PqlParser.SqlStmtContext): + pass + + # Exit a parse tree produced by PqlParser#sqlStmt. + def exitSqlStmt(self, ctx:PqlParser.SqlStmtContext): + pass + + + # Enter a parse tree produced by PqlParser#selectStmt. + def enterSelectStmt(self, ctx:PqlParser.SelectStmtContext): + pass + + # Exit a parse tree produced by PqlParser#selectStmt. + def exitSelectStmt(self, ctx:PqlParser.SelectStmtContext): + pass + + + # Enter a parse tree produced by PqlParser#columns. + def enterColumns(self, ctx:PqlParser.ColumnsContext): + pass + + # Exit a parse tree produced by PqlParser#columns. + def exitColumns(self, ctx:PqlParser.ColumnsContext): + pass + + + # Enter a parse tree produced by PqlParser#whereClause. + def enterWhereClause(self, ctx:PqlParser.WhereClauseContext): + pass + + # Exit a parse tree produced by PqlParser#whereClause. + def exitWhereClause(self, ctx:PqlParser.WhereClauseContext): + pass + + + # Enter a parse tree produced by PqlParser#orderByClause. + def enterOrderByClause(self, ctx:PqlParser.OrderByClauseContext): + pass + + # Exit a parse tree produced by PqlParser#orderByClause. + def exitOrderByClause(self, ctx:PqlParser.OrderByClauseContext): + pass + + + # Enter a parse tree produced by PqlParser#orderExpr. + def enterOrderExpr(self, ctx:PqlParser.OrderExprContext): + pass + + # Exit a parse tree produced by PqlParser#orderExpr. + def exitOrderExpr(self, ctx:PqlParser.OrderExprContext): + pass + + + # Enter a parse tree produced by PqlParser#limitClause. + def enterLimitClause(self, ctx:PqlParser.LimitClauseContext): + pass + + # Exit a parse tree produced by PqlParser#limitClause. + def exitLimitClause(self, ctx:PqlParser.LimitClauseContext): + pass + + + # Enter a parse tree produced by PqlParser#expr. + def enterExpr(self, ctx:PqlParser.ExprContext): + pass + + # Exit a parse tree produced by PqlParser#expr. + def exitExpr(self, ctx:PqlParser.ExprContext): + pass + + + # Enter a parse tree produced by PqlParser#taxon. + def enterTaxon(self, ctx:PqlParser.TaxonContext): + pass + + # Exit a parse tree produced by PqlParser#taxon. + def exitTaxon(self, ctx:PqlParser.TaxonContext): + pass + + + # Enter a parse tree produced by PqlParser#identifierMultipart. + def enterIdentifierMultipart(self, ctx:PqlParser.IdentifierMultipartContext): + pass + + # Exit a parse tree produced by PqlParser#identifierMultipart. + def exitIdentifierMultipart(self, ctx:PqlParser.IdentifierMultipartContext): + pass + + + # Enter a parse tree produced by PqlParser#literalValue. + def enterLiteralValue(self, ctx:PqlParser.LiteralValueContext): + pass + + # Exit a parse tree produced by PqlParser#literalValue. + def exitLiteralValue(self, ctx:PqlParser.LiteralValueContext): + pass + + + +del PqlParser \ No newline at end of file diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py new file mode 100644 index 0000000..3946ecf --- /dev/null +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -0,0 +1,83 @@ +# Generated from grammar/PqlParser.g4 by ANTLR 4.8 +from antlr4 import * +if __name__ is not None and "." 
in __name__: + from .PqlParser import PqlParser +else: + from PqlParser import PqlParser + +# This class defines a complete generic visitor for a parse tree produced by PqlParser. + +class PqlParserVisitor(ParseTreeVisitor): + + # Visit a parse tree produced by PqlParser#parseTel. + def visitParseTel(self, ctx:PqlParser.ParseTelContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#parsePql. + def visitParsePql(self, ctx:PqlParser.ParsePqlContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#sqlStmtList. + def visitSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#sqlStmt. + def visitSqlStmt(self, ctx:PqlParser.SqlStmtContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#selectStmt. + def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#columns. + def visitColumns(self, ctx:PqlParser.ColumnsContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#whereClause. + def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#orderByClause. + def visitOrderByClause(self, ctx:PqlParser.OrderByClauseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#orderExpr. + def visitOrderExpr(self, ctx:PqlParser.OrderExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#limitClause. + def visitLimitClause(self, ctx:PqlParser.LimitClauseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#expr. + def visitExpr(self, ctx:PqlParser.ExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#taxon. + def visitTaxon(self, ctx:PqlParser.TaxonContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#identifierMultipart. + def visitIdentifierMultipart(self, ctx:PqlParser.IdentifierMultipartContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#literalValue. + def visitLiteralValue(self, ctx:PqlParser.LiteralValueContext): + return self.visitChildren(ctx) + + + +del PqlParser \ No newline at end of file diff --git a/python/src/tel_grammar/antlr/__init__.py b/python/src/pql_grammar/antlr/__init__.py similarity index 100% rename from python/src/tel_grammar/antlr/__init__.py rename to python/src/pql_grammar/antlr/__init__.py diff --git a/python/src/pql_grammar/operators.py b/python/src/pql_grammar/operators.py new file mode 100644 index 0000000..1c9aedb --- /dev/null +++ b/python/src/pql_grammar/operators.py @@ -0,0 +1,421 @@ +""" +Conditional logic runners designed for latent evaluation. + +First you construct the logic, then pass into resulting callable an object that contains values mentioned in the logic +Designed for expressing a SQL WHERE clause logic before you have data and then running each row's data through it. 
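+
+ Each operator below returns a closure rather than a computed value, so nothing touches the data until the resulting callable is invoked with a row; in the example below, `columnA` and `columnB` are only read once `clause(row)` is called.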
+ + # WHERE columnA = 'gold' AND columnB > 100 + clause = AND( + EQ( + attr('columnA'), + 'gold' + ), + GT( + attr('columnB'), + 100 + ) + ) + + remaining_data = [ + row + for row in rows + if clause(row) + ] + +Works for dict's (keys and values over `.get(` interface) AND object attributes (properties) (`getattr(o, attr)`) + +(This is NOT designed to be serialized for pushing over wire. Make something else for that.) + +""" +import enum +import logging +import re + +from dataclasses import dataclass +from typing import Tuple, List, Any, Optional, Union, Literal + + +logger = logging.getLogger(__name__) + + +_v = lambda v, o: v(o) if callable(v) else v + + +def AND(*children): + def _(o): + return all(( + _v(a, o) + for a in children + )) + return _ + + +def OR(*children): + def _(o): + return any(( + _v(a, o) + for a in children + )) + return _ + + +def NOT(a): + def _(o): + return not _v(a, o) + return _ + + +def EQ(a, b): + def _(o): + return _v(a, o) == _v(b, o) + return _ + + +def NEQ(a, b): + def _(o): + return _v(a, o) != _v(b, o) + return _ + + +def IS(a, b): + def _(o): + return _v(a, o) is _v(b, o) + return _ + + +# https://codereview.stackexchange.com/a/36864/229677 +_char_regex_map = { + ch : '\\'+ch + for ch in '.^$*+?{}[]|()\\' +} +_char_regex_map['%'] = '.*?' +_char_regex_map['_'] = '.' +def sql_like_fragment_to_regex_string(fragment): + return '^' + ''.join([ + _char_regex_map.get(ch, ch) + for ch in fragment + ]) + '$' + + +def LIKE(a, fragment): + _regex = re.compile(sql_like_fragment_to_regex_string(fragment)) + def _(o): + return bool(_regex.match(_v(a, o))) + return _ + + +def GT(a, b): + def _(o): + return _v(a, o) > _v(b, o) + return _ + + +def GTE(a, b): + def _(o): + return _v(a, o) >= _v(b, o) + return _ + + +def LT(a, b): + def _(o): + return _v(a, o) < _v(b, o) + return _ + + +def LTE(a, b): + def _(o): + return _v(a, o) <= _v(b, o) + return _ + + +def PLUS_UNARY(a): + def _(o): + # no-op + return _v(a, o) + return _ + + +def MINUS_UNARY(a): + def _(o): + return -1 * _v(a, o) + return _ + + +def PLUS(a, b): + def _(o): + return _v(a, o) + _v(b, o) + return _ + + +def MINUS(a, b): + def _(o): + return _v(a, o) - _v(b, o) + return _ + + +def STAR(a, b): + def _(o): + return _v(a, o) * _v(b, o) + return _ + + +def DIV(a, b): + def _(o): + return _v(a, o) / _v(b, o) + return _ + + +def MOD(a, b): + def _(o): + return _v(a, o) % _v(b, o) + return _ + + +class TableColumnName(list): + def __init__(self, column_name, table_name=None, schema_name=None, catalog_name=None): + super().__init__([ + e + for e in [column_name, table_name, schema_name, catalog_name] + if not (e is None) + ]) + + +def attr(name: Union[str, list, TableColumnName], default=None): + """ + This attr getter works only on Class-like objects, + where you access values through attributes, not keys + Done so specifically to stay away from ambiguity of working with named tuples. + Also allows repackaging instances of objects ON_DEMAND where values are hiding in @properties + and are, thus allowing Where logic to trigger expensive properties only when needed, + as opposed to forcing serialization of full object into dict before piping through the where clause. + + Best examples of what `o` is - @dataclass or namedtuple instances. + Obviously, pydantic models and all other class instances will do too. + """ + def _(o): + # do NOT add .get( handling here, especially do NOT add it as first action. 
+ # You will break namedtuples, where .get(index) is present + # but o.attr_name is the only right way to ask for data by name (not index) + if isinstance(o, (TableColumnName, list)): + if len(o) > 1: + logger.warning(f"WHERE clause references column by long name '{list(reversed(o))}', " + "which is not compatible with our functional WHERE logic processor. " + f"Dropping all parts except for '{o[0]}' during comparison") + return getattr(o[0], name, default) + else: + return getattr(o, name, default) + return _ + + + class OpName: + AND = 'AND' + OR = 'OR' + NOT = 'NOT' + EQ = 'EQ' + NEQ = 'NEQ' + IS = 'IS' + LIKE = 'LIKE' + GT = 'GT' + GTE = 'GTE' + LT = 'LT' + LTE = 'LTE' + PLUS = 'PLUS' + MINUS = 'MINUS' + STAR = 'STAR' + DIV = 'DIV' + MOD = 'MOD' + attr = 'attr' + + + name_operator_map = { + OpName.AND:AND, + OpName.OR:OR, + OpName.NOT:NOT, + OpName.EQ:EQ, + OpName.NEQ:NEQ, + OpName.IS:IS, + OpName.LIKE:LIKE, + OpName.GT:GT, + OpName.GTE:GTE, + OpName.LT:LT, + OpName.LTE:LTE, + OpName.PLUS:PLUS, + OpName.MINUS:MINUS, + OpName.STAR:STAR, + OpName.DIV:DIV, + OpName.MOD:MOD, + OpName.attr:attr, + } + + + operator_name_map = { + fn: name + for name, fn in name_operator_map.items() + } + + + OperatorName = str # would be OpName if it were a (str, enum.Enum), but that produces ugly output structures + OperatorSchemaLiteral = Tuple[Literal['@literalValue'], Any] + # it's actually a List that's returned, but the Python typing system does not allow + # expressing a list with defined items in their order. + # We have to use Tuple to specify the order of things, but the return type is actually a list. + # This structure is supposed to be natively serializable to and from JSON. + OperatorSchema = Union[ + Tuple[OperatorName, 'OperatorSchema', Optional['OperatorSchema']], + OperatorSchemaLiteral + ] + + + LITERAL = '@literalValue' + + + def schema_literal(o) -> OperatorSchemaLiteral: + return [LITERAL, o] + + + def schema_stanza(operator_name, *args) -> OperatorSchema: + aa = [] + for a in args: + # lists usually don't make sense as literals, so this check is just being overly safe; + # however, a `WHERE a IN ('a','b','c')` clause produces a tuple-like literal, + # whose first element MAY be the value 'literal'. + # That tuple, if encoded as a list object, will break this code. + + # We don't support IN yet, but when we do, think about ^ this case and change + # the literal tuple marker value to something that would be + # (a) serializable to JSON, yet + # (b) impossible to collide with the first element of a SQL IN array. + + # TODO: ^ account for WHERE IN literal value collision.
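+ #
+ # A quick sketch of the encoding this loop produces (it mirrors the
+ # `_example_schema_exact_equal` sample near the bottom of this module; every name
+ # used here is defined in this file):
+ #
+ #     schema_stanza(OpName.EQ, schema_stanza(OpName.attr, 'col1'), schema_literal('gold'))
+ #     == ['EQ', ['attr', ['@literalValue', 'col1']], ['@literalValue', 'gold']]
+ #
+ # Already-encoded stanzas (first element is LITERAL or a known OpName) pass through
+ # unchanged below; bare values are wrapped via schema_literal().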
+ if isinstance(a, (list, tuple)) and len(a) > 1 and (a[0] == LITERAL or a[0] in name_operator_map): + # This is already encoded stanza, pass through as is + aa.append(a) + else: + aa.append(schema_literal(a)) + return [operator_name, *aa] + + +def schema_to_callable(schema: OperatorSchema): + if not schema: + return lambda o: True + + fn_name, *args = schema + if fn_name == LITERAL: + # only one value possible + return args[0] + else: + fn = name_operator_map[fn_name] + return fn(*( + schema_to_callable(arg) + for arg in args + )) + + +class _Any: + # used for comparing / finding stanzas in where schema when + # you want to match the structure but not some literal values + def __eq__(self, other): + return True +Any = _Any() + + +def schema_extract_top_level(schema_fragment: OperatorSchema, schema: OperatorSchema): + # when underlying function call requires a parameter + # we need to extract it from sql and ensure it's specified in unambigous way + # ambiguous for arg need_this_id: + # where a = 'orange' or (date < now()-3 or need_this_id = '1234') + # UNambiguous for arg need_this_id: + # where a = 'orange' AND need_this_id = '1234' + # In other words, condition we are looking to extract does not have to be the only one in + # the clause, but just have to participate in unambiguously top-level AND or just be by itself. + # This means we'll traverse down recursive ANDs until we find NON-AND and that NON-AND must equal our fragment. + + if schema_fragment == schema: + return schema + + if not schema: + return + + if schema[0] == OpName.AND: + for predicate in schema[1:]: + v = schema_extract_top_level(schema_fragment, predicate) + if v: + return v + + +@dataclass +class _Example: + col1: str + col2: int + col3: str + +_example_clause = AND( + NOT( + EQ( + attr('col1'), + 'dirt' # <- demonstrates non-callable, literal + ) + ), + GT( + attr('col2'), + 5 # <- demonstrates non-callable, literal + ), + LIKE( + attr('col3'), + '%super.match.com' + ), + True # <- demonstrates non-callable, literal +) + +assert False == _example_clause(_Example(col1='dirt', col2=7, col3='this is super.match.com')) # not asdf +assert False == _example_clause(_Example(col1='dirt', col2=3, col3='this is super.match.com')) # col2 < 5 +assert False == _example_clause(_Example(col1='gold', col2=7, col3='this is super.MISmatch.com')) # col3 issue +assert False == _example_clause(_Example(col1='gold', col2=7, col3='this is super.match.com trailing thing here')) # col3 issue +assert True == _example_clause(_Example(col1='gold', col2=7, col3='this is super.match.com')) + +# nesting ands just ot test "find fragment" code +_example_schema = [ + 'AND', + [ + 'AND', + [LITERAL, True], + [ + 'EQ', + ['attr', + [LITERAL, 'col1'] + ], + [LITERAL, 'gold'], + ] + ], + [LITERAL, True] +] + +assert False == schema_to_callable(_example_schema)(_Example(col1='dirt', col2=0, col3='')) +assert True == schema_to_callable(_example_schema)(_Example(col1='gold', col2=0, col3='')) + +_example_schema_exact_equal = schema_stanza( + OpName.EQ, + schema_stanza(OpName.attr, 'col1'), + schema_literal('gold') +) + +_example_schema_FUZZY_equal = schema_stanza( + OpName.EQ, + schema_stanza(OpName.attr, 'col1'), + schema_literal(Any) +) + +assert _example_schema_exact_equal == schema_extract_top_level(_example_schema_FUZZY_equal, _example_schema) +assert None == schema_extract_top_level(_example_schema_FUZZY_equal, schema_stanza( + OpName.OR, # <- OR is the issue.. 
creates ambiguity about which branch is difinitive + schema_stanza( + OpName.EQ, # <- while this guy + schema_stanza(OpName.attr, 'col1'), + schema_literal('a') + ), + schema_stanza( + OpName.EQ, # <- and this guy in isolation would have matched. + schema_stanza(OpName.attr, 'col1'), + schema_literal('b') + ) +)) diff --git a/python/src/tel_grammar/antlr/TelLexer.py b/python/src/tel_grammar/antlr/TelLexer.py deleted file mode 100644 index c513bfd..0000000 --- a/python/src/tel_grammar/antlr/TelLexer.py +++ /dev/null @@ -1,217 +0,0 @@ -# Generated from grammar/TelLexer.g4 by ANTLR 4.8 -from antlr4 import * -from io import StringIO -from typing.io import TextIO -import sys - - - -def serializedATN(): - with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2 ") - buf.write("\u0135\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") - buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") - buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") - buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") - buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") - buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") - buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") - buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") - buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") - buf.write("\3\2\5\2w\n\2\3\2\6\2z\n\2\r\2\16\2{\3\3\5\3\177\n\3\3") - buf.write("\3\6\3\u0082\n\3\r\3\16\3\u0083\3\3\3\3\6\3\u0088\n\3") - buf.write("\r\3\16\3\u0089\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3") - buf.write("\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b") - buf.write("\3\t\3\t\7\t\u00a5\n\t\f\t\16\t\u00a8\13\t\3\n\3\n\3\n") - buf.write("\3\n\7\n\u00ae\n\n\f\n\16\n\u00b1\13\n\3\n\3\n\3\13\3") - buf.write("\13\3\13\3\13\7\13\u00b9\n\13\f\13\16\13\u00bc\13\13\3") - buf.write("\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20") - buf.write("\3\21\3\21\3\21\3\22\3\22\3\22\3\23\3\23\3\23\3\24\3\24") - buf.write("\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\27\3\30\3\30\3\30") - buf.write("\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36") - buf.write("\3\36\3\36\3\36\3\36\5\36\u00ef\n\36\3\36\7\36\u00f2\n") - buf.write("\36\f\36\16\36\u00f5\13\36\3\36\3\36\3\37\6\37\u00fa\n") - buf.write("\37\r\37\16\37\u00fb\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3#") - buf.write("\3#\3$\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+") - buf.write("\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62") - buf.write("\3\63\3\63\3\64\3\64\3\65\3\65\3\66\3\66\3\67\3\67\38") - buf.write("\38\39\39\3:\3:\2\2;\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21") - buf.write("\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24") - buf.write("\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37") - buf.write("= ?\2A\2C\2E\2G\2I\2K\2M\2O\2Q\2S\2U\2W\2Y\2[\2]\2_\2") - buf.write("a\2c\2e\2g\2i\2k\2m\2o\2q\2s\2\3\2#\5\2C\\aac|\b\2&&\60") - buf.write("\60\62;C\\aac|\3\2$$\3\2))\4\2\f\f\17\17\5\2\13\f\17\17") - buf.write("\"\"\3\2\62;\4\2CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4") - buf.write("\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNn") - buf.write("n\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2") - buf.write("UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4") - buf.write("\2\\\\||\2\u0127\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2") - buf.write("\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21") - buf.write("\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3") - 
buf.write("\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2") - buf.write("\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2") - buf.write("\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2") - buf.write("\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2") - buf.write("\2\2\3v\3\2\2\2\5~\3\2\2\2\7\u008b\3\2\2\2\t\u0090\3\2") - buf.write("\2\2\13\u0096\3\2\2\2\r\u009a\3\2\2\2\17\u009d\3\2\2\2") - buf.write("\21\u00a2\3\2\2\2\23\u00a9\3\2\2\2\25\u00b4\3\2\2\2\27") - buf.write("\u00bf\3\2\2\2\31\u00c1\3\2\2\2\33\u00c3\3\2\2\2\35\u00c5") - buf.write("\3\2\2\2\37\u00c7\3\2\2\2!\u00c9\3\2\2\2#\u00cc\3\2\2") - buf.write("\2%\u00cf\3\2\2\2\'\u00d2\3\2\2\2)\u00d5\3\2\2\2+\u00d7") - buf.write("\3\2\2\2-\u00d9\3\2\2\2/\u00dc\3\2\2\2\61\u00df\3\2\2") - buf.write("\2\63\u00e1\3\2\2\2\65\u00e3\3\2\2\2\67\u00e5\3\2\2\2") - buf.write("9\u00e7\3\2\2\2;\u00ee\3\2\2\2=\u00f9\3\2\2\2?\u00ff\3") - buf.write("\2\2\2A\u0101\3\2\2\2C\u0103\3\2\2\2E\u0105\3\2\2\2G\u0107") - buf.write("\3\2\2\2I\u0109\3\2\2\2K\u010b\3\2\2\2M\u010d\3\2\2\2") - buf.write("O\u010f\3\2\2\2Q\u0111\3\2\2\2S\u0113\3\2\2\2U\u0115\3") - buf.write("\2\2\2W\u0117\3\2\2\2Y\u0119\3\2\2\2[\u011b\3\2\2\2]\u011d") - buf.write("\3\2\2\2_\u011f\3\2\2\2a\u0121\3\2\2\2c\u0123\3\2\2\2") - buf.write("e\u0125\3\2\2\2g\u0127\3\2\2\2i\u0129\3\2\2\2k\u012b\3") - buf.write("\2\2\2m\u012d\3\2\2\2o\u012f\3\2\2\2q\u0131\3\2\2\2s\u0133") - buf.write("\3\2\2\2uw\7/\2\2vu\3\2\2\2vw\3\2\2\2wy\3\2\2\2xz\5? ") - buf.write("\2yx\3\2\2\2z{\3\2\2\2{y\3\2\2\2{|\3\2\2\2|\4\3\2\2\2") - buf.write("}\177\7/\2\2~}\3\2\2\2~\177\3\2\2\2\177\u0081\3\2\2\2") - buf.write("\u0080\u0082\5? \2\u0081\u0080\3\2\2\2\u0082\u0083\3\2") - buf.write("\2\2\u0083\u0081\3\2\2\2\u0083\u0084\3\2\2\2\u0084\u0085") - buf.write("\3\2\2\2\u0085\u0087\7\60\2\2\u0086\u0088\5? 
\2\u0087") - buf.write("\u0086\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u0087\3\2\2\2") - buf.write("\u0089\u008a\3\2\2\2\u008a\6\3\2\2\2\u008b\u008c\5g\64") - buf.write("\2\u008c\u008d\5c\62\2\u008d\u008e\5i\65\2\u008e\u008f") - buf.write("\5I%\2\u008f\b\3\2\2\2\u0090\u0091\5K&\2\u0091\u0092\5") - buf.write("A!\2\u0092\u0093\5W,\2\u0093\u0094\5e\63\2\u0094\u0095") - buf.write("\5I%\2\u0095\n\3\2\2\2\u0096\u0097\5[.\2\u0097\u0098\5") - buf.write("]/\2\u0098\u0099\5g\64\2\u0099\f\3\2\2\2\u009a\u009b\5") - buf.write("Q)\2\u009b\u009c\5e\63\2\u009c\16\3\2\2\2\u009d\u009e") - buf.write("\5[.\2\u009e\u009f\5i\65\2\u009f\u00a0\5W,\2\u00a0\u00a1") - buf.write("\5W,\2\u00a1\20\3\2\2\2\u00a2\u00a6\t\2\2\2\u00a3\u00a5") - buf.write("\t\3\2\2\u00a4\u00a3\3\2\2\2\u00a5\u00a8\3\2\2\2\u00a6") - buf.write("\u00a4\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\22\3\2\2\2\u00a8") - buf.write("\u00a6\3\2\2\2\u00a9\u00af\7$\2\2\u00aa\u00ab\7^\2\2\u00ab") - buf.write("\u00ae\7$\2\2\u00ac\u00ae\n\4\2\2\u00ad\u00aa\3\2\2\2") - buf.write("\u00ad\u00ac\3\2\2\2\u00ae\u00b1\3\2\2\2\u00af\u00ad\3") - buf.write("\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b2\3\2\2\2\u00b1\u00af") - buf.write("\3\2\2\2\u00b2\u00b3\7$\2\2\u00b3\24\3\2\2\2\u00b4\u00ba") - buf.write("\7)\2\2\u00b5\u00b6\7^\2\2\u00b6\u00b9\7)\2\2\u00b7\u00b9") - buf.write("\n\5\2\2\u00b8\u00b5\3\2\2\2\u00b8\u00b7\3\2\2\2\u00b9") - buf.write("\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2") - buf.write("\u00bb\u00bd\3\2\2\2\u00bc\u00ba\3\2\2\2\u00bd\u00be\7") - buf.write(")\2\2\u00be\26\3\2\2\2\u00bf\u00c0\7*\2\2\u00c0\30\3\2") - buf.write("\2\2\u00c1\u00c2\7+\2\2\u00c2\32\3\2\2\2\u00c3\u00c4\7") - buf.write("~\2\2\u00c4\34\3\2\2\2\u00c5\u00c6\7<\2\2\u00c6\36\3\2") - buf.write("\2\2\u00c7\u00c8\7.\2\2\u00c8 \3\2\2\2\u00c9\u00ca\7~") - buf.write("\2\2\u00ca\u00cb\7~\2\2\u00cb\"\3\2\2\2\u00cc\u00cd\7") - buf.write("(\2\2\u00cd\u00ce\7(\2\2\u00ce$\3\2\2\2\u00cf\u00d0\7") - buf.write("?\2\2\u00d0\u00d1\7?\2\2\u00d1&\3\2\2\2\u00d2\u00d3\7") - buf.write("#\2\2\u00d3\u00d4\7?\2\2\u00d4(\3\2\2\2\u00d5\u00d6\7") - buf.write("@\2\2\u00d6*\3\2\2\2\u00d7\u00d8\7>\2\2\u00d8,\3\2\2\2") - buf.write("\u00d9\u00da\7@\2\2\u00da\u00db\7?\2\2\u00db.\3\2\2\2") - buf.write("\u00dc\u00dd\7>\2\2\u00dd\u00de\7?\2\2\u00de\60\3\2\2") - buf.write("\2\u00df\u00e0\7-\2\2\u00e0\62\3\2\2\2\u00e1\u00e2\7/") - buf.write("\2\2\u00e2\64\3\2\2\2\u00e3\u00e4\7,\2\2\u00e4\66\3\2") - buf.write("\2\2\u00e5\u00e6\7\61\2\2\u00e68\3\2\2\2\u00e7\u00e8\7") - buf.write("A\2\2\u00e8:\3\2\2\2\u00e9\u00ea\7/\2\2\u00ea\u00ef\7") - buf.write("/\2\2\u00eb\u00ec\7\61\2\2\u00ec\u00ef\7\61\2\2\u00ed") - buf.write("\u00ef\7%\2\2\u00ee\u00e9\3\2\2\2\u00ee\u00eb\3\2\2\2") - buf.write("\u00ee\u00ed\3\2\2\2\u00ef\u00f3\3\2\2\2\u00f0\u00f2\n") - buf.write("\6\2\2\u00f1\u00f0\3\2\2\2\u00f2\u00f5\3\2\2\2\u00f3\u00f1") - buf.write("\3\2\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f6\3\2\2\2\u00f5") - buf.write("\u00f3\3\2\2\2\u00f6\u00f7\b\36\2\2\u00f7<\3\2\2\2\u00f8") - buf.write("\u00fa\t\7\2\2\u00f9\u00f8\3\2\2\2\u00fa\u00fb\3\2\2\2") - buf.write("\u00fb\u00f9\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00fd\3") - buf.write("\2\2\2\u00fd\u00fe\b\37\2\2\u00fe>\3\2\2\2\u00ff\u0100") - buf.write("\t\b\2\2\u0100@\3\2\2\2\u0101\u0102\t\t\2\2\u0102B\3\2") - buf.write("\2\2\u0103\u0104\t\n\2\2\u0104D\3\2\2\2\u0105\u0106\t") - buf.write("\13\2\2\u0106F\3\2\2\2\u0107\u0108\t\f\2\2\u0108H\3\2") - buf.write("\2\2\u0109\u010a\t\r\2\2\u010aJ\3\2\2\2\u010b\u010c\t") - buf.write("\16\2\2\u010cL\3\2\2\2\u010d\u010e\t\17\2\2\u010eN\3\2") - 
buf.write("\2\2\u010f\u0110\t\20\2\2\u0110P\3\2\2\2\u0111\u0112\t") - buf.write("\21\2\2\u0112R\3\2\2\2\u0113\u0114\t\22\2\2\u0114T\3\2") - buf.write("\2\2\u0115\u0116\t\23\2\2\u0116V\3\2\2\2\u0117\u0118\t") - buf.write("\24\2\2\u0118X\3\2\2\2\u0119\u011a\t\25\2\2\u011aZ\3\2") - buf.write("\2\2\u011b\u011c\t\26\2\2\u011c\\\3\2\2\2\u011d\u011e") - buf.write("\t\27\2\2\u011e^\3\2\2\2\u011f\u0120\t\30\2\2\u0120`\3") - buf.write("\2\2\2\u0121\u0122\t\31\2\2\u0122b\3\2\2\2\u0123\u0124") - buf.write("\t\32\2\2\u0124d\3\2\2\2\u0125\u0126\t\33\2\2\u0126f\3") - buf.write("\2\2\2\u0127\u0128\t\34\2\2\u0128h\3\2\2\2\u0129\u012a") - buf.write("\t\35\2\2\u012aj\3\2\2\2\u012b\u012c\t\36\2\2\u012cl\3") - buf.write("\2\2\2\u012d\u012e\t\37\2\2\u012en\3\2\2\2\u012f\u0130") - buf.write("\t \2\2\u0130p\3\2\2\2\u0131\u0132\t!\2\2\u0132r\3\2\2") - buf.write("\2\u0133\u0134\t\"\2\2\u0134t\3\2\2\2\20\2v{~\u0083\u0089") - buf.write("\u00a6\u00ad\u00af\u00b8\u00ba\u00ee\u00f3\u00fb\3\2\3") - buf.write("\2") - return buf.getvalue() - - -class TelLexer(Lexer): - - atn = ATNDeserializer().deserialize(serializedATN()) - - decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] - - INT = 1 - REAL = 2 - TRUE = 3 - FALSE = 4 - NOT = 5 - KW_IS = 6 - KW_NULL = 7 - WORD = 8 - STRING_CONSTANT = 9 - SINGLE_QUOTED_ELEMENT = 10 - L_BRACKET = 11 - R_BRACKET = 12 - TAXON_NAMESPACE_DELIMITER = 13 - TAXON_TAG_DELIMITER = 14 - FN_PARAMETER_DELIMITER = 15 - OR = 16 - AND = 17 - EQ = 18 - NEQ = 19 - GT = 20 - LT = 21 - GTEQ = 22 - LTEQ = 23 - PLUS = 24 - MINUS = 25 - MULT = 26 - DIV = 27 - OPTIONAL_TAXON_OPERATOR = 28 - SINGLE_LINE_COMMENT = 29 - WS = 30 - - channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] - - modeNames = [ "DEFAULT_MODE" ] - - literalNames = [ "", - "'('", "')'", "'|'", "':'", "','", "'||'", "'&&'", "'=='", "'!='", - "'>'", "'<'", "'>='", "'<='", "'+'", "'-'", "'*'", "'/'", "'?'" ] - - symbolicNames = [ "", - "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", "KW_NULL", "WORD", - "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", "L_BRACKET", "R_BRACKET", - "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", - "OR", "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", - "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", - "WS" ] - - ruleNames = [ "INT", "REAL", "TRUE", "FALSE", "NOT", "KW_IS", "KW_NULL", - "WORD", "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", "L_BRACKET", - "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", "TAXON_TAG_DELIMITER", - "FN_PARAMETER_DELIMITER", "OR", "AND", "EQ", "NEQ", "GT", - "LT", "GTEQ", "LTEQ", "PLUS", "MINUS", "MULT", "DIV", - "OPTIONAL_TAXON_OPERATOR", "SINGLE_LINE_COMMENT", "WS", - "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", "I", - "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", - "U", "V", "W", "X", "Y", "Z" ] - - grammarFileName = "TelLexer.g4" - - def __init__(self, input=None, output:TextIO = sys.stdout): - super().__init__(input, output) - self.checkVersion("4.8") - self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) - self._actions = None - self._predicates = None - - diff --git a/python/src/tel_grammar/antlr/TelParser.py b/python/src/tel_grammar/antlr/TelParser.py deleted file mode 100644 index 855e9b3..0000000 --- a/python/src/tel_grammar/antlr/TelParser.py +++ /dev/null @@ -1,1004 +0,0 @@ -# Generated from grammar/TelParser.g4 by ANTLR 4.8 -# encoding: utf-8 -from antlr4 import * -from io import StringIO -import sys -if sys.version_info[1] > 5: - from typing 
import TextIO -else: - from typing.io import TextIO - - -def serializedATN(): - with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3 ") - buf.write("P\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\3\2\3\2\3\2") - buf.write("\3\3\3\3\3\3\3\3\5\3\24\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3") - buf.write("\3\3\3\3\3\3\3\3\3\3\3\5\3\"\n\3\3\3\7\3%\n\3\f\3\16\3") - buf.write("(\13\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\64") - buf.write("\n\4\3\5\3\5\3\5\5\59\n\5\3\5\3\5\7\5=\n\5\f\5\16\5@\13") - buf.write("\5\3\5\3\5\3\6\5\6E\n\6\3\6\3\6\3\6\5\6J\n\6\3\6\3\6\5") - buf.write("\6N\n\6\3\6\2\3\4\7\2\4\6\b\n\2\7\3\2\34\35\3\2\32\33") - buf.write("\3\2\22\31\3\2\3\4\3\2\5\6\2[\2\f\3\2\2\2\4\23\3\2\2\2") - buf.write("\6\63\3\2\2\2\b\65\3\2\2\2\nD\3\2\2\2\f\r\5\4\3\2\r\16") - buf.write("\7\2\2\3\16\3\3\2\2\2\17\20\b\3\1\2\20\21\7\7\2\2\21\24") - buf.write("\5\4\3\b\22\24\5\6\4\2\23\17\3\2\2\2\23\22\3\2\2\2\24") - buf.write("&\3\2\2\2\25\26\f\7\2\2\26\27\t\2\2\2\27%\5\4\3\b\30\31") - buf.write("\f\6\2\2\31\32\t\3\2\2\32%\5\4\3\7\33\34\f\5\2\2\34\35") - buf.write("\t\4\2\2\35%\5\4\3\6\36\37\f\4\2\2\37!\7\b\2\2 \"\7\7") - buf.write("\2\2! \3\2\2\2!\"\3\2\2\2\"#\3\2\2\2#%\7\t\2\2$\25\3\2") - buf.write("\2\2$\30\3\2\2\2$\33\3\2\2\2$\36\3\2\2\2%(\3\2\2\2&$\3") - buf.write("\2\2\2&\'\3\2\2\2\'\5\3\2\2\2(&\3\2\2\2)*\7\r\2\2*+\5") - buf.write("\4\3\2+,\7\16\2\2,\64\3\2\2\2-\64\t\5\2\2.\64\t\6\2\2") - buf.write("/\64\7\f\2\2\60\64\7\13\2\2\61\64\5\b\5\2\62\64\5\n\6") - buf.write("\2\63)\3\2\2\2\63-\3\2\2\2\63.\3\2\2\2\63/\3\2\2\2\63") - buf.write("\60\3\2\2\2\63\61\3\2\2\2\63\62\3\2\2\2\64\7\3\2\2\2\65") - buf.write("\66\7\n\2\2\668\7\r\2\2\679\5\4\3\28\67\3\2\2\289\3\2") - buf.write("\2\29>\3\2\2\2:;\7\21\2\2;=\5\4\3\2<:\3\2\2\2=@\3\2\2") - buf.write("\2><\3\2\2\2>?\3\2\2\2?A\3\2\2\2@>\3\2\2\2AB\7\16\2\2") - buf.write("B\t\3\2\2\2CE\7\36\2\2DC\3\2\2\2DE\3\2\2\2EF\3\2\2\2F") - buf.write("I\7\n\2\2GH\7\17\2\2HJ\7\n\2\2IG\3\2\2\2IJ\3\2\2\2JM\3") - buf.write("\2\2\2KL\7\20\2\2LN\7\n\2\2MK\3\2\2\2MN\3\2\2\2N\13\3") - buf.write("\2\2\2\f\23!$&\638>DIM") - return buf.getvalue() - - -class TelParser ( Parser ): - - grammarFileName = "TelParser.g4" - - atn = ATNDeserializer().deserialize(serializedATN()) - - decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] - - sharedContextCache = PredictionContextCache() - - literalNames = [ "", "", "", "", - "", "", "", "", - "", "", "", "'('", "')'", - "'|'", "':'", "','", "'||'", "'&&'", "'=='", "'!='", - "'>'", "'<'", "'>='", "'<='", "'+'", "'-'", "'*'", - "'/'", "'?'" ] - - symbolicNames = [ "", "INT", "REAL", "TRUE", "FALSE", "NOT", - "KW_IS", "KW_NULL", "WORD", "STRING_CONSTANT", "SINGLE_QUOTED_ELEMENT", - "L_BRACKET", "R_BRACKET", "TAXON_NAMESPACE_DELIMITER", - "TAXON_TAG_DELIMITER", "FN_PARAMETER_DELIMITER", "OR", - "AND", "EQ", "NEQ", "GT", "LT", "GTEQ", "LTEQ", "PLUS", - "MINUS", "MULT", "DIV", "OPTIONAL_TAXON_OPERATOR", - "SINGLE_LINE_COMMENT", "WS" ] - - RULE_parse = 0 - RULE_expr = 1 - RULE_atom = 2 - RULE_fn = 3 - RULE_taxon = 4 - - ruleNames = [ "parse", "expr", "atom", "fn", "taxon" ] - - EOF = Token.EOF - INT=1 - REAL=2 - TRUE=3 - FALSE=4 - NOT=5 - KW_IS=6 - KW_NULL=7 - WORD=8 - STRING_CONSTANT=9 - SINGLE_QUOTED_ELEMENT=10 - L_BRACKET=11 - R_BRACKET=12 - TAXON_NAMESPACE_DELIMITER=13 - TAXON_TAG_DELIMITER=14 - FN_PARAMETER_DELIMITER=15 - OR=16 - AND=17 - EQ=18 - NEQ=19 - GT=20 - LT=21 - GTEQ=22 - LTEQ=23 - PLUS=24 - MINUS=25 - MULT=26 - DIV=27 - OPTIONAL_TAXON_OPERATOR=28 - SINGLE_LINE_COMMENT=29 - WS=30 - - def 
__init__(self, input:TokenStream, output:TextIO = sys.stdout): - super().__init__(input, output) - self.checkVersion("4.8") - self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) - self._predicates = None - - - - - class ParseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def expr(self): - return self.getTypedRuleContext(TelParser.ExprContext,0) - - - def EOF(self): - return self.getToken(TelParser.EOF, 0) - - def getRuleIndex(self): - return TelParser.RULE_parse - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterParse" ): - listener.enterParse(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitParse" ): - listener.exitParse(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitParse" ): - return visitor.visitParse(self) - else: - return visitor.visitChildren(self) - - - - - def parse(self): - - localctx = TelParser.ParseContext(self, self._ctx, self.state) - self.enterRule(localctx, 0, self.RULE_parse) - try: - self.enterOuterAlt(localctx, 1) - self.state = 10 - self.expr(0) - self.state = 11 - self.match(TelParser.EOF) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class ExprContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - - def getRuleIndex(self): - return TelParser.RULE_expr - - - def copyFrom(self, ctx:ParserRuleContext): - super().copyFrom(ctx) - - - class NullTestExprContext(ExprContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.copyFrom(ctx) - - def expr(self): - return self.getTypedRuleContext(TelParser.ExprContext,0) - - def KW_IS(self): - return self.getToken(TelParser.KW_IS, 0) - def KW_NULL(self): - return self.getToken(TelParser.KW_NULL, 0) - def NOT(self): - return self.getToken(TelParser.NOT, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterNullTestExpr" ): - listener.enterNullTestExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitNullTestExpr" ): - listener.exitNullTestExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitNullTestExpr" ): - return visitor.visitNullTestExpr(self) - else: - return visitor.visitChildren(self) - - - class NotExprContext(ExprContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.copyFrom(ctx) - - def NOT(self): - return self.getToken(TelParser.NOT, 0) - def expr(self): - return self.getTypedRuleContext(TelParser.ExprContext,0) - - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterNotExpr" ): - listener.enterNotExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitNotExpr" ): - listener.exitNotExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitNotExpr" ): - return visitor.visitNotExpr(self) - else: - return visitor.visitChildren(self) - - - class LogicalExprContext(ExprContext): - - def __init__(self, parser, 
ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.op = None # Token - self.copyFrom(ctx) - - def expr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(TelParser.ExprContext) - else: - return self.getTypedRuleContext(TelParser.ExprContext,i) - - def OR(self): - return self.getToken(TelParser.OR, 0) - def AND(self): - return self.getToken(TelParser.AND, 0) - def EQ(self): - return self.getToken(TelParser.EQ, 0) - def NEQ(self): - return self.getToken(TelParser.NEQ, 0) - def GT(self): - return self.getToken(TelParser.GT, 0) - def LT(self): - return self.getToken(TelParser.LT, 0) - def GTEQ(self): - return self.getToken(TelParser.GTEQ, 0) - def LTEQ(self): - return self.getToken(TelParser.LTEQ, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterLogicalExpr" ): - listener.enterLogicalExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitLogicalExpr" ): - listener.exitLogicalExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitLogicalExpr" ): - return visitor.visitLogicalExpr(self) - else: - return visitor.visitChildren(self) - - - class MultiplicationExprContext(ExprContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.op = None # Token - self.copyFrom(ctx) - - def expr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(TelParser.ExprContext) - else: - return self.getTypedRuleContext(TelParser.ExprContext,i) - - def MULT(self): - return self.getToken(TelParser.MULT, 0) - def DIV(self): - return self.getToken(TelParser.DIV, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterMultiplicationExpr" ): - listener.enterMultiplicationExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitMultiplicationExpr" ): - listener.exitMultiplicationExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitMultiplicationExpr" ): - return visitor.visitMultiplicationExpr(self) - else: - return visitor.visitChildren(self) - - - class AtomExprContext(ExprContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.copyFrom(ctx) - - def atom(self): - return self.getTypedRuleContext(TelParser.AtomContext,0) - - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterAtomExpr" ): - listener.enterAtomExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitAtomExpr" ): - listener.exitAtomExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitAtomExpr" ): - return visitor.visitAtomExpr(self) - else: - return visitor.visitChildren(self) - - - class AdditiveExprContext(ExprContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.ExprContext - super().__init__(parser) - self.op = None # Token - self.copyFrom(ctx) - - def expr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(TelParser.ExprContext) - else: - return self.getTypedRuleContext(TelParser.ExprContext,i) - - def PLUS(self): - return self.getToken(TelParser.PLUS, 0) - def MINUS(self): - return self.getToken(TelParser.MINUS, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterAdditiveExpr" ): - listener.enterAdditiveExpr(self) - - def exitRule(self, 
listener:ParseTreeListener): - if hasattr( listener, "exitAdditiveExpr" ): - listener.exitAdditiveExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitAdditiveExpr" ): - return visitor.visitAdditiveExpr(self) - else: - return visitor.visitChildren(self) - - - - def expr(self, _p:int=0): - _parentctx = self._ctx - _parentState = self.state - localctx = TelParser.ExprContext(self, self._ctx, _parentState) - _prevctx = localctx - _startState = 2 - self.enterRecursionRule(localctx, 2, self.RULE_expr, _p) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 17 - self._errHandler.sync(self) - token = self._input.LA(1) - if token in [TelParser.NOT]: - localctx = TelParser.NotExprContext(self, localctx) - self._ctx = localctx - _prevctx = localctx - - self.state = 14 - self.match(TelParser.NOT) - self.state = 15 - self.expr(6) - pass - elif token in [TelParser.INT, TelParser.REAL, TelParser.TRUE, TelParser.FALSE, TelParser.WORD, TelParser.STRING_CONSTANT, TelParser.SINGLE_QUOTED_ELEMENT, TelParser.L_BRACKET, TelParser.OPTIONAL_TAXON_OPERATOR]: - localctx = TelParser.AtomExprContext(self, localctx) - self._ctx = localctx - _prevctx = localctx - self.state = 16 - self.atom() - pass - else: - raise NoViableAltException(self) - - self._ctx.stop = self._input.LT(-1) - self.state = 36 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: - if _alt==1: - if self._parseListeners is not None: - self.triggerExitRuleEvent() - _prevctx = localctx - self.state = 34 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,2,self._ctx) - if la_ == 1: - localctx = TelParser.MultiplicationExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) - self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 19 - if not self.precpred(self._ctx, 5): - from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 20 - localctx.op = self._input.LT(1) - _la = self._input.LA(1) - if not(_la==TelParser.MULT or _la==TelParser.DIV): - localctx.op = self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - self.state = 21 - self.expr(6) - pass - - elif la_ == 2: - localctx = TelParser.AdditiveExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) - self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 22 - if not self.precpred(self._ctx, 4): - from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 4)") - self.state = 23 - localctx.op = self._input.LT(1) - _la = self._input.LA(1) - if not(_la==TelParser.PLUS or _la==TelParser.MINUS): - localctx.op = self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - self.state = 24 - self.expr(5) - pass - - elif la_ == 3: - localctx = TelParser.LogicalExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) - self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 25 - if not self.precpred(self._ctx, 3): - from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") - self.state = 26 - localctx.op = self._input.LT(1) - _la = self._input.LA(1) - if not((((_la) & ~0x3f) == 0 and ((1 << _la) & 
((1 << TelParser.OR) | (1 << TelParser.AND) | (1 << TelParser.EQ) | (1 << TelParser.NEQ) | (1 << TelParser.GT) | (1 << TelParser.LT) | (1 << TelParser.GTEQ) | (1 << TelParser.LTEQ))) != 0)): - localctx.op = self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - self.state = 27 - self.expr(4) - pass - - elif la_ == 4: - localctx = TelParser.NullTestExprContext(self, TelParser.ExprContext(self, _parentctx, _parentState)) - self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 28 - if not self.precpred(self._ctx, 2): - from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 2)") - self.state = 29 - self.match(TelParser.KW_IS) - self.state = 31 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==TelParser.NOT: - self.state = 30 - self.match(TelParser.NOT) - - - self.state = 33 - self.match(TelParser.KW_NULL) - pass - - - self.state = 38 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.unrollRecursionContexts(_parentctx) - return localctx - - - class AtomContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - - def getRuleIndex(self): - return TelParser.RULE_atom - - - def copyFrom(self, ctx:ParserRuleContext): - super().copyFrom(ctx) - - - - class FnExprContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def fn(self): - return self.getTypedRuleContext(TelParser.FnContext,0) - - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterFnExpr" ): - listener.enterFnExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitFnExpr" ): - listener.exitFnExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitFnExpr" ): - return visitor.visitFnExpr(self) - else: - return visitor.visitChildren(self) - - - class TaxonSlugAtomContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def taxon(self): - return self.getTypedRuleContext(TelParser.TaxonContext,0) - - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTaxonSlugAtom" ): - listener.enterTaxonSlugAtom(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTaxonSlugAtom" ): - listener.exitTaxonSlugAtom(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTaxonSlugAtom" ): - return visitor.visitTaxonSlugAtom(self) - else: - return visitor.visitChildren(self) - - - class BooleanAtomContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def TRUE(self): - return self.getToken(TelParser.TRUE, 0) - def FALSE(self): - return self.getToken(TelParser.FALSE, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterBooleanAtom" ): - listener.enterBooleanAtom(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, 
"exitBooleanAtom" ): - listener.exitBooleanAtom(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitBooleanAtom" ): - return visitor.visitBooleanAtom(self) - else: - return visitor.visitChildren(self) - - - class BracketExprContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def L_BRACKET(self): - return self.getToken(TelParser.L_BRACKET, 0) - def expr(self): - return self.getTypedRuleContext(TelParser.ExprContext,0) - - def R_BRACKET(self): - return self.getToken(TelParser.R_BRACKET, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterBracketExpr" ): - listener.enterBracketExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitBracketExpr" ): - listener.exitBracketExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitBracketExpr" ): - return visitor.visitBracketExpr(self) - else: - return visitor.visitChildren(self) - - - class SingleQuotedAtomContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def SINGLE_QUOTED_ELEMENT(self): - return self.getToken(TelParser.SINGLE_QUOTED_ELEMENT, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSingleQuotedAtom" ): - listener.enterSingleQuotedAtom(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSingleQuotedAtom" ): - listener.exitSingleQuotedAtom(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSingleQuotedAtom" ): - return visitor.visitSingleQuotedAtom(self) - else: - return visitor.visitChildren(self) - - - class NumberAtomContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def INT(self): - return self.getToken(TelParser.INT, 0) - def REAL(self): - return self.getToken(TelParser.REAL, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterNumberAtom" ): - listener.enterNumberAtom(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitNumberAtom" ): - listener.exitNumberAtom(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitNumberAtom" ): - return visitor.visitNumberAtom(self) - else: - return visitor.visitChildren(self) - - - class StringConstantAtomContext(AtomContext): - - def __init__(self, parser, ctx:ParserRuleContext): # actually a TelParser.AtomContext - super().__init__(parser) - self.copyFrom(ctx) - - def STRING_CONSTANT(self): - return self.getToken(TelParser.STRING_CONSTANT, 0) - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterStringConstantAtom" ): - listener.enterStringConstantAtom(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitStringConstantAtom" ): - listener.exitStringConstantAtom(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitStringConstantAtom" ): - return visitor.visitStringConstantAtom(self) - else: - return visitor.visitChildren(self) - - - - def atom(self): - - localctx = TelParser.AtomContext(self, self._ctx, self.state) - self.enterRule(localctx, 4, self.RULE_atom) - self._la = 0 # Token type - try: - self.state = 49 - self._errHandler.sync(self) - la_ = 
self._interp.adaptivePredict(self._input,4,self._ctx) - if la_ == 1: - localctx = TelParser.BracketExprContext(self, localctx) - self.enterOuterAlt(localctx, 1) - self.state = 39 - self.match(TelParser.L_BRACKET) - self.state = 40 - self.expr(0) - self.state = 41 - self.match(TelParser.R_BRACKET) - pass - - elif la_ == 2: - localctx = TelParser.NumberAtomContext(self, localctx) - self.enterOuterAlt(localctx, 2) - self.state = 43 - _la = self._input.LA(1) - if not(_la==TelParser.INT or _la==TelParser.REAL): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - pass - - elif la_ == 3: - localctx = TelParser.BooleanAtomContext(self, localctx) - self.enterOuterAlt(localctx, 3) - self.state = 44 - _la = self._input.LA(1) - if not(_la==TelParser.TRUE or _la==TelParser.FALSE): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - pass - - elif la_ == 4: - localctx = TelParser.SingleQuotedAtomContext(self, localctx) - self.enterOuterAlt(localctx, 4) - self.state = 45 - self.match(TelParser.SINGLE_QUOTED_ELEMENT) - pass - - elif la_ == 5: - localctx = TelParser.StringConstantAtomContext(self, localctx) - self.enterOuterAlt(localctx, 5) - self.state = 46 - self.match(TelParser.STRING_CONSTANT) - pass - - elif la_ == 6: - localctx = TelParser.FnExprContext(self, localctx) - self.enterOuterAlt(localctx, 6) - self.state = 47 - self.fn() - pass - - elif la_ == 7: - localctx = TelParser.TaxonSlugAtomContext(self, localctx) - self.enterOuterAlt(localctx, 7) - self.state = 48 - self.taxon() - pass - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class FnContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def WORD(self): - return self.getToken(TelParser.WORD, 0) - - def L_BRACKET(self): - return self.getToken(TelParser.L_BRACKET, 0) - - def R_BRACKET(self): - return self.getToken(TelParser.R_BRACKET, 0) - - def expr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(TelParser.ExprContext) - else: - return self.getTypedRuleContext(TelParser.ExprContext,i) - - - def FN_PARAMETER_DELIMITER(self, i:int=None): - if i is None: - return self.getTokens(TelParser.FN_PARAMETER_DELIMITER) - else: - return self.getToken(TelParser.FN_PARAMETER_DELIMITER, i) - - def getRuleIndex(self): - return TelParser.RULE_fn - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterFn" ): - listener.enterFn(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitFn" ): - listener.exitFn(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitFn" ): - return visitor.visitFn(self) - else: - return visitor.visitChildren(self) - - - - - def fn(self): - - localctx = TelParser.FnContext(self, self._ctx, self.state) - self.enterRule(localctx, 6, self.RULE_fn) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 51 - self.match(TelParser.WORD) - self.state = 52 - self.match(TelParser.L_BRACKET) - self.state = 54 - self._errHandler.sync(self) - _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TelParser.INT) | (1 << TelParser.REAL) | (1 << TelParser.TRUE) | (1 << TelParser.FALSE) | (1 << TelParser.NOT) | (1 << 
TelParser.WORD) | (1 << TelParser.STRING_CONSTANT) | (1 << TelParser.SINGLE_QUOTED_ELEMENT) | (1 << TelParser.L_BRACKET) | (1 << TelParser.OPTIONAL_TAXON_OPERATOR))) != 0): - self.state = 53 - self.expr(0) - - - self.state = 60 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==TelParser.FN_PARAMETER_DELIMITER: - self.state = 56 - self.match(TelParser.FN_PARAMETER_DELIMITER) - self.state = 57 - self.expr(0) - self.state = 62 - self._errHandler.sync(self) - _la = self._input.LA(1) - - self.state = 63 - self.match(TelParser.R_BRACKET) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class TaxonContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def WORD(self, i:int=None): - if i is None: - return self.getTokens(TelParser.WORD) - else: - return self.getToken(TelParser.WORD, i) - - def OPTIONAL_TAXON_OPERATOR(self): - return self.getToken(TelParser.OPTIONAL_TAXON_OPERATOR, 0) - - def TAXON_NAMESPACE_DELIMITER(self): - return self.getToken(TelParser.TAXON_NAMESPACE_DELIMITER, 0) - - def TAXON_TAG_DELIMITER(self): - return self.getToken(TelParser.TAXON_TAG_DELIMITER, 0) - - def getRuleIndex(self): - return TelParser.RULE_taxon - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTaxon" ): - listener.enterTaxon(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTaxon" ): - listener.exitTaxon(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTaxon" ): - return visitor.visitTaxon(self) - else: - return visitor.visitChildren(self) - - - - - def taxon(self): - - localctx = TelParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 8, self.RULE_taxon) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 66 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==TelParser.OPTIONAL_TAXON_OPERATOR: - self.state = 65 - self.match(TelParser.OPTIONAL_TAXON_OPERATOR) - - - self.state = 68 - self.match(TelParser.WORD) - self.state = 71 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,8,self._ctx) - if la_ == 1: - self.state = 69 - self.match(TelParser.TAXON_NAMESPACE_DELIMITER) - self.state = 70 - self.match(TelParser.WORD) - - - self.state = 75 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,9,self._ctx) - if la_ == 1: - self.state = 73 - self.match(TelParser.TAXON_TAG_DELIMITER) - self.state = 74 - self.match(TelParser.WORD) - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - - def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): - if self._predicates == None: - self._predicates = dict() - self._predicates[1] = self.expr_sempred - pred = self._predicates.get(ruleIndex, None) - if pred is None: - raise Exception("No predicate with index:" + str(ruleIndex)) - else: - return pred(localctx, predIndex) - - def expr_sempred(self, localctx:ExprContext, predIndex:int): - if predIndex == 0: - return self.precpred(self._ctx, 5) - - - if predIndex == 1: - return self.precpred(self._ctx, 4) - - - if predIndex == 2: - return self.precpred(self._ctx, 3) - 
- - if predIndex == 3: - return self.precpred(self._ctx, 2) - - - - - diff --git a/python/src/tel_grammar/antlr/TelParserListener.py b/python/src/tel_grammar/antlr/TelParserListener.py deleted file mode 100644 index 5b1119a..0000000 --- a/python/src/tel_grammar/antlr/TelParserListener.py +++ /dev/null @@ -1,156 +0,0 @@ -# Generated from grammar/TelParser.g4 by ANTLR 4.8 -from antlr4 import * -if __name__ is not None and "." in __name__: - from .TelParser import TelParser -else: - from TelParser import TelParser - -# This class defines a complete listener for a parse tree produced by TelParser. -class TelParserListener(ParseTreeListener): - - # Enter a parse tree produced by TelParser#parse. - def enterParse(self, ctx:TelParser.ParseContext): - pass - - # Exit a parse tree produced by TelParser#parse. - def exitParse(self, ctx:TelParser.ParseContext): - pass - - - # Enter a parse tree produced by TelParser#nullTestExpr. - def enterNullTestExpr(self, ctx:TelParser.NullTestExprContext): - pass - - # Exit a parse tree produced by TelParser#nullTestExpr. - def exitNullTestExpr(self, ctx:TelParser.NullTestExprContext): - pass - - - # Enter a parse tree produced by TelParser#notExpr. - def enterNotExpr(self, ctx:TelParser.NotExprContext): - pass - - # Exit a parse tree produced by TelParser#notExpr. - def exitNotExpr(self, ctx:TelParser.NotExprContext): - pass - - - # Enter a parse tree produced by TelParser#logicalExpr. - def enterLogicalExpr(self, ctx:TelParser.LogicalExprContext): - pass - - # Exit a parse tree produced by TelParser#logicalExpr. - def exitLogicalExpr(self, ctx:TelParser.LogicalExprContext): - pass - - - # Enter a parse tree produced by TelParser#multiplicationExpr. - def enterMultiplicationExpr(self, ctx:TelParser.MultiplicationExprContext): - pass - - # Exit a parse tree produced by TelParser#multiplicationExpr. - def exitMultiplicationExpr(self, ctx:TelParser.MultiplicationExprContext): - pass - - - # Enter a parse tree produced by TelParser#atomExpr. - def enterAtomExpr(self, ctx:TelParser.AtomExprContext): - pass - - # Exit a parse tree produced by TelParser#atomExpr. - def exitAtomExpr(self, ctx:TelParser.AtomExprContext): - pass - - - # Enter a parse tree produced by TelParser#additiveExpr. - def enterAdditiveExpr(self, ctx:TelParser.AdditiveExprContext): - pass - - # Exit a parse tree produced by TelParser#additiveExpr. - def exitAdditiveExpr(self, ctx:TelParser.AdditiveExprContext): - pass - - - # Enter a parse tree produced by TelParser#bracketExpr. - def enterBracketExpr(self, ctx:TelParser.BracketExprContext): - pass - - # Exit a parse tree produced by TelParser#bracketExpr. - def exitBracketExpr(self, ctx:TelParser.BracketExprContext): - pass - - - # Enter a parse tree produced by TelParser#numberAtom. - def enterNumberAtom(self, ctx:TelParser.NumberAtomContext): - pass - - # Exit a parse tree produced by TelParser#numberAtom. - def exitNumberAtom(self, ctx:TelParser.NumberAtomContext): - pass - - - # Enter a parse tree produced by TelParser#booleanAtom. - def enterBooleanAtom(self, ctx:TelParser.BooleanAtomContext): - pass - - # Exit a parse tree produced by TelParser#booleanAtom. - def exitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): - pass - - - # Enter a parse tree produced by TelParser#singleQuotedAtom. - def enterSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): - pass - - # Exit a parse tree produced by TelParser#singleQuotedAtom. 
- def exitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): - pass - - - # Enter a parse tree produced by TelParser#stringConstantAtom. - def enterStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): - pass - - # Exit a parse tree produced by TelParser#stringConstantAtom. - def exitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): - pass - - - # Enter a parse tree produced by TelParser#fnExpr. - def enterFnExpr(self, ctx:TelParser.FnExprContext): - pass - - # Exit a parse tree produced by TelParser#fnExpr. - def exitFnExpr(self, ctx:TelParser.FnExprContext): - pass - - - # Enter a parse tree produced by TelParser#taxonSlugAtom. - def enterTaxonSlugAtom(self, ctx:TelParser.TaxonSlugAtomContext): - pass - - # Exit a parse tree produced by TelParser#taxonSlugAtom. - def exitTaxonSlugAtom(self, ctx:TelParser.TaxonSlugAtomContext): - pass - - - # Enter a parse tree produced by TelParser#fn. - def enterFn(self, ctx:TelParser.FnContext): - pass - - # Exit a parse tree produced by TelParser#fn. - def exitFn(self, ctx:TelParser.FnContext): - pass - - - # Enter a parse tree produced by TelParser#taxon. - def enterTaxon(self, ctx:TelParser.TaxonContext): - pass - - # Exit a parse tree produced by TelParser#taxon. - def exitTaxon(self, ctx:TelParser.TaxonContext): - pass - - - -del TelParser \ No newline at end of file diff --git a/python/src/tel_grammar/antlr/TelParserVisitor.py b/python/src/tel_grammar/antlr/TelParserVisitor.py deleted file mode 100644 index cdce04d..0000000 --- a/python/src/tel_grammar/antlr/TelParserVisitor.py +++ /dev/null @@ -1,93 +0,0 @@ -# Generated from grammar/TelParser.g4 by ANTLR 4.8 -from antlr4 import * -if __name__ is not None and "." in __name__: - from .TelParser import TelParser -else: - from TelParser import TelParser - -# This class defines a complete generic visitor for a parse tree produced by TelParser. - -class TelParserVisitor(ParseTreeVisitor): - - # Visit a parse tree produced by TelParser#parse. - def visitParse(self, ctx:TelParser.ParseContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#nullTestExpr. - def visitNullTestExpr(self, ctx:TelParser.NullTestExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#notExpr. - def visitNotExpr(self, ctx:TelParser.NotExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#logicalExpr. - def visitLogicalExpr(self, ctx:TelParser.LogicalExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#multiplicationExpr. - def visitMultiplicationExpr(self, ctx:TelParser.MultiplicationExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#atomExpr. - def visitAtomExpr(self, ctx:TelParser.AtomExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#additiveExpr. - def visitAdditiveExpr(self, ctx:TelParser.AdditiveExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#bracketExpr. - def visitBracketExpr(self, ctx:TelParser.BracketExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#numberAtom. - def visitNumberAtom(self, ctx:TelParser.NumberAtomContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#booleanAtom. 
- def visitBooleanAtom(self, ctx:TelParser.BooleanAtomContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#singleQuotedAtom. - def visitSingleQuotedAtom(self, ctx:TelParser.SingleQuotedAtomContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#stringConstantAtom. - def visitStringConstantAtom(self, ctx:TelParser.StringConstantAtomContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#fnExpr. - def visitFnExpr(self, ctx:TelParser.FnExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#taxonSlugAtom. - def visitTaxonSlugAtom(self, ctx:TelParser.TaxonSlugAtomContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#fn. - def visitFn(self, ctx:TelParser.FnContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by TelParser#taxon. - def visitTaxon(self, ctx:TelParser.TaxonContext): - return self.visitChildren(ctx) - - - -del TelParser \ No newline at end of file diff --git a/python/tests/pql/pql_test.py b/python/tests/pql/pql_test.py new file mode 100644 index 0000000..d24acf4 --- /dev/null +++ b/python/tests/pql/pql_test.py @@ -0,0 +1,306 @@ +import sys +sys.path.append('./src') + +from antlr4 import CommonTokenStream, InputStream, ParserRuleContext +from antlr4.tree import Tree +from unittest import mock, TestCase +from typing import Optional + +from pql_grammar.antlr.PqlLexer import PqlLexer +from pql_grammar.antlr.PqlParser import PqlParser +from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor +from pql_grammar import operators as op + + +def full_text(ctx: ParserRuleContext) -> str: + # extracts full text from a tree of nodes, + # including white space. + if ctx: + if isinstance(ctx, ParserRuleContext): + return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) + else: + try: + # some primitive context object + return ctx.text + except AttributeError: + # Terminal Node of some sort + return str(ctx) + else: + return None + + +def unquote(s: str): + # Quoted schema, table, column names come in Postgres style - double-quotes + # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. 
+ # https://docs.oracle.com/goldengate/1212/gg-winux/GWURF/gg_parameters183.htm#GWURF728 + # Example: + # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' + if not s: + return s + if s[0] == '"' and s[-1] == '"': + s = s[1:-1] + return s.replace('""', '"') + + +class WhereClauseParser: + + @staticmethod + def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): + is_number = e.NUMERIC_LITERAL() + is_string = e.DOUBLE_QUOTED_STRING() or e.SINGLE_QUOTED_STRING() + is_null = e.K_NULL() + is_bool = e.K_TRUE() or e.K_FALSE() + + # TODO: + # - BLOB_LITERAL + # - CURRENT_[DATE|TIME|TIMESTAMP] + + if is_null: + return None + + if is_bool: + return bool(e.K_TRUE()) + + try: + v = e.getText() + except IndexError: + raise Exception(f"Could not extract literal value node from '{e.getText()}'.") + + if is_number: + # TODO: contemplate decimal type instead + try: + return int(v) + except ValueError: + try: + return float(v) + except Exception: + raise Exception(f"Could not convert SQL number {v} to native number representation.") + + if is_string: + return unquote(v) + + return v + + _sql_name_map = { + 'AND': op.OpName.AND, + 'OR': op.OpName.OR, + 'NOT': op.OpName.NOT, + 'IS': op.OpName.IS, + '=': op.OpName.EQ, # notice WHERE clause specific handling. NOT assignment. EQ! + '==': op.OpName.EQ, # opportunistic inclusion, while we don't expect to see it in WHERE + '<>': op.OpName.NEQ, + '!=': op.OpName.NEQ, # opportunistic inclusion, while we don't expect to see it in WHERE + '>': op.OpName.GT, + '>=': op.OpName.GTE, + '<': op.OpName.LT, + '<=': op.OpName.LTE, + 'LIKE': op.OpName.LIKE, + '+':op.OpName.PLUS, + '-':op.OpName.MINUS, + '*':op.OpName.STAR, + '/':op.OpName.DIV, + '%':op.OpName.MOD, + } + + @classmethod + def _unwrap_expr_parens(cls, e: PqlParser.ExprContext) -> PqlParser.ExprContext: + # it's allowed to wrap expressions into superflous amounts of parens + # (((column > 5))) + # These come across as triple-nested [TerminalNodeImpl('('), expr, TerminalNodeImpl(')')] + # Here we check for len == 3 and if last and first Terminals are (), return middle element - expression, + # Run this recursively. + if e.inner: + return cls._unwrap_expr_parens(e.inner) + else: + return e + + @classmethod + def _lookup_operator_internal_name(cls, sql_operator: str): + op_name = cls._sql_name_map.get(sql_operator.upper()) + if not op_name: + raise Exception(f"Could not match operator '{sql_operator}' in where clause to a supported action.") + return op_name + + @classmethod + def _parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> op.OperatorSchema : + ctx = cls._unwrap_expr_parens(ctx) + v = ctx.literalValue() + if v: + return op.schema_literal(cls._literalValue_to_python_native(v)) + + v = ctx.unary_operator + if v: + operator = cls._lookup_operator_internal_name(full_text(v)) + if operator in (op.OpName.PLUS, op.OpName.MINUS): + return op.schema_stanza( + operator, + op.schema_literal(0), + cls._parse_where_clause_expr(ctx.right), + ) + if operator == op.OpName.NOT: + return op.schema_stanza( + operator, + cls._parse_where_clause_expr(ctx.right), + ) + + v: PqlParser.taxon = ctx.taxon() + if v: + return op.schema_stanza( + op.OpName.attr, + op.schema_literal(full_text(v)) + ) + + # v: PqlParser.NullComparisonContext = ctx.nullComparison() + # if v: + # # Note, converting `V is (NOT) null` + # # into SQL-incompatible `V ==/!= null` that we CAN do in python. + # # Mostly to avoid creating redundant operators module code. 
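# (Editor's illustration, not part of the original patch: if this disabled
#  null-comparison path were re-enabled, a fragment like
#  `ns6|taxon6 IS NOT NULL` would fold into
#  ['NOT', ['IS', ['attr', ['@literalValue', 'ns6|taxon6']], ['@literalValue', None]]],
#  assuming op.schema_stanza / op.schema_literal build the nested lists the
#  way the assertions in PQLTests below expect.)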
+ # is_negated = bool(v.K_NOT() or v.K_NOTNULL()) + # if is_negated: + # return op.schema_stanza( + # op.OpName.NOT, + # op.schema_stanza( + # op.OpName.IS, + # cls._parse_where_clause_expr(ctx.left), + # op.schema_literal(None) + # ) + # ) + # else: + # return op.schema_stanza( + # op.OpName.IS, + # cls._parse_where_clause_expr(ctx.left), + # op.schema_literal(None) + # ) + + v: Optional[str] = full_text(ctx.operator) + if v: + # this is super generic expression of type + # left (NOT) OP right + # with a lot of options for OP value. + # not all of these values are supported by our `operators` module logic. + # Next call throws errors for unmatched operators + operator = cls._lookup_operator_internal_name(v) + _rv = op.schema_stanza( + operator, + cls._parse_where_clause_expr(ctx.left), + cls._parse_where_clause_expr(ctx.right), + ) + # if bool(ctx.is_negated): + # return op.schema_stanza( + # op.OpName.NOT, + # _rv + # ) + # else: + return _rv + + # v = ctx.K_IN() + # if v: + # if ctx.compoundSelectStmt(): + # raise Exception(f'Where expression "{full_text(ctx.compoundSelectStmt())}" is not supported yet.') + # expressions = ctx.expressions() + # if expressions: + # comps = [ + # cls._parse_where_clause_expr(expr) + # for expr in expressions.expr() + # ] + # # converting these into multiple OR equal + # left = cls._parse_where_clause_expr(ctx.left) + # clause = op.schema_stanza( + # op.OpName.OR, + # *[ + # op.schema_stanza( + # op.OpName.EQ, + # left, + # comp + # ) + # for comp in comps + # ] + # ) + # if bool(ctx.is_negated): + # return op.schema_stanza( + # op.OpName.NOT, + # clause, + # ) + # else: + # return clause + + if ctx.function_name: + raise NotImplementedError('Dont know how to pack functions yet') + raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') + + +class AssertPqlVisitor(PqlParserVisitor, WhereClauseParser): + """ + Special TelVisitor for testing grammar. Throws error in case of invalid node. 
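    Editor's note (illustrative, not part of the original patch): this class also
    mixes in WhereClauseParser, so the `_parse_where_clause_expr` helper exercised
    by PQLTests below is available on instances. Mirroring that test's expectations,
    a WHERE fragment such as

        ns6|taxon6 > 1234

    is folded into the nested list form

        ['GT', ['attr', ['@literalValue', 'ns6|taxon6']], ['@literalValue', 1234]]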
+ """ + def visitErrorNode(self, node): + wrong_symbol = node.symbol.text + position = node.symbol.column + 1 + details = f'Unexpected symbol "{wrong_symbol}" at position {position}' + raise AssertionError(details) + + @classmethod + def parse_string(cls, s): + inp_stream = InputStream(s) + lexer = PqlLexer(inp_stream) + stream = CommonTokenStream(lexer) + parser = PqlParser(stream) + tree = parser.parsePql() + # Use error visitor on parsed tree to test it + visitor = cls() + visitor.visit(tree) + + +class PQLTests(TestCase): + + def test_select_no_filter(self): + + pql = """ + select + ?ns1|taxon1, + ?ns2|taxon2, + slug1, + (?ns3|taxon3 + (slug2 - 1234)), + fn_4(fn_1(slug)) + where + ns6|taxon6 > 1234 + and (ns0|taxon10 + 1234) == 0 + """ + + columns = [] + where_clause = [] + class V(AssertPqlVisitor): + def visitColumns(self, ctx:PqlParser.ColumnsContext): + for column in ctx.expr(): + columns.append(full_text(column)) + def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): + ww = self._parse_where_clause_expr(ctx.expr()) + where_clause.extend(ww) + + V.parse_string(pql) + + assert columns == [ + '?ns1|taxon1', + '?ns2|taxon2', + 'slug1', + '(?ns3|taxon3 + (slug2 - 1234))', + 'fn_4(fn_1(slug))' + ] + assert where_clause == [ + 'AND', + ['GT', + ['attr', + ['@literalValue', 'ns6|taxon6'] + ], + ['@literalValue', 1234] + ], + ['EQ', + ['PLUS', + ['attr', + ['@literalValue', 'ns0|taxon10'] + ], + ['@literalValue', 1234] + ], + ['@literalValue', 0] + ] + ] diff --git a/python/tests/antlr_tel/grammar_test.py b/python/tests/tel/grammar_test.py similarity index 71% rename from python/tests/antlr_tel/grammar_test.py rename to python/tests/tel/grammar_test.py index a9cc7fb..3cdcdfb 100644 --- a/python/tests/antlr_tel/grammar_test.py +++ b/python/tests/tel/grammar_test.py @@ -2,15 +2,32 @@ import sys import pytest -from antlr4 import CommonTokenStream, InputStream +from antlr4 import CommonTokenStream, InputStream, ParserRuleContext sys.path.append('./src') -from tel_grammar.antlr.TelLexer import TelLexer -from tel_grammar.antlr.TelParser import TelParser -from tel_grammar.antlr.TelParserVisitor import TelParserVisitor as TelVisitor +from pql_grammar.antlr.PqlLexer import PqlLexer +from pql_grammar.antlr.PqlParser import PqlParser +from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor -class AssertTelVisitor(TelVisitor): +def full_text(ctx: ParserRuleContext) -> str: + # extracts full text from a tree of nodes, + # including white space. + if ctx: + if isinstance(ctx, ParserRuleContext): + return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) + else: + try: + # some primitive context object + return ctx.text + except AttributeError: + # Terminal Node of some sort + return str(ctx) + else: + return None + + +class AssertTelVisitor(PqlParserVisitor): """ Special TelVisitor for testing grammar. Throws error in case of invalid node. 
""" @@ -59,7 +76,7 @@ def visitErrorNode(self, node): # Handle taxon slugs and functions with dot ('db.prod',), ('db.prod|schema.table.column',), - ('db.prod|schema.table.column:v2.0',), + ('db.prod|schema.table.column:v2.b0',), ('fn.contains()',), ('fn.contains.v2(db.prod, 3.14)',), # Handle not operator @@ -76,10 +93,10 @@ def visitErrorNode(self, node): ) def test_grammar(test_case): inp_stream = InputStream(test_case) - lexer = TelLexer(inp_stream) + lexer = PqlLexer(inp_stream) stream = CommonTokenStream(lexer) - parser = TelParser(stream) - tree = parser.parse() + parser = PqlParser(stream) + tree = parser.parseTel() # Use error visitor on parsed tree to test it visitor = AssertTelVisitor() visitor.visit(tree) From be271be90c4476faafb61932b7fc651872a24e92 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Mon, 9 Nov 2020 21:04:20 -0800 Subject: [PATCH 11/32] add column ::TypeCast() and AS Alias support --- grammar/PqlLexer.g4 | 11 +- grammar/PqlParser.g4 | 40 +- python/src/pql_grammar/antlr/PqlLexer.py | 583 ++++++------ python/src/pql_grammar/antlr/PqlParser.py | 860 ++++++++++++------ .../pql_grammar/antlr/PqlParserListener.py | 36 + .../src/pql_grammar/antlr/PqlParserVisitor.py | 20 + python/tests/pql/pql_test.py | 38 +- 7 files changed, 978 insertions(+), 610 deletions(-) diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 index 099a33a..b2320a0 100644 --- a/grammar/PqlLexer.g4 +++ b/grammar/PqlLexer.g4 @@ -1,9 +1,6 @@ lexer grammar PqlLexer; -TAXON_TAG_DELIMITER: ':'; -TAXON_OPTIONAL_OPERATOR: '?'; // Taxon slug prefix noting, that the taxon slug is optional. - -// SQL-compatible (except for some TEL-isms): +// mostly SQL-compatible (except for some TEL-isms where marked): AND : '&&'; // TEL EQ : '=='; @@ -11,14 +8,14 @@ GT_EQ : '>='; LT_EQ : '<='; NOT_EQ1 : '!='; NOT_EQ2 : '<>'; -OR : '||'; // TEL +OR : '||'; // TEL. !! CONFLICT WITH SQL where it's string concatenator !! SHIFT_LEFT : '<<'; SHIFT_RIGHT : '>>'; - AMP : '&'; ASSIGN : '='; CLOSE_PAREN : ')'; +COLON: ':'; COMMA : ','; DOT : '.'; FORWARD_SLASH : '/'; @@ -29,6 +26,7 @@ MOD : '%'; OPEN_PAREN : '('; PIPE : '|'; PLUS : '+'; +QUESTION_MARK: '?'; SCOL : ';'; STAR : '*'; TILDE : '~'; @@ -36,6 +34,7 @@ UNDER: '_'; // SQL keywords we adapt: K_AND : A N D; +K_AS : A S; K_ASC : A S C; K_BY : B Y; K_DESC : D E S C; diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 00a6344..74c8525 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -35,7 +35,25 @@ selectStmt ( limitClause )? ; -columns: expr ( COMMA expr )* ; +columns: column ( COMMA column )* ; + +// Column is a complicated structure of many parts: +// {tel expression (includes taxon)}{::Type Cast function or token} {{AS} taxon-like} +// Example: +// (?ns3|taxon3 + (slug2 - 1234))::TypeHint(agg=ave) as ns1|custom_data1, +column: value=expr type_cast=typeCast? (K_AS alias=taxon)? ; +// this conflicts with end of taxon ":tag" +// This means that typecasting cannot be used on naked taxon +// Must wrap whatever expression into parens or other non-taxon before Type Casting +// WRONG: +// ns1|taxon:tag:TypeCast() +// ns1|taxon::TypeCast() +// CORRECT: +// (ns1|taxon:tag)::TypeCast() +// (ns1|taxon)::TypeCast() +// While SQL allows non-function and function type casts, +// we stick with requireing parens always for simplicity of syntax parser. 
+typeCast: COLON COLON function ; whereClause : K_WHERE expr @@ -60,21 +78,33 @@ expr | left=expr operator=( LT | LT_EQ | GT | GT_EQ ) right=expr | left=expr operator=( ASSIGN | EQ | NOT_EQ1 | NOT_EQ2 | K_IS ) right=expr // | left=expr is_negated=K_NOT? operator=( K_LIKE | K_BETWEEN ) right=expr -// | left=expr is_negated=K_NOT? operator=K_IN '(' ( right=expr ( ',' right=expr )* )? ')' +// | left=expr is_negated=K_NOT? operator=K_IN '(' exprList? ')' | left=expr operator=( K_AND | AND ) right=expr | left=expr operator=( K_OR | OR ) right=expr | OPEN_PAREN inner=expr CLOSE_PAREN | literalValue - | function_name=identifierMultipart OPEN_PAREN ( expr ( COMMA expr )* )? CLOSE_PAREN + | function | taxon ; +// Note that function supports optional list of arguments trapped as `expr` +// which allows us to have +// named (`arg1=value1, arg2=value2'` and +// positional (`value1, value2`) args. +// Named ones will come as `expr` with left=expr,operator=ASSIGN,right=expr contents. +// You might need to express these as ordered dict / list of tuples to preserve names of args. +// Positional will be whatever literal or other single-valued expr content could be. +function: function_name=identifierMultipart OPEN_PAREN arguments=exprList? CLOSE_PAREN; +exprList: expr ( COMMA expr )* ; + // TODO: TAXON_TAG_DELIMITER is being killed off. Remove when we migrate out of taxon tags. taxon: - TAXON_OPTIONAL_OPERATOR? + QUESTION_MARK? ( namespace=identifierMultipart PIPE )? slug=identifierMultipart - ( TAXON_TAG_DELIMITER tag=identifierMultipart )? + // TODO: drop this when we drop Data Tags system. + // May conflict with TypeCast expression + ( COLON tag=identifierMultipart )? ; identifierMultipart: WORD ( DOT WORD )* ; diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index d7f4023..888284c 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2:") - buf.write("\u01fd\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2;") + buf.write("\u0202\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -20,219 +20,224 @@ def serializedATN(): buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") - buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\3\2") - buf.write("\3\2\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3") - buf.write("\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3") - buf.write("\13\3\f\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20\3\20") - buf.write("\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26") - buf.write("\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33") - buf.write("\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37") - buf.write("\3\37\3 \3 \3 \3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"") - buf.write("\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3&\3&\3") - buf.write("&\3&\3&\3&\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3(\3)") - buf.write("\3)\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3") - 
buf.write(",\3,\3,\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3/\6/\u0143\n") - buf.write("/\r/\16/\u0144\3/\3/\7/\u0149\n/\f/\16/\u014c\13/\5/\u014e") - buf.write("\n/\3/\3/\5/\u0152\n/\3/\6/\u0155\n/\r/\16/\u0156\5/\u0159") - buf.write("\n/\3/\3/\6/\u015d\n/\r/\16/\u015e\3/\3/\5/\u0163\n/\3") - buf.write("/\6/\u0166\n/\r/\16/\u0167\5/\u016a\n/\5/\u016c\n/\3\60") - buf.write("\3\60\3\61\3\61\3\61\3\61\7\61\u0174\n\61\f\61\16\61\u0177") - buf.write("\13\61\3\61\3\61\3\62\3\62\3\62\3\62\7\62\u017f\n\62\f") - buf.write("\62\16\62\u0182\13\62\3\62\3\62\3\63\3\63\3\64\3\64\3") - buf.write("\64\3\64\7\64\u018c\n\64\f\64\16\64\u018f\13\64\3\64\3") - buf.write("\64\3\65\3\65\3\65\3\65\7\65\u0197\n\65\f\65\16\65\u019a") - buf.write("\13\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\5\66\u01a3\n") - buf.write("\66\3\66\7\66\u01a6\n\66\f\66\16\66\u01a9\13\66\3\66\3") - buf.write("\66\3\67\3\67\3\67\3\67\7\67\u01b1\n\67\f\67\16\67\u01b4") - buf.write("\13\67\3\67\3\67\3\67\5\67\u01b9\n\67\3\67\3\67\38\38") - buf.write("\38\38\39\39\79\u01c3\n9\f9\169\u01c6\139\3:\3:\3;\3;") - buf.write("\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3") - buf.write("D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3") - buf.write("M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3T\3T\3\u01b2\2") - buf.write("U\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31") - buf.write("\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31") - buf.write("\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O") - buf.write(")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s\2") - buf.write("u\2w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087\2") - buf.write("\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095") - buf.write("\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f\2\u00a1\2\u00a3") - buf.write("\2\u00a5\2\u00a7\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17") - buf.write("\17\5\2\13\r\17\17\"\"\5\2C\\aac|\6\2\62;C\\aac|\3\2\62") - buf.write(";\4\2CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2") - buf.write("IIii\4\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4") - buf.write("\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVv") - buf.write("v\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2") - buf.write("\u01fa\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2") - buf.write("\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2") - buf.write("\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33") - buf.write("\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2") - buf.write("\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2") - buf.write("\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2") - buf.write("\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2") - buf.write("\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3") - buf.write("\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S") - buf.write("\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2") - buf.write("]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2") - buf.write("\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2") - buf.write("\2\2q\3\2\2\2\3\u00a9\3\2\2\2\5\u00ab\3\2\2\2\7\u00ad") - buf.write("\3\2\2\2\t\u00b0\3\2\2\2\13\u00b3\3\2\2\2\r\u00b6\3\2") - buf.write("\2\2\17\u00b9\3\2\2\2\21\u00bc\3\2\2\2\23\u00bf\3\2\2") - buf.write("\2\25\u00c2\3\2\2\2\27\u00c5\3\2\2\2\31\u00c8\3\2\2\2") - buf.write("\33\u00ca\3\2\2\2\35\u00cc\3\2\2\2\37\u00ce\3\2\2\2!\u00d0") - buf.write("\3\2\2\2#\u00d2\3\2\2\2%\u00d4\3\2\2\2\'\u00d6\3\2\2\2") - buf.write(")\u00d8\3\2\2\2+\u00da\3\2\2\2-\u00dc\3\2\2\2/\u00de\3") - 
buf.write("\2\2\2\61\u00e0\3\2\2\2\63\u00e2\3\2\2\2\65\u00e4\3\2") - buf.write("\2\2\67\u00e6\3\2\2\29\u00e8\3\2\2\2;\u00ea\3\2\2\2=\u00ee") - buf.write("\3\2\2\2?\u00f2\3\2\2\2A\u00f5\3\2\2\2C\u00fa\3\2\2\2") - buf.write("E\u0100\3\2\2\2G\u0103\3\2\2\2I\u010a\3\2\2\2K\u010f\3") - buf.write("\2\2\2M\u0115\3\2\2\2O\u0119\3\2\2\2Q\u0121\3\2\2\2S\u0126") - buf.write("\3\2\2\2U\u0129\3\2\2\2W\u012f\3\2\2\2Y\u0136\3\2\2\2") - buf.write("[\u013b\3\2\2\2]\u016b\3\2\2\2_\u016d\3\2\2\2a\u016f\3") - buf.write("\2\2\2c\u017a\3\2\2\2e\u0185\3\2\2\2g\u0187\3\2\2\2i\u0192") - buf.write("\3\2\2\2k\u01a2\3\2\2\2m\u01ac\3\2\2\2o\u01bc\3\2\2\2") - buf.write("q\u01c0\3\2\2\2s\u01c7\3\2\2\2u\u01c9\3\2\2\2w\u01cb\3") - buf.write("\2\2\2y\u01cd\3\2\2\2{\u01cf\3\2\2\2}\u01d1\3\2\2\2\177") - buf.write("\u01d3\3\2\2\2\u0081\u01d5\3\2\2\2\u0083\u01d7\3\2\2\2") - buf.write("\u0085\u01d9\3\2\2\2\u0087\u01db\3\2\2\2\u0089\u01dd\3") - buf.write("\2\2\2\u008b\u01df\3\2\2\2\u008d\u01e1\3\2\2\2\u008f\u01e3") - buf.write("\3\2\2\2\u0091\u01e5\3\2\2\2\u0093\u01e7\3\2\2\2\u0095") - buf.write("\u01e9\3\2\2\2\u0097\u01eb\3\2\2\2\u0099\u01ed\3\2\2\2") - buf.write("\u009b\u01ef\3\2\2\2\u009d\u01f1\3\2\2\2\u009f\u01f3\3") - buf.write("\2\2\2\u00a1\u01f5\3\2\2\2\u00a3\u01f7\3\2\2\2\u00a5\u01f9") - buf.write("\3\2\2\2\u00a7\u01fb\3\2\2\2\u00a9\u00aa\7<\2\2\u00aa") - buf.write("\4\3\2\2\2\u00ab\u00ac\7A\2\2\u00ac\6\3\2\2\2\u00ad\u00ae") - buf.write("\7(\2\2\u00ae\u00af\7(\2\2\u00af\b\3\2\2\2\u00b0\u00b1") - buf.write("\7?\2\2\u00b1\u00b2\7?\2\2\u00b2\n\3\2\2\2\u00b3\u00b4") - buf.write("\7@\2\2\u00b4\u00b5\7?\2\2\u00b5\f\3\2\2\2\u00b6\u00b7") - buf.write("\7>\2\2\u00b7\u00b8\7?\2\2\u00b8\16\3\2\2\2\u00b9\u00ba") - buf.write("\7#\2\2\u00ba\u00bb\7?\2\2\u00bb\20\3\2\2\2\u00bc\u00bd") - buf.write("\7>\2\2\u00bd\u00be\7@\2\2\u00be\22\3\2\2\2\u00bf\u00c0") - buf.write("\7~\2\2\u00c0\u00c1\7~\2\2\u00c1\24\3\2\2\2\u00c2\u00c3") - buf.write("\7>\2\2\u00c3\u00c4\7>\2\2\u00c4\26\3\2\2\2\u00c5\u00c6") - buf.write("\7@\2\2\u00c6\u00c7\7@\2\2\u00c7\30\3\2\2\2\u00c8\u00c9") - buf.write("\7(\2\2\u00c9\32\3\2\2\2\u00ca\u00cb\7?\2\2\u00cb\34\3") - buf.write("\2\2\2\u00cc\u00cd\7+\2\2\u00cd\36\3\2\2\2\u00ce\u00cf") - buf.write("\7.\2\2\u00cf \3\2\2\2\u00d0\u00d1\7\60\2\2\u00d1\"\3") - buf.write("\2\2\2\u00d2\u00d3\7\61\2\2\u00d3$\3\2\2\2\u00d4\u00d5") - buf.write("\7@\2\2\u00d5&\3\2\2\2\u00d6\u00d7\7>\2\2\u00d7(\3\2\2") - buf.write("\2\u00d8\u00d9\7/\2\2\u00d9*\3\2\2\2\u00da\u00db\7\'\2") - buf.write("\2\u00db,\3\2\2\2\u00dc\u00dd\7*\2\2\u00dd.\3\2\2\2\u00de") - buf.write("\u00df\7~\2\2\u00df\60\3\2\2\2\u00e0\u00e1\7-\2\2\u00e1") - buf.write("\62\3\2\2\2\u00e2\u00e3\7=\2\2\u00e3\64\3\2\2\2\u00e4") - buf.write("\u00e5\7,\2\2\u00e5\66\3\2\2\2\u00e6\u00e7\7\u0080\2\2") - buf.write("\u00e78\3\2\2\2\u00e8\u00e9\7a\2\2\u00e9:\3\2\2\2\u00ea") - buf.write("\u00eb\5u;\2\u00eb\u00ec\5\u008fH\2\u00ec\u00ed\5{>\2") - buf.write("\u00ed<\3\2\2\2\u00ee\u00ef\5u;\2\u00ef\u00f0\5\u0099") - buf.write("M\2\u00f0\u00f1\5y=\2\u00f1>\3\2\2\2\u00f2\u00f3\5w<\2") - buf.write("\u00f3\u00f4\5\u00a5S\2\u00f4@\3\2\2\2\u00f5\u00f6\5{") - buf.write(">\2\u00f6\u00f7\5}?\2\u00f7\u00f8\5\u0099M\2\u00f8\u00f9") - buf.write("\5y=\2\u00f9B\3\2\2\2\u00fa\u00fb\5\177@\2\u00fb\u00fc") - buf.write("\5u;\2\u00fc\u00fd\5\u008bF\2\u00fd\u00fe\5\u0099M\2\u00fe") - buf.write("\u00ff\5}?\2\u00ffD\3\2\2\2\u0100\u0101\5\u0085C\2\u0101") - buf.write("\u0102\5\u0099M\2\u0102F\3\2\2\2\u0103\u0104\5\u0085C") - buf.write("\2\u0104\u0105\5\u0099M\2\u0105\u0106\5\u008fH\2\u0106") - 
buf.write("\u0107\5\u009dO\2\u0107\u0108\5\u008bF\2\u0108\u0109\5") - buf.write("\u008bF\2\u0109H\3\2\2\2\u010a\u010b\5\u008bF\2\u010b") - buf.write("\u010c\5\u0085C\2\u010c\u010d\5\u0089E\2\u010d\u010e\5") - buf.write("}?\2\u010eJ\3\2\2\2\u010f\u0110\5\u008bF\2\u0110\u0111") - buf.write("\5\u0085C\2\u0111\u0112\5\u008dG\2\u0112\u0113\5\u0085") - buf.write("C\2\u0113\u0114\5\u009bN\2\u0114L\3\2\2\2\u0115\u0116") - buf.write("\5\u008fH\2\u0116\u0117\5\u0091I\2\u0117\u0118\5\u009b") - buf.write("N\2\u0118N\3\2\2\2\u0119\u011a\5\u008fH\2\u011a\u011b") - buf.write("\5\u0091I\2\u011b\u011c\5\u009bN\2\u011c\u011d\5\u008f") - buf.write("H\2\u011d\u011e\5\u009dO\2\u011e\u011f\5\u008bF\2\u011f") - buf.write("\u0120\5\u008bF\2\u0120P\3\2\2\2\u0121\u0122\5\u008fH") - buf.write("\2\u0122\u0123\5\u009dO\2\u0123\u0124\5\u008bF\2\u0124") - buf.write("\u0125\5\u008bF\2\u0125R\3\2\2\2\u0126\u0127\5\u0091I") - buf.write("\2\u0127\u0128\5\u0097L\2\u0128T\3\2\2\2\u0129\u012a\5") - buf.write("\u0091I\2\u012a\u012b\5\u0097L\2\u012b\u012c\5{>\2\u012c") - buf.write("\u012d\5}?\2\u012d\u012e\5\u0097L\2\u012eV\3\2\2\2\u012f") - buf.write("\u0130\5\u0099M\2\u0130\u0131\5}?\2\u0131\u0132\5\u008b") - buf.write("F\2\u0132\u0133\5}?\2\u0133\u0134\5y=\2\u0134\u0135\5") - buf.write("\u009bN\2\u0135X\3\2\2\2\u0136\u0137\5\u009bN\2\u0137") - buf.write("\u0138\5\u0097L\2\u0138\u0139\5\u009dO\2\u0139\u013a\5") - buf.write("}?\2\u013aZ\3\2\2\2\u013b\u013c\5\u00a1Q\2\u013c\u013d") - buf.write("\5\u0083B\2\u013d\u013e\5}?\2\u013e\u013f\5\u0097L\2\u013f") - buf.write("\u0140\5}?\2\u0140\\\3\2\2\2\u0141\u0143\5s:\2\u0142\u0141") - buf.write("\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0142\3\2\2\2\u0144") - buf.write("\u0145\3\2\2\2\u0145\u014d\3\2\2\2\u0146\u014a\7\60\2") - buf.write("\2\u0147\u0149\5s:\2\u0148\u0147\3\2\2\2\u0149\u014c\3") - buf.write("\2\2\2\u014a\u0148\3\2\2\2\u014a\u014b\3\2\2\2\u014b\u014e") - buf.write("\3\2\2\2\u014c\u014a\3\2\2\2\u014d\u0146\3\2\2\2\u014d") - buf.write("\u014e\3\2\2\2\u014e\u0158\3\2\2\2\u014f\u0151\5}?\2\u0150") - buf.write("\u0152\t\2\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2\2\2") - buf.write("\u0152\u0154\3\2\2\2\u0153\u0155\5s:\2\u0154\u0153\3\2") - buf.write("\2\2\u0155\u0156\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157") - buf.write("\3\2\2\2\u0157\u0159\3\2\2\2\u0158\u014f\3\2\2\2\u0158") - buf.write("\u0159\3\2\2\2\u0159\u016c\3\2\2\2\u015a\u015c\7\60\2") - buf.write("\2\u015b\u015d\5s:\2\u015c\u015b\3\2\2\2\u015d\u015e\3") - buf.write("\2\2\2\u015e\u015c\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0169") - buf.write("\3\2\2\2\u0160\u0162\5}?\2\u0161\u0163\t\2\2\2\u0162\u0161") - buf.write("\3\2\2\2\u0162\u0163\3\2\2\2\u0163\u0165\3\2\2\2\u0164") - buf.write("\u0166\5s:\2\u0165\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167") - buf.write("\u0165\3\2\2\2\u0167\u0168\3\2\2\2\u0168\u016a\3\2\2\2") - buf.write("\u0169\u0160\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016c\3") - buf.write("\2\2\2\u016b\u0142\3\2\2\2\u016b\u015a\3\2\2\2\u016c^") - buf.write("\3\2\2\2\u016d\u016e\5a\61\2\u016e`\3\2\2\2\u016f\u0175") - buf.write("\7$\2\2\u0170\u0171\7^\2\2\u0171\u0174\7$\2\2\u0172\u0174") - buf.write("\n\3\2\2\u0173\u0170\3\2\2\2\u0173\u0172\3\2\2\2\u0174") - buf.write("\u0177\3\2\2\2\u0175\u0173\3\2\2\2\u0175\u0176\3\2\2\2") - buf.write("\u0176\u0178\3\2\2\2\u0177\u0175\3\2\2\2\u0178\u0179\7") - buf.write("$\2\2\u0179b\3\2\2\2\u017a\u0180\7$\2\2\u017b\u017c\7") - buf.write("$\2\2\u017c\u017f\7$\2\2\u017d\u017f\n\3\2\2\u017e\u017b") - buf.write("\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2\2\2\u0180") - 
buf.write("\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0183\3\2\2\2") - buf.write("\u0182\u0180\3\2\2\2\u0183\u0184\7$\2\2\u0184d\3\2\2\2") - buf.write("\u0185\u0186\5g\64\2\u0186f\3\2\2\2\u0187\u018d\7)\2\2") - buf.write("\u0188\u0189\7^\2\2\u0189\u018c\7)\2\2\u018a\u018c\n\4") - buf.write("\2\2\u018b\u0188\3\2\2\2\u018b\u018a\3\2\2\2\u018c\u018f") - buf.write("\3\2\2\2\u018d\u018b\3\2\2\2\u018d\u018e\3\2\2\2\u018e") - buf.write("\u0190\3\2\2\2\u018f\u018d\3\2\2\2\u0190\u0191\7)\2\2") - buf.write("\u0191h\3\2\2\2\u0192\u0198\7)\2\2\u0193\u0194\7)\2\2") - buf.write("\u0194\u0197\7)\2\2\u0195\u0197\n\4\2\2\u0196\u0193\3") - buf.write("\2\2\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196") - buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\3\2\2\2\u019a") - buf.write("\u0198\3\2\2\2\u019b\u019c\7)\2\2\u019cj\3\2\2\2\u019d") - buf.write("\u019e\7/\2\2\u019e\u01a3\7/\2\2\u019f\u01a0\7\61\2\2") - buf.write("\u01a0\u01a3\7\61\2\2\u01a1\u01a3\7%\2\2\u01a2\u019d\3") - buf.write("\2\2\2\u01a2\u019f\3\2\2\2\u01a2\u01a1\3\2\2\2\u01a3\u01a7") - buf.write("\3\2\2\2\u01a4\u01a6\n\5\2\2\u01a5\u01a4\3\2\2\2\u01a6") - buf.write("\u01a9\3\2\2\2\u01a7\u01a5\3\2\2\2\u01a7\u01a8\3\2\2\2") - buf.write("\u01a8\u01aa\3\2\2\2\u01a9\u01a7\3\2\2\2\u01aa\u01ab\b") - buf.write("\66\2\2\u01abl\3\2\2\2\u01ac\u01ad\7\61\2\2\u01ad\u01ae") - buf.write("\7,\2\2\u01ae\u01b2\3\2\2\2\u01af\u01b1\13\2\2\2\u01b0") - buf.write("\u01af\3\2\2\2\u01b1\u01b4\3\2\2\2\u01b2\u01b3\3\2\2\2") - buf.write("\u01b2\u01b0\3\2\2\2\u01b3\u01b8\3\2\2\2\u01b4\u01b2\3") - buf.write("\2\2\2\u01b5\u01b6\7,\2\2\u01b6\u01b9\7\61\2\2\u01b7\u01b9") - buf.write("\7\2\2\3\u01b8\u01b5\3\2\2\2\u01b8\u01b7\3\2\2\2\u01b9") - buf.write("\u01ba\3\2\2\2\u01ba\u01bb\b\67\2\2\u01bbn\3\2\2\2\u01bc") - buf.write("\u01bd\t\6\2\2\u01bd\u01be\3\2\2\2\u01be\u01bf\b8\2\2") - buf.write("\u01bfp\3\2\2\2\u01c0\u01c4\t\7\2\2\u01c1\u01c3\t\b\2") - buf.write("\2\u01c2\u01c1\3\2\2\2\u01c3\u01c6\3\2\2\2\u01c4\u01c2") - buf.write("\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5r\3\2\2\2\u01c6\u01c4") - buf.write("\3\2\2\2\u01c7\u01c8\t\t\2\2\u01c8t\3\2\2\2\u01c9\u01ca") - buf.write("\t\n\2\2\u01cav\3\2\2\2\u01cb\u01cc\t\13\2\2\u01ccx\3") - buf.write("\2\2\2\u01cd\u01ce\t\f\2\2\u01cez\3\2\2\2\u01cf\u01d0") - buf.write("\t\r\2\2\u01d0|\3\2\2\2\u01d1\u01d2\t\16\2\2\u01d2~\3") - buf.write("\2\2\2\u01d3\u01d4\t\17\2\2\u01d4\u0080\3\2\2\2\u01d5") - buf.write("\u01d6\t\20\2\2\u01d6\u0082\3\2\2\2\u01d7\u01d8\t\21\2") - buf.write("\2\u01d8\u0084\3\2\2\2\u01d9\u01da\t\22\2\2\u01da\u0086") - buf.write("\3\2\2\2\u01db\u01dc\t\23\2\2\u01dc\u0088\3\2\2\2\u01dd") - buf.write("\u01de\t\24\2\2\u01de\u008a\3\2\2\2\u01df\u01e0\t\25\2") - buf.write("\2\u01e0\u008c\3\2\2\2\u01e1\u01e2\t\26\2\2\u01e2\u008e") - buf.write("\3\2\2\2\u01e3\u01e4\t\27\2\2\u01e4\u0090\3\2\2\2\u01e5") - buf.write("\u01e6\t\30\2\2\u01e6\u0092\3\2\2\2\u01e7\u01e8\t\31\2") - buf.write("\2\u01e8\u0094\3\2\2\2\u01e9\u01ea\t\32\2\2\u01ea\u0096") - buf.write("\3\2\2\2\u01eb\u01ec\t\33\2\2\u01ec\u0098\3\2\2\2\u01ed") - buf.write("\u01ee\t\34\2\2\u01ee\u009a\3\2\2\2\u01ef\u01f0\t\35\2") - buf.write("\2\u01f0\u009c\3\2\2\2\u01f1\u01f2\t\36\2\2\u01f2\u009e") - buf.write("\3\2\2\2\u01f3\u01f4\t\37\2\2\u01f4\u00a0\3\2\2\2\u01f5") - buf.write("\u01f6\t \2\2\u01f6\u00a2\3\2\2\2\u01f7\u01f8\t!\2\2\u01f8") - buf.write("\u00a4\3\2\2\2\u01f9\u01fa\t\"\2\2\u01fa\u00a6\3\2\2\2") - buf.write("\u01fb\u01fc\t#\2\2\u01fc\u00a8\3\2\2\2\33\2\u0144\u014a") - buf.write("\u014d\u0151\u0156\u0158\u015e\u0162\u0167\u0169\u016b") - 
buf.write("\u0173\u0175\u017e\u0180\u018b\u018d\u0196\u0198\u01a2") - buf.write("\u01a7\u01b2\u01b8\u01c4\3\2\3\2") + buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") + buf.write("U\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\6") + buf.write("\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3") + buf.write("\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20") + buf.write("\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25") + buf.write("\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33") + buf.write("\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37") + buf.write("\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3#\3#\3") + buf.write("#\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3") + buf.write("&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3)\3)\3") + buf.write(")\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+\3,\3,\3,\3,\3,\3,\3-\3") + buf.write("-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3\60") + buf.write("\6\60\u0148\n\60\r\60\16\60\u0149\3\60\3\60\7\60\u014e") + buf.write("\n\60\f\60\16\60\u0151\13\60\5\60\u0153\n\60\3\60\3\60") + buf.write("\5\60\u0157\n\60\3\60\6\60\u015a\n\60\r\60\16\60\u015b") + buf.write("\5\60\u015e\n\60\3\60\3\60\6\60\u0162\n\60\r\60\16\60") + buf.write("\u0163\3\60\3\60\5\60\u0168\n\60\3\60\6\60\u016b\n\60") + buf.write("\r\60\16\60\u016c\5\60\u016f\n\60\5\60\u0171\n\60\3\61") + buf.write("\3\61\3\62\3\62\3\62\3\62\7\62\u0179\n\62\f\62\16\62\u017c") + buf.write("\13\62\3\62\3\62\3\63\3\63\3\63\3\63\7\63\u0184\n\63\f") + buf.write("\63\16\63\u0187\13\63\3\63\3\63\3\64\3\64\3\65\3\65\3") + buf.write("\65\3\65\7\65\u0191\n\65\f\65\16\65\u0194\13\65\3\65\3") + buf.write("\65\3\66\3\66\3\66\3\66\7\66\u019c\n\66\f\66\16\66\u019f") + buf.write("\13\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\5\67\u01a8\n") + buf.write("\67\3\67\7\67\u01ab\n\67\f\67\16\67\u01ae\13\67\3\67\3") + buf.write("\67\38\38\38\38\78\u01b6\n8\f8\168\u01b9\138\38\38\38") + buf.write("\58\u01be\n8\38\38\39\39\39\39\3:\3:\7:\u01c8\n:\f:\16") + buf.write(":\u01cb\13:\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A") + buf.write("\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3") + buf.write("J\3K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3") + buf.write("S\3T\3T\3U\3U\3\u01b7\2V\3\3\5\4\7\5\t\6\13\7\r\b\17\t") + buf.write("\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23") + buf.write("%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36") + buf.write(";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63") + buf.write("e\64g\65i\66k\67m8o9q:s;u\2w\2y\2{\2}\2\177\2\u0081\2") + buf.write("\u0083\2\u0085\2\u0087\2\u0089\2\u008b\2\u008d\2\u008f") + buf.write("\2\u0091\2\u0093\2\u0095\2\u0097\2\u0099\2\u009b\2\u009d") + buf.write("\2\u009f\2\u00a1\2\u00a3\2\u00a5\2\u00a7\2\u00a9\2\3\2") + buf.write("$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2\13\r\17\17\"\"") + buf.write("\5\2C\\aac|\6\2\62;C\\aac|\3\2\62;\4\2CCcc\4\2DDdd\4\2") + buf.write("EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4") + buf.write("\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRr") + buf.write("r\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2") + buf.write("YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01ff\2\3\3\2\2\2\2\5") + buf.write("\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2") + buf.write("\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2") + buf.write("\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2") + buf.write("\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2") + buf.write("\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61") + 
buf.write("\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2") + buf.write("\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3") + buf.write("\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M") + buf.write("\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2") + buf.write("W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2") + buf.write("\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2") + buf.write("\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2") + buf.write("\2\2\3\u00ab\3\2\2\2\5\u00ae\3\2\2\2\7\u00b1\3\2\2\2\t") + buf.write("\u00b4\3\2\2\2\13\u00b7\3\2\2\2\r\u00ba\3\2\2\2\17\u00bd") + buf.write("\3\2\2\2\21\u00c0\3\2\2\2\23\u00c3\3\2\2\2\25\u00c6\3") + buf.write("\2\2\2\27\u00c8\3\2\2\2\31\u00ca\3\2\2\2\33\u00cc\3\2") + buf.write("\2\2\35\u00ce\3\2\2\2\37\u00d0\3\2\2\2!\u00d2\3\2\2\2") + buf.write("#\u00d4\3\2\2\2%\u00d6\3\2\2\2\'\u00d8\3\2\2\2)\u00da") + buf.write("\3\2\2\2+\u00dc\3\2\2\2-\u00de\3\2\2\2/\u00e0\3\2\2\2") + buf.write("\61\u00e2\3\2\2\2\63\u00e4\3\2\2\2\65\u00e6\3\2\2\2\67") + buf.write("\u00e8\3\2\2\29\u00ea\3\2\2\2;\u00ec\3\2\2\2=\u00f0\3") + buf.write("\2\2\2?\u00f3\3\2\2\2A\u00f7\3\2\2\2C\u00fa\3\2\2\2E\u00ff") + buf.write("\3\2\2\2G\u0105\3\2\2\2I\u0108\3\2\2\2K\u010f\3\2\2\2") + buf.write("M\u0114\3\2\2\2O\u011a\3\2\2\2Q\u011e\3\2\2\2S\u0126\3") + buf.write("\2\2\2U\u012b\3\2\2\2W\u012e\3\2\2\2Y\u0134\3\2\2\2[\u013b") + buf.write("\3\2\2\2]\u0140\3\2\2\2_\u0170\3\2\2\2a\u0172\3\2\2\2") + buf.write("c\u0174\3\2\2\2e\u017f\3\2\2\2g\u018a\3\2\2\2i\u018c\3") + buf.write("\2\2\2k\u0197\3\2\2\2m\u01a7\3\2\2\2o\u01b1\3\2\2\2q\u01c1") + buf.write("\3\2\2\2s\u01c5\3\2\2\2u\u01cc\3\2\2\2w\u01ce\3\2\2\2") + buf.write("y\u01d0\3\2\2\2{\u01d2\3\2\2\2}\u01d4\3\2\2\2\177\u01d6") + buf.write("\3\2\2\2\u0081\u01d8\3\2\2\2\u0083\u01da\3\2\2\2\u0085") + buf.write("\u01dc\3\2\2\2\u0087\u01de\3\2\2\2\u0089\u01e0\3\2\2\2") + buf.write("\u008b\u01e2\3\2\2\2\u008d\u01e4\3\2\2\2\u008f\u01e6\3") + buf.write("\2\2\2\u0091\u01e8\3\2\2\2\u0093\u01ea\3\2\2\2\u0095\u01ec") + buf.write("\3\2\2\2\u0097\u01ee\3\2\2\2\u0099\u01f0\3\2\2\2\u009b") + buf.write("\u01f2\3\2\2\2\u009d\u01f4\3\2\2\2\u009f\u01f6\3\2\2\2") + buf.write("\u00a1\u01f8\3\2\2\2\u00a3\u01fa\3\2\2\2\u00a5\u01fc\3") + buf.write("\2\2\2\u00a7\u01fe\3\2\2\2\u00a9\u0200\3\2\2\2\u00ab\u00ac") + buf.write("\7(\2\2\u00ac\u00ad\7(\2\2\u00ad\4\3\2\2\2\u00ae\u00af") + buf.write("\7?\2\2\u00af\u00b0\7?\2\2\u00b0\6\3\2\2\2\u00b1\u00b2") + buf.write("\7@\2\2\u00b2\u00b3\7?\2\2\u00b3\b\3\2\2\2\u00b4\u00b5") + buf.write("\7>\2\2\u00b5\u00b6\7?\2\2\u00b6\n\3\2\2\2\u00b7\u00b8") + buf.write("\7#\2\2\u00b8\u00b9\7?\2\2\u00b9\f\3\2\2\2\u00ba\u00bb") + buf.write("\7>\2\2\u00bb\u00bc\7@\2\2\u00bc\16\3\2\2\2\u00bd\u00be") + buf.write("\7~\2\2\u00be\u00bf\7~\2\2\u00bf\20\3\2\2\2\u00c0\u00c1") + buf.write("\7>\2\2\u00c1\u00c2\7>\2\2\u00c2\22\3\2\2\2\u00c3\u00c4") + buf.write("\7@\2\2\u00c4\u00c5\7@\2\2\u00c5\24\3\2\2\2\u00c6\u00c7") + buf.write("\7(\2\2\u00c7\26\3\2\2\2\u00c8\u00c9\7?\2\2\u00c9\30\3") + buf.write("\2\2\2\u00ca\u00cb\7+\2\2\u00cb\32\3\2\2\2\u00cc\u00cd") + buf.write("\7<\2\2\u00cd\34\3\2\2\2\u00ce\u00cf\7.\2\2\u00cf\36\3") + buf.write("\2\2\2\u00d0\u00d1\7\60\2\2\u00d1 \3\2\2\2\u00d2\u00d3") + buf.write("\7\61\2\2\u00d3\"\3\2\2\2\u00d4\u00d5\7@\2\2\u00d5$\3") + buf.write("\2\2\2\u00d6\u00d7\7>\2\2\u00d7&\3\2\2\2\u00d8\u00d9\7") + buf.write("/\2\2\u00d9(\3\2\2\2\u00da\u00db\7\'\2\2\u00db*\3\2\2") + buf.write("\2\u00dc\u00dd\7*\2\2\u00dd,\3\2\2\2\u00de\u00df\7~\2") + 
buf.write("\2\u00df.\3\2\2\2\u00e0\u00e1\7-\2\2\u00e1\60\3\2\2\2") + buf.write("\u00e2\u00e3\7A\2\2\u00e3\62\3\2\2\2\u00e4\u00e5\7=\2") + buf.write("\2\u00e5\64\3\2\2\2\u00e6\u00e7\7,\2\2\u00e7\66\3\2\2") + buf.write("\2\u00e8\u00e9\7\u0080\2\2\u00e98\3\2\2\2\u00ea\u00eb") + buf.write("\7a\2\2\u00eb:\3\2\2\2\u00ec\u00ed\5w<\2\u00ed\u00ee\5") + buf.write("\u0091I\2\u00ee\u00ef\5}?\2\u00ef<\3\2\2\2\u00f0\u00f1") + buf.write("\5w<\2\u00f1\u00f2\5\u009bN\2\u00f2>\3\2\2\2\u00f3\u00f4") + buf.write("\5w<\2\u00f4\u00f5\5\u009bN\2\u00f5\u00f6\5{>\2\u00f6") + buf.write("@\3\2\2\2\u00f7\u00f8\5y=\2\u00f8\u00f9\5\u00a7T\2\u00f9") + buf.write("B\3\2\2\2\u00fa\u00fb\5}?\2\u00fb\u00fc\5\177@\2\u00fc") + buf.write("\u00fd\5\u009bN\2\u00fd\u00fe\5{>\2\u00feD\3\2\2\2\u00ff") + buf.write("\u0100\5\u0081A\2\u0100\u0101\5w<\2\u0101\u0102\5\u008d") + buf.write("G\2\u0102\u0103\5\u009bN\2\u0103\u0104\5\177@\2\u0104") + buf.write("F\3\2\2\2\u0105\u0106\5\u0087D\2\u0106\u0107\5\u009bN") + buf.write("\2\u0107H\3\2\2\2\u0108\u0109\5\u0087D\2\u0109\u010a\5") + buf.write("\u009bN\2\u010a\u010b\5\u0091I\2\u010b\u010c\5\u009fP") + buf.write("\2\u010c\u010d\5\u008dG\2\u010d\u010e\5\u008dG\2\u010e") + buf.write("J\3\2\2\2\u010f\u0110\5\u008dG\2\u0110\u0111\5\u0087D") + buf.write("\2\u0111\u0112\5\u008bF\2\u0112\u0113\5\177@\2\u0113L") + buf.write("\3\2\2\2\u0114\u0115\5\u008dG\2\u0115\u0116\5\u0087D\2") + buf.write("\u0116\u0117\5\u008fH\2\u0117\u0118\5\u0087D\2\u0118\u0119") + buf.write("\5\u009dO\2\u0119N\3\2\2\2\u011a\u011b\5\u0091I\2\u011b") + buf.write("\u011c\5\u0093J\2\u011c\u011d\5\u009dO\2\u011dP\3\2\2") + buf.write("\2\u011e\u011f\5\u0091I\2\u011f\u0120\5\u0093J\2\u0120") + buf.write("\u0121\5\u009dO\2\u0121\u0122\5\u0091I\2\u0122\u0123\5") + buf.write("\u009fP\2\u0123\u0124\5\u008dG\2\u0124\u0125\5\u008dG") + buf.write("\2\u0125R\3\2\2\2\u0126\u0127\5\u0091I\2\u0127\u0128\5") + buf.write("\u009fP\2\u0128\u0129\5\u008dG\2\u0129\u012a\5\u008dG") + buf.write("\2\u012aT\3\2\2\2\u012b\u012c\5\u0093J\2\u012c\u012d\5") + buf.write("\u0099M\2\u012dV\3\2\2\2\u012e\u012f\5\u0093J\2\u012f") + buf.write("\u0130\5\u0099M\2\u0130\u0131\5}?\2\u0131\u0132\5\177") + buf.write("@\2\u0132\u0133\5\u0099M\2\u0133X\3\2\2\2\u0134\u0135") + buf.write("\5\u009bN\2\u0135\u0136\5\177@\2\u0136\u0137\5\u008dG") + buf.write("\2\u0137\u0138\5\177@\2\u0138\u0139\5{>\2\u0139\u013a") + buf.write("\5\u009dO\2\u013aZ\3\2\2\2\u013b\u013c\5\u009dO\2\u013c") + buf.write("\u013d\5\u0099M\2\u013d\u013e\5\u009fP\2\u013e\u013f\5") + buf.write("\177@\2\u013f\\\3\2\2\2\u0140\u0141\5\u00a3R\2\u0141\u0142") + buf.write("\5\u0085C\2\u0142\u0143\5\177@\2\u0143\u0144\5\u0099M") + buf.write("\2\u0144\u0145\5\177@\2\u0145^\3\2\2\2\u0146\u0148\5u") + buf.write(";\2\u0147\u0146\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u0147") + buf.write("\3\2\2\2\u0149\u014a\3\2\2\2\u014a\u0152\3\2\2\2\u014b") + buf.write("\u014f\7\60\2\2\u014c\u014e\5u;\2\u014d\u014c\3\2\2\2") + buf.write("\u014e\u0151\3\2\2\2\u014f\u014d\3\2\2\2\u014f\u0150\3") + buf.write("\2\2\2\u0150\u0153\3\2\2\2\u0151\u014f\3\2\2\2\u0152\u014b") + buf.write("\3\2\2\2\u0152\u0153\3\2\2\2\u0153\u015d\3\2\2\2\u0154") + buf.write("\u0156\5\177@\2\u0155\u0157\t\2\2\2\u0156\u0155\3\2\2") + buf.write("\2\u0156\u0157\3\2\2\2\u0157\u0159\3\2\2\2\u0158\u015a") + buf.write("\5u;\2\u0159\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u0159") + buf.write("\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u015e\3\2\2\2\u015d") + buf.write("\u0154\3\2\2\2\u015d\u015e\3\2\2\2\u015e\u0171\3\2\2\2") + 
buf.write("\u015f\u0161\7\60\2\2\u0160\u0162\5u;\2\u0161\u0160\3") + buf.write("\2\2\2\u0162\u0163\3\2\2\2\u0163\u0161\3\2\2\2\u0163\u0164") + buf.write("\3\2\2\2\u0164\u016e\3\2\2\2\u0165\u0167\5\177@\2\u0166") + buf.write("\u0168\t\2\2\2\u0167\u0166\3\2\2\2\u0167\u0168\3\2\2\2") + buf.write("\u0168\u016a\3\2\2\2\u0169\u016b\5u;\2\u016a\u0169\3\2") + buf.write("\2\2\u016b\u016c\3\2\2\2\u016c\u016a\3\2\2\2\u016c\u016d") + buf.write("\3\2\2\2\u016d\u016f\3\2\2\2\u016e\u0165\3\2\2\2\u016e") + buf.write("\u016f\3\2\2\2\u016f\u0171\3\2\2\2\u0170\u0147\3\2\2\2") + buf.write("\u0170\u015f\3\2\2\2\u0171`\3\2\2\2\u0172\u0173\5c\62") + buf.write("\2\u0173b\3\2\2\2\u0174\u017a\7$\2\2\u0175\u0176\7^\2") + buf.write("\2\u0176\u0179\7$\2\2\u0177\u0179\n\3\2\2\u0178\u0175") + buf.write("\3\2\2\2\u0178\u0177\3\2\2\2\u0179\u017c\3\2\2\2\u017a") + buf.write("\u0178\3\2\2\2\u017a\u017b\3\2\2\2\u017b\u017d\3\2\2\2") + buf.write("\u017c\u017a\3\2\2\2\u017d\u017e\7$\2\2\u017ed\3\2\2\2") + buf.write("\u017f\u0185\7$\2\2\u0180\u0181\7$\2\2\u0181\u0184\7$") + buf.write("\2\2\u0182\u0184\n\3\2\2\u0183\u0180\3\2\2\2\u0183\u0182") + buf.write("\3\2\2\2\u0184\u0187\3\2\2\2\u0185\u0183\3\2\2\2\u0185") + buf.write("\u0186\3\2\2\2\u0186\u0188\3\2\2\2\u0187\u0185\3\2\2\2") + buf.write("\u0188\u0189\7$\2\2\u0189f\3\2\2\2\u018a\u018b\5i\65\2") + buf.write("\u018bh\3\2\2\2\u018c\u0192\7)\2\2\u018d\u018e\7^\2\2") + buf.write("\u018e\u0191\7)\2\2\u018f\u0191\n\4\2\2\u0190\u018d\3") + buf.write("\2\2\2\u0190\u018f\3\2\2\2\u0191\u0194\3\2\2\2\u0192\u0190") + buf.write("\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\3\2\2\2\u0194") + buf.write("\u0192\3\2\2\2\u0195\u0196\7)\2\2\u0196j\3\2\2\2\u0197") + buf.write("\u019d\7)\2\2\u0198\u0199\7)\2\2\u0199\u019c\7)\2\2\u019a") + buf.write("\u019c\n\4\2\2\u019b\u0198\3\2\2\2\u019b\u019a\3\2\2\2") + buf.write("\u019c\u019f\3\2\2\2\u019d\u019b\3\2\2\2\u019d\u019e\3") + buf.write("\2\2\2\u019e\u01a0\3\2\2\2\u019f\u019d\3\2\2\2\u01a0\u01a1") + buf.write("\7)\2\2\u01a1l\3\2\2\2\u01a2\u01a3\7/\2\2\u01a3\u01a8") + buf.write("\7/\2\2\u01a4\u01a5\7\61\2\2\u01a5\u01a8\7\61\2\2\u01a6") + buf.write("\u01a8\7%\2\2\u01a7\u01a2\3\2\2\2\u01a7\u01a4\3\2\2\2") + buf.write("\u01a7\u01a6\3\2\2\2\u01a8\u01ac\3\2\2\2\u01a9\u01ab\n") + buf.write("\5\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac\u01aa") + buf.write("\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01af\3\2\2\2\u01ae") + buf.write("\u01ac\3\2\2\2\u01af\u01b0\b\67\2\2\u01b0n\3\2\2\2\u01b1") + buf.write("\u01b2\7\61\2\2\u01b2\u01b3\7,\2\2\u01b3\u01b7\3\2\2\2") + buf.write("\u01b4\u01b6\13\2\2\2\u01b5\u01b4\3\2\2\2\u01b6\u01b9") + buf.write("\3\2\2\2\u01b7\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8") + buf.write("\u01bd\3\2\2\2\u01b9\u01b7\3\2\2\2\u01ba\u01bb\7,\2\2") + buf.write("\u01bb\u01be\7\61\2\2\u01bc\u01be\7\2\2\3\u01bd\u01ba") + buf.write("\3\2\2\2\u01bd\u01bc\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf") + buf.write("\u01c0\b8\2\2\u01c0p\3\2\2\2\u01c1\u01c2\t\6\2\2\u01c2") + buf.write("\u01c3\3\2\2\2\u01c3\u01c4\b9\2\2\u01c4r\3\2\2\2\u01c5") + buf.write("\u01c9\t\7\2\2\u01c6\u01c8\t\b\2\2\u01c7\u01c6\3\2\2\2") + buf.write("\u01c8\u01cb\3\2\2\2\u01c9\u01c7\3\2\2\2\u01c9\u01ca\3") + buf.write("\2\2\2\u01cat\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01cd") + buf.write("\t\t\2\2\u01cdv\3\2\2\2\u01ce\u01cf\t\n\2\2\u01cfx\3\2") + buf.write("\2\2\u01d0\u01d1\t\13\2\2\u01d1z\3\2\2\2\u01d2\u01d3\t") + buf.write("\f\2\2\u01d3|\3\2\2\2\u01d4\u01d5\t\r\2\2\u01d5~\3\2\2") + buf.write("\2\u01d6\u01d7\t\16\2\2\u01d7\u0080\3\2\2\2\u01d8\u01d9") + 
buf.write("\t\17\2\2\u01d9\u0082\3\2\2\2\u01da\u01db\t\20\2\2\u01db") + buf.write("\u0084\3\2\2\2\u01dc\u01dd\t\21\2\2\u01dd\u0086\3\2\2") + buf.write("\2\u01de\u01df\t\22\2\2\u01df\u0088\3\2\2\2\u01e0\u01e1") + buf.write("\t\23\2\2\u01e1\u008a\3\2\2\2\u01e2\u01e3\t\24\2\2\u01e3") + buf.write("\u008c\3\2\2\2\u01e4\u01e5\t\25\2\2\u01e5\u008e\3\2\2") + buf.write("\2\u01e6\u01e7\t\26\2\2\u01e7\u0090\3\2\2\2\u01e8\u01e9") + buf.write("\t\27\2\2\u01e9\u0092\3\2\2\2\u01ea\u01eb\t\30\2\2\u01eb") + buf.write("\u0094\3\2\2\2\u01ec\u01ed\t\31\2\2\u01ed\u0096\3\2\2") + buf.write("\2\u01ee\u01ef\t\32\2\2\u01ef\u0098\3\2\2\2\u01f0\u01f1") + buf.write("\t\33\2\2\u01f1\u009a\3\2\2\2\u01f2\u01f3\t\34\2\2\u01f3") + buf.write("\u009c\3\2\2\2\u01f4\u01f5\t\35\2\2\u01f5\u009e\3\2\2") + buf.write("\2\u01f6\u01f7\t\36\2\2\u01f7\u00a0\3\2\2\2\u01f8\u01f9") + buf.write("\t\37\2\2\u01f9\u00a2\3\2\2\2\u01fa\u01fb\t \2\2\u01fb") + buf.write("\u00a4\3\2\2\2\u01fc\u01fd\t!\2\2\u01fd\u00a6\3\2\2\2") + buf.write("\u01fe\u01ff\t\"\2\2\u01ff\u00a8\3\2\2\2\u0200\u0201\t") + buf.write("#\2\2\u0201\u00aa\3\2\2\2\33\2\u0149\u014f\u0152\u0156") + buf.write("\u015b\u015d\u0163\u0167\u016c\u016e\u0170\u0178\u017a") + buf.write("\u0183\u0185\u0190\u0192\u019b\u019d\u01a7\u01ac\u01b7") + buf.write("\u01bd\u01c9\3\2\3\2") return buf.getvalue() @@ -242,95 +247,95 @@ class PqlLexer(Lexer): decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] - TAXON_TAG_DELIMITER = 1 - TAXON_OPTIONAL_OPERATOR = 2 - AND = 3 - EQ = 4 - GT_EQ = 5 - LT_EQ = 6 - NOT_EQ1 = 7 - NOT_EQ2 = 8 - OR = 9 - SHIFT_LEFT = 10 - SHIFT_RIGHT = 11 - AMP = 12 - ASSIGN = 13 - CLOSE_PAREN = 14 - COMMA = 15 - DOT = 16 - FORWARD_SLASH = 17 - GT = 18 - LT = 19 - MINUS = 20 - MOD = 21 - OPEN_PAREN = 22 - PIPE = 23 - PLUS = 24 + AND = 1 + EQ = 2 + GT_EQ = 3 + LT_EQ = 4 + NOT_EQ1 = 5 + NOT_EQ2 = 6 + OR = 7 + SHIFT_LEFT = 8 + SHIFT_RIGHT = 9 + AMP = 10 + ASSIGN = 11 + CLOSE_PAREN = 12 + COLON = 13 + COMMA = 14 + DOT = 15 + FORWARD_SLASH = 16 + GT = 17 + LT = 18 + MINUS = 19 + MOD = 20 + OPEN_PAREN = 21 + PIPE = 22 + PLUS = 23 + QUESTION_MARK = 24 SCOL = 25 STAR = 26 TILDE = 27 UNDER = 28 K_AND = 29 - K_ASC = 30 - K_BY = 31 - K_DESC = 32 - K_FALSE = 33 - K_IS = 34 - K_ISNULL = 35 - K_LIKE = 36 - K_LIMIT = 37 - K_NOT = 38 - K_NOTNULL = 39 - K_NULL = 40 - K_OR = 41 - K_ORDER = 42 - K_SELECT = 43 - K_TRUE = 44 - K_WHERE = 45 - NUMERIC_LITERAL = 46 - DOUBLE_QUOTED_STRING = 47 - DOUBLE_QUOTED_STRING_TEL = 48 - DOUBLE_QUOTED_STRING_SQL = 49 - SINGLE_QUOTED_STRING = 50 - SINGLE_QUOTED_STRING_TEL = 51 - SINGLE_QUOTED_STRING_SQL = 52 - SINGLE_LINE_COMMENT = 53 - MULTILINE_COMMENT = 54 - SPACES = 55 - WORD = 56 + K_AS = 30 + K_ASC = 31 + K_BY = 32 + K_DESC = 33 + K_FALSE = 34 + K_IS = 35 + K_ISNULL = 36 + K_LIKE = 37 + K_LIMIT = 38 + K_NOT = 39 + K_NOTNULL = 40 + K_NULL = 41 + K_OR = 42 + K_ORDER = 43 + K_SELECT = 44 + K_TRUE = 45 + K_WHERE = 46 + NUMERIC_LITERAL = 47 + DOUBLE_QUOTED_STRING = 48 + DOUBLE_QUOTED_STRING_TEL = 49 + DOUBLE_QUOTED_STRING_SQL = 50 + SINGLE_QUOTED_STRING = 51 + SINGLE_QUOTED_STRING_TEL = 52 + SINGLE_QUOTED_STRING_SQL = 53 + SINGLE_LINE_COMMENT = 54 + MULTILINE_COMMENT = 55 + SPACES = 56 + WORD = 57 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] modeNames = [ "DEFAULT_MODE" ] literalNames = [ "", - "':'", "'?'", "'&&'", "'=='", "'>='", "'<='", "'!='", "'<>'", - "'||'", "'<<'", "'>>'", "'&'", "'='", "')'", "','", "'.'", "'/'", - "'>'", "'<'", "'-'", "'%'", "'('", "'|'", "'+'", "';'", "'*'", + "'&&'", "'=='", "'>='", "'<='", "'!='", 
"'<>'", "'||'", "'<<'", + "'>>'", "'&'", "'='", "')'", "':'", "','", "'.'", "'/'", "'>'", + "'<'", "'-'", "'%'", "'('", "'|'", "'+'", "'?'", "';'", "'*'", "'~'", "'_'" ] symbolicNames = [ "", - "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", "AND", "EQ", - "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", - "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", "DOT", - "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", - "PLUS", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", - "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", "K_SELECT", - "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", - "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", - "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", - "MULTILINE_COMMENT", "SPACES", "WORD" ] + "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", + "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", + "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", + "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", + "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", + "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", + "K_OR", "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ] - ruleNames = [ "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", "AND", - "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", - "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", - "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", - "PIPE", "PLUS", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", - "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", - "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", + "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", + "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", + "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", + "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", + "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD", "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index a5c7d16..c68919c 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,77 +11,89 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3:") - buf.write("\u00b2\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3;") + 
buf.write("\u00c8\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") - buf.write("\t\16\4\17\t\17\3\2\3\2\3\2\3\3\7\3#\n\3\f\3\16\3&\13") - buf.write("\3\3\3\3\3\3\4\7\4+\n\4\f\4\16\4.\13\4\3\4\3\4\6\4\62") - buf.write("\n\4\r\4\16\4\63\3\4\7\4\67\n\4\f\4\16\4:\13\4\3\4\7\4") - buf.write("=\n\4\f\4\16\4@\13\4\3\5\3\5\3\6\3\6\3\6\5\6G\n\6\3\6") - buf.write("\5\6J\n\6\3\6\5\6M\n\6\3\7\3\7\3\7\7\7R\n\7\f\7\16\7U") - buf.write("\13\7\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\7\t_\n\t\f\t\16") - buf.write("\tb\13\t\3\n\3\n\5\nf\n\n\3\13\3\13\3\13\3\f\3\f\3\f\3") - buf.write("\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\7\fx\n\f\f\f\16") - buf.write("\f{\13\f\5\f}\n\f\3\f\3\f\3\f\5\f\u0082\n\f\3\f\3\f\3") - buf.write("\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f") - buf.write("\3\f\3\f\7\f\u0096\n\f\f\f\16\f\u0099\13\f\3\r\5\r\u009c") - buf.write("\n\r\3\r\3\r\3\r\5\r\u00a1\n\r\3\r\3\r\3\r\5\r\u00a6\n") - buf.write("\r\3\16\3\16\3\16\7\16\u00ab\n\16\f\16\16\16\u00ae\13") - buf.write("\16\3\17\3\17\3\17\2\3\26\20\2\4\6\b\n\f\16\20\22\24\26") - buf.write("\30\32\34\2\13\4\2 \"\"\5\2\26\26\32\32((\5\2\23\23\27") - buf.write("\27\34\34\4\2\26\26\32\32\4\2\7\b\24\25\6\2\6\6\t\n\17") - buf.write("\17$$\4\2\5\5\37\37\4\2\13\13++\7\2##**..\60\61\64\64") - buf.write("\2\u00be\2\36\3\2\2\2\4$\3\2\2\2\6,\3\2\2\2\bA\3\2\2\2") - buf.write("\nC\3\2\2\2\fN\3\2\2\2\16V\3\2\2\2\20Y\3\2\2\2\22c\3\2") - buf.write("\2\2\24g\3\2\2\2\26\u0081\3\2\2\2\30\u009b\3\2\2\2\32") - buf.write("\u00a7\3\2\2\2\34\u00af\3\2\2\2\36\37\5\26\f\2\37 \7\2") - buf.write("\2\3 \3\3\2\2\2!#\5\6\4\2\"!\3\2\2\2#&\3\2\2\2$\"\3\2") - buf.write("\2\2$%\3\2\2\2%\'\3\2\2\2&$\3\2\2\2\'(\7\2\2\3(\5\3\2") - buf.write("\2\2)+\7\33\2\2*)\3\2\2\2+.\3\2\2\2,*\3\2\2\2,-\3\2\2") - buf.write("\2-/\3\2\2\2.,\3\2\2\2/8\5\b\5\2\60\62\7\33\2\2\61\60") - buf.write("\3\2\2\2\62\63\3\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64") - buf.write("\65\3\2\2\2\65\67\5\b\5\2\66\61\3\2\2\2\67:\3\2\2\28\66") - buf.write("\3\2\2\289\3\2\2\29>\3\2\2\2:8\3\2\2\2;=\7\33\2\2<;\3") - buf.write("\2\2\2=@\3\2\2\2><\3\2\2\2>?\3\2\2\2?\7\3\2\2\2@>\3\2") - buf.write("\2\2AB\5\n\6\2B\t\3\2\2\2CD\7-\2\2DF\5\f\7\2EG\5\16\b") - buf.write("\2FE\3\2\2\2FG\3\2\2\2GI\3\2\2\2HJ\5\20\t\2IH\3\2\2\2") - buf.write("IJ\3\2\2\2JL\3\2\2\2KM\5\24\13\2LK\3\2\2\2LM\3\2\2\2M") - buf.write("\13\3\2\2\2NS\5\26\f\2OP\7\21\2\2PR\5\26\f\2QO\3\2\2\2") - buf.write("RU\3\2\2\2SQ\3\2\2\2ST\3\2\2\2T\r\3\2\2\2US\3\2\2\2VW") - buf.write("\7/\2\2WX\5\26\f\2X\17\3\2\2\2YZ\7,\2\2Z[\7!\2\2[`\5\22") - buf.write("\n\2\\]\7\21\2\2]_\5\22\n\2^\\\3\2\2\2_b\3\2\2\2`^\3\2") - buf.write("\2\2`a\3\2\2\2a\21\3\2\2\2b`\3\2\2\2ce\5\26\f\2df\t\2") - buf.write("\2\2ed\3\2\2\2ef\3\2\2\2f\23\3\2\2\2gh\7\'\2\2hi\5\26") - buf.write("\f\2i\25\3\2\2\2jk\b\f\1\2kl\t\3\2\2l\u0082\5\26\f\rm") - buf.write("n\7\30\2\2no\5\26\f\2op\7\20\2\2p\u0082\3\2\2\2q\u0082") - buf.write("\5\34\17\2rs\5\32\16\2s|\7\30\2\2ty\5\26\f\2uv\7\21\2") - buf.write("\2vx\5\26\f\2wu\3\2\2\2x{\3\2\2\2yw\3\2\2\2yz\3\2\2\2") - buf.write("z}\3\2\2\2{y\3\2\2\2|t\3\2\2\2|}\3\2\2\2}~\3\2\2\2~\177") - buf.write("\7\20\2\2\177\u0082\3\2\2\2\u0080\u0082\5\30\r\2\u0081") - buf.write("j\3\2\2\2\u0081m\3\2\2\2\u0081q\3\2\2\2\u0081r\3\2\2\2") - buf.write("\u0081\u0080\3\2\2\2\u0082\u0097\3\2\2\2\u0083\u0084\f") - buf.write("\f\2\2\u0084\u0085\t\4\2\2\u0085\u0096\5\26\f\r\u0086") - buf.write("\u0087\f\13\2\2\u0087\u0088\t\5\2\2\u0088\u0096\5\26\f") - buf.write("\f\u0089\u008a\f\n\2\2\u008a\u008b\t\6\2\2\u008b\u0096") - 
buf.write("\5\26\f\13\u008c\u008d\f\t\2\2\u008d\u008e\t\7\2\2\u008e") - buf.write("\u0096\5\26\f\n\u008f\u0090\f\b\2\2\u0090\u0091\t\b\2") - buf.write("\2\u0091\u0096\5\26\f\t\u0092\u0093\f\7\2\2\u0093\u0094") - buf.write("\t\t\2\2\u0094\u0096\5\26\f\b\u0095\u0083\3\2\2\2\u0095") - buf.write("\u0086\3\2\2\2\u0095\u0089\3\2\2\2\u0095\u008c\3\2\2\2") - buf.write("\u0095\u008f\3\2\2\2\u0095\u0092\3\2\2\2\u0096\u0099\3") - buf.write("\2\2\2\u0097\u0095\3\2\2\2\u0097\u0098\3\2\2\2\u0098\27") - buf.write("\3\2\2\2\u0099\u0097\3\2\2\2\u009a\u009c\7\4\2\2\u009b") - buf.write("\u009a\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u00a0\3\2\2\2") - buf.write("\u009d\u009e\5\32\16\2\u009e\u009f\7\31\2\2\u009f\u00a1") - buf.write("\3\2\2\2\u00a0\u009d\3\2\2\2\u00a0\u00a1\3\2\2\2\u00a1") - buf.write("\u00a2\3\2\2\2\u00a2\u00a5\5\32\16\2\u00a3\u00a4\7\3\2") - buf.write("\2\u00a4\u00a6\5\32\16\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6") - buf.write("\3\2\2\2\u00a6\31\3\2\2\2\u00a7\u00ac\7:\2\2\u00a8\u00a9") - buf.write("\7\22\2\2\u00a9\u00ab\7:\2\2\u00aa\u00a8\3\2\2\2\u00ab") - buf.write("\u00ae\3\2\2\2\u00ac\u00aa\3\2\2\2\u00ac\u00ad\3\2\2\2") - buf.write("\u00ad\33\3\2\2\2\u00ae\u00ac\3\2\2\2\u00af\u00b0\t\n") - buf.write("\2\2\u00b0\35\3\2\2\2\26$,\638>FILS`ey|\u0081\u0095\u0097") - buf.write("\u009b\u00a0\u00a5\u00ac") + buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") + buf.write("\3\2\3\2\3\2\3\3\7\3+\n\3\f\3\16\3.\13\3\3\3\3\3\3\4\7") + buf.write("\4\63\n\4\f\4\16\4\66\13\4\3\4\3\4\6\4:\n\4\r\4\16\4;") + buf.write("\3\4\7\4?\n\4\f\4\16\4B\13\4\3\4\7\4E\n\4\f\4\16\4H\13") + buf.write("\4\3\5\3\5\3\6\3\6\3\6\5\6O\n\6\3\6\5\6R\n\6\3\6\5\6U") + buf.write("\n\6\3\7\3\7\3\7\7\7Z\n\7\f\7\16\7]\13\7\3\b\3\b\5\ba") + buf.write("\n\b\3\b\3\b\5\be\n\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\13") + buf.write("\3\13\3\13\3\13\3\13\7\13s\n\13\f\13\16\13v\13\13\3\f") + buf.write("\3\f\5\fz\n\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16") + buf.write("\3\16\3\16\3\16\3\16\5\16\u0089\n\16\3\16\3\16\3\16\3") + buf.write("\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16") + buf.write("\3\16\3\16\3\16\3\16\7\16\u009d\n\16\f\16\16\16\u00a0") + buf.write("\13\16\3\17\3\17\3\17\5\17\u00a5\n\17\3\17\3\17\3\20\3") + buf.write("\20\3\20\7\20\u00ac\n\20\f\20\16\20\u00af\13\20\3\21\5") + buf.write("\21\u00b2\n\21\3\21\3\21\3\21\5\21\u00b7\n\21\3\21\3\21") + buf.write("\3\21\5\21\u00bc\n\21\3\22\3\22\3\22\7\22\u00c1\n\22\f") + buf.write("\22\16\22\u00c4\13\22\3\23\3\23\3\23\2\3\32\24\2\4\6\b") + buf.write("\n\f\16\20\22\24\26\30\32\34\36 \"$\2\13\4\2!!##\5\2\25") + buf.write("\25\31\31))\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2") + buf.write("\5\6\23\24\6\2\4\4\7\b\r\r%%\4\2\3\3\37\37\4\2\t\t,,\7") + buf.write("\2$$++//\61\62\65\65\2\u00d2\2&\3\2\2\2\4,\3\2\2\2\6\64") + buf.write("\3\2\2\2\bI\3\2\2\2\nK\3\2\2\2\fV\3\2\2\2\16^\3\2\2\2") + buf.write("\20f\3\2\2\2\22j\3\2\2\2\24m\3\2\2\2\26w\3\2\2\2\30{\3") + buf.write("\2\2\2\32\u0088\3\2\2\2\34\u00a1\3\2\2\2\36\u00a8\3\2") + buf.write("\2\2 \u00b1\3\2\2\2\"\u00bd\3\2\2\2$\u00c5\3\2\2\2&\'") + buf.write("\5\32\16\2\'(\7\2\2\3(\3\3\2\2\2)+\5\6\4\2*)\3\2\2\2+") + buf.write(".\3\2\2\2,*\3\2\2\2,-\3\2\2\2-/\3\2\2\2.,\3\2\2\2/\60") + buf.write("\7\2\2\3\60\5\3\2\2\2\61\63\7\33\2\2\62\61\3\2\2\2\63") + buf.write("\66\3\2\2\2\64\62\3\2\2\2\64\65\3\2\2\2\65\67\3\2\2\2") + buf.write("\66\64\3\2\2\2\67@\5\b\5\28:\7\33\2\298\3\2\2\2:;\3\2") + buf.write("\2\2;9\3\2\2\2;<\3\2\2\2<=\3\2\2\2=?\5\b\5\2>9\3\2\2\2") + buf.write("?B\3\2\2\2@>\3\2\2\2@A\3\2\2\2AF\3\2\2\2B@\3\2\2\2CE\7") + 
buf.write("\33\2\2DC\3\2\2\2EH\3\2\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2") + buf.write("\2\2HF\3\2\2\2IJ\5\n\6\2J\t\3\2\2\2KL\7.\2\2LN\5\f\7\2") + buf.write("MO\5\22\n\2NM\3\2\2\2NO\3\2\2\2OQ\3\2\2\2PR\5\24\13\2") + buf.write("QP\3\2\2\2QR\3\2\2\2RT\3\2\2\2SU\5\30\r\2TS\3\2\2\2TU") + buf.write("\3\2\2\2U\13\3\2\2\2V[\5\16\b\2WX\7\20\2\2XZ\5\16\b\2") + buf.write("YW\3\2\2\2Z]\3\2\2\2[Y\3\2\2\2[\\\3\2\2\2\\\r\3\2\2\2") + buf.write("][\3\2\2\2^`\5\32\16\2_a\5\20\t\2`_\3\2\2\2`a\3\2\2\2") + buf.write("ad\3\2\2\2bc\7 \2\2ce\5 \21\2db\3\2\2\2de\3\2\2\2e\17") + buf.write("\3\2\2\2fg\7\17\2\2gh\7\17\2\2hi\5\34\17\2i\21\3\2\2\2") + buf.write("jk\7\60\2\2kl\5\32\16\2l\23\3\2\2\2mn\7-\2\2no\7\"\2\2") + buf.write("ot\5\26\f\2pq\7\20\2\2qs\5\26\f\2rp\3\2\2\2sv\3\2\2\2") + buf.write("tr\3\2\2\2tu\3\2\2\2u\25\3\2\2\2vt\3\2\2\2wy\5\32\16\2") + buf.write("xz\t\2\2\2yx\3\2\2\2yz\3\2\2\2z\27\3\2\2\2{|\7(\2\2|}") + buf.write("\5\32\16\2}\31\3\2\2\2~\177\b\16\1\2\177\u0080\t\3\2\2") + buf.write("\u0080\u0089\5\32\16\r\u0081\u0082\7\27\2\2\u0082\u0083") + buf.write("\5\32\16\2\u0083\u0084\7\16\2\2\u0084\u0089\3\2\2\2\u0085") + buf.write("\u0089\5$\23\2\u0086\u0089\5\34\17\2\u0087\u0089\5 \21") + buf.write("\2\u0088~\3\2\2\2\u0088\u0081\3\2\2\2\u0088\u0085\3\2") + buf.write("\2\2\u0088\u0086\3\2\2\2\u0088\u0087\3\2\2\2\u0089\u009e") + buf.write("\3\2\2\2\u008a\u008b\f\f\2\2\u008b\u008c\t\4\2\2\u008c") + buf.write("\u009d\5\32\16\r\u008d\u008e\f\13\2\2\u008e\u008f\t\5") + buf.write("\2\2\u008f\u009d\5\32\16\f\u0090\u0091\f\n\2\2\u0091\u0092") + buf.write("\t\6\2\2\u0092\u009d\5\32\16\13\u0093\u0094\f\t\2\2\u0094") + buf.write("\u0095\t\7\2\2\u0095\u009d\5\32\16\n\u0096\u0097\f\b\2") + buf.write("\2\u0097\u0098\t\b\2\2\u0098\u009d\5\32\16\t\u0099\u009a") + buf.write("\f\7\2\2\u009a\u009b\t\t\2\2\u009b\u009d\5\32\16\b\u009c") + buf.write("\u008a\3\2\2\2\u009c\u008d\3\2\2\2\u009c\u0090\3\2\2\2") + buf.write("\u009c\u0093\3\2\2\2\u009c\u0096\3\2\2\2\u009c\u0099\3") + buf.write("\2\2\2\u009d\u00a0\3\2\2\2\u009e\u009c\3\2\2\2\u009e\u009f") + buf.write("\3\2\2\2\u009f\33\3\2\2\2\u00a0\u009e\3\2\2\2\u00a1\u00a2") + buf.write("\5\"\22\2\u00a2\u00a4\7\27\2\2\u00a3\u00a5\5\36\20\2\u00a4") + buf.write("\u00a3\3\2\2\2\u00a4\u00a5\3\2\2\2\u00a5\u00a6\3\2\2\2") + buf.write("\u00a6\u00a7\7\16\2\2\u00a7\35\3\2\2\2\u00a8\u00ad\5\32") + buf.write("\16\2\u00a9\u00aa\7\20\2\2\u00aa\u00ac\5\32\16\2\u00ab") + buf.write("\u00a9\3\2\2\2\u00ac\u00af\3\2\2\2\u00ad\u00ab\3\2\2\2") + buf.write("\u00ad\u00ae\3\2\2\2\u00ae\37\3\2\2\2\u00af\u00ad\3\2") + buf.write("\2\2\u00b0\u00b2\7\32\2\2\u00b1\u00b0\3\2\2\2\u00b1\u00b2") + buf.write("\3\2\2\2\u00b2\u00b6\3\2\2\2\u00b3\u00b4\5\"\22\2\u00b4") + buf.write("\u00b5\7\30\2\2\u00b5\u00b7\3\2\2\2\u00b6\u00b3\3\2\2") + buf.write("\2\u00b6\u00b7\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00bb") + buf.write("\5\"\22\2\u00b9\u00ba\7\17\2\2\u00ba\u00bc\5\"\22\2\u00bb") + buf.write("\u00b9\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc!\3\2\2\2\u00bd") + buf.write("\u00c2\7;\2\2\u00be\u00bf\7\21\2\2\u00bf\u00c1\7;\2\2") + buf.write("\u00c0\u00be\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c0\3") + buf.write("\2\2\2\u00c2\u00c3\3\2\2\2\u00c3#\3\2\2\2\u00c4\u00c2") + buf.write("\3\2\2\2\u00c5\u00c6\t\n\2\2\u00c6%\3\2\2\2\30,\64;@F") + buf.write("NQT[`dty\u0088\u009c\u009e\u00a4\u00ad\u00b1\u00b6\u00bb") + buf.write("\u00c2") return buf.getvalue() @@ -95,20 +107,20 @@ class PqlParser ( Parser ): sharedContextCache = PredictionContextCache() - literalNames = [ "", "':'", "'?'", "'&&'", "'=='", "'>='", - "'<='", "'!='", "'<>'", "'||'", "'<<'", 
"'>>'", "'&'", - "'='", "')'", "','", "'.'", "'/'", "'>'", "'<'", "'-'", - "'%'", "'('", "'|'", "'+'", "';'", "'*'", "'~'", "'_'" ] - - symbolicNames = [ "", "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", - "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", - "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", - "CLOSE_PAREN", "COMMA", "DOT", "FORWARD_SLASH", "GT", - "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", - "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + literalNames = [ "", "'&&'", "'=='", "'>='", "'<='", "'!='", + "'<>'", "'||'", "'<<'", "'>>'", "'&'", "'='", "')'", + "':'", "','", "'.'", "'/'", "'>'", "'<'", "'-'", "'%'", + "'('", "'|'", "'+'", "'?'", "';'", "'*'", "'~'", "'_'" ] + + symbolicNames = [ "", "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", + "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", + "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", + "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", + "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", + "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", + "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", + "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", @@ -121,77 +133,82 @@ class PqlParser ( Parser ): RULE_sqlStmt = 3 RULE_selectStmt = 4 RULE_columns = 5 - RULE_whereClause = 6 - RULE_orderByClause = 7 - RULE_orderExpr = 8 - RULE_limitClause = 9 - RULE_expr = 10 - RULE_taxon = 11 - RULE_identifierMultipart = 12 - RULE_literalValue = 13 + RULE_column = 6 + RULE_typeCast = 7 + RULE_whereClause = 8 + RULE_orderByClause = 9 + RULE_orderExpr = 10 + RULE_limitClause = 11 + RULE_expr = 12 + RULE_function = 13 + RULE_exprList = 14 + RULE_taxon = 15 + RULE_identifierMultipart = 16 + RULE_literalValue = 17 ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", - "columns", "whereClause", "orderByClause", "orderExpr", - "limitClause", "expr", "taxon", "identifierMultipart", - "literalValue" ] + "columns", "column", "typeCast", "whereClause", "orderByClause", + "orderExpr", "limitClause", "expr", "function", "exprList", + "taxon", "identifierMultipart", "literalValue" ] EOF = Token.EOF - TAXON_TAG_DELIMITER=1 - TAXON_OPTIONAL_OPERATOR=2 - AND=3 - EQ=4 - GT_EQ=5 - LT_EQ=6 - NOT_EQ1=7 - NOT_EQ2=8 - OR=9 - SHIFT_LEFT=10 - SHIFT_RIGHT=11 - AMP=12 - ASSIGN=13 - CLOSE_PAREN=14 - COMMA=15 - DOT=16 - FORWARD_SLASH=17 - GT=18 - LT=19 - MINUS=20 - MOD=21 - OPEN_PAREN=22 - PIPE=23 - PLUS=24 + AND=1 + EQ=2 + GT_EQ=3 + LT_EQ=4 + NOT_EQ1=5 + NOT_EQ2=6 + OR=7 + SHIFT_LEFT=8 + SHIFT_RIGHT=9 + AMP=10 + ASSIGN=11 + CLOSE_PAREN=12 + COLON=13 + COMMA=14 + DOT=15 + FORWARD_SLASH=16 + GT=17 + LT=18 + MINUS=19 + MOD=20 + OPEN_PAREN=21 + PIPE=22 + PLUS=23 + QUESTION_MARK=24 SCOL=25 STAR=26 TILDE=27 UNDER=28 K_AND=29 - K_ASC=30 - K_BY=31 - K_DESC=32 - K_FALSE=33 - K_IS=34 - K_ISNULL=35 - K_LIKE=36 - K_LIMIT=37 - K_NOT=38 - K_NOTNULL=39 - K_NULL=40 - K_OR=41 - K_ORDER=42 - K_SELECT=43 - K_TRUE=44 - K_WHERE=45 - NUMERIC_LITERAL=46 - DOUBLE_QUOTED_STRING=47 - DOUBLE_QUOTED_STRING_TEL=48 - DOUBLE_QUOTED_STRING_SQL=49 - SINGLE_QUOTED_STRING=50 - SINGLE_QUOTED_STRING_TEL=51 - SINGLE_QUOTED_STRING_SQL=52 - 
SINGLE_LINE_COMMENT=53 - MULTILINE_COMMENT=54 - SPACES=55 - WORD=56 + K_AS=30 + K_ASC=31 + K_BY=32 + K_DESC=33 + K_FALSE=34 + K_IS=35 + K_ISNULL=36 + K_LIKE=37 + K_LIMIT=38 + K_NOT=39 + K_NOTNULL=40 + K_NULL=41 + K_OR=42 + K_ORDER=43 + K_SELECT=44 + K_TRUE=45 + K_WHERE=46 + NUMERIC_LITERAL=47 + DOUBLE_QUOTED_STRING=48 + DOUBLE_QUOTED_STRING_TEL=49 + DOUBLE_QUOTED_STRING_SQL=50 + SINGLE_QUOTED_STRING=51 + SINGLE_QUOTED_STRING_TEL=52 + SINGLE_QUOTED_STRING_SQL=53 + SINGLE_LINE_COMMENT=54 + MULTILINE_COMMENT=55 + SPACES=56 + WORD=57 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -241,9 +258,9 @@ def parseTel(self): self.enterRule(localctx, 0, self.RULE_parseTel) try: self.enterOuterAlt(localctx, 1) - self.state = 28 + self.state = 36 self.expr(0) - self.state = 29 + self.state = 37 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -297,17 +314,17 @@ def parsePql(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 34 + self.state = 42 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL or _la==PqlParser.K_SELECT: - self.state = 31 + self.state = 39 self.sqlStmtList() - self.state = 36 + self.state = 44 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 37 + self.state = 45 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -364,49 +381,49 @@ def sqlStmtList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 42 + self.state = 50 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL: - self.state = 39 + self.state = 47 self.match(PqlParser.SCOL) - self.state = 44 + self.state = 52 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 45 + self.state = 53 self.sqlStmt() - self.state = 54 + self.state = 62 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 47 + self.state = 55 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 46 + self.state = 54 self.match(PqlParser.SCOL) - self.state = 49 + self.state = 57 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PqlParser.SCOL): break - self.state = 51 + self.state = 59 self.sqlStmt() - self.state = 56 + self.state = 64 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - self.state = 60 + self.state = 68 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 57 + self.state = 65 self.match(PqlParser.SCOL) - self.state = 62 + self.state = 70 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -455,7 +472,7 @@ def sqlStmt(self): self.enterRule(localctx, 6, self.RULE_sqlStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 63 + self.state = 71 self.selectStmt() except RecognitionException as re: localctx.exception = re @@ -518,31 +535,31 @@ def selectStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 65 + self.state = 73 self.match(PqlParser.K_SELECT) - self.state = 66 + self.state = 74 self.columns() - self.state = 68 + self.state = 76 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_WHERE: - self.state = 67 + self.state = 75 self.whereClause() - self.state = 71 + self.state = 79 
self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ORDER: - self.state = 70 + self.state = 78 self.orderByClause() - self.state = 74 + self.state = 82 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_LIMIT: - self.state = 73 + self.state = 81 self.limitClause() @@ -561,11 +578,11 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - def expr(self, i:int=None): + def column(self, i:int=None): if i is None: - return self.getTypedRuleContexts(PqlParser.ExprContext) + return self.getTypedRuleContexts(PqlParser.ColumnContext) else: - return self.getTypedRuleContext(PqlParser.ExprContext,i) + return self.getTypedRuleContext(PqlParser.ColumnContext,i) def COMMA(self, i:int=None): @@ -601,17 +618,17 @@ def columns(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 76 - self.expr(0) - self.state = 81 + self.state = 84 + self.column() + self.state = 89 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 77 + self.state = 85 self.match(PqlParser.COMMA) - self.state = 78 - self.expr(0) - self.state = 83 + self.state = 86 + self.column() + self.state = 91 self._errHandler.sync(self) _la = self._input.LA(1) @@ -624,6 +641,143 @@ def columns(self): return localctx + class ColumnContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.value = None # ExprContext + self.type_cast = None # TypeCastContext + self.alias = None # TaxonContext + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def K_AS(self): + return self.getToken(PqlParser.K_AS, 0) + + def typeCast(self): + return self.getTypedRuleContext(PqlParser.TypeCastContext,0) + + + def taxon(self): + return self.getTypedRuleContext(PqlParser.TaxonContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_column + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterColumn" ): + listener.enterColumn(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitColumn" ): + listener.exitColumn(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitColumn" ): + return visitor.visitColumn(self) + else: + return visitor.visitChildren(self) + + + + + def column(self): + + localctx = PqlParser.ColumnContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_column) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 92 + localctx.value = self.expr(0) + self.state = 94 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.COLON: + self.state = 93 + localctx.type_cast = self.typeCast() + + + self.state = 98 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_AS: + self.state = 96 + self.match(PqlParser.K_AS) + self.state = 97 + localctx.alias = self.taxon() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class TypeCastContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def COLON(self, i:int=None): + if i is None: + return 
self.getTokens(PqlParser.COLON) + else: + return self.getToken(PqlParser.COLON, i) + + def function(self): + return self.getTypedRuleContext(PqlParser.FunctionContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_typeCast + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTypeCast" ): + listener.enterTypeCast(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTypeCast" ): + listener.exitTypeCast(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTypeCast" ): + return visitor.visitTypeCast(self) + else: + return visitor.visitChildren(self) + + + + + def typeCast(self): + + localctx = PqlParser.TypeCastContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_typeCast) + try: + self.enterOuterAlt(localctx, 1) + self.state = 100 + self.match(PqlParser.COLON) + self.state = 101 + self.match(PqlParser.COLON) + self.state = 102 + self.function() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class WhereClauseContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -660,12 +814,12 @@ def accept(self, visitor:ParseTreeVisitor): def whereClause(self): localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_whereClause) + self.enterRule(localctx, 16, self.RULE_whereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 84 + self.state = 104 self.match(PqlParser.K_WHERE) - self.state = 85 + self.state = 105 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -724,25 +878,25 @@ def accept(self, visitor:ParseTreeVisitor): def orderByClause(self): localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 14, self.RULE_orderByClause) + self.enterRule(localctx, 18, self.RULE_orderByClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 87 + self.state = 107 self.match(PqlParser.K_ORDER) - self.state = 88 + self.state = 108 self.match(PqlParser.K_BY) - self.state = 89 + self.state = 109 self.orderExpr() - self.state = 94 + self.state = 114 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 90 + self.state = 110 self.match(PqlParser.COMMA) - self.state = 91 + self.state = 111 self.orderExpr() - self.state = 96 + self.state = 116 self._errHandler.sync(self) _la = self._input.LA(1) @@ -794,17 +948,17 @@ def accept(self, visitor:ParseTreeVisitor): def orderExpr(self): localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_orderExpr) + self.enterRule(localctx, 20, self.RULE_orderExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 97 + self.state = 117 self.expr(0) - self.state = 99 + self.state = 119 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: - self.state = 98 + self.state = 118 _la = self._input.LA(1) if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): self._errHandler.recoverInline(self) @@ -859,12 +1013,12 @@ def accept(self, visitor:ParseTreeVisitor): def limitClause(self): localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_limitClause) + self.enterRule(localctx, 22, 
self.RULE_limitClause) try: self.enterOuterAlt(localctx, 1) - self.state = 101 + self.state = 121 self.match(PqlParser.K_LIMIT) - self.state = 102 + self.state = 122 localctx.limit = self.expr(0) except RecognitionException as re: localctx.exception = re @@ -884,7 +1038,6 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.unary_operator = None # Token self.right = None # ExprContext self.inner = None # ExprContext - self.function_name = None # IdentifierMultipartContext self.operator = None # Token def expr(self, i:int=None): @@ -913,16 +1066,10 @@ def literalValue(self): return self.getTypedRuleContext(PqlParser.LiteralValueContext,0) - def identifierMultipart(self): - return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) + def function(self): + return self.getTypedRuleContext(PqlParser.FunctionContext,0) - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COMMA) - else: - return self.getToken(PqlParser.COMMA, i) - def taxon(self): return self.getTypedRuleContext(PqlParser.TaxonContext,0) @@ -999,16 +1146,16 @@ def expr(self, _p:int=0): _parentState = self.state localctx = PqlParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 20 - self.enterRecursionRule(localctx, 20, self.RULE_expr, _p) + _startState = 24 + self.enterRecursionRule(localctx, 24, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 127 + self.state = 134 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,13,self._ctx) if la_ == 1: - self.state = 105 + self.state = 125 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -1016,61 +1163,37 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 106 + self.state = 126 localctx.right = self.expr(11) pass elif la_ == 2: - self.state = 107 + self.state = 127 self.match(PqlParser.OPEN_PAREN) - self.state = 108 + self.state = 128 localctx.inner = self.expr(0) - self.state = 109 + self.state = 129 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 111 + self.state = 131 self.literalValue() pass elif la_ == 4: - self.state = 112 - localctx.function_name = self.identifierMultipart() - self.state = 113 - self.match(PqlParser.OPEN_PAREN) - self.state = 122 - self._errHandler.sync(self) - _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.TAXON_OPTIONAL_OPERATOR) | (1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 114 - self.expr(0) - self.state = 119 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==PqlParser.COMMA: - self.state = 115 - self.match(PqlParser.COMMA) - self.state = 116 - self.expr(0) - self.state = 121 - self._errHandler.sync(self) - _la = self._input.LA(1) - - - - self.state = 124 - self.match(PqlParser.CLOSE_PAREN) + self.state = 132 + self.function() pass elif la_ == 5: - self.state = 126 + self.state = 133 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 149 + self.state = 156 self._errHandler.sync(self) _alt = 
self._interp.adaptivePredict(self._input,15,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -1078,18 +1201,18 @@ def expr(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 147 + self.state = 154 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,14,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 129 + self.state = 136 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 130 + self.state = 137 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -1097,7 +1220,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 131 + self.state = 138 localctx.right = self.expr(11) pass @@ -1105,11 +1228,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 132 + self.state = 139 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 133 + self.state = 140 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -1117,7 +1240,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 134 + self.state = 141 localctx.right = self.expr(10) pass @@ -1125,11 +1248,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 135 + self.state = 142 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 136 + self.state = 143 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -1137,7 +1260,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 137 + self.state = 144 localctx.right = self.expr(9) pass @@ -1145,11 +1268,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 138 + self.state = 145 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 139 + self.state = 146 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -1157,7 +1280,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 140 + self.state = 147 
localctx.right = self.expr(8) pass @@ -1165,11 +1288,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 141 + self.state = 148 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 142 + self.state = 149 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -1177,7 +1300,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 143 + self.state = 150 localctx.right = self.expr(7) pass @@ -1185,11 +1308,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 144 + self.state = 151 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 145 + self.state = 152 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -1197,12 +1320,12 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 146 + self.state = 153 localctx.right = self.expr(6) pass - self.state = 151 + self.state = 158 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,15,self._ctx) @@ -1215,6 +1338,147 @@ def expr(self, _p:int=0): return localctx + class FunctionContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.function_name = None # IdentifierMultipartContext + self.arguments = None # ExprListContext + + def OPEN_PAREN(self): + return self.getToken(PqlParser.OPEN_PAREN, 0) + + def CLOSE_PAREN(self): + return self.getToken(PqlParser.CLOSE_PAREN, 0) + + def identifierMultipart(self): + return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) + + + def exprList(self): + return self.getTypedRuleContext(PqlParser.ExprListContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_function + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterFunction" ): + listener.enterFunction(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitFunction" ): + listener.exitFunction(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitFunction" ): + return visitor.visitFunction(self) + else: + return visitor.visitChildren(self) + + + + + def function(self): + + localctx = PqlParser.FunctionContext(self, self._ctx, self.state) + self.enterRule(localctx, 26, self.RULE_function) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 159 + localctx.function_name = self.identifierMultipart() + self.state = 160 + self.match(PqlParser.OPEN_PAREN) + self.state = 162 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << 
PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): + self.state = 161 + localctx.arguments = self.exprList() + + + self.state = 164 + self.match(PqlParser.CLOSE_PAREN) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExprListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.ExprContext) + else: + return self.getTypedRuleContext(PqlParser.ExprContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def getRuleIndex(self): + return PqlParser.RULE_exprList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExprList" ): + listener.enterExprList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExprList" ): + listener.exitExprList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExprList" ): + return visitor.visitExprList(self) + else: + return visitor.visitChildren(self) + + + + + def exprList(self): + + localctx = PqlParser.ExprListContext(self, self._ctx, self.state) + self.enterRule(localctx, 28, self.RULE_exprList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 166 + self.expr(0) + self.state = 171 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 167 + self.match(PqlParser.COMMA) + self.state = 168 + self.expr(0) + self.state = 173 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class TaxonContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -1231,14 +1495,14 @@ def identifierMultipart(self, i:int=None): return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,i) - def TAXON_OPTIONAL_OPERATOR(self): - return self.getToken(PqlParser.TAXON_OPTIONAL_OPERATOR, 0) + def QUESTION_MARK(self): + return self.getToken(PqlParser.QUESTION_MARK, 0) def PIPE(self): return self.getToken(PqlParser.PIPE, 0) - def TAXON_TAG_DELIMITER(self): - return self.getToken(PqlParser.TAXON_TAG_DELIMITER, 0) + def COLON(self): + return self.getToken(PqlParser.COLON, 0) def getRuleIndex(self): return PqlParser.RULE_taxon @@ -1263,37 +1527,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 22, self.RULE_taxon) + self.enterRule(localctx, 30, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 153 + self.state = 175 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==PqlParser.TAXON_OPTIONAL_OPERATOR: - self.state = 152 - self.match(PqlParser.TAXON_OPTIONAL_OPERATOR) + if _la==PqlParser.QUESTION_MARK: + self.state = 174 + self.match(PqlParser.QUESTION_MARK) - self.state = 158 + self.state = 180 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,17,self._ctx) + la_ = 
self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: - self.state = 155 + self.state = 177 localctx.namespace = self.identifierMultipart() - self.state = 156 + self.state = 178 self.match(PqlParser.PIPE) - self.state = 160 + self.state = 182 localctx.slug = self.identifierMultipart() - self.state = 163 + self.state = 185 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,18,self._ctx) + la_ = self._interp.adaptivePredict(self._input,20,self._ctx) if la_ == 1: - self.state = 161 - self.match(PqlParser.TAXON_TAG_DELIMITER) - self.state = 162 + self.state = 183 + self.match(PqlParser.COLON) + self.state = 184 localctx.tag = self.identifierMultipart() @@ -1347,23 +1611,23 @@ def accept(self, visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 24, self.RULE_identifierMultipart) + self.enterRule(localctx, 32, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 165 + self.state = 187 self.match(PqlParser.WORD) - self.state = 170 + self.state = 192 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + _alt = self._interp.adaptivePredict(self._input,21,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 166 + self.state = 188 self.match(PqlParser.DOT) - self.state = 167 + self.state = 189 self.match(PqlParser.WORD) - self.state = 172 + self.state = 194 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,19,self._ctx) + _alt = self._interp.adaptivePredict(self._input,21,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1421,11 +1685,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_literalValue) + self.enterRule(localctx, 34, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 173 + self.state = 195 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -1445,7 +1709,7 @@ def literalValue(self): def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[10] = self.expr_sempred + self._predicates[12] = self.expr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 72789a0..31dc27c 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -62,6 +62,24 @@ def exitColumns(self, ctx:PqlParser.ColumnsContext): pass + # Enter a parse tree produced by PqlParser#column. + def enterColumn(self, ctx:PqlParser.ColumnContext): + pass + + # Exit a parse tree produced by PqlParser#column. + def exitColumn(self, ctx:PqlParser.ColumnContext): + pass + + + # Enter a parse tree produced by PqlParser#typeCast. + def enterTypeCast(self, ctx:PqlParser.TypeCastContext): + pass + + # Exit a parse tree produced by PqlParser#typeCast. 
+ def exitTypeCast(self, ctx:PqlParser.TypeCastContext): + pass + + # Enter a parse tree produced by PqlParser#whereClause. def enterWhereClause(self, ctx:PqlParser.WhereClauseContext): pass @@ -107,6 +125,24 @@ def exitExpr(self, ctx:PqlParser.ExprContext): pass + # Enter a parse tree produced by PqlParser#function. + def enterFunction(self, ctx:PqlParser.FunctionContext): + pass + + # Exit a parse tree produced by PqlParser#function. + def exitFunction(self, ctx:PqlParser.FunctionContext): + pass + + + # Enter a parse tree produced by PqlParser#exprList. + def enterExprList(self, ctx:PqlParser.ExprListContext): + pass + + # Exit a parse tree produced by PqlParser#exprList. + def exitExprList(self, ctx:PqlParser.ExprListContext): + pass + + # Enter a parse tree produced by PqlParser#taxon. def enterTaxon(self, ctx:PqlParser.TaxonContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index 3946ecf..3193f54 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -39,6 +39,16 @@ def visitColumns(self, ctx:PqlParser.ColumnsContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PqlParser#column. + def visitColumn(self, ctx:PqlParser.ColumnContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#typeCast. + def visitTypeCast(self, ctx:PqlParser.TypeCastContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PqlParser#whereClause. def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): return self.visitChildren(ctx) @@ -64,6 +74,16 @@ def visitExpr(self, ctx:PqlParser.ExprContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PqlParser#function. + def visitFunction(self, ctx:PqlParser.FunctionContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#exprList. + def visitExprList(self, ctx:PqlParser.ExprListContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PqlParser#taxon. 
def visitTaxon(self, ctx:PqlParser.TaxonContext): return self.visitChildren(ctx) diff --git a/python/tests/pql/pql_test.py b/python/tests/pql/pql_test.py index d24acf4..a0d2cdb 100644 --- a/python/tests/pql/pql_test.py +++ b/python/tests/pql/pql_test.py @@ -3,8 +3,10 @@ from antlr4 import CommonTokenStream, InputStream, ParserRuleContext from antlr4.tree import Tree -from unittest import mock, TestCase +from dataclasses import dataclass +from collections import namedtuple from typing import Optional +from unittest import mock, TestCase from pql_grammar.antlr.PqlLexer import PqlLexer from pql_grammar.antlr.PqlParser import PqlParser @@ -224,8 +226,9 @@ def _parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> op.OperatorSche # else: # return clause - if ctx.function_name: + if ctx.function(): raise NotImplementedError('Dont know how to pack functions yet') + raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') @@ -252,6 +255,7 @@ def parse_string(cls, s): class PQLTests(TestCase): + maxDiff = None def test_select_no_filter(self): @@ -259,20 +263,30 @@ def test_select_no_filter(self): select ?ns1|taxon1, ?ns2|taxon2, - slug1, - (?ns3|taxon3 + (slug2 - 1234)), - fn_4(fn_1(slug)) + slug1 as myns|slug1, + (?ns3|taxon3 + (slug2 - 1234)) as myns|custom_data, + fn_4(fn_1(slug))::TypeCast(arg1=value1) where ns6|taxon6 > 1234 and (ns0|taxon10 + 1234) == 0 """ + @dataclass + class Column: + value:str + type_cast:Optional[str] = None + alias:Optional[str] = None + columns = [] where_clause = [] class V(AssertPqlVisitor): def visitColumns(self, ctx:PqlParser.ColumnsContext): - for column in ctx.expr(): - columns.append(full_text(column)) + column : PqlParser.ColumnContext + for column in ctx.column(): + v = full_text(column.value) + type_cast = full_text(column.type_cast.function()) if column.type_cast else None + alias = full_text(column.alias) + columns.append(Column(v, type_cast, alias)) def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): ww = self._parse_where_clause_expr(ctx.expr()) where_clause.extend(ww) @@ -280,11 +294,11 @@ def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): V.parse_string(pql) assert columns == [ - '?ns1|taxon1', - '?ns2|taxon2', - 'slug1', - '(?ns3|taxon3 + (slug2 - 1234))', - 'fn_4(fn_1(slug))' + Column('?ns1|taxon1'), + Column('?ns2|taxon2'), + Column('slug1', None, 'myns|slug1'), + Column('(?ns3|taxon3 + (slug2 - 1234))', None, 'myns|custom_data'), + Column('fn_4(fn_1(slug))', 'TypeCast(arg1=value1)') ] assert where_clause == [ 'AND', From 764aefec7ecae3c3deeccbf7781df03b5eccff5d Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Wed, 11 Nov 2020 20:12:53 -0800 Subject: [PATCH 12/32] switch PQL parser to produce AST + ast.Node family of classes --- grammar/PqlParser.g4 | 12 +- python/src/pql_grammar/antlr/PqlParser.py | 566 ++++++++---------- .../pql_grammar/antlr/PqlParserListener.py | 25 +- .../src/pql_grammar/antlr/PqlParserVisitor.py | 13 +- python/src/pql_grammar/ast/__init__.py | 0 python/src/pql_grammar/ast/model.py | 80 +++ python/tests/pql/pql_ast_test.py | 358 +++++++++++ .../tests/pql/{pql_test.py => pql_atesta.py} | 0 8 files changed, 713 insertions(+), 341 deletions(-) create mode 100644 python/src/pql_grammar/ast/__init__.py create mode 100644 python/src/pql_grammar/ast/model.py create mode 100644 python/tests/pql/pql_ast_test.py rename python/tests/pql/{pql_test.py => pql_atesta.py} (100%) diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 74c8525..512bd2e 100644 --- a/grammar/PqlParser.g4 
--- a/grammar/PqlParser.g4
+++ b/grammar/PqlParser.g4 @@ -29,20 +29,19 @@ sqlStmt ; selectStmt - : K_SELECT columns + : selectClause ( whereClause )? ( orderByClause )? ( limitClause )? ; -columns: column ( COMMA column )* ; - +selectClause: K_SELECT columns ( COMMA columns )* ; // Column is a complicated structure of many parts: // {tel expression (includes taxon)}{::Type Cast function or token} {{AS} taxon-like} // Example: // (?ns3|taxon3 + (slug2 - 1234))::TypeHint(agg=ave) as ns1|custom_data1, -column: value=expr type_cast=typeCast? (K_AS alias=taxon)? ; -// this conflicts with end of taxon ":tag" +columns: value=expr (COLON COLON type_cast=function)? (K_AS alias=taxon)? ; +// TypeCasting with ::TypeCast() conflicts with end of taxon ":tag" // This means that typecasting cannot be used on naked taxon // Must wrap whatever expression into parens or other non-taxon before Type Casting // WRONG: @@ -53,7 +52,6 @@ column: value=expr type_cast=typeCast? (K_AS alias=taxon)? ; // (ns1|taxon)::TypeCast() // While SQL allows non-function and function type casts, // we stick with requireing parens always for simplicity of syntax parser. -typeCast: COLON COLON function ; whereClause : K_WHERE expr @@ -99,7 +97,7 @@ exprList: expr ( COMMA expr )* ; // TODO: TAXON_TAG_DELIMITER is being killed off. Remove when we migrate out of taxon tags. taxon: - QUESTION_MARK? + is_optional=QUESTION_MARK? ( namespace=identifierMultipart PIPE )? slug=identifierMultipart // TODO: drop this when we drop Data Tags system. diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index c68919c..9a25bc5 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -12,88 +12,85 @@ def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3;") - buf.write("\u00c8\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\u00c4\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") - buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") - buf.write("\3\2\3\2\3\2\3\3\7\3+\n\3\f\3\16\3.\13\3\3\3\3\3\3\4\7") - buf.write("\4\63\n\4\f\4\16\4\66\13\4\3\4\3\4\6\4:\n\4\r\4\16\4;") - buf.write("\3\4\7\4?\n\4\f\4\16\4B\13\4\3\4\7\4E\n\4\f\4\16\4H\13") - buf.write("\4\3\5\3\5\3\6\3\6\3\6\5\6O\n\6\3\6\5\6R\n\6\3\6\5\6U") - buf.write("\n\6\3\7\3\7\3\7\7\7Z\n\7\f\7\16\7]\13\7\3\b\3\b\5\ba") - buf.write("\n\b\3\b\3\b\5\be\n\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\13") - buf.write("\3\13\3\13\3\13\3\13\7\13s\n\13\f\13\16\13v\13\13\3\f") - buf.write("\3\f\5\fz\n\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16") - buf.write("\3\16\3\16\3\16\3\16\5\16\u0089\n\16\3\16\3\16\3\16\3") - buf.write("\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16") - buf.write("\3\16\3\16\3\16\3\16\7\16\u009d\n\16\f\16\16\16\u00a0") - buf.write("\13\16\3\17\3\17\3\17\5\17\u00a5\n\17\3\17\3\17\3\20\3") - buf.write("\20\3\20\7\20\u00ac\n\20\f\20\16\20\u00af\13\20\3\21\5") - buf.write("\21\u00b2\n\21\3\21\3\21\3\21\5\21\u00b7\n\21\3\21\3\21") - buf.write("\3\21\5\21\u00bc\n\21\3\22\3\22\3\22\7\22\u00c1\n\22\f") - buf.write("\22\16\22\u00c4\13\22\3\23\3\23\3\23\2\3\32\24\2\4\6\b") - buf.write("\n\f\16\20\22\24\26\30\32\34\36 \"$\2\13\4\2!!##\5\2\25") - buf.write("\25\31\31))\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2") - buf.write("\5\6\23\24\6\2\4\4\7\b\r\r%%\4\2\3\3\37\37\4\2\t\t,,\7") - buf.write("\2$$++//\61\62\65\65\2\u00d2\2&\3\2\2\2\4,\3\2\2\2\6\64") - 
buf.write("\3\2\2\2\bI\3\2\2\2\nK\3\2\2\2\fV\3\2\2\2\16^\3\2\2\2") - buf.write("\20f\3\2\2\2\22j\3\2\2\2\24m\3\2\2\2\26w\3\2\2\2\30{\3") - buf.write("\2\2\2\32\u0088\3\2\2\2\34\u00a1\3\2\2\2\36\u00a8\3\2") - buf.write("\2\2 \u00b1\3\2\2\2\"\u00bd\3\2\2\2$\u00c5\3\2\2\2&\'") - buf.write("\5\32\16\2\'(\7\2\2\3(\3\3\2\2\2)+\5\6\4\2*)\3\2\2\2+") - buf.write(".\3\2\2\2,*\3\2\2\2,-\3\2\2\2-/\3\2\2\2.,\3\2\2\2/\60") - buf.write("\7\2\2\3\60\5\3\2\2\2\61\63\7\33\2\2\62\61\3\2\2\2\63") - buf.write("\66\3\2\2\2\64\62\3\2\2\2\64\65\3\2\2\2\65\67\3\2\2\2") - buf.write("\66\64\3\2\2\2\67@\5\b\5\28:\7\33\2\298\3\2\2\2:;\3\2") - buf.write("\2\2;9\3\2\2\2;<\3\2\2\2<=\3\2\2\2=?\5\b\5\2>9\3\2\2\2") - buf.write("?B\3\2\2\2@>\3\2\2\2@A\3\2\2\2AF\3\2\2\2B@\3\2\2\2CE\7") - buf.write("\33\2\2DC\3\2\2\2EH\3\2\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2") - buf.write("\2\2HF\3\2\2\2IJ\5\n\6\2J\t\3\2\2\2KL\7.\2\2LN\5\f\7\2") - buf.write("MO\5\22\n\2NM\3\2\2\2NO\3\2\2\2OQ\3\2\2\2PR\5\24\13\2") - buf.write("QP\3\2\2\2QR\3\2\2\2RT\3\2\2\2SU\5\30\r\2TS\3\2\2\2TU") - buf.write("\3\2\2\2U\13\3\2\2\2V[\5\16\b\2WX\7\20\2\2XZ\5\16\b\2") - buf.write("YW\3\2\2\2Z]\3\2\2\2[Y\3\2\2\2[\\\3\2\2\2\\\r\3\2\2\2") - buf.write("][\3\2\2\2^`\5\32\16\2_a\5\20\t\2`_\3\2\2\2`a\3\2\2\2") - buf.write("ad\3\2\2\2bc\7 \2\2ce\5 \21\2db\3\2\2\2de\3\2\2\2e\17") - buf.write("\3\2\2\2fg\7\17\2\2gh\7\17\2\2hi\5\34\17\2i\21\3\2\2\2") - buf.write("jk\7\60\2\2kl\5\32\16\2l\23\3\2\2\2mn\7-\2\2no\7\"\2\2") - buf.write("ot\5\26\f\2pq\7\20\2\2qs\5\26\f\2rp\3\2\2\2sv\3\2\2\2") - buf.write("tr\3\2\2\2tu\3\2\2\2u\25\3\2\2\2vt\3\2\2\2wy\5\32\16\2") - buf.write("xz\t\2\2\2yx\3\2\2\2yz\3\2\2\2z\27\3\2\2\2{|\7(\2\2|}") - buf.write("\5\32\16\2}\31\3\2\2\2~\177\b\16\1\2\177\u0080\t\3\2\2") - buf.write("\u0080\u0089\5\32\16\r\u0081\u0082\7\27\2\2\u0082\u0083") - buf.write("\5\32\16\2\u0083\u0084\7\16\2\2\u0084\u0089\3\2\2\2\u0085") - buf.write("\u0089\5$\23\2\u0086\u0089\5\34\17\2\u0087\u0089\5 \21") - buf.write("\2\u0088~\3\2\2\2\u0088\u0081\3\2\2\2\u0088\u0085\3\2") - buf.write("\2\2\u0088\u0086\3\2\2\2\u0088\u0087\3\2\2\2\u0089\u009e") - buf.write("\3\2\2\2\u008a\u008b\f\f\2\2\u008b\u008c\t\4\2\2\u008c") - buf.write("\u009d\5\32\16\r\u008d\u008e\f\13\2\2\u008e\u008f\t\5") - buf.write("\2\2\u008f\u009d\5\32\16\f\u0090\u0091\f\n\2\2\u0091\u0092") - buf.write("\t\6\2\2\u0092\u009d\5\32\16\13\u0093\u0094\f\t\2\2\u0094") - buf.write("\u0095\t\7\2\2\u0095\u009d\5\32\16\n\u0096\u0097\f\b\2") - buf.write("\2\u0097\u0098\t\b\2\2\u0098\u009d\5\32\16\t\u0099\u009a") - buf.write("\f\7\2\2\u009a\u009b\t\t\2\2\u009b\u009d\5\32\16\b\u009c") - buf.write("\u008a\3\2\2\2\u009c\u008d\3\2\2\2\u009c\u0090\3\2\2\2") - buf.write("\u009c\u0093\3\2\2\2\u009c\u0096\3\2\2\2\u009c\u0099\3") - buf.write("\2\2\2\u009d\u00a0\3\2\2\2\u009e\u009c\3\2\2\2\u009e\u009f") - buf.write("\3\2\2\2\u009f\33\3\2\2\2\u00a0\u009e\3\2\2\2\u00a1\u00a2") - buf.write("\5\"\22\2\u00a2\u00a4\7\27\2\2\u00a3\u00a5\5\36\20\2\u00a4") - buf.write("\u00a3\3\2\2\2\u00a4\u00a5\3\2\2\2\u00a5\u00a6\3\2\2\2") - buf.write("\u00a6\u00a7\7\16\2\2\u00a7\35\3\2\2\2\u00a8\u00ad\5\32") - buf.write("\16\2\u00a9\u00aa\7\20\2\2\u00aa\u00ac\5\32\16\2\u00ab") - buf.write("\u00a9\3\2\2\2\u00ac\u00af\3\2\2\2\u00ad\u00ab\3\2\2\2") - buf.write("\u00ad\u00ae\3\2\2\2\u00ae\37\3\2\2\2\u00af\u00ad\3\2") - buf.write("\2\2\u00b0\u00b2\7\32\2\2\u00b1\u00b0\3\2\2\2\u00b1\u00b2") - buf.write("\3\2\2\2\u00b2\u00b6\3\2\2\2\u00b3\u00b4\5\"\22\2\u00b4") - buf.write("\u00b5\7\30\2\2\u00b5\u00b7\3\2\2\2\u00b6\u00b3\3\2\2") - 
buf.write("\2\u00b6\u00b7\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00bb") - buf.write("\5\"\22\2\u00b9\u00ba\7\17\2\2\u00ba\u00bc\5\"\22\2\u00bb") - buf.write("\u00b9\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc!\3\2\2\2\u00bd") - buf.write("\u00c2\7;\2\2\u00be\u00bf\7\21\2\2\u00bf\u00c1\7;\2\2") - buf.write("\u00c0\u00be\3\2\2\2\u00c1\u00c4\3\2\2\2\u00c2\u00c0\3") - buf.write("\2\2\2\u00c2\u00c3\3\2\2\2\u00c3#\3\2\2\2\u00c4\u00c2") - buf.write("\3\2\2\2\u00c5\u00c6\t\n\2\2\u00c6%\3\2\2\2\30,\64;@F") - buf.write("NQT[`dty\u0088\u009c\u009e\u00a4\u00ad\u00b1\u00b6\u00bb") - buf.write("\u00c2") + buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3\2\3\2") + buf.write("\3\2\3\3\7\3)\n\3\f\3\16\3,\13\3\3\3\3\3\3\4\7\4\61\n") + buf.write("\4\f\4\16\4\64\13\4\3\4\3\4\6\48\n\4\r\4\16\49\3\4\7\4") + buf.write("=\n\4\f\4\16\4@\13\4\3\4\7\4C\n\4\f\4\16\4F\13\4\3\5\3") + buf.write("\5\3\6\3\6\5\6L\n\6\3\6\5\6O\n\6\3\6\5\6R\n\6\3\7\3\7") + buf.write("\3\7\3\7\7\7X\n\7\f\7\16\7[\13\7\3\b\3\b\3\b\3\b\5\ba") + buf.write("\n\b\3\b\3\b\5\be\n\b\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n") + buf.write("\7\no\n\n\f\n\16\nr\13\n\3\13\3\13\5\13v\n\13\3\f\3\f") + buf.write("\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u0085") + buf.write("\n\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3") + buf.write("\r\3\r\3\r\3\r\3\r\3\r\7\r\u0099\n\r\f\r\16\r\u009c\13") + buf.write("\r\3\16\3\16\3\16\5\16\u00a1\n\16\3\16\3\16\3\17\3\17") + buf.write("\3\17\7\17\u00a8\n\17\f\17\16\17\u00ab\13\17\3\20\5\20") + buf.write("\u00ae\n\20\3\20\3\20\3\20\5\20\u00b3\n\20\3\20\3\20\3") + buf.write("\20\5\20\u00b8\n\20\3\21\3\21\3\21\7\21\u00bd\n\21\f\21") + buf.write("\16\21\u00c0\13\21\3\22\3\22\3\22\2\3\30\23\2\4\6\b\n") + buf.write("\f\16\20\22\24\26\30\32\34\36 \"\2\13\4\2!!##\5\2\25\25") + buf.write("\31\31))\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6") + buf.write("\23\24\6\2\4\4\7\b\r\r%%\4\2\3\3\37\37\4\2\t\t,,\7\2$") + buf.write("$++//\61\62\65\65\2\u00cf\2$\3\2\2\2\4*\3\2\2\2\6\62\3") + buf.write("\2\2\2\bG\3\2\2\2\nI\3\2\2\2\fS\3\2\2\2\16\\\3\2\2\2\20") + buf.write("f\3\2\2\2\22i\3\2\2\2\24s\3\2\2\2\26w\3\2\2\2\30\u0084") + buf.write("\3\2\2\2\32\u009d\3\2\2\2\34\u00a4\3\2\2\2\36\u00ad\3") + buf.write("\2\2\2 \u00b9\3\2\2\2\"\u00c1\3\2\2\2$%\5\30\r\2%&\7\2") + buf.write("\2\3&\3\3\2\2\2\')\5\6\4\2(\'\3\2\2\2),\3\2\2\2*(\3\2") + buf.write("\2\2*+\3\2\2\2+-\3\2\2\2,*\3\2\2\2-.\7\2\2\3.\5\3\2\2") + buf.write("\2/\61\7\33\2\2\60/\3\2\2\2\61\64\3\2\2\2\62\60\3\2\2") + buf.write("\2\62\63\3\2\2\2\63\65\3\2\2\2\64\62\3\2\2\2\65>\5\b\5") + buf.write("\2\668\7\33\2\2\67\66\3\2\2\289\3\2\2\29\67\3\2\2\29:") + buf.write("\3\2\2\2:;\3\2\2\2;=\5\b\5\2<\67\3\2\2\2=@\3\2\2\2><\3") + buf.write("\2\2\2>?\3\2\2\2?D\3\2\2\2@>\3\2\2\2AC\7\33\2\2BA\3\2") + buf.write("\2\2CF\3\2\2\2DB\3\2\2\2DE\3\2\2\2E\7\3\2\2\2FD\3\2\2") + buf.write("\2GH\5\n\6\2H\t\3\2\2\2IK\5\f\7\2JL\5\20\t\2KJ\3\2\2\2") + buf.write("KL\3\2\2\2LN\3\2\2\2MO\5\22\n\2NM\3\2\2\2NO\3\2\2\2OQ") + buf.write("\3\2\2\2PR\5\26\f\2QP\3\2\2\2QR\3\2\2\2R\13\3\2\2\2ST") + buf.write("\7.\2\2TY\5\16\b\2UV\7\20\2\2VX\5\16\b\2WU\3\2\2\2X[\3") + buf.write("\2\2\2YW\3\2\2\2YZ\3\2\2\2Z\r\3\2\2\2[Y\3\2\2\2\\`\5\30") + buf.write("\r\2]^\7\17\2\2^_\7\17\2\2_a\5\32\16\2`]\3\2\2\2`a\3\2") + buf.write("\2\2ad\3\2\2\2bc\7 \2\2ce\5\36\20\2db\3\2\2\2de\3\2\2") + buf.write("\2e\17\3\2\2\2fg\7\60\2\2gh\5\30\r\2h\21\3\2\2\2ij\7-") + buf.write("\2\2jk\7\"\2\2kp\5\24\13\2lm\7\20\2\2mo\5\24\13\2nl\3") + buf.write("\2\2\2or\3\2\2\2pn\3\2\2\2pq\3\2\2\2q\23\3\2\2\2rp\3\2") + 
buf.write("\2\2su\5\30\r\2tv\t\2\2\2ut\3\2\2\2uv\3\2\2\2v\25\3\2") + buf.write("\2\2wx\7(\2\2xy\5\30\r\2y\27\3\2\2\2z{\b\r\1\2{|\t\3\2") + buf.write("\2|\u0085\5\30\r\r}~\7\27\2\2~\177\5\30\r\2\177\u0080") + buf.write("\7\16\2\2\u0080\u0085\3\2\2\2\u0081\u0085\5\"\22\2\u0082") + buf.write("\u0085\5\32\16\2\u0083\u0085\5\36\20\2\u0084z\3\2\2\2") + buf.write("\u0084}\3\2\2\2\u0084\u0081\3\2\2\2\u0084\u0082\3\2\2") + buf.write("\2\u0084\u0083\3\2\2\2\u0085\u009a\3\2\2\2\u0086\u0087") + buf.write("\f\f\2\2\u0087\u0088\t\4\2\2\u0088\u0099\5\30\r\r\u0089") + buf.write("\u008a\f\13\2\2\u008a\u008b\t\5\2\2\u008b\u0099\5\30\r") + buf.write("\f\u008c\u008d\f\n\2\2\u008d\u008e\t\6\2\2\u008e\u0099") + buf.write("\5\30\r\13\u008f\u0090\f\t\2\2\u0090\u0091\t\7\2\2\u0091") + buf.write("\u0099\5\30\r\n\u0092\u0093\f\b\2\2\u0093\u0094\t\b\2") + buf.write("\2\u0094\u0099\5\30\r\t\u0095\u0096\f\7\2\2\u0096\u0097") + buf.write("\t\t\2\2\u0097\u0099\5\30\r\b\u0098\u0086\3\2\2\2\u0098") + buf.write("\u0089\3\2\2\2\u0098\u008c\3\2\2\2\u0098\u008f\3\2\2\2") + buf.write("\u0098\u0092\3\2\2\2\u0098\u0095\3\2\2\2\u0099\u009c\3") + buf.write("\2\2\2\u009a\u0098\3\2\2\2\u009a\u009b\3\2\2\2\u009b\31") + buf.write("\3\2\2\2\u009c\u009a\3\2\2\2\u009d\u009e\5 \21\2\u009e") + buf.write("\u00a0\7\27\2\2\u009f\u00a1\5\34\17\2\u00a0\u009f\3\2") + buf.write("\2\2\u00a0\u00a1\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2\u00a3") + buf.write("\7\16\2\2\u00a3\33\3\2\2\2\u00a4\u00a9\5\30\r\2\u00a5") + buf.write("\u00a6\7\20\2\2\u00a6\u00a8\5\30\r\2\u00a7\u00a5\3\2\2") + buf.write("\2\u00a8\u00ab\3\2\2\2\u00a9\u00a7\3\2\2\2\u00a9\u00aa") + buf.write("\3\2\2\2\u00aa\35\3\2\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00ae") + buf.write("\7\32\2\2\u00ad\u00ac\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae") + buf.write("\u00b2\3\2\2\2\u00af\u00b0\5 \21\2\u00b0\u00b1\7\30\2") + buf.write("\2\u00b1\u00b3\3\2\2\2\u00b2\u00af\3\2\2\2\u00b2\u00b3") + buf.write("\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b7\5 \21\2\u00b5") + buf.write("\u00b6\7\17\2\2\u00b6\u00b8\5 \21\2\u00b7\u00b5\3\2\2") + buf.write("\2\u00b7\u00b8\3\2\2\2\u00b8\37\3\2\2\2\u00b9\u00be\7") + buf.write(";\2\2\u00ba\u00bb\7\21\2\2\u00bb\u00bd\7;\2\2\u00bc\u00ba") + buf.write("\3\2\2\2\u00bd\u00c0\3\2\2\2\u00be\u00bc\3\2\2\2\u00be") + buf.write("\u00bf\3\2\2\2\u00bf!\3\2\2\2\u00c0\u00be\3\2\2\2\u00c1") + buf.write("\u00c2\t\n\2\2\u00c2#\3\2\2\2\30*\629>DKNQY`dpu\u0084") + buf.write("\u0098\u009a\u00a0\u00a9\u00ad\u00b2\u00b7\u00be") return buf.getvalue() @@ -132,22 +129,21 @@ class PqlParser ( Parser ): RULE_sqlStmtList = 2 RULE_sqlStmt = 3 RULE_selectStmt = 4 - RULE_columns = 5 - RULE_column = 6 - RULE_typeCast = 7 - RULE_whereClause = 8 - RULE_orderByClause = 9 - RULE_orderExpr = 10 - RULE_limitClause = 11 - RULE_expr = 12 - RULE_function = 13 - RULE_exprList = 14 - RULE_taxon = 15 - RULE_identifierMultipart = 16 - RULE_literalValue = 17 + RULE_selectClause = 5 + RULE_columns = 6 + RULE_whereClause = 7 + RULE_orderByClause = 8 + RULE_orderExpr = 9 + RULE_limitClause = 10 + RULE_expr = 11 + RULE_function = 12 + RULE_exprList = 13 + RULE_taxon = 14 + RULE_identifierMultipart = 15 + RULE_literalValue = 16 ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", - "columns", "column", "typeCast", "whereClause", "orderByClause", + "selectClause", "columns", "whereClause", "orderByClause", "orderExpr", "limitClause", "expr", "function", "exprList", "taxon", "identifierMultipart", "literalValue" ] @@ -258,9 +254,9 @@ def parseTel(self): self.enterRule(localctx, 0, self.RULE_parseTel) try: 
self.enterOuterAlt(localctx, 1) - self.state = 36 + self.state = 34 self.expr(0) - self.state = 37 + self.state = 35 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -314,17 +310,17 @@ def parsePql(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 42 + self.state = 40 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL or _la==PqlParser.K_SELECT: - self.state = 39 + self.state = 37 self.sqlStmtList() - self.state = 44 + self.state = 42 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 45 + self.state = 43 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -381,49 +377,49 @@ def sqlStmtList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 50 + self.state = 48 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL: - self.state = 47 + self.state = 45 self.match(PqlParser.SCOL) - self.state = 52 + self.state = 50 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 53 + self.state = 51 self.sqlStmt() - self.state = 62 + self.state = 60 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 55 + self.state = 53 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 54 + self.state = 52 self.match(PqlParser.SCOL) - self.state = 57 + self.state = 55 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PqlParser.SCOL): break - self.state = 59 + self.state = 57 self.sqlStmt() - self.state = 64 + self.state = 62 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - self.state = 68 + self.state = 66 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 65 + self.state = 63 self.match(PqlParser.SCOL) - self.state = 70 + self.state = 68 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -472,7 +468,7 @@ def sqlStmt(self): self.enterRule(localctx, 6, self.RULE_sqlStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 71 + self.state = 69 self.selectStmt() except RecognitionException as re: localctx.exception = re @@ -489,11 +485,8 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - def K_SELECT(self): - return self.getToken(PqlParser.K_SELECT, 0) - - def columns(self): - return self.getTypedRuleContext(PqlParser.ColumnsContext,0) + def selectClause(self): + return self.getTypedRuleContext(PqlParser.SelectClauseContext,0) def whereClause(self): @@ -535,31 +528,29 @@ def selectStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) + self.state = 71 + self.selectClause() self.state = 73 - self.match(PqlParser.K_SELECT) - self.state = 74 - self.columns() - self.state = 76 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_WHERE: - self.state = 75 + self.state = 72 self.whereClause() - self.state = 79 + self.state = 76 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ORDER: - self.state = 78 + self.state = 75 self.orderByClause() - self.state = 82 + self.state = 79 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_LIMIT: - self.state = 81 + self.state = 78 self.limitClause() @@ 
-572,17 +563,20 @@ def selectStmt(self): return localctx - class ColumnsContext(ParserRuleContext): + class SelectClauseContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - def column(self, i:int=None): + def K_SELECT(self): + return self.getToken(PqlParser.K_SELECT, 0) + + def columns(self, i:int=None): if i is None: - return self.getTypedRuleContexts(PqlParser.ColumnContext) + return self.getTypedRuleContexts(PqlParser.ColumnsContext) else: - return self.getTypedRuleContext(PqlParser.ColumnContext,i) + return self.getTypedRuleContext(PqlParser.ColumnsContext,i) def COMMA(self, i:int=None): @@ -592,43 +586,45 @@ def COMMA(self, i:int=None): return self.getToken(PqlParser.COMMA, i) def getRuleIndex(self): - return PqlParser.RULE_columns + return PqlParser.RULE_selectClause def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterColumns" ): - listener.enterColumns(self) + if hasattr( listener, "enterSelectClause" ): + listener.enterSelectClause(self) def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitColumns" ): - listener.exitColumns(self) + if hasattr( listener, "exitSelectClause" ): + listener.exitSelectClause(self) def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitColumns" ): - return visitor.visitColumns(self) + if hasattr( visitor, "visitSelectClause" ): + return visitor.visitSelectClause(self) else: return visitor.visitChildren(self) - def columns(self): + def selectClause(self): - localctx = PqlParser.ColumnsContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_columns) + localctx = PqlParser.SelectClauseContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_selectClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 84 - self.column() - self.state = 89 + self.state = 81 + self.match(PqlParser.K_SELECT) + self.state = 82 + self.columns() + self.state = 87 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 85 + self.state = 83 self.match(PqlParser.COMMA) - self.state = 86 - self.column() - self.state = 91 + self.state = 84 + self.columns() + self.state = 89 self._errHandler.sync(self) _la = self._input.LA(1) @@ -641,24 +637,30 @@ def columns(self): return localctx - class ColumnContext(ParserRuleContext): + class ColumnsContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.value = None # ExprContext - self.type_cast = None # TypeCastContext + self.type_cast = None # FunctionContext self.alias = None # TaxonContext def expr(self): return self.getTypedRuleContext(PqlParser.ExprContext,0) + def COLON(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COLON) + else: + return self.getToken(PqlParser.COLON, i) + def K_AS(self): return self.getToken(PqlParser.K_AS, 0) - def typeCast(self): - return self.getTypedRuleContext(PqlParser.TypeCastContext,0) + def function(self): + return self.getTypedRuleContext(PqlParser.FunctionContext,0) def taxon(self): @@ -666,40 +668,44 @@ def taxon(self): def getRuleIndex(self): - return PqlParser.RULE_column + return PqlParser.RULE_columns def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterColumn" ): - listener.enterColumn(self) + if hasattr( listener, "enterColumns" ): + 
listener.enterColumns(self) def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitColumn" ): - listener.exitColumn(self) + if hasattr( listener, "exitColumns" ): + listener.exitColumns(self) def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitColumn" ): - return visitor.visitColumn(self) + if hasattr( visitor, "visitColumns" ): + return visitor.visitColumns(self) else: return visitor.visitChildren(self) - def column(self): + def columns(self): - localctx = PqlParser.ColumnContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_column) + localctx = PqlParser.ColumnsContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_columns) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 92 + self.state = 90 localctx.value = self.expr(0) self.state = 94 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.COLON: + self.state = 91 + self.match(PqlParser.COLON) + self.state = 92 + self.match(PqlParser.COLON) self.state = 93 - localctx.type_cast = self.typeCast() + localctx.type_cast = self.function() self.state = 98 @@ -721,63 +727,6 @@ def column(self): return localctx - class TypeCastContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def COLON(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COLON) - else: - return self.getToken(PqlParser.COLON, i) - - def function(self): - return self.getTypedRuleContext(PqlParser.FunctionContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_typeCast - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTypeCast" ): - listener.enterTypeCast(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTypeCast" ): - listener.exitTypeCast(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTypeCast" ): - return visitor.visitTypeCast(self) - else: - return visitor.visitChildren(self) - - - - - def typeCast(self): - - localctx = PqlParser.TypeCastContext(self, self._ctx, self.state) - self.enterRule(localctx, 14, self.RULE_typeCast) - try: - self.enterOuterAlt(localctx, 1) - self.state = 100 - self.match(PqlParser.COLON) - self.state = 101 - self.match(PqlParser.COLON) - self.state = 102 - self.function() - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - class WhereClauseContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -814,12 +763,12 @@ def accept(self, visitor:ParseTreeVisitor): def whereClause(self): localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_whereClause) + self.enterRule(localctx, 14, self.RULE_whereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 104 + self.state = 100 self.match(PqlParser.K_WHERE) - self.state = 105 + self.state = 101 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -878,25 +827,25 @@ def accept(self, visitor:ParseTreeVisitor): def orderByClause(self): localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_orderByClause) + self.enterRule(localctx, 16, self.RULE_orderByClause) self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 107 + self.state = 103 self.match(PqlParser.K_ORDER) - self.state = 108 + self.state = 104 self.match(PqlParser.K_BY) - self.state = 109 + self.state = 105 self.orderExpr() - self.state = 114 + self.state = 110 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 110 + self.state = 106 self.match(PqlParser.COMMA) - self.state = 111 + self.state = 107 self.orderExpr() - self.state = 116 + self.state = 112 self._errHandler.sync(self) _la = self._input.LA(1) @@ -948,17 +897,17 @@ def accept(self, visitor:ParseTreeVisitor): def orderExpr(self): localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 20, self.RULE_orderExpr) + self.enterRule(localctx, 18, self.RULE_orderExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 117 + self.state = 113 self.expr(0) - self.state = 119 + self.state = 115 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: - self.state = 118 + self.state = 114 _la = self._input.LA(1) if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): self._errHandler.recoverInline(self) @@ -1013,12 +962,12 @@ def accept(self, visitor:ParseTreeVisitor): def limitClause(self): localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 22, self.RULE_limitClause) + self.enterRule(localctx, 20, self.RULE_limitClause) try: self.enterOuterAlt(localctx, 1) - self.state = 121 + self.state = 117 self.match(PqlParser.K_LIMIT) - self.state = 122 + self.state = 118 localctx.limit = self.expr(0) except RecognitionException as re: localctx.exception = re @@ -1146,16 +1095,16 @@ def expr(self, _p:int=0): _parentState = self.state localctx = PqlParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 24 - self.enterRecursionRule(localctx, 24, self.RULE_expr, _p) + _startState = 22 + self.enterRecursionRule(localctx, 22, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 134 + self.state = 130 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,13,self._ctx) if la_ == 1: - self.state = 125 + self.state = 121 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -1163,37 +1112,37 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 126 + self.state = 122 localctx.right = self.expr(11) pass elif la_ == 2: - self.state = 127 + self.state = 123 self.match(PqlParser.OPEN_PAREN) - self.state = 128 + self.state = 124 localctx.inner = self.expr(0) - self.state = 129 + self.state = 125 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 131 + self.state = 127 self.literalValue() pass elif la_ == 4: - self.state = 132 + self.state = 128 self.function() pass elif la_ == 5: - self.state = 133 + self.state = 129 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 156 + self.state = 152 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,15,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -1201,18 +1150,18 @@ def expr(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 154 + self.state = 150 self._errHandler.sync(self) la_ = 
self._interp.adaptivePredict(self._input,14,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 136 + self.state = 132 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 137 + self.state = 133 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -1220,7 +1169,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 138 + self.state = 134 localctx.right = self.expr(11) pass @@ -1228,11 +1177,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 139 + self.state = 135 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 140 + self.state = 136 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -1240,7 +1189,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 141 + self.state = 137 localctx.right = self.expr(10) pass @@ -1248,11 +1197,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 142 + self.state = 138 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 143 + self.state = 139 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -1260,7 +1209,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 144 + self.state = 140 localctx.right = self.expr(9) pass @@ -1268,11 +1217,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 145 + self.state = 141 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 146 + self.state = 142 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -1280,7 +1229,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 147 + self.state = 143 localctx.right = self.expr(8) pass @@ -1288,11 +1237,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 148 + self.state = 144 if not 
self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 149 + self.state = 145 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -1300,7 +1249,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 150 + self.state = 146 localctx.right = self.expr(7) pass @@ -1308,11 +1257,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 151 + self.state = 147 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 152 + self.state = 148 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -1320,12 +1269,12 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 153 + self.state = 149 localctx.right = self.expr(6) pass - self.state = 158 + self.state = 154 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,15,self._ctx) @@ -1383,23 +1332,23 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PqlParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_function) + self.enterRule(localctx, 24, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 159 + self.state = 155 localctx.function_name = self.identifierMultipart() - self.state = 160 + self.state = 156 self.match(PqlParser.OPEN_PAREN) - self.state = 162 + self.state = 158 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 161 + self.state = 157 localctx.arguments = self.exprList() - self.state = 164 + self.state = 160 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -1452,21 +1401,21 @@ def accept(self, visitor:ParseTreeVisitor): def exprList(self): localctx = PqlParser.ExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 28, self.RULE_exprList) + self.enterRule(localctx, 26, self.RULE_exprList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 166 + self.state = 162 self.expr(0) - self.state = 171 + self.state = 167 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 167 + self.state = 163 self.match(PqlParser.COMMA) - self.state = 168 + self.state = 164 self.expr(0) - self.state = 173 + self.state = 169 self._errHandler.sync(self) _la = self._input.LA(1) @@ -1484,6 +1433,7 @@ class TaxonContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser + self.is_optional = None # Token self.namespace = None # IdentifierMultipartContext self.slug = None # 
IdentifierMultipartContext self.tag = None # IdentifierMultipartContext @@ -1495,15 +1445,15 @@ def identifierMultipart(self, i:int=None): return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,i) - def QUESTION_MARK(self): - return self.getToken(PqlParser.QUESTION_MARK, 0) - def PIPE(self): return self.getToken(PqlParser.PIPE, 0) def COLON(self): return self.getToken(PqlParser.COLON, 0) + def QUESTION_MARK(self): + return self.getToken(PqlParser.QUESTION_MARK, 0) + def getRuleIndex(self): return PqlParser.RULE_taxon @@ -1527,37 +1477,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 30, self.RULE_taxon) + self.enterRule(localctx, 28, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 175 + self.state = 171 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 174 - self.match(PqlParser.QUESTION_MARK) + self.state = 170 + localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 180 + self.state = 176 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: - self.state = 177 + self.state = 173 localctx.namespace = self.identifierMultipart() - self.state = 178 + self.state = 174 self.match(PqlParser.PIPE) - self.state = 182 + self.state = 178 localctx.slug = self.identifierMultipart() - self.state = 185 + self.state = 181 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,20,self._ctx) if la_ == 1: - self.state = 183 + self.state = 179 self.match(PqlParser.COLON) - self.state = 184 + self.state = 180 localctx.tag = self.identifierMultipart() @@ -1611,21 +1561,21 @@ def accept(self, visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 32, self.RULE_identifierMultipart) + self.enterRule(localctx, 30, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 187 + self.state = 183 self.match(PqlParser.WORD) - self.state = 192 + self.state = 188 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,21,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 188 + self.state = 184 self.match(PqlParser.DOT) - self.state = 189 + self.state = 185 self.match(PqlParser.WORD) - self.state = 194 + self.state = 190 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,21,self._ctx) @@ -1685,11 +1635,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 34, self.RULE_literalValue) + self.enterRule(localctx, 32, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 195 + self.state = 191 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -1709,7 +1659,7 @@ def literalValue(self): def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[12] = self.expr_sempred + self._predicates[11] = self.expr_sempred 
pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 31dc27c..33ee5c5 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -53,30 +53,21 @@ def exitSelectStmt(self, ctx:PqlParser.SelectStmtContext): pass - # Enter a parse tree produced by PqlParser#columns. - def enterColumns(self, ctx:PqlParser.ColumnsContext): + # Enter a parse tree produced by PqlParser#selectClause. + def enterSelectClause(self, ctx:PqlParser.SelectClauseContext): pass - # Exit a parse tree produced by PqlParser#columns. - def exitColumns(self, ctx:PqlParser.ColumnsContext): + # Exit a parse tree produced by PqlParser#selectClause. + def exitSelectClause(self, ctx:PqlParser.SelectClauseContext): pass - # Enter a parse tree produced by PqlParser#column. - def enterColumn(self, ctx:PqlParser.ColumnContext): - pass - - # Exit a parse tree produced by PqlParser#column. - def exitColumn(self, ctx:PqlParser.ColumnContext): - pass - - - # Enter a parse tree produced by PqlParser#typeCast. - def enterTypeCast(self, ctx:PqlParser.TypeCastContext): + # Enter a parse tree produced by PqlParser#columns. + def enterColumns(self, ctx:PqlParser.ColumnsContext): pass - # Exit a parse tree produced by PqlParser#typeCast. - def exitTypeCast(self, ctx:PqlParser.TypeCastContext): + # Exit a parse tree produced by PqlParser#columns. + def exitColumns(self, ctx:PqlParser.ColumnsContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index 3193f54..8333399 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -34,18 +34,13 @@ def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): return self.visitChildren(ctx) - # Visit a parse tree produced by PqlParser#columns. - def visitColumns(self, ctx:PqlParser.ColumnsContext): + # Visit a parse tree produced by PqlParser#selectClause. + def visitSelectClause(self, ctx:PqlParser.SelectClauseContext): return self.visitChildren(ctx) - # Visit a parse tree produced by PqlParser#column. - def visitColumn(self, ctx:PqlParser.ColumnContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#typeCast. - def visitTypeCast(self, ctx:PqlParser.TypeCastContext): + # Visit a parse tree produced by PqlParser#columns. + def visitColumns(self, ctx:PqlParser.ColumnsContext): return self.visitChildren(ctx) diff --git a/python/src/pql_grammar/ast/__init__.py b/python/src/pql_grammar/ast/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py new file mode 100644 index 0000000..ae0b81f --- /dev/null +++ b/python/src/pql_grammar/ast/model.py @@ -0,0 +1,80 @@ +from dataclasses import dataclass, fields +from decimal import Decimal +from typing import ( + Any, + List, + Optional, + Tuple, + Union, +) + + +class Node: + pass + +@dataclass +class Expr(Node): + """ arithmetic operation like 'a > b' in Pre-fix notation""" + operator: str + # some operations are unary. there will be only one arg + # most others are left-right, so len would be 2. + # rarely there will be len more than 2. 
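+    # e.g. a binary comparison 'a > b' is stored as Expr('>', [a, b]); a unary 'NOT a' as Expr('NOT', [a])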
+ args: List[Any] + +@dataclass +class TelExpr(Node): + raw_value: str + +@dataclass +class Literal(Node): + value: Union[int,float,str,Decimal] + raw_value: str + +@dataclass +class Taxon(Node): + slug: str + namespace: Optional[str] = None + is_optional: Optional[bool] = False + tag: Optional[str] = None + +@dataclass +class Function(Node): + function_name: str + # support named args. + # each tuple is a pair of arg_name=arg_value in order of occurrence. + args: List[Tuple[Optional[str],str]] + +Values = Union[TelExpr,Function,Taxon,Literal] + +@dataclass +class Column(Node): + value: Values + type_cast: Optional[Function] = None + alias: Optional[Taxon] = None + +@dataclass +class SelectStmt(Node): + columns: List[Column] + where_clause: Optional[Expr] + + +def ast_diff(a, b, path=None): + if not path: + path = [] + + if type(a) != type(b): + raise Exception(f"Types of {a} and {b} are not same {type(a)} != {type(b)} for path {path}") + + path += [type(a).__name__] + + if isinstance(a, Node): + for f in fields(a): + ast_diff(getattr(a, f.name), getattr(b, f.name), path + [f.name]) + elif isinstance(a, (list,tuple)): + if len(a) != len(b): + raise Exception(f"Lengths are different for {a} and {b} for path {path}") + for i, (x,y) in enumerate(zip(a, b)): + ast_diff(x,y, path + [i]) + else: + if a != b: + raise Exception(f"Values of {a} and {b} are not same for path {path}") diff --git a/python/tests/pql/pql_ast_test.py b/python/tests/pql/pql_ast_test.py new file mode 100644 index 0000000..b00c8c8 --- /dev/null +++ b/python/tests/pql/pql_ast_test.py @@ -0,0 +1,358 @@ +import sys +sys.path.append('./src') + +from antlr4 import CommonTokenStream, InputStream, ParserRuleContext +from antlr4.tree import Tree +from dataclasses import dataclass +from collections import namedtuple +from typing import Optional, Tuple +from unittest import mock, TestCase + +from pql_grammar.antlr.PqlLexer import PqlLexer +from pql_grammar.antlr.PqlParser import PqlParser +from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor +from pql_grammar import operators as op +from pql_grammar.ast import model as ast + + +def full_text(ctx: ParserRuleContext) -> str: + # extracts full text from a tree of nodes, + # including white space. + if ctx: + if isinstance(ctx, ParserRuleContext): + return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) + else: + try: + # some primitive context object + return ctx.text + except AttributeError: + # Terminal Node of some sort + return str(ctx) + else: + return None + + +def unquote(s: str): + # Quoted schema, table, column names come in Postgres style - double-quotes + # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. + # https://docs.oracle.com/goldengate/1212/gg-winux/GWURF/gg_parameters183.htm#GWURF728 + # Example: + # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' + if not s: + return s + if s[0] == '"' and s[-1] == '"': + s = s[1:-1] + return s.replace('""', '"') + + +class AstParser: + + @classmethod + def unwrap_expr_parens(cls, e: PqlParser.ExprContext) -> PqlParser.ExprContext: + # it's allowed to wrap expressions into superflous amounts of parens + # (((column > 5))) + # These come across as triple-nested [TerminalNodeImpl('('), expr, TerminalNodeImpl(')')] + # Here we check for len == 3 and if last and first Terminals are (), return middle element - expression, + # Run this recursively. 
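+        # e.g. for '(((column > 5)))' this returns the innermost ExprContext for 'column > 5'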
+ # inner attribute is enabled only on cleanly-paren-wrapped expressions + if e.inner: + return cls.unwrap_expr_parens(e.inner) + else: + return e + + @classmethod + def parse_taxon(cls, e: PqlParser.TaxonContext) -> ast.Taxon: + return ast.Taxon( + full_text(e.slug), + full_text(e.namespace), + bool(e.is_optional), + full_text(e.tag) + ) + + @classmethod + def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optional[str],str]: + e = cls.unwrap_expr_parens(e) + o = full_text(e.operator) + if o == '=': + arg_name = full_text(e.left) + arg_value = full_text(e.right) + else: + arg_name = None + arg_value = full_text(e) + return (arg_name, arg_value) + + @classmethod + def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: + return ast.Function( + full_text(e.function_name), + [ + cls.parse_function_argument_pair(expr) + for expr in e.arguments.expr() + ] + ) + + @classmethod + def parse_column_value(cls, v: PqlParser.expr) -> ast.Values: + # v is always PqlParser.expr, but anything can be inside + # It's not super relevant what's inside Expr, since + # we sent the original string-ified version of contenst to Husky anyway. + # There are some good reasons to parse the value for realz: + # - understanding if there is an outter `CAST( expr as TypeCast())` in there that needs re-syntaxing + # - deciding if specific value is taxon AND if it's in or is not in WHERE clause to channel it to pre/post agg + # However, we can do that crudely just on string representations of contents and avoid parsing them. + # Still, let's try to parse top level into one of: + # - Taxon + # - TelExpr where all other kinds of complex expressions are packed + # Specifically note that we allow Literal, Function other otherwise basic structures to be packed into Tel box. 
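+        # e.g. '?ns1|taxon1' parses to Taxon('taxon1', 'ns1', True), while 'fn_4(fn_1(slug))' is kept whole as TelExpr('fn_4(fn_1(slug))')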
+ + # So, if it's not Taxon object at top level, we unwrap redundant parens and pack string into TelExpr + v = cls.unwrap_expr_parens(v) + + t: Optional[PqlParser.TaxonContext] = v.taxon() + if t: + return cls.parse_taxon(t) + else: + return ast.TelExpr(full_text(v)) + + @classmethod + def parse_column_typecast(cls, v: PqlParser.FunctionContext) -> Optional[ast.Function]: + if not v: + return None + return cls.parse_function(v) + + @classmethod + def parse_column_alias(cls, v: PqlParser.TaxonContext) -> Optional[ast.Taxon]: + if not v: + return None + return cls.parse_taxon(v) + + @classmethod + def parse_column(cls, e: PqlParser.ColumnsContext): + return ast.Column( + cls.parse_column_value(e.value), + cls.parse_column_typecast(e.type_cast), + cls.parse_column_alias(e.alias) + ) + + @staticmethod + def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): + is_number = e.NUMERIC_LITERAL() + is_string = e.DOUBLE_QUOTED_STRING() or e.SINGLE_QUOTED_STRING() + is_null = e.K_NULL() + is_bool = e.K_TRUE() or e.K_FALSE() + + # TODO: + # - BLOB_LITERAL + # - CURRENT_[DATE|TIME|TIMESTAMP] + + if is_null: + return None + + if is_bool: + return bool(e.K_TRUE()) + + try: + v = e.getText() + except IndexError: + raise Exception(f"Could not extract literal value node from '{e.getText()}'.") + + if is_number: + # TODO: contemplate decimal type instead + try: + return int(v) + except ValueError: + try: + return float(v) + except Exception: + raise Exception(f"Could not convert SQL number {v} to native number representation.") + + if is_string: + return unquote(v) + + return v + + _sql_name_map = { + 'AND': op.OpName.AND, + 'OR': op.OpName.OR, + 'NOT': op.OpName.NOT, + 'IS': op.OpName.IS, + '=': op.OpName.EQ, # notice WHERE clause specific handling. NOT assignment. EQ! + '==': op.OpName.EQ, # opportunistic inclusion, while we don't expect to see it in WHERE + '<>': op.OpName.NEQ, + '!=': op.OpName.NEQ, # opportunistic inclusion, while we don't expect to see it in WHERE + '>': op.OpName.GT, + '>=': op.OpName.GTE, + '<': op.OpName.LT, + '<=': op.OpName.LTE, + 'LIKE': op.OpName.LIKE, + '+':op.OpName.PLUS, + '-':op.OpName.MINUS, + '*':op.OpName.STAR, + '/':op.OpName.DIV, + '%':op.OpName.MOD, + } + + @classmethod + def _lookup_operator_internal_name(cls, sql_operator: str): + op_name = cls._sql_name_map.get(sql_operator.upper()) + if not op_name: + raise Exception(f"Could not match operator '{sql_operator}' in where clause to a supported action.") + return op_name + + @classmethod + def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : + ctx = cls.unwrap_expr_parens(ctx) + + v = ctx.literalValue() + if v: + return ast.Literal( + cls._literalValue_to_python_native(v), + full_text(v) + ) + + v = ctx.unary_operator + if v: + operator = full_text(v).upper() + return ast.Expr( + operator, + [cls.parse_where_clause_expr(ctx.right)] + ) + + v: Optional[str] = full_text(ctx.operator) + if v: + # this is super generic expression of type + # left OP right + # with a lot of options for OP value. + return ast.Expr( + v.upper(), + [ + cls.parse_where_clause_expr(ctx.left), + cls.parse_where_clause_expr(ctx.right) + ] + ) + + v: PqlParser.TaxonContext = ctx.taxon() + if v: + return cls.parse_taxon(v) + + v: PqlParser.FunctionContext = ctx.function() + if v: + return cls.parse_function(v) + + raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') + + +class AssertPqlVisitor(PqlParserVisitor): + """ + Special TelVisitor for testing grammar. 
Throws error in case of invalid node. + """ + def visitErrorNode(self, node): + wrong_symbol = node.symbol.text + position = node.symbol.column + 1 + details = f'Unexpected symbol "{wrong_symbol}" at position {position}' + raise AssertionError(details) + + @classmethod + def parse_string(cls, s): + inp_stream = InputStream(s) + lexer = PqlLexer(inp_stream) + stream = CommonTokenStream(lexer) + parser = PqlParser(stream) + tree = parser.parsePql() + # Use error visitor on parsed tree to test it + visitor = cls() + visitor.visit(tree) + + +class PqlAstTests(TestCase): + maxDiff = None + + def test_select(self): + + pql = """ + select + ?ns1|taxon1, + ns2|taxon2, + slug1 as myns|slug1, + (?ns3|taxon3 + (slug2 - 1234)) as myns|custom_data, + fn_4(fn_1(slug))::TypeCast(arg1=value1) + where + ns6|taxon6 > 1234 + and (ns0|taxon10 + 4321) == 0 + """ + + statements = [] + + class V(AssertPqlVisitor): + def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): + columns = [ + ast.Column( + AstParser.parse_column_value(column.value), + AstParser.parse_column_typecast(column.type_cast), + AstParser.parse_column_alias(column.alias) + ) + for column in ctx.selectClause().columns() + ] + where_clause = AstParser.parse_where_clause_expr(ctx.whereClause().expr()) + + statements.append(ast.SelectStmt( + columns, + where_clause + )) + + V.parse_string(pql) + + stmt_should_be = ast.SelectStmt( + [ + ast.Column(ast.Taxon('taxon1', 'ns1', True)), + ast.Column(ast.Taxon('taxon2', 'ns2', False)), + ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), + ast.Column( + ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), + None, + ast.Taxon('custom_data', 'myns') + ), + ast.Column( + ast.TelExpr('fn_4(fn_1(slug))'), + ast.Function( + 'TypeCast', + [('arg1','value1')] + ), + ) + ], + ast.Expr( + 'AND', + [ + ast.Expr( + '>', + [ + ast.Taxon('taxon6', 'ns6'), + ast.Literal(1234, '1234') + ] + ), + ast.Expr( + '==', + [ + ast.Expr( + '+', + [ + ast.Taxon('taxon10', 'ns0'), + ast.Literal(4321, '4321') + ] + ), + ast.Literal(0, '0') + ] + ) + ] + ) + ) + + assert statements + stmt = statements[0] + + assert len(stmt.columns) == len(stmt_should_be.columns) + for result, should_be in zip(stmt.columns, stmt_should_be.columns): + assert result == should_be + + # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) + assert stmt.where_clause == stmt_should_be.where_clause diff --git a/python/tests/pql/pql_test.py b/python/tests/pql/pql_atesta.py similarity index 100% rename from python/tests/pql/pql_test.py rename to python/tests/pql/pql_atesta.py From 47d6070e585c03e0ef26b81b2258868bdfe769ec Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Thu, 12 Nov 2020 20:09:59 -0800 Subject: [PATCH 13/32] round-trip PQL <> AST and AST <> JSON parsers, renderers --- python/src/pql_grammar/ast/from_json.py | 35 ++ .../pql_grammar/ast/from_pql.py} | 165 +------ python/src/pql_grammar/ast/model.py | 17 +- python/src/pql_grammar/ast/to_json.py | 37 ++ python/src/pql_grammar/ast/to_pql.py | 106 +++++ python/src/pql_grammar/operators.py | 421 ------------------ python/tests/ast_json_test.py | 219 +++++++++ python/tests/ast_pql_test.py | 142 ++++++ python/tests/pql/pql_atesta.py | 320 ------------- 9 files changed, 566 insertions(+), 896 deletions(-) create mode 100644 python/src/pql_grammar/ast/from_json.py rename python/{tests/pql/pql_ast_test.py => src/pql_grammar/ast/from_pql.py} (57%) create mode 100644 python/src/pql_grammar/ast/to_json.py create mode 100644 python/src/pql_grammar/ast/to_pql.py delete mode 100644 
python/src/pql_grammar/operators.py create mode 100644 python/tests/ast_json_test.py create mode 100644 python/tests/ast_pql_test.py delete mode 100644 python/tests/pql/pql_atesta.py diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/ast/from_json.py new file mode 100644 index 0000000..f755164 --- /dev/null +++ b/python/src/pql_grammar/ast/from_json.py @@ -0,0 +1,35 @@ +from dataclasses import fields +from typing import List, Tuple, Any +from . import model as ast + + +AST_NODE_MARKER = '__type__' + + +def from_json(o: dict): + + if isinstance(o, dict) and AST_NODE_MARKER in o: + name = o[AST_NODE_MARKER] + N = ast.inventory.get(name) + if not N: + raise NotImplementedError(f'Renderer for node type "{name}" is not implemented.') + + return N(**{ + k: from_json(v) + for k, v in o.items() + if k != AST_NODE_MARKER + }) + + if isinstance(o, (list, tuple)): + return [ + from_json(v) + for v in o + ] + + if isinstance(o, dict): + return { + k: from_json(v) + for k, v in o.items() + } + + return o diff --git a/python/tests/pql/pql_ast_test.py b/python/src/pql_grammar/ast/from_pql.py similarity index 57% rename from python/tests/pql/pql_ast_test.py rename to python/src/pql_grammar/ast/from_pql.py index b00c8c8..888161b 100644 --- a/python/tests/pql/pql_ast_test.py +++ b/python/src/pql_grammar/ast/from_pql.py @@ -1,18 +1,12 @@ -import sys -sys.path.append('./src') - from antlr4 import CommonTokenStream, InputStream, ParserRuleContext -from antlr4.tree import Tree -from dataclasses import dataclass -from collections import namedtuple +from antlr4 import ParserRuleContext from typing import Optional, Tuple -from unittest import mock, TestCase -from pql_grammar.antlr.PqlLexer import PqlLexer -from pql_grammar.antlr.PqlParser import PqlParser -from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor -from pql_grammar import operators as op -from pql_grammar.ast import model as ast +from ..antlr.PqlLexer import PqlLexer +from ..antlr.PqlParser import PqlParser +from ..antlr.PqlParserVisitor import PqlParserVisitor as _PqlParserVisitor + +from . import model as ast def full_text(ctx: ParserRuleContext) -> str: @@ -45,7 +39,7 @@ def unquote(s: str): return s.replace('""', '"') -class AstParser: +class PqlAntlrToAstParser: @classmethod def unwrap_expr_parens(cls, e: PqlParser.ExprContext) -> PqlParser.ExprContext: @@ -88,11 +82,11 @@ def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: [ cls.parse_function_argument_pair(expr) for expr in e.arguments.expr() - ] + ] if e.arguments else None ) @classmethod - def parse_column_value(cls, v: PqlParser.expr) -> ast.Values: + def parse_column_value(cls, v: PqlParser.expr) -> ast.ColumnValue: # v is always PqlParser.expr, but anything can be inside # It's not super relevant what's inside Expr, since # we sent the original string-ified version of contenst to Husky anyway. @@ -171,34 +165,6 @@ def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): return v - _sql_name_map = { - 'AND': op.OpName.AND, - 'OR': op.OpName.OR, - 'NOT': op.OpName.NOT, - 'IS': op.OpName.IS, - '=': op.OpName.EQ, # notice WHERE clause specific handling. NOT assignment. EQ! 
- '==': op.OpName.EQ, # opportunistic inclusion, while we don't expect to see it in WHERE - '<>': op.OpName.NEQ, - '!=': op.OpName.NEQ, # opportunistic inclusion, while we don't expect to see it in WHERE - '>': op.OpName.GT, - '>=': op.OpName.GTE, - '<': op.OpName.LT, - '<=': op.OpName.LTE, - 'LIKE': op.OpName.LIKE, - '+':op.OpName.PLUS, - '-':op.OpName.MINUS, - '*':op.OpName.STAR, - '/':op.OpName.DIV, - '%':op.OpName.MOD, - } - - @classmethod - def _lookup_operator_internal_name(cls, sql_operator: str): - op_name = cls._sql_name_map.get(sql_operator.upper()) - if not op_name: - raise Exception(f"Could not match operator '{sql_operator}' in where clause to a supported action.") - return op_name - @classmethod def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : ctx = cls.unwrap_expr_parens(ctx) @@ -242,117 +208,12 @@ def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') -class AssertPqlVisitor(PqlParserVisitor): - """ - Special TelVisitor for testing grammar. Throws error in case of invalid node. - """ - def visitErrorNode(self, node): - wrong_symbol = node.symbol.text - position = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" at position {position}' - raise AssertionError(details) +class PqlVisitor(_PqlParserVisitor): - @classmethod - def parse_string(cls, s): - inp_stream = InputStream(s) + def visit_from_string(self, pql: str): + inp_stream = InputStream(pql) lexer = PqlLexer(inp_stream) stream = CommonTokenStream(lexer) parser = PqlParser(stream) tree = parser.parsePql() - # Use error visitor on parsed tree to test it - visitor = cls() - visitor.visit(tree) - - -class PqlAstTests(TestCase): - maxDiff = None - - def test_select(self): - - pql = """ - select - ?ns1|taxon1, - ns2|taxon2, - slug1 as myns|slug1, - (?ns3|taxon3 + (slug2 - 1234)) as myns|custom_data, - fn_4(fn_1(slug))::TypeCast(arg1=value1) - where - ns6|taxon6 > 1234 - and (ns0|taxon10 + 4321) == 0 - """ - - statements = [] - - class V(AssertPqlVisitor): - def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - columns = [ - ast.Column( - AstParser.parse_column_value(column.value), - AstParser.parse_column_typecast(column.type_cast), - AstParser.parse_column_alias(column.alias) - ) - for column in ctx.selectClause().columns() - ] - where_clause = AstParser.parse_where_clause_expr(ctx.whereClause().expr()) - - statements.append(ast.SelectStmt( - columns, - where_clause - )) - - V.parse_string(pql) - - stmt_should_be = ast.SelectStmt( - [ - ast.Column(ast.Taxon('taxon1', 'ns1', True)), - ast.Column(ast.Taxon('taxon2', 'ns2', False)), - ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), - ast.Column( - ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), - None, - ast.Taxon('custom_data', 'myns') - ), - ast.Column( - ast.TelExpr('fn_4(fn_1(slug))'), - ast.Function( - 'TypeCast', - [('arg1','value1')] - ), - ) - ], - ast.Expr( - 'AND', - [ - ast.Expr( - '>', - [ - ast.Taxon('taxon6', 'ns6'), - ast.Literal(1234, '1234') - ] - ), - ast.Expr( - '==', - [ - ast.Expr( - '+', - [ - ast.Taxon('taxon10', 'ns0'), - ast.Literal(4321, '4321') - ] - ), - ast.Literal(0, '0') - ] - ) - ] - ) - ) - - assert statements - stmt = statements[0] - - assert len(stmt.columns) == len(stmt_should_be.columns) - for result, should_be in zip(stmt.columns, stmt_should_be.columns): - assert result == should_be - - # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) - assert 
stmt.where_clause == stmt_should_be.where_clause + self.visit(tree) diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py index ae0b81f..da52b09 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/ast/model.py @@ -9,6 +9,10 @@ ) +# filled on the bottom +inventory = {} + + class Node: pass @@ -42,13 +46,13 @@ class Function(Node): function_name: str # support named args. # each tuple is a pair of arg_name=arg_value in order of occurrence. - args: List[Tuple[Optional[str],str]] + args: Optional[List[Tuple[Optional[str],str]]] = None -Values = Union[TelExpr,Function,Taxon,Literal] +ColumnValue = Union[TelExpr,Function,Taxon,Literal] @dataclass class Column(Node): - value: Values + value: ColumnValue type_cast: Optional[Function] = None alias: Optional[Taxon] = None @@ -78,3 +82,10 @@ def ast_diff(a, b, path=None): else: if a != b: raise Exception(f"Values of {a} and {b} are not same for path {path}") + + +inventory.update({ + k : v + for k, v in dict(locals()).items() + if type(v) is type and issubclass(v, Node) +}) diff --git a/python/src/pql_grammar/ast/to_json.py b/python/src/pql_grammar/ast/to_json.py new file mode 100644 index 0000000..e3952fc --- /dev/null +++ b/python/src/pql_grammar/ast/to_json.py @@ -0,0 +1,37 @@ +from dataclasses import fields +from . import model as ast + + +def node_to_tuples(n: ast.Node): + return [ + ('__type__', n.__class__.__name__) + ] + [ + (field.name, getattr(n, field.name)) + for field in fields(n) + ] + + +def to_json(o): + + if isinstance(o, ast.Node): + return { + k: to_json(v) + for k, v in node_to_tuples(o) + if not (v is None) # <-- diff from to_data_tuple + } + + if isinstance(o, (list, tuple)): + return [ + to_json(v) + for v in o + # if not (v is None) # <-- diff from to_data_tuple + ] + + if isinstance(o, dict): + return { + k: to_json(v) + for k, v in o.items() + if not (v is None) # <-- diff from to_data_tuple + } + + return o diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/ast/to_pql.py new file mode 100644 index 0000000..861f25d --- /dev/null +++ b/python/src/pql_grammar/ast/to_pql.py @@ -0,0 +1,106 @@ +from . import model as ast + + +FIRST=0 +SECOND=1 +LAST=-1 + +INDENT = ' ' * 4 + + +# populated further below, at the end of this file on module init. +renderer_map = {} + + +class Node: + def __init__(self, n): + self.n = n + + def __str__(self): + raise NotImplementedError(f'Renderer for "{self.n}" is not implemented.') + + +class Expr(Node): + n: ast.Expr + def __str__(self): + op = self.n.operator + if len(self.n.args) == 1: + right = self.n.args[FIRST] + # could be something like 'NOT' which needs padding + padding = '' if op in ('+','-') else ' ' + return f'{op}{padding}{to_r(right)}' + else: + left = self.n.args[FIRST] + right = self.n.args[SECOND] + return f'({to_r(left)} {op} {to_r(right)})' + + +class TelExpr(Node): + n: ast.TelExpr + def __str__(self): + return self.n.raw_value + + +class Literal(Node): + n: ast.Literal + def __str__(self): + return self.n.raw_value + + +class Taxon(Node): + n: ast.Taxon + def __str__(self): + n = self.n + is_optional = '?' if n.is_optional else '' + namespace = n.namespace + '|' if n.namespace else '' + slug = n.slug + tag = ':' + n.tag if n.tag else '' + return f'{is_optional}{namespace}{slug}{tag}' + + +class Function(Node): + n: ast.Function + def __str__(self): + fn = self.n.function_name + # args are string pairs, not parsed deeper at all. 
+ args = ','.join([ + f'{n}={v}' if n else f'{v}' + for n,v in (self.n.args or []) + ]) + return f'{fn}({args})' + + +class Column(Node): + n: ast.Column + def __str__(self): + n = self.n + value = f'{to_r(n.value)}' + type_cast = f'::{to_r(n.type_cast)}' if n.type_cast else '' + if type_cast and (value[FIRST] != '(' or value[LAST] != ')'): + value = f'({value})' + alias = f' AS {to_r(n.alias)}' if n.alias else '' + return f'{value}{type_cast}{alias}' + + +class SelectStmt(Node): + n: ast.SelectStmt + def __str__(self): + n = self.n + cc = 'SELECT\n' + INDENT + (',\n' + INDENT).join(map(str, map(Column, n.columns))) + '\n' + w = 'WHERE\n' + INDENT + str(to_r(n.where_clause)) + '\n' + return cc + w + ';\n' + + +def to_r(n: ast.Node): + return renderer_map.get(type(n), Node)(n) + + +def to_pql(o: ast.Node): + return str(to_r(o)) + + +renderer_map.update({ + getattr(ast, k) : v + for k, v in dict(locals()).items() + if type(v) is type and issubclass(v, Node) and hasattr(ast, k) +}) diff --git a/python/src/pql_grammar/operators.py b/python/src/pql_grammar/operators.py deleted file mode 100644 index 1c9aedb..0000000 --- a/python/src/pql_grammar/operators.py +++ /dev/null @@ -1,421 +0,0 @@ -""" -Conditional logic runners designed for latent evaluation. - -First you construct the logic, then pass into resulting callable an object that contains values mentioned in the logic -Designed for expressing a SQL WHERE clause logic before you have data and then running each row's data through it. - - # WHERE columnA = 'gold' AND columnB > 100 - clause = AND( - EQ( - attr('columnA'), - 'gold' - ), - GT( - attr('columnB'), - 100 - ) - ) - - remaining_data = [ - row - for row in rows - if clause(row) - ] - -Works for dict's (keys and values over `.get(` interface) AND object attributes (properties) (`getattr(o, attr)`) - -(This is NOT designed to be serialized for pushing over wire. Make something else for that.) - -""" -import enum -import logging -import re - -from dataclasses import dataclass -from typing import Tuple, List, Any, Optional, Union, Literal - - -logger = logging.getLogger(__name__) - - -_v = lambda v, o: v(o) if callable(v) else v - - -def AND(*children): - def _(o): - return all(( - _v(a, o) - for a in children - )) - return _ - - -def OR(*children): - def _(o): - return any(( - _v(a, o) - for a in children - )) - return _ - - -def NOT(a): - def _(o): - return not _v(a, o) - return _ - - -def EQ(a, b): - def _(o): - return _v(a, o) == _v(b, o) - return _ - - -def NEQ(a, b): - def _(o): - return _v(a, o) != _v(b, o) - return _ - - -def IS(a, b): - def _(o): - return _v(a, o) is _v(b, o) - return _ - - -# https://codereview.stackexchange.com/a/36864/229677 -_char_regex_map = { - ch : '\\'+ch - for ch in '.^$*+?{}[]|()\\' -} -_char_regex_map['%'] = '.*?' -_char_regex_map['_'] = '.' 
-def sql_like_fragment_to_regex_string(fragment): - return '^' + ''.join([ - _char_regex_map.get(ch, ch) - for ch in fragment - ]) + '$' - - -def LIKE(a, fragment): - _regex = re.compile(sql_like_fragment_to_regex_string(fragment)) - def _(o): - return bool(_regex.match(_v(a, o))) - return _ - - -def GT(a, b): - def _(o): - return _v(a, o) > _v(b, o) - return _ - - -def GTE(a, b): - def _(o): - return _v(a, o) >= _v(b, o) - return _ - - -def LT(a, b): - def _(o): - return _v(a, o) < _v(b, o) - return _ - - -def LTE(a, b): - def _(o): - return _v(a, o) <= _v(b, o) - return _ - - -def PLUS_UNARY(a): - def _(o): - # no-op - return _v(a, o) - return _ - - -def MINUS_UNARY(a): - def _(o): - return -1 * _v(a, o) - return _ - - -def PLUS(a, b): - def _(o): - return _v(a, o) + _v(b, o) - return _ - - -def MINUS(a, b): - def _(o): - return _v(a, o) - _v(b, o) - return _ - - -def STAR(a, b): - def _(o): - return _v(a, o) * _v(b, o) - return _ - - -def DIV(a, b): - def _(o): - return _v(a, o) / _v(b, o) - return _ - - -def MOD(a, b): - def _(o): - return _v(a, o) % _v(b, o) - return _ - - -class TableColumnName(list): - def __init__(self, column_name, table_name=None, schema_name=None, catalog_name=None): - super().__init__([ - e - for e in [column_name, table_name, schema_name, catalog_name] - if not (e is None) - ]) - - -def attr(name: Union[str, list, TableColumnName], default=None): - """ - This attr getter works only on Class-like objects, - where you access values through attributes, not keys - Done so specifically to stay away from ambiguity of working with named tuples. - Also allows repackaging instances of objects ON_DEMAND where values are hiding in @properties - and are, thus allowing Where logic to trigger expensive properties only when needed, - as opposed to forcing serialization of full object into dict before piping through the where clause. - - Best examples of what `o` is - @dataclass or namedtuple instances. - Obviously, pydantic models and all other class instances will do too. - """ - def _(o): - # do NOT add .get( handling here, especially do NOT add it as first action. - # You will break namedtuples, where .get(index) is present - # but o.attr_name is the only right way to ask for data by name (not index) - if isinstance(o, (TableColumnName, list)): - if len(o) > 1: - logger.warning(f"WHERE clause references column by long name '{list(reversed(o))}', " - "which is not compatible with our functional WHERE logic processor. 
" - f"Droping all parts except for '{o[0]}' during comparison") - return getattr(o[0], name, default) - else: - return getattr(o, name, default) - return _ - - -class OpName: - AND = 'AND' - OR = 'OR' - NOT = 'NOT' - EQ = 'EQ' - NEQ = 'NEQ' - IS = 'IS' - LIKE = 'LIKE' - GT = 'GT' - GTE = 'GTE' - LT = 'LT' - LTE = 'LTE' - PLUS = 'PLUS' - MINUS = 'MINUS' - STAR = 'STAR' - DIV = 'DIV' - MOD = 'MOD' - attr = 'attr' - - -name_operator_map = { - OpName.AND:AND, - OpName.OR:OR, - OpName.NOT:NOT, - OpName.EQ:EQ, - OpName.NEQ:NEQ, - OpName.IS:IS, - OpName.LIKE:LIKE, - OpName.GT:GT, - OpName.GTE:GTE, - OpName.LT:LT, - OpName.LTE:LTE, - OpName.PLUS:PLUS, - OpName.MINUS:MINUS, - OpName.STAR:STAR, - OpName.DIV:DIV, - OpName.MOD:MOD, - OpName.attr:attr, -} - - -operator_name_map = { - fn: name - for name, fn in name_operator_map.items() -} - - -OperatorName = str # OP_NAME if it was (str, enum.Enum), but that produces ugly output structures -OperatorSchemaLiteral = Tuple[Literal['@literalValue'], Any] -# it's actually a List that's returned, but Python typing system does not? allow -# expressing a list with defined items inside in their order. -# have to use Tuple to specify order of things, but return type is actually list -# This structure is supposed to natively serializable to and from JSON> -OperatorSchema = Union[ - Tuple[OperatorName, 'OperatorSchema', Optional['OperatorSchema']], - OperatorSchemaLiteral -] - - -LITERAL = '@literalValue' - - -def schema_literal(o) -> OperatorSchemaLiteral: - return [LITERAL, o] - - -def schema_stanza(operator_name, *args) -> OperatorSchema: - aa = [] - for a in args: - # lists usually dont make sense as literals, so checking is just being overly safe - # however, `WHERE a IN ('a','b','c')` clause produces a tuple-like literal, - # first element in which MAY be value 'literal' - # This tuple, if encoded as list object will break this code. - - # We don't support IN yet, but when we do, think about ^ this case and change - # literal tuple marker value to something that would be - # (a) serializable to JSON, yet - # (b) impossible to collide with first element in SQL IN array. - - # TODO: ^ account for WHERE IN literal value collision. - if isinstance(a, (list, tuple)) and len(a) > 1 and (a[0] == LITERAL or a[0] in name_operator_map): - # This is already encoded stanza, pass through as is - aa.append(a) - else: - aa.append(schema_literal(a)) - return [operator_name, *aa] - - -def schema_to_callable(schema: OperatorSchema): - if not schema: - return lambda o: True - - fn_name, *args = schema - if fn_name == LITERAL: - # only one value possible - return args[0] - else: - fn = name_operator_map[fn_name] - return fn(*( - schema_to_callable(arg) - for arg in args - )) - - -class _Any: - # used for comparing / finding stanzas in where schema when - # you want to match the structure but not some literal values - def __eq__(self, other): - return True -Any = _Any() - - -def schema_extract_top_level(schema_fragment: OperatorSchema, schema: OperatorSchema): - # when underlying function call requires a parameter - # we need to extract it from sql and ensure it's specified in unambigous way - # ambiguous for arg need_this_id: - # where a = 'orange' or (date < now()-3 or need_this_id = '1234') - # UNambiguous for arg need_this_id: - # where a = 'orange' AND need_this_id = '1234' - # In other words, condition we are looking to extract does not have to be the only one in - # the clause, but just have to participate in unambiguously top-level AND or just be by itself. 
- # This means we'll traverse down recursive ANDs until we find NON-AND and that NON-AND must equal our fragment. - - if schema_fragment == schema: - return schema - - if not schema: - return - - if schema[0] == OpName.AND: - for predicate in schema[1:]: - v = schema_extract_top_level(schema_fragment, predicate) - if v: - return v - - -@dataclass -class _Example: - col1: str - col2: int - col3: str - -_example_clause = AND( - NOT( - EQ( - attr('col1'), - 'dirt' # <- demonstrates non-callable, literal - ) - ), - GT( - attr('col2'), - 5 # <- demonstrates non-callable, literal - ), - LIKE( - attr('col3'), - '%super.match.com' - ), - True # <- demonstrates non-callable, literal -) - -assert False == _example_clause(_Example(col1='dirt', col2=7, col3='this is super.match.com')) # not asdf -assert False == _example_clause(_Example(col1='dirt', col2=3, col3='this is super.match.com')) # col2 < 5 -assert False == _example_clause(_Example(col1='gold', col2=7, col3='this is super.MISmatch.com')) # col3 issue -assert False == _example_clause(_Example(col1='gold', col2=7, col3='this is super.match.com trailing thing here')) # col3 issue -assert True == _example_clause(_Example(col1='gold', col2=7, col3='this is super.match.com')) - -# nesting ands just ot test "find fragment" code -_example_schema = [ - 'AND', - [ - 'AND', - [LITERAL, True], - [ - 'EQ', - ['attr', - [LITERAL, 'col1'] - ], - [LITERAL, 'gold'], - ] - ], - [LITERAL, True] -] - -assert False == schema_to_callable(_example_schema)(_Example(col1='dirt', col2=0, col3='')) -assert True == schema_to_callable(_example_schema)(_Example(col1='gold', col2=0, col3='')) - -_example_schema_exact_equal = schema_stanza( - OpName.EQ, - schema_stanza(OpName.attr, 'col1'), - schema_literal('gold') -) - -_example_schema_FUZZY_equal = schema_stanza( - OpName.EQ, - schema_stanza(OpName.attr, 'col1'), - schema_literal(Any) -) - -assert _example_schema_exact_equal == schema_extract_top_level(_example_schema_FUZZY_equal, _example_schema) -assert None == schema_extract_top_level(_example_schema_FUZZY_equal, schema_stanza( - OpName.OR, # <- OR is the issue.. creates ambiguity about which branch is difinitive - schema_stanza( - OpName.EQ, # <- while this guy - schema_stanza(OpName.attr, 'col1'), - schema_literal('a') - ), - schema_stanza( - OpName.EQ, # <- and this guy in isolation would have matched. - schema_stanza(OpName.attr, 'col1'), - schema_literal('b') - ) -)) diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py new file mode 100644 index 0000000..4225100 --- /dev/null +++ b/python/tests/ast_json_test.py @@ -0,0 +1,219 @@ +import sys +from unittest import TestCase + +sys.path.append('./src') + +from pql_grammar.ast import model as ast +from pql_grammar.ast.to_json import to_json +from pql_grammar.ast.from_json import from_json + + +null = None +false = False +true = True + + +ast_should_be = ast.SelectStmt( + [ + ast.Column(ast.Taxon('taxon1', 'ns1', True)), + ast.Column(ast.Taxon('taxon2', 'ns2', False)), + ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), + ast.Column( + ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), + None, + ast.Taxon('custom_data', 'myns') + ), + ast.Column( + ast.TelExpr('ns3|taxon3 + 5'), + ast.Function( + 'TypeCast' + ), + ast.Taxon('custom_data_cast', 'myns') + ), + ast.Column( + ast.TelExpr('fn_4(fn_1(slug))'), + ast.Function( + 'TypeCast', + [['arg1','value1']] # normally inner pair is a tuple, but for comparison making list. 
+ ), + ) + ], + ast.Expr( + 'AND', + [ + ast.Expr( + '>', + [ + ast.Taxon('taxon6', 'ns6'), + ast.Literal(1234, '1234') + ] + ), + ast.Expr( + '==', + [ + ast.Expr( + '+', + [ + ast.Taxon('taxon10', 'ns0'), + ast.Literal(4321, '4321') + ] + ), + ast.Literal(0, '0') + ] + ) + ] + ) +) + + +json_should_be = { + "__type__": "SelectStmt", + "columns": [ + { + "__type__": "Column", + "value": { + "__type__": "Taxon", + "slug": "taxon1", + "namespace": "ns1", + "is_optional": true + } + }, + { + "__type__": "Column", + "value": { + "__type__": "Taxon", + "slug": "taxon2", + "namespace": "ns2", + "is_optional": false + } + }, + { + "__type__": "Column", + "value": { + "__type__": "Taxon", + "slug": "slug1", + "is_optional": false + }, + "alias": { + "__type__": "Taxon", + "slug": "slug1", + "namespace": "myns", + "is_optional": false + } + }, + { + "__type__": "Column", + "value": { + "__type__": "TelExpr", + "raw_value": "?ns3|taxon3 + (slug2 - 1234)" + }, + "alias": { + "__type__": "Taxon", + "slug": "custom_data", + "namespace": "myns", + "is_optional": false + } + }, + { + "__type__": "Column", + "value": { + "__type__": "TelExpr", + "raw_value": "ns3|taxon3 + 5" + }, + "type_cast": { + "__type__": "Function", + "function_name": "TypeCast" + }, + "alias": { + "__type__": "Taxon", + "slug": "custom_data_cast", + "namespace": "myns", + "is_optional": false + } + }, + { + "__type__": "Column", + "value": { + "__type__": "TelExpr", + "raw_value": "fn_4(fn_1(slug))" + }, + "type_cast": { + "__type__": "Function", + "function_name": "TypeCast", + "args": [ + [ + "arg1", + "value1" + ] + ] + } + } + ], + "where_clause": { + "__type__": "Expr", + "operator": "AND", + "args": [ + { + "__type__": "Expr", + "operator": ">", + "args": [ + { + "__type__": "Taxon", + "slug": "taxon6", + "namespace": "ns6", + "is_optional": false + }, + { + "__type__": "Literal", + "value": 1234, + "raw_value": "1234" + } + ] + }, + { + "__type__": "Expr", + "operator": "==", + "args": [ + { + "__type__": "Expr", + "operator": "+", + "args": [ + { + "__type__": "Taxon", + "slug": "taxon10", + "namespace": "ns0", + "is_optional": false + }, + { + "__type__": "Literal", + "value": 4321, + "raw_value": "4321" + } + ] + }, + { + "__type__": "Literal", + "value": 0, + "raw_value": "0" + } + ] + } + ] + } +} + + +class JsonAstTests(TestCase): + maxDiff = None + + def test_render_json_from_ast(self): + json_result = to_json(ast_should_be) + # import json; print(json.dumps(json_result, indent=4)) + assert json_should_be == json_result + + def test_render_ast_from_json(self): + ast_result = from_json(json_should_be) + # import json; print(json.dumps(json_result, indent=4)) + + ast.ast_diff(ast_should_be, ast_result) + assert ast_should_be == ast_result diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py new file mode 100644 index 0000000..bcf85d2 --- /dev/null +++ b/python/tests/ast_pql_test.py @@ -0,0 +1,142 @@ +import sys +from unittest import TestCase + +sys.path.append('./src') + +from pql_grammar.ast import model as ast +from pql_grammar.ast.to_pql import to_pql +from pql_grammar.ast.from_pql import PqlAntlrToAstParser, PqlVisitor, PqlParser + + +class ErrorAssertingPqlVisitor(PqlVisitor): + """ + Special TelVisitor for testing grammar. Throws error in case of invalid node. 
+ """ + def visitErrorNode(self, node): + wrong_symbol = node.symbol.text + position = node.symbol.column + 1 + details = f'Unexpected symbol "{wrong_symbol}" at position {position}' + raise AssertionError(details) + + +pql = """\ +select + ?ns1|taxon1, + ns2|taxon2, + slug1 as myns|slug1, + ?ns3|taxon3 + (slug2 - 1234) as myns|custom_data, + (ns3|taxon3 + 5)::TypeCast() as myns|custom_data_cast, + fn_4(fn_1(slug))::TypeCast(arg1=value1) +where + ns6|taxon6 > 1234 + and (ns0|taxon10 + 4321) == 0 +""" + +# renderer is recursive and ads parens for safety +# TODO: contemplate ways to avoid adding superfluous parens +# and upper-cases all keywords +pql_rendered_should_be = """\ +SELECT + ?ns1|taxon1, + ns2|taxon2, + slug1 AS myns|slug1, + ?ns3|taxon3 + (slug2 - 1234) AS myns|custom_data, + (ns3|taxon3 + 5)::TypeCast() AS myns|custom_data_cast, + (fn_4(fn_1(slug)))::TypeCast(arg1=value1) +WHERE + ((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0)) +; +""" + +stmt_should_be = ast.SelectStmt( + [ + ast.Column(ast.Taxon('taxon1', 'ns1', True)), + ast.Column(ast.Taxon('taxon2', 'ns2', False)), + ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), + ast.Column( + ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), + None, + ast.Taxon('custom_data', 'myns') + ), + ast.Column( + ast.TelExpr('ns3|taxon3 + 5'), + ast.Function( + 'TypeCast' + ), + ast.Taxon('custom_data_cast', 'myns') + ), + ast.Column( + ast.TelExpr('fn_4(fn_1(slug))'), + ast.Function( + 'TypeCast', + [('arg1','value1')] + ), + ) + ], + ast.Expr( + 'AND', + [ + ast.Expr( + '>', + [ + ast.Taxon('taxon6', 'ns6'), + ast.Literal(1234, '1234') + ] + ), + ast.Expr( + '==', + [ + ast.Expr( + '+', + [ + ast.Taxon('taxon10', 'ns0'), + ast.Literal(4321, '4321') + ] + ), + ast.Literal(0, '0') + ] + ) + ] + ) +) + + +class PqlAstTests(TestCase): + maxDiff = None + + def test_select(self): + + statements = [] + + class V(ErrorAssertingPqlVisitor): + def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): + columns = [ + ast.Column( + PqlAntlrToAstParser.parse_column_value(column.value), + PqlAntlrToAstParser.parse_column_typecast(column.type_cast), + PqlAntlrToAstParser.parse_column_alias(column.alias) + ) + for column in ctx.selectClause().columns() + ] + where_clause = PqlAntlrToAstParser.parse_where_clause_expr(ctx.whereClause().expr()) + + statements.append(ast.SelectStmt( + columns, + where_clause + )) + + V().visit_from_string(pql) + + assert statements + stmt = statements[0] + + assert len(stmt.columns) == len(stmt_should_be.columns) + for result, should_be in zip(stmt.columns, stmt_should_be.columns): + assert result == should_be + + # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) + assert stmt.where_clause == stmt_should_be.where_clause + + def test_render_pql_from_ast(self): + pql_result = to_pql(stmt_should_be) + assert pql_rendered_should_be == pql_result diff --git a/python/tests/pql/pql_atesta.py b/python/tests/pql/pql_atesta.py deleted file mode 100644 index a0d2cdb..0000000 --- a/python/tests/pql/pql_atesta.py +++ /dev/null @@ -1,320 +0,0 @@ -import sys -sys.path.append('./src') - -from antlr4 import CommonTokenStream, InputStream, ParserRuleContext -from antlr4.tree import Tree -from dataclasses import dataclass -from collections import namedtuple -from typing import Optional -from unittest import mock, TestCase - -from pql_grammar.antlr.PqlLexer import PqlLexer -from pql_grammar.antlr.PqlParser import PqlParser -from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor -from pql_grammar import 
operators as op - - -def full_text(ctx: ParserRuleContext) -> str: - # extracts full text from a tree of nodes, - # including white space. - if ctx: - if isinstance(ctx, ParserRuleContext): - return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) - else: - try: - # some primitive context object - return ctx.text - except AttributeError: - # Terminal Node of some sort - return str(ctx) - else: - return None - - -def unquote(s: str): - # Quoted schema, table, column names come in Postgres style - double-quotes - # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. - # https://docs.oracle.com/goldengate/1212/gg-winux/GWURF/gg_parameters183.htm#GWURF728 - # Example: - # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' - if not s: - return s - if s[0] == '"' and s[-1] == '"': - s = s[1:-1] - return s.replace('""', '"') - - -class WhereClauseParser: - - @staticmethod - def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): - is_number = e.NUMERIC_LITERAL() - is_string = e.DOUBLE_QUOTED_STRING() or e.SINGLE_QUOTED_STRING() - is_null = e.K_NULL() - is_bool = e.K_TRUE() or e.K_FALSE() - - # TODO: - # - BLOB_LITERAL - # - CURRENT_[DATE|TIME|TIMESTAMP] - - if is_null: - return None - - if is_bool: - return bool(e.K_TRUE()) - - try: - v = e.getText() - except IndexError: - raise Exception(f"Could not extract literal value node from '{e.getText()}'.") - - if is_number: - # TODO: contemplate decimal type instead - try: - return int(v) - except ValueError: - try: - return float(v) - except Exception: - raise Exception(f"Could not convert SQL number {v} to native number representation.") - - if is_string: - return unquote(v) - - return v - - _sql_name_map = { - 'AND': op.OpName.AND, - 'OR': op.OpName.OR, - 'NOT': op.OpName.NOT, - 'IS': op.OpName.IS, - '=': op.OpName.EQ, # notice WHERE clause specific handling. NOT assignment. EQ! - '==': op.OpName.EQ, # opportunistic inclusion, while we don't expect to see it in WHERE - '<>': op.OpName.NEQ, - '!=': op.OpName.NEQ, # opportunistic inclusion, while we don't expect to see it in WHERE - '>': op.OpName.GT, - '>=': op.OpName.GTE, - '<': op.OpName.LT, - '<=': op.OpName.LTE, - 'LIKE': op.OpName.LIKE, - '+':op.OpName.PLUS, - '-':op.OpName.MINUS, - '*':op.OpName.STAR, - '/':op.OpName.DIV, - '%':op.OpName.MOD, - } - - @classmethod - def _unwrap_expr_parens(cls, e: PqlParser.ExprContext) -> PqlParser.ExprContext: - # it's allowed to wrap expressions into superflous amounts of parens - # (((column > 5))) - # These come across as triple-nested [TerminalNodeImpl('('), expr, TerminalNodeImpl(')')] - # Here we check for len == 3 and if last and first Terminals are (), return middle element - expression, - # Run this recursively. 
- if e.inner: - return cls._unwrap_expr_parens(e.inner) - else: - return e - - @classmethod - def _lookup_operator_internal_name(cls, sql_operator: str): - op_name = cls._sql_name_map.get(sql_operator.upper()) - if not op_name: - raise Exception(f"Could not match operator '{sql_operator}' in where clause to a supported action.") - return op_name - - @classmethod - def _parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> op.OperatorSchema : - ctx = cls._unwrap_expr_parens(ctx) - v = ctx.literalValue() - if v: - return op.schema_literal(cls._literalValue_to_python_native(v)) - - v = ctx.unary_operator - if v: - operator = cls._lookup_operator_internal_name(full_text(v)) - if operator in (op.OpName.PLUS, op.OpName.MINUS): - return op.schema_stanza( - operator, - op.schema_literal(0), - cls._parse_where_clause_expr(ctx.right), - ) - if operator == op.OpName.NOT: - return op.schema_stanza( - operator, - cls._parse_where_clause_expr(ctx.right), - ) - - v: PqlParser.taxon = ctx.taxon() - if v: - return op.schema_stanza( - op.OpName.attr, - op.schema_literal(full_text(v)) - ) - - # v: PqlParser.NullComparisonContext = ctx.nullComparison() - # if v: - # # Note, converting `V is (NOT) null` - # # into SQL-incompatible `V ==/!= null` that we CAN do in python. - # # Mostly to avoid creating redundant operators module code. - # is_negated = bool(v.K_NOT() or v.K_NOTNULL()) - # if is_negated: - # return op.schema_stanza( - # op.OpName.NOT, - # op.schema_stanza( - # op.OpName.IS, - # cls._parse_where_clause_expr(ctx.left), - # op.schema_literal(None) - # ) - # ) - # else: - # return op.schema_stanza( - # op.OpName.IS, - # cls._parse_where_clause_expr(ctx.left), - # op.schema_literal(None) - # ) - - v: Optional[str] = full_text(ctx.operator) - if v: - # this is super generic expression of type - # left (NOT) OP right - # with a lot of options for OP value. - # not all of these values are supported by our `operators` module logic. - # Next call throws errors for unmatched operators - operator = cls._lookup_operator_internal_name(v) - _rv = op.schema_stanza( - operator, - cls._parse_where_clause_expr(ctx.left), - cls._parse_where_clause_expr(ctx.right), - ) - # if bool(ctx.is_negated): - # return op.schema_stanza( - # op.OpName.NOT, - # _rv - # ) - # else: - return _rv - - # v = ctx.K_IN() - # if v: - # if ctx.compoundSelectStmt(): - # raise Exception(f'Where expression "{full_text(ctx.compoundSelectStmt())}" is not supported yet.') - # expressions = ctx.expressions() - # if expressions: - # comps = [ - # cls._parse_where_clause_expr(expr) - # for expr in expressions.expr() - # ] - # # converting these into multiple OR equal - # left = cls._parse_where_clause_expr(ctx.left) - # clause = op.schema_stanza( - # op.OpName.OR, - # *[ - # op.schema_stanza( - # op.OpName.EQ, - # left, - # comp - # ) - # for comp in comps - # ] - # ) - # if bool(ctx.is_negated): - # return op.schema_stanza( - # op.OpName.NOT, - # clause, - # ) - # else: - # return clause - - if ctx.function(): - raise NotImplementedError('Dont know how to pack functions yet') - - raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') - - -class AssertPqlVisitor(PqlParserVisitor, WhereClauseParser): - """ - Special TelVisitor for testing grammar. Throws error in case of invalid node. 
- """ - def visitErrorNode(self, node): - wrong_symbol = node.symbol.text - position = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" at position {position}' - raise AssertionError(details) - - @classmethod - def parse_string(cls, s): - inp_stream = InputStream(s) - lexer = PqlLexer(inp_stream) - stream = CommonTokenStream(lexer) - parser = PqlParser(stream) - tree = parser.parsePql() - # Use error visitor on parsed tree to test it - visitor = cls() - visitor.visit(tree) - - -class PQLTests(TestCase): - maxDiff = None - - def test_select_no_filter(self): - - pql = """ - select - ?ns1|taxon1, - ?ns2|taxon2, - slug1 as myns|slug1, - (?ns3|taxon3 + (slug2 - 1234)) as myns|custom_data, - fn_4(fn_1(slug))::TypeCast(arg1=value1) - where - ns6|taxon6 > 1234 - and (ns0|taxon10 + 1234) == 0 - """ - - @dataclass - class Column: - value:str - type_cast:Optional[str] = None - alias:Optional[str] = None - - columns = [] - where_clause = [] - class V(AssertPqlVisitor): - def visitColumns(self, ctx:PqlParser.ColumnsContext): - column : PqlParser.ColumnContext - for column in ctx.column(): - v = full_text(column.value) - type_cast = full_text(column.type_cast.function()) if column.type_cast else None - alias = full_text(column.alias) - columns.append(Column(v, type_cast, alias)) - def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): - ww = self._parse_where_clause_expr(ctx.expr()) - where_clause.extend(ww) - - V.parse_string(pql) - - assert columns == [ - Column('?ns1|taxon1'), - Column('?ns2|taxon2'), - Column('slug1', None, 'myns|slug1'), - Column('(?ns3|taxon3 + (slug2 - 1234))', None, 'myns|custom_data'), - Column('fn_4(fn_1(slug))', 'TypeCast(arg1=value1)') - ] - assert where_clause == [ - 'AND', - ['GT', - ['attr', - ['@literalValue', 'ns6|taxon6'] - ], - ['@literalValue', 1234] - ], - ['EQ', - ['PLUS', - ['attr', - ['@literalValue', 'ns0|taxon10'] - ], - ['@literalValue', 1234] - ], - ['@literalValue', 0] - ] - ] From fdf4e1ce4353ed93f073aff48e093d66eb3961c1 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sat, 14 Nov 2020 01:20:35 -0800 Subject: [PATCH 14/32] JSON representation now uses `__typename` as name of key for node name --- python/src/pql_grammar/ast/from_json.py | 8 ++-- python/src/pql_grammar/ast/to_json.py | 5 ++- python/tests/ast_json_test.py | 54 ++++++++++++------------- 3 files changed, 35 insertions(+), 32 deletions(-) diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/ast/from_json.py index f755164..27a7d57 100644 --- a/python/src/pql_grammar/ast/from_json.py +++ b/python/src/pql_grammar/ast/from_json.py @@ -3,13 +3,13 @@ from . import model as ast -AST_NODE_MARKER = '__type__' +TYPE_ATTRIBUTE = '__typename' # GraphQL style def from_json(o: dict): - if isinstance(o, dict) and AST_NODE_MARKER in o: - name = o[AST_NODE_MARKER] + if isinstance(o, dict) and TYPE_ATTRIBUTE in o: + name = o[TYPE_ATTRIBUTE] N = ast.inventory.get(name) if not N: raise NotImplementedError(f'Renderer for node type "{name}" is not implemented.') @@ -17,7 +17,7 @@ def from_json(o: dict): return N(**{ k: from_json(v) for k, v in o.items() - if k != AST_NODE_MARKER + if k != TYPE_ATTRIBUTE }) if isinstance(o, (list, tuple)): diff --git a/python/src/pql_grammar/ast/to_json.py b/python/src/pql_grammar/ast/to_json.py index e3952fc..9fa6814 100644 --- a/python/src/pql_grammar/ast/to_json.py +++ b/python/src/pql_grammar/ast/to_json.py @@ -2,9 +2,12 @@ from . 
import model as ast +TYPE_ATTRIBUTE = '__typename' # GraphQL style + + def node_to_tuples(n: ast.Node): return [ - ('__type__', n.__class__.__name__) + (TYPE_ATTRIBUTE, n.__class__.__name__) ] + [ (field.name, getattr(n, field.name)) for field in fields(n) diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py index 4225100..4bcfe94 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -67,78 +67,78 @@ json_should_be = { - "__type__": "SelectStmt", + "__typename": "SelectStmt", "columns": [ { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "taxon1", "namespace": "ns1", "is_optional": true } }, { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "taxon2", "namespace": "ns2", "is_optional": false } }, { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "slug1", "is_optional": false }, "alias": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "slug1", "namespace": "myns", "is_optional": false } }, { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "TelExpr", + "__typename": "TelExpr", "raw_value": "?ns3|taxon3 + (slug2 - 1234)" }, "alias": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "custom_data", "namespace": "myns", "is_optional": false } }, { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "TelExpr", + "__typename": "TelExpr", "raw_value": "ns3|taxon3 + 5" }, "type_cast": { - "__type__": "Function", + "__typename": "Function", "function_name": "TypeCast" }, "alias": { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "custom_data_cast", "namespace": "myns", "is_optional": false } }, { - "__type__": "Column", + "__typename": "Column", "value": { - "__type__": "TelExpr", + "__typename": "TelExpr", "raw_value": "fn_4(fn_1(slug))" }, "type_cast": { - "__type__": "Function", + "__typename": "Function", "function_name": "TypeCast", "args": [ [ @@ -150,49 +150,49 @@ } ], "where_clause": { - "__type__": "Expr", + "__typename": "Expr", "operator": "AND", "args": [ { - "__type__": "Expr", + "__typename": "Expr", "operator": ">", "args": [ { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "taxon6", "namespace": "ns6", "is_optional": false }, { - "__type__": "Literal", + "__typename": "Literal", "value": 1234, "raw_value": "1234" } ] }, { - "__type__": "Expr", + "__typename": "Expr", "operator": "==", "args": [ { - "__type__": "Expr", + "__typename": "Expr", "operator": "+", "args": [ { - "__type__": "Taxon", + "__typename": "Taxon", "slug": "taxon10", "namespace": "ns0", "is_optional": false }, { - "__type__": "Literal", + "__typename": "Literal", "value": 4321, "raw_value": "4321" } ] }, { - "__type__": "Literal", + "__typename": "Literal", "value": 0, "raw_value": "0" } From ff1368aef6d98052f71af64aff106e1c0d85ee40 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sat, 14 Nov 2020 01:21:24 -0800 Subject: [PATCH 15/32] add ANTLR support for SQL-like `set key = value` statement for communicating settings --- grammar/PqlLexer.g4 | 1 + grammar/PqlParser.g4 | 9 +- python/src/pql_grammar/antlr/PqlLexer.py | 442 ++++++------- python/src/pql_grammar/antlr/PqlParser.py | 582 ++++++++++-------- .../pql_grammar/antlr/PqlParserListener.py | 9 + .../src/pql_grammar/antlr/PqlParserVisitor.py | 5 + 6 files changed, 582 insertions(+), 466 deletions(-) diff --git 
a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 index b2320a0..e81881e 100644 --- a/grammar/PqlLexer.g4 +++ b/grammar/PqlLexer.g4 @@ -49,6 +49,7 @@ K_NULL : N U L L; K_OR : O R; K_ORDER : O R D E R; K_SELECT : S E L E C T; +K_SET : S E T; K_TRUE : T R U E; K_WHERE : W H E R E; diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 512bd2e..4b14c95 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -25,7 +25,14 @@ sqlStmtList // this is where you add more statement types, like SET and other top-level SQL statements sqlStmt - : selectStmt + : setStmt + | selectStmt + ; + +// a way to set query context settings and avoid sending them inside PQL +// Example: set "fill in dates for date-ranged sparse data" flag for Husky. +setStmt + : K_SET key=identifierMultipart ASSIGN values=expr ; selectStmt diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index 888284c..a93b9a2 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2;") - buf.write("\u0202\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2<") + buf.write("\u0208\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -21,49 +21,49 @@ def serializedATN(): buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") - buf.write("U\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\6") - buf.write("\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n\3\n\3") - buf.write("\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\20") - buf.write("\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25") - buf.write("\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33") - buf.write("\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37") - buf.write("\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3#\3#\3") - buf.write("#\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3") - buf.write("&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3)\3)\3") - buf.write(")\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+\3,\3,\3,\3,\3,\3,\3-\3") - buf.write("-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3\60") - buf.write("\6\60\u0148\n\60\r\60\16\60\u0149\3\60\3\60\7\60\u014e") - buf.write("\n\60\f\60\16\60\u0151\13\60\5\60\u0153\n\60\3\60\3\60") - buf.write("\5\60\u0157\n\60\3\60\6\60\u015a\n\60\r\60\16\60\u015b") - buf.write("\5\60\u015e\n\60\3\60\3\60\6\60\u0162\n\60\r\60\16\60") - buf.write("\u0163\3\60\3\60\5\60\u0168\n\60\3\60\6\60\u016b\n\60") - buf.write("\r\60\16\60\u016c\5\60\u016f\n\60\5\60\u0171\n\60\3\61") - buf.write("\3\61\3\62\3\62\3\62\3\62\7\62\u0179\n\62\f\62\16\62\u017c") - buf.write("\13\62\3\62\3\62\3\63\3\63\3\63\3\63\7\63\u0184\n\63\f") - buf.write("\63\16\63\u0187\13\63\3\63\3\63\3\64\3\64\3\65\3\65\3") - buf.write("\65\3\65\7\65\u0191\n\65\f\65\16\65\u0194\13\65\3\65\3") - buf.write("\65\3\66\3\66\3\66\3\66\7\66\u019c\n\66\f\66\16\66\u019f") - buf.write("\13\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67\5\67\u01a8\n") - buf.write("\67\3\67\7\67\u01ab\n\67\f\67\16\67\u01ae\13\67\3\67\3") - 
buf.write("\67\38\38\38\38\78\u01b6\n8\f8\168\u01b9\138\38\38\38") - buf.write("\58\u01be\n8\38\38\39\39\39\39\3:\3:\7:\u01c8\n:\f:\16") - buf.write(":\u01cb\13:\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A") - buf.write("\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3") - buf.write("J\3K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3") - buf.write("S\3T\3T\3U\3U\3\u01b7\2V\3\3\5\4\7\5\t\6\13\7\r\b\17\t") - buf.write("\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23") + buf.write("U\4V\tV\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3") + buf.write("\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n") + buf.write("\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17") + buf.write("\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25") + buf.write("\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32") + buf.write("\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37") + buf.write("\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3") + buf.write("#\3#\3#\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3") + buf.write("&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3") + buf.write(")\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+\3,\3,\3,\3,\3,\3") + buf.write(",\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60") + buf.write("\3\60\3\60\3\60\3\60\3\60\3\61\6\61\u014e\n\61\r\61\16") + buf.write("\61\u014f\3\61\3\61\7\61\u0154\n\61\f\61\16\61\u0157\13") + buf.write("\61\5\61\u0159\n\61\3\61\3\61\5\61\u015d\n\61\3\61\6\61") + buf.write("\u0160\n\61\r\61\16\61\u0161\5\61\u0164\n\61\3\61\3\61") + buf.write("\6\61\u0168\n\61\r\61\16\61\u0169\3\61\3\61\5\61\u016e") + buf.write("\n\61\3\61\6\61\u0171\n\61\r\61\16\61\u0172\5\61\u0175") + buf.write("\n\61\5\61\u0177\n\61\3\62\3\62\3\63\3\63\3\63\3\63\7") + buf.write("\63\u017f\n\63\f\63\16\63\u0182\13\63\3\63\3\63\3\64\3") + buf.write("\64\3\64\3\64\7\64\u018a\n\64\f\64\16\64\u018d\13\64\3") + buf.write("\64\3\64\3\65\3\65\3\66\3\66\3\66\3\66\7\66\u0197\n\66") + buf.write("\f\66\16\66\u019a\13\66\3\66\3\66\3\67\3\67\3\67\3\67") + buf.write("\7\67\u01a2\n\67\f\67\16\67\u01a5\13\67\3\67\3\67\38\3") + buf.write("8\38\38\38\58\u01ae\n8\38\78\u01b1\n8\f8\168\u01b4\13") + buf.write("8\38\38\39\39\39\39\79\u01bc\n9\f9\169\u01bf\139\39\3") + buf.write("9\39\59\u01c4\n9\39\39\3:\3:\3:\3:\3;\3;\7;\u01ce\n;\f") + buf.write(";\16;\u01d1\13;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3") + buf.write("B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3") + buf.write("K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3") + buf.write("T\3T\3U\3U\3V\3V\3\u01bd\2W\3\3\5\4\7\5\t\6\13\7\r\b\17") + buf.write("\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23") buf.write("%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36") buf.write(";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63") - buf.write("e\64g\65i\66k\67m8o9q:s;u\2w\2y\2{\2}\2\177\2\u0081\2") - buf.write("\u0083\2\u0085\2\u0087\2\u0089\2\u008b\2\u008d\2\u008f") - buf.write("\2\u0091\2\u0093\2\u0095\2\u0097\2\u0099\2\u009b\2\u009d") - buf.write("\2\u009f\2\u00a1\2\u00a3\2\u00a5\2\u00a7\2\u00a9\2\3\2") + buf.write("e\64g\65i\66k\67m8o9q:s;u\2\2\u00b5\u00b6\7?\2\2\u00b6\n\3\2\2\2\u00b7\u00b8") - buf.write("\7#\2\2\u00b8\u00b9\7?\2\2\u00b9\f\3\2\2\2\u00ba\u00bb") - buf.write("\7>\2\2\u00bb\u00bc\7@\2\2\u00bc\16\3\2\2\2\u00bd\u00be") - buf.write("\7~\2\2\u00be\u00bf\7~\2\2\u00bf\20\3\2\2\2\u00c0\u00c1") - buf.write("\7>\2\2\u00c1\u00c2\7>\2\2\u00c2\22\3\2\2\2\u00c3\u00c4") - buf.write("\7@\2\2\u00c4\u00c5\7@\2\2\u00c5\24\3\2\2\2\u00c6\u00c7") - 
buf.write("\7(\2\2\u00c7\26\3\2\2\2\u00c8\u00c9\7?\2\2\u00c9\30\3") - buf.write("\2\2\2\u00ca\u00cb\7+\2\2\u00cb\32\3\2\2\2\u00cc\u00cd") - buf.write("\7<\2\2\u00cd\34\3\2\2\2\u00ce\u00cf\7.\2\2\u00cf\36\3") - buf.write("\2\2\2\u00d0\u00d1\7\60\2\2\u00d1 \3\2\2\2\u00d2\u00d3") - buf.write("\7\61\2\2\u00d3\"\3\2\2\2\u00d4\u00d5\7@\2\2\u00d5$\3") - buf.write("\2\2\2\u00d6\u00d7\7>\2\2\u00d7&\3\2\2\2\u00d8\u00d9\7") - buf.write("/\2\2\u00d9(\3\2\2\2\u00da\u00db\7\'\2\2\u00db*\3\2\2") - buf.write("\2\u00dc\u00dd\7*\2\2\u00dd,\3\2\2\2\u00de\u00df\7~\2") - buf.write("\2\u00df.\3\2\2\2\u00e0\u00e1\7-\2\2\u00e1\60\3\2\2\2") - buf.write("\u00e2\u00e3\7A\2\2\u00e3\62\3\2\2\2\u00e4\u00e5\7=\2") - buf.write("\2\u00e5\64\3\2\2\2\u00e6\u00e7\7,\2\2\u00e7\66\3\2\2") - buf.write("\2\u00e8\u00e9\7\u0080\2\2\u00e98\3\2\2\2\u00ea\u00eb") - buf.write("\7a\2\2\u00eb:\3\2\2\2\u00ec\u00ed\5w<\2\u00ed\u00ee\5") - buf.write("\u0091I\2\u00ee\u00ef\5}?\2\u00ef<\3\2\2\2\u00f0\u00f1") - buf.write("\5w<\2\u00f1\u00f2\5\u009bN\2\u00f2>\3\2\2\2\u00f3\u00f4") - buf.write("\5w<\2\u00f4\u00f5\5\u009bN\2\u00f5\u00f6\5{>\2\u00f6") - buf.write("@\3\2\2\2\u00f7\u00f8\5y=\2\u00f8\u00f9\5\u00a7T\2\u00f9") - buf.write("B\3\2\2\2\u00fa\u00fb\5}?\2\u00fb\u00fc\5\177@\2\u00fc") - buf.write("\u00fd\5\u009bN\2\u00fd\u00fe\5{>\2\u00feD\3\2\2\2\u00ff") - buf.write("\u0100\5\u0081A\2\u0100\u0101\5w<\2\u0101\u0102\5\u008d") - buf.write("G\2\u0102\u0103\5\u009bN\2\u0103\u0104\5\177@\2\u0104") - buf.write("F\3\2\2\2\u0105\u0106\5\u0087D\2\u0106\u0107\5\u009bN") - buf.write("\2\u0107H\3\2\2\2\u0108\u0109\5\u0087D\2\u0109\u010a\5") - buf.write("\u009bN\2\u010a\u010b\5\u0091I\2\u010b\u010c\5\u009fP") - buf.write("\2\u010c\u010d\5\u008dG\2\u010d\u010e\5\u008dG\2\u010e") - buf.write("J\3\2\2\2\u010f\u0110\5\u008dG\2\u0110\u0111\5\u0087D") - buf.write("\2\u0111\u0112\5\u008bF\2\u0112\u0113\5\177@\2\u0113L") - buf.write("\3\2\2\2\u0114\u0115\5\u008dG\2\u0115\u0116\5\u0087D\2") - buf.write("\u0116\u0117\5\u008fH\2\u0117\u0118\5\u0087D\2\u0118\u0119") - buf.write("\5\u009dO\2\u0119N\3\2\2\2\u011a\u011b\5\u0091I\2\u011b") - buf.write("\u011c\5\u0093J\2\u011c\u011d\5\u009dO\2\u011dP\3\2\2") - buf.write("\2\u011e\u011f\5\u0091I\2\u011f\u0120\5\u0093J\2\u0120") - buf.write("\u0121\5\u009dO\2\u0121\u0122\5\u0091I\2\u0122\u0123\5") - buf.write("\u009fP\2\u0123\u0124\5\u008dG\2\u0124\u0125\5\u008dG") - buf.write("\2\u0125R\3\2\2\2\u0126\u0127\5\u0091I\2\u0127\u0128\5") - buf.write("\u009fP\2\u0128\u0129\5\u008dG\2\u0129\u012a\5\u008dG") - buf.write("\2\u012aT\3\2\2\2\u012b\u012c\5\u0093J\2\u012c\u012d\5") - buf.write("\u0099M\2\u012dV\3\2\2\2\u012e\u012f\5\u0093J\2\u012f") - buf.write("\u0130\5\u0099M\2\u0130\u0131\5}?\2\u0131\u0132\5\177") - buf.write("@\2\u0132\u0133\5\u0099M\2\u0133X\3\2\2\2\u0134\u0135") - buf.write("\5\u009bN\2\u0135\u0136\5\177@\2\u0136\u0137\5\u008dG") - buf.write("\2\u0137\u0138\5\177@\2\u0138\u0139\5{>\2\u0139\u013a") - buf.write("\5\u009dO\2\u013aZ\3\2\2\2\u013b\u013c\5\u009dO\2\u013c") - buf.write("\u013d\5\u0099M\2\u013d\u013e\5\u009fP\2\u013e\u013f\5") - buf.write("\177@\2\u013f\\\3\2\2\2\u0140\u0141\5\u00a3R\2\u0141\u0142") - buf.write("\5\u0085C\2\u0142\u0143\5\177@\2\u0143\u0144\5\u0099M") - buf.write("\2\u0144\u0145\5\177@\2\u0145^\3\2\2\2\u0146\u0148\5u") - buf.write(";\2\u0147\u0146\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u0147") - buf.write("\3\2\2\2\u0149\u014a\3\2\2\2\u014a\u0152\3\2\2\2\u014b") - buf.write("\u014f\7\60\2\2\u014c\u014e\5u;\2\u014d\u014c\3\2\2\2") - 
buf.write("\u014e\u0151\3\2\2\2\u014f\u014d\3\2\2\2\u014f\u0150\3") - buf.write("\2\2\2\u0150\u0153\3\2\2\2\u0151\u014f\3\2\2\2\u0152\u014b") - buf.write("\3\2\2\2\u0152\u0153\3\2\2\2\u0153\u015d\3\2\2\2\u0154") - buf.write("\u0156\5\177@\2\u0155\u0157\t\2\2\2\u0156\u0155\3\2\2") - buf.write("\2\u0156\u0157\3\2\2\2\u0157\u0159\3\2\2\2\u0158\u015a") - buf.write("\5u;\2\u0159\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u0159") - buf.write("\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u015e\3\2\2\2\u015d") - buf.write("\u0154\3\2\2\2\u015d\u015e\3\2\2\2\u015e\u0171\3\2\2\2") - buf.write("\u015f\u0161\7\60\2\2\u0160\u0162\5u;\2\u0161\u0160\3") - buf.write("\2\2\2\u0162\u0163\3\2\2\2\u0163\u0161\3\2\2\2\u0163\u0164") - buf.write("\3\2\2\2\u0164\u016e\3\2\2\2\u0165\u0167\5\177@\2\u0166") - buf.write("\u0168\t\2\2\2\u0167\u0166\3\2\2\2\u0167\u0168\3\2\2\2") - buf.write("\u0168\u016a\3\2\2\2\u0169\u016b\5u;\2\u016a\u0169\3\2") - buf.write("\2\2\u016b\u016c\3\2\2\2\u016c\u016a\3\2\2\2\u016c\u016d") - buf.write("\3\2\2\2\u016d\u016f\3\2\2\2\u016e\u0165\3\2\2\2\u016e") - buf.write("\u016f\3\2\2\2\u016f\u0171\3\2\2\2\u0170\u0147\3\2\2\2") - buf.write("\u0170\u015f\3\2\2\2\u0171`\3\2\2\2\u0172\u0173\5c\62") - buf.write("\2\u0173b\3\2\2\2\u0174\u017a\7$\2\2\u0175\u0176\7^\2") - buf.write("\2\u0176\u0179\7$\2\2\u0177\u0179\n\3\2\2\u0178\u0175") - buf.write("\3\2\2\2\u0178\u0177\3\2\2\2\u0179\u017c\3\2\2\2\u017a") - buf.write("\u0178\3\2\2\2\u017a\u017b\3\2\2\2\u017b\u017d\3\2\2\2") - buf.write("\u017c\u017a\3\2\2\2\u017d\u017e\7$\2\2\u017ed\3\2\2\2") - buf.write("\u017f\u0185\7$\2\2\u0180\u0181\7$\2\2\u0181\u0184\7$") - buf.write("\2\2\u0182\u0184\n\3\2\2\u0183\u0180\3\2\2\2\u0183\u0182") - buf.write("\3\2\2\2\u0184\u0187\3\2\2\2\u0185\u0183\3\2\2\2\u0185") - buf.write("\u0186\3\2\2\2\u0186\u0188\3\2\2\2\u0187\u0185\3\2\2\2") - buf.write("\u0188\u0189\7$\2\2\u0189f\3\2\2\2\u018a\u018b\5i\65\2") - buf.write("\u018bh\3\2\2\2\u018c\u0192\7)\2\2\u018d\u018e\7^\2\2") - buf.write("\u018e\u0191\7)\2\2\u018f\u0191\n\4\2\2\u0190\u018d\3") - buf.write("\2\2\2\u0190\u018f\3\2\2\2\u0191\u0194\3\2\2\2\u0192\u0190") - buf.write("\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\3\2\2\2\u0194") - buf.write("\u0192\3\2\2\2\u0195\u0196\7)\2\2\u0196j\3\2\2\2\u0197") - buf.write("\u019d\7)\2\2\u0198\u0199\7)\2\2\u0199\u019c\7)\2\2\u019a") - buf.write("\u019c\n\4\2\2\u019b\u0198\3\2\2\2\u019b\u019a\3\2\2\2") - buf.write("\u019c\u019f\3\2\2\2\u019d\u019b\3\2\2\2\u019d\u019e\3") - buf.write("\2\2\2\u019e\u01a0\3\2\2\2\u019f\u019d\3\2\2\2\u01a0\u01a1") - buf.write("\7)\2\2\u01a1l\3\2\2\2\u01a2\u01a3\7/\2\2\u01a3\u01a8") - buf.write("\7/\2\2\u01a4\u01a5\7\61\2\2\u01a5\u01a8\7\61\2\2\u01a6") - buf.write("\u01a8\7%\2\2\u01a7\u01a2\3\2\2\2\u01a7\u01a4\3\2\2\2") - buf.write("\u01a7\u01a6\3\2\2\2\u01a8\u01ac\3\2\2\2\u01a9\u01ab\n") - buf.write("\5\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac\u01aa") - buf.write("\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01af\3\2\2\2\u01ae") - buf.write("\u01ac\3\2\2\2\u01af\u01b0\b\67\2\2\u01b0n\3\2\2\2\u01b1") - buf.write("\u01b2\7\61\2\2\u01b2\u01b3\7,\2\2\u01b3\u01b7\3\2\2\2") - buf.write("\u01b4\u01b6\13\2\2\2\u01b5\u01b4\3\2\2\2\u01b6\u01b9") - buf.write("\3\2\2\2\u01b7\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8") - buf.write("\u01bd\3\2\2\2\u01b9\u01b7\3\2\2\2\u01ba\u01bb\7,\2\2") - buf.write("\u01bb\u01be\7\61\2\2\u01bc\u01be\7\2\2\3\u01bd\u01ba") - buf.write("\3\2\2\2\u01bd\u01bc\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf") - buf.write("\u01c0\b8\2\2\u01c0p\3\2\2\2\u01c1\u01c2\t\6\2\2\u01c2") - 
buf.write("\u01c3\3\2\2\2\u01c3\u01c4\b9\2\2\u01c4r\3\2\2\2\u01c5") - buf.write("\u01c9\t\7\2\2\u01c6\u01c8\t\b\2\2\u01c7\u01c6\3\2\2\2") - buf.write("\u01c8\u01cb\3\2\2\2\u01c9\u01c7\3\2\2\2\u01c9\u01ca\3") - buf.write("\2\2\2\u01cat\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01cd") - buf.write("\t\t\2\2\u01cdv\3\2\2\2\u01ce\u01cf\t\n\2\2\u01cfx\3\2") - buf.write("\2\2\u01d0\u01d1\t\13\2\2\u01d1z\3\2\2\2\u01d2\u01d3\t") - buf.write("\f\2\2\u01d3|\3\2\2\2\u01d4\u01d5\t\r\2\2\u01d5~\3\2\2") - buf.write("\2\u01d6\u01d7\t\16\2\2\u01d7\u0080\3\2\2\2\u01d8\u01d9") - buf.write("\t\17\2\2\u01d9\u0082\3\2\2\2\u01da\u01db\t\20\2\2\u01db") - buf.write("\u0084\3\2\2\2\u01dc\u01dd\t\21\2\2\u01dd\u0086\3\2\2") - buf.write("\2\u01de\u01df\t\22\2\2\u01df\u0088\3\2\2\2\u01e0\u01e1") - buf.write("\t\23\2\2\u01e1\u008a\3\2\2\2\u01e2\u01e3\t\24\2\2\u01e3") - buf.write("\u008c\3\2\2\2\u01e4\u01e5\t\25\2\2\u01e5\u008e\3\2\2") - buf.write("\2\u01e6\u01e7\t\26\2\2\u01e7\u0090\3\2\2\2\u01e8\u01e9") - buf.write("\t\27\2\2\u01e9\u0092\3\2\2\2\u01ea\u01eb\t\30\2\2\u01eb") - buf.write("\u0094\3\2\2\2\u01ec\u01ed\t\31\2\2\u01ed\u0096\3\2\2") - buf.write("\2\u01ee\u01ef\t\32\2\2\u01ef\u0098\3\2\2\2\u01f0\u01f1") - buf.write("\t\33\2\2\u01f1\u009a\3\2\2\2\u01f2\u01f3\t\34\2\2\u01f3") - buf.write("\u009c\3\2\2\2\u01f4\u01f5\t\35\2\2\u01f5\u009e\3\2\2") - buf.write("\2\u01f6\u01f7\t\36\2\2\u01f7\u00a0\3\2\2\2\u01f8\u01f9") - buf.write("\t\37\2\2\u01f9\u00a2\3\2\2\2\u01fa\u01fb\t \2\2\u01fb") - buf.write("\u00a4\3\2\2\2\u01fc\u01fd\t!\2\2\u01fd\u00a6\3\2\2\2") - buf.write("\u01fe\u01ff\t\"\2\2\u01ff\u00a8\3\2\2\2\u0200\u0201\t") - buf.write("#\2\2\u0201\u00aa\3\2\2\2\33\2\u0149\u014f\u0152\u0156") - buf.write("\u015b\u015d\u0163\u0167\u016c\u016e\u0170\u0178\u017a") - buf.write("\u0183\u0185\u0190\u0192\u019b\u019d\u01a7\u01ac\u01b7") - buf.write("\u01bd\u01c9\3\2\3\2") + buf.write("\2\2\2u\3\2\2\2\3\u00ad\3\2\2\2\5\u00b0\3\2\2\2\7\u00b3") + buf.write("\3\2\2\2\t\u00b6\3\2\2\2\13\u00b9\3\2\2\2\r\u00bc\3\2") + buf.write("\2\2\17\u00bf\3\2\2\2\21\u00c2\3\2\2\2\23\u00c5\3\2\2") + buf.write("\2\25\u00c8\3\2\2\2\27\u00ca\3\2\2\2\31\u00cc\3\2\2\2") + buf.write("\33\u00ce\3\2\2\2\35\u00d0\3\2\2\2\37\u00d2\3\2\2\2!\u00d4") + buf.write("\3\2\2\2#\u00d6\3\2\2\2%\u00d8\3\2\2\2\'\u00da\3\2\2\2") + buf.write(")\u00dc\3\2\2\2+\u00de\3\2\2\2-\u00e0\3\2\2\2/\u00e2\3") + buf.write("\2\2\2\61\u00e4\3\2\2\2\63\u00e6\3\2\2\2\65\u00e8\3\2") + buf.write("\2\2\67\u00ea\3\2\2\29\u00ec\3\2\2\2;\u00ee\3\2\2\2=\u00f2") + buf.write("\3\2\2\2?\u00f5\3\2\2\2A\u00f9\3\2\2\2C\u00fc\3\2\2\2") + buf.write("E\u0101\3\2\2\2G\u0107\3\2\2\2I\u010a\3\2\2\2K\u0111\3") + buf.write("\2\2\2M\u0116\3\2\2\2O\u011c\3\2\2\2Q\u0120\3\2\2\2S\u0128") + buf.write("\3\2\2\2U\u012d\3\2\2\2W\u0130\3\2\2\2Y\u0136\3\2\2\2") + buf.write("[\u013d\3\2\2\2]\u0141\3\2\2\2_\u0146\3\2\2\2a\u0176\3") + buf.write("\2\2\2c\u0178\3\2\2\2e\u017a\3\2\2\2g\u0185\3\2\2\2i\u0190") + buf.write("\3\2\2\2k\u0192\3\2\2\2m\u019d\3\2\2\2o\u01ad\3\2\2\2") + buf.write("q\u01b7\3\2\2\2s\u01c7\3\2\2\2u\u01cb\3\2\2\2w\u01d2\3") + buf.write("\2\2\2y\u01d4\3\2\2\2{\u01d6\3\2\2\2}\u01d8\3\2\2\2\177") + buf.write("\u01da\3\2\2\2\u0081\u01dc\3\2\2\2\u0083\u01de\3\2\2\2") + buf.write("\u0085\u01e0\3\2\2\2\u0087\u01e2\3\2\2\2\u0089\u01e4\3") + buf.write("\2\2\2\u008b\u01e6\3\2\2\2\u008d\u01e8\3\2\2\2\u008f\u01ea") + buf.write("\3\2\2\2\u0091\u01ec\3\2\2\2\u0093\u01ee\3\2\2\2\u0095") + buf.write("\u01f0\3\2\2\2\u0097\u01f2\3\2\2\2\u0099\u01f4\3\2\2\2") + 
buf.write("\u009b\u01f6\3\2\2\2\u009d\u01f8\3\2\2\2\u009f\u01fa\3") + buf.write("\2\2\2\u00a1\u01fc\3\2\2\2\u00a3\u01fe\3\2\2\2\u00a5\u0200") + buf.write("\3\2\2\2\u00a7\u0202\3\2\2\2\u00a9\u0204\3\2\2\2\u00ab") + buf.write("\u0206\3\2\2\2\u00ad\u00ae\7(\2\2\u00ae\u00af\7(\2\2\u00af") + buf.write("\4\3\2\2\2\u00b0\u00b1\7?\2\2\u00b1\u00b2\7?\2\2\u00b2") + buf.write("\6\3\2\2\2\u00b3\u00b4\7@\2\2\u00b4\u00b5\7?\2\2\u00b5") + buf.write("\b\3\2\2\2\u00b6\u00b7\7>\2\2\u00b7\u00b8\7?\2\2\u00b8") + buf.write("\n\3\2\2\2\u00b9\u00ba\7#\2\2\u00ba\u00bb\7?\2\2\u00bb") + buf.write("\f\3\2\2\2\u00bc\u00bd\7>\2\2\u00bd\u00be\7@\2\2\u00be") + buf.write("\16\3\2\2\2\u00bf\u00c0\7~\2\2\u00c0\u00c1\7~\2\2\u00c1") + buf.write("\20\3\2\2\2\u00c2\u00c3\7>\2\2\u00c3\u00c4\7>\2\2\u00c4") + buf.write("\22\3\2\2\2\u00c5\u00c6\7@\2\2\u00c6\u00c7\7@\2\2\u00c7") + buf.write("\24\3\2\2\2\u00c8\u00c9\7(\2\2\u00c9\26\3\2\2\2\u00ca") + buf.write("\u00cb\7?\2\2\u00cb\30\3\2\2\2\u00cc\u00cd\7+\2\2\u00cd") + buf.write("\32\3\2\2\2\u00ce\u00cf\7<\2\2\u00cf\34\3\2\2\2\u00d0") + buf.write("\u00d1\7.\2\2\u00d1\36\3\2\2\2\u00d2\u00d3\7\60\2\2\u00d3") + buf.write(" \3\2\2\2\u00d4\u00d5\7\61\2\2\u00d5\"\3\2\2\2\u00d6\u00d7") + buf.write("\7@\2\2\u00d7$\3\2\2\2\u00d8\u00d9\7>\2\2\u00d9&\3\2\2") + buf.write("\2\u00da\u00db\7/\2\2\u00db(\3\2\2\2\u00dc\u00dd\7\'\2") + buf.write("\2\u00dd*\3\2\2\2\u00de\u00df\7*\2\2\u00df,\3\2\2\2\u00e0") + buf.write("\u00e1\7~\2\2\u00e1.\3\2\2\2\u00e2\u00e3\7-\2\2\u00e3") + buf.write("\60\3\2\2\2\u00e4\u00e5\7A\2\2\u00e5\62\3\2\2\2\u00e6") + buf.write("\u00e7\7=\2\2\u00e7\64\3\2\2\2\u00e8\u00e9\7,\2\2\u00e9") + buf.write("\66\3\2\2\2\u00ea\u00eb\7\u0080\2\2\u00eb8\3\2\2\2\u00ec") + buf.write("\u00ed\7a\2\2\u00ed:\3\2\2\2\u00ee\u00ef\5y=\2\u00ef\u00f0") + buf.write("\5\u0093J\2\u00f0\u00f1\5\177@\2\u00f1<\3\2\2\2\u00f2") + buf.write("\u00f3\5y=\2\u00f3\u00f4\5\u009dO\2\u00f4>\3\2\2\2\u00f5") + buf.write("\u00f6\5y=\2\u00f6\u00f7\5\u009dO\2\u00f7\u00f8\5}?\2") + buf.write("\u00f8@\3\2\2\2\u00f9\u00fa\5{>\2\u00fa\u00fb\5\u00a9") + buf.write("U\2\u00fbB\3\2\2\2\u00fc\u00fd\5\177@\2\u00fd\u00fe\5") + buf.write("\u0081A\2\u00fe\u00ff\5\u009dO\2\u00ff\u0100\5}?\2\u0100") + buf.write("D\3\2\2\2\u0101\u0102\5\u0083B\2\u0102\u0103\5y=\2\u0103") + buf.write("\u0104\5\u008fH\2\u0104\u0105\5\u009dO\2\u0105\u0106\5") + buf.write("\u0081A\2\u0106F\3\2\2\2\u0107\u0108\5\u0089E\2\u0108") + buf.write("\u0109\5\u009dO\2\u0109H\3\2\2\2\u010a\u010b\5\u0089E") + buf.write("\2\u010b\u010c\5\u009dO\2\u010c\u010d\5\u0093J\2\u010d") + buf.write("\u010e\5\u00a1Q\2\u010e\u010f\5\u008fH\2\u010f\u0110\5") + buf.write("\u008fH\2\u0110J\3\2\2\2\u0111\u0112\5\u008fH\2\u0112") + buf.write("\u0113\5\u0089E\2\u0113\u0114\5\u008dG\2\u0114\u0115\5") + buf.write("\u0081A\2\u0115L\3\2\2\2\u0116\u0117\5\u008fH\2\u0117") + buf.write("\u0118\5\u0089E\2\u0118\u0119\5\u0091I\2\u0119\u011a\5") + buf.write("\u0089E\2\u011a\u011b\5\u009fP\2\u011bN\3\2\2\2\u011c") + buf.write("\u011d\5\u0093J\2\u011d\u011e\5\u0095K\2\u011e\u011f\5") + buf.write("\u009fP\2\u011fP\3\2\2\2\u0120\u0121\5\u0093J\2\u0121") + buf.write("\u0122\5\u0095K\2\u0122\u0123\5\u009fP\2\u0123\u0124\5") + buf.write("\u0093J\2\u0124\u0125\5\u00a1Q\2\u0125\u0126\5\u008fH") + buf.write("\2\u0126\u0127\5\u008fH\2\u0127R\3\2\2\2\u0128\u0129\5") + buf.write("\u0093J\2\u0129\u012a\5\u00a1Q\2\u012a\u012b\5\u008fH") + buf.write("\2\u012b\u012c\5\u008fH\2\u012cT\3\2\2\2\u012d\u012e\5") + buf.write("\u0095K\2\u012e\u012f\5\u009bN\2\u012fV\3\2\2\2\u0130") + 
buf.write("\u0131\5\u0095K\2\u0131\u0132\5\u009bN\2\u0132\u0133\5") + buf.write("\177@\2\u0133\u0134\5\u0081A\2\u0134\u0135\5\u009bN\2") + buf.write("\u0135X\3\2\2\2\u0136\u0137\5\u009dO\2\u0137\u0138\5\u0081") + buf.write("A\2\u0138\u0139\5\u008fH\2\u0139\u013a\5\u0081A\2\u013a") + buf.write("\u013b\5}?\2\u013b\u013c\5\u009fP\2\u013cZ\3\2\2\2\u013d") + buf.write("\u013e\5\u009dO\2\u013e\u013f\5\u0081A\2\u013f\u0140\5") + buf.write("\u009fP\2\u0140\\\3\2\2\2\u0141\u0142\5\u009fP\2\u0142") + buf.write("\u0143\5\u009bN\2\u0143\u0144\5\u00a1Q\2\u0144\u0145\5") + buf.write("\u0081A\2\u0145^\3\2\2\2\u0146\u0147\5\u00a5S\2\u0147") + buf.write("\u0148\5\u0087D\2\u0148\u0149\5\u0081A\2\u0149\u014a\5") + buf.write("\u009bN\2\u014a\u014b\5\u0081A\2\u014b`\3\2\2\2\u014c") + buf.write("\u014e\5w<\2\u014d\u014c\3\2\2\2\u014e\u014f\3\2\2\2\u014f") + buf.write("\u014d\3\2\2\2\u014f\u0150\3\2\2\2\u0150\u0158\3\2\2\2") + buf.write("\u0151\u0155\7\60\2\2\u0152\u0154\5w<\2\u0153\u0152\3") + buf.write("\2\2\2\u0154\u0157\3\2\2\2\u0155\u0153\3\2\2\2\u0155\u0156") + buf.write("\3\2\2\2\u0156\u0159\3\2\2\2\u0157\u0155\3\2\2\2\u0158") + buf.write("\u0151\3\2\2\2\u0158\u0159\3\2\2\2\u0159\u0163\3\2\2\2") + buf.write("\u015a\u015c\5\u0081A\2\u015b\u015d\t\2\2\2\u015c\u015b") + buf.write("\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u015f\3\2\2\2\u015e") + buf.write("\u0160\5w<\2\u015f\u015e\3\2\2\2\u0160\u0161\3\2\2\2\u0161") + buf.write("\u015f\3\2\2\2\u0161\u0162\3\2\2\2\u0162\u0164\3\2\2\2") + buf.write("\u0163\u015a\3\2\2\2\u0163\u0164\3\2\2\2\u0164\u0177\3") + buf.write("\2\2\2\u0165\u0167\7\60\2\2\u0166\u0168\5w<\2\u0167\u0166") + buf.write("\3\2\2\2\u0168\u0169\3\2\2\2\u0169\u0167\3\2\2\2\u0169") + buf.write("\u016a\3\2\2\2\u016a\u0174\3\2\2\2\u016b\u016d\5\u0081") + buf.write("A\2\u016c\u016e\t\2\2\2\u016d\u016c\3\2\2\2\u016d\u016e") + buf.write("\3\2\2\2\u016e\u0170\3\2\2\2\u016f\u0171\5w<\2\u0170\u016f") + buf.write("\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0170\3\2\2\2\u0172") + buf.write("\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174\u016b\3\2\2\2") + buf.write("\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2\u0176\u014d\3") + buf.write("\2\2\2\u0176\u0165\3\2\2\2\u0177b\3\2\2\2\u0178\u0179") + buf.write("\5e\63\2\u0179d\3\2\2\2\u017a\u0180\7$\2\2\u017b\u017c") + buf.write("\7^\2\2\u017c\u017f\7$\2\2\u017d\u017f\n\3\2\2\u017e\u017b") + buf.write("\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2\2\2\u0180") + buf.write("\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0183\3\2\2\2") + buf.write("\u0182\u0180\3\2\2\2\u0183\u0184\7$\2\2\u0184f\3\2\2\2") + buf.write("\u0185\u018b\7$\2\2\u0186\u0187\7$\2\2\u0187\u018a\7$") + buf.write("\2\2\u0188\u018a\n\3\2\2\u0189\u0186\3\2\2\2\u0189\u0188") + buf.write("\3\2\2\2\u018a\u018d\3\2\2\2\u018b\u0189\3\2\2\2\u018b") + buf.write("\u018c\3\2\2\2\u018c\u018e\3\2\2\2\u018d\u018b\3\2\2\2") + buf.write("\u018e\u018f\7$\2\2\u018fh\3\2\2\2\u0190\u0191\5k\66\2") + buf.write("\u0191j\3\2\2\2\u0192\u0198\7)\2\2\u0193\u0194\7^\2\2") + buf.write("\u0194\u0197\7)\2\2\u0195\u0197\n\4\2\2\u0196\u0193\3") + buf.write("\2\2\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196") + buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\3\2\2\2\u019a") + buf.write("\u0198\3\2\2\2\u019b\u019c\7)\2\2\u019cl\3\2\2\2\u019d") + buf.write("\u01a3\7)\2\2\u019e\u019f\7)\2\2\u019f\u01a2\7)\2\2\u01a0") + buf.write("\u01a2\n\4\2\2\u01a1\u019e\3\2\2\2\u01a1\u01a0\3\2\2\2") + buf.write("\u01a2\u01a5\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3") + buf.write("\2\2\2\u01a4\u01a6\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a6\u01a7") + 
buf.write("\7)\2\2\u01a7n\3\2\2\2\u01a8\u01a9\7/\2\2\u01a9\u01ae") + buf.write("\7/\2\2\u01aa\u01ab\7\61\2\2\u01ab\u01ae\7\61\2\2\u01ac") + buf.write("\u01ae\7%\2\2\u01ad\u01a8\3\2\2\2\u01ad\u01aa\3\2\2\2") + buf.write("\u01ad\u01ac\3\2\2\2\u01ae\u01b2\3\2\2\2\u01af\u01b1\n") + buf.write("\5\2\2\u01b0\u01af\3\2\2\2\u01b1\u01b4\3\2\2\2\u01b2\u01b0") + buf.write("\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b5\3\2\2\2\u01b4") + buf.write("\u01b2\3\2\2\2\u01b5\u01b6\b8\2\2\u01b6p\3\2\2\2\u01b7") + buf.write("\u01b8\7\61\2\2\u01b8\u01b9\7,\2\2\u01b9\u01bd\3\2\2\2") + buf.write("\u01ba\u01bc\13\2\2\2\u01bb\u01ba\3\2\2\2\u01bc\u01bf") + buf.write("\3\2\2\2\u01bd\u01be\3\2\2\2\u01bd\u01bb\3\2\2\2\u01be") + buf.write("\u01c3\3\2\2\2\u01bf\u01bd\3\2\2\2\u01c0\u01c1\7,\2\2") + buf.write("\u01c1\u01c4\7\61\2\2\u01c2\u01c4\7\2\2\3\u01c3\u01c0") + buf.write("\3\2\2\2\u01c3\u01c2\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5") + buf.write("\u01c6\b9\2\2\u01c6r\3\2\2\2\u01c7\u01c8\t\6\2\2\u01c8") + buf.write("\u01c9\3\2\2\2\u01c9\u01ca\b:\2\2\u01cat\3\2\2\2\u01cb") + buf.write("\u01cf\t\7\2\2\u01cc\u01ce\t\b\2\2\u01cd\u01cc\3\2\2\2") + buf.write("\u01ce\u01d1\3\2\2\2\u01cf\u01cd\3\2\2\2\u01cf\u01d0\3") + buf.write("\2\2\2\u01d0v\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d2\u01d3") + buf.write("\t\t\2\2\u01d3x\3\2\2\2\u01d4\u01d5\t\n\2\2\u01d5z\3\2") + buf.write("\2\2\u01d6\u01d7\t\13\2\2\u01d7|\3\2\2\2\u01d8\u01d9\t") + buf.write("\f\2\2\u01d9~\3\2\2\2\u01da\u01db\t\r\2\2\u01db\u0080") + buf.write("\3\2\2\2\u01dc\u01dd\t\16\2\2\u01dd\u0082\3\2\2\2\u01de") + buf.write("\u01df\t\17\2\2\u01df\u0084\3\2\2\2\u01e0\u01e1\t\20\2") + buf.write("\2\u01e1\u0086\3\2\2\2\u01e2\u01e3\t\21\2\2\u01e3\u0088") + buf.write("\3\2\2\2\u01e4\u01e5\t\22\2\2\u01e5\u008a\3\2\2\2\u01e6") + buf.write("\u01e7\t\23\2\2\u01e7\u008c\3\2\2\2\u01e8\u01e9\t\24\2") + buf.write("\2\u01e9\u008e\3\2\2\2\u01ea\u01eb\t\25\2\2\u01eb\u0090") + buf.write("\3\2\2\2\u01ec\u01ed\t\26\2\2\u01ed\u0092\3\2\2\2\u01ee") + buf.write("\u01ef\t\27\2\2\u01ef\u0094\3\2\2\2\u01f0\u01f1\t\30\2") + buf.write("\2\u01f1\u0096\3\2\2\2\u01f2\u01f3\t\31\2\2\u01f3\u0098") + buf.write("\3\2\2\2\u01f4\u01f5\t\32\2\2\u01f5\u009a\3\2\2\2\u01f6") + buf.write("\u01f7\t\33\2\2\u01f7\u009c\3\2\2\2\u01f8\u01f9\t\34\2") + buf.write("\2\u01f9\u009e\3\2\2\2\u01fa\u01fb\t\35\2\2\u01fb\u00a0") + buf.write("\3\2\2\2\u01fc\u01fd\t\36\2\2\u01fd\u00a2\3\2\2\2\u01fe") + buf.write("\u01ff\t\37\2\2\u01ff\u00a4\3\2\2\2\u0200\u0201\t \2\2") + buf.write("\u0201\u00a6\3\2\2\2\u0202\u0203\t!\2\2\u0203\u00a8\3") + buf.write("\2\2\2\u0204\u0205\t\"\2\2\u0205\u00aa\3\2\2\2\u0206\u0207") + buf.write("\t#\2\2\u0207\u00ac\3\2\2\2\33\2\u014f\u0155\u0158\u015c") + buf.write("\u0161\u0163\u0169\u016d\u0172\u0174\u0176\u017e\u0180") + buf.write("\u0189\u018b\u0196\u0198\u01a1\u01a3\u01ad\u01b2\u01bd") + buf.write("\u01c3\u01cf\3\2\3\2") return buf.getvalue() @@ -291,19 +293,20 @@ class PqlLexer(Lexer): K_OR = 42 K_ORDER = 43 K_SELECT = 44 - K_TRUE = 45 - K_WHERE = 46 - NUMERIC_LITERAL = 47 - DOUBLE_QUOTED_STRING = 48 - DOUBLE_QUOTED_STRING_TEL = 49 - DOUBLE_QUOTED_STRING_SQL = 50 - SINGLE_QUOTED_STRING = 51 - SINGLE_QUOTED_STRING_TEL = 52 - SINGLE_QUOTED_STRING_SQL = 53 - SINGLE_LINE_COMMENT = 54 - MULTILINE_COMMENT = 55 - SPACES = 56 - WORD = 57 + K_SET = 45 + K_TRUE = 46 + K_WHERE = 47 + NUMERIC_LITERAL = 48 + DOUBLE_QUOTED_STRING = 49 + DOUBLE_QUOTED_STRING_TEL = 50 + DOUBLE_QUOTED_STRING_SQL = 51 + SINGLE_QUOTED_STRING = 52 + SINGLE_QUOTED_STRING_TEL = 53 + SINGLE_QUOTED_STRING_SQL = 54 + SINGLE_LINE_COMMENT = 55 + 
MULTILINE_COMMENT = 56 + SPACES = 57 + WORD = 58 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -322,10 +325,11 @@ class PqlLexer(Lexer): "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", - "K_OR", "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", - "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", - "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ] + "K_OR", "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", + "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", + "SPACES", "WORD" ] ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", @@ -334,8 +338,8 @@ class PqlLexer(Lexer): "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", - "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", - "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD", "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 9a25bc5..5f84a35 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,86 +11,91 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3;") - buf.write("\u00c4\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3<") + buf.write("\u00cd\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") - buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3\2\3\2") - buf.write("\3\2\3\3\7\3)\n\3\f\3\16\3,\13\3\3\3\3\3\3\4\7\4\61\n") - buf.write("\4\f\4\16\4\64\13\4\3\4\3\4\6\48\n\4\r\4\16\49\3\4\7\4") - buf.write("=\n\4\f\4\16\4@\13\4\3\4\7\4C\n\4\f\4\16\4F\13\4\3\5\3") - buf.write("\5\3\6\3\6\5\6L\n\6\3\6\5\6O\n\6\3\6\5\6R\n\6\3\7\3\7") - buf.write("\3\7\3\7\7\7X\n\7\f\7\16\7[\13\7\3\b\3\b\3\b\3\b\5\ba") - buf.write("\n\b\3\b\3\b\5\be\n\b\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n") - buf.write("\7\no\n\n\f\n\16\nr\13\n\3\13\3\13\5\13v\n\13\3\f\3\f") - buf.write("\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u0085") - buf.write("\n\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3") - buf.write("\r\3\r\3\r\3\r\3\r\3\r\7\r\u0099\n\r\f\r\16\r\u009c\13") - buf.write("\r\3\16\3\16\3\16\5\16\u00a1\n\16\3\16\3\16\3\17\3\17") - buf.write("\3\17\7\17\u00a8\n\17\f\17\16\17\u00ab\13\17\3\20\5\20") - buf.write("\u00ae\n\20\3\20\3\20\3\20\5\20\u00b3\n\20\3\20\3\20\3") - buf.write("\20\5\20\u00b8\n\20\3\21\3\21\3\21\7\21\u00bd\n\21\f\21") - buf.write("\16\21\u00c0\13\21\3\22\3\22\3\22\2\3\30\23\2\4\6\b\n") - 
buf.write("\f\16\20\22\24\26\30\32\34\36 \"\2\13\4\2!!##\5\2\25\25") - buf.write("\31\31))\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6") - buf.write("\23\24\6\2\4\4\7\b\r\r%%\4\2\3\3\37\37\4\2\t\t,,\7\2$") - buf.write("$++//\61\62\65\65\2\u00cf\2$\3\2\2\2\4*\3\2\2\2\6\62\3") - buf.write("\2\2\2\bG\3\2\2\2\nI\3\2\2\2\fS\3\2\2\2\16\\\3\2\2\2\20") - buf.write("f\3\2\2\2\22i\3\2\2\2\24s\3\2\2\2\26w\3\2\2\2\30\u0084") - buf.write("\3\2\2\2\32\u009d\3\2\2\2\34\u00a4\3\2\2\2\36\u00ad\3") - buf.write("\2\2\2 \u00b9\3\2\2\2\"\u00c1\3\2\2\2$%\5\30\r\2%&\7\2") - buf.write("\2\3&\3\3\2\2\2\')\5\6\4\2(\'\3\2\2\2),\3\2\2\2*(\3\2") - buf.write("\2\2*+\3\2\2\2+-\3\2\2\2,*\3\2\2\2-.\7\2\2\3.\5\3\2\2") - buf.write("\2/\61\7\33\2\2\60/\3\2\2\2\61\64\3\2\2\2\62\60\3\2\2") - buf.write("\2\62\63\3\2\2\2\63\65\3\2\2\2\64\62\3\2\2\2\65>\5\b\5") - buf.write("\2\668\7\33\2\2\67\66\3\2\2\289\3\2\2\29\67\3\2\2\29:") - buf.write("\3\2\2\2:;\3\2\2\2;=\5\b\5\2<\67\3\2\2\2=@\3\2\2\2><\3") - buf.write("\2\2\2>?\3\2\2\2?D\3\2\2\2@>\3\2\2\2AC\7\33\2\2BA\3\2") - buf.write("\2\2CF\3\2\2\2DB\3\2\2\2DE\3\2\2\2E\7\3\2\2\2FD\3\2\2") - buf.write("\2GH\5\n\6\2H\t\3\2\2\2IK\5\f\7\2JL\5\20\t\2KJ\3\2\2\2") - buf.write("KL\3\2\2\2LN\3\2\2\2MO\5\22\n\2NM\3\2\2\2NO\3\2\2\2OQ") - buf.write("\3\2\2\2PR\5\26\f\2QP\3\2\2\2QR\3\2\2\2R\13\3\2\2\2ST") - buf.write("\7.\2\2TY\5\16\b\2UV\7\20\2\2VX\5\16\b\2WU\3\2\2\2X[\3") - buf.write("\2\2\2YW\3\2\2\2YZ\3\2\2\2Z\r\3\2\2\2[Y\3\2\2\2\\`\5\30") - buf.write("\r\2]^\7\17\2\2^_\7\17\2\2_a\5\32\16\2`]\3\2\2\2`a\3\2") - buf.write("\2\2ad\3\2\2\2bc\7 \2\2ce\5\36\20\2db\3\2\2\2de\3\2\2") - buf.write("\2e\17\3\2\2\2fg\7\60\2\2gh\5\30\r\2h\21\3\2\2\2ij\7-") - buf.write("\2\2jk\7\"\2\2kp\5\24\13\2lm\7\20\2\2mo\5\24\13\2nl\3") - buf.write("\2\2\2or\3\2\2\2pn\3\2\2\2pq\3\2\2\2q\23\3\2\2\2rp\3\2") - buf.write("\2\2su\5\30\r\2tv\t\2\2\2ut\3\2\2\2uv\3\2\2\2v\25\3\2") - buf.write("\2\2wx\7(\2\2xy\5\30\r\2y\27\3\2\2\2z{\b\r\1\2{|\t\3\2") - buf.write("\2|\u0085\5\30\r\r}~\7\27\2\2~\177\5\30\r\2\177\u0080") - buf.write("\7\16\2\2\u0080\u0085\3\2\2\2\u0081\u0085\5\"\22\2\u0082") - buf.write("\u0085\5\32\16\2\u0083\u0085\5\36\20\2\u0084z\3\2\2\2") - buf.write("\u0084}\3\2\2\2\u0084\u0081\3\2\2\2\u0084\u0082\3\2\2") - buf.write("\2\u0084\u0083\3\2\2\2\u0085\u009a\3\2\2\2\u0086\u0087") - buf.write("\f\f\2\2\u0087\u0088\t\4\2\2\u0088\u0099\5\30\r\r\u0089") - buf.write("\u008a\f\13\2\2\u008a\u008b\t\5\2\2\u008b\u0099\5\30\r") - buf.write("\f\u008c\u008d\f\n\2\2\u008d\u008e\t\6\2\2\u008e\u0099") - buf.write("\5\30\r\13\u008f\u0090\f\t\2\2\u0090\u0091\t\7\2\2\u0091") - buf.write("\u0099\5\30\r\n\u0092\u0093\f\b\2\2\u0093\u0094\t\b\2") - buf.write("\2\u0094\u0099\5\30\r\t\u0095\u0096\f\7\2\2\u0096\u0097") - buf.write("\t\t\2\2\u0097\u0099\5\30\r\b\u0098\u0086\3\2\2\2\u0098") - buf.write("\u0089\3\2\2\2\u0098\u008c\3\2\2\2\u0098\u008f\3\2\2\2") - buf.write("\u0098\u0092\3\2\2\2\u0098\u0095\3\2\2\2\u0099\u009c\3") - buf.write("\2\2\2\u009a\u0098\3\2\2\2\u009a\u009b\3\2\2\2\u009b\31") - buf.write("\3\2\2\2\u009c\u009a\3\2\2\2\u009d\u009e\5 \21\2\u009e") - buf.write("\u00a0\7\27\2\2\u009f\u00a1\5\34\17\2\u00a0\u009f\3\2") - buf.write("\2\2\u00a0\u00a1\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2\u00a3") - buf.write("\7\16\2\2\u00a3\33\3\2\2\2\u00a4\u00a9\5\30\r\2\u00a5") - buf.write("\u00a6\7\20\2\2\u00a6\u00a8\5\30\r\2\u00a7\u00a5\3\2\2") - buf.write("\2\u00a8\u00ab\3\2\2\2\u00a9\u00a7\3\2\2\2\u00a9\u00aa") - buf.write("\3\2\2\2\u00aa\35\3\2\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00ae") - 
buf.write("\7\32\2\2\u00ad\u00ac\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae") - buf.write("\u00b2\3\2\2\2\u00af\u00b0\5 \21\2\u00b0\u00b1\7\30\2") - buf.write("\2\u00b1\u00b3\3\2\2\2\u00b2\u00af\3\2\2\2\u00b2\u00b3") - buf.write("\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b7\5 \21\2\u00b5") - buf.write("\u00b6\7\17\2\2\u00b6\u00b8\5 \21\2\u00b7\u00b5\3\2\2") - buf.write("\2\u00b7\u00b8\3\2\2\2\u00b8\37\3\2\2\2\u00b9\u00be\7") - buf.write(";\2\2\u00ba\u00bb\7\21\2\2\u00bb\u00bd\7;\2\2\u00bc\u00ba") - buf.write("\3\2\2\2\u00bd\u00c0\3\2\2\2\u00be\u00bc\3\2\2\2\u00be") - buf.write("\u00bf\3\2\2\2\u00bf!\3\2\2\2\u00c0\u00be\3\2\2\2\u00c1") - buf.write("\u00c2\t\n\2\2\u00c2#\3\2\2\2\30*\629>DKNQY`dpu\u0084") - buf.write("\u0098\u009a\u00a0\u00a9\u00ad\u00b2\u00b7\u00be") + buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") + buf.write("\3\2\3\2\3\2\3\3\7\3+\n\3\f\3\16\3.\13\3\3\3\3\3\3\4\7") + buf.write("\4\63\n\4\f\4\16\4\66\13\4\3\4\3\4\6\4:\n\4\r\4\16\4;") + buf.write("\3\4\7\4?\n\4\f\4\16\4B\13\4\3\4\7\4E\n\4\f\4\16\4H\13") + buf.write("\4\3\5\3\5\5\5L\n\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\5\7U\n") + buf.write("\7\3\7\5\7X\n\7\3\7\5\7[\n\7\3\b\3\b\3\b\3\b\7\ba\n\b") + buf.write("\f\b\16\bd\13\b\3\t\3\t\3\t\3\t\5\tj\n\t\3\t\3\t\5\tn") + buf.write("\n\t\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\7\13x\n\13\f") + buf.write("\13\16\13{\13\13\3\f\3\f\5\f\177\n\f\3\r\3\r\3\r\3\16") + buf.write("\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u008e") + buf.write("\n\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16") + buf.write("\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\7\16\u00a2\n") + buf.write("\16\f\16\16\16\u00a5\13\16\3\17\3\17\3\17\5\17\u00aa\n") + buf.write("\17\3\17\3\17\3\20\3\20\3\20\7\20\u00b1\n\20\f\20\16\20") + buf.write("\u00b4\13\20\3\21\5\21\u00b7\n\21\3\21\3\21\3\21\5\21") + buf.write("\u00bc\n\21\3\21\3\21\3\21\5\21\u00c1\n\21\3\22\3\22\3") + buf.write("\22\7\22\u00c6\n\22\f\22\16\22\u00c9\13\22\3\23\3\23\3") + buf.write("\23\2\3\32\24\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 ") + buf.write("\"$\2\13\4\2!!##\5\2\25\25\31\31))\5\2\22\22\26\26\34") + buf.write("\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r%%\4") + buf.write("\2\3\3\37\37\4\2\t\t,,\7\2$$++\60\60\62\63\66\66\2\u00d8") + buf.write("\2&\3\2\2\2\4,\3\2\2\2\6\64\3\2\2\2\bK\3\2\2\2\nM\3\2") + buf.write("\2\2\fR\3\2\2\2\16\\\3\2\2\2\20e\3\2\2\2\22o\3\2\2\2\24") + buf.write("r\3\2\2\2\26|\3\2\2\2\30\u0080\3\2\2\2\32\u008d\3\2\2") + buf.write("\2\34\u00a6\3\2\2\2\36\u00ad\3\2\2\2 \u00b6\3\2\2\2\"") + buf.write("\u00c2\3\2\2\2$\u00ca\3\2\2\2&\'\5\32\16\2\'(\7\2\2\3") + buf.write("(\3\3\2\2\2)+\5\6\4\2*)\3\2\2\2+.\3\2\2\2,*\3\2\2\2,-") + buf.write("\3\2\2\2-/\3\2\2\2.,\3\2\2\2/\60\7\2\2\3\60\5\3\2\2\2") + buf.write("\61\63\7\33\2\2\62\61\3\2\2\2\63\66\3\2\2\2\64\62\3\2") + buf.write("\2\2\64\65\3\2\2\2\65\67\3\2\2\2\66\64\3\2\2\2\67@\5\b") + buf.write("\5\28:\7\33\2\298\3\2\2\2:;\3\2\2\2;9\3\2\2\2;<\3\2\2") + buf.write("\2<=\3\2\2\2=?\5\b\5\2>9\3\2\2\2?B\3\2\2\2@>\3\2\2\2@") + buf.write("A\3\2\2\2AF\3\2\2\2B@\3\2\2\2CE\7\33\2\2DC\3\2\2\2EH\3") + buf.write("\2\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2\2\2HF\3\2\2\2IL\5\n") + buf.write("\6\2JL\5\f\7\2KI\3\2\2\2KJ\3\2\2\2L\t\3\2\2\2MN\7/\2\2") + buf.write("NO\5\"\22\2OP\7\r\2\2PQ\5\32\16\2Q\13\3\2\2\2RT\5\16\b") + buf.write("\2SU\5\22\n\2TS\3\2\2\2TU\3\2\2\2UW\3\2\2\2VX\5\24\13") + buf.write("\2WV\3\2\2\2WX\3\2\2\2XZ\3\2\2\2Y[\5\30\r\2ZY\3\2\2\2") + buf.write("Z[\3\2\2\2[\r\3\2\2\2\\]\7.\2\2]b\5\20\t\2^_\7\20\2\2") + buf.write("_a\5\20\t\2`^\3\2\2\2ad\3\2\2\2b`\3\2\2\2bc\3\2\2\2c\17") + 
buf.write("\3\2\2\2db\3\2\2\2ei\5\32\16\2fg\7\17\2\2gh\7\17\2\2h") + buf.write("j\5\34\17\2if\3\2\2\2ij\3\2\2\2jm\3\2\2\2kl\7 \2\2ln\5") + buf.write(" \21\2mk\3\2\2\2mn\3\2\2\2n\21\3\2\2\2op\7\61\2\2pq\5") + buf.write("\32\16\2q\23\3\2\2\2rs\7-\2\2st\7\"\2\2ty\5\26\f\2uv\7") + buf.write("\20\2\2vx\5\26\f\2wu\3\2\2\2x{\3\2\2\2yw\3\2\2\2yz\3\2") + buf.write("\2\2z\25\3\2\2\2{y\3\2\2\2|~\5\32\16\2}\177\t\2\2\2~}") + buf.write("\3\2\2\2~\177\3\2\2\2\177\27\3\2\2\2\u0080\u0081\7(\2") + buf.write("\2\u0081\u0082\5\32\16\2\u0082\31\3\2\2\2\u0083\u0084") + buf.write("\b\16\1\2\u0084\u0085\t\3\2\2\u0085\u008e\5\32\16\r\u0086") + buf.write("\u0087\7\27\2\2\u0087\u0088\5\32\16\2\u0088\u0089\7\16") + buf.write("\2\2\u0089\u008e\3\2\2\2\u008a\u008e\5$\23\2\u008b\u008e") + buf.write("\5\34\17\2\u008c\u008e\5 \21\2\u008d\u0083\3\2\2\2\u008d") + buf.write("\u0086\3\2\2\2\u008d\u008a\3\2\2\2\u008d\u008b\3\2\2\2") + buf.write("\u008d\u008c\3\2\2\2\u008e\u00a3\3\2\2\2\u008f\u0090\f") + buf.write("\f\2\2\u0090\u0091\t\4\2\2\u0091\u00a2\5\32\16\r\u0092") + buf.write("\u0093\f\13\2\2\u0093\u0094\t\5\2\2\u0094\u00a2\5\32\16") + buf.write("\f\u0095\u0096\f\n\2\2\u0096\u0097\t\6\2\2\u0097\u00a2") + buf.write("\5\32\16\13\u0098\u0099\f\t\2\2\u0099\u009a\t\7\2\2\u009a") + buf.write("\u00a2\5\32\16\n\u009b\u009c\f\b\2\2\u009c\u009d\t\b\2") + buf.write("\2\u009d\u00a2\5\32\16\t\u009e\u009f\f\7\2\2\u009f\u00a0") + buf.write("\t\t\2\2\u00a0\u00a2\5\32\16\b\u00a1\u008f\3\2\2\2\u00a1") + buf.write("\u0092\3\2\2\2\u00a1\u0095\3\2\2\2\u00a1\u0098\3\2\2\2") + buf.write("\u00a1\u009b\3\2\2\2\u00a1\u009e\3\2\2\2\u00a2\u00a5\3") + buf.write("\2\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a4\33") + buf.write("\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a7\5\"\22\2\u00a7") + buf.write("\u00a9\7\27\2\2\u00a8\u00aa\5\36\20\2\u00a9\u00a8\3\2") + buf.write("\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00ac") + buf.write("\7\16\2\2\u00ac\35\3\2\2\2\u00ad\u00b2\5\32\16\2\u00ae") + buf.write("\u00af\7\20\2\2\u00af\u00b1\5\32\16\2\u00b0\u00ae\3\2") + buf.write("\2\2\u00b1\u00b4\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3") + buf.write("\3\2\2\2\u00b3\37\3\2\2\2\u00b4\u00b2\3\2\2\2\u00b5\u00b7") + buf.write("\7\32\2\2\u00b6\u00b5\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7") + buf.write("\u00bb\3\2\2\2\u00b8\u00b9\5\"\22\2\u00b9\u00ba\7\30\2") + buf.write("\2\u00ba\u00bc\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bb\u00bc") + buf.write("\3\2\2\2\u00bc\u00bd\3\2\2\2\u00bd\u00c0\5\"\22\2\u00be") + buf.write("\u00bf\7\17\2\2\u00bf\u00c1\5\"\22\2\u00c0\u00be\3\2\2") + buf.write("\2\u00c0\u00c1\3\2\2\2\u00c1!\3\2\2\2\u00c2\u00c7\7<\2") + buf.write("\2\u00c3\u00c4\7\21\2\2\u00c4\u00c6\7<\2\2\u00c5\u00c3") + buf.write("\3\2\2\2\u00c6\u00c9\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c7") + buf.write("\u00c8\3\2\2\2\u00c8#\3\2\2\2\u00c9\u00c7\3\2\2\2\u00ca") + buf.write("\u00cb\t\n\2\2\u00cb%\3\2\2\2\31,\64;@FKTWZbimy~\u008d") + buf.write("\u00a1\u00a3\u00a9\u00b2\u00b6\u00bb\u00c0\u00c7") return buf.getvalue() @@ -117,7 +122,7 @@ class PqlParser ( Parser ): "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", - "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", @@ -128,24 +133,26 @@ class PqlParser ( Parser ): RULE_parsePql = 1 RULE_sqlStmtList = 2 
RULE_sqlStmt = 3 - RULE_selectStmt = 4 - RULE_selectClause = 5 - RULE_columns = 6 - RULE_whereClause = 7 - RULE_orderByClause = 8 - RULE_orderExpr = 9 - RULE_limitClause = 10 - RULE_expr = 11 - RULE_function = 12 - RULE_exprList = 13 - RULE_taxon = 14 - RULE_identifierMultipart = 15 - RULE_literalValue = 16 - - ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", - "selectClause", "columns", "whereClause", "orderByClause", - "orderExpr", "limitClause", "expr", "function", "exprList", - "taxon", "identifierMultipart", "literalValue" ] + RULE_setStmt = 4 + RULE_selectStmt = 5 + RULE_selectClause = 6 + RULE_columns = 7 + RULE_whereClause = 8 + RULE_orderByClause = 9 + RULE_orderExpr = 10 + RULE_limitClause = 11 + RULE_expr = 12 + RULE_function = 13 + RULE_exprList = 14 + RULE_taxon = 15 + RULE_identifierMultipart = 16 + RULE_literalValue = 17 + + ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "setStmt", + "selectStmt", "selectClause", "columns", "whereClause", + "orderByClause", "orderExpr", "limitClause", "expr", + "function", "exprList", "taxon", "identifierMultipart", + "literalValue" ] EOF = Token.EOF AND=1 @@ -192,19 +199,20 @@ class PqlParser ( Parser ): K_OR=42 K_ORDER=43 K_SELECT=44 - K_TRUE=45 - K_WHERE=46 - NUMERIC_LITERAL=47 - DOUBLE_QUOTED_STRING=48 - DOUBLE_QUOTED_STRING_TEL=49 - DOUBLE_QUOTED_STRING_SQL=50 - SINGLE_QUOTED_STRING=51 - SINGLE_QUOTED_STRING_TEL=52 - SINGLE_QUOTED_STRING_SQL=53 - SINGLE_LINE_COMMENT=54 - MULTILINE_COMMENT=55 - SPACES=56 - WORD=57 + K_SET=45 + K_TRUE=46 + K_WHERE=47 + NUMERIC_LITERAL=48 + DOUBLE_QUOTED_STRING=49 + DOUBLE_QUOTED_STRING_TEL=50 + DOUBLE_QUOTED_STRING_SQL=51 + SINGLE_QUOTED_STRING=52 + SINGLE_QUOTED_STRING_TEL=53 + SINGLE_QUOTED_STRING_SQL=54 + SINGLE_LINE_COMMENT=55 + MULTILINE_COMMENT=56 + SPACES=57 + WORD=58 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -254,9 +262,9 @@ def parseTel(self): self.enterRule(localctx, 0, self.RULE_parseTel) try: self.enterOuterAlt(localctx, 1) - self.state = 34 + self.state = 36 self.expr(0) - self.state = 35 + self.state = 37 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -310,17 +318,17 @@ def parsePql(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 40 + self.state = 42 self._errHandler.sync(self) _la = self._input.LA(1) - while _la==PqlParser.SCOL or _la==PqlParser.K_SELECT: - self.state = 37 + while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.SCOL) | (1 << PqlParser.K_SELECT) | (1 << PqlParser.K_SET))) != 0): + self.state = 39 self.sqlStmtList() - self.state = 42 + self.state = 44 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 43 + self.state = 45 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -377,49 +385,49 @@ def sqlStmtList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 48 + self.state = 50 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL: - self.state = 45 + self.state = 47 self.match(PqlParser.SCOL) - self.state = 50 + self.state = 52 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 51 + self.state = 53 self.sqlStmt() - self.state = 60 + self.state = 62 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 53 + self.state = 55 self._errHandler.sync(self) _la 
= self._input.LA(1) while True: - self.state = 52 + self.state = 54 self.match(PqlParser.SCOL) - self.state = 55 + self.state = 57 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PqlParser.SCOL): break - self.state = 57 + self.state = 59 self.sqlStmt() - self.state = 62 + self.state = 64 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - self.state = 66 + self.state = 68 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 63 + self.state = 65 self.match(PqlParser.SCOL) - self.state = 68 + self.state = 70 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -438,6 +446,10 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser + def setStmt(self): + return self.getTypedRuleContext(PqlParser.SetStmtContext,0) + + def selectStmt(self): return self.getTypedRuleContext(PqlParser.SelectStmtContext,0) @@ -466,10 +478,88 @@ def sqlStmt(self): localctx = PqlParser.SqlStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_sqlStmt) + try: + self.state = 73 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [PqlParser.K_SET]: + self.enterOuterAlt(localctx, 1) + self.state = 71 + self.setStmt() + pass + elif token in [PqlParser.K_SELECT]: + self.enterOuterAlt(localctx, 2) + self.state = 72 + self.selectStmt() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SetStmtContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.key = None # IdentifierMultipartContext + self.values = None # ExprContext + + def K_SET(self): + return self.getToken(PqlParser.K_SET, 0) + + def ASSIGN(self): + return self.getToken(PqlParser.ASSIGN, 0) + + def identifierMultipart(self): + return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) + + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def getRuleIndex(self): + return PqlParser.RULE_setStmt + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSetStmt" ): + listener.enterSetStmt(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSetStmt" ): + listener.exitSetStmt(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitSetStmt" ): + return visitor.visitSetStmt(self) + else: + return visitor.visitChildren(self) + + + + + def setStmt(self): + + localctx = PqlParser.SetStmtContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_setStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 69 - self.selectStmt() + self.state = 75 + self.match(PqlParser.K_SET) + self.state = 76 + localctx.key = self.identifierMultipart() + self.state = 77 + self.match(PqlParser.ASSIGN) + self.state = 78 + localctx.values = self.expr(0) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -524,33 +614,33 @@ def accept(self, visitor:ParseTreeVisitor): def selectStmt(self): localctx = PqlParser.SelectStmtContext(self, self._ctx, 
self.state) - self.enterRule(localctx, 8, self.RULE_selectStmt) + self.enterRule(localctx, 10, self.RULE_selectStmt) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 71 + self.state = 80 self.selectClause() - self.state = 73 + self.state = 82 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_WHERE: - self.state = 72 + self.state = 81 self.whereClause() - self.state = 76 + self.state = 85 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ORDER: - self.state = 75 + self.state = 84 self.orderByClause() - self.state = 79 + self.state = 88 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_LIMIT: - self.state = 78 + self.state = 87 self.limitClause() @@ -608,23 +698,23 @@ def accept(self, visitor:ParseTreeVisitor): def selectClause(self): localctx = PqlParser.SelectClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_selectClause) + self.enterRule(localctx, 12, self.RULE_selectClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 81 + self.state = 90 self.match(PqlParser.K_SELECT) - self.state = 82 + self.state = 91 self.columns() - self.state = 87 + self.state = 96 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 83 + self.state = 92 self.match(PqlParser.COMMA) - self.state = 84 + self.state = 93 self.columns() - self.state = 89 + self.state = 98 self._errHandler.sync(self) _la = self._input.LA(1) @@ -690,31 +780,31 @@ def accept(self, visitor:ParseTreeVisitor): def columns(self): localctx = PqlParser.ColumnsContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_columns) + self.enterRule(localctx, 14, self.RULE_columns) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 90 + self.state = 99 localctx.value = self.expr(0) - self.state = 94 + self.state = 103 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.COLON: - self.state = 91 + self.state = 100 self.match(PqlParser.COLON) - self.state = 92 + self.state = 101 self.match(PqlParser.COLON) - self.state = 93 + self.state = 102 localctx.type_cast = self.function() - self.state = 98 + self.state = 107 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_AS: - self.state = 96 + self.state = 105 self.match(PqlParser.K_AS) - self.state = 97 + self.state = 106 localctx.alias = self.taxon() @@ -763,12 +853,12 @@ def accept(self, visitor:ParseTreeVisitor): def whereClause(self): localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 14, self.RULE_whereClause) + self.enterRule(localctx, 16, self.RULE_whereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 100 + self.state = 109 self.match(PqlParser.K_WHERE) - self.state = 101 + self.state = 110 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -827,25 +917,25 @@ def accept(self, visitor:ParseTreeVisitor): def orderByClause(self): localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_orderByClause) + self.enterRule(localctx, 18, self.RULE_orderByClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 103 + self.state = 112 self.match(PqlParser.K_ORDER) - self.state = 104 + self.state = 113 self.match(PqlParser.K_BY) - self.state = 105 + self.state = 114 self.orderExpr() - self.state = 110 + self.state = 119 self._errHandler.sync(self) _la = self._input.LA(1) 
while _la==PqlParser.COMMA: - self.state = 106 + self.state = 115 self.match(PqlParser.COMMA) - self.state = 107 + self.state = 116 self.orderExpr() - self.state = 112 + self.state = 121 self._errHandler.sync(self) _la = self._input.LA(1) @@ -897,17 +987,17 @@ def accept(self, visitor:ParseTreeVisitor): def orderExpr(self): localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_orderExpr) + self.enterRule(localctx, 20, self.RULE_orderExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 113 + self.state = 122 self.expr(0) - self.state = 115 + self.state = 124 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: - self.state = 114 + self.state = 123 _la = self._input.LA(1) if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): self._errHandler.recoverInline(self) @@ -962,12 +1052,12 @@ def accept(self, visitor:ParseTreeVisitor): def limitClause(self): localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 20, self.RULE_limitClause) + self.enterRule(localctx, 22, self.RULE_limitClause) try: self.enterOuterAlt(localctx, 1) - self.state = 117 + self.state = 126 self.match(PqlParser.K_LIMIT) - self.state = 118 + self.state = 127 localctx.limit = self.expr(0) except RecognitionException as re: localctx.exception = re @@ -1095,16 +1185,16 @@ def expr(self, _p:int=0): _parentState = self.state localctx = PqlParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 22 - self.enterRecursionRule(localctx, 22, self.RULE_expr, _p) + _startState = 24 + self.enterRecursionRule(localctx, 24, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 130 + self.state = 139 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,13,self._ctx) + la_ = self._interp.adaptivePredict(self._input,14,self._ctx) if la_ == 1: - self.state = 121 + self.state = 130 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -1112,56 +1202,56 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 122 + self.state = 131 localctx.right = self.expr(11) pass elif la_ == 2: - self.state = 123 + self.state = 132 self.match(PqlParser.OPEN_PAREN) - self.state = 124 + self.state = 133 localctx.inner = self.expr(0) - self.state = 125 + self.state = 134 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 127 + self.state = 136 self.literalValue() pass elif la_ == 4: - self.state = 128 + self.state = 137 self.function() pass elif la_ == 5: - self.state = 129 + self.state = 138 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 152 + self.state = 161 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,15,self._ctx) + _alt = self._interp.adaptivePredict(self._input,16,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 150 + self.state = 159 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,14,self._ctx) + la_ = self._interp.adaptivePredict(self._input,15,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx 
self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 132 + self.state = 141 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 133 + self.state = 142 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -1169,7 +1259,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 134 + self.state = 143 localctx.right = self.expr(11) pass @@ -1177,11 +1267,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 135 + self.state = 144 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 136 + self.state = 145 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -1189,7 +1279,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 137 + self.state = 146 localctx.right = self.expr(10) pass @@ -1197,11 +1287,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 138 + self.state = 147 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 139 + self.state = 148 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -1209,7 +1299,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 140 + self.state = 149 localctx.right = self.expr(9) pass @@ -1217,11 +1307,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 141 + self.state = 150 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 142 + self.state = 151 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -1229,7 +1319,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 143 + self.state = 152 localctx.right = self.expr(8) pass @@ -1237,11 +1327,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 144 + self.state = 153 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 145 + 
self.state = 154 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -1249,7 +1339,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 146 + self.state = 155 localctx.right = self.expr(7) pass @@ -1257,11 +1347,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 147 + self.state = 156 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 148 + self.state = 157 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -1269,14 +1359,14 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 149 + self.state = 158 localctx.right = self.expr(6) pass - self.state = 154 + self.state = 163 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,15,self._ctx) + _alt = self._interp.adaptivePredict(self._input,16,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1332,23 +1422,23 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PqlParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 24, self.RULE_function) + self.enterRule(localctx, 26, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 155 + self.state = 164 localctx.function_name = self.identifierMultipart() - self.state = 156 + self.state = 165 self.match(PqlParser.OPEN_PAREN) - self.state = 158 + self.state = 167 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 157 + self.state = 166 localctx.arguments = self.exprList() - self.state = 160 + self.state = 169 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -1401,21 +1491,21 @@ def accept(self, visitor:ParseTreeVisitor): def exprList(self): localctx = PqlParser.ExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_exprList) + self.enterRule(localctx, 28, self.RULE_exprList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 162 + self.state = 171 self.expr(0) - self.state = 167 + self.state = 176 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 163 + self.state = 172 self.match(PqlParser.COMMA) - self.state = 164 + self.state = 173 self.expr(0) - self.state = 169 + self.state = 178 self._errHandler.sync(self) _la = self._input.LA(1) @@ -1477,37 +1567,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 28, self.RULE_taxon) + self.enterRule(localctx, 30, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 171 + self.state = 180 self._errHandler.sync(self) _la = 
self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 170 + self.state = 179 localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 176 + self.state = 185 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,19,self._ctx) + la_ = self._interp.adaptivePredict(self._input,20,self._ctx) if la_ == 1: - self.state = 173 + self.state = 182 localctx.namespace = self.identifierMultipart() - self.state = 174 + self.state = 183 self.match(PqlParser.PIPE) - self.state = 178 + self.state = 187 localctx.slug = self.identifierMultipart() - self.state = 181 + self.state = 190 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,20,self._ctx) + la_ = self._interp.adaptivePredict(self._input,21,self._ctx) if la_ == 1: - self.state = 179 + self.state = 188 self.match(PqlParser.COLON) - self.state = 180 + self.state = 189 localctx.tag = self.identifierMultipart() @@ -1561,23 +1651,23 @@ def accept(self, visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 30, self.RULE_identifierMultipart) + self.enterRule(localctx, 32, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 183 + self.state = 192 self.match(PqlParser.WORD) - self.state = 188 + self.state = 197 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,21,self._ctx) + _alt = self._interp.adaptivePredict(self._input,22,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 184 + self.state = 193 self.match(PqlParser.DOT) - self.state = 185 + self.state = 194 self.match(PqlParser.WORD) - self.state = 190 + self.state = 199 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,21,self._ctx) + _alt = self._interp.adaptivePredict(self._input,22,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1635,11 +1725,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 32, self.RULE_literalValue) + self.enterRule(localctx, 34, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 191 + self.state = 200 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -1659,7 +1749,7 @@ def literalValue(self): def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[11] = self.expr_sempred + self._predicates[12] = self.expr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 33ee5c5..5aa64ce 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -44,6 +44,15 @@ def exitSqlStmt(self, ctx:PqlParser.SqlStmtContext): pass + # Enter a parse tree produced by PqlParser#setStmt. + def enterSetStmt(self, ctx:PqlParser.SetStmtContext): + pass + + # Exit a parse tree produced by PqlParser#setStmt. 
+    def exitSetStmt(self, ctx:PqlParser.SetStmtContext):
+        pass
+
+
     # Enter a parse tree produced by PqlParser#selectStmt.
     def enterSelectStmt(self, ctx:PqlParser.SelectStmtContext):
         pass
diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py
index 8333399..f9bca89 100644
--- a/python/src/pql_grammar/antlr/PqlParserVisitor.py
+++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py
@@ -29,6 +29,11 @@ def visitSqlStmt(self, ctx:PqlParser.SqlStmtContext):
         return self.visitChildren(ctx)


+    # Visit a parse tree produced by PqlParser#setStmt.
+    def visitSetStmt(self, ctx:PqlParser.SetStmtContext):
+        return self.visitChildren(ctx)
+
+
     # Visit a parse tree produced by PqlParser#selectStmt.
     def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext):
         return self.visitChildren(ctx)

From dc058e3449e9b662171cef7952882d4e618f27f1 Mon Sep 17 00:00:00 2001
From: Daniel Dotsenko
Date: Sun, 15 Nov 2020 08:39:40 -0800
Subject: [PATCH 16/32] add support for FROM statement

---
 grammar/PqlLexer.g4 | 1 +
 grammar/PqlParser.g4 | 16 +-
 python/src/pql_grammar/antlr/PqlLexer.py | 506 ++++++-------
 python/src/pql_grammar/antlr/PqlParser.py | 699 +++++++++++-------
 .../pql_grammar/antlr/PqlParserListener.py | 18 +
 .../src/pql_grammar/antlr/PqlParserVisitor.py | 10 +
 python/src/pql_grammar/ast/from_pql.py | 62 +-
 python/src/pql_grammar/ast/model.py | 13 +-
 python/src/pql_grammar/ast/to_pql.py | 16 +-
 python/tests/ast_json_test.py | 4 +-
 python/tests/ast_pql_test.py | 81 +-
 11 files changed, 871 insertions(+), 555 deletions(-)

diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4
index e81881e..9c381f3 100644
--- a/grammar/PqlLexer.g4
+++ b/grammar/PqlLexer.g4
@@ -39,6 +39,7 @@ K_ASC : A S C;
 K_BY : B Y;
 K_DESC : D E S C;
 K_FALSE : F A L S E;
+K_FROM : F R O M ;
 K_IS : I S;
 K_ISNULL : I S N U L L;
 K_LIKE : L I K E;
diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4
index 4b14c95..4113f0f 100644
--- a/grammar/PqlParser.g4
+++ b/grammar/PqlParser.g4
@@ -32,14 +32,15 @@ sqlStmt
 // a way to set query context settings and avoid sending them inside PQL
 // Example: set "fill in dates for date-ranged sparse data" flag for Husky.
 setStmt
-    : K_SET key=identifierMultipart ASSIGN values=expr
+    : K_SET key=identifierMultipart ASSIGN value=expr
     ;

 selectStmt
     : selectClause
-    ( whereClause )?
-    ( orderByClause )?
-    ( limitClause )?
+    ( fromClause )?
+    ( whereClause )?
+    ( orderByClause )?
+    ( limitClause )?
     ;

 selectClause: K_SELECT columns ( COMMA columns )* ;
@@ -60,9 +61,10 @@ columns: value=expr (COLON COLON type_cast=function)? (K_AS alias=taxon)? ;
 // While SQL allows non-function and function type casts,
 // we stick with requiring parens always for simplicity of the syntax parser.

-whereClause
-    : K_WHERE expr
-    ;
+fromClause: K_FROM tables (COMMA tables)* ;
+tables: table_name=identifierMultipart ( K_AS? table_alias=identifierMultipart )?
; + +whereClause: K_WHERE expr; orderByClause : K_ORDER K_BY orderExpr ( COMMA orderExpr )* diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index a93b9a2..65332e9 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2<") - buf.write("\u0208\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2=") + buf.write("\u020f\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -21,225 +21,228 @@ def serializedATN(): buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") - buf.write("U\4V\tV\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3") - buf.write("\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n") - buf.write("\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17") - buf.write("\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25") - buf.write("\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32") - buf.write("\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37") - buf.write("\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3") - buf.write("#\3#\3#\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3") - buf.write("&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3") - buf.write(")\3)\3)\3)\3)\3*\3*\3*\3*\3*\3+\3+\3+\3,\3,\3,\3,\3,\3") - buf.write(",\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60") - buf.write("\3\60\3\60\3\60\3\60\3\60\3\61\6\61\u014e\n\61\r\61\16") - buf.write("\61\u014f\3\61\3\61\7\61\u0154\n\61\f\61\16\61\u0157\13") - buf.write("\61\5\61\u0159\n\61\3\61\3\61\5\61\u015d\n\61\3\61\6\61") - buf.write("\u0160\n\61\r\61\16\61\u0161\5\61\u0164\n\61\3\61\3\61") - buf.write("\6\61\u0168\n\61\r\61\16\61\u0169\3\61\3\61\5\61\u016e") - buf.write("\n\61\3\61\6\61\u0171\n\61\r\61\16\61\u0172\5\61\u0175") - buf.write("\n\61\5\61\u0177\n\61\3\62\3\62\3\63\3\63\3\63\3\63\7") - buf.write("\63\u017f\n\63\f\63\16\63\u0182\13\63\3\63\3\63\3\64\3") - buf.write("\64\3\64\3\64\7\64\u018a\n\64\f\64\16\64\u018d\13\64\3") - buf.write("\64\3\64\3\65\3\65\3\66\3\66\3\66\3\66\7\66\u0197\n\66") - buf.write("\f\66\16\66\u019a\13\66\3\66\3\66\3\67\3\67\3\67\3\67") - buf.write("\7\67\u01a2\n\67\f\67\16\67\u01a5\13\67\3\67\3\67\38\3") - buf.write("8\38\38\38\58\u01ae\n8\38\78\u01b1\n8\f8\168\u01b4\13") - buf.write("8\38\38\39\39\39\39\79\u01bc\n9\f9\169\u01bf\139\39\3") - buf.write("9\39\59\u01c4\n9\39\39\3:\3:\3:\3:\3;\3;\7;\u01ce\n;\f") - buf.write(";\16;\u01d1\13;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3") - buf.write("B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3") + buf.write("U\4V\tV\4W\tW\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5") + buf.write("\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3") + buf.write("\t\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17") + buf.write("\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24") + buf.write("\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32") + buf.write("\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36") + buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3") 
+ buf.write("\"\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3%\3%\3%\3&\3&\3&") + buf.write("\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3)") + buf.write("\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3") + buf.write(",\3,\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3") + buf.write("/\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3") + buf.write("\61\3\62\6\62\u0155\n\62\r\62\16\62\u0156\3\62\3\62\7") + buf.write("\62\u015b\n\62\f\62\16\62\u015e\13\62\5\62\u0160\n\62") + buf.write("\3\62\3\62\5\62\u0164\n\62\3\62\6\62\u0167\n\62\r\62\16") + buf.write("\62\u0168\5\62\u016b\n\62\3\62\3\62\6\62\u016f\n\62\r") + buf.write("\62\16\62\u0170\3\62\3\62\5\62\u0175\n\62\3\62\6\62\u0178") + buf.write("\n\62\r\62\16\62\u0179\5\62\u017c\n\62\5\62\u017e\n\62") + buf.write("\3\63\3\63\3\64\3\64\3\64\3\64\7\64\u0186\n\64\f\64\16") + buf.write("\64\u0189\13\64\3\64\3\64\3\65\3\65\3\65\3\65\7\65\u0191") + buf.write("\n\65\f\65\16\65\u0194\13\65\3\65\3\65\3\66\3\66\3\67") + buf.write("\3\67\3\67\3\67\7\67\u019e\n\67\f\67\16\67\u01a1\13\67") + buf.write("\3\67\3\67\38\38\38\38\78\u01a9\n8\f8\168\u01ac\138\3") + buf.write("8\38\39\39\39\39\39\59\u01b5\n9\39\79\u01b8\n9\f9\169") + buf.write("\u01bb\139\39\39\3:\3:\3:\3:\7:\u01c3\n:\f:\16:\u01c6") + buf.write("\13:\3:\3:\3:\5:\u01cb\n:\3:\3:\3;\3;\3;\3;\3<\3<\7<\u01d5") + buf.write("\n<\f<\16<\u01d8\13<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B") + buf.write("\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3") buf.write("K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3") - buf.write("T\3T\3U\3U\3V\3V\3\u01bd\2W\3\3\5\4\7\5\t\6\13\7\r\b\17") - buf.write("\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23") - buf.write("%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36") - buf.write(";\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63") - buf.write("e\64g\65i\66k\67m8o9q:s;u\2\2\u00b7\u00b8\7?\2\2\u00b8") - buf.write("\n\3\2\2\2\u00b9\u00ba\7#\2\2\u00ba\u00bb\7?\2\2\u00bb") - buf.write("\f\3\2\2\2\u00bc\u00bd\7>\2\2\u00bd\u00be\7@\2\2\u00be") - buf.write("\16\3\2\2\2\u00bf\u00c0\7~\2\2\u00c0\u00c1\7~\2\2\u00c1") - buf.write("\20\3\2\2\2\u00c2\u00c3\7>\2\2\u00c3\u00c4\7>\2\2\u00c4") - buf.write("\22\3\2\2\2\u00c5\u00c6\7@\2\2\u00c6\u00c7\7@\2\2\u00c7") - buf.write("\24\3\2\2\2\u00c8\u00c9\7(\2\2\u00c9\26\3\2\2\2\u00ca") - buf.write("\u00cb\7?\2\2\u00cb\30\3\2\2\2\u00cc\u00cd\7+\2\2\u00cd") - buf.write("\32\3\2\2\2\u00ce\u00cf\7<\2\2\u00cf\34\3\2\2\2\u00d0") - buf.write("\u00d1\7.\2\2\u00d1\36\3\2\2\2\u00d2\u00d3\7\60\2\2\u00d3") - buf.write(" \3\2\2\2\u00d4\u00d5\7\61\2\2\u00d5\"\3\2\2\2\u00d6\u00d7") - buf.write("\7@\2\2\u00d7$\3\2\2\2\u00d8\u00d9\7>\2\2\u00d9&\3\2\2") - buf.write("\2\u00da\u00db\7/\2\2\u00db(\3\2\2\2\u00dc\u00dd\7\'\2") - buf.write("\2\u00dd*\3\2\2\2\u00de\u00df\7*\2\2\u00df,\3\2\2\2\u00e0") - buf.write("\u00e1\7~\2\2\u00e1.\3\2\2\2\u00e2\u00e3\7-\2\2\u00e3") - buf.write("\60\3\2\2\2\u00e4\u00e5\7A\2\2\u00e5\62\3\2\2\2\u00e6") - buf.write("\u00e7\7=\2\2\u00e7\64\3\2\2\2\u00e8\u00e9\7,\2\2\u00e9") - buf.write("\66\3\2\2\2\u00ea\u00eb\7\u0080\2\2\u00eb8\3\2\2\2\u00ec") - buf.write("\u00ed\7a\2\2\u00ed:\3\2\2\2\u00ee\u00ef\5y=\2\u00ef\u00f0") - buf.write("\5\u0093J\2\u00f0\u00f1\5\177@\2\u00f1<\3\2\2\2\u00f2") - buf.write("\u00f3\5y=\2\u00f3\u00f4\5\u009dO\2\u00f4>\3\2\2\2\u00f5") - buf.write("\u00f6\5y=\2\u00f6\u00f7\5\u009dO\2\u00f7\u00f8\5}?\2") - buf.write("\u00f8@\3\2\2\2\u00f9\u00fa\5{>\2\u00fa\u00fb\5\u00a9") - buf.write("U\2\u00fbB\3\2\2\2\u00fc\u00fd\5\177@\2\u00fd\u00fe\5") - 
buf.write("\u0081A\2\u00fe\u00ff\5\u009dO\2\u00ff\u0100\5}?\2\u0100") - buf.write("D\3\2\2\2\u0101\u0102\5\u0083B\2\u0102\u0103\5y=\2\u0103") - buf.write("\u0104\5\u008fH\2\u0104\u0105\5\u009dO\2\u0105\u0106\5") - buf.write("\u0081A\2\u0106F\3\2\2\2\u0107\u0108\5\u0089E\2\u0108") - buf.write("\u0109\5\u009dO\2\u0109H\3\2\2\2\u010a\u010b\5\u0089E") - buf.write("\2\u010b\u010c\5\u009dO\2\u010c\u010d\5\u0093J\2\u010d") - buf.write("\u010e\5\u00a1Q\2\u010e\u010f\5\u008fH\2\u010f\u0110\5") - buf.write("\u008fH\2\u0110J\3\2\2\2\u0111\u0112\5\u008fH\2\u0112") - buf.write("\u0113\5\u0089E\2\u0113\u0114\5\u008dG\2\u0114\u0115\5") - buf.write("\u0081A\2\u0115L\3\2\2\2\u0116\u0117\5\u008fH\2\u0117") - buf.write("\u0118\5\u0089E\2\u0118\u0119\5\u0091I\2\u0119\u011a\5") - buf.write("\u0089E\2\u011a\u011b\5\u009fP\2\u011bN\3\2\2\2\u011c") - buf.write("\u011d\5\u0093J\2\u011d\u011e\5\u0095K\2\u011e\u011f\5") - buf.write("\u009fP\2\u011fP\3\2\2\2\u0120\u0121\5\u0093J\2\u0121") - buf.write("\u0122\5\u0095K\2\u0122\u0123\5\u009fP\2\u0123\u0124\5") - buf.write("\u0093J\2\u0124\u0125\5\u00a1Q\2\u0125\u0126\5\u008fH") - buf.write("\2\u0126\u0127\5\u008fH\2\u0127R\3\2\2\2\u0128\u0129\5") - buf.write("\u0093J\2\u0129\u012a\5\u00a1Q\2\u012a\u012b\5\u008fH") - buf.write("\2\u012b\u012c\5\u008fH\2\u012cT\3\2\2\2\u012d\u012e\5") - buf.write("\u0095K\2\u012e\u012f\5\u009bN\2\u012fV\3\2\2\2\u0130") - buf.write("\u0131\5\u0095K\2\u0131\u0132\5\u009bN\2\u0132\u0133\5") - buf.write("\177@\2\u0133\u0134\5\u0081A\2\u0134\u0135\5\u009bN\2") - buf.write("\u0135X\3\2\2\2\u0136\u0137\5\u009dO\2\u0137\u0138\5\u0081") - buf.write("A\2\u0138\u0139\5\u008fH\2\u0139\u013a\5\u0081A\2\u013a") - buf.write("\u013b\5}?\2\u013b\u013c\5\u009fP\2\u013cZ\3\2\2\2\u013d") - buf.write("\u013e\5\u009dO\2\u013e\u013f\5\u0081A\2\u013f\u0140\5") - buf.write("\u009fP\2\u0140\\\3\2\2\2\u0141\u0142\5\u009fP\2\u0142") - buf.write("\u0143\5\u009bN\2\u0143\u0144\5\u00a1Q\2\u0144\u0145\5") - buf.write("\u0081A\2\u0145^\3\2\2\2\u0146\u0147\5\u00a5S\2\u0147") - buf.write("\u0148\5\u0087D\2\u0148\u0149\5\u0081A\2\u0149\u014a\5") - buf.write("\u009bN\2\u014a\u014b\5\u0081A\2\u014b`\3\2\2\2\u014c") - buf.write("\u014e\5w<\2\u014d\u014c\3\2\2\2\u014e\u014f\3\2\2\2\u014f") - buf.write("\u014d\3\2\2\2\u014f\u0150\3\2\2\2\u0150\u0158\3\2\2\2") - buf.write("\u0151\u0155\7\60\2\2\u0152\u0154\5w<\2\u0153\u0152\3") - buf.write("\2\2\2\u0154\u0157\3\2\2\2\u0155\u0153\3\2\2\2\u0155\u0156") - buf.write("\3\2\2\2\u0156\u0159\3\2\2\2\u0157\u0155\3\2\2\2\u0158") - buf.write("\u0151\3\2\2\2\u0158\u0159\3\2\2\2\u0159\u0163\3\2\2\2") - buf.write("\u015a\u015c\5\u0081A\2\u015b\u015d\t\2\2\2\u015c\u015b") - buf.write("\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u015f\3\2\2\2\u015e") - buf.write("\u0160\5w<\2\u015f\u015e\3\2\2\2\u0160\u0161\3\2\2\2\u0161") - buf.write("\u015f\3\2\2\2\u0161\u0162\3\2\2\2\u0162\u0164\3\2\2\2") - buf.write("\u0163\u015a\3\2\2\2\u0163\u0164\3\2\2\2\u0164\u0177\3") - buf.write("\2\2\2\u0165\u0167\7\60\2\2\u0166\u0168\5w<\2\u0167\u0166") - buf.write("\3\2\2\2\u0168\u0169\3\2\2\2\u0169\u0167\3\2\2\2\u0169") - buf.write("\u016a\3\2\2\2\u016a\u0174\3\2\2\2\u016b\u016d\5\u0081") - buf.write("A\2\u016c\u016e\t\2\2\2\u016d\u016c\3\2\2\2\u016d\u016e") - buf.write("\3\2\2\2\u016e\u0170\3\2\2\2\u016f\u0171\5w<\2\u0170\u016f") - buf.write("\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0170\3\2\2\2\u0172") - buf.write("\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174\u016b\3\2\2\2") - buf.write("\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2\u0176\u014d\3") - 
buf.write("\2\2\2\u0176\u0165\3\2\2\2\u0177b\3\2\2\2\u0178\u0179") - buf.write("\5e\63\2\u0179d\3\2\2\2\u017a\u0180\7$\2\2\u017b\u017c") - buf.write("\7^\2\2\u017c\u017f\7$\2\2\u017d\u017f\n\3\2\2\u017e\u017b") - buf.write("\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0182\3\2\2\2\u0180") - buf.write("\u017e\3\2\2\2\u0180\u0181\3\2\2\2\u0181\u0183\3\2\2\2") - buf.write("\u0182\u0180\3\2\2\2\u0183\u0184\7$\2\2\u0184f\3\2\2\2") - buf.write("\u0185\u018b\7$\2\2\u0186\u0187\7$\2\2\u0187\u018a\7$") - buf.write("\2\2\u0188\u018a\n\3\2\2\u0189\u0186\3\2\2\2\u0189\u0188") - buf.write("\3\2\2\2\u018a\u018d\3\2\2\2\u018b\u0189\3\2\2\2\u018b") - buf.write("\u018c\3\2\2\2\u018c\u018e\3\2\2\2\u018d\u018b\3\2\2\2") - buf.write("\u018e\u018f\7$\2\2\u018fh\3\2\2\2\u0190\u0191\5k\66\2") - buf.write("\u0191j\3\2\2\2\u0192\u0198\7)\2\2\u0193\u0194\7^\2\2") - buf.write("\u0194\u0197\7)\2\2\u0195\u0197\n\4\2\2\u0196\u0193\3") - buf.write("\2\2\2\u0196\u0195\3\2\2\2\u0197\u019a\3\2\2\2\u0198\u0196") - buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\3\2\2\2\u019a") - buf.write("\u0198\3\2\2\2\u019b\u019c\7)\2\2\u019cl\3\2\2\2\u019d") - buf.write("\u01a3\7)\2\2\u019e\u019f\7)\2\2\u019f\u01a2\7)\2\2\u01a0") - buf.write("\u01a2\n\4\2\2\u01a1\u019e\3\2\2\2\u01a1\u01a0\3\2\2\2") - buf.write("\u01a2\u01a5\3\2\2\2\u01a3\u01a1\3\2\2\2\u01a3\u01a4\3") - buf.write("\2\2\2\u01a4\u01a6\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a6\u01a7") - buf.write("\7)\2\2\u01a7n\3\2\2\2\u01a8\u01a9\7/\2\2\u01a9\u01ae") - buf.write("\7/\2\2\u01aa\u01ab\7\61\2\2\u01ab\u01ae\7\61\2\2\u01ac") - buf.write("\u01ae\7%\2\2\u01ad\u01a8\3\2\2\2\u01ad\u01aa\3\2\2\2") - buf.write("\u01ad\u01ac\3\2\2\2\u01ae\u01b2\3\2\2\2\u01af\u01b1\n") - buf.write("\5\2\2\u01b0\u01af\3\2\2\2\u01b1\u01b4\3\2\2\2\u01b2\u01b0") - buf.write("\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3\u01b5\3\2\2\2\u01b4") - buf.write("\u01b2\3\2\2\2\u01b5\u01b6\b8\2\2\u01b6p\3\2\2\2\u01b7") - buf.write("\u01b8\7\61\2\2\u01b8\u01b9\7,\2\2\u01b9\u01bd\3\2\2\2") - buf.write("\u01ba\u01bc\13\2\2\2\u01bb\u01ba\3\2\2\2\u01bc\u01bf") - buf.write("\3\2\2\2\u01bd\u01be\3\2\2\2\u01bd\u01bb\3\2\2\2\u01be") - buf.write("\u01c3\3\2\2\2\u01bf\u01bd\3\2\2\2\u01c0\u01c1\7,\2\2") - buf.write("\u01c1\u01c4\7\61\2\2\u01c2\u01c4\7\2\2\3\u01c3\u01c0") - buf.write("\3\2\2\2\u01c3\u01c2\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5") - buf.write("\u01c6\b9\2\2\u01c6r\3\2\2\2\u01c7\u01c8\t\6\2\2\u01c8") - buf.write("\u01c9\3\2\2\2\u01c9\u01ca\b:\2\2\u01cat\3\2\2\2\u01cb") - buf.write("\u01cf\t\7\2\2\u01cc\u01ce\t\b\2\2\u01cd\u01cc\3\2\2\2") - buf.write("\u01ce\u01d1\3\2\2\2\u01cf\u01cd\3\2\2\2\u01cf\u01d0\3") - buf.write("\2\2\2\u01d0v\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d2\u01d3") - buf.write("\t\t\2\2\u01d3x\3\2\2\2\u01d4\u01d5\t\n\2\2\u01d5z\3\2") - buf.write("\2\2\u01d6\u01d7\t\13\2\2\u01d7|\3\2\2\2\u01d8\u01d9\t") - buf.write("\f\2\2\u01d9~\3\2\2\2\u01da\u01db\t\r\2\2\u01db\u0080") - buf.write("\3\2\2\2\u01dc\u01dd\t\16\2\2\u01dd\u0082\3\2\2\2\u01de") - buf.write("\u01df\t\17\2\2\u01df\u0084\3\2\2\2\u01e0\u01e1\t\20\2") - buf.write("\2\u01e1\u0086\3\2\2\2\u01e2\u01e3\t\21\2\2\u01e3\u0088") - buf.write("\3\2\2\2\u01e4\u01e5\t\22\2\2\u01e5\u008a\3\2\2\2\u01e6") - buf.write("\u01e7\t\23\2\2\u01e7\u008c\3\2\2\2\u01e8\u01e9\t\24\2") - buf.write("\2\u01e9\u008e\3\2\2\2\u01ea\u01eb\t\25\2\2\u01eb\u0090") - buf.write("\3\2\2\2\u01ec\u01ed\t\26\2\2\u01ed\u0092\3\2\2\2\u01ee") - buf.write("\u01ef\t\27\2\2\u01ef\u0094\3\2\2\2\u01f0\u01f1\t\30\2") - buf.write("\2\u01f1\u0096\3\2\2\2\u01f2\u01f3\t\31\2\2\u01f3\u0098") - 
buf.write("\3\2\2\2\u01f4\u01f5\t\32\2\2\u01f5\u009a\3\2\2\2\u01f6") - buf.write("\u01f7\t\33\2\2\u01f7\u009c\3\2\2\2\u01f8\u01f9\t\34\2") - buf.write("\2\u01f9\u009e\3\2\2\2\u01fa\u01fb\t\35\2\2\u01fb\u00a0") - buf.write("\3\2\2\2\u01fc\u01fd\t\36\2\2\u01fd\u00a2\3\2\2\2\u01fe") - buf.write("\u01ff\t\37\2\2\u01ff\u00a4\3\2\2\2\u0200\u0201\t \2\2") - buf.write("\u0201\u00a6\3\2\2\2\u0202\u0203\t!\2\2\u0203\u00a8\3") - buf.write("\2\2\2\u0204\u0205\t\"\2\2\u0205\u00aa\3\2\2\2\u0206\u0207") - buf.write("\t#\2\2\u0207\u00ac\3\2\2\2\33\2\u014f\u0155\u0158\u015c") - buf.write("\u0161\u0163\u0169\u016d\u0172\u0174\u0176\u017e\u0180") - buf.write("\u0189\u018b\u0196\u0198\u01a1\u01a3\u01ad\u01b2\u01bd") - buf.write("\u01c3\u01cf\3\2\3\2") + buf.write("T\3T\3U\3U\3V\3V\3W\3W\3\u01c4\2X\3\3\5\4\7\5\t\6\13\7") + buf.write("\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21") + buf.write("!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67") + buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61") + buf.write("a\62c\63e\64g\65i\66k\67m8o9q:s;u\2\2\u00b9\u00ba\7?\2\2\u00ba\n\3") + buf.write("\2\2\2\u00bb\u00bc\7#\2\2\u00bc\u00bd\7?\2\2\u00bd\f\3") + buf.write("\2\2\2\u00be\u00bf\7>\2\2\u00bf\u00c0\7@\2\2\u00c0\16") + buf.write("\3\2\2\2\u00c1\u00c2\7~\2\2\u00c2\u00c3\7~\2\2\u00c3\20") + buf.write("\3\2\2\2\u00c4\u00c5\7>\2\2\u00c5\u00c6\7>\2\2\u00c6\22") + buf.write("\3\2\2\2\u00c7\u00c8\7@\2\2\u00c8\u00c9\7@\2\2\u00c9\24") + buf.write("\3\2\2\2\u00ca\u00cb\7(\2\2\u00cb\26\3\2\2\2\u00cc\u00cd") + buf.write("\7?\2\2\u00cd\30\3\2\2\2\u00ce\u00cf\7+\2\2\u00cf\32\3") + buf.write("\2\2\2\u00d0\u00d1\7<\2\2\u00d1\34\3\2\2\2\u00d2\u00d3") + buf.write("\7.\2\2\u00d3\36\3\2\2\2\u00d4\u00d5\7\60\2\2\u00d5 \3") + buf.write("\2\2\2\u00d6\u00d7\7\61\2\2\u00d7\"\3\2\2\2\u00d8\u00d9") + buf.write("\7@\2\2\u00d9$\3\2\2\2\u00da\u00db\7>\2\2\u00db&\3\2\2") + buf.write("\2\u00dc\u00dd\7/\2\2\u00dd(\3\2\2\2\u00de\u00df\7\'\2") + buf.write("\2\u00df*\3\2\2\2\u00e0\u00e1\7*\2\2\u00e1,\3\2\2\2\u00e2") + buf.write("\u00e3\7~\2\2\u00e3.\3\2\2\2\u00e4\u00e5\7-\2\2\u00e5") + buf.write("\60\3\2\2\2\u00e6\u00e7\7A\2\2\u00e7\62\3\2\2\2\u00e8") + buf.write("\u00e9\7=\2\2\u00e9\64\3\2\2\2\u00ea\u00eb\7,\2\2\u00eb") + buf.write("\66\3\2\2\2\u00ec\u00ed\7\u0080\2\2\u00ed8\3\2\2\2\u00ee") + buf.write("\u00ef\7a\2\2\u00ef:\3\2\2\2\u00f0\u00f1\5{>\2\u00f1\u00f2") + buf.write("\5\u0095K\2\u00f2\u00f3\5\u0081A\2\u00f3<\3\2\2\2\u00f4") + buf.write("\u00f5\5{>\2\u00f5\u00f6\5\u009fP\2\u00f6>\3\2\2\2\u00f7") + buf.write("\u00f8\5{>\2\u00f8\u00f9\5\u009fP\2\u00f9\u00fa\5\177") + buf.write("@\2\u00fa@\3\2\2\2\u00fb\u00fc\5}?\2\u00fc\u00fd\5\u00ab") + buf.write("V\2\u00fdB\3\2\2\2\u00fe\u00ff\5\u0081A\2\u00ff\u0100") + buf.write("\5\u0083B\2\u0100\u0101\5\u009fP\2\u0101\u0102\5\177@") + buf.write("\2\u0102D\3\2\2\2\u0103\u0104\5\u0085C\2\u0104\u0105\5") + buf.write("{>\2\u0105\u0106\5\u0091I\2\u0106\u0107\5\u009fP\2\u0107") + buf.write("\u0108\5\u0083B\2\u0108F\3\2\2\2\u0109\u010a\5\u0085C") + buf.write("\2\u010a\u010b\5\u009dO\2\u010b\u010c\5\u0097L\2\u010c") + buf.write("\u010d\5\u0093J\2\u010dH\3\2\2\2\u010e\u010f\5\u008bF") + buf.write("\2\u010f\u0110\5\u009fP\2\u0110J\3\2\2\2\u0111\u0112\5") + buf.write("\u008bF\2\u0112\u0113\5\u009fP\2\u0113\u0114\5\u0095K") + buf.write("\2\u0114\u0115\5\u00a3R\2\u0115\u0116\5\u0091I\2\u0116") + buf.write("\u0117\5\u0091I\2\u0117L\3\2\2\2\u0118\u0119\5\u0091I") + buf.write("\2\u0119\u011a\5\u008bF\2\u011a\u011b\5\u008fH\2\u011b") + 
buf.write("\u011c\5\u0083B\2\u011cN\3\2\2\2\u011d\u011e\5\u0091I") + buf.write("\2\u011e\u011f\5\u008bF\2\u011f\u0120\5\u0093J\2\u0120") + buf.write("\u0121\5\u008bF\2\u0121\u0122\5\u00a1Q\2\u0122P\3\2\2") + buf.write("\2\u0123\u0124\5\u0095K\2\u0124\u0125\5\u0097L\2\u0125") + buf.write("\u0126\5\u00a1Q\2\u0126R\3\2\2\2\u0127\u0128\5\u0095K") + buf.write("\2\u0128\u0129\5\u0097L\2\u0129\u012a\5\u00a1Q\2\u012a") + buf.write("\u012b\5\u0095K\2\u012b\u012c\5\u00a3R\2\u012c\u012d\5") + buf.write("\u0091I\2\u012d\u012e\5\u0091I\2\u012eT\3\2\2\2\u012f") + buf.write("\u0130\5\u0095K\2\u0130\u0131\5\u00a3R\2\u0131\u0132\5") + buf.write("\u0091I\2\u0132\u0133\5\u0091I\2\u0133V\3\2\2\2\u0134") + buf.write("\u0135\5\u0097L\2\u0135\u0136\5\u009dO\2\u0136X\3\2\2") + buf.write("\2\u0137\u0138\5\u0097L\2\u0138\u0139\5\u009dO\2\u0139") + buf.write("\u013a\5\u0081A\2\u013a\u013b\5\u0083B\2\u013b\u013c\5") + buf.write("\u009dO\2\u013cZ\3\2\2\2\u013d\u013e\5\u009fP\2\u013e") + buf.write("\u013f\5\u0083B\2\u013f\u0140\5\u0091I\2\u0140\u0141\5") + buf.write("\u0083B\2\u0141\u0142\5\177@\2\u0142\u0143\5\u00a1Q\2") + buf.write("\u0143\\\3\2\2\2\u0144\u0145\5\u009fP\2\u0145\u0146\5") + buf.write("\u0083B\2\u0146\u0147\5\u00a1Q\2\u0147^\3\2\2\2\u0148") + buf.write("\u0149\5\u00a1Q\2\u0149\u014a\5\u009dO\2\u014a\u014b\5") + buf.write("\u00a3R\2\u014b\u014c\5\u0083B\2\u014c`\3\2\2\2\u014d") + buf.write("\u014e\5\u00a7T\2\u014e\u014f\5\u0089E\2\u014f\u0150\5") + buf.write("\u0083B\2\u0150\u0151\5\u009dO\2\u0151\u0152\5\u0083B") + buf.write("\2\u0152b\3\2\2\2\u0153\u0155\5y=\2\u0154\u0153\3\2\2") + buf.write("\2\u0155\u0156\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157") + buf.write("\3\2\2\2\u0157\u015f\3\2\2\2\u0158\u015c\7\60\2\2\u0159") + buf.write("\u015b\5y=\2\u015a\u0159\3\2\2\2\u015b\u015e\3\2\2\2\u015c") + buf.write("\u015a\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0160\3\2\2\2") + buf.write("\u015e\u015c\3\2\2\2\u015f\u0158\3\2\2\2\u015f\u0160\3") + buf.write("\2\2\2\u0160\u016a\3\2\2\2\u0161\u0163\5\u0083B\2\u0162") + buf.write("\u0164\t\2\2\2\u0163\u0162\3\2\2\2\u0163\u0164\3\2\2\2") + buf.write("\u0164\u0166\3\2\2\2\u0165\u0167\5y=\2\u0166\u0165\3\2") + buf.write("\2\2\u0167\u0168\3\2\2\2\u0168\u0166\3\2\2\2\u0168\u0169") + buf.write("\3\2\2\2\u0169\u016b\3\2\2\2\u016a\u0161\3\2\2\2\u016a") + buf.write("\u016b\3\2\2\2\u016b\u017e\3\2\2\2\u016c\u016e\7\60\2") + buf.write("\2\u016d\u016f\5y=\2\u016e\u016d\3\2\2\2\u016f\u0170\3") + buf.write("\2\2\2\u0170\u016e\3\2\2\2\u0170\u0171\3\2\2\2\u0171\u017b") + buf.write("\3\2\2\2\u0172\u0174\5\u0083B\2\u0173\u0175\t\2\2\2\u0174") + buf.write("\u0173\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2") + buf.write("\u0176\u0178\5y=\2\u0177\u0176\3\2\2\2\u0178\u0179\3\2") + buf.write("\2\2\u0179\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u017c") + buf.write("\3\2\2\2\u017b\u0172\3\2\2\2\u017b\u017c\3\2\2\2\u017c") + buf.write("\u017e\3\2\2\2\u017d\u0154\3\2\2\2\u017d\u016c\3\2\2\2") + buf.write("\u017ed\3\2\2\2\u017f\u0180\5g\64\2\u0180f\3\2\2\2\u0181") + buf.write("\u0187\7$\2\2\u0182\u0183\7^\2\2\u0183\u0186\7$\2\2\u0184") + buf.write("\u0186\n\3\2\2\u0185\u0182\3\2\2\2\u0185\u0184\3\2\2\2") + buf.write("\u0186\u0189\3\2\2\2\u0187\u0185\3\2\2\2\u0187\u0188\3") + buf.write("\2\2\2\u0188\u018a\3\2\2\2\u0189\u0187\3\2\2\2\u018a\u018b") + buf.write("\7$\2\2\u018bh\3\2\2\2\u018c\u0192\7$\2\2\u018d\u018e") + buf.write("\7$\2\2\u018e\u0191\7$\2\2\u018f\u0191\n\3\2\2\u0190\u018d") + buf.write("\3\2\2\2\u0190\u018f\3\2\2\2\u0191\u0194\3\2\2\2\u0192") + 
buf.write("\u0190\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\3\2\2\2") + buf.write("\u0194\u0192\3\2\2\2\u0195\u0196\7$\2\2\u0196j\3\2\2\2") + buf.write("\u0197\u0198\5m\67\2\u0198l\3\2\2\2\u0199\u019f\7)\2\2") + buf.write("\u019a\u019b\7^\2\2\u019b\u019e\7)\2\2\u019c\u019e\n\4") + buf.write("\2\2\u019d\u019a\3\2\2\2\u019d\u019c\3\2\2\2\u019e\u01a1") + buf.write("\3\2\2\2\u019f\u019d\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0") + buf.write("\u01a2\3\2\2\2\u01a1\u019f\3\2\2\2\u01a2\u01a3\7)\2\2") + buf.write("\u01a3n\3\2\2\2\u01a4\u01aa\7)\2\2\u01a5\u01a6\7)\2\2") + buf.write("\u01a6\u01a9\7)\2\2\u01a7\u01a9\n\4\2\2\u01a8\u01a5\3") + buf.write("\2\2\2\u01a8\u01a7\3\2\2\2\u01a9\u01ac\3\2\2\2\u01aa\u01a8") + buf.write("\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab\u01ad\3\2\2\2\u01ac") + buf.write("\u01aa\3\2\2\2\u01ad\u01ae\7)\2\2\u01aep\3\2\2\2\u01af") + buf.write("\u01b0\7/\2\2\u01b0\u01b5\7/\2\2\u01b1\u01b2\7\61\2\2") + buf.write("\u01b2\u01b5\7\61\2\2\u01b3\u01b5\7%\2\2\u01b4\u01af\3") + buf.write("\2\2\2\u01b4\u01b1\3\2\2\2\u01b4\u01b3\3\2\2\2\u01b5\u01b9") + buf.write("\3\2\2\2\u01b6\u01b8\n\5\2\2\u01b7\u01b6\3\2\2\2\u01b8") + buf.write("\u01bb\3\2\2\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2") + buf.write("\u01ba\u01bc\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bc\u01bd\b") + buf.write("9\2\2\u01bdr\3\2\2\2\u01be\u01bf\7\61\2\2\u01bf\u01c0") + buf.write("\7,\2\2\u01c0\u01c4\3\2\2\2\u01c1\u01c3\13\2\2\2\u01c2") + buf.write("\u01c1\3\2\2\2\u01c3\u01c6\3\2\2\2\u01c4\u01c5\3\2\2\2") + buf.write("\u01c4\u01c2\3\2\2\2\u01c5\u01ca\3\2\2\2\u01c6\u01c4\3") + buf.write("\2\2\2\u01c7\u01c8\7,\2\2\u01c8\u01cb\7\61\2\2\u01c9\u01cb") + buf.write("\7\2\2\3\u01ca\u01c7\3\2\2\2\u01ca\u01c9\3\2\2\2\u01cb") + buf.write("\u01cc\3\2\2\2\u01cc\u01cd\b:\2\2\u01cdt\3\2\2\2\u01ce") + buf.write("\u01cf\t\6\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\b;\2\2") + buf.write("\u01d1v\3\2\2\2\u01d2\u01d6\t\7\2\2\u01d3\u01d5\t\b\2") + buf.write("\2\u01d4\u01d3\3\2\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d4") + buf.write("\3\2\2\2\u01d6\u01d7\3\2\2\2\u01d7x\3\2\2\2\u01d8\u01d6") + buf.write("\3\2\2\2\u01d9\u01da\t\t\2\2\u01daz\3\2\2\2\u01db\u01dc") + buf.write("\t\n\2\2\u01dc|\3\2\2\2\u01dd\u01de\t\13\2\2\u01de~\3") + buf.write("\2\2\2\u01df\u01e0\t\f\2\2\u01e0\u0080\3\2\2\2\u01e1\u01e2") + buf.write("\t\r\2\2\u01e2\u0082\3\2\2\2\u01e3\u01e4\t\16\2\2\u01e4") + buf.write("\u0084\3\2\2\2\u01e5\u01e6\t\17\2\2\u01e6\u0086\3\2\2") + buf.write("\2\u01e7\u01e8\t\20\2\2\u01e8\u0088\3\2\2\2\u01e9\u01ea") + buf.write("\t\21\2\2\u01ea\u008a\3\2\2\2\u01eb\u01ec\t\22\2\2\u01ec") + buf.write("\u008c\3\2\2\2\u01ed\u01ee\t\23\2\2\u01ee\u008e\3\2\2") + buf.write("\2\u01ef\u01f0\t\24\2\2\u01f0\u0090\3\2\2\2\u01f1\u01f2") + buf.write("\t\25\2\2\u01f2\u0092\3\2\2\2\u01f3\u01f4\t\26\2\2\u01f4") + buf.write("\u0094\3\2\2\2\u01f5\u01f6\t\27\2\2\u01f6\u0096\3\2\2") + buf.write("\2\u01f7\u01f8\t\30\2\2\u01f8\u0098\3\2\2\2\u01f9\u01fa") + buf.write("\t\31\2\2\u01fa\u009a\3\2\2\2\u01fb\u01fc\t\32\2\2\u01fc") + buf.write("\u009c\3\2\2\2\u01fd\u01fe\t\33\2\2\u01fe\u009e\3\2\2") + buf.write("\2\u01ff\u0200\t\34\2\2\u0200\u00a0\3\2\2\2\u0201\u0202") + buf.write("\t\35\2\2\u0202\u00a2\3\2\2\2\u0203\u0204\t\36\2\2\u0204") + buf.write("\u00a4\3\2\2\2\u0205\u0206\t\37\2\2\u0206\u00a6\3\2\2") + buf.write("\2\u0207\u0208\t \2\2\u0208\u00a8\3\2\2\2\u0209\u020a") + buf.write("\t!\2\2\u020a\u00aa\3\2\2\2\u020b\u020c\t\"\2\2\u020c") + buf.write("\u00ac\3\2\2\2\u020d\u020e\t#\2\2\u020e\u00ae\3\2\2\2") + buf.write("\33\2\u0156\u015c\u015f\u0163\u0168\u016a\u0170\u0174") + 
buf.write("\u0179\u017b\u017d\u0185\u0187\u0190\u0192\u019d\u019f") + buf.write("\u01a8\u01aa\u01b4\u01b9\u01c4\u01ca\u01d6\3\2\3\2") return buf.getvalue() @@ -283,30 +286,31 @@ class PqlLexer(Lexer): K_BY = 32 K_DESC = 33 K_FALSE = 34 - K_IS = 35 - K_ISNULL = 36 - K_LIKE = 37 - K_LIMIT = 38 - K_NOT = 39 - K_NOTNULL = 40 - K_NULL = 41 - K_OR = 42 - K_ORDER = 43 - K_SELECT = 44 - K_SET = 45 - K_TRUE = 46 - K_WHERE = 47 - NUMERIC_LITERAL = 48 - DOUBLE_QUOTED_STRING = 49 - DOUBLE_QUOTED_STRING_TEL = 50 - DOUBLE_QUOTED_STRING_SQL = 51 - SINGLE_QUOTED_STRING = 52 - SINGLE_QUOTED_STRING_TEL = 53 - SINGLE_QUOTED_STRING_SQL = 54 - SINGLE_LINE_COMMENT = 55 - MULTILINE_COMMENT = 56 - SPACES = 57 - WORD = 58 + K_FROM = 35 + K_IS = 36 + K_ISNULL = 37 + K_LIKE = 38 + K_LIMIT = 39 + K_NOT = 40 + K_NOTNULL = 41 + K_NULL = 42 + K_OR = 43 + K_ORDER = 44 + K_SELECT = 45 + K_SET = 46 + K_TRUE = 47 + K_WHERE = 48 + NUMERIC_LITERAL = 49 + DOUBLE_QUOTED_STRING = 50 + DOUBLE_QUOTED_STRING_TEL = 51 + DOUBLE_QUOTED_STRING_SQL = 52 + SINGLE_QUOTED_STRING = 53 + SINGLE_QUOTED_STRING_TEL = 54 + SINGLE_QUOTED_STRING_SQL = 55 + SINGLE_LINE_COMMENT = 56 + MULTILINE_COMMENT = 57 + SPACES = 58 + WORD = 59 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -323,10 +327,10 @@ class PqlLexer(Lexer): "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", - "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_IS", - "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", - "K_OR", "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", - "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_FROM", + "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", + "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ] @@ -336,9 +340,9 @@ class PqlLexer(Lexer): "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", - "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "K_BY", "K_DESC", "K_FALSE", "K_FROM", "K_IS", "K_ISNULL", + "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 5f84a35..55fd58e 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,91 +11,103 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3<") - buf.write("\u00cd\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3=") + 
buf.write("\u00e4\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") - buf.write("\3\2\3\2\3\2\3\3\7\3+\n\3\f\3\16\3.\13\3\3\3\3\3\3\4\7") - buf.write("\4\63\n\4\f\4\16\4\66\13\4\3\4\3\4\6\4:\n\4\r\4\16\4;") - buf.write("\3\4\7\4?\n\4\f\4\16\4B\13\4\3\4\7\4E\n\4\f\4\16\4H\13") - buf.write("\4\3\5\3\5\5\5L\n\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\5\7U\n") - buf.write("\7\3\7\5\7X\n\7\3\7\5\7[\n\7\3\b\3\b\3\b\3\b\7\ba\n\b") - buf.write("\f\b\16\bd\13\b\3\t\3\t\3\t\3\t\5\tj\n\t\3\t\3\t\5\tn") - buf.write("\n\t\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\7\13x\n\13\f") - buf.write("\13\16\13{\13\13\3\f\3\f\5\f\177\n\f\3\r\3\r\3\r\3\16") - buf.write("\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u008e") - buf.write("\n\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16") - buf.write("\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\7\16\u00a2\n") - buf.write("\16\f\16\16\16\u00a5\13\16\3\17\3\17\3\17\5\17\u00aa\n") - buf.write("\17\3\17\3\17\3\20\3\20\3\20\7\20\u00b1\n\20\f\20\16\20") - buf.write("\u00b4\13\20\3\21\5\21\u00b7\n\21\3\21\3\21\3\21\5\21") - buf.write("\u00bc\n\21\3\21\3\21\3\21\5\21\u00c1\n\21\3\22\3\22\3") - buf.write("\22\7\22\u00c6\n\22\f\22\16\22\u00c9\13\22\3\23\3\23\3") - buf.write("\23\2\3\32\24\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 ") - buf.write("\"$\2\13\4\2!!##\5\2\25\25\31\31))\5\2\22\22\26\26\34") - buf.write("\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r%%\4") - buf.write("\2\3\3\37\37\4\2\t\t,,\7\2$$++\60\60\62\63\66\66\2\u00d8") - buf.write("\2&\3\2\2\2\4,\3\2\2\2\6\64\3\2\2\2\bK\3\2\2\2\nM\3\2") - buf.write("\2\2\fR\3\2\2\2\16\\\3\2\2\2\20e\3\2\2\2\22o\3\2\2\2\24") - buf.write("r\3\2\2\2\26|\3\2\2\2\30\u0080\3\2\2\2\32\u008d\3\2\2") - buf.write("\2\34\u00a6\3\2\2\2\36\u00ad\3\2\2\2 \u00b6\3\2\2\2\"") - buf.write("\u00c2\3\2\2\2$\u00ca\3\2\2\2&\'\5\32\16\2\'(\7\2\2\3") - buf.write("(\3\3\2\2\2)+\5\6\4\2*)\3\2\2\2+.\3\2\2\2,*\3\2\2\2,-") - buf.write("\3\2\2\2-/\3\2\2\2.,\3\2\2\2/\60\7\2\2\3\60\5\3\2\2\2") - buf.write("\61\63\7\33\2\2\62\61\3\2\2\2\63\66\3\2\2\2\64\62\3\2") - buf.write("\2\2\64\65\3\2\2\2\65\67\3\2\2\2\66\64\3\2\2\2\67@\5\b") - buf.write("\5\28:\7\33\2\298\3\2\2\2:;\3\2\2\2;9\3\2\2\2;<\3\2\2") - buf.write("\2<=\3\2\2\2=?\5\b\5\2>9\3\2\2\2?B\3\2\2\2@>\3\2\2\2@") - buf.write("A\3\2\2\2AF\3\2\2\2B@\3\2\2\2CE\7\33\2\2DC\3\2\2\2EH\3") - buf.write("\2\2\2FD\3\2\2\2FG\3\2\2\2G\7\3\2\2\2HF\3\2\2\2IL\5\n") - buf.write("\6\2JL\5\f\7\2KI\3\2\2\2KJ\3\2\2\2L\t\3\2\2\2MN\7/\2\2") - buf.write("NO\5\"\22\2OP\7\r\2\2PQ\5\32\16\2Q\13\3\2\2\2RT\5\16\b") - buf.write("\2SU\5\22\n\2TS\3\2\2\2TU\3\2\2\2UW\3\2\2\2VX\5\24\13") - buf.write("\2WV\3\2\2\2WX\3\2\2\2XZ\3\2\2\2Y[\5\30\r\2ZY\3\2\2\2") - buf.write("Z[\3\2\2\2[\r\3\2\2\2\\]\7.\2\2]b\5\20\t\2^_\7\20\2\2") - buf.write("_a\5\20\t\2`^\3\2\2\2ad\3\2\2\2b`\3\2\2\2bc\3\2\2\2c\17") - buf.write("\3\2\2\2db\3\2\2\2ei\5\32\16\2fg\7\17\2\2gh\7\17\2\2h") - buf.write("j\5\34\17\2if\3\2\2\2ij\3\2\2\2jm\3\2\2\2kl\7 \2\2ln\5") - buf.write(" \21\2mk\3\2\2\2mn\3\2\2\2n\21\3\2\2\2op\7\61\2\2pq\5") - buf.write("\32\16\2q\23\3\2\2\2rs\7-\2\2st\7\"\2\2ty\5\26\f\2uv\7") - buf.write("\20\2\2vx\5\26\f\2wu\3\2\2\2x{\3\2\2\2yw\3\2\2\2yz\3\2") - buf.write("\2\2z\25\3\2\2\2{y\3\2\2\2|~\5\32\16\2}\177\t\2\2\2~}") - buf.write("\3\2\2\2~\177\3\2\2\2\177\27\3\2\2\2\u0080\u0081\7(\2") - buf.write("\2\u0081\u0082\5\32\16\2\u0082\31\3\2\2\2\u0083\u0084") - buf.write("\b\16\1\2\u0084\u0085\t\3\2\2\u0085\u008e\5\32\16\r\u0086") - 
buf.write("\u0087\7\27\2\2\u0087\u0088\5\32\16\2\u0088\u0089\7\16") - buf.write("\2\2\u0089\u008e\3\2\2\2\u008a\u008e\5$\23\2\u008b\u008e") - buf.write("\5\34\17\2\u008c\u008e\5 \21\2\u008d\u0083\3\2\2\2\u008d") - buf.write("\u0086\3\2\2\2\u008d\u008a\3\2\2\2\u008d\u008b\3\2\2\2") - buf.write("\u008d\u008c\3\2\2\2\u008e\u00a3\3\2\2\2\u008f\u0090\f") - buf.write("\f\2\2\u0090\u0091\t\4\2\2\u0091\u00a2\5\32\16\r\u0092") - buf.write("\u0093\f\13\2\2\u0093\u0094\t\5\2\2\u0094\u00a2\5\32\16") - buf.write("\f\u0095\u0096\f\n\2\2\u0096\u0097\t\6\2\2\u0097\u00a2") - buf.write("\5\32\16\13\u0098\u0099\f\t\2\2\u0099\u009a\t\7\2\2\u009a") - buf.write("\u00a2\5\32\16\n\u009b\u009c\f\b\2\2\u009c\u009d\t\b\2") - buf.write("\2\u009d\u00a2\5\32\16\t\u009e\u009f\f\7\2\2\u009f\u00a0") - buf.write("\t\t\2\2\u00a0\u00a2\5\32\16\b\u00a1\u008f\3\2\2\2\u00a1") - buf.write("\u0092\3\2\2\2\u00a1\u0095\3\2\2\2\u00a1\u0098\3\2\2\2") - buf.write("\u00a1\u009b\3\2\2\2\u00a1\u009e\3\2\2\2\u00a2\u00a5\3") - buf.write("\2\2\2\u00a3\u00a1\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a4\33") - buf.write("\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a7\5\"\22\2\u00a7") - buf.write("\u00a9\7\27\2\2\u00a8\u00aa\5\36\20\2\u00a9\u00a8\3\2") - buf.write("\2\2\u00a9\u00aa\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00ac") - buf.write("\7\16\2\2\u00ac\35\3\2\2\2\u00ad\u00b2\5\32\16\2\u00ae") - buf.write("\u00af\7\20\2\2\u00af\u00b1\5\32\16\2\u00b0\u00ae\3\2") - buf.write("\2\2\u00b1\u00b4\3\2\2\2\u00b2\u00b0\3\2\2\2\u00b2\u00b3") - buf.write("\3\2\2\2\u00b3\37\3\2\2\2\u00b4\u00b2\3\2\2\2\u00b5\u00b7") - buf.write("\7\32\2\2\u00b6\u00b5\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7") - buf.write("\u00bb\3\2\2\2\u00b8\u00b9\5\"\22\2\u00b9\u00ba\7\30\2") - buf.write("\2\u00ba\u00bc\3\2\2\2\u00bb\u00b8\3\2\2\2\u00bb\u00bc") - buf.write("\3\2\2\2\u00bc\u00bd\3\2\2\2\u00bd\u00c0\5\"\22\2\u00be") - buf.write("\u00bf\7\17\2\2\u00bf\u00c1\5\"\22\2\u00c0\u00be\3\2\2") - buf.write("\2\u00c0\u00c1\3\2\2\2\u00c1!\3\2\2\2\u00c2\u00c7\7<\2") - buf.write("\2\u00c3\u00c4\7\21\2\2\u00c4\u00c6\7<\2\2\u00c5\u00c3") - buf.write("\3\2\2\2\u00c6\u00c9\3\2\2\2\u00c7\u00c5\3\2\2\2\u00c7") - buf.write("\u00c8\3\2\2\2\u00c8#\3\2\2\2\u00c9\u00c7\3\2\2\2\u00ca") - buf.write("\u00cb\t\n\2\2\u00cb%\3\2\2\2\31,\64;@FKTWZbimy~\u008d") - buf.write("\u00a1\u00a3\u00a9\u00b2\u00b6\u00bb\u00c0\u00c7") + buf.write("\4\24\t\24\4\25\t\25\3\2\3\2\3\2\3\3\7\3/\n\3\f\3\16\3") + buf.write("\62\13\3\3\3\3\3\3\4\7\4\67\n\4\f\4\16\4:\13\4\3\4\3\4") + buf.write("\6\4>\n\4\r\4\16\4?\3\4\7\4C\n\4\f\4\16\4F\13\4\3\4\7") + buf.write("\4I\n\4\f\4\16\4L\13\4\3\5\3\5\5\5P\n\5\3\6\3\6\3\6\3") + buf.write("\6\3\6\3\7\3\7\5\7Y\n\7\3\7\5\7\\\n\7\3\7\5\7_\n\7\3\7") + buf.write("\5\7b\n\7\3\b\3\b\3\b\3\b\7\bh\n\b\f\b\16\bk\13\b\3\t") + buf.write("\3\t\3\t\3\t\5\tq\n\t\3\t\3\t\5\tu\n\t\3\n\3\n\3\n\3\n") + buf.write("\7\n{\n\n\f\n\16\n~\13\n\3\13\3\13\5\13\u0082\n\13\3\13") + buf.write("\5\13\u0085\n\13\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\7\r\u008f") + buf.write("\n\r\f\r\16\r\u0092\13\r\3\16\3\16\5\16\u0096\n\16\3\17") + buf.write("\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20") + buf.write("\3\20\5\20\u00a5\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3") + buf.write("\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20") + buf.write("\3\20\7\20\u00b9\n\20\f\20\16\20\u00bc\13\20\3\21\3\21") + buf.write("\3\21\5\21\u00c1\n\21\3\21\3\21\3\22\3\22\3\22\7\22\u00c8") + buf.write("\n\22\f\22\16\22\u00cb\13\22\3\23\5\23\u00ce\n\23\3\23") + buf.write("\3\23\3\23\5\23\u00d3\n\23\3\23\3\23\3\23\5\23\u00d8\n") + 
buf.write("\23\3\24\3\24\3\24\7\24\u00dd\n\24\f\24\16\24\u00e0\13") + buf.write("\24\3\25\3\25\3\25\2\3\36\26\2\4\6\b\n\f\16\20\22\24\26") + buf.write("\30\32\34\36 \"$&(\2\13\4\2!!##\5\2\25\25\31\31**\5\2") + buf.write("\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4") + buf.write("\4\7\b\r\r&&\4\2\3\3\37\37\4\2\t\t--\7\2$$,,\61\61\63") + buf.write("\64\67\67\2\u00f1\2*\3\2\2\2\4\60\3\2\2\2\68\3\2\2\2\b") + buf.write("O\3\2\2\2\nQ\3\2\2\2\fV\3\2\2\2\16c\3\2\2\2\20l\3\2\2") + buf.write("\2\22v\3\2\2\2\24\177\3\2\2\2\26\u0086\3\2\2\2\30\u0089") + buf.write("\3\2\2\2\32\u0093\3\2\2\2\34\u0097\3\2\2\2\36\u00a4\3") + buf.write("\2\2\2 \u00bd\3\2\2\2\"\u00c4\3\2\2\2$\u00cd\3\2\2\2&") + buf.write("\u00d9\3\2\2\2(\u00e1\3\2\2\2*+\5\36\20\2+,\7\2\2\3,\3") + buf.write("\3\2\2\2-/\5\6\4\2.-\3\2\2\2/\62\3\2\2\2\60.\3\2\2\2\60") + buf.write("\61\3\2\2\2\61\63\3\2\2\2\62\60\3\2\2\2\63\64\7\2\2\3") + buf.write("\64\5\3\2\2\2\65\67\7\33\2\2\66\65\3\2\2\2\67:\3\2\2\2") + buf.write("8\66\3\2\2\289\3\2\2\29;\3\2\2\2:8\3\2\2\2;D\5\b\5\2<") + buf.write(">\7\33\2\2=<\3\2\2\2>?\3\2\2\2?=\3\2\2\2?@\3\2\2\2@A\3") + buf.write("\2\2\2AC\5\b\5\2B=\3\2\2\2CF\3\2\2\2DB\3\2\2\2DE\3\2\2") + buf.write("\2EJ\3\2\2\2FD\3\2\2\2GI\7\33\2\2HG\3\2\2\2IL\3\2\2\2") + buf.write("JH\3\2\2\2JK\3\2\2\2K\7\3\2\2\2LJ\3\2\2\2MP\5\n\6\2NP") + buf.write("\5\f\7\2OM\3\2\2\2ON\3\2\2\2P\t\3\2\2\2QR\7\60\2\2RS\5") + buf.write("&\24\2ST\7\r\2\2TU\5\36\20\2U\13\3\2\2\2VX\5\16\b\2WY") + buf.write("\5\22\n\2XW\3\2\2\2XY\3\2\2\2Y[\3\2\2\2Z\\\5\26\f\2[Z") + buf.write("\3\2\2\2[\\\3\2\2\2\\^\3\2\2\2]_\5\30\r\2^]\3\2\2\2^_") + buf.write("\3\2\2\2_a\3\2\2\2`b\5\34\17\2a`\3\2\2\2ab\3\2\2\2b\r") + buf.write("\3\2\2\2cd\7/\2\2di\5\20\t\2ef\7\20\2\2fh\5\20\t\2ge\3") + buf.write("\2\2\2hk\3\2\2\2ig\3\2\2\2ij\3\2\2\2j\17\3\2\2\2ki\3\2") + buf.write("\2\2lp\5\36\20\2mn\7\17\2\2no\7\17\2\2oq\5 \21\2pm\3\2") + buf.write("\2\2pq\3\2\2\2qt\3\2\2\2rs\7 \2\2su\5$\23\2tr\3\2\2\2") + buf.write("tu\3\2\2\2u\21\3\2\2\2vw\7%\2\2w|\5\24\13\2xy\7\20\2\2") + buf.write("y{\5\24\13\2zx\3\2\2\2{~\3\2\2\2|z\3\2\2\2|}\3\2\2\2}") + buf.write("\23\3\2\2\2~|\3\2\2\2\177\u0084\5&\24\2\u0080\u0082\7") + buf.write(" \2\2\u0081\u0080\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0083") + buf.write("\3\2\2\2\u0083\u0085\5&\24\2\u0084\u0081\3\2\2\2\u0084") + buf.write("\u0085\3\2\2\2\u0085\25\3\2\2\2\u0086\u0087\7\62\2\2\u0087") + buf.write("\u0088\5\36\20\2\u0088\27\3\2\2\2\u0089\u008a\7.\2\2\u008a") + buf.write("\u008b\7\"\2\2\u008b\u0090\5\32\16\2\u008c\u008d\7\20") + buf.write("\2\2\u008d\u008f\5\32\16\2\u008e\u008c\3\2\2\2\u008f\u0092") + buf.write("\3\2\2\2\u0090\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091") + buf.write("\31\3\2\2\2\u0092\u0090\3\2\2\2\u0093\u0095\5\36\20\2") + buf.write("\u0094\u0096\t\2\2\2\u0095\u0094\3\2\2\2\u0095\u0096\3") + buf.write("\2\2\2\u0096\33\3\2\2\2\u0097\u0098\7)\2\2\u0098\u0099") + buf.write("\5\36\20\2\u0099\35\3\2\2\2\u009a\u009b\b\20\1\2\u009b") + buf.write("\u009c\t\3\2\2\u009c\u00a5\5\36\20\r\u009d\u009e\7\27") + buf.write("\2\2\u009e\u009f\5\36\20\2\u009f\u00a0\7\16\2\2\u00a0") + buf.write("\u00a5\3\2\2\2\u00a1\u00a5\5(\25\2\u00a2\u00a5\5 \21\2") + buf.write("\u00a3\u00a5\5$\23\2\u00a4\u009a\3\2\2\2\u00a4\u009d\3") + buf.write("\2\2\2\u00a4\u00a1\3\2\2\2\u00a4\u00a2\3\2\2\2\u00a4\u00a3") + buf.write("\3\2\2\2\u00a5\u00ba\3\2\2\2\u00a6\u00a7\f\f\2\2\u00a7") + buf.write("\u00a8\t\4\2\2\u00a8\u00b9\5\36\20\r\u00a9\u00aa\f\13") + buf.write("\2\2\u00aa\u00ab\t\5\2\2\u00ab\u00b9\5\36\20\f\u00ac\u00ad") + 
buf.write("\f\n\2\2\u00ad\u00ae\t\6\2\2\u00ae\u00b9\5\36\20\13\u00af") + buf.write("\u00b0\f\t\2\2\u00b0\u00b1\t\7\2\2\u00b1\u00b9\5\36\20") + buf.write("\n\u00b2\u00b3\f\b\2\2\u00b3\u00b4\t\b\2\2\u00b4\u00b9") + buf.write("\5\36\20\t\u00b5\u00b6\f\7\2\2\u00b6\u00b7\t\t\2\2\u00b7") + buf.write("\u00b9\5\36\20\b\u00b8\u00a6\3\2\2\2\u00b8\u00a9\3\2\2") + buf.write("\2\u00b8\u00ac\3\2\2\2\u00b8\u00af\3\2\2\2\u00b8\u00b2") + buf.write("\3\2\2\2\u00b8\u00b5\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba") + buf.write("\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb\37\3\2\2\2\u00bc") + buf.write("\u00ba\3\2\2\2\u00bd\u00be\5&\24\2\u00be\u00c0\7\27\2") + buf.write("\2\u00bf\u00c1\5\"\22\2\u00c0\u00bf\3\2\2\2\u00c0\u00c1") + buf.write("\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c3\7\16\2\2\u00c3") + buf.write("!\3\2\2\2\u00c4\u00c9\5\36\20\2\u00c5\u00c6\7\20\2\2\u00c6") + buf.write("\u00c8\5\36\20\2\u00c7\u00c5\3\2\2\2\u00c8\u00cb\3\2\2") + buf.write("\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca#\3\2") + buf.write("\2\2\u00cb\u00c9\3\2\2\2\u00cc\u00ce\7\32\2\2\u00cd\u00cc") + buf.write("\3\2\2\2\u00cd\u00ce\3\2\2\2\u00ce\u00d2\3\2\2\2\u00cf") + buf.write("\u00d0\5&\24\2\u00d0\u00d1\7\30\2\2\u00d1\u00d3\3\2\2") + buf.write("\2\u00d2\u00cf\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d4") + buf.write("\3\2\2\2\u00d4\u00d7\5&\24\2\u00d5\u00d6\7\17\2\2\u00d6") + buf.write("\u00d8\5&\24\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2") + buf.write("\u00d8%\3\2\2\2\u00d9\u00de\7=\2\2\u00da\u00db\7\21\2") + buf.write("\2\u00db\u00dd\7=\2\2\u00dc\u00da\3\2\2\2\u00dd\u00e0") + buf.write("\3\2\2\2\u00de\u00dc\3\2\2\2\u00de\u00df\3\2\2\2\u00df") + buf.write("\'\3\2\2\2\u00e0\u00de\3\2\2\2\u00e1\u00e2\t\n\2\2\u00e2") + buf.write(")\3\2\2\2\35\608?DJOX[^aipt|\u0081\u0084\u0090\u0095\u00a4") + buf.write("\u00b8\u00ba\u00c0\u00c9\u00cd\u00d2\u00d7\u00de") return buf.getvalue() @@ -120,10 +132,10 @@ class PqlParser ( Parser ): "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", - "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", - "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_ORDER", - "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "K_FALSE", "K_FROM", "K_IS", "K_ISNULL", "K_LIKE", + "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", + "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", @@ -137,22 +149,24 @@ class PqlParser ( Parser ): RULE_selectStmt = 5 RULE_selectClause = 6 RULE_columns = 7 - RULE_whereClause = 8 - RULE_orderByClause = 9 - RULE_orderExpr = 10 - RULE_limitClause = 11 - RULE_expr = 12 - RULE_function = 13 - RULE_exprList = 14 - RULE_taxon = 15 - RULE_identifierMultipart = 16 - RULE_literalValue = 17 + RULE_fromClause = 8 + RULE_tables = 9 + RULE_whereClause = 10 + RULE_orderByClause = 11 + RULE_orderExpr = 12 + RULE_limitClause = 13 + RULE_expr = 14 + RULE_function = 15 + RULE_exprList = 16 + RULE_taxon = 17 + RULE_identifierMultipart = 18 + RULE_literalValue = 19 ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "setStmt", - "selectStmt", "selectClause", "columns", "whereClause", - "orderByClause", "orderExpr", "limitClause", "expr", - "function", "exprList", "taxon", "identifierMultipart", - 
"literalValue" ] + "selectStmt", "selectClause", "columns", "fromClause", + "tables", "whereClause", "orderByClause", "orderExpr", + "limitClause", "expr", "function", "exprList", "taxon", + "identifierMultipart", "literalValue" ] EOF = Token.EOF AND=1 @@ -189,30 +203,31 @@ class PqlParser ( Parser ): K_BY=32 K_DESC=33 K_FALSE=34 - K_IS=35 - K_ISNULL=36 - K_LIKE=37 - K_LIMIT=38 - K_NOT=39 - K_NOTNULL=40 - K_NULL=41 - K_OR=42 - K_ORDER=43 - K_SELECT=44 - K_SET=45 - K_TRUE=46 - K_WHERE=47 - NUMERIC_LITERAL=48 - DOUBLE_QUOTED_STRING=49 - DOUBLE_QUOTED_STRING_TEL=50 - DOUBLE_QUOTED_STRING_SQL=51 - SINGLE_QUOTED_STRING=52 - SINGLE_QUOTED_STRING_TEL=53 - SINGLE_QUOTED_STRING_SQL=54 - SINGLE_LINE_COMMENT=55 - MULTILINE_COMMENT=56 - SPACES=57 - WORD=58 + K_FROM=35 + K_IS=36 + K_ISNULL=37 + K_LIKE=38 + K_LIMIT=39 + K_NOT=40 + K_NOTNULL=41 + K_NULL=42 + K_OR=43 + K_ORDER=44 + K_SELECT=45 + K_SET=46 + K_TRUE=47 + K_WHERE=48 + NUMERIC_LITERAL=49 + DOUBLE_QUOTED_STRING=50 + DOUBLE_QUOTED_STRING_TEL=51 + DOUBLE_QUOTED_STRING_SQL=52 + SINGLE_QUOTED_STRING=53 + SINGLE_QUOTED_STRING_TEL=54 + SINGLE_QUOTED_STRING_SQL=55 + SINGLE_LINE_COMMENT=56 + MULTILINE_COMMENT=57 + SPACES=58 + WORD=59 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -262,9 +277,9 @@ def parseTel(self): self.enterRule(localctx, 0, self.RULE_parseTel) try: self.enterOuterAlt(localctx, 1) - self.state = 36 + self.state = 40 self.expr(0) - self.state = 37 + self.state = 41 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -318,17 +333,17 @@ def parsePql(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 42 + self.state = 46 self._errHandler.sync(self) _la = self._input.LA(1) while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.SCOL) | (1 << PqlParser.K_SELECT) | (1 << PqlParser.K_SET))) != 0): - self.state = 39 + self.state = 43 self.sqlStmtList() - self.state = 44 + self.state = 48 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 45 + self.state = 49 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -385,49 +400,49 @@ def sqlStmtList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 50 + self.state = 54 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.SCOL: - self.state = 47 + self.state = 51 self.match(PqlParser.SCOL) - self.state = 52 + self.state = 56 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 53 + self.state = 57 self.sqlStmt() - self.state = 62 + self.state = 66 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 55 + self.state = 59 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 54 + self.state = 58 self.match(PqlParser.SCOL) - self.state = 57 + self.state = 61 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la==PqlParser.SCOL): break - self.state = 59 + self.state = 63 self.sqlStmt() - self.state = 64 + self.state = 68 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - self.state = 68 + self.state = 72 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 65 + self.state = 69 self.match(PqlParser.SCOL) - self.state = 70 + self.state = 74 
self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,4,self._ctx) @@ -479,17 +494,17 @@ def sqlStmt(self): localctx = PqlParser.SqlStmtContext(self, self._ctx, self.state) self.enterRule(localctx, 6, self.RULE_sqlStmt) try: - self.state = 73 + self.state = 77 self._errHandler.sync(self) token = self._input.LA(1) if token in [PqlParser.K_SET]: self.enterOuterAlt(localctx, 1) - self.state = 71 + self.state = 75 self.setStmt() pass elif token in [PqlParser.K_SELECT]: self.enterOuterAlt(localctx, 2) - self.state = 72 + self.state = 76 self.selectStmt() pass else: @@ -510,7 +525,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.key = None # IdentifierMultipartContext - self.values = None # ExprContext + self.value = None # ExprContext def K_SET(self): return self.getToken(PqlParser.K_SET, 0) @@ -552,14 +567,14 @@ def setStmt(self): self.enterRule(localctx, 8, self.RULE_setStmt) try: self.enterOuterAlt(localctx, 1) - self.state = 75 + self.state = 79 self.match(PqlParser.K_SET) - self.state = 76 + self.state = 80 localctx.key = self.identifierMultipart() - self.state = 77 + self.state = 81 self.match(PqlParser.ASSIGN) - self.state = 78 - localctx.values = self.expr(0) + self.state = 82 + localctx.value = self.expr(0) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -579,6 +594,10 @@ def selectClause(self): return self.getTypedRuleContext(PqlParser.SelectClauseContext,0) + def fromClause(self): + return self.getTypedRuleContext(PqlParser.FromClauseContext,0) + + def whereClause(self): return self.getTypedRuleContext(PqlParser.WhereClauseContext,0) @@ -618,29 +637,37 @@ def selectStmt(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 80 + self.state = 84 self.selectClause() - self.state = 82 + self.state = 86 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_FROM: + self.state = 85 + self.fromClause() + + + self.state = 89 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_WHERE: - self.state = 81 + self.state = 88 self.whereClause() - self.state = 85 + self.state = 92 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ORDER: - self.state = 84 + self.state = 91 self.orderByClause() - self.state = 88 + self.state = 95 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_LIMIT: - self.state = 87 + self.state = 94 self.limitClause() @@ -702,19 +729,19 @@ def selectClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 90 + self.state = 97 self.match(PqlParser.K_SELECT) - self.state = 91 + self.state = 98 self.columns() - self.state = 96 + self.state = 103 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 92 + self.state = 99 self.match(PqlParser.COMMA) - self.state = 93 + self.state = 100 self.columns() - self.state = 98 + self.state = 105 self._errHandler.sync(self) _la = self._input.LA(1) @@ -784,27 +811,27 @@ def columns(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 99 + self.state = 106 localctx.value = self.expr(0) - self.state = 103 + self.state = 110 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.COLON: - self.state = 100 + self.state = 107 self.match(PqlParser.COLON) - self.state = 101 + self.state = 108 self.match(PqlParser.COLON) - self.state = 102 + 
self.state = 109 localctx.type_cast = self.function() - self.state = 107 + self.state = 114 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_AS: - self.state = 105 + self.state = 112 self.match(PqlParser.K_AS) - self.state = 106 + self.state = 113 localctx.alias = self.taxon() @@ -817,6 +844,152 @@ def columns(self): return localctx + class FromClauseContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def K_FROM(self): + return self.getToken(PqlParser.K_FROM, 0) + + def tables(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.TablesContext) + else: + return self.getTypedRuleContext(PqlParser.TablesContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def getRuleIndex(self): + return PqlParser.RULE_fromClause + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterFromClause" ): + listener.enterFromClause(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitFromClause" ): + listener.exitFromClause(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitFromClause" ): + return visitor.visitFromClause(self) + else: + return visitor.visitChildren(self) + + + + + def fromClause(self): + + localctx = PqlParser.FromClauseContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_fromClause) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 116 + self.match(PqlParser.K_FROM) + self.state = 117 + self.tables() + self.state = 122 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 118 + self.match(PqlParser.COMMA) + self.state = 119 + self.tables() + self.state = 124 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class TablesContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.table_name = None # IdentifierMultipartContext + self.table_alias = None # IdentifierMultipartContext + + def identifierMultipart(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.IdentifierMultipartContext) + else: + return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,i) + + + def K_AS(self): + return self.getToken(PqlParser.K_AS, 0) + + def getRuleIndex(self): + return PqlParser.RULE_tables + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterTables" ): + listener.enterTables(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitTables" ): + listener.exitTables(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitTables" ): + return visitor.visitTables(self) + else: + return visitor.visitChildren(self) + + + + + def tables(self): + + localctx = PqlParser.TablesContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_tables) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 125 + localctx.table_name = self.identifierMultipart() + 
self.state = 130 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_AS or _la==PqlParser.WORD: + self.state = 127 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_AS: + self.state = 126 + self.match(PqlParser.K_AS) + + + self.state = 129 + localctx.table_alias = self.identifierMultipart() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class WhereClauseContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -853,12 +1026,12 @@ def accept(self, visitor:ParseTreeVisitor): def whereClause(self): localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_whereClause) + self.enterRule(localctx, 20, self.RULE_whereClause) try: self.enterOuterAlt(localctx, 1) - self.state = 109 + self.state = 132 self.match(PqlParser.K_WHERE) - self.state = 110 + self.state = 133 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -917,25 +1090,25 @@ def accept(self, visitor:ParseTreeVisitor): def orderByClause(self): localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_orderByClause) + self.enterRule(localctx, 22, self.RULE_orderByClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 112 + self.state = 135 self.match(PqlParser.K_ORDER) - self.state = 113 + self.state = 136 self.match(PqlParser.K_BY) - self.state = 114 + self.state = 137 self.orderExpr() - self.state = 119 + self.state = 142 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 115 + self.state = 138 self.match(PqlParser.COMMA) - self.state = 116 + self.state = 139 self.orderExpr() - self.state = 121 + self.state = 144 self._errHandler.sync(self) _la = self._input.LA(1) @@ -987,17 +1160,17 @@ def accept(self, visitor:ParseTreeVisitor): def orderExpr(self): localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 20, self.RULE_orderExpr) + self.enterRule(localctx, 24, self.RULE_orderExpr) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 122 + self.state = 145 self.expr(0) - self.state = 124 + self.state = 147 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: - self.state = 123 + self.state = 146 _la = self._input.LA(1) if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): self._errHandler.recoverInline(self) @@ -1052,12 +1225,12 @@ def accept(self, visitor:ParseTreeVisitor): def limitClause(self): localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 22, self.RULE_limitClause) + self.enterRule(localctx, 26, self.RULE_limitClause) try: self.enterOuterAlt(localctx, 1) - self.state = 126 + self.state = 149 self.match(PqlParser.K_LIMIT) - self.state = 127 + self.state = 150 localctx.limit = self.expr(0) except RecognitionException as re: localctx.exception = re @@ -1185,16 +1358,16 @@ def expr(self, _p:int=0): _parentState = self.state localctx = PqlParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 24 - self.enterRecursionRule(localctx, 24, self.RULE_expr, _p) + _startState = 28 + self.enterRecursionRule(localctx, 28, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - 
self.state = 139 + self.state = 162 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,14,self._ctx) + la_ = self._interp.adaptivePredict(self._input,18,self._ctx) if la_ == 1: - self.state = 130 + self.state = 153 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -1202,56 +1375,56 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 131 + self.state = 154 localctx.right = self.expr(11) pass elif la_ == 2: - self.state = 132 + self.state = 155 self.match(PqlParser.OPEN_PAREN) - self.state = 133 + self.state = 156 localctx.inner = self.expr(0) - self.state = 134 + self.state = 157 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 136 + self.state = 159 self.literalValue() pass elif la_ == 4: - self.state = 137 + self.state = 160 self.function() pass elif la_ == 5: - self.state = 138 + self.state = 161 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 161 + self.state = 184 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,16,self._ctx) + _alt = self._interp.adaptivePredict(self._input,20,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 159 + self.state = 182 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,15,self._ctx) + la_ = self._interp.adaptivePredict(self._input,19,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 141 + self.state = 164 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 142 + self.state = 165 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -1259,7 +1432,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 143 + self.state = 166 localctx.right = self.expr(11) pass @@ -1267,11 +1440,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 144 + self.state = 167 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 145 + self.state = 168 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -1279,7 +1452,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 146 + self.state = 169 localctx.right = self.expr(10) pass @@ -1287,11 +1460,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 147 + self.state = 170 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, 
"self.precpred(self._ctx, 8)") - self.state = 148 + self.state = 171 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -1299,7 +1472,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 149 + self.state = 172 localctx.right = self.expr(9) pass @@ -1307,11 +1480,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 150 + self.state = 173 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 151 + self.state = 174 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -1319,7 +1492,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 152 + self.state = 175 localctx.right = self.expr(8) pass @@ -1327,11 +1500,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 153 + self.state = 176 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 154 + self.state = 177 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -1339,7 +1512,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 155 + self.state = 178 localctx.right = self.expr(7) pass @@ -1347,11 +1520,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 156 + self.state = 179 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 157 + self.state = 180 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -1359,14 +1532,14 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 158 + self.state = 181 localctx.right = self.expr(6) pass - self.state = 163 + self.state = 186 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,16,self._ctx) + _alt = self._interp.adaptivePredict(self._input,20,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1422,23 +1595,23 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PqlParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_function) + self.enterRule(localctx, 30, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 164 + self.state = 187 localctx.function_name = self.identifierMultipart() - self.state = 165 + self.state = 188 self.match(PqlParser.OPEN_PAREN) - self.state = 167 + 
self.state = 190 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 166 + self.state = 189 localctx.arguments = self.exprList() - self.state = 169 + self.state = 192 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -1491,21 +1664,21 @@ def accept(self, visitor:ParseTreeVisitor): def exprList(self): localctx = PqlParser.ExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 28, self.RULE_exprList) + self.enterRule(localctx, 32, self.RULE_exprList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 171 + self.state = 194 self.expr(0) - self.state = 176 + self.state = 199 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 172 + self.state = 195 self.match(PqlParser.COMMA) - self.state = 173 + self.state = 196 self.expr(0) - self.state = 178 + self.state = 201 self._errHandler.sync(self) _la = self._input.LA(1) @@ -1567,37 +1740,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 30, self.RULE_taxon) + self.enterRule(localctx, 34, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 180 + self.state = 203 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 179 + self.state = 202 localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 185 + self.state = 208 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,20,self._ctx) + la_ = self._interp.adaptivePredict(self._input,24,self._ctx) if la_ == 1: - self.state = 182 + self.state = 205 localctx.namespace = self.identifierMultipart() - self.state = 183 + self.state = 206 self.match(PqlParser.PIPE) - self.state = 187 + self.state = 210 localctx.slug = self.identifierMultipart() - self.state = 190 + self.state = 213 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,21,self._ctx) + la_ = self._interp.adaptivePredict(self._input,25,self._ctx) if la_ == 1: - self.state = 188 + self.state = 211 self.match(PqlParser.COLON) - self.state = 189 + self.state = 212 localctx.tag = self.identifierMultipart() @@ -1651,23 +1824,23 @@ def accept(self, visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 32, self.RULE_identifierMultipart) + self.enterRule(localctx, 36, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 192 + self.state = 215 self.match(PqlParser.WORD) - self.state = 197 + self.state = 220 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,22,self._ctx) + _alt = self._interp.adaptivePredict(self._input,26,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 193 + self.state = 216 self.match(PqlParser.DOT) - self.state = 194 + self.state = 217 self.match(PqlParser.WORD) - self.state = 199 + self.state = 222 self._errHandler.sync(self) - _alt = 
self._interp.adaptivePredict(self._input,22,self._ctx) + _alt = self._interp.adaptivePredict(self._input,26,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1725,11 +1898,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 34, self.RULE_literalValue) + self.enterRule(localctx, 38, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 200 + self.state = 223 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -1749,7 +1922,7 @@ def literalValue(self): def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[12] = self.expr_sempred + self._predicates[14] = self.expr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 5aa64ce..8b1703e 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -80,6 +80,24 @@ def exitColumns(self, ctx:PqlParser.ColumnsContext): pass + # Enter a parse tree produced by PqlParser#fromClause. + def enterFromClause(self, ctx:PqlParser.FromClauseContext): + pass + + # Exit a parse tree produced by PqlParser#fromClause. + def exitFromClause(self, ctx:PqlParser.FromClauseContext): + pass + + + # Enter a parse tree produced by PqlParser#tables. + def enterTables(self, ctx:PqlParser.TablesContext): + pass + + # Exit a parse tree produced by PqlParser#tables. + def exitTables(self, ctx:PqlParser.TablesContext): + pass + + # Enter a parse tree produced by PqlParser#whereClause. def enterWhereClause(self, ctx:PqlParser.WhereClauseContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index f9bca89..cf73393 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -49,6 +49,16 @@ def visitColumns(self, ctx:PqlParser.ColumnsContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PqlParser#fromClause. + def visitFromClause(self, ctx:PqlParser.FromClauseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by PqlParser#tables. + def visitTables(self, ctx:PqlParser.TablesContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by PqlParser#whereClause. 
def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): return self.visitChildren(ctx) diff --git a/python/src/pql_grammar/ast/from_pql.py b/python/src/pql_grammar/ast/from_pql.py index 888161b..169bbbb 100644 --- a/python/src/pql_grammar/ast/from_pql.py +++ b/python/src/pql_grammar/ast/from_pql.py @@ -1,6 +1,6 @@ from antlr4 import CommonTokenStream, InputStream, ParserRuleContext from antlr4 import ParserRuleContext -from typing import Optional, Tuple +from typing import Optional, Tuple, List, Type from ..antlr.PqlLexer import PqlLexer from ..antlr.PqlParser import PqlParser @@ -165,6 +165,16 @@ def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): return v + @classmethod + def parse_from_clause_expr(cls, ctx: PqlParser.FromClauseContext) -> List[ast.Table]: + return [ + ast.Table( + full_text(table.table_name), + full_text(table.table_alias) + ) + for table in ctx.tables() + ] + @classmethod def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : ctx = cls.unwrap_expr_parens(ctx) @@ -217,3 +227,53 @@ def visit_from_string(self, pql: str): parser = PqlParser(stream) tree = parser.parsePql() self.visit(tree) + + +def from_pql(pql: str, cls:Type[PqlVisitor] = PqlVisitor) -> List[ast.Node]: + + statements = [] + + class V(cls): + + def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): + columns = [ + ast.Column( + PqlAntlrToAstParser.parse_column_value(column.value), + PqlAntlrToAstParser.parse_column_typecast(column.type_cast), + PqlAntlrToAstParser.parse_column_alias(column.alias) + ) + for column in ctx.selectClause().columns() + ] + + v = ctx.fromClause() + if v: + from_clause = PqlAntlrToAstParser.parse_from_clause_expr(v) + else: + from_clause = None + + v = ctx.whereClause() + if v: + where_clause = PqlAntlrToAstParser.parse_where_clause_expr(v.expr()) + else: + where_clause = None + + statements.append(ast.SelectStmt( + columns=columns, + from_clause=from_clause, + where_clause=where_clause + )) + + def visitSetStmt(self, ctx:PqlParser.SetStmtContext): + key = full_text(ctx.key) + # TODO: parse this better. There are literals there possibly. Need to unpack them. 
+ value = full_text(ctx.value) + statements.append( + ast.SetStmt( + key, + value + ) + ) + + V().visit_from_string(pql) + + return statements diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py index da52b09..36be16c 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/ast/model.py @@ -56,10 +56,21 @@ class Column(Node): type_cast: Optional[Function] = None alias: Optional[Taxon] = None +@dataclass +class Table(Node): + value: str + alias: Optional[str] = None + @dataclass class SelectStmt(Node): columns: List[Column] - where_clause: Optional[Expr] + from_clause: Optional[List[Table]] = None + where_clause: Optional[Expr] = None + +@dataclass +class SetStmt(Node): + key: str + value: str def ast_diff(a, b, path=None): diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/ast/to_pql.py index 861f25d..ae036a6 100644 --- a/python/src/pql_grammar/ast/to_pql.py +++ b/python/src/pql_grammar/ast/to_pql.py @@ -82,13 +82,23 @@ def __str__(self): return f'{value}{type_cast}{alias}' +class Table(Node): + n: ast.Table + def __str__(self): + n = self.n + value = n.value + alias = f' AS {n.alias}' if n.alias else '' + return f'{value}{alias}' + + class SelectStmt(Node): n: ast.SelectStmt def __str__(self): n = self.n - cc = 'SELECT\n' + INDENT + (',\n' + INDENT).join(map(str, map(Column, n.columns))) + '\n' - w = 'WHERE\n' + INDENT + str(to_r(n.where_clause)) + '\n' - return cc + w + ';\n' + select_str = 'SELECT\n' + INDENT + (',\n' + INDENT).join(map(str, map(Column, n.columns))) + '\n' + from_str = 'FROM\n' + INDENT + (',\n' + INDENT).join(map(str, map(Table, n.from_clause))) + '\n' + where_str = 'WHERE\n' + INDENT + str(to_r(n.where_clause)) + '\n' + return select_str + from_str + where_str + ';\n' def to_r(n: ast.Node): diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py index 4bcfe94..008f90b 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -14,7 +14,7 @@ ast_should_be = ast.SelectStmt( - [ + columns = [ ast.Column(ast.Taxon('taxon1', 'ns1', True)), ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), @@ -38,7 +38,7 @@ ), ) ], - ast.Expr( + where_clause = ast.Expr( 'AND', [ ast.Expr( diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index bcf85d2..b0e7629 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -5,7 +5,7 @@ from pql_grammar.ast import model as ast from pql_grammar.ast.to_pql import to_pql -from pql_grammar.ast.from_pql import PqlAntlrToAstParser, PqlVisitor, PqlParser +from pql_grammar.ast.from_pql import PqlVisitor, PqlParser, from_pql class ErrorAssertingPqlVisitor(PqlVisitor): @@ -19,7 +19,7 @@ def visitErrorNode(self, node): raise AssertionError(details) -pql = """\ +pql_all_cases = """\ select ?ns1|taxon1, ns2|taxon2, @@ -27,6 +27,7 @@ def visitErrorNode(self, node): ?ns3|taxon3 + (slug2 - 1234) as myns|custom_data, (ns3|taxon3 + 5)::TypeCast() as myns|custom_data_cast, fn_4(fn_1(slug))::TypeCast(arg1=value1) +from my_ns, your_ns as super_ns where ns6|taxon6 > 1234 and (ns0|taxon10 + 4321) == 0 @@ -43,13 +44,16 @@ def visitErrorNode(self, node): ?ns3|taxon3 + (slug2 - 1234) AS myns|custom_data, (ns3|taxon3 + 5)::TypeCast() AS myns|custom_data_cast, (fn_4(fn_1(slug)))::TypeCast(arg1=value1) +FROM + my_ns, + your_ns AS super_ns WHERE ((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0)) ; """ stmt_should_be = ast.SelectStmt( - [ + columns 
= [ ast.Column(ast.Taxon('taxon1', 'ns1', True)), ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), @@ -73,7 +77,11 @@ def visitErrorNode(self, node): ), ) ], - ast.Expr( + from_clause = [ + ast.Table('my_ns'), + ast.Table('your_ns', 'super_ns') + ], + where_clause = ast.Expr( 'AND', [ ast.Expr( @@ -104,39 +112,58 @@ def visitErrorNode(self, node): class PqlAstTests(TestCase): maxDiff = None - def test_select(self): - - statements = [] - - class V(ErrorAssertingPqlVisitor): - def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - columns = [ - ast.Column( - PqlAntlrToAstParser.parse_column_value(column.value), - PqlAntlrToAstParser.parse_column_typecast(column.type_cast), - PqlAntlrToAstParser.parse_column_alias(column.alias) - ) - for column in ctx.selectClause().columns() - ] - where_clause = PqlAntlrToAstParser.parse_where_clause_expr(ctx.whereClause().expr()) - - statements.append(ast.SelectStmt( - columns, - where_clause - )) - - V().visit_from_string(pql) + def test_multiple_statements(self): + pql = """ + set fill_empty_dates = true; + select a, b; + """ + statements = from_pql(pql, ErrorAssertingPqlVisitor) + + assert len(statements) == 2 + set_stmt = statements[0] + select_stmt = statements[1] + + # TODO: make SET parse literal values properly into python native bool, int, str etc. + # Till then, this is mostly a placeholder for future functionality + assert set_stmt == ast.SetStmt('fill_empty_dates', 'true') + assert select_stmt == ast.SelectStmt( + columns=[ + ast.Column(ast.Taxon('a')), + ast.Column(ast.Taxon('b')) + ] + ) + def test_select(self): + statements = from_pql(pql_all_cases, ErrorAssertingPqlVisitor) assert statements stmt = statements[0] assert len(stmt.columns) == len(stmt_should_be.columns) for result, should_be in zip(stmt.columns, stmt_should_be.columns): assert result == should_be - # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) assert stmt.where_clause == stmt_should_be.where_clause + def test_parse_from_statement(self): + pql_input = """\ + SELECT + a, + b + from + dataset_one, + dataset_two as two + WHERE + a > b + ; + """ + statements = from_pql(pql_input, ErrorAssertingPqlVisitor) + assert len(statements) == 1 + select_stmt: ast.SelectStmt = statements[0] + assert select_stmt.from_clause == [ + ast.Table('dataset_one'), + ast.Table('dataset_two', 'two') + ] + def test_render_pql_from_ast(self): pql_result = to_pql(stmt_should_be) assert pql_rendered_should_be == pql_result From 995f29a9319110098c0bb7e5cbd454e6f610290f Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 15 Nov 2020 12:51:08 -0800 Subject: [PATCH 17/32] fully unpack TEL expressions into AST (was kept as string) --- python/src/pql_grammar/ast/from_json.py | 13 +-- python/src/pql_grammar/ast/from_pql.py | 104 ++++++++++++----------- python/src/pql_grammar/ast/model.py | 44 +++------- python/src/pql_grammar/ast/to_pql.py | 17 ++-- python/src/pql_grammar/ast/tools.py | 25 ++++++ python/tests/ast_json_test.py | 107 +++++++++++++++++++++--- python/tests/ast_pql_test.py | 45 ++++++++-- 7 files changed, 241 insertions(+), 114 deletions(-) create mode 100644 python/src/pql_grammar/ast/tools.py diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/ast/from_json.py index 27a7d57..49dcb6a 100644 --- a/python/src/pql_grammar/ast/from_json.py +++ b/python/src/pql_grammar/ast/from_json.py @@ -14,11 +14,14 @@ def from_json(o: dict): if not N: raise NotImplementedError(f'Renderer for node type 
"{name}" is not implemented.') - return N(**{ - k: from_json(v) - for k, v in o.items() - if k != TYPE_ATTRIBUTE - }) + try: + return N(**{ + k: from_json(v) + for k, v in o.items() + if k != TYPE_ATTRIBUTE + }) + except TypeError as ex: + raise TypeError(f"'{ex}' While processing {N} {o}") if isinstance(o, (list, tuple)): return [ diff --git a/python/src/pql_grammar/ast/from_pql.py b/python/src/pql_grammar/ast/from_pql.py index 169bbbb..d9d42d2 100644 --- a/python/src/pql_grammar/ast/from_pql.py +++ b/python/src/pql_grammar/ast/from_pql.py @@ -34,9 +34,15 @@ def unquote(s: str): # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' if not s: return s - if s[0] == '"' and s[-1] == '"': + wrapper = (s[0], s[-1]) + if wrapper == ('"', '"') or wrapper == ("'", "'"): s = s[1:-1] - return s.replace('""', '"') + + # TODO: decide which one we want to support + # TEL style escapes + return s.replace('\\"', '"').replace("\\'", "'") + # # SQL style escapes + # return s.replace('""', '"').replace("''", "'") class PqlAntlrToAstParser: @@ -69,11 +75,11 @@ def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optiona o = full_text(e.operator) if o == '=': arg_name = full_text(e.left) - arg_value = full_text(e.right) + arg_value = cls.parse_expr(e.right) else: arg_name = None - arg_value = full_text(e) - return (arg_name, arg_value) + arg_value = cls.parse_expr(e) + return [arg_name, arg_value] @classmethod def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: @@ -85,29 +91,6 @@ def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: ] if e.arguments else None ) - @classmethod - def parse_column_value(cls, v: PqlParser.expr) -> ast.ColumnValue: - # v is always PqlParser.expr, but anything can be inside - # It's not super relevant what's inside Expr, since - # we sent the original string-ified version of contenst to Husky anyway. - # There are some good reasons to parse the value for realz: - # - understanding if there is an outter `CAST( expr as TypeCast())` in there that needs re-syntaxing - # - deciding if specific value is taxon AND if it's in or is not in WHERE clause to channel it to pre/post agg - # However, we can do that crudely just on string representations of contents and avoid parsing them. - # Still, let's try to parse top level into one of: - # - Taxon - # - TelExpr where all other kinds of complex expressions are packed - # Specifically note that we allow Literal, Function other otherwise basic structures to be packed into Tel box. 
- - # So, if it's not Taxon object at top level, we unwrap redundant parens and pack string into TelExpr - v = cls.unwrap_expr_parens(v) - - t: Optional[PqlParser.TaxonContext] = v.taxon() - if t: - return cls.parse_taxon(t) - else: - return ast.TelExpr(full_text(v)) - @classmethod def parse_column_typecast(cls, v: PqlParser.FunctionContext) -> Optional[ast.Function]: if not v: @@ -123,17 +106,24 @@ def parse_column_alias(cls, v: PqlParser.TaxonContext) -> Optional[ast.Taxon]: @classmethod def parse_column(cls, e: PqlParser.ColumnsContext): return ast.Column( - cls.parse_column_value(e.value), + cls.parse_expr(e.value), cls.parse_column_typecast(e.type_cast), cls.parse_column_alias(e.alias) ) + @classmethod + def parse_literal(cls, e:PqlParser.LiteralValueContext): + return ast.Literal( + cls.parse_literal_value(e), + full_text(e) + ) + @staticmethod - def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): - is_number = e.NUMERIC_LITERAL() - is_string = e.DOUBLE_QUOTED_STRING() or e.SINGLE_QUOTED_STRING() - is_null = e.K_NULL() - is_bool = e.K_TRUE() or e.K_FALSE() + def parse_literal_value(e:PqlParser.LiteralValueContext): + is_number = bool(e.NUMERIC_LITERAL()) + is_string = bool(e.DOUBLE_QUOTED_STRING()) or bool(e.SINGLE_QUOTED_STRING()) + is_null = bool(e.K_NULL()) + is_bool = bool(e.K_TRUE()) or bool(e.K_FALSE()) # TODO: # - BLOB_LITERAL @@ -146,7 +136,7 @@ def _literalValue_to_python_native(e:PqlParser.LiteralValueContext): return bool(e.K_TRUE()) try: - v = e.getText() + v = full_text(e) except IndexError: raise Exception(f"Could not extract literal value node from '{e.getText()}'.") @@ -176,22 +166,19 @@ def parse_from_clause_expr(cls, ctx: PqlParser.FromClauseContext) -> List[ast.Ta ] @classmethod - def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : + def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : ctx = cls.unwrap_expr_parens(ctx) v = ctx.literalValue() if v: - return ast.Literal( - cls._literalValue_to_python_native(v), - full_text(v) - ) + return cls.parse_literal(v) v = ctx.unary_operator if v: operator = full_text(v).upper() return ast.Expr( operator, - [cls.parse_where_clause_expr(ctx.right)] + [cls.parse_expr(ctx.right)] ) v: Optional[str] = full_text(ctx.operator) @@ -202,8 +189,8 @@ def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : return ast.Expr( v.upper(), [ - cls.parse_where_clause_expr(ctx.left), - cls.parse_where_clause_expr(ctx.right) + cls.parse_expr(ctx.left), + cls.parse_expr(ctx.right) ] ) @@ -220,7 +207,7 @@ def parse_where_clause_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : class PqlVisitor(_PqlParserVisitor): - def visit_from_string(self, pql: str): + def visit_from_pql_string(self, pql: str): inp_stream = InputStream(pql) lexer = PqlLexer(inp_stream) stream = CommonTokenStream(lexer) @@ -228,6 +215,14 @@ def visit_from_string(self, pql: str): tree = parser.parsePql() self.visit(tree) + def visit_from_tel_string(self, tel: str): + inp_stream = InputStream(tel) + lexer = PqlLexer(inp_stream) + stream = CommonTokenStream(lexer) + parser = PqlParser(stream) + tree = parser.parseTel() + self.visit(tree) + def from_pql(pql: str, cls:Type[PqlVisitor] = PqlVisitor) -> List[ast.Node]: @@ -238,7 +233,7 @@ class V(cls): def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): columns = [ ast.Column( - PqlAntlrToAstParser.parse_column_value(column.value), + PqlAntlrToAstParser.parse_expr(column.value), PqlAntlrToAstParser.parse_column_typecast(column.type_cast), 
PqlAntlrToAstParser.parse_column_alias(column.alias) ) @@ -253,7 +248,7 @@ def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): v = ctx.whereClause() if v: - where_clause = PqlAntlrToAstParser.parse_where_clause_expr(v.expr()) + where_clause = PqlAntlrToAstParser.parse_expr(v.expr()) else: where_clause = None @@ -274,6 +269,21 @@ def visitSetStmt(self, ctx:PqlParser.SetStmtContext): ) ) - V().visit_from_string(pql) + V().visit_from_pql_string(pql) return statements + + +def from_tel(tel: str, cls:Type[PqlVisitor] = PqlVisitor) -> ast.Node: + + statements = [] + + class V(cls): + def visitExpr(self, ctx:PqlParser.ExprContext): + statements.append( + PqlAntlrToAstParser.parse_expr(ctx) + ) + + V().visit_from_tel_string(tel) + + return statements[0] if statements else None diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py index 36be16c..0dbc786 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/ast/model.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass, fields +from dataclasses import dataclass from decimal import Decimal from typing import ( Any, @@ -25,10 +25,6 @@ class Expr(Node): # rarely there will be len more than 2. args: List[Any] -@dataclass -class TelExpr(Node): - raw_value: str - @dataclass class Literal(Node): value: Union[int,float,str,Decimal] @@ -44,11 +40,17 @@ class Taxon(Node): @dataclass class Function(Node): function_name: str - # support named args. - # each tuple is a pair of arg_name=arg_value in order of occurrence. - args: Optional[List[Tuple[Optional[str],str]]] = None - -ColumnValue = Union[TelExpr,Function,Taxon,Literal] + # Note, we supported named args. + # args is a list of lists + # Outer list is list of arg_name,arg_value pairs + # If first value in pair list is Null, no arg name was provided. + # fn(arg='value',arg2=2) + # [['arg','value'],['arg2',2]] + # fn('value',2) + # [[null,'value'],[null,2]] + args: Optional[List[List[Any]]] = None + +ColumnValue = Union[Expr,Function,Taxon,Literal] @dataclass class Column(Node): @@ -73,28 +75,6 @@ class SetStmt(Node): value: str -def ast_diff(a, b, path=None): - if not path: - path = [] - - if type(a) != type(b): - raise Exception(f"Types of {a} and {b} are not same {type(a)} != {type(b)} for path {path}") - - path += [type(a).__name__] - - if isinstance(a, Node): - for f in fields(a): - ast_diff(getattr(a, f.name), getattr(b, f.name), path + [f.name]) - elif isinstance(a, (list,tuple)): - if len(a) != len(b): - raise Exception(f"Lengths are different for {a} and {b} for path {path}") - for i, (x,y) in enumerate(zip(a, b)): - ast_diff(x,y, path + [i]) - else: - if a != b: - raise Exception(f"Values of {a} and {b} are not same for path {path}") - - inventory.update({ k : v for k, v in dict(locals()).items() diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/ast/to_pql.py index ae036a6..e34a449 100644 --- a/python/src/pql_grammar/ast/to_pql.py +++ b/python/src/pql_grammar/ast/to_pql.py @@ -35,12 +35,6 @@ def __str__(self): return f'({to_r(left)} {op} {to_r(right)})' -class TelExpr(Node): - n: ast.TelExpr - def __str__(self): - return self.n.raw_value - - class Literal(Node): n: ast.Literal def __str__(self): @@ -64,8 +58,8 @@ def __str__(self): fn = self.n.function_name # args are string pairs, not parsed deeper at all. 
args = ','.join([ - f'{n}={v}' if n else f'{v}' - for n,v in (self.n.args or []) + f'{n}={to_r(v)}' if n else f'{to_r(v)}' + for n, v in (self.n.args or []) ]) return f'{fn}({args})' @@ -102,10 +96,11 @@ def __str__(self): def to_r(n: ast.Node): - return renderer_map.get(type(n), Node)(n) - + if isinstance(n, ast.Node): + return renderer_map.get(type(n), Node)(n) + return str(n) -def to_pql(o: ast.Node): +def to_pql(o): return str(to_r(o)) diff --git a/python/src/pql_grammar/ast/tools.py b/python/src/pql_grammar/ast/tools.py new file mode 100644 index 0000000..82d79b6 --- /dev/null +++ b/python/src/pql_grammar/ast/tools.py @@ -0,0 +1,25 @@ +from dataclasses import fields +from typing import Callable, Iterator, Any +from . import model as ast + + +def ast_diff(a, b, path=None): + if not path: + path = [] + + if type(a) != type(b): + raise Exception(f"Types of {a} and {b} are not same {type(a)} != {type(b)} for path {path}") + + path += [type(a).__name__] + + if isinstance(a, ast.Node): + for f in fields(a): + ast_diff(getattr(a, f.name), getattr(b, f.name), path + [f.name]) + elif isinstance(a, (list,tuple)): + if len(a) != len(b): + raise Exception(f"Lengths are different for {a} and {b} for path {path}") + for i, (x,y) in enumerate(zip(a, b)): + ast_diff(x,y, path + [i]) + else: + if a != b: + raise Exception(f"Values of {a} and {b} are not same for path {path}") diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py index 008f90b..3e68377 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -4,6 +4,7 @@ sys.path.append('./src') from pql_grammar.ast import model as ast +from pql_grammar.ast.tools import ast_diff from pql_grammar.ast.to_json import to_json from pql_grammar.ast.from_json import from_json @@ -19,19 +20,48 @@ ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), ast.Column( - ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', + 'ns3', + True + ), + ast.Expr( + '-', + [ + ast.Taxon('slug'), + ast.Literal(1234, '1234') + ] + ) + ] + ), None, ast.Taxon('custom_data', 'myns') ), ast.Column( - ast.TelExpr('ns3|taxon3 + 5'), + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', + 'ns3', + ), + ast.Literal(5, '5'), + ] + ), ast.Function( 'TypeCast' ), ast.Taxon('custom_data_cast', 'myns') ), ast.Column( - ast.TelExpr('fn_4(fn_1(slug))'), + ast.Function('fn_4', [ + [None, ast.Function('fn_1', [ + [None, ast.Taxon('slug')] + ])] + ]), ast.Function( 'TypeCast', [['arg1','value1']] # normally inner pair is a tuple, but for comparison making list. 
@@ -104,8 +134,32 @@ { "__typename": "Column", "value": { - "__typename": "TelExpr", - "raw_value": "?ns3|taxon3 + (slug2 - 1234)" + "__typename": "Expr", + "operator": "+", + "args": [ + { + "__typename": "Taxon", + "slug": "taxon3", + "namespace": "ns3", + "is_optional": true + }, + { + "__typename": "Expr", + "operator": "-", + "args": [ + { + "__typename": "Taxon", + "slug": "slug", + "is_optional": false + }, + { + "__typename": "Literal", + "value": 1234, + "raw_value": "1234" + } + ] + } + ] }, "alias": { "__typename": "Taxon", @@ -117,8 +171,21 @@ { "__typename": "Column", "value": { - "__typename": "TelExpr", - "raw_value": "ns3|taxon3 + 5" + "__typename": "Expr", + "operator": "+", + "args": [ + { + "__typename": "Taxon", + "slug": "taxon3", + "namespace": "ns3", + "is_optional": false + }, + { + "__typename": "Literal", + "value": 5, + "raw_value": "5" + } + ] }, "type_cast": { "__typename": "Function", @@ -134,8 +201,27 @@ { "__typename": "Column", "value": { - "__typename": "TelExpr", - "raw_value": "fn_4(fn_1(slug))" + "__typename": "Function", + "function_name": "fn_4", + "args": [ + [ + null, + { + "__typename": "Function", + "function_name": "fn_1", + "args": [ + [ + null, + { + "__typename": "Taxon", + "slug": "slug", + "is_optional": false + } + ] + ] + } + ] + ] }, "type_cast": { "__typename": "Function", @@ -214,6 +300,5 @@ def test_render_json_from_ast(self): def test_render_ast_from_json(self): ast_result = from_json(json_should_be) # import json; print(json.dumps(json_result, indent=4)) - - ast.ast_diff(ast_should_be, ast_result) + ast_diff(ast_should_be, ast_result) assert ast_should_be == ast_result diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index b0e7629..42f8f8e 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -26,7 +26,7 @@ def visitErrorNode(self, node): slug1 as myns|slug1, ?ns3|taxon3 + (slug2 - 1234) as myns|custom_data, (ns3|taxon3 + 5)::TypeCast() as myns|custom_data_cast, - fn_4(fn_1(slug))::TypeCast(arg1=value1) + fn_4(fn_1(slug))::TypeCast(arg1='value1') from my_ns, your_ns as super_ns where ns6|taxon6 > 1234 @@ -41,9 +41,9 @@ def visitErrorNode(self, node): ?ns1|taxon1, ns2|taxon2, slug1 AS myns|slug1, - ?ns3|taxon3 + (slug2 - 1234) AS myns|custom_data, + (?ns3|taxon3 + (slug2 - 1234)) AS myns|custom_data, (ns3|taxon3 + 5)::TypeCast() AS myns|custom_data_cast, - (fn_4(fn_1(slug)))::TypeCast(arg1=value1) + (fn_4(fn_1(slug)))::TypeCast(arg1='value1') FROM my_ns, your_ns AS super_ns @@ -58,22 +58,51 @@ def visitErrorNode(self, node): ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), ast.Column( - ast.TelExpr('?ns3|taxon3 + (slug2 - 1234)'), + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', + 'ns3', + True + ), + ast.Expr( + '-', + [ + ast.Taxon('slug2'), + ast.Literal(1234, '1234') + ] + ) + ] + ), None, ast.Taxon('custom_data', 'myns') ), ast.Column( - ast.TelExpr('ns3|taxon3 + 5'), + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', + 'ns3', + ), + ast.Literal(5, '5'), + ] + ), ast.Function( 'TypeCast' ), ast.Taxon('custom_data_cast', 'myns') ), ast.Column( - ast.TelExpr('fn_4(fn_1(slug))'), + ast.Function('fn_4', [ + [None, ast.Function('fn_1', [ + [None, ast.Taxon('slug')] + ])] + ]), ast.Function( 'TypeCast', - [('arg1','value1')] + [['arg1',ast.Literal('value1', "'value1'")]] ), ) ], @@ -166,4 +195,4 @@ def test_parse_from_statement(self): def test_render_pql_from_ast(self): pql_result = to_pql(stmt_should_be) - assert 
pql_rendered_should_be == pql_result + assert pql_result == pql_rendered_should_be From e3bcd07c932b34853131f62c931780d68dcff330 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 15 Nov 2020 13:02:42 -0800 Subject: [PATCH 18/32] make sure that parser speaks ARRAYS not single statements - to reflect reality of having to accept lists of statements --- python/src/pql_grammar/ast/to_pql.py | 11 +++++++++-- python/tests/ast_pql_test.py | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/ast/to_pql.py index e34a449..bfe8d4f 100644 --- a/python/src/pql_grammar/ast/to_pql.py +++ b/python/src/pql_grammar/ast/to_pql.py @@ -1,3 +1,4 @@ +from typing import List from . import model as ast @@ -100,8 +101,14 @@ def to_r(n: ast.Node): return renderer_map.get(type(n), Node)(n) return str(n) -def to_pql(o): - return str(to_r(o)) + +def to_pql(o: List[ast.Node]): + if not isinstance(o, (list, tuple)): + raise AttributeError(f"Argument must be a list of statements.") + return '\n'.join([ + str(to_r(e)) + for e in o + ]) renderer_map.update({ diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index 42f8f8e..bca2ef1 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -194,5 +194,5 @@ def test_parse_from_statement(self): ] def test_render_pql_from_ast(self): - pql_result = to_pql(stmt_should_be) + pql_result = to_pql([stmt_should_be]) assert pql_result == pql_rendered_should_be From bc03863995a234b0ee7e45c292dbe028924e9734 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 15 Nov 2020 13:10:05 -0800 Subject: [PATCH 19/32] add ast.tools.find_all and tests --- python/src/pql_grammar/ast/tools.py | 22 ++++++ python/tests/ast_tools_test.py | 113 ++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) create mode 100644 python/tests/ast_tools_test.py diff --git a/python/src/pql_grammar/ast/tools.py b/python/src/pql_grammar/ast/tools.py index 82d79b6..e304ea7 100644 --- a/python/src/pql_grammar/ast/tools.py +++ b/python/src/pql_grammar/ast/tools.py @@ -23,3 +23,25 @@ def ast_diff(a, b, path=None): else: if a != b: raise Exception(f"Values of {a} and {b} are not same for path {path}") + + +def attr_names(n: ast.Node): + return [ + f.name + for f in fields(n) + ] + + +def find_all(o, rule: Callable) -> Iterator[Any]: + if rule(o): + yield o + + if isinstance(o, ast.Node): + for f in attr_names(o): + yield from find_all(getattr(o, f), rule) + elif isinstance(o, (list,tuple)): + for e in o: + yield from find_all(e, rule) + elif isinstance(o, dict): + for e in o.values(): + yield from find_all(e, rule) diff --git a/python/tests/ast_tools_test.py b/python/tests/ast_tools_test.py new file mode 100644 index 0000000..286d37f --- /dev/null +++ b/python/tests/ast_tools_test.py @@ -0,0 +1,113 @@ +import sys +from unittest import TestCase + +sys.path.append('./src') + +from pql_grammar.ast import model as ast +from pql_grammar.ast.tools import find_all + + +sample_tree = ast.SelectStmt( + columns = [ + ast.Column(ast.Taxon('taxon1', 'ns1', True)), + ast.Column(ast.Taxon('taxon2', 'ns2', False)), + ast.Column(ast.Literal(5555, '5555')), + ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), + ast.Column( + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', + 'ns3', + True + ), + ast.Expr( + '-', + [ + ast.Taxon('slug'), + ast.Literal(12345, '12345') + ] + ) + ] + ), + None, + ast.Taxon('custom_data', 'myns') + ), + ast.Column( + ast.Expr( + '+', + [ + ast.Taxon( + 'taxon3', 
+ 'ns3', + ), + ast.Literal(5, '5'), + ] + ), + ast.Function( + 'TypeCast' + ), + ast.Taxon('custom_data_cast', 'myns') + ), + ast.Column( + ast.Function('fn_4', [ + [None, ast.Function('fn_1', [ + [None, ast.Taxon('slug')] + ])] + ]), + ast.Function( + 'TypeCast', + [['arg1','value1']] # normally inner pair is a tuple, but for comparison making list. + ), + ) + ], + from_clause = [ + ast.Table('my_ns'), + ast.Table('your_ns', 'super_ns') + ], + where_clause = ast.Expr( + 'AND', + [ + ast.Expr( + '>', + [ + ast.Taxon('taxon6', 'ns6'), + ast.Literal(1234, '1234') + ] + ), + ast.Expr( + '==', + [ + ast.Expr( + '+', + [ + ast.Taxon('taxon10', 'ns0'), + ast.Literal(4321, '4321') + ] + ), + ast.Literal(0, '0') + ] + ) + ] + ) +) + + +class AstToolsTests(TestCase): + maxDiff = None + + def test_find_all(self): + vv = list(find_all( + sample_tree, + lambda o: isinstance(o, ast.Literal) + )) + + assert vv == [ + ast.Literal(value=5555, raw_value='5555'), + ast.Literal(value=12345, raw_value='12345'), + ast.Literal(value=5, raw_value='5'), + ast.Literal(value=1234, raw_value='1234'), + ast.Literal(value=4321, raw_value='4321'), + ast.Literal(value=0, raw_value='0'), + ] From 26cf2f895bc89c2dabff001b42edbbdfbaf36303 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sun, 15 Nov 2020 22:24:21 -0800 Subject: [PATCH 20/32] allow Node instances to be hashable --- python/src/pql_grammar/ast/from_json.py | 4 +- python/src/pql_grammar/ast/from_pql.py | 26 +++---- python/src/pql_grammar/ast/model.py | 26 ++++--- python/tests/ast_json_test.py | 65 ++++++++++------- python/tests/ast_pql_test.py | 97 +++++++++++++++---------- python/tests/ast_tools_test.py | 74 +++++++++---------- 6 files changed, 161 insertions(+), 131 deletions(-) diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/ast/from_json.py index 49dcb6a..9cc03ea 100644 --- a/python/src/pql_grammar/ast/from_json.py +++ b/python/src/pql_grammar/ast/from_json.py @@ -24,10 +24,10 @@ def from_json(o: dict): raise TypeError(f"'{ex}' While processing {N} {o}") if isinstance(o, (list, tuple)): - return [ + return tuple([ from_json(v) for v in o - ] + ]) if isinstance(o, dict): return { diff --git a/python/src/pql_grammar/ast/from_pql.py b/python/src/pql_grammar/ast/from_pql.py index d9d42d2..ceca049 100644 --- a/python/src/pql_grammar/ast/from_pql.py +++ b/python/src/pql_grammar/ast/from_pql.py @@ -1,6 +1,6 @@ from antlr4 import CommonTokenStream, InputStream, ParserRuleContext from antlr4 import ParserRuleContext -from typing import Optional, Tuple, List, Type +from typing import Optional, Tuple, List, Type, Any from ..antlr.PqlLexer import PqlLexer from ..antlr.PqlParser import PqlParser @@ -70,7 +70,7 @@ def parse_taxon(cls, e: PqlParser.TaxonContext) -> ast.Taxon: ) @classmethod - def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optional[str],str]: + def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optional[str],Any]: e = cls.unwrap_expr_parens(e) o = full_text(e.operator) if o == '=': @@ -79,16 +79,16 @@ def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optiona else: arg_name = None arg_value = cls.parse_expr(e) - return [arg_name, arg_value] + return arg_name, arg_value @classmethod def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: return ast.Function( full_text(e.function_name), - [ + tuple([ cls.parse_function_argument_pair(expr) for expr in e.arguments.expr() - ] if e.arguments else None + ]) if e.arguments else None ) 
@classmethod @@ -156,14 +156,14 @@ def parse_literal_value(e:PqlParser.LiteralValueContext): return v @classmethod - def parse_from_clause_expr(cls, ctx: PqlParser.FromClauseContext) -> List[ast.Table]: - return [ + def parse_from_clause_expr(cls, ctx: PqlParser.FromClauseContext) -> Tuple[ast.Table, ...]: + return tuple([ ast.Table( full_text(table.table_name), full_text(table.table_alias) ) for table in ctx.tables() - ] + ]) @classmethod def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : @@ -178,7 +178,7 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : operator = full_text(v).upper() return ast.Expr( operator, - [cls.parse_expr(ctx.right)] + (cls.parse_expr(ctx.right),) ) v: Optional[str] = full_text(ctx.operator) @@ -188,10 +188,10 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : # with a lot of options for OP value. return ast.Expr( v.upper(), - [ + ( cls.parse_expr(ctx.left), cls.parse_expr(ctx.right) - ] + ) ) v: PqlParser.TaxonContext = ctx.taxon() @@ -231,14 +231,14 @@ def from_pql(pql: str, cls:Type[PqlVisitor] = PqlVisitor) -> List[ast.Node]: class V(cls): def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - columns = [ + columns = tuple([ ast.Column( PqlAntlrToAstParser.parse_expr(column.value), PqlAntlrToAstParser.parse_column_typecast(column.type_cast), PqlAntlrToAstParser.parse_column_alias(column.alias) ) for column in ctx.selectClause().columns() - ] + ]) v = ctx.fromClause() if v: diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py index 0dbc786..91f3702 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/ast/model.py @@ -16,28 +16,30 @@ class Node: pass -@dataclass +@dataclass(eq=True, frozen=True) class Expr(Node): """ arithmetic operation like 'a > b' in Pre-fix notation""" operator: str # some operations are unary. there will be only one arg # most others are left-right, so len would be 2. # rarely there will be len more than 2. - args: List[Any] + args: Tuple -@dataclass +@dataclass(eq=True, frozen=True) class Literal(Node): value: Union[int,float,str,Decimal] raw_value: str -@dataclass +@dataclass(eq=True, frozen=True) class Taxon(Node): slug: str namespace: Optional[str] = None is_optional: Optional[bool] = False tag: Optional[str] = None -@dataclass +CallArgs = Tuple[Optional[str],Any] + +@dataclass(eq=True, frozen=True) class Function(Node): function_name: str # Note, we supported named args. @@ -48,28 +50,28 @@ class Function(Node): # [['arg','value'],['arg2',2]] # fn('value',2) # [[null,'value'],[null,2]] - args: Optional[List[List[Any]]] = None + args: Optional[Tuple[CallArgs, ...]] = None ColumnValue = Union[Expr,Function,Taxon,Literal] -@dataclass +@dataclass(eq=True, frozen=True) class Column(Node): value: ColumnValue type_cast: Optional[Function] = None alias: Optional[Taxon] = None -@dataclass +@dataclass(eq=True, frozen=True) class Table(Node): value: str alias: Optional[str] = None -@dataclass +@dataclass(eq=True, frozen=True) class SelectStmt(Node): - columns: List[Column] - from_clause: Optional[List[Table]] = None + columns: Tuple[Column, ...] 
+ from_clause: Optional[Tuple[Table, ...]] = None where_clause: Optional[Expr] = None -@dataclass +@dataclass(eq=True, frozen=True) class SetStmt(Node): key: str value: str diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py index 3e68377..ff58fa5 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -7,6 +7,7 @@ from pql_grammar.ast.tools import ast_diff from pql_grammar.ast.to_json import to_json from pql_grammar.ast.from_json import from_json +from pql_grammar.ast.tools import find_all null = None @@ -15,14 +16,14 @@ ast_should_be = ast.SelectStmt( - columns = [ + columns = ( ast.Column(ast.Taxon('taxon1', 'ns1', True)), ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', @@ -30,12 +31,12 @@ ), ast.Expr( '-', - [ + ( ast.Taxon('slug'), ast.Literal(1234, '1234') - ] - ) - ] + ), + ), + ), ), None, ast.Taxon('custom_data', 'myns') @@ -43,13 +44,13 @@ ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', ), ast.Literal(5, '5'), - ] + ), ), ast.Function( 'TypeCast' @@ -57,42 +58,42 @@ ast.Taxon('custom_data_cast', 'myns') ), ast.Column( - ast.Function('fn_4', [ - [None, ast.Function('fn_1', [ - [None, ast.Taxon('slug')] - ])] - ]), + ast.Function('fn_4', ( + (None, ast.Function('fn_1', ( + (None, ast.Taxon('slug')), + ),),), + ),), ast.Function( 'TypeCast', - [['arg1','value1']] # normally inner pair is a tuple, but for comparison making list. + (('arg1','value1'),), # normally inner pair is a tuple, but for comparison making list. ), ) - ], + ), where_clause = ast.Expr( 'AND', - [ + ( ast.Expr( '>', - [ + ( ast.Taxon('taxon6', 'ns6'), - ast.Literal(1234, '1234') - ] + ast.Literal(1234, '1234'), + ), ), ast.Expr( '==', - [ + ( ast.Expr( '+', - [ + ( ast.Taxon('taxon10', 'ns0'), - ast.Literal(4321, '4321') - ] + ast.Literal(4321, '4321'), + ), ), - ast.Literal(0, '0') - ] - ) - ] - ) + ast.Literal(0, '0'), + ), + ), + ), + ), ) @@ -302,3 +303,11 @@ def test_render_ast_from_json(self): # import json; print(json.dumps(json_result, indent=4)) ast_diff(ast_should_be, ast_result) assert ast_should_be == ast_result + + # ensure produced nodes are hashable + set( + find_all( + ast_result, + lambda o: isinstance(o, ast.Node) + ) + ) diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index bca2ef1..299c81c 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -6,6 +6,7 @@ from pql_grammar.ast import model as ast from pql_grammar.ast.to_pql import to_pql from pql_grammar.ast.from_pql import PqlVisitor, PqlParser, from_pql +from pql_grammar.ast.tools import find_all class ErrorAssertingPqlVisitor(PqlVisitor): @@ -53,14 +54,14 @@ def visitErrorNode(self, node): """ stmt_should_be = ast.SelectStmt( - columns = [ + columns = ( ast.Column(ast.Taxon('taxon1', 'ns1', True)), ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', @@ -68,26 +69,26 @@ def visitErrorNode(self, node): ), ast.Expr( '-', - [ + ( ast.Taxon('slug2'), - ast.Literal(1234, '1234') - ] - ) - ] + ast.Literal(1234, '1234'), + ), + ), + ), ), None, - ast.Taxon('custom_data', 'myns') + ast.Taxon('custom_data', 'myns'), ), ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', ), ast.Literal(5, '5'), - ] + ), ), ast.Function( 'TypeCast' @@ -95,46 +96,48 @@ def visitErrorNode(self, node): 
ast.Taxon('custom_data_cast', 'myns') ), ast.Column( - ast.Function('fn_4', [ - [None, ast.Function('fn_1', [ - [None, ast.Taxon('slug')] - ])] - ]), + ast.Function('fn_4', ( + (None, ast.Function('fn_1', ( + (None, ast.Taxon('slug')), + ),),), + ),), ast.Function( 'TypeCast', - [['arg1',ast.Literal('value1', "'value1'")]] + ( + ('arg1',ast.Literal('value1', "'value1'")), + ), ), - ) - ], - from_clause = [ + ), + ), + from_clause = ( ast.Table('my_ns'), ast.Table('your_ns', 'super_ns') - ], + ), where_clause = ast.Expr( 'AND', - [ + ( ast.Expr( '>', - [ + ( ast.Taxon('taxon6', 'ns6'), - ast.Literal(1234, '1234') - ] + ast.Literal(1234, '1234'), + ), ), ast.Expr( '==', - [ + ( ast.Expr( '+', - [ + ( ast.Taxon('taxon10', 'ns0'), - ast.Literal(4321, '4321') - ] + ast.Literal(4321, '4321'), + ), ), - ast.Literal(0, '0') - ] - ) - ] - ) + ast.Literal(0, '0'), + ), + ), + ), + ), ) @@ -156,10 +159,10 @@ def test_multiple_statements(self): # Till then, this is mostly a placeholder for future functionality assert set_stmt == ast.SetStmt('fill_empty_dates', 'true') assert select_stmt == ast.SelectStmt( - columns=[ + columns=( ast.Column(ast.Taxon('a')), - ast.Column(ast.Taxon('b')) - ] + ast.Column(ast.Taxon('b')), + ) ) def test_select(self): @@ -173,6 +176,14 @@ def test_select(self): # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) assert stmt.where_clause == stmt_should_be.where_clause + # ensure produced nodes are hashable + set( + find_all( + stmt, + lambda o: isinstance(o, ast.Node) + ) + ) + def test_parse_from_statement(self): pql_input = """\ SELECT @@ -188,10 +199,18 @@ def test_parse_from_statement(self): statements = from_pql(pql_input, ErrorAssertingPqlVisitor) assert len(statements) == 1 select_stmt: ast.SelectStmt = statements[0] - assert select_stmt.from_clause == [ + assert select_stmt.from_clause == ( ast.Table('dataset_one'), - ast.Table('dataset_two', 'two') - ] + ast.Table('dataset_two', 'two'), + ) + + # ensure produced nodes are hashable + set( + find_all( + statements, + lambda o: isinstance(o, ast.Node) + ) + ) def test_render_pql_from_ast(self): pql_result = to_pql([stmt_should_be]) diff --git a/python/tests/ast_tools_test.py b/python/tests/ast_tools_test.py index 286d37f..7640af5 100644 --- a/python/tests/ast_tools_test.py +++ b/python/tests/ast_tools_test.py @@ -8,7 +8,7 @@ sample_tree = ast.SelectStmt( - columns = [ + columns = ( ast.Column(ast.Taxon('taxon1', 'ns1', True)), ast.Column(ast.Taxon('taxon2', 'ns2', False)), ast.Column(ast.Literal(5555, '5555')), @@ -16,81 +16,81 @@ ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', - True + True, ), ast.Expr( '-', - [ + ( ast.Taxon('slug'), - ast.Literal(12345, '12345') - ] - ) - ] + ast.Literal(12345, '12345'), + ), + ), + ), ), None, - ast.Taxon('custom_data', 'myns') + ast.Taxon('custom_data', 'myns'), ), ast.Column( ast.Expr( '+', - [ + ( ast.Taxon( 'taxon3', 'ns3', ), ast.Literal(5, '5'), - ] + ), ), ast.Function( - 'TypeCast' + 'TypeCast', ), - ast.Taxon('custom_data_cast', 'myns') + ast.Taxon('custom_data_cast', 'myns'), ), ast.Column( - ast.Function('fn_4', [ - [None, ast.Function('fn_1', [ - [None, ast.Taxon('slug')] - ])] - ]), + ast.Function('fn_4', ( + (None, ast.Function('fn_1', ( + (None, ast.Taxon('slug'),), + ),),), + ),), ast.Function( 'TypeCast', - [['arg1','value1']] # normally inner pair is a tuple, but for comparison making list. + (('arg1','value1'),), # normally inner pair is a tuple, but for comparison making list. 
), - ) - ], - from_clause = [ + ), + ), + from_clause = ( ast.Table('my_ns'), - ast.Table('your_ns', 'super_ns') - ], + ast.Table('your_ns', 'super_ns'), + ), where_clause = ast.Expr( 'AND', - [ + ( ast.Expr( '>', - [ + ( ast.Taxon('taxon6', 'ns6'), - ast.Literal(1234, '1234') - ] + ast.Literal(1234, '1234'), + ), ), ast.Expr( '==', - [ + ( ast.Expr( '+', - [ + ( ast.Taxon('taxon10', 'ns0'), - ast.Literal(4321, '4321') - ] + ast.Literal(4321, '4321'), + ), ), - ast.Literal(0, '0') - ] - ) - ] - ) + ast.Literal(0, '0'), + ), + ), + ), + ), ) From dba29df3d0c0785017943fe68a37622bf04bb1d2 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Wed, 2 Dec 2020 20:08:35 -0800 Subject: [PATCH 21/32] add test hashable --- python/tests/ast_tools_test.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/python/tests/ast_tools_test.py b/python/tests/ast_tools_test.py index 7640af5..65cd87b 100644 --- a/python/tests/ast_tools_test.py +++ b/python/tests/ast_tools_test.py @@ -111,3 +111,17 @@ def test_find_all(self): ast.Literal(value=4321, raw_value='4321'), ast.Literal(value=0, raw_value='0'), ] + + def test_nodes_hashable(self): + + a = { + ast.Taxon('slug1'), + ast.Taxon('slug2'), + } + + b = { + ast.Taxon('slug2'), + ast.Taxon('slug1'), + } + + assert a == b From 04df449d769a3a5397e05550e74e8c88d871b22b Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Wed, 2 Dec 2020 20:40:08 -0800 Subject: [PATCH 22/32] don't let robots format. This code is for humans --- python/src/pql_grammar/ast/from_json.py | 2 ++ python/src/pql_grammar/ast/from_pql.py | 2 ++ python/src/pql_grammar/ast/model.py | 15 +++++++++++++++ python/src/pql_grammar/ast/to_json.py | 2 ++ python/src/pql_grammar/ast/to_pql.py | 2 ++ python/src/pql_grammar/ast/tools.py | 2 ++ python/tests/ast_json_test.py | 2 ++ python/tests/ast_pql_test.py | 2 ++ python/tests/ast_tools_test.py | 2 ++ 9 files changed, 31 insertions(+) diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/ast/from_json.py index 9cc03ea..2bc1527 100644 --- a/python/src/pql_grammar/ast/from_json.py +++ b/python/src/pql_grammar/ast/from_json.py @@ -1,3 +1,5 @@ +# fmt: off + from dataclasses import fields from typing import List, Tuple, Any from . import model as ast diff --git a/python/src/pql_grammar/ast/from_pql.py b/python/src/pql_grammar/ast/from_pql.py index ceca049..d131121 100644 --- a/python/src/pql_grammar/ast/from_pql.py +++ b/python/src/pql_grammar/ast/from_pql.py @@ -1,3 +1,5 @@ +# fmt: off + from antlr4 import CommonTokenStream, InputStream, ParserRuleContext from antlr4 import ParserRuleContext from typing import Optional, Tuple, List, Type, Any diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/ast/model.py index 91f3702..fe0c4e0 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/ast/model.py @@ -1,3 +1,10 @@ +""" +- Intentionally minimalistic set of nodes to express AST of SQL-like statements and expressions +- Intentionally "frozen" (immutable) to prevent attempts to modify in-place and to allow set-like and eq operations + +""" +# fmt: off + from dataclasses import dataclass from decimal import Decimal from typing import ( @@ -16,6 +23,7 @@ class Node: pass + @dataclass(eq=True, frozen=True) class Expr(Node): """ arithmetic operation like 'a > b' in Pre-fix notation""" @@ -25,11 +33,13 @@ class Expr(Node): # rarely there will be len more than 2. 
args: Tuple + @dataclass(eq=True, frozen=True) class Literal(Node): value: Union[int,float,str,Decimal] raw_value: str + @dataclass(eq=True, frozen=True) class Taxon(Node): slug: str @@ -39,6 +49,7 @@ class Taxon(Node): CallArgs = Tuple[Optional[str],Any] + @dataclass(eq=True, frozen=True) class Function(Node): function_name: str @@ -54,23 +65,27 @@ class Function(Node): ColumnValue = Union[Expr,Function,Taxon,Literal] + @dataclass(eq=True, frozen=True) class Column(Node): value: ColumnValue type_cast: Optional[Function] = None alias: Optional[Taxon] = None + @dataclass(eq=True, frozen=True) class Table(Node): value: str alias: Optional[str] = None + @dataclass(eq=True, frozen=True) class SelectStmt(Node): columns: Tuple[Column, ...] from_clause: Optional[Tuple[Table, ...]] = None where_clause: Optional[Expr] = None + @dataclass(eq=True, frozen=True) class SetStmt(Node): key: str diff --git a/python/src/pql_grammar/ast/to_json.py b/python/src/pql_grammar/ast/to_json.py index 9fa6814..ed13b50 100644 --- a/python/src/pql_grammar/ast/to_json.py +++ b/python/src/pql_grammar/ast/to_json.py @@ -1,3 +1,5 @@ +# fmt: off + from dataclasses import fields from . import model as ast diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/ast/to_pql.py index bfe8d4f..54996bc 100644 --- a/python/src/pql_grammar/ast/to_pql.py +++ b/python/src/pql_grammar/ast/to_pql.py @@ -1,3 +1,5 @@ +# fmt: off + from typing import List from . import model as ast diff --git a/python/src/pql_grammar/ast/tools.py b/python/src/pql_grammar/ast/tools.py index e304ea7..fc262c6 100644 --- a/python/src/pql_grammar/ast/tools.py +++ b/python/src/pql_grammar/ast/tools.py @@ -1,3 +1,5 @@ +# fmt: off + from dataclasses import fields from typing import Callable, Iterator, Any from . 
import model as ast diff --git a/python/tests/ast_json_test.py b/python/tests/ast_json_test.py index ff58fa5..98a001d 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -1,3 +1,5 @@ +# fmt: off + import sys from unittest import TestCase diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index 299c81c..913f6cb 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -1,3 +1,5 @@ +# fmt: off + import sys from unittest import TestCase diff --git a/python/tests/ast_tools_test.py b/python/tests/ast_tools_test.py index 65cd87b..a697ce2 100644 --- a/python/tests/ast_tools_test.py +++ b/python/tests/ast_tools_test.py @@ -1,3 +1,5 @@ +# fmt: off + import sys from unittest import TestCase From 53ead6468fe2a6ef072a31600dd16edd261b366a Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 4 Dec 2020 18:09:48 -0800 Subject: [PATCH 23/32] reduce scope to TEL only --- grammar/PqlLexer.g4 | 10 - grammar/PqlParser.g4 | 74 +- python/src/pql_grammar/antlr/PqlLexer.py | 460 +++--- python/src/pql_grammar/antlr/PqlParser.py | 1316 ++--------------- .../pql_grammar/antlr/PqlParserListener.py | 117 -- .../src/pql_grammar/antlr/PqlParserVisitor.py | 65 - python/src/pql_grammar/ast/__init__.py | 0 python/src/pql_grammar/{ast => }/from_json.py | 2 - python/src/pql_grammar/{ast => }/from_pql.py | 127 +- python/src/pql_grammar/{ast => }/model.py | 29 - python/src/pql_grammar/{ast => }/to_json.py | 0 python/src/pql_grammar/{ast => }/to_pql.py | 40 +- python/src/pql_grammar/{ast => }/tools.py | 0 python/tests/ast_json_test.py | 310 ++-- python/tests/ast_pql_test.py | 240 +-- python/tests/ast_tools_test.py | 106 +- .../grammar_test.py => tel_grammar_test.py} | 62 +- 17 files changed, 610 insertions(+), 2348 deletions(-) delete mode 100644 python/src/pql_grammar/ast/__init__.py rename python/src/pql_grammar/{ast => }/from_json.py (92%) rename python/src/pql_grammar/{ast => }/from_pql.py (64%) rename python/src/pql_grammar/{ast => }/model.py (73%) rename python/src/pql_grammar/{ast => }/to_json.py (100%) rename python/src/pql_grammar/{ast => }/to_pql.py (60%) rename python/src/pql_grammar/{ast => }/tools.py (100%) rename python/tests/{tel/grammar_test.py => tel_grammar_test.py} (58%) diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 index 9c381f3..1da2e99 100644 --- a/grammar/PqlLexer.g4 +++ b/grammar/PqlLexer.g4 @@ -34,25 +34,15 @@ UNDER: '_'; // SQL keywords we adapt: K_AND : A N D; -K_AS : A S; -K_ASC : A S C; -K_BY : B Y; -K_DESC : D E S C; K_FALSE : F A L S E; -K_FROM : F R O M ; K_IS : I S; K_ISNULL : I S N U L L; K_LIKE : L I K E; -K_LIMIT : L I M I T; K_NOT : N O T; K_NOTNULL : N O T N U L L; K_NULL : N U L L; K_OR : O R; -K_ORDER : O R D E R; -K_SELECT : S E L E C T; -K_SET : S E T; K_TRUE : T R U E; -K_WHERE : W H E R E; NUMERIC_LITERAL : DIGIT+ ( '.' DIGIT* )? ( E [-+]? DIGIT+ )? 
diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 4113f0f..7798798 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -1,10 +1,12 @@ /* SQL-inspired "Pano Query Language" syntax -Subset of SQL Select statement with just 2 clauses supported: - - select plethora of taxons and TEL expressions combination - - where clause supporting plethora of taxons and TEL expressions logical comparisons +focusing on Expressions -Subset of https://github.com/panoramichq/entity-tree-sql-service/blob/master/src/sql/SQLSelect.g4 +Weird parts: +- Taxon is a SQL-column-like object with similar heritage (namespace etc) + and extra syntax for optionality +- Some operator characters are more "programming" than SQL + Example: Eq compare '==' vs SQL-like '=' (though '=' could be converted to '==' internally) */ parser grammar PqlParser; @@ -13,70 +15,8 @@ options { tokenVocab = PqlLexer; } -// we have 2 entry points: -// parse Tel expression: +// entry point parseTel: expr EOF ; -// parse PQL statements with TEL inside: -parsePql : ( sqlStmtList )* EOF ; - -sqlStmtList - : ';'* sqlStmt ( ';'+ sqlStmt )* ';'* - ; - -// this is where you add more statement types, like SET and other top-level SQL statements -sqlStmt - : setStmt - | selectStmt - ; - -// a way to set query context settings and avoid sending them inside PQL -// Example: set "fill in dates for date-ranged sparse data" flag for Husky. -setStmt - : K_SET key=identifierMultipart ASSIGN value=expr - ; - -selectStmt - : selectClause - ( fromClause )? - ( whereClause )? - ( orderByClause )? - ( limitClause )? - ; - -selectClause: K_SELECT columns ( COMMA columns )* ; -// Column is a complicated structure of many parts: -// {tel expression (includes taxon)}{::Type Cast function or token} {{AS} taxon-like} -// Example: -// (?ns3|taxon3 + (slug2 - 1234))::TypeHint(agg=ave) as ns1|custom_data1, -columns: value=expr (COLON COLON type_cast=function)? (K_AS alias=taxon)? ; -// TypeCasting with ::TypeCast() conflicts with end of taxon ":tag" -// This means that typecasting cannot be used on naked taxon -// Must wrap whatever expression into parens or other non-taxon before Type Casting -// WRONG: -// ns1|taxon:tag:TypeCast() -// ns1|taxon::TypeCast() -// CORRECT: -// (ns1|taxon:tag)::TypeCast() -// (ns1|taxon)::TypeCast() -// While SQL allows non-function and function type casts, -// we stick with requireing parens always for simplicity of syntax parser. - -fromClause: K_FROM tables (COMMA tables)* ; -tables: table_name=identifierMultipart ( K_AS? table_alias=identifierMultipart )? ; - -whereClause: K_WHERE expr; - -orderByClause - : K_ORDER K_BY orderExpr ( COMMA orderExpr )* - ; - -orderExpr - : expr ( K_ASC | K_DESC )? - ; - -limitClause - : K_LIMIT limit=expr // ( ( K_OFFSET | COMMA ) expr )? 
- ; expr : unary_operator=( MINUS | PLUS | K_NOT ) right=expr diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index 65332e9..9649113 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2=") - buf.write("\u020f\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\63") + buf.write("\u01ca\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -20,50 +20,43 @@ def serializedATN(): buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") - buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") - buf.write("U\4V\tV\4W\tW\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5") - buf.write("\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3") - buf.write("\t\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17") - buf.write("\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24") - buf.write("\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32") - buf.write("\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36") - buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3") - buf.write("\"\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3%\3%\3%\3&\3&\3&") - buf.write("\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3)") - buf.write("\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3") - buf.write(",\3,\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3") - buf.write("/\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3") - buf.write("\61\3\62\6\62\u0155\n\62\r\62\16\62\u0156\3\62\3\62\7") - buf.write("\62\u015b\n\62\f\62\16\62\u015e\13\62\5\62\u0160\n\62") - buf.write("\3\62\3\62\5\62\u0164\n\62\3\62\6\62\u0167\n\62\r\62\16") - buf.write("\62\u0168\5\62\u016b\n\62\3\62\3\62\6\62\u016f\n\62\r") - buf.write("\62\16\62\u0170\3\62\3\62\5\62\u0175\n\62\3\62\6\62\u0178") - buf.write("\n\62\r\62\16\62\u0179\5\62\u017c\n\62\5\62\u017e\n\62") - buf.write("\3\63\3\63\3\64\3\64\3\64\3\64\7\64\u0186\n\64\f\64\16") - buf.write("\64\u0189\13\64\3\64\3\64\3\65\3\65\3\65\3\65\7\65\u0191") - buf.write("\n\65\f\65\16\65\u0194\13\65\3\65\3\65\3\66\3\66\3\67") - buf.write("\3\67\3\67\3\67\7\67\u019e\n\67\f\67\16\67\u01a1\13\67") - buf.write("\3\67\3\67\38\38\38\38\78\u01a9\n8\f8\168\u01ac\138\3") - buf.write("8\38\39\39\39\39\39\59\u01b5\n9\39\79\u01b8\n9\f9\169") - buf.write("\u01bb\139\39\39\3:\3:\3:\3:\7:\u01c3\n:\f:\16:\u01c6") - buf.write("\13:\3:\3:\3:\5:\u01cb\n:\3:\3:\3;\3;\3;\3;\3<\3<\7<\u01d5") - buf.write("\n<\f<\16<\u01d8\13<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B") - buf.write("\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3") - buf.write("K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3S\3S\3") - buf.write("T\3T\3U\3U\3V\3V\3W\3W\3\u01c4\2X\3\3\5\4\7\5\t\6\13\7") - buf.write("\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21") - buf.write("!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67") - buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61") - buf.write("a\62c\63e\64g\65i\66k\67m8o9q:s;u\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3") 
+ buf.write("G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3\u017f\2N\3") + buf.write("\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16") + buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61") + buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*") + buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\2g\2i\2k\2m\2o\2q\2s\2u\2") + buf.write("w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087\2\u0089") + buf.write("\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095\2\u0097") + buf.write("\2\u0099\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2") buf.write("\13\r\17\17\"\"\5\2C\\aac|\6\2\62;C\\aac|\3\2\62;\4\2") buf.write("CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4") buf.write("\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPp") buf.write("p\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2") - buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u020c") + buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01c7") buf.write("\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13") buf.write("\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3") buf.write("\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2") @@ -74,175 +67,149 @@ def serializedATN(): buf.write("A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2") buf.write("\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2") buf.write("\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2") - buf.write("\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3") - buf.write("\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q") - buf.write("\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\3\u00af\3\2") - buf.write("\2\2\5\u00b2\3\2\2\2\7\u00b5\3\2\2\2\t\u00b8\3\2\2\2\13") - buf.write("\u00bb\3\2\2\2\r\u00be\3\2\2\2\17\u00c1\3\2\2\2\21\u00c4") - buf.write("\3\2\2\2\23\u00c7\3\2\2\2\25\u00ca\3\2\2\2\27\u00cc\3") - buf.write("\2\2\2\31\u00ce\3\2\2\2\33\u00d0\3\2\2\2\35\u00d2\3\2") - buf.write("\2\2\37\u00d4\3\2\2\2!\u00d6\3\2\2\2#\u00d8\3\2\2\2%\u00da") - buf.write("\3\2\2\2\'\u00dc\3\2\2\2)\u00de\3\2\2\2+\u00e0\3\2\2\2") - buf.write("-\u00e2\3\2\2\2/\u00e4\3\2\2\2\61\u00e6\3\2\2\2\63\u00e8") - buf.write("\3\2\2\2\65\u00ea\3\2\2\2\67\u00ec\3\2\2\29\u00ee\3\2") - buf.write("\2\2;\u00f0\3\2\2\2=\u00f4\3\2\2\2?\u00f7\3\2\2\2A\u00fb") - buf.write("\3\2\2\2C\u00fe\3\2\2\2E\u0103\3\2\2\2G\u0109\3\2\2\2") - buf.write("I\u010e\3\2\2\2K\u0111\3\2\2\2M\u0118\3\2\2\2O\u011d\3") - buf.write("\2\2\2Q\u0123\3\2\2\2S\u0127\3\2\2\2U\u012f\3\2\2\2W\u0134") - buf.write("\3\2\2\2Y\u0137\3\2\2\2[\u013d\3\2\2\2]\u0144\3\2\2\2") - buf.write("_\u0148\3\2\2\2a\u014d\3\2\2\2c\u017d\3\2\2\2e\u017f\3") - buf.write("\2\2\2g\u0181\3\2\2\2i\u018c\3\2\2\2k\u0197\3\2\2\2m\u0199") - buf.write("\3\2\2\2o\u01a4\3\2\2\2q\u01b4\3\2\2\2s\u01be\3\2\2\2") - buf.write("u\u01ce\3\2\2\2w\u01d2\3\2\2\2y\u01d9\3\2\2\2{\u01db\3") - buf.write("\2\2\2}\u01dd\3\2\2\2\177\u01df\3\2\2\2\u0081\u01e1\3") - buf.write("\2\2\2\u0083\u01e3\3\2\2\2\u0085\u01e5\3\2\2\2\u0087\u01e7") - buf.write("\3\2\2\2\u0089\u01e9\3\2\2\2\u008b\u01eb\3\2\2\2\u008d") - buf.write("\u01ed\3\2\2\2\u008f\u01ef\3\2\2\2\u0091\u01f1\3\2\2\2") - buf.write("\u0093\u01f3\3\2\2\2\u0095\u01f5\3\2\2\2\u0097\u01f7\3") - buf.write("\2\2\2\u0099\u01f9\3\2\2\2\u009b\u01fb\3\2\2\2\u009d\u01fd") - buf.write("\3\2\2\2\u009f\u01ff\3\2\2\2\u00a1\u0201\3\2\2\2\u00a3") - buf.write("\u0203\3\2\2\2\u00a5\u0205\3\2\2\2\u00a7\u0207\3\2\2\2") - buf.write("\u00a9\u0209\3\2\2\2\u00ab\u020b\3\2\2\2\u00ad\u020d\3") - buf.write("\2\2\2\u00af\u00b0\7(\2\2\u00b0\u00b1\7(\2\2\u00b1\4\3") - 
buf.write("\2\2\2\u00b2\u00b3\7?\2\2\u00b3\u00b4\7?\2\2\u00b4\6\3") - buf.write("\2\2\2\u00b5\u00b6\7@\2\2\u00b6\u00b7\7?\2\2\u00b7\b\3") - buf.write("\2\2\2\u00b8\u00b9\7>\2\2\u00b9\u00ba\7?\2\2\u00ba\n\3") - buf.write("\2\2\2\u00bb\u00bc\7#\2\2\u00bc\u00bd\7?\2\2\u00bd\f\3") - buf.write("\2\2\2\u00be\u00bf\7>\2\2\u00bf\u00c0\7@\2\2\u00c0\16") - buf.write("\3\2\2\2\u00c1\u00c2\7~\2\2\u00c2\u00c3\7~\2\2\u00c3\20") - buf.write("\3\2\2\2\u00c4\u00c5\7>\2\2\u00c5\u00c6\7>\2\2\u00c6\22") - buf.write("\3\2\2\2\u00c7\u00c8\7@\2\2\u00c8\u00c9\7@\2\2\u00c9\24") - buf.write("\3\2\2\2\u00ca\u00cb\7(\2\2\u00cb\26\3\2\2\2\u00cc\u00cd") - buf.write("\7?\2\2\u00cd\30\3\2\2\2\u00ce\u00cf\7+\2\2\u00cf\32\3") - buf.write("\2\2\2\u00d0\u00d1\7<\2\2\u00d1\34\3\2\2\2\u00d2\u00d3") - buf.write("\7.\2\2\u00d3\36\3\2\2\2\u00d4\u00d5\7\60\2\2\u00d5 \3") - buf.write("\2\2\2\u00d6\u00d7\7\61\2\2\u00d7\"\3\2\2\2\u00d8\u00d9") - buf.write("\7@\2\2\u00d9$\3\2\2\2\u00da\u00db\7>\2\2\u00db&\3\2\2") - buf.write("\2\u00dc\u00dd\7/\2\2\u00dd(\3\2\2\2\u00de\u00df\7\'\2") - buf.write("\2\u00df*\3\2\2\2\u00e0\u00e1\7*\2\2\u00e1,\3\2\2\2\u00e2") - buf.write("\u00e3\7~\2\2\u00e3.\3\2\2\2\u00e4\u00e5\7-\2\2\u00e5") - buf.write("\60\3\2\2\2\u00e6\u00e7\7A\2\2\u00e7\62\3\2\2\2\u00e8") - buf.write("\u00e9\7=\2\2\u00e9\64\3\2\2\2\u00ea\u00eb\7,\2\2\u00eb") - buf.write("\66\3\2\2\2\u00ec\u00ed\7\u0080\2\2\u00ed8\3\2\2\2\u00ee") - buf.write("\u00ef\7a\2\2\u00ef:\3\2\2\2\u00f0\u00f1\5{>\2\u00f1\u00f2") - buf.write("\5\u0095K\2\u00f2\u00f3\5\u0081A\2\u00f3<\3\2\2\2\u00f4") - buf.write("\u00f5\5{>\2\u00f5\u00f6\5\u009fP\2\u00f6>\3\2\2\2\u00f7") - buf.write("\u00f8\5{>\2\u00f8\u00f9\5\u009fP\2\u00f9\u00fa\5\177") - buf.write("@\2\u00fa@\3\2\2\2\u00fb\u00fc\5}?\2\u00fc\u00fd\5\u00ab") - buf.write("V\2\u00fdB\3\2\2\2\u00fe\u00ff\5\u0081A\2\u00ff\u0100") - buf.write("\5\u0083B\2\u0100\u0101\5\u009fP\2\u0101\u0102\5\177@") - buf.write("\2\u0102D\3\2\2\2\u0103\u0104\5\u0085C\2\u0104\u0105\5") - buf.write("{>\2\u0105\u0106\5\u0091I\2\u0106\u0107\5\u009fP\2\u0107") - buf.write("\u0108\5\u0083B\2\u0108F\3\2\2\2\u0109\u010a\5\u0085C") - buf.write("\2\u010a\u010b\5\u009dO\2\u010b\u010c\5\u0097L\2\u010c") - buf.write("\u010d\5\u0093J\2\u010dH\3\2\2\2\u010e\u010f\5\u008bF") - buf.write("\2\u010f\u0110\5\u009fP\2\u0110J\3\2\2\2\u0111\u0112\5") - buf.write("\u008bF\2\u0112\u0113\5\u009fP\2\u0113\u0114\5\u0095K") - buf.write("\2\u0114\u0115\5\u00a3R\2\u0115\u0116\5\u0091I\2\u0116") - buf.write("\u0117\5\u0091I\2\u0117L\3\2\2\2\u0118\u0119\5\u0091I") - buf.write("\2\u0119\u011a\5\u008bF\2\u011a\u011b\5\u008fH\2\u011b") - buf.write("\u011c\5\u0083B\2\u011cN\3\2\2\2\u011d\u011e\5\u0091I") - buf.write("\2\u011e\u011f\5\u008bF\2\u011f\u0120\5\u0093J\2\u0120") - buf.write("\u0121\5\u008bF\2\u0121\u0122\5\u00a1Q\2\u0122P\3\2\2") - buf.write("\2\u0123\u0124\5\u0095K\2\u0124\u0125\5\u0097L\2\u0125") - buf.write("\u0126\5\u00a1Q\2\u0126R\3\2\2\2\u0127\u0128\5\u0095K") - buf.write("\2\u0128\u0129\5\u0097L\2\u0129\u012a\5\u00a1Q\2\u012a") - buf.write("\u012b\5\u0095K\2\u012b\u012c\5\u00a3R\2\u012c\u012d\5") - buf.write("\u0091I\2\u012d\u012e\5\u0091I\2\u012eT\3\2\2\2\u012f") - buf.write("\u0130\5\u0095K\2\u0130\u0131\5\u00a3R\2\u0131\u0132\5") - buf.write("\u0091I\2\u0132\u0133\5\u0091I\2\u0133V\3\2\2\2\u0134") - buf.write("\u0135\5\u0097L\2\u0135\u0136\5\u009dO\2\u0136X\3\2\2") - buf.write("\2\u0137\u0138\5\u0097L\2\u0138\u0139\5\u009dO\2\u0139") - buf.write("\u013a\5\u0081A\2\u013a\u013b\5\u0083B\2\u013b\u013c\5") - 
buf.write("\u009dO\2\u013cZ\3\2\2\2\u013d\u013e\5\u009fP\2\u013e") - buf.write("\u013f\5\u0083B\2\u013f\u0140\5\u0091I\2\u0140\u0141\5") - buf.write("\u0083B\2\u0141\u0142\5\177@\2\u0142\u0143\5\u00a1Q\2") - buf.write("\u0143\\\3\2\2\2\u0144\u0145\5\u009fP\2\u0145\u0146\5") - buf.write("\u0083B\2\u0146\u0147\5\u00a1Q\2\u0147^\3\2\2\2\u0148") - buf.write("\u0149\5\u00a1Q\2\u0149\u014a\5\u009dO\2\u014a\u014b\5") - buf.write("\u00a3R\2\u014b\u014c\5\u0083B\2\u014c`\3\2\2\2\u014d") - buf.write("\u014e\5\u00a7T\2\u014e\u014f\5\u0089E\2\u014f\u0150\5") - buf.write("\u0083B\2\u0150\u0151\5\u009dO\2\u0151\u0152\5\u0083B") - buf.write("\2\u0152b\3\2\2\2\u0153\u0155\5y=\2\u0154\u0153\3\2\2") - buf.write("\2\u0155\u0156\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157") - buf.write("\3\2\2\2\u0157\u015f\3\2\2\2\u0158\u015c\7\60\2\2\u0159") - buf.write("\u015b\5y=\2\u015a\u0159\3\2\2\2\u015b\u015e\3\2\2\2\u015c") - buf.write("\u015a\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0160\3\2\2\2") - buf.write("\u015e\u015c\3\2\2\2\u015f\u0158\3\2\2\2\u015f\u0160\3") - buf.write("\2\2\2\u0160\u016a\3\2\2\2\u0161\u0163\5\u0083B\2\u0162") - buf.write("\u0164\t\2\2\2\u0163\u0162\3\2\2\2\u0163\u0164\3\2\2\2") - buf.write("\u0164\u0166\3\2\2\2\u0165\u0167\5y=\2\u0166\u0165\3\2") - buf.write("\2\2\u0167\u0168\3\2\2\2\u0168\u0166\3\2\2\2\u0168\u0169") - buf.write("\3\2\2\2\u0169\u016b\3\2\2\2\u016a\u0161\3\2\2\2\u016a") - buf.write("\u016b\3\2\2\2\u016b\u017e\3\2\2\2\u016c\u016e\7\60\2") - buf.write("\2\u016d\u016f\5y=\2\u016e\u016d\3\2\2\2\u016f\u0170\3") - buf.write("\2\2\2\u0170\u016e\3\2\2\2\u0170\u0171\3\2\2\2\u0171\u017b") - buf.write("\3\2\2\2\u0172\u0174\5\u0083B\2\u0173\u0175\t\2\2\2\u0174") - buf.write("\u0173\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2") - buf.write("\u0176\u0178\5y=\2\u0177\u0176\3\2\2\2\u0178\u0179\3\2") - buf.write("\2\2\u0179\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u017c") - buf.write("\3\2\2\2\u017b\u0172\3\2\2\2\u017b\u017c\3\2\2\2\u017c") - buf.write("\u017e\3\2\2\2\u017d\u0154\3\2\2\2\u017d\u016c\3\2\2\2") - buf.write("\u017ed\3\2\2\2\u017f\u0180\5g\64\2\u0180f\3\2\2\2\u0181") - buf.write("\u0187\7$\2\2\u0182\u0183\7^\2\2\u0183\u0186\7$\2\2\u0184") - buf.write("\u0186\n\3\2\2\u0185\u0182\3\2\2\2\u0185\u0184\3\2\2\2") - buf.write("\u0186\u0189\3\2\2\2\u0187\u0185\3\2\2\2\u0187\u0188\3") - buf.write("\2\2\2\u0188\u018a\3\2\2\2\u0189\u0187\3\2\2\2\u018a\u018b") - buf.write("\7$\2\2\u018bh\3\2\2\2\u018c\u0192\7$\2\2\u018d\u018e") - buf.write("\7$\2\2\u018e\u0191\7$\2\2\u018f\u0191\n\3\2\2\u0190\u018d") - buf.write("\3\2\2\2\u0190\u018f\3\2\2\2\u0191\u0194\3\2\2\2\u0192") - buf.write("\u0190\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\3\2\2\2") - buf.write("\u0194\u0192\3\2\2\2\u0195\u0196\7$\2\2\u0196j\3\2\2\2") - buf.write("\u0197\u0198\5m\67\2\u0198l\3\2\2\2\u0199\u019f\7)\2\2") - buf.write("\u019a\u019b\7^\2\2\u019b\u019e\7)\2\2\u019c\u019e\n\4") - buf.write("\2\2\u019d\u019a\3\2\2\2\u019d\u019c\3\2\2\2\u019e\u01a1") - buf.write("\3\2\2\2\u019f\u019d\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0") - buf.write("\u01a2\3\2\2\2\u01a1\u019f\3\2\2\2\u01a2\u01a3\7)\2\2") - buf.write("\u01a3n\3\2\2\2\u01a4\u01aa\7)\2\2\u01a5\u01a6\7)\2\2") - buf.write("\u01a6\u01a9\7)\2\2\u01a7\u01a9\n\4\2\2\u01a8\u01a5\3") - buf.write("\2\2\2\u01a8\u01a7\3\2\2\2\u01a9\u01ac\3\2\2\2\u01aa\u01a8") - buf.write("\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab\u01ad\3\2\2\2\u01ac") - buf.write("\u01aa\3\2\2\2\u01ad\u01ae\7)\2\2\u01aep\3\2\2\2\u01af") - buf.write("\u01b0\7/\2\2\u01b0\u01b5\7/\2\2\u01b1\u01b2\7\61\2\2") - 
buf.write("\u01b2\u01b5\7\61\2\2\u01b3\u01b5\7%\2\2\u01b4\u01af\3") - buf.write("\2\2\2\u01b4\u01b1\3\2\2\2\u01b4\u01b3\3\2\2\2\u01b5\u01b9") - buf.write("\3\2\2\2\u01b6\u01b8\n\5\2\2\u01b7\u01b6\3\2\2\2\u01b8") - buf.write("\u01bb\3\2\2\2\u01b9\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2") - buf.write("\u01ba\u01bc\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bc\u01bd\b") - buf.write("9\2\2\u01bdr\3\2\2\2\u01be\u01bf\7\61\2\2\u01bf\u01c0") - buf.write("\7,\2\2\u01c0\u01c4\3\2\2\2\u01c1\u01c3\13\2\2\2\u01c2") - buf.write("\u01c1\3\2\2\2\u01c3\u01c6\3\2\2\2\u01c4\u01c5\3\2\2\2") - buf.write("\u01c4\u01c2\3\2\2\2\u01c5\u01ca\3\2\2\2\u01c6\u01c4\3") - buf.write("\2\2\2\u01c7\u01c8\7,\2\2\u01c8\u01cb\7\61\2\2\u01c9\u01cb") - buf.write("\7\2\2\3\u01ca\u01c7\3\2\2\2\u01ca\u01c9\3\2\2\2\u01cb") - buf.write("\u01cc\3\2\2\2\u01cc\u01cd\b:\2\2\u01cdt\3\2\2\2\u01ce") - buf.write("\u01cf\t\6\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\b;\2\2") - buf.write("\u01d1v\3\2\2\2\u01d2\u01d6\t\7\2\2\u01d3\u01d5\t\b\2") - buf.write("\2\u01d4\u01d3\3\2\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d4") - buf.write("\3\2\2\2\u01d6\u01d7\3\2\2\2\u01d7x\3\2\2\2\u01d8\u01d6") - buf.write("\3\2\2\2\u01d9\u01da\t\t\2\2\u01daz\3\2\2\2\u01db\u01dc") - buf.write("\t\n\2\2\u01dc|\3\2\2\2\u01dd\u01de\t\13\2\2\u01de~\3") - buf.write("\2\2\2\u01df\u01e0\t\f\2\2\u01e0\u0080\3\2\2\2\u01e1\u01e2") - buf.write("\t\r\2\2\u01e2\u0082\3\2\2\2\u01e3\u01e4\t\16\2\2\u01e4") - buf.write("\u0084\3\2\2\2\u01e5\u01e6\t\17\2\2\u01e6\u0086\3\2\2") - buf.write("\2\u01e7\u01e8\t\20\2\2\u01e8\u0088\3\2\2\2\u01e9\u01ea") - buf.write("\t\21\2\2\u01ea\u008a\3\2\2\2\u01eb\u01ec\t\22\2\2\u01ec") - buf.write("\u008c\3\2\2\2\u01ed\u01ee\t\23\2\2\u01ee\u008e\3\2\2") - buf.write("\2\u01ef\u01f0\t\24\2\2\u01f0\u0090\3\2\2\2\u01f1\u01f2") - buf.write("\t\25\2\2\u01f2\u0092\3\2\2\2\u01f3\u01f4\t\26\2\2\u01f4") - buf.write("\u0094\3\2\2\2\u01f5\u01f6\t\27\2\2\u01f6\u0096\3\2\2") - buf.write("\2\u01f7\u01f8\t\30\2\2\u01f8\u0098\3\2\2\2\u01f9\u01fa") - buf.write("\t\31\2\2\u01fa\u009a\3\2\2\2\u01fb\u01fc\t\32\2\2\u01fc") - buf.write("\u009c\3\2\2\2\u01fd\u01fe\t\33\2\2\u01fe\u009e\3\2\2") - buf.write("\2\u01ff\u0200\t\34\2\2\u0200\u00a0\3\2\2\2\u0201\u0202") - buf.write("\t\35\2\2\u0202\u00a2\3\2\2\2\u0203\u0204\t\36\2\2\u0204") - buf.write("\u00a4\3\2\2\2\u0205\u0206\t\37\2\2\u0206\u00a6\3\2\2") - buf.write("\2\u0207\u0208\t \2\2\u0208\u00a8\3\2\2\2\u0209\u020a") - buf.write("\t!\2\2\u020a\u00aa\3\2\2\2\u020b\u020c\t\"\2\2\u020c") - buf.write("\u00ac\3\2\2\2\u020d\u020e\t#\2\2\u020e\u00ae\3\2\2\2") - buf.write("\33\2\u0156\u015c\u015f\u0163\u0168\u016a\u0170\u0174") - buf.write("\u0179\u017b\u017d\u0185\u0187\u0190\u0192\u019d\u019f") - buf.write("\u01a8\u01aa\u01b4\u01b9\u01c4\u01ca\u01d6\3\2\3\2") + buf.write("\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\3\u009b\3\2\2\2") + buf.write("\5\u009e\3\2\2\2\7\u00a1\3\2\2\2\t\u00a4\3\2\2\2\13\u00a7") + buf.write("\3\2\2\2\r\u00aa\3\2\2\2\17\u00ad\3\2\2\2\21\u00b0\3\2") + buf.write("\2\2\23\u00b3\3\2\2\2\25\u00b6\3\2\2\2\27\u00b8\3\2\2") + buf.write("\2\31\u00ba\3\2\2\2\33\u00bc\3\2\2\2\35\u00be\3\2\2\2") + buf.write("\37\u00c0\3\2\2\2!\u00c2\3\2\2\2#\u00c4\3\2\2\2%\u00c6") + buf.write("\3\2\2\2\'\u00c8\3\2\2\2)\u00ca\3\2\2\2+\u00cc\3\2\2\2") + buf.write("-\u00ce\3\2\2\2/\u00d0\3\2\2\2\61\u00d2\3\2\2\2\63\u00d4") + buf.write("\3\2\2\2\65\u00d6\3\2\2\2\67\u00d8\3\2\2\29\u00da\3\2") + buf.write("\2\2;\u00dc\3\2\2\2=\u00e0\3\2\2\2?\u00e6\3\2\2\2A\u00e9") + buf.write("\3\2\2\2C\u00f0\3\2\2\2E\u00f5\3\2\2\2G\u00f9\3\2\2\2") + 
buf.write("I\u0101\3\2\2\2K\u0106\3\2\2\2M\u0109\3\2\2\2O\u0138\3") + buf.write("\2\2\2Q\u013a\3\2\2\2S\u013c\3\2\2\2U\u0147\3\2\2\2W\u0152") + buf.write("\3\2\2\2Y\u0154\3\2\2\2[\u015f\3\2\2\2]\u016f\3\2\2\2") + buf.write("_\u0179\3\2\2\2a\u0189\3\2\2\2c\u018d\3\2\2\2e\u0194\3") + buf.write("\2\2\2g\u0196\3\2\2\2i\u0198\3\2\2\2k\u019a\3\2\2\2m\u019c") + buf.write("\3\2\2\2o\u019e\3\2\2\2q\u01a0\3\2\2\2s\u01a2\3\2\2\2") + buf.write("u\u01a4\3\2\2\2w\u01a6\3\2\2\2y\u01a8\3\2\2\2{\u01aa\3") + buf.write("\2\2\2}\u01ac\3\2\2\2\177\u01ae\3\2\2\2\u0081\u01b0\3") + buf.write("\2\2\2\u0083\u01b2\3\2\2\2\u0085\u01b4\3\2\2\2\u0087\u01b6") + buf.write("\3\2\2\2\u0089\u01b8\3\2\2\2\u008b\u01ba\3\2\2\2\u008d") + buf.write("\u01bc\3\2\2\2\u008f\u01be\3\2\2\2\u0091\u01c0\3\2\2\2") + buf.write("\u0093\u01c2\3\2\2\2\u0095\u01c4\3\2\2\2\u0097\u01c6\3") + buf.write("\2\2\2\u0099\u01c8\3\2\2\2\u009b\u009c\7(\2\2\u009c\u009d") + buf.write("\7(\2\2\u009d\4\3\2\2\2\u009e\u009f\7?\2\2\u009f\u00a0") + buf.write("\7?\2\2\u00a0\6\3\2\2\2\u00a1\u00a2\7@\2\2\u00a2\u00a3") + buf.write("\7?\2\2\u00a3\b\3\2\2\2\u00a4\u00a5\7>\2\2\u00a5\u00a6") + buf.write("\7?\2\2\u00a6\n\3\2\2\2\u00a7\u00a8\7#\2\2\u00a8\u00a9") + buf.write("\7?\2\2\u00a9\f\3\2\2\2\u00aa\u00ab\7>\2\2\u00ab\u00ac") + buf.write("\7@\2\2\u00ac\16\3\2\2\2\u00ad\u00ae\7~\2\2\u00ae\u00af") + buf.write("\7~\2\2\u00af\20\3\2\2\2\u00b0\u00b1\7>\2\2\u00b1\u00b2") + buf.write("\7>\2\2\u00b2\22\3\2\2\2\u00b3\u00b4\7@\2\2\u00b4\u00b5") + buf.write("\7@\2\2\u00b5\24\3\2\2\2\u00b6\u00b7\7(\2\2\u00b7\26\3") + buf.write("\2\2\2\u00b8\u00b9\7?\2\2\u00b9\30\3\2\2\2\u00ba\u00bb") + buf.write("\7+\2\2\u00bb\32\3\2\2\2\u00bc\u00bd\7<\2\2\u00bd\34\3") + buf.write("\2\2\2\u00be\u00bf\7.\2\2\u00bf\36\3\2\2\2\u00c0\u00c1") + buf.write("\7\60\2\2\u00c1 \3\2\2\2\u00c2\u00c3\7\61\2\2\u00c3\"") + buf.write("\3\2\2\2\u00c4\u00c5\7@\2\2\u00c5$\3\2\2\2\u00c6\u00c7") + buf.write("\7>\2\2\u00c7&\3\2\2\2\u00c8\u00c9\7/\2\2\u00c9(\3\2\2") + buf.write("\2\u00ca\u00cb\7\'\2\2\u00cb*\3\2\2\2\u00cc\u00cd\7*\2") + buf.write("\2\u00cd,\3\2\2\2\u00ce\u00cf\7~\2\2\u00cf.\3\2\2\2\u00d0") + buf.write("\u00d1\7-\2\2\u00d1\60\3\2\2\2\u00d2\u00d3\7A\2\2\u00d3") + buf.write("\62\3\2\2\2\u00d4\u00d5\7=\2\2\u00d5\64\3\2\2\2\u00d6") + buf.write("\u00d7\7,\2\2\u00d7\66\3\2\2\2\u00d8\u00d9\7\u0080\2\2") + buf.write("\u00d98\3\2\2\2\u00da\u00db\7a\2\2\u00db:\3\2\2\2\u00dc") + buf.write("\u00dd\5g\64\2\u00dd\u00de\5\u0081A\2\u00de\u00df\5m\67") + buf.write("\2\u00df<\3\2\2\2\u00e0\u00e1\5q9\2\u00e1\u00e2\5g\64") + buf.write("\2\u00e2\u00e3\5}?\2\u00e3\u00e4\5\u008bF\2\u00e4\u00e5") + buf.write("\5o8\2\u00e5>\3\2\2\2\u00e6\u00e7\5w<\2\u00e7\u00e8\5") + buf.write("\u008bF\2\u00e8@\3\2\2\2\u00e9\u00ea\5w<\2\u00ea\u00eb") + buf.write("\5\u008bF\2\u00eb\u00ec\5\u0081A\2\u00ec\u00ed\5\u008f") + buf.write("H\2\u00ed\u00ee\5}?\2\u00ee\u00ef\5}?\2\u00efB\3\2\2\2") + buf.write("\u00f0\u00f1\5}?\2\u00f1\u00f2\5w<\2\u00f2\u00f3\5{>\2") + buf.write("\u00f3\u00f4\5o8\2\u00f4D\3\2\2\2\u00f5\u00f6\5\u0081") + buf.write("A\2\u00f6\u00f7\5\u0083B\2\u00f7\u00f8\5\u008dG\2\u00f8") + buf.write("F\3\2\2\2\u00f9\u00fa\5\u0081A\2\u00fa\u00fb\5\u0083B") + buf.write("\2\u00fb\u00fc\5\u008dG\2\u00fc\u00fd\5\u0081A\2\u00fd") + buf.write("\u00fe\5\u008fH\2\u00fe\u00ff\5}?\2\u00ff\u0100\5}?\2") + buf.write("\u0100H\3\2\2\2\u0101\u0102\5\u0081A\2\u0102\u0103\5\u008f") + buf.write("H\2\u0103\u0104\5}?\2\u0104\u0105\5}?\2\u0105J\3\2\2\2") + buf.write("\u0106\u0107\5\u0083B\2\u0107\u0108\5\u0089E\2\u0108L") + 
buf.write("\3\2\2\2\u0109\u010a\5\u008dG\2\u010a\u010b\5\u0089E\2") + buf.write("\u010b\u010c\5\u008fH\2\u010c\u010d\5o8\2\u010dN\3\2\2") + buf.write("\2\u010e\u0110\5e\63\2\u010f\u010e\3\2\2\2\u0110\u0111") + buf.write("\3\2\2\2\u0111\u010f\3\2\2\2\u0111\u0112\3\2\2\2\u0112") + buf.write("\u011a\3\2\2\2\u0113\u0117\7\60\2\2\u0114\u0116\5e\63") + buf.write("\2\u0115\u0114\3\2\2\2\u0116\u0119\3\2\2\2\u0117\u0115") + buf.write("\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2\2\2\u0119") + buf.write("\u0117\3\2\2\2\u011a\u0113\3\2\2\2\u011a\u011b\3\2\2\2") + buf.write("\u011b\u0125\3\2\2\2\u011c\u011e\5o8\2\u011d\u011f\t\2") + buf.write("\2\2\u011e\u011d\3\2\2\2\u011e\u011f\3\2\2\2\u011f\u0121") + buf.write("\3\2\2\2\u0120\u0122\5e\63\2\u0121\u0120\3\2\2\2\u0122") + buf.write("\u0123\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2") + buf.write("\u0124\u0126\3\2\2\2\u0125\u011c\3\2\2\2\u0125\u0126\3") + buf.write("\2\2\2\u0126\u0139\3\2\2\2\u0127\u0129\7\60\2\2\u0128") + buf.write("\u012a\5e\63\2\u0129\u0128\3\2\2\2\u012a\u012b\3\2\2\2") + buf.write("\u012b\u0129\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0136\3") + buf.write("\2\2\2\u012d\u012f\5o8\2\u012e\u0130\t\2\2\2\u012f\u012e") + buf.write("\3\2\2\2\u012f\u0130\3\2\2\2\u0130\u0132\3\2\2\2\u0131") + buf.write("\u0133\5e\63\2\u0132\u0131\3\2\2\2\u0133\u0134\3\2\2\2") + buf.write("\u0134\u0132\3\2\2\2\u0134\u0135\3\2\2\2\u0135\u0137\3") + buf.write("\2\2\2\u0136\u012d\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u0139") + buf.write("\3\2\2\2\u0138\u010f\3\2\2\2\u0138\u0127\3\2\2\2\u0139") + buf.write("P\3\2\2\2\u013a\u013b\5S*\2\u013bR\3\2\2\2\u013c\u0142") + buf.write("\7$\2\2\u013d\u013e\7^\2\2\u013e\u0141\7$\2\2\u013f\u0141") + buf.write("\n\3\2\2\u0140\u013d\3\2\2\2\u0140\u013f\3\2\2\2\u0141") + buf.write("\u0144\3\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2") + buf.write("\u0143\u0145\3\2\2\2\u0144\u0142\3\2\2\2\u0145\u0146\7") + buf.write("$\2\2\u0146T\3\2\2\2\u0147\u014d\7$\2\2\u0148\u0149\7") + buf.write("$\2\2\u0149\u014c\7$\2\2\u014a\u014c\n\3\2\2\u014b\u0148") + buf.write("\3\2\2\2\u014b\u014a\3\2\2\2\u014c\u014f\3\2\2\2\u014d") + buf.write("\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150\3\2\2\2") + buf.write("\u014f\u014d\3\2\2\2\u0150\u0151\7$\2\2\u0151V\3\2\2\2") + buf.write("\u0152\u0153\5Y-\2\u0153X\3\2\2\2\u0154\u015a\7)\2\2\u0155") + buf.write("\u0156\7^\2\2\u0156\u0159\7)\2\2\u0157\u0159\n\4\2\2\u0158") + buf.write("\u0155\3\2\2\2\u0158\u0157\3\2\2\2\u0159\u015c\3\2\2\2") + buf.write("\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015d\3") + buf.write("\2\2\2\u015c\u015a\3\2\2\2\u015d\u015e\7)\2\2\u015eZ\3") + buf.write("\2\2\2\u015f\u0165\7)\2\2\u0160\u0161\7)\2\2\u0161\u0164") + buf.write("\7)\2\2\u0162\u0164\n\4\2\2\u0163\u0160\3\2\2\2\u0163") + buf.write("\u0162\3\2\2\2\u0164\u0167\3\2\2\2\u0165\u0163\3\2\2\2") + buf.write("\u0165\u0166\3\2\2\2\u0166\u0168\3\2\2\2\u0167\u0165\3") + buf.write("\2\2\2\u0168\u0169\7)\2\2\u0169\\\3\2\2\2\u016a\u016b") + buf.write("\7/\2\2\u016b\u0170\7/\2\2\u016c\u016d\7\61\2\2\u016d") + buf.write("\u0170\7\61\2\2\u016e\u0170\7%\2\2\u016f\u016a\3\2\2\2") + buf.write("\u016f\u016c\3\2\2\2\u016f\u016e\3\2\2\2\u0170\u0174\3") + buf.write("\2\2\2\u0171\u0173\n\5\2\2\u0172\u0171\3\2\2\2\u0173\u0176") + buf.write("\3\2\2\2\u0174\u0172\3\2\2\2\u0174\u0175\3\2\2\2\u0175") + buf.write("\u0177\3\2\2\2\u0176\u0174\3\2\2\2\u0177\u0178\b/\2\2") + buf.write("\u0178^\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u017b\7,\2") + buf.write("\2\u017b\u017f\3\2\2\2\u017c\u017e\13\2\2\2\u017d\u017c") + 
buf.write("\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u0180\3\2\2\2\u017f") + buf.write("\u017d\3\2\2\2\u0180\u0185\3\2\2\2\u0181\u017f\3\2\2\2") + buf.write("\u0182\u0183\7,\2\2\u0183\u0186\7\61\2\2\u0184\u0186\7") + buf.write("\2\2\3\u0185\u0182\3\2\2\2\u0185\u0184\3\2\2\2\u0186\u0187") + buf.write("\3\2\2\2\u0187\u0188\b\60\2\2\u0188`\3\2\2\2\u0189\u018a") + buf.write("\t\6\2\2\u018a\u018b\3\2\2\2\u018b\u018c\b\61\2\2\u018c") + buf.write("b\3\2\2\2\u018d\u0191\t\7\2\2\u018e\u0190\t\b\2\2\u018f") + buf.write("\u018e\3\2\2\2\u0190\u0193\3\2\2\2\u0191\u018f\3\2\2\2") + buf.write("\u0191\u0192\3\2\2\2\u0192d\3\2\2\2\u0193\u0191\3\2\2") + buf.write("\2\u0194\u0195\t\t\2\2\u0195f\3\2\2\2\u0196\u0197\t\n") + buf.write("\2\2\u0197h\3\2\2\2\u0198\u0199\t\13\2\2\u0199j\3\2\2") + buf.write("\2\u019a\u019b\t\f\2\2\u019bl\3\2\2\2\u019c\u019d\t\r") + buf.write("\2\2\u019dn\3\2\2\2\u019e\u019f\t\16\2\2\u019fp\3\2\2") + buf.write("\2\u01a0\u01a1\t\17\2\2\u01a1r\3\2\2\2\u01a2\u01a3\t\20") + buf.write("\2\2\u01a3t\3\2\2\2\u01a4\u01a5\t\21\2\2\u01a5v\3\2\2") + buf.write("\2\u01a6\u01a7\t\22\2\2\u01a7x\3\2\2\2\u01a8\u01a9\t\23") + buf.write("\2\2\u01a9z\3\2\2\2\u01aa\u01ab\t\24\2\2\u01ab|\3\2\2") + buf.write("\2\u01ac\u01ad\t\25\2\2\u01ad~\3\2\2\2\u01ae\u01af\t\26") + buf.write("\2\2\u01af\u0080\3\2\2\2\u01b0\u01b1\t\27\2\2\u01b1\u0082") + buf.write("\3\2\2\2\u01b2\u01b3\t\30\2\2\u01b3\u0084\3\2\2\2\u01b4") + buf.write("\u01b5\t\31\2\2\u01b5\u0086\3\2\2\2\u01b6\u01b7\t\32\2") + buf.write("\2\u01b7\u0088\3\2\2\2\u01b8\u01b9\t\33\2\2\u01b9\u008a") + buf.write("\3\2\2\2\u01ba\u01bb\t\34\2\2\u01bb\u008c\3\2\2\2\u01bc") + buf.write("\u01bd\t\35\2\2\u01bd\u008e\3\2\2\2\u01be\u01bf\t\36\2") + buf.write("\2\u01bf\u0090\3\2\2\2\u01c0\u01c1\t\37\2\2\u01c1\u0092") + buf.write("\3\2\2\2\u01c2\u01c3\t \2\2\u01c3\u0094\3\2\2\2\u01c4") + buf.write("\u01c5\t!\2\2\u01c5\u0096\3\2\2\2\u01c6\u01c7\t\"\2\2") + buf.write("\u01c7\u0098\3\2\2\2\u01c8\u01c9\t#\2\2\u01c9\u009a\3") + buf.write("\2\2\2\33\2\u0111\u0117\u011a\u011e\u0123\u0125\u012b") + buf.write("\u012f\u0134\u0136\u0138\u0140\u0142\u014b\u014d\u0158") + buf.write("\u015a\u0163\u0165\u016f\u0174\u017f\u0185\u0191\3\2\3") + buf.write("\2") return buf.getvalue() @@ -281,36 +248,26 @@ class PqlLexer(Lexer): TILDE = 27 UNDER = 28 K_AND = 29 - K_AS = 30 - K_ASC = 31 - K_BY = 32 - K_DESC = 33 - K_FALSE = 34 - K_FROM = 35 - K_IS = 36 - K_ISNULL = 37 - K_LIKE = 38 - K_LIMIT = 39 - K_NOT = 40 - K_NOTNULL = 41 - K_NULL = 42 - K_OR = 43 - K_ORDER = 44 - K_SELECT = 45 - K_SET = 46 - K_TRUE = 47 - K_WHERE = 48 - NUMERIC_LITERAL = 49 - DOUBLE_QUOTED_STRING = 50 - DOUBLE_QUOTED_STRING_TEL = 51 - DOUBLE_QUOTED_STRING_SQL = 52 - SINGLE_QUOTED_STRING = 53 - SINGLE_QUOTED_STRING_TEL = 54 - SINGLE_QUOTED_STRING_SQL = 55 - SINGLE_LINE_COMMENT = 56 - MULTILINE_COMMENT = 57 - SPACES = 58 - WORD = 59 + K_FALSE = 30 + K_IS = 31 + K_ISNULL = 32 + K_LIKE = 33 + K_NOT = 34 + K_NOTNULL = 35 + K_NULL = 36 + K_OR = 37 + K_TRUE = 38 + NUMERIC_LITERAL = 39 + DOUBLE_QUOTED_STRING = 40 + DOUBLE_QUOTED_STRING_TEL = 41 + DOUBLE_QUOTED_STRING_SQL = 42 + SINGLE_QUOTED_STRING = 43 + SINGLE_QUOTED_STRING_TEL = 44 + SINGLE_QUOTED_STRING_SQL = 45 + SINGLE_LINE_COMMENT = 46 + MULTILINE_COMMENT = 47 + SPACES = 48 + WORD = 49 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -327,23 +284,20 @@ class PqlLexer(Lexer): "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", 
- "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", "K_FALSE", "K_FROM", - "K_IS", "K_ISNULL", "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", - "K_NULL", "K_OR", "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", - "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", - "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", - "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", - "SPACES", "WORD" ] + "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ] ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", - "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_AS", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_FROM", "K_IS", "K_ISNULL", - "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_FALSE", + "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", + "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD", "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 55fd58e..265cf63 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,103 +11,40 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3=") - buf.write("\u00e4\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") - buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") - buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") - buf.write("\4\24\t\24\4\25\t\25\3\2\3\2\3\2\3\3\7\3/\n\3\f\3\16\3") - buf.write("\62\13\3\3\3\3\3\3\4\7\4\67\n\4\f\4\16\4:\13\4\3\4\3\4") - buf.write("\6\4>\n\4\r\4\16\4?\3\4\7\4C\n\4\f\4\16\4F\13\4\3\4\7") - buf.write("\4I\n\4\f\4\16\4L\13\4\3\5\3\5\5\5P\n\5\3\6\3\6\3\6\3") - buf.write("\6\3\6\3\7\3\7\5\7Y\n\7\3\7\5\7\\\n\7\3\7\5\7_\n\7\3\7") - buf.write("\5\7b\n\7\3\b\3\b\3\b\3\b\7\bh\n\b\f\b\16\bk\13\b\3\t") - buf.write("\3\t\3\t\3\t\5\tq\n\t\3\t\3\t\5\tu\n\t\3\n\3\n\3\n\3\n") - buf.write("\7\n{\n\n\f\n\16\n~\13\n\3\13\3\13\5\13\u0082\n\13\3\13") - buf.write("\5\13\u0085\n\13\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\7\r\u008f") - buf.write("\n\r\f\r\16\r\u0092\13\r\3\16\3\16\5\16\u0096\n\16\3\17") - buf.write("\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20") - buf.write("\3\20\5\20\u00a5\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3") - buf.write("\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20") - buf.write("\3\20\7\20\u00b9\n\20\f\20\16\20\u00bc\13\20\3\21\3\21") - buf.write("\3\21\5\21\u00c1\n\21\3\21\3\21\3\22\3\22\3\22\7\22\u00c8") - buf.write("\n\22\f\22\16\22\u00cb\13\22\3\23\5\23\u00ce\n\23\3\23") - 
buf.write("\3\23\3\23\5\23\u00d3\n\23\3\23\3\23\3\23\5\23\u00d8\n") - buf.write("\23\3\24\3\24\3\24\7\24\u00dd\n\24\f\24\16\24\u00e0\13") - buf.write("\24\3\25\3\25\3\25\2\3\36\26\2\4\6\b\n\f\16\20\22\24\26") - buf.write("\30\32\34\36 \"$&(\2\13\4\2!!##\5\2\25\25\31\31**\5\2") - buf.write("\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4") - buf.write("\4\7\b\r\r&&\4\2\3\3\37\37\4\2\t\t--\7\2$$,,\61\61\63") - buf.write("\64\67\67\2\u00f1\2*\3\2\2\2\4\60\3\2\2\2\68\3\2\2\2\b") - buf.write("O\3\2\2\2\nQ\3\2\2\2\fV\3\2\2\2\16c\3\2\2\2\20l\3\2\2") - buf.write("\2\22v\3\2\2\2\24\177\3\2\2\2\26\u0086\3\2\2\2\30\u0089") - buf.write("\3\2\2\2\32\u0093\3\2\2\2\34\u0097\3\2\2\2\36\u00a4\3") - buf.write("\2\2\2 \u00bd\3\2\2\2\"\u00c4\3\2\2\2$\u00cd\3\2\2\2&") - buf.write("\u00d9\3\2\2\2(\u00e1\3\2\2\2*+\5\36\20\2+,\7\2\2\3,\3") - buf.write("\3\2\2\2-/\5\6\4\2.-\3\2\2\2/\62\3\2\2\2\60.\3\2\2\2\60") - buf.write("\61\3\2\2\2\61\63\3\2\2\2\62\60\3\2\2\2\63\64\7\2\2\3") - buf.write("\64\5\3\2\2\2\65\67\7\33\2\2\66\65\3\2\2\2\67:\3\2\2\2") - buf.write("8\66\3\2\2\289\3\2\2\29;\3\2\2\2:8\3\2\2\2;D\5\b\5\2<") - buf.write(">\7\33\2\2=<\3\2\2\2>?\3\2\2\2?=\3\2\2\2?@\3\2\2\2@A\3") - buf.write("\2\2\2AC\5\b\5\2B=\3\2\2\2CF\3\2\2\2DB\3\2\2\2DE\3\2\2") - buf.write("\2EJ\3\2\2\2FD\3\2\2\2GI\7\33\2\2HG\3\2\2\2IL\3\2\2\2") - buf.write("JH\3\2\2\2JK\3\2\2\2K\7\3\2\2\2LJ\3\2\2\2MP\5\n\6\2NP") - buf.write("\5\f\7\2OM\3\2\2\2ON\3\2\2\2P\t\3\2\2\2QR\7\60\2\2RS\5") - buf.write("&\24\2ST\7\r\2\2TU\5\36\20\2U\13\3\2\2\2VX\5\16\b\2WY") - buf.write("\5\22\n\2XW\3\2\2\2XY\3\2\2\2Y[\3\2\2\2Z\\\5\26\f\2[Z") - buf.write("\3\2\2\2[\\\3\2\2\2\\^\3\2\2\2]_\5\30\r\2^]\3\2\2\2^_") - buf.write("\3\2\2\2_a\3\2\2\2`b\5\34\17\2a`\3\2\2\2ab\3\2\2\2b\r") - buf.write("\3\2\2\2cd\7/\2\2di\5\20\t\2ef\7\20\2\2fh\5\20\t\2ge\3") - buf.write("\2\2\2hk\3\2\2\2ig\3\2\2\2ij\3\2\2\2j\17\3\2\2\2ki\3\2") - buf.write("\2\2lp\5\36\20\2mn\7\17\2\2no\7\17\2\2oq\5 \21\2pm\3\2") - buf.write("\2\2pq\3\2\2\2qt\3\2\2\2rs\7 \2\2su\5$\23\2tr\3\2\2\2") - buf.write("tu\3\2\2\2u\21\3\2\2\2vw\7%\2\2w|\5\24\13\2xy\7\20\2\2") - buf.write("y{\5\24\13\2zx\3\2\2\2{~\3\2\2\2|z\3\2\2\2|}\3\2\2\2}") - buf.write("\23\3\2\2\2~|\3\2\2\2\177\u0084\5&\24\2\u0080\u0082\7") - buf.write(" \2\2\u0081\u0080\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0083") - buf.write("\3\2\2\2\u0083\u0085\5&\24\2\u0084\u0081\3\2\2\2\u0084") - buf.write("\u0085\3\2\2\2\u0085\25\3\2\2\2\u0086\u0087\7\62\2\2\u0087") - buf.write("\u0088\5\36\20\2\u0088\27\3\2\2\2\u0089\u008a\7.\2\2\u008a") - buf.write("\u008b\7\"\2\2\u008b\u0090\5\32\16\2\u008c\u008d\7\20") - buf.write("\2\2\u008d\u008f\5\32\16\2\u008e\u008c\3\2\2\2\u008f\u0092") - buf.write("\3\2\2\2\u0090\u008e\3\2\2\2\u0090\u0091\3\2\2\2\u0091") - buf.write("\31\3\2\2\2\u0092\u0090\3\2\2\2\u0093\u0095\5\36\20\2") - buf.write("\u0094\u0096\t\2\2\2\u0095\u0094\3\2\2\2\u0095\u0096\3") - buf.write("\2\2\2\u0096\33\3\2\2\2\u0097\u0098\7)\2\2\u0098\u0099") - buf.write("\5\36\20\2\u0099\35\3\2\2\2\u009a\u009b\b\20\1\2\u009b") - buf.write("\u009c\t\3\2\2\u009c\u00a5\5\36\20\r\u009d\u009e\7\27") - buf.write("\2\2\u009e\u009f\5\36\20\2\u009f\u00a0\7\16\2\2\u00a0") - buf.write("\u00a5\3\2\2\2\u00a1\u00a5\5(\25\2\u00a2\u00a5\5 \21\2") - buf.write("\u00a3\u00a5\5$\23\2\u00a4\u009a\3\2\2\2\u00a4\u009d\3") - buf.write("\2\2\2\u00a4\u00a1\3\2\2\2\u00a4\u00a2\3\2\2\2\u00a4\u00a3") - buf.write("\3\2\2\2\u00a5\u00ba\3\2\2\2\u00a6\u00a7\f\f\2\2\u00a7") - buf.write("\u00a8\t\4\2\2\u00a8\u00b9\5\36\20\r\u00a9\u00aa\f\13") - 
buf.write("\2\2\u00aa\u00ab\t\5\2\2\u00ab\u00b9\5\36\20\f\u00ac\u00ad") - buf.write("\f\n\2\2\u00ad\u00ae\t\6\2\2\u00ae\u00b9\5\36\20\13\u00af") - buf.write("\u00b0\f\t\2\2\u00b0\u00b1\t\7\2\2\u00b1\u00b9\5\36\20") - buf.write("\n\u00b2\u00b3\f\b\2\2\u00b3\u00b4\t\b\2\2\u00b4\u00b9") - buf.write("\5\36\20\t\u00b5\u00b6\f\7\2\2\u00b6\u00b7\t\t\2\2\u00b7") - buf.write("\u00b9\5\36\20\b\u00b8\u00a6\3\2\2\2\u00b8\u00a9\3\2\2") - buf.write("\2\u00b8\u00ac\3\2\2\2\u00b8\u00af\3\2\2\2\u00b8\u00b2") - buf.write("\3\2\2\2\u00b8\u00b5\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba") - buf.write("\u00b8\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb\37\3\2\2\2\u00bc") - buf.write("\u00ba\3\2\2\2\u00bd\u00be\5&\24\2\u00be\u00c0\7\27\2") - buf.write("\2\u00bf\u00c1\5\"\22\2\u00c0\u00bf\3\2\2\2\u00c0\u00c1") - buf.write("\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c3\7\16\2\2\u00c3") - buf.write("!\3\2\2\2\u00c4\u00c9\5\36\20\2\u00c5\u00c6\7\20\2\2\u00c6") - buf.write("\u00c8\5\36\20\2\u00c7\u00c5\3\2\2\2\u00c8\u00cb\3\2\2") - buf.write("\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca#\3\2") - buf.write("\2\2\u00cb\u00c9\3\2\2\2\u00cc\u00ce\7\32\2\2\u00cd\u00cc") - buf.write("\3\2\2\2\u00cd\u00ce\3\2\2\2\u00ce\u00d2\3\2\2\2\u00cf") - buf.write("\u00d0\5&\24\2\u00d0\u00d1\7\30\2\2\u00d1\u00d3\3\2\2") - buf.write("\2\u00d2\u00cf\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\u00d4") - buf.write("\3\2\2\2\u00d4\u00d7\5&\24\2\u00d5\u00d6\7\17\2\2\u00d6") - buf.write("\u00d8\5&\24\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2") - buf.write("\u00d8%\3\2\2\2\u00d9\u00de\7=\2\2\u00da\u00db\7\21\2") - buf.write("\2\u00db\u00dd\7=\2\2\u00dc\u00da\3\2\2\2\u00dd\u00e0") - buf.write("\3\2\2\2\u00de\u00dc\3\2\2\2\u00de\u00df\3\2\2\2\u00df") - buf.write("\'\3\2\2\2\u00e0\u00de\3\2\2\2\u00e1\u00e2\t\n\2\2\u00e2") - buf.write(")\3\2\2\2\35\608?DJOX[^aipt|\u0081\u0084\u0090\u0095\u00a4") - buf.write("\u00b8\u00ba\u00c0\u00c9\u00cd\u00d2\u00d7\u00de") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\63") + buf.write("]\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") + buf.write("\t\b\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\3\5\3\36\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3\62\n\3\f\3\16\3\65") + buf.write("\13\3\3\4\3\4\3\4\5\4:\n\4\3\4\3\4\3\5\3\5\3\5\7\5A\n") + buf.write("\5\f\5\16\5D\13\5\3\6\5\6G\n\6\3\6\3\6\3\6\5\6L\n\6\3") + buf.write("\6\3\6\3\6\5\6Q\n\6\3\7\3\7\3\7\7\7V\n\7\f\7\16\7Y\13") + buf.write("\7\3\b\3\b\3\b\2\3\4\t\2\4\6\b\n\f\16\2\n\5\2\25\25\31") + buf.write("\31$$\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6\23") + buf.write("\24\6\2\4\4\7\b\r\r!!\4\2\3\3\37\37\4\2\t\t\'\'\6\2 ") + buf.write("&&(*--\2e\2\20\3\2\2\2\4\35\3\2\2\2\6\66\3\2\2\2\b=\3") + buf.write("\2\2\2\nF\3\2\2\2\fR\3\2\2\2\16Z\3\2\2\2\20\21\5\4\3\2") + buf.write("\21\22\7\2\2\3\22\3\3\2\2\2\23\24\b\3\1\2\24\25\t\2\2") + buf.write("\2\25\36\5\4\3\r\26\27\7\27\2\2\27\30\5\4\3\2\30\31\7") + buf.write("\16\2\2\31\36\3\2\2\2\32\36\5\16\b\2\33\36\5\6\4\2\34") + buf.write("\36\5\n\6\2\35\23\3\2\2\2\35\26\3\2\2\2\35\32\3\2\2\2") + buf.write("\35\33\3\2\2\2\35\34\3\2\2\2\36\63\3\2\2\2\37 \f\f\2\2") + buf.write(" !\t\3\2\2!\62\5\4\3\r\"#\f\13\2\2#$\t\4\2\2$\62\5\4\3") + buf.write("\f%&\f\n\2\2&\'\t\5\2\2\'\62\5\4\3\13()\f\t\2\2)*\t\6") + buf.write("\2\2*\62\5\4\3\n+,\f\b\2\2,-\t\7\2\2-\62\5\4\3\t./\f\7") + buf.write("\2\2/\60\t\b\2\2\60\62\5\4\3\b\61\37\3\2\2\2\61\"\3\2") + buf.write("\2\2\61%\3\2\2\2\61(\3\2\2\2\61+\3\2\2\2\61.\3\2\2\2\62") + 
buf.write("\65\3\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\5\3\2\2\2\65") + buf.write("\63\3\2\2\2\66\67\5\f\7\2\679\7\27\2\28:\5\b\5\298\3\2") + buf.write("\2\29:\3\2\2\2:;\3\2\2\2;<\7\16\2\2<\7\3\2\2\2=B\5\4\3") + buf.write("\2>?\7\20\2\2?A\5\4\3\2@>\3\2\2\2AD\3\2\2\2B@\3\2\2\2") + buf.write("BC\3\2\2\2C\t\3\2\2\2DB\3\2\2\2EG\7\32\2\2FE\3\2\2\2F") + buf.write("G\3\2\2\2GK\3\2\2\2HI\5\f\7\2IJ\7\30\2\2JL\3\2\2\2KH\3") + buf.write("\2\2\2KL\3\2\2\2LM\3\2\2\2MP\5\f\7\2NO\7\17\2\2OQ\5\f") + buf.write("\7\2PN\3\2\2\2PQ\3\2\2\2Q\13\3\2\2\2RW\7\63\2\2ST\7\21") + buf.write("\2\2TV\7\63\2\2US\3\2\2\2VY\3\2\2\2WU\3\2\2\2WX\3\2\2") + buf.write("\2X\r\3\2\2\2YW\3\2\2\2Z[\t\t\2\2[\17\3\2\2\2\13\35\61") + buf.write("\639BFKPW") return buf.getvalue() @@ -131,10 +68,8 @@ class PqlParser ( Parser ): "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", - "UNDER", "K_AND", "K_AS", "K_ASC", "K_BY", "K_DESC", - "K_FALSE", "K_FROM", "K_IS", "K_ISNULL", "K_LIKE", - "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_ORDER", "K_SELECT", "K_SET", "K_TRUE", "K_WHERE", + "UNDER", "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", @@ -142,30 +77,14 @@ class PqlParser ( Parser ): "WORD" ] RULE_parseTel = 0 - RULE_parsePql = 1 - RULE_sqlStmtList = 2 - RULE_sqlStmt = 3 - RULE_setStmt = 4 - RULE_selectStmt = 5 - RULE_selectClause = 6 - RULE_columns = 7 - RULE_fromClause = 8 - RULE_tables = 9 - RULE_whereClause = 10 - RULE_orderByClause = 11 - RULE_orderExpr = 12 - RULE_limitClause = 13 - RULE_expr = 14 - RULE_function = 15 - RULE_exprList = 16 - RULE_taxon = 17 - RULE_identifierMultipart = 18 - RULE_literalValue = 19 - - ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "setStmt", - "selectStmt", "selectClause", "columns", "fromClause", - "tables", "whereClause", "orderByClause", "orderExpr", - "limitClause", "expr", "function", "exprList", "taxon", + RULE_expr = 1 + RULE_function = 2 + RULE_exprList = 3 + RULE_taxon = 4 + RULE_identifierMultipart = 5 + RULE_literalValue = 6 + + ruleNames = [ "parseTel", "expr", "function", "exprList", "taxon", "identifierMultipart", "literalValue" ] EOF = Token.EOF @@ -198,36 +117,26 @@ class PqlParser ( Parser ): TILDE=27 UNDER=28 K_AND=29 - K_AS=30 - K_ASC=31 - K_BY=32 - K_DESC=33 - K_FALSE=34 - K_FROM=35 - K_IS=36 - K_ISNULL=37 - K_LIKE=38 - K_LIMIT=39 - K_NOT=40 - K_NOTNULL=41 - K_NULL=42 - K_OR=43 - K_ORDER=44 - K_SELECT=45 - K_SET=46 - K_TRUE=47 - K_WHERE=48 - NUMERIC_LITERAL=49 - DOUBLE_QUOTED_STRING=50 - DOUBLE_QUOTED_STRING_TEL=51 - DOUBLE_QUOTED_STRING_SQL=52 - SINGLE_QUOTED_STRING=53 - SINGLE_QUOTED_STRING_TEL=54 - SINGLE_QUOTED_STRING_SQL=55 - SINGLE_LINE_COMMENT=56 - MULTILINE_COMMENT=57 - SPACES=58 - WORD=59 + K_FALSE=30 + K_IS=31 + K_ISNULL=32 + K_LIKE=33 + K_NOT=34 + K_NOTNULL=35 + K_NULL=36 + K_OR=37 + K_TRUE=38 + NUMERIC_LITERAL=39 + DOUBLE_QUOTED_STRING=40 + DOUBLE_QUOTED_STRING_TEL=41 + DOUBLE_QUOTED_STRING_SQL=42 + SINGLE_QUOTED_STRING=43 + SINGLE_QUOTED_STRING_TEL=44 + SINGLE_QUOTED_STRING_SQL=45 + SINGLE_LINE_COMMENT=46 + MULTILINE_COMMENT=47 + SPACES=48 + WORD=49 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -277,9 +186,9 @@ def parseTel(self): 
self.enterRule(localctx, 0, self.RULE_parseTel) try: self.enterOuterAlt(localctx, 1) - self.state = 40 + self.state = 14 self.expr(0) - self.state = 41 + self.state = 15 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = re @@ -290,957 +199,6 @@ def parseTel(self): return localctx - class ParsePqlContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def EOF(self): - return self.getToken(PqlParser.EOF, 0) - - def sqlStmtList(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.SqlStmtListContext) - else: - return self.getTypedRuleContext(PqlParser.SqlStmtListContext,i) - - - def getRuleIndex(self): - return PqlParser.RULE_parsePql - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterParsePql" ): - listener.enterParsePql(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitParsePql" ): - listener.exitParsePql(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitParsePql" ): - return visitor.visitParsePql(self) - else: - return visitor.visitChildren(self) - - - - - def parsePql(self): - - localctx = PqlParser.ParsePqlContext(self, self._ctx, self.state) - self.enterRule(localctx, 2, self.RULE_parsePql) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 46 - self._errHandler.sync(self) - _la = self._input.LA(1) - while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.SCOL) | (1 << PqlParser.K_SELECT) | (1 << PqlParser.K_SET))) != 0): - self.state = 43 - self.sqlStmtList() - self.state = 48 - self._errHandler.sync(self) - _la = self._input.LA(1) - - self.state = 49 - self.match(PqlParser.EOF) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class SqlStmtListContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def sqlStmt(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.SqlStmtContext) - else: - return self.getTypedRuleContext(PqlParser.SqlStmtContext,i) - - - def SCOL(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.SCOL) - else: - return self.getToken(PqlParser.SCOL, i) - - def getRuleIndex(self): - return PqlParser.RULE_sqlStmtList - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSqlStmtList" ): - listener.enterSqlStmtList(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSqlStmtList" ): - listener.exitSqlStmtList(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSqlStmtList" ): - return visitor.visitSqlStmtList(self) - else: - return visitor.visitChildren(self) - - - - - def sqlStmtList(self): - - localctx = PqlParser.SqlStmtListContext(self, self._ctx, self.state) - self.enterRule(localctx, 4, self.RULE_sqlStmtList) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 54 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==PqlParser.SCOL: - self.state = 51 - self.match(PqlParser.SCOL) - self.state = 56 - self._errHandler.sync(self) - _la = self._input.LA(1) - - self.state = 57 - self.sqlStmt() - self.state = 
66 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: - if _alt==1: - self.state = 59 - self._errHandler.sync(self) - _la = self._input.LA(1) - while True: - self.state = 58 - self.match(PqlParser.SCOL) - self.state = 61 - self._errHandler.sync(self) - _la = self._input.LA(1) - if not (_la==PqlParser.SCOL): - break - - self.state = 63 - self.sqlStmt() - self.state = 68 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,3,self._ctx) - - self.state = 72 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,4,self._ctx) - while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: - if _alt==1: - self.state = 69 - self.match(PqlParser.SCOL) - self.state = 74 - self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,4,self._ctx) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class SqlStmtContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def setStmt(self): - return self.getTypedRuleContext(PqlParser.SetStmtContext,0) - - - def selectStmt(self): - return self.getTypedRuleContext(PqlParser.SelectStmtContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_sqlStmt - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSqlStmt" ): - listener.enterSqlStmt(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSqlStmt" ): - listener.exitSqlStmt(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSqlStmt" ): - return visitor.visitSqlStmt(self) - else: - return visitor.visitChildren(self) - - - - - def sqlStmt(self): - - localctx = PqlParser.SqlStmtContext(self, self._ctx, self.state) - self.enterRule(localctx, 6, self.RULE_sqlStmt) - try: - self.state = 77 - self._errHandler.sync(self) - token = self._input.LA(1) - if token in [PqlParser.K_SET]: - self.enterOuterAlt(localctx, 1) - self.state = 75 - self.setStmt() - pass - elif token in [PqlParser.K_SELECT]: - self.enterOuterAlt(localctx, 2) - self.state = 76 - self.selectStmt() - pass - else: - raise NoViableAltException(self) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class SetStmtContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.key = None # IdentifierMultipartContext - self.value = None # ExprContext - - def K_SET(self): - return self.getToken(PqlParser.K_SET, 0) - - def ASSIGN(self): - return self.getToken(PqlParser.ASSIGN, 0) - - def identifierMultipart(self): - return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) - - - def expr(self): - return self.getTypedRuleContext(PqlParser.ExprContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_setStmt - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSetStmt" ): - listener.enterSetStmt(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSetStmt" ): - listener.exitSetStmt(self) - - def accept(self, 
visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSetStmt" ): - return visitor.visitSetStmt(self) - else: - return visitor.visitChildren(self) - - - - - def setStmt(self): - - localctx = PqlParser.SetStmtContext(self, self._ctx, self.state) - self.enterRule(localctx, 8, self.RULE_setStmt) - try: - self.enterOuterAlt(localctx, 1) - self.state = 79 - self.match(PqlParser.K_SET) - self.state = 80 - localctx.key = self.identifierMultipart() - self.state = 81 - self.match(PqlParser.ASSIGN) - self.state = 82 - localctx.value = self.expr(0) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class SelectStmtContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def selectClause(self): - return self.getTypedRuleContext(PqlParser.SelectClauseContext,0) - - - def fromClause(self): - return self.getTypedRuleContext(PqlParser.FromClauseContext,0) - - - def whereClause(self): - return self.getTypedRuleContext(PqlParser.WhereClauseContext,0) - - - def orderByClause(self): - return self.getTypedRuleContext(PqlParser.OrderByClauseContext,0) - - - def limitClause(self): - return self.getTypedRuleContext(PqlParser.LimitClauseContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_selectStmt - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSelectStmt" ): - listener.enterSelectStmt(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSelectStmt" ): - listener.exitSelectStmt(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSelectStmt" ): - return visitor.visitSelectStmt(self) - else: - return visitor.visitChildren(self) - - - - - def selectStmt(self): - - localctx = PqlParser.SelectStmtContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_selectStmt) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 84 - self.selectClause() - self.state = 86 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_FROM: - self.state = 85 - self.fromClause() - - - self.state = 89 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_WHERE: - self.state = 88 - self.whereClause() - - - self.state = 92 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_ORDER: - self.state = 91 - self.orderByClause() - - - self.state = 95 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_LIMIT: - self.state = 94 - self.limitClause() - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class SelectClauseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def K_SELECT(self): - return self.getToken(PqlParser.K_SELECT, 0) - - def columns(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.ColumnsContext) - else: - return self.getTypedRuleContext(PqlParser.ColumnsContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COMMA) - else: - return self.getToken(PqlParser.COMMA, i) - - def 
getRuleIndex(self): - return PqlParser.RULE_selectClause - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterSelectClause" ): - listener.enterSelectClause(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitSelectClause" ): - listener.exitSelectClause(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitSelectClause" ): - return visitor.visitSelectClause(self) - else: - return visitor.visitChildren(self) - - - - - def selectClause(self): - - localctx = PqlParser.SelectClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_selectClause) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 97 - self.match(PqlParser.K_SELECT) - self.state = 98 - self.columns() - self.state = 103 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==PqlParser.COMMA: - self.state = 99 - self.match(PqlParser.COMMA) - self.state = 100 - self.columns() - self.state = 105 - self._errHandler.sync(self) - _la = self._input.LA(1) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class ColumnsContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.value = None # ExprContext - self.type_cast = None # FunctionContext - self.alias = None # TaxonContext - - def expr(self): - return self.getTypedRuleContext(PqlParser.ExprContext,0) - - - def COLON(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COLON) - else: - return self.getToken(PqlParser.COLON, i) - - def K_AS(self): - return self.getToken(PqlParser.K_AS, 0) - - def function(self): - return self.getTypedRuleContext(PqlParser.FunctionContext,0) - - - def taxon(self): - return self.getTypedRuleContext(PqlParser.TaxonContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_columns - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterColumns" ): - listener.enterColumns(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitColumns" ): - listener.exitColumns(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitColumns" ): - return visitor.visitColumns(self) - else: - return visitor.visitChildren(self) - - - - - def columns(self): - - localctx = PqlParser.ColumnsContext(self, self._ctx, self.state) - self.enterRule(localctx, 14, self.RULE_columns) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 106 - localctx.value = self.expr(0) - self.state = 110 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.COLON: - self.state = 107 - self.match(PqlParser.COLON) - self.state = 108 - self.match(PqlParser.COLON) - self.state = 109 - localctx.type_cast = self.function() - - - self.state = 114 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_AS: - self.state = 112 - self.match(PqlParser.K_AS) - self.state = 113 - localctx.alias = self.taxon() - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class FromClauseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, 
invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def K_FROM(self): - return self.getToken(PqlParser.K_FROM, 0) - - def tables(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.TablesContext) - else: - return self.getTypedRuleContext(PqlParser.TablesContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COMMA) - else: - return self.getToken(PqlParser.COMMA, i) - - def getRuleIndex(self): - return PqlParser.RULE_fromClause - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterFromClause" ): - listener.enterFromClause(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitFromClause" ): - listener.exitFromClause(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitFromClause" ): - return visitor.visitFromClause(self) - else: - return visitor.visitChildren(self) - - - - - def fromClause(self): - - localctx = PqlParser.FromClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 16, self.RULE_fromClause) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 116 - self.match(PqlParser.K_FROM) - self.state = 117 - self.tables() - self.state = 122 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==PqlParser.COMMA: - self.state = 118 - self.match(PqlParser.COMMA) - self.state = 119 - self.tables() - self.state = 124 - self._errHandler.sync(self) - _la = self._input.LA(1) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class TablesContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.table_name = None # IdentifierMultipartContext - self.table_alias = None # IdentifierMultipartContext - - def identifierMultipart(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.IdentifierMultipartContext) - else: - return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,i) - - - def K_AS(self): - return self.getToken(PqlParser.K_AS, 0) - - def getRuleIndex(self): - return PqlParser.RULE_tables - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterTables" ): - listener.enterTables(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitTables" ): - listener.exitTables(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitTables" ): - return visitor.visitTables(self) - else: - return visitor.visitChildren(self) - - - - - def tables(self): - - localctx = PqlParser.TablesContext(self, self._ctx, self.state) - self.enterRule(localctx, 18, self.RULE_tables) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 125 - localctx.table_name = self.identifierMultipart() - self.state = 130 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_AS or _la==PqlParser.WORD: - self.state = 127 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_AS: - self.state = 126 - self.match(PqlParser.K_AS) - - - self.state = 129 - localctx.table_alias = self.identifierMultipart() - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - 
self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class WhereClauseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def K_WHERE(self): - return self.getToken(PqlParser.K_WHERE, 0) - - def expr(self): - return self.getTypedRuleContext(PqlParser.ExprContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_whereClause - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterWhereClause" ): - listener.enterWhereClause(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitWhereClause" ): - listener.exitWhereClause(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitWhereClause" ): - return visitor.visitWhereClause(self) - else: - return visitor.visitChildren(self) - - - - - def whereClause(self): - - localctx = PqlParser.WhereClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 20, self.RULE_whereClause) - try: - self.enterOuterAlt(localctx, 1) - self.state = 132 - self.match(PqlParser.K_WHERE) - self.state = 133 - self.expr(0) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class OrderByClauseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def K_ORDER(self): - return self.getToken(PqlParser.K_ORDER, 0) - - def K_BY(self): - return self.getToken(PqlParser.K_BY, 0) - - def orderExpr(self, i:int=None): - if i is None: - return self.getTypedRuleContexts(PqlParser.OrderExprContext) - else: - return self.getTypedRuleContext(PqlParser.OrderExprContext,i) - - - def COMMA(self, i:int=None): - if i is None: - return self.getTokens(PqlParser.COMMA) - else: - return self.getToken(PqlParser.COMMA, i) - - def getRuleIndex(self): - return PqlParser.RULE_orderByClause - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterOrderByClause" ): - listener.enterOrderByClause(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitOrderByClause" ): - listener.exitOrderByClause(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitOrderByClause" ): - return visitor.visitOrderByClause(self) - else: - return visitor.visitChildren(self) - - - - - def orderByClause(self): - - localctx = PqlParser.OrderByClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 22, self.RULE_orderByClause) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 135 - self.match(PqlParser.K_ORDER) - self.state = 136 - self.match(PqlParser.K_BY) - self.state = 137 - self.orderExpr() - self.state = 142 - self._errHandler.sync(self) - _la = self._input.LA(1) - while _la==PqlParser.COMMA: - self.state = 138 - self.match(PqlParser.COMMA) - self.state = 139 - self.orderExpr() - self.state = 144 - self._errHandler.sync(self) - _la = self._input.LA(1) - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class OrderExprContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, 
invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - - def expr(self): - return self.getTypedRuleContext(PqlParser.ExprContext,0) - - - def K_ASC(self): - return self.getToken(PqlParser.K_ASC, 0) - - def K_DESC(self): - return self.getToken(PqlParser.K_DESC, 0) - - def getRuleIndex(self): - return PqlParser.RULE_orderExpr - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterOrderExpr" ): - listener.enterOrderExpr(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitOrderExpr" ): - listener.exitOrderExpr(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitOrderExpr" ): - return visitor.visitOrderExpr(self) - else: - return visitor.visitChildren(self) - - - - - def orderExpr(self): - - localctx = PqlParser.OrderExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 24, self.RULE_orderExpr) - self._la = 0 # Token type - try: - self.enterOuterAlt(localctx, 1) - self.state = 145 - self.expr(0) - self.state = 147 - self._errHandler.sync(self) - _la = self._input.LA(1) - if _la==PqlParser.K_ASC or _la==PqlParser.K_DESC: - self.state = 146 - _la = self._input.LA(1) - if not(_la==PqlParser.K_ASC or _la==PqlParser.K_DESC): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - - - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - - class LimitClauseContext(ParserRuleContext): - - def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): - super().__init__(parent, invokingState) - self.parser = parser - self.limit = None # ExprContext - - def K_LIMIT(self): - return self.getToken(PqlParser.K_LIMIT, 0) - - def expr(self): - return self.getTypedRuleContext(PqlParser.ExprContext,0) - - - def getRuleIndex(self): - return PqlParser.RULE_limitClause - - def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterLimitClause" ): - listener.enterLimitClause(self) - - def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitLimitClause" ): - listener.exitLimitClause(self) - - def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitLimitClause" ): - return visitor.visitLimitClause(self) - else: - return visitor.visitChildren(self) - - - - - def limitClause(self): - - localctx = PqlParser.LimitClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 26, self.RULE_limitClause) - try: - self.enterOuterAlt(localctx, 1) - self.state = 149 - self.match(PqlParser.K_LIMIT) - self.state = 150 - localctx.limit = self.expr(0) - except RecognitionException as re: - localctx.exception = re - self._errHandler.reportError(self, re) - self._errHandler.recover(self, re) - finally: - self.exitRule() - return localctx - - class ExprContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -1358,16 +316,16 @@ def expr(self, _p:int=0): _parentState = self.state localctx = PqlParser.ExprContext(self, self._ctx, _parentState) _prevctx = localctx - _startState = 28 - self.enterRecursionRule(localctx, 28, self.RULE_expr, _p) + _startState = 2 + self.enterRecursionRule(localctx, 2, self.RULE_expr, _p) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 162 + self.state = 27 self._errHandler.sync(self) - la_ = 
self._interp.adaptivePredict(self._input,18,self._ctx) + la_ = self._interp.adaptivePredict(self._input,0,self._ctx) if la_ == 1: - self.state = 153 + self.state = 18 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -1375,56 +333,56 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 154 + self.state = 19 localctx.right = self.expr(11) pass elif la_ == 2: - self.state = 155 + self.state = 20 self.match(PqlParser.OPEN_PAREN) - self.state = 156 + self.state = 21 localctx.inner = self.expr(0) - self.state = 157 + self.state = 22 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 159 + self.state = 24 self.literalValue() pass elif la_ == 4: - self.state = 160 + self.state = 25 self.function() pass elif la_ == 5: - self.state = 161 + self.state = 26 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 184 + self.state = 49 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,20,self._ctx) + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 182 + self.state = 47 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,19,self._ctx) + la_ = self._interp.adaptivePredict(self._input,1,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 164 + self.state = 29 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 165 + self.state = 30 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -1432,7 +390,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 166 + self.state = 31 localctx.right = self.expr(11) pass @@ -1440,11 +398,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 167 + self.state = 32 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 168 + self.state = 33 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -1452,7 +410,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 169 + self.state = 34 localctx.right = self.expr(10) pass @@ -1460,11 +418,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 170 + self.state = 35 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 171 + self.state = 36 localctx.operator = self._input.LT(1) _la = 
self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -1472,7 +430,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 172 + self.state = 37 localctx.right = self.expr(9) pass @@ -1480,11 +438,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 173 + self.state = 38 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 174 + self.state = 39 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -1492,7 +450,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 175 + self.state = 40 localctx.right = self.expr(8) pass @@ -1500,11 +458,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 176 + self.state = 41 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 177 + self.state = 42 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -1512,7 +470,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 178 + self.state = 43 localctx.right = self.expr(7) pass @@ -1520,11 +478,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 179 + self.state = 44 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 180 + self.state = 45 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -1532,14 +490,14 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 181 + self.state = 46 localctx.right = self.expr(6) pass - self.state = 186 + self.state = 51 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,20,self._ctx) + _alt = self._interp.adaptivePredict(self._input,2,self._ctx) except RecognitionException as re: localctx.exception = re @@ -1595,23 +553,23 @@ def accept(self, visitor:ParseTreeVisitor): def function(self): localctx = PqlParser.FunctionContext(self, self._ctx, self.state) - self.enterRule(localctx, 30, self.RULE_function) + self.enterRule(localctx, 4, self.RULE_function) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 187 + self.state = 52 localctx.function_name = self.identifierMultipart() - self.state = 188 + self.state = 53 self.match(PqlParser.OPEN_PAREN) - self.state = 190 + self.state = 55 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 
<< PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 189 + self.state = 54 localctx.arguments = self.exprList() - self.state = 192 + self.state = 57 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -1664,21 +622,21 @@ def accept(self, visitor:ParseTreeVisitor): def exprList(self): localctx = PqlParser.ExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 32, self.RULE_exprList) + self.enterRule(localctx, 6, self.RULE_exprList) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 194 + self.state = 59 self.expr(0) - self.state = 199 + self.state = 64 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 195 + self.state = 60 self.match(PqlParser.COMMA) - self.state = 196 + self.state = 61 self.expr(0) - self.state = 201 + self.state = 66 self._errHandler.sync(self) _la = self._input.LA(1) @@ -1740,37 +698,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 34, self.RULE_taxon) + self.enterRule(localctx, 8, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 203 + self.state = 68 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 202 + self.state = 67 localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 208 + self.state = 73 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,24,self._ctx) + la_ = self._interp.adaptivePredict(self._input,6,self._ctx) if la_ == 1: - self.state = 205 + self.state = 70 localctx.namespace = self.identifierMultipart() - self.state = 206 + self.state = 71 self.match(PqlParser.PIPE) - self.state = 210 + self.state = 75 localctx.slug = self.identifierMultipart() - self.state = 213 + self.state = 78 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,25,self._ctx) + la_ = self._interp.adaptivePredict(self._input,7,self._ctx) if la_ == 1: - self.state = 211 + self.state = 76 self.match(PqlParser.COLON) - self.state = 212 + self.state = 77 localctx.tag = self.identifierMultipart() @@ -1824,23 +782,23 @@ def accept(self, visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 36, self.RULE_identifierMultipart) + self.enterRule(localctx, 10, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 215 + self.state = 80 self.match(PqlParser.WORD) - self.state = 220 + self.state = 85 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,26,self._ctx) + _alt = self._interp.adaptivePredict(self._input,8,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 216 + self.state = 81 self.match(PqlParser.DOT) - self.state = 217 + self.state = 82 self.match(PqlParser.WORD) - self.state = 222 + self.state = 87 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,26,self._ctx) + _alt = self._interp.adaptivePredict(self._input,8,self._ctx) except RecognitionException as re: localctx.exception = 
re @@ -1898,11 +856,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 38, self.RULE_literalValue) + self.enterRule(localctx, 12, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 223 + self.state = 88 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -1922,7 +880,7 @@ def literalValue(self): def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): if self._predicates == None: self._predicates = dict() - self._predicates[14] = self.expr_sempred + self._predicates[1] = self.expr_sempred pred = self._predicates.get(ruleIndex, None) if pred is None: raise Exception("No predicate with index:" + str(ruleIndex)) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 8b1703e..597effb 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -17,123 +17,6 @@ def exitParseTel(self, ctx:PqlParser.ParseTelContext): pass - # Enter a parse tree produced by PqlParser#parsePql. - def enterParsePql(self, ctx:PqlParser.ParsePqlContext): - pass - - # Exit a parse tree produced by PqlParser#parsePql. - def exitParsePql(self, ctx:PqlParser.ParsePqlContext): - pass - - - # Enter a parse tree produced by PqlParser#sqlStmtList. - def enterSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): - pass - - # Exit a parse tree produced by PqlParser#sqlStmtList. - def exitSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): - pass - - - # Enter a parse tree produced by PqlParser#sqlStmt. - def enterSqlStmt(self, ctx:PqlParser.SqlStmtContext): - pass - - # Exit a parse tree produced by PqlParser#sqlStmt. - def exitSqlStmt(self, ctx:PqlParser.SqlStmtContext): - pass - - - # Enter a parse tree produced by PqlParser#setStmt. - def enterSetStmt(self, ctx:PqlParser.SetStmtContext): - pass - - # Exit a parse tree produced by PqlParser#setStmt. - def exitSetStmt(self, ctx:PqlParser.SetStmtContext): - pass - - - # Enter a parse tree produced by PqlParser#selectStmt. - def enterSelectStmt(self, ctx:PqlParser.SelectStmtContext): - pass - - # Exit a parse tree produced by PqlParser#selectStmt. - def exitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - pass - - - # Enter a parse tree produced by PqlParser#selectClause. - def enterSelectClause(self, ctx:PqlParser.SelectClauseContext): - pass - - # Exit a parse tree produced by PqlParser#selectClause. - def exitSelectClause(self, ctx:PqlParser.SelectClauseContext): - pass - - - # Enter a parse tree produced by PqlParser#columns. - def enterColumns(self, ctx:PqlParser.ColumnsContext): - pass - - # Exit a parse tree produced by PqlParser#columns. - def exitColumns(self, ctx:PqlParser.ColumnsContext): - pass - - - # Enter a parse tree produced by PqlParser#fromClause. - def enterFromClause(self, ctx:PqlParser.FromClauseContext): - pass - - # Exit a parse tree produced by PqlParser#fromClause. - def exitFromClause(self, ctx:PqlParser.FromClauseContext): - pass - - - # Enter a parse tree produced by PqlParser#tables. 
- def enterTables(self, ctx:PqlParser.TablesContext): - pass - - # Exit a parse tree produced by PqlParser#tables. - def exitTables(self, ctx:PqlParser.TablesContext): - pass - - - # Enter a parse tree produced by PqlParser#whereClause. - def enterWhereClause(self, ctx:PqlParser.WhereClauseContext): - pass - - # Exit a parse tree produced by PqlParser#whereClause. - def exitWhereClause(self, ctx:PqlParser.WhereClauseContext): - pass - - - # Enter a parse tree produced by PqlParser#orderByClause. - def enterOrderByClause(self, ctx:PqlParser.OrderByClauseContext): - pass - - # Exit a parse tree produced by PqlParser#orderByClause. - def exitOrderByClause(self, ctx:PqlParser.OrderByClauseContext): - pass - - - # Enter a parse tree produced by PqlParser#orderExpr. - def enterOrderExpr(self, ctx:PqlParser.OrderExprContext): - pass - - # Exit a parse tree produced by PqlParser#orderExpr. - def exitOrderExpr(self, ctx:PqlParser.OrderExprContext): - pass - - - # Enter a parse tree produced by PqlParser#limitClause. - def enterLimitClause(self, ctx:PqlParser.LimitClauseContext): - pass - - # Exit a parse tree produced by PqlParser#limitClause. - def exitLimitClause(self, ctx:PqlParser.LimitClauseContext): - pass - - # Enter a parse tree produced by PqlParser#expr. def enterExpr(self, ctx:PqlParser.ExprContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index cf73393..12aad10 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -14,71 +14,6 @@ def visitParseTel(self, ctx:PqlParser.ParseTelContext): return self.visitChildren(ctx) - # Visit a parse tree produced by PqlParser#parsePql. - def visitParsePql(self, ctx:PqlParser.ParsePqlContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#sqlStmtList. - def visitSqlStmtList(self, ctx:PqlParser.SqlStmtListContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#sqlStmt. - def visitSqlStmt(self, ctx:PqlParser.SqlStmtContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#setStmt. - def visitSetStmt(self, ctx:PqlParser.SetStmtContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#selectStmt. - def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#selectClause. - def visitSelectClause(self, ctx:PqlParser.SelectClauseContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#columns. - def visitColumns(self, ctx:PqlParser.ColumnsContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#fromClause. - def visitFromClause(self, ctx:PqlParser.FromClauseContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#tables. - def visitTables(self, ctx:PqlParser.TablesContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#whereClause. - def visitWhereClause(self, ctx:PqlParser.WhereClauseContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#orderByClause. - def visitOrderByClause(self, ctx:PqlParser.OrderByClauseContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#orderExpr. 
- def visitOrderExpr(self, ctx:PqlParser.OrderExprContext): - return self.visitChildren(ctx) - - - # Visit a parse tree produced by PqlParser#limitClause. - def visitLimitClause(self, ctx:PqlParser.LimitClauseContext): - return self.visitChildren(ctx) - - # Visit a parse tree produced by PqlParser#expr. def visitExpr(self, ctx:PqlParser.ExprContext): return self.visitChildren(ctx) diff --git a/python/src/pql_grammar/ast/__init__.py b/python/src/pql_grammar/ast/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/src/pql_grammar/ast/from_json.py b/python/src/pql_grammar/from_json.py similarity index 92% rename from python/src/pql_grammar/ast/from_json.py rename to python/src/pql_grammar/from_json.py index 2bc1527..7ec5924 100644 --- a/python/src/pql_grammar/ast/from_json.py +++ b/python/src/pql_grammar/from_json.py @@ -1,7 +1,5 @@ # fmt: off -from dataclasses import fields -from typing import List, Tuple, Any from . import model as ast diff --git a/python/src/pql_grammar/ast/from_pql.py b/python/src/pql_grammar/from_pql.py similarity index 64% rename from python/src/pql_grammar/ast/from_pql.py rename to python/src/pql_grammar/from_pql.py index d131121..721e434 100644 --- a/python/src/pql_grammar/ast/from_pql.py +++ b/python/src/pql_grammar/from_pql.py @@ -4,19 +4,38 @@ from antlr4 import ParserRuleContext from typing import Optional, Tuple, List, Type, Any -from ..antlr.PqlLexer import PqlLexer -from ..antlr.PqlParser import PqlParser -from ..antlr.PqlParserVisitor import PqlParserVisitor as _PqlParserVisitor +from .antlr.PqlLexer import PqlLexer +from .antlr.PqlParser import PqlParser +from .antlr.PqlParserVisitor import PqlParserVisitor as _PqlParserVisitor from . import model as ast +class ParseError(ValueError): + pass + + def full_text(ctx: ParserRuleContext) -> str: # extracts full text from a tree of nodes, # including white space. if ctx: if isinstance(ctx, ParserRuleContext): - return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) + try: + start = ctx.start.start + except AttributeError: + start = None + try: + stop = ctx.stop.stop + except AttributeError: + stop = None + + if not(start is None) and stop is None: + stop = start + + if start is None: + return str(ctx) + + return ctx.start.getInputStream().getText(start, stop) else: try: # some primitive context object @@ -28,6 +47,7 @@ def full_text(ctx: ParserRuleContext) -> str: return None + def unquote(s: str): # Quoted schema, table, column names come in Postgres style - double-quotes # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. 
@@ -93,26 +113,6 @@ def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: ]) if e.arguments else None ) - @classmethod - def parse_column_typecast(cls, v: PqlParser.FunctionContext) -> Optional[ast.Function]: - if not v: - return None - return cls.parse_function(v) - - @classmethod - def parse_column_alias(cls, v: PqlParser.TaxonContext) -> Optional[ast.Taxon]: - if not v: - return None - return cls.parse_taxon(v) - - @classmethod - def parse_column(cls, e: PqlParser.ColumnsContext): - return ast.Column( - cls.parse_expr(e.value), - cls.parse_column_typecast(e.type_cast), - cls.parse_column_alias(e.alias) - ) - @classmethod def parse_literal(cls, e:PqlParser.LiteralValueContext): return ast.Literal( @@ -140,7 +140,7 @@ def parse_literal_value(e:PqlParser.LiteralValueContext): try: v = full_text(e) except IndexError: - raise Exception(f"Could not extract literal value node from '{e.getText()}'.") + raise ParseError(f"Could not extract literal value node from '{e.getText()}'.") if is_number: # TODO: contemplate decimal type instead @@ -150,23 +150,13 @@ def parse_literal_value(e:PqlParser.LiteralValueContext): try: return float(v) except Exception: - raise Exception(f"Could not convert SQL number {v} to native number representation.") + raise ParseError(f"Could not convert SQL number {v} to native number representation.") if is_string: return unquote(v) return v - @classmethod - def parse_from_clause_expr(cls, ctx: PqlParser.FromClauseContext) -> Tuple[ast.Table, ...]: - return tuple([ - ast.Table( - full_text(table.table_name), - full_text(table.table_alias) - ) - for table in ctx.tables() - ]) - @classmethod def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : ctx = cls.unwrap_expr_parens(ctx) @@ -204,18 +194,17 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : if v: return cls.parse_function(v) - raise Exception(f'Where expression "{full_text(ctx)}" is not supported yet.') + raise ParseError(f'Where expression "{full_text(ctx)}" is not supported yet.') class PqlVisitor(_PqlParserVisitor): - def visit_from_pql_string(self, pql: str): - inp_stream = InputStream(pql) - lexer = PqlLexer(inp_stream) - stream = CommonTokenStream(lexer) - parser = PqlParser(stream) - tree = parser.parsePql() - self.visit(tree) + def visitErrorNode(self, node): + wrong_symbol = node.symbol.text + line = node.symbol.line + column = node.symbol.column + 1 + details = f'Unexpected symbol "{wrong_symbol}" on line {line}, position {column}' + raise ParseError(details) def visit_from_tel_string(self, tel: str): inp_stream = InputStream(tel) @@ -226,56 +215,6 @@ def visit_from_tel_string(self, tel: str): self.visit(tree) -def from_pql(pql: str, cls:Type[PqlVisitor] = PqlVisitor) -> List[ast.Node]: - - statements = [] - - class V(cls): - - def visitSelectStmt(self, ctx:PqlParser.SelectStmtContext): - columns = tuple([ - ast.Column( - PqlAntlrToAstParser.parse_expr(column.value), - PqlAntlrToAstParser.parse_column_typecast(column.type_cast), - PqlAntlrToAstParser.parse_column_alias(column.alias) - ) - for column in ctx.selectClause().columns() - ]) - - v = ctx.fromClause() - if v: - from_clause = PqlAntlrToAstParser.parse_from_clause_expr(v) - else: - from_clause = None - - v = ctx.whereClause() - if v: - where_clause = PqlAntlrToAstParser.parse_expr(v.expr()) - else: - where_clause = None - - statements.append(ast.SelectStmt( - columns=columns, - from_clause=from_clause, - where_clause=where_clause - )) - - def visitSetStmt(self, ctx:PqlParser.SetStmtContext): - key = 
full_text(ctx.key) - # TODO: parse this better. There are literals there possibly. Need to unpack them. - value = full_text(ctx.value) - statements.append( - ast.SetStmt( - key, - value - ) - ) - - V().visit_from_pql_string(pql) - - return statements - - def from_tel(tel: str, cls:Type[PqlVisitor] = PqlVisitor) -> ast.Node: statements = [] diff --git a/python/src/pql_grammar/ast/model.py b/python/src/pql_grammar/model.py similarity index 73% rename from python/src/pql_grammar/ast/model.py rename to python/src/pql_grammar/model.py index fe0c4e0..41aa9c2 100644 --- a/python/src/pql_grammar/ast/model.py +++ b/python/src/pql_grammar/model.py @@ -9,7 +9,6 @@ from decimal import Decimal from typing import ( Any, - List, Optional, Tuple, Union, @@ -63,34 +62,6 @@ class Function(Node): # [[null,'value'],[null,2]] args: Optional[Tuple[CallArgs, ...]] = None -ColumnValue = Union[Expr,Function,Taxon,Literal] - - -@dataclass(eq=True, frozen=True) -class Column(Node): - value: ColumnValue - type_cast: Optional[Function] = None - alias: Optional[Taxon] = None - - -@dataclass(eq=True, frozen=True) -class Table(Node): - value: str - alias: Optional[str] = None - - -@dataclass(eq=True, frozen=True) -class SelectStmt(Node): - columns: Tuple[Column, ...] - from_clause: Optional[Tuple[Table, ...]] = None - where_clause: Optional[Expr] = None - - -@dataclass(eq=True, frozen=True) -class SetStmt(Node): - key: str - value: str - inventory.update({ k : v diff --git a/python/src/pql_grammar/ast/to_json.py b/python/src/pql_grammar/to_json.py similarity index 100% rename from python/src/pql_grammar/ast/to_json.py rename to python/src/pql_grammar/to_json.py diff --git a/python/src/pql_grammar/ast/to_pql.py b/python/src/pql_grammar/to_pql.py similarity index 60% rename from python/src/pql_grammar/ast/to_pql.py rename to python/src/pql_grammar/to_pql.py index 54996bc..f7c40a4 100644 --- a/python/src/pql_grammar/ast/to_pql.py +++ b/python/src/pql_grammar/to_pql.py @@ -67,50 +67,14 @@ def __str__(self): return f'{fn}({args})' -class Column(Node): - n: ast.Column - def __str__(self): - n = self.n - value = f'{to_r(n.value)}' - type_cast = f'::{to_r(n.type_cast)}' if n.type_cast else '' - if type_cast and (value[FIRST] != '(' or value[LAST] != ')'): - value = f'({value})' - alias = f' AS {to_r(n.alias)}' if n.alias else '' - return f'{value}{type_cast}{alias}' - - -class Table(Node): - n: ast.Table - def __str__(self): - n = self.n - value = n.value - alias = f' AS {n.alias}' if n.alias else '' - return f'{value}{alias}' - - -class SelectStmt(Node): - n: ast.SelectStmt - def __str__(self): - n = self.n - select_str = 'SELECT\n' + INDENT + (',\n' + INDENT).join(map(str, map(Column, n.columns))) + '\n' - from_str = 'FROM\n' + INDENT + (',\n' + INDENT).join(map(str, map(Table, n.from_clause))) + '\n' - where_str = 'WHERE\n' + INDENT + str(to_r(n.where_clause)) + '\n' - return select_str + from_str + where_str + ';\n' - - def to_r(n: ast.Node): if isinstance(n, ast.Node): return renderer_map.get(type(n), Node)(n) return str(n) -def to_pql(o: List[ast.Node]): - if not isinstance(o, (list, tuple)): - raise AttributeError(f"Argument must be a list of statements.") - return '\n'.join([ - str(to_r(e)) - for e in o - ]) +def to_tel(o: ast.Node): + return str(to_r(o)) renderer_map.update({ diff --git a/python/src/pql_grammar/ast/tools.py b/python/src/pql_grammar/tools.py similarity index 100% rename from python/src/pql_grammar/ast/tools.py rename to python/src/pql_grammar/tools.py diff --git a/python/tests/ast_json_test.py 
b/python/tests/ast_json_test.py index 98a001d..8248499 100644 --- a/python/tests/ast_json_test.py +++ b/python/tests/ast_json_test.py @@ -1,15 +1,14 @@ # fmt: off +import pytest import sys -from unittest import TestCase sys.path.append('./src') -from pql_grammar.ast import model as ast -from pql_grammar.ast.tools import ast_diff -from pql_grammar.ast.to_json import to_json -from pql_grammar.ast.from_json import from_json -from pql_grammar.ast.tools import find_all +from pql_grammar import model as ast +from pql_grammar.tools import ast_diff +from pql_grammar.to_json import to_json +from pql_grammar.from_json import from_json null = None @@ -17,61 +16,43 @@ true = True -ast_should_be = ast.SelectStmt( - columns = ( - ast.Column(ast.Taxon('taxon1', 'ns1', True)), - ast.Column(ast.Taxon('taxon2', 'ns2', False)), - ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), - ast.Column( - ast.Expr( - '+', - ( - ast.Taxon( - 'taxon3', - 'ns3', - True - ), - ast.Expr( - '-', - ( - ast.Taxon('slug'), - ast.Literal(1234, '1234') - ), - ), - ), +inputs = ( + ast.Taxon('taxon1', 'ns1', True), + ast.Taxon('taxon2', 'ns2', False), + ast.Taxon('slug1'), + ast.Expr( + '+', + ( + ast.Taxon( + 'taxon3', + 'ns3', + True ), - None, - ast.Taxon('custom_data', 'myns') - ), - ast.Column( ast.Expr( - '+', + '-', ( - ast.Taxon( - 'taxon3', - 'ns3', - ), - ast.Literal(5, '5'), + ast.Taxon('slug'), + ast.Literal(1234, '1234') ), ), - ast.Function( - 'TypeCast' - ), - ast.Taxon('custom_data_cast', 'myns') ), - ast.Column( - ast.Function('fn_4', ( - (None, ast.Function('fn_1', ( - (None, ast.Taxon('slug')), - ),),), - ),), - ast.Function( - 'TypeCast', - (('arg1','value1'),), # normally inner pair is a tuple, but for comparison making list. + ), + ast.Expr( + '+', + ( + ast.Taxon( + 'taxon3', + 'ns3', ), - ) + ast.Literal(5, '5'), + ), ), - where_clause = ast.Expr( + ast.Function('fn_4', ( + (None, ast.Function('fn_1', ( + (None, ast.Taxon('slug')), + ),),), + ),), + ast.Expr( 'AND', ( ast.Expr( @@ -99,146 +80,93 @@ ) -json_should_be = { - "__typename": "SelectStmt", - "columns": [ - { - "__typename": "Column", - "value": { +outputs = ( + { + "__typename": "Taxon", + "slug": "taxon1", + "namespace": "ns1", + "is_optional": true + }, + { + "__typename": "Taxon", + "slug": "taxon2", + "namespace": "ns2", + "is_optional": false + }, + { + "__typename": "Taxon", + "slug": "slug1", + "is_optional": false + }, + { + "__typename": "Expr", + "operator": "+", + "args": [ + { "__typename": "Taxon", - "slug": "taxon1", - "namespace": "ns1", + "slug": "taxon3", + "namespace": "ns3", "is_optional": true - } - }, - { - "__typename": "Column", - "value": { - "__typename": "Taxon", - "slug": "taxon2", - "namespace": "ns2", - "is_optional": false - } - }, - { - "__typename": "Column", - "value": { - "__typename": "Taxon", - "slug": "slug1", - "is_optional": false }, - "alias": { - "__typename": "Taxon", - "slug": "slug1", - "namespace": "myns", - "is_optional": false - } - }, - { - "__typename": "Column", - "value": { - "__typename": "Expr", - "operator": "+", - "args": [ - { - "__typename": "Taxon", - "slug": "taxon3", - "namespace": "ns3", - "is_optional": true - }, - { - "__typename": "Expr", - "operator": "-", - "args": [ - { - "__typename": "Taxon", - "slug": "slug", - "is_optional": false - }, - { - "__typename": "Literal", - "value": 1234, - "raw_value": "1234" - } - ] - } - ] - }, - "alias": { - "__typename": "Taxon", - "slug": "custom_data", - "namespace": "myns", - "is_optional": false - } - }, - { - "__typename": 
"Column", - "value": { + { "__typename": "Expr", - "operator": "+", + "operator": "-", "args": [ { "__typename": "Taxon", - "slug": "taxon3", - "namespace": "ns3", + "slug": "slug", "is_optional": false }, { "__typename": "Literal", - "value": 5, - "raw_value": "5" + "value": 1234, + "raw_value": "1234" } ] - }, - "type_cast": { - "__typename": "Function", - "function_name": "TypeCast" - }, - "alias": { + } + ] + }, + { + "__typename": "Expr", + "operator": "+", + "args": [ + { "__typename": "Taxon", - "slug": "custom_data_cast", - "namespace": "myns", + "slug": "taxon3", + "namespace": "ns3", "is_optional": false - } - }, - { - "__typename": "Column", - "value": { - "__typename": "Function", - "function_name": "fn_4", - "args": [ - [ - null, - { - "__typename": "Function", - "function_name": "fn_1", - "args": [ - [ - null, - { - "__typename": "Taxon", - "slug": "slug", - "is_optional": false - } - ] - ] - } - ] - ] }, - "type_cast": { - "__typename": "Function", - "function_name": "TypeCast", - "args": [ - [ - "arg1", - "value1" - ] - ] + { + "__typename": "Literal", + "value": 5, + "raw_value": "5" } - } - ], - "where_clause": { + ] + }, + { + "__typename": "Function", + "function_name": "fn_4", + "args": [ + [ + null, + { + "__typename": "Function", + "function_name": "fn_1", + "args": [ + [ + null, + { + "__typename": "Taxon", + "slug": "slug", + "is_optional": false + } + ] + ] + } + ] + ] + }, + { "__typename": "Expr", "operator": "AND", "args": [ @@ -289,27 +217,13 @@ } ] } -} - - -class JsonAstTests(TestCase): - maxDiff = None - - def test_render_json_from_ast(self): - json_result = to_json(ast_should_be) - # import json; print(json.dumps(json_result, indent=4)) - assert json_should_be == json_result +) - def test_render_ast_from_json(self): - ast_result = from_json(json_should_be) - # import json; print(json.dumps(json_result, indent=4)) - ast_diff(ast_should_be, ast_result) - assert ast_should_be == ast_result - # ensure produced nodes are hashable - set( - find_all( - ast_result, - lambda o: isinstance(o, ast.Node) - ) - ) +@pytest.mark.parametrize( + 'input, output_should_be', + zip(inputs, outputs) +) +def test_tel_to_ast_and_back(input: str, output_should_be: str): + json_is = to_json(input) + assert json_is == output_should_be diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index 913f6cb..97156b4 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -1,121 +1,71 @@ # fmt: off +import pytest import sys -from unittest import TestCase sys.path.append('./src') -from pql_grammar.ast import model as ast -from pql_grammar.ast.to_pql import to_pql -from pql_grammar.ast.from_pql import PqlVisitor, PqlParser, from_pql -from pql_grammar.ast.tools import find_all - - -class ErrorAssertingPqlVisitor(PqlVisitor): - """ - Special TelVisitor for testing grammar. Throws error in case of invalid node. 
- """ - def visitErrorNode(self, node): - wrong_symbol = node.symbol.text - position = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" at position {position}' - raise AssertionError(details) - - -pql_all_cases = """\ -select - ?ns1|taxon1, - ns2|taxon2, - slug1 as myns|slug1, - ?ns3|taxon3 + (slug2 - 1234) as myns|custom_data, - (ns3|taxon3 + 5)::TypeCast() as myns|custom_data_cast, - fn_4(fn_1(slug))::TypeCast(arg1='value1') -from my_ns, your_ns as super_ns -where - ns6|taxon6 > 1234 - and (ns0|taxon10 + 4321) == 0 -""" +from pql_grammar import model as ast +from pql_grammar.to_pql import to_tel +from pql_grammar.from_pql import PqlVisitor, from_tel +from pql_grammar.tools import find_all + + +inputs = ( + '?ns1|taxon1', + 'ns2|taxon2', + 'slug1', + '?ns3|taxon3 + (slug2 - 1234)', + '(ns3|taxon3 + 5)', + 'fn_4(fn_1(slug))', + 'TypeCast(arg1=\'value1\')', + 'ns6|taxon6 > 1234 and (ns0|taxon10 + 4321) == 0' +) -# renderer is recursive and ads parens for safety -# TODO: contemplate ways to avoid adding superfluous parens -# and upper-cases all keywords -pql_rendered_should_be = """\ -SELECT - ?ns1|taxon1, - ns2|taxon2, - slug1 AS myns|slug1, - (?ns3|taxon3 + (slug2 - 1234)) AS myns|custom_data, - (ns3|taxon3 + 5)::TypeCast() AS myns|custom_data_cast, - (fn_4(fn_1(slug)))::TypeCast(arg1='value1') -FROM - my_ns, - your_ns AS super_ns -WHERE - ((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0)) -; -""" -stmt_should_be = ast.SelectStmt( - columns = ( - ast.Column(ast.Taxon('taxon1', 'ns1', True)), - ast.Column(ast.Taxon('taxon2', 'ns2', False)), - ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), - ast.Column( - ast.Expr( - '+', - ( - ast.Taxon( - 'taxon3', - 'ns3', - True - ), - ast.Expr( - '-', - ( - ast.Taxon('slug2'), - ast.Literal(1234, '1234'), - ), - ), - ), +ast_should_bes = ( + ast.Taxon('taxon1', 'ns1', True), + ast.Taxon('taxon2', 'ns2', False), + ast.Taxon('slug1'), + ast.Expr( + '+', + ( + ast.Taxon( + 'taxon3', + 'ns3', + True ), - None, - ast.Taxon('custom_data', 'myns'), - ), - ast.Column( ast.Expr( - '+', + '-', ( - ast.Taxon( - 'taxon3', - 'ns3', - ), - ast.Literal(5, '5'), + ast.Taxon('slug2'), + ast.Literal(1234, '1234'), ), ), - ast.Function( - 'TypeCast' - ), - ast.Taxon('custom_data_cast', 'myns') ), - ast.Column( - ast.Function('fn_4', ( - (None, ast.Function('fn_1', ( - (None, ast.Taxon('slug')), - ),),), - ),), - ast.Function( - 'TypeCast', - ( - ('arg1',ast.Literal('value1', "'value1'")), - ), + ), + ast.Expr( + '+', + ( + ast.Taxon( + 'taxon3', + 'ns3', ), + ast.Literal(5, '5'), ), ), - from_clause = ( - ast.Table('my_ns'), - ast.Table('your_ns', 'super_ns') + ast.Function('fn_4', ( + (None, ast.Function('fn_1', ( + (None, ast.Taxon('slug')), + ),),), + ),), + ast.Function( + 'TypeCast', + ( + ('arg1',ast.Literal('value1', "'value1'")), + ), ), - where_clause = ast.Expr( + ast.Expr( 'AND', ( ast.Expr( @@ -143,77 +93,25 @@ def visitErrorNode(self, node): ) -class PqlAstTests(TestCase): - maxDiff = None - - def test_multiple_statements(self): - pql = """ - set fill_empty_dates = true; - select a, b; - """ - statements = from_pql(pql, ErrorAssertingPqlVisitor) - - assert len(statements) == 2 - set_stmt = statements[0] - select_stmt = statements[1] - - # TODO: make SET parse literal values properly into python native bool, int, str etc. 
- # Till then, this is mostly a placeholder for future functionality - assert set_stmt == ast.SetStmt('fill_empty_dates', 'true') - assert select_stmt == ast.SelectStmt( - columns=( - ast.Column(ast.Taxon('a')), - ast.Column(ast.Taxon('b')), - ) - ) - - def test_select(self): - statements = from_pql(pql_all_cases, ErrorAssertingPqlVisitor) - assert statements - stmt = statements[0] - - assert len(stmt.columns) == len(stmt_should_be.columns) - for result, should_be in zip(stmt.columns, stmt_should_be.columns): - assert result == should_be - # ast.ast_diff(stmt.where_clause, stmt_should_be.where_clause) - assert stmt.where_clause == stmt_should_be.where_clause - - # ensure produced nodes are hashable - set( - find_all( - stmt, - lambda o: isinstance(o, ast.Node) - ) - ) +outputs = ( + '?ns1|taxon1', + 'ns2|taxon2', + 'slug1', + '(?ns3|taxon3 + (slug2 - 1234))', + '(ns3|taxon3 + 5)', + 'fn_4(fn_1(slug))', + 'TypeCast(arg1=\'value1\')', + '((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0))' +) - def test_parse_from_statement(self): - pql_input = """\ - SELECT - a, - b - from - dataset_one, - dataset_two as two - WHERE - a > b - ; - """ - statements = from_pql(pql_input, ErrorAssertingPqlVisitor) - assert len(statements) == 1 - select_stmt: ast.SelectStmt = statements[0] - assert select_stmt.from_clause == ( - ast.Table('dataset_one'), - ast.Table('dataset_two', 'two'), - ) - # ensure produced nodes are hashable - set( - find_all( - statements, - lambda o: isinstance(o, ast.Node) - ) - ) +@pytest.mark.parametrize( + 'input, ast_should_be, output_should_be', + zip(inputs, ast_should_bes, outputs) +) +def test_tel_to_ast_and_back(input: str, ast_should_be: ast.Node, output_should_be: str): + ast_is = from_tel(input) + assert ast_is == ast_should_be - def test_render_pql_from_ast(self): - pql_result = to_pql([stmt_should_be]) - assert pql_result == pql_rendered_should_be + output_is = to_tel(ast_is) + assert output_is == output_should_be diff --git a/python/tests/ast_tools_test.py b/python/tests/ast_tools_test.py index a697ce2..11a095e 100644 --- a/python/tests/ast_tools_test.py +++ b/python/tests/ast_tools_test.py @@ -5,110 +5,44 @@ sys.path.append('./src') -from pql_grammar.ast import model as ast -from pql_grammar.ast.tools import find_all +from pql_grammar import model as ast +from pql_grammar.tools import find_all -sample_tree = ast.SelectStmt( - columns = ( - ast.Column(ast.Taxon('taxon1', 'ns1', True)), - ast.Column(ast.Taxon('taxon2', 'ns2', False)), - ast.Column(ast.Literal(5555, '5555')), - ast.Column(ast.Taxon('slug1'), None, ast.Taxon('slug1', 'myns')), - ast.Column( +class AstToolsTests(TestCase): + maxDiff = None + + def test_find_all(self): + vv = list(find_all( ast.Expr( - '+', + 'AND', ( - ast.Taxon( - 'taxon3', - 'ns3', - True, - ), ast.Expr( - '-', + '>', ( - ast.Taxon('slug'), - ast.Literal(12345, '12345'), + ast.Taxon('taxon6', 'ns6'), + ast.Literal(1234, '1234'), ), ), - ), - ), - None, - ast.Taxon('custom_data', 'myns'), - ), - ast.Column( - ast.Expr( - '+', - ( - ast.Taxon( - 'taxon3', - 'ns3', - ), - ast.Literal(5, '5'), - ), - ), - ast.Function( - 'TypeCast', - ), - ast.Taxon('custom_data_cast', 'myns'), - ), - ast.Column( - ast.Function('fn_4', ( - (None, ast.Function('fn_1', ( - (None, ast.Taxon('slug'),), - ),),), - ),), - ast.Function( - 'TypeCast', - (('arg1','value1'),), # normally inner pair is a tuple, but for comparison making list. 
- ), - ), - ), - from_clause = ( - ast.Table('my_ns'), - ast.Table('your_ns', 'super_ns'), - ), - where_clause = ast.Expr( - 'AND', - ( - ast.Expr( - '>', - ( - ast.Taxon('taxon6', 'ns6'), - ast.Literal(1234, '1234'), - ), - ), - ast.Expr( - '==', - ( ast.Expr( - '+', + '==', ( - ast.Taxon('taxon10', 'ns0'), - ast.Literal(4321, '4321'), + ast.Expr( + '+', + ( + ast.Taxon('taxon10', 'ns0'), + ast.Literal(4321, '4321'), + ), + ), + ast.Literal(0, '0'), ), ), - ast.Literal(0, '0'), ), ), - ), - ), -) - - -class AstToolsTests(TestCase): - maxDiff = None - - def test_find_all(self): - vv = list(find_all( - sample_tree, lambda o: isinstance(o, ast.Literal) )) assert vv == [ - ast.Literal(value=5555, raw_value='5555'), - ast.Literal(value=12345, raw_value='12345'), - ast.Literal(value=5, raw_value='5'), ast.Literal(value=1234, raw_value='1234'), ast.Literal(value=4321, raw_value='4321'), ast.Literal(value=0, raw_value='0'), diff --git a/python/tests/tel/grammar_test.py b/python/tests/tel_grammar_test.py similarity index 58% rename from python/tests/tel/grammar_test.py rename to python/tests/tel_grammar_test.py index 3cdcdfb..b4a6ff1 100644 --- a/python/tests/tel/grammar_test.py +++ b/python/tests/tel_grammar_test.py @@ -5,38 +5,8 @@ from antlr4 import CommonTokenStream, InputStream, ParserRuleContext sys.path.append('./src') -from pql_grammar.antlr.PqlLexer import PqlLexer -from pql_grammar.antlr.PqlParser import PqlParser -from pql_grammar.antlr.PqlParserVisitor import PqlParserVisitor - -def full_text(ctx: ParserRuleContext) -> str: - # extracts full text from a tree of nodes, - # including white space. - if ctx: - if isinstance(ctx, ParserRuleContext): - return ctx.start.getInputStream().getText(ctx.start.start, ctx.stop.stop) - else: - try: - # some primitive context object - return ctx.text - except AttributeError: - # Terminal Node of some sort - return str(ctx) - else: - return None - - -class AssertTelVisitor(PqlParserVisitor): - """ - Special TelVisitor for testing grammar. Throws error in case of invalid node. 
- """ - - def visitErrorNode(self, node): - wrong_symbol = node.symbol.text - position = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" at position {position}' - raise AssertionError(details) +from pql_grammar.from_pql import from_tel, ParseError @pytest.mark.parametrize( @@ -92,11 +62,25 @@ def visitErrorNode(self, node): ], ) def test_grammar(test_case): - inp_stream = InputStream(test_case) - lexer = PqlLexer(inp_stream) - stream = CommonTokenStream(lexer) - parser = PqlParser(stream) - tree = parser.parseTel() - # Use error visitor on parsed tree to test it - visitor = AssertTelVisitor() - visitor.visit(tree) + assert from_tel(test_case) + + +@pytest.mark.parametrize( + ["test_case"], + [ + # Basic operations + ('1.3.3 + slug',), + ('1 + slug) - 3',), + # ('34 - (slug * 2',), passes because does not need closing paren + ('-------------',), + # Handle namespaced taxons + ('ds|sl|ug - sluging',), + # Handle nested functions + ('slug is',), + ('slug not',), + ('',), + ], +) +def test_grammar_bad(test_case): + with pytest.raises(ParseError) as ex: + assert from_tel(test_case) From 6f40979879a3e32e36cbf89d0774256bc3063a47 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 4 Dec 2020 18:14:00 -0800 Subject: [PATCH 24/32] rename `function` token to `fn` to avoid reserved word collision in JS --- grammar/PqlParser.g4 | 4 +- js-temp/PqlLexer.js | 701 ++++---- js-temp/PqlParser.js | 1550 +++++------------ js-temp/PqlParserListener.js | 87 +- js-temp/PqlParserVisitor.js | 54 +- python/src/pql_grammar/antlr/PqlParser.py | 34 +- .../pql_grammar/antlr/PqlParserListener.py | 8 +- .../src/pql_grammar/antlr/PqlParserVisitor.py | 4 +- python/src/pql_grammar/from_pql.py | 4 +- 9 files changed, 768 insertions(+), 1678 deletions(-) diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 7798798..5526f28 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -30,7 +30,7 @@ expr | left=expr operator=( K_OR | OR ) right=expr | OPEN_PAREN inner=expr CLOSE_PAREN | literalValue - | function + | fn | taxon ; @@ -41,7 +41,7 @@ expr // Named ones will come as `expr` with left=expr,operator=ASSIGN,right=expr contents. // You might need to express these as ordered dict / list of tuples to preserve names of args. // Positional will be whatever literal or other single-valued expr content could be. -function: function_name=identifierMultipart OPEN_PAREN arguments=exprList? CLOSE_PAREN; +fn: function_name=identifierMultipart OPEN_PAREN arguments=exprList? CLOSE_PAREN; exprList: expr ( COMMA expr )* ; // TODO: TAXON_TAG_DELIMITER is being killed off. Remove when we migrate out of taxon tags. 
diff --git a/js-temp/PqlLexer.js b/js-temp/PqlLexer.js index bf254b5..ff6a637 100644 --- a/js-temp/PqlLexer.js +++ b/js-temp/PqlLexer.js @@ -5,7 +5,7 @@ var antlr4 = require('antlr4/index'); var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0002:\u01fd\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u00023\u01ca\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", @@ -20,304 +20,274 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "8\t8\u00049\t9\u0004:\t:\u0004;\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004", "?\t?\u0004@\t@\u0004A\tA\u0004B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004", "F\tF\u0004G\tG\u0004H\tH\u0004I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004", - "M\tM\u0004N\tN\u0004O\tO\u0004P\tP\u0004Q\tQ\u0004R\tR\u0004S\tS\u0004", - "T\tT\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003\u0004\u0003", - "\u0004\u0003\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0006\u0003", - "\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0003\b\u0003", - "\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\u0003\u000b", - "\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\f\u0003\r\u0003\r\u0003", + "M\tM\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005\u0003", + "\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003", + "\u0007\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\n\u0003", + "\n\u0003\n\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\r\u0003\r\u0003", "\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010\u0003\u0010\u0003", "\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003", "\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003\u0016\u0003", "\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019\u0003\u0019\u0003", "\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c\u0003\u001c\u0003", "\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001e\u0003\u001e\u0003", - "\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003 \u0003", - "!\u0003!\u0003!\u0003!\u0003!\u0003\"\u0003\"\u0003\"\u0003\"\u0003", - "\"\u0003\"\u0003#\u0003#\u0003#\u0003$\u0003$\u0003$\u0003$\u0003$\u0003", - "$\u0003$\u0003%\u0003%\u0003%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003", - "&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'\u0003(\u0003(\u0003", - "(\u0003(\u0003(\u0003(\u0003(\u0003(\u0003)\u0003)\u0003)\u0003)\u0003", - ")\u0003*\u0003*\u0003*\u0003+\u0003+\u0003+\u0003+\u0003+\u0003+\u0003", - ",\u0003,\u0003,\u0003,\u0003,\u0003,\u0003,\u0003-\u0003-\u0003-\u0003", - "-\u0003-\u0003.\u0003.\u0003.\u0003.\u0003.\u0003.\u0003/\u0006/\u0143", - "\n/\r/\u000e/\u0144\u0003/\u0003/\u0007/\u0149\n/\f/\u000e/\u014c\u000b", - "/\u0005/\u014e\n/\u0003/\u0003/\u0005/\u0152\n/\u0003/\u0006/\u0155", - "\n/\r/\u000e/\u0156\u0005/\u0159\n/\u0003/\u0003/\u0006/\u015d\n/\r", - "/\u000e/\u015e\u0003/\u0003/\u0005/\u0163\n/\u0003/\u0006/\u0166\n/", - "\r/\u000e/\u0167\u0005/\u016a\n/\u0005/\u016c\n/\u00030\u00030\u0003", - "1\u00031\u00031\u00031\u00071\u0174\n1\f1\u000e1\u0177\u000b1\u0003", - "1\u00031\u00032\u00032\u00032\u00032\u00072\u017f\n2\f2\u000e2\u0182", - "\u000b2\u00032\u00032\u00033\u00033\u00034\u00034\u00034\u00034\u0007", - 
"4\u018c\n4\f4\u000e4\u018f\u000b4\u00034\u00034\u00035\u00035\u0003", - "5\u00035\u00075\u0197\n5\f5\u000e5\u019a\u000b5\u00035\u00035\u0003", - "6\u00036\u00036\u00036\u00036\u00056\u01a3\n6\u00036\u00076\u01a6\n", - "6\f6\u000e6\u01a9\u000b6\u00036\u00036\u00037\u00037\u00037\u00037\u0007", - "7\u01b1\n7\f7\u000e7\u01b4\u000b7\u00037\u00037\u00037\u00057\u01b9", - "\n7\u00037\u00037\u00038\u00038\u00038\u00038\u00039\u00039\u00079\u01c3", - "\n9\f9\u000e9\u01c6\u000b9\u0003:\u0003:\u0003;\u0003;\u0003<\u0003", - "<\u0003=\u0003=\u0003>\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003", - "A\u0003B\u0003B\u0003C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003", - "F\u0003G\u0003G\u0003H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003", - "K\u0003L\u0003L\u0003M\u0003M\u0003N\u0003N\u0003O\u0003O\u0003P\u0003", - "P\u0003Q\u0003Q\u0003R\u0003R\u0003S\u0003S\u0003T\u0003T\u0003\u01b2", - "\u0002U\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b", - "\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b\u000f", - "\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+\u0017", - "-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!A\"C#E$", - "G%I&K\'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7m8o9q:s\u0002u\u0002w\u0002y", - "\u0002{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002\u0085\u0002", - "\u0087\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002\u0091\u0002", - "\u0093\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002\u009d\u0002", - "\u009f\u0002\u00a1\u0002\u00a3\u0002\u00a5\u0002\u00a7\u0002\u0003\u0002", - "$\u0004\u0002--//\u0003\u0002$$\u0003\u0002))\u0004\u0002\f\f\u000f", - "\u000f\u0005\u0002\u000b\r\u000f\u000f\"\"\u0005\u0002C\\aac|\u0006", - "\u00022;C\\aac|\u0003\u00022;\u0004\u0002CCcc\u0004\u0002DDdd\u0004", - "\u0002EEee\u0004\u0002FFff\u0004\u0002GGgg\u0004\u0002HHhh\u0004\u0002", - "IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004\u0002LLll\u0004\u0002MMm", - "m\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002PPpp\u0004\u0002QQqq\u0004", - "\u0002RRrr\u0004\u0002SSss\u0004\u0002TTtt\u0004\u0002UUuu\u0004\u0002", - "VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004\u0002YYyy\u0004\u0002ZZz", - "z\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01fa\u0002\u0003\u0003\u0002", - "\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002\u0002\u0007\u0003\u0002", - "\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002\u0002\u000b\u0003\u0002", - "\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002\u0002\u000f\u0003\u0002", - "\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002\u0002\u0013\u0003\u0002", - "\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002\u0002\u0017\u0003\u0002", - "\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002\u0002\u001b\u0003\u0002", - "\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002\u0002\u001f\u0003\u0002", - "\u0002\u0002\u0002!\u0003\u0002\u0002\u0002\u0002#\u0003\u0002\u0002", - "\u0002\u0002%\u0003\u0002\u0002\u0002\u0002\'\u0003\u0002\u0002\u0002", - "\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003\u0002\u0002\u0002\u0002", - "-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002\u0002\u0002\u00021\u0003", - "\u0002\u0002\u0002\u00023\u0003\u0002\u0002\u0002\u00025\u0003\u0002", - "\u0002\u0002\u00027\u0003\u0002\u0002\u0002\u00029\u0003\u0002\u0002", - "\u0002\u0002;\u0003\u0002\u0002\u0002\u0002=\u0003\u0002\u0002\u0002", - "\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003\u0002\u0002\u0002\u0002", - "C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002\u0002\u0002\u0002G\u0003", - "\u0002\u0002\u0002\u0002I\u0003\u0002\u0002\u0002\u0002K\u0003\u0002", - 
"\u0002\u0002\u0002M\u0003\u0002\u0002\u0002\u0002O\u0003\u0002\u0002", - "\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002S\u0003\u0002\u0002\u0002", - "\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003\u0002\u0002\u0002\u0002", - "Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002\u0002\u0002\u0002]\u0003", - "\u0002\u0002\u0002\u0002_\u0003\u0002\u0002\u0002\u0002a\u0003\u0002", - "\u0002\u0002\u0002c\u0003\u0002\u0002\u0002\u0002e\u0003\u0002\u0002", - "\u0002\u0002g\u0003\u0002\u0002\u0002\u0002i\u0003\u0002\u0002\u0002", - "\u0002k\u0003\u0002\u0002\u0002\u0002m\u0003\u0002\u0002\u0002\u0002", - "o\u0003\u0002\u0002\u0002\u0002q\u0003\u0002\u0002\u0002\u0003\u00a9", - "\u0003\u0002\u0002\u0002\u0005\u00ab\u0003\u0002\u0002\u0002\u0007\u00ad", - "\u0003\u0002\u0002\u0002\t\u00b0\u0003\u0002\u0002\u0002\u000b\u00b3", - "\u0003\u0002\u0002\u0002\r\u00b6\u0003\u0002\u0002\u0002\u000f\u00b9", - "\u0003\u0002\u0002\u0002\u0011\u00bc\u0003\u0002\u0002\u0002\u0013\u00bf", - "\u0003\u0002\u0002\u0002\u0015\u00c2\u0003\u0002\u0002\u0002\u0017\u00c5", - "\u0003\u0002\u0002\u0002\u0019\u00c8\u0003\u0002\u0002\u0002\u001b\u00ca", - "\u0003\u0002\u0002\u0002\u001d\u00cc\u0003\u0002\u0002\u0002\u001f\u00ce", - "\u0003\u0002\u0002\u0002!\u00d0\u0003\u0002\u0002\u0002#\u00d2\u0003", - "\u0002\u0002\u0002%\u00d4\u0003\u0002\u0002\u0002\'\u00d6\u0003\u0002", - "\u0002\u0002)\u00d8\u0003\u0002\u0002\u0002+\u00da\u0003\u0002\u0002", - "\u0002-\u00dc\u0003\u0002\u0002\u0002/\u00de\u0003\u0002\u0002\u0002", - "1\u00e0\u0003\u0002\u0002\u00023\u00e2\u0003\u0002\u0002\u00025\u00e4", - "\u0003\u0002\u0002\u00027\u00e6\u0003\u0002\u0002\u00029\u00e8\u0003", - "\u0002\u0002\u0002;\u00ea\u0003\u0002\u0002\u0002=\u00ee\u0003\u0002", - "\u0002\u0002?\u00f2\u0003\u0002\u0002\u0002A\u00f5\u0003\u0002\u0002", - "\u0002C\u00fa\u0003\u0002\u0002\u0002E\u0100\u0003\u0002\u0002\u0002", - "G\u0103\u0003\u0002\u0002\u0002I\u010a\u0003\u0002\u0002\u0002K\u010f", - "\u0003\u0002\u0002\u0002M\u0115\u0003\u0002\u0002\u0002O\u0119\u0003", - "\u0002\u0002\u0002Q\u0121\u0003\u0002\u0002\u0002S\u0126\u0003\u0002", - "\u0002\u0002U\u0129\u0003\u0002\u0002\u0002W\u012f\u0003\u0002\u0002", - "\u0002Y\u0136\u0003\u0002\u0002\u0002[\u013b\u0003\u0002\u0002\u0002", - "]\u016b\u0003\u0002\u0002\u0002_\u016d\u0003\u0002\u0002\u0002a\u016f", - "\u0003\u0002\u0002\u0002c\u017a\u0003\u0002\u0002\u0002e\u0185\u0003", - "\u0002\u0002\u0002g\u0187\u0003\u0002\u0002\u0002i\u0192\u0003\u0002", - "\u0002\u0002k\u01a2\u0003\u0002\u0002\u0002m\u01ac\u0003\u0002\u0002", - "\u0002o\u01bc\u0003\u0002\u0002\u0002q\u01c0\u0003\u0002\u0002\u0002", - "s\u01c7\u0003\u0002\u0002\u0002u\u01c9\u0003\u0002\u0002\u0002w\u01cb", - "\u0003\u0002\u0002\u0002y\u01cd\u0003\u0002\u0002\u0002{\u01cf\u0003", - "\u0002\u0002\u0002}\u01d1\u0003\u0002\u0002\u0002\u007f\u01d3\u0003", - "\u0002\u0002\u0002\u0081\u01d5\u0003\u0002\u0002\u0002\u0083\u01d7\u0003", - "\u0002\u0002\u0002\u0085\u01d9\u0003\u0002\u0002\u0002\u0087\u01db\u0003", - "\u0002\u0002\u0002\u0089\u01dd\u0003\u0002\u0002\u0002\u008b\u01df\u0003", - "\u0002\u0002\u0002\u008d\u01e1\u0003\u0002\u0002\u0002\u008f\u01e3\u0003", - "\u0002\u0002\u0002\u0091\u01e5\u0003\u0002\u0002\u0002\u0093\u01e7\u0003", - "\u0002\u0002\u0002\u0095\u01e9\u0003\u0002\u0002\u0002\u0097\u01eb\u0003", - "\u0002\u0002\u0002\u0099\u01ed\u0003\u0002\u0002\u0002\u009b\u01ef\u0003", - "\u0002\u0002\u0002\u009d\u01f1\u0003\u0002\u0002\u0002\u009f\u01f3\u0003", - "\u0002\u0002\u0002\u00a1\u01f5\u0003\u0002\u0002\u0002\u00a3\u01f7\u0003", - 
"\u0002\u0002\u0002\u00a5\u01f9\u0003\u0002\u0002\u0002\u00a7\u01fb\u0003", - "\u0002\u0002\u0002\u00a9\u00aa\u0007<\u0002\u0002\u00aa\u0004\u0003", - "\u0002\u0002\u0002\u00ab\u00ac\u0007A\u0002\u0002\u00ac\u0006\u0003", - "\u0002\u0002\u0002\u00ad\u00ae\u0007(\u0002\u0002\u00ae\u00af\u0007", - "(\u0002\u0002\u00af\b\u0003\u0002\u0002\u0002\u00b0\u00b1\u0007?\u0002", - "\u0002\u00b1\u00b2\u0007?\u0002\u0002\u00b2\n\u0003\u0002\u0002\u0002", - "\u00b3\u00b4\u0007@\u0002\u0002\u00b4\u00b5\u0007?\u0002\u0002\u00b5", - "\f\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007>\u0002\u0002\u00b7\u00b8", - "\u0007?\u0002\u0002\u00b8\u000e\u0003\u0002\u0002\u0002\u00b9\u00ba", - "\u0007#\u0002\u0002\u00ba\u00bb\u0007?\u0002\u0002\u00bb\u0010\u0003", - "\u0002\u0002\u0002\u00bc\u00bd\u0007>\u0002\u0002\u00bd\u00be\u0007", - "@\u0002\u0002\u00be\u0012\u0003\u0002\u0002\u0002\u00bf\u00c0\u0007", - "~\u0002\u0002\u00c0\u00c1\u0007~\u0002\u0002\u00c1\u0014\u0003\u0002", - "\u0002\u0002\u00c2\u00c3\u0007>\u0002\u0002\u00c3\u00c4\u0007>\u0002", - "\u0002\u00c4\u0016\u0003\u0002\u0002\u0002\u00c5\u00c6\u0007@\u0002", - "\u0002\u00c6\u00c7\u0007@\u0002\u0002\u00c7\u0018\u0003\u0002\u0002", - "\u0002\u00c8\u00c9\u0007(\u0002\u0002\u00c9\u001a\u0003\u0002\u0002", - "\u0002\u00ca\u00cb\u0007?\u0002\u0002\u00cb\u001c\u0003\u0002\u0002", - "\u0002\u00cc\u00cd\u0007+\u0002\u0002\u00cd\u001e\u0003\u0002\u0002", - "\u0002\u00ce\u00cf\u0007.\u0002\u0002\u00cf \u0003\u0002\u0002\u0002", - "\u00d0\u00d1\u00070\u0002\u0002\u00d1\"\u0003\u0002\u0002\u0002\u00d2", - "\u00d3\u00071\u0002\u0002\u00d3$\u0003\u0002\u0002\u0002\u00d4\u00d5", - "\u0007@\u0002\u0002\u00d5&\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", - ">\u0002\u0002\u00d7(\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007/\u0002", - "\u0002\u00d9*\u0003\u0002\u0002\u0002\u00da\u00db\u0007\'\u0002\u0002", - "\u00db,\u0003\u0002\u0002\u0002\u00dc\u00dd\u0007*\u0002\u0002\u00dd", - ".\u0003\u0002\u0002\u0002\u00de\u00df\u0007~\u0002\u0002\u00df0\u0003", - "\u0002\u0002\u0002\u00e0\u00e1\u0007-\u0002\u0002\u00e12\u0003\u0002", - "\u0002\u0002\u00e2\u00e3\u0007=\u0002\u0002\u00e34\u0003\u0002\u0002", - "\u0002\u00e4\u00e5\u0007,\u0002\u0002\u00e56\u0003\u0002\u0002\u0002", - "\u00e6\u00e7\u0007\u0080\u0002\u0002\u00e78\u0003\u0002\u0002\u0002", - "\u00e8\u00e9\u0007a\u0002\u0002\u00e9:\u0003\u0002\u0002\u0002\u00ea", - "\u00eb\u0005u;\u0002\u00eb\u00ec\u0005\u008fH\u0002\u00ec\u00ed\u0005", - "{>\u0002\u00ed<\u0003\u0002\u0002\u0002\u00ee\u00ef\u0005u;\u0002\u00ef", - "\u00f0\u0005\u0099M\u0002\u00f0\u00f1\u0005y=\u0002\u00f1>\u0003\u0002", - "\u0002\u0002\u00f2\u00f3\u0005w<\u0002\u00f3\u00f4\u0005\u00a5S\u0002", - "\u00f4@\u0003\u0002\u0002\u0002\u00f5\u00f6\u0005{>\u0002\u00f6\u00f7", - "\u0005}?\u0002\u00f7\u00f8\u0005\u0099M\u0002\u00f8\u00f9\u0005y=\u0002", - "\u00f9B\u0003\u0002\u0002\u0002\u00fa\u00fb\u0005\u007f@\u0002\u00fb", - "\u00fc\u0005u;\u0002\u00fc\u00fd\u0005\u008bF\u0002\u00fd\u00fe\u0005", - "\u0099M\u0002\u00fe\u00ff\u0005}?\u0002\u00ffD\u0003\u0002\u0002\u0002", - "\u0100\u0101\u0005\u0085C\u0002\u0101\u0102\u0005\u0099M\u0002\u0102", - "F\u0003\u0002\u0002\u0002\u0103\u0104\u0005\u0085C\u0002\u0104\u0105", - "\u0005\u0099M\u0002\u0105\u0106\u0005\u008fH\u0002\u0106\u0107\u0005", - "\u009dO\u0002\u0107\u0108\u0005\u008bF\u0002\u0108\u0109\u0005\u008b", - "F\u0002\u0109H\u0003\u0002\u0002\u0002\u010a\u010b\u0005\u008bF\u0002", - "\u010b\u010c\u0005\u0085C\u0002\u010c\u010d\u0005\u0089E\u0002\u010d", - 
"\u010e\u0005}?\u0002\u010eJ\u0003\u0002\u0002\u0002\u010f\u0110\u0005", - "\u008bF\u0002\u0110\u0111\u0005\u0085C\u0002\u0111\u0112\u0005\u008d", - "G\u0002\u0112\u0113\u0005\u0085C\u0002\u0113\u0114\u0005\u009bN\u0002", - "\u0114L\u0003\u0002\u0002\u0002\u0115\u0116\u0005\u008fH\u0002\u0116", - "\u0117\u0005\u0091I\u0002\u0117\u0118\u0005\u009bN\u0002\u0118N\u0003", - "\u0002\u0002\u0002\u0119\u011a\u0005\u008fH\u0002\u011a\u011b\u0005", - "\u0091I\u0002\u011b\u011c\u0005\u009bN\u0002\u011c\u011d\u0005\u008f", - "H\u0002\u011d\u011e\u0005\u009dO\u0002\u011e\u011f\u0005\u008bF\u0002", - "\u011f\u0120\u0005\u008bF\u0002\u0120P\u0003\u0002\u0002\u0002\u0121", - "\u0122\u0005\u008fH\u0002\u0122\u0123\u0005\u009dO\u0002\u0123\u0124", - "\u0005\u008bF\u0002\u0124\u0125\u0005\u008bF\u0002\u0125R\u0003\u0002", - "\u0002\u0002\u0126\u0127\u0005\u0091I\u0002\u0127\u0128\u0005\u0097", - "L\u0002\u0128T\u0003\u0002\u0002\u0002\u0129\u012a\u0005\u0091I\u0002", - "\u012a\u012b\u0005\u0097L\u0002\u012b\u012c\u0005{>\u0002\u012c\u012d", - "\u0005}?\u0002\u012d\u012e\u0005\u0097L\u0002\u012eV\u0003\u0002\u0002", - "\u0002\u012f\u0130\u0005\u0099M\u0002\u0130\u0131\u0005}?\u0002\u0131", - "\u0132\u0005\u008bF\u0002\u0132\u0133\u0005}?\u0002\u0133\u0134\u0005", - "y=\u0002\u0134\u0135\u0005\u009bN\u0002\u0135X\u0003\u0002\u0002\u0002", - "\u0136\u0137\u0005\u009bN\u0002\u0137\u0138\u0005\u0097L\u0002\u0138", - "\u0139\u0005\u009dO\u0002\u0139\u013a\u0005}?\u0002\u013aZ\u0003\u0002", - "\u0002\u0002\u013b\u013c\u0005\u00a1Q\u0002\u013c\u013d\u0005\u0083", - "B\u0002\u013d\u013e\u0005}?\u0002\u013e\u013f\u0005\u0097L\u0002\u013f", - "\u0140\u0005}?\u0002\u0140\\\u0003\u0002\u0002\u0002\u0141\u0143\u0005", - "s:\u0002\u0142\u0141\u0003\u0002\u0002\u0002\u0143\u0144\u0003\u0002", - "\u0002\u0002\u0144\u0142\u0003\u0002\u0002\u0002\u0144\u0145\u0003\u0002", - "\u0002\u0002\u0145\u014d\u0003\u0002\u0002\u0002\u0146\u014a\u00070", - "\u0002\u0002\u0147\u0149\u0005s:\u0002\u0148\u0147\u0003\u0002\u0002", - "\u0002\u0149\u014c\u0003\u0002\u0002\u0002\u014a\u0148\u0003\u0002\u0002", - "\u0002\u014a\u014b\u0003\u0002\u0002\u0002\u014b\u014e\u0003\u0002\u0002", - "\u0002\u014c\u014a\u0003\u0002\u0002\u0002\u014d\u0146\u0003\u0002\u0002", - "\u0002\u014d\u014e\u0003\u0002\u0002\u0002\u014e\u0158\u0003\u0002\u0002", - "\u0002\u014f\u0151\u0005}?\u0002\u0150\u0152\t\u0002\u0002\u0002\u0151", - "\u0150\u0003\u0002\u0002\u0002\u0151\u0152\u0003\u0002\u0002\u0002\u0152", - "\u0154\u0003\u0002\u0002\u0002\u0153\u0155\u0005s:\u0002\u0154\u0153", - "\u0003\u0002\u0002\u0002\u0155\u0156\u0003\u0002\u0002\u0002\u0156\u0154", - "\u0003\u0002\u0002\u0002\u0156\u0157\u0003\u0002\u0002\u0002\u0157\u0159", - "\u0003\u0002\u0002\u0002\u0158\u014f\u0003\u0002\u0002\u0002\u0158\u0159", - "\u0003\u0002\u0002\u0002\u0159\u016c\u0003\u0002\u0002\u0002\u015a\u015c", - "\u00070\u0002\u0002\u015b\u015d\u0005s:\u0002\u015c\u015b\u0003\u0002", - "\u0002\u0002\u015d\u015e\u0003\u0002\u0002\u0002\u015e\u015c\u0003\u0002", - "\u0002\u0002\u015e\u015f\u0003\u0002\u0002\u0002\u015f\u0169\u0003\u0002", - "\u0002\u0002\u0160\u0162\u0005}?\u0002\u0161\u0163\t\u0002\u0002\u0002", - "\u0162\u0161\u0003\u0002\u0002\u0002\u0162\u0163\u0003\u0002\u0002\u0002", - "\u0163\u0165\u0003\u0002\u0002\u0002\u0164\u0166\u0005s:\u0002\u0165", - "\u0164\u0003\u0002\u0002\u0002\u0166\u0167\u0003\u0002\u0002\u0002\u0167", - "\u0165\u0003\u0002\u0002\u0002\u0167\u0168\u0003\u0002\u0002\u0002\u0168", - 
"\u016a\u0003\u0002\u0002\u0002\u0169\u0160\u0003\u0002\u0002\u0002\u0169", - "\u016a\u0003\u0002\u0002\u0002\u016a\u016c\u0003\u0002\u0002\u0002\u016b", - "\u0142\u0003\u0002\u0002\u0002\u016b\u015a\u0003\u0002\u0002\u0002\u016c", - "^\u0003\u0002\u0002\u0002\u016d\u016e\u0005a1\u0002\u016e`\u0003\u0002", - "\u0002\u0002\u016f\u0175\u0007$\u0002\u0002\u0170\u0171\u0007^\u0002", - "\u0002\u0171\u0174\u0007$\u0002\u0002\u0172\u0174\n\u0003\u0002\u0002", - "\u0173\u0170\u0003\u0002\u0002\u0002\u0173\u0172\u0003\u0002\u0002\u0002", - "\u0174\u0177\u0003\u0002\u0002\u0002\u0175\u0173\u0003\u0002\u0002\u0002", - "\u0175\u0176\u0003\u0002\u0002\u0002\u0176\u0178\u0003\u0002\u0002\u0002", - "\u0177\u0175\u0003\u0002\u0002\u0002\u0178\u0179\u0007$\u0002\u0002", - "\u0179b\u0003\u0002\u0002\u0002\u017a\u0180\u0007$\u0002\u0002\u017b", - "\u017c\u0007$\u0002\u0002\u017c\u017f\u0007$\u0002\u0002\u017d\u017f", - "\n\u0003\u0002\u0002\u017e\u017b\u0003\u0002\u0002\u0002\u017e\u017d", - "\u0003\u0002\u0002\u0002\u017f\u0182\u0003\u0002\u0002\u0002\u0180\u017e", - "\u0003\u0002\u0002\u0002\u0180\u0181\u0003\u0002\u0002\u0002\u0181\u0183", - "\u0003\u0002\u0002\u0002\u0182\u0180\u0003\u0002\u0002\u0002\u0183\u0184", - "\u0007$\u0002\u0002\u0184d\u0003\u0002\u0002\u0002\u0185\u0186\u0005", - "g4\u0002\u0186f\u0003\u0002\u0002\u0002\u0187\u018d\u0007)\u0002\u0002", - "\u0188\u0189\u0007^\u0002\u0002\u0189\u018c\u0007)\u0002\u0002\u018a", - "\u018c\n\u0004\u0002\u0002\u018b\u0188\u0003\u0002\u0002\u0002\u018b", - "\u018a\u0003\u0002\u0002\u0002\u018c\u018f\u0003\u0002\u0002\u0002\u018d", - "\u018b\u0003\u0002\u0002\u0002\u018d\u018e\u0003\u0002\u0002\u0002\u018e", - "\u0190\u0003\u0002\u0002\u0002\u018f\u018d\u0003\u0002\u0002\u0002\u0190", - "\u0191\u0007)\u0002\u0002\u0191h\u0003\u0002\u0002\u0002\u0192\u0198", - "\u0007)\u0002\u0002\u0193\u0194\u0007)\u0002\u0002\u0194\u0197\u0007", - ")\u0002\u0002\u0195\u0197\n\u0004\u0002\u0002\u0196\u0193\u0003\u0002", - "\u0002\u0002\u0196\u0195\u0003\u0002\u0002\u0002\u0197\u019a\u0003\u0002", - "\u0002\u0002\u0198\u0196\u0003\u0002\u0002\u0002\u0198\u0199\u0003\u0002", - "\u0002\u0002\u0199\u019b\u0003\u0002\u0002\u0002\u019a\u0198\u0003\u0002", - "\u0002\u0002\u019b\u019c\u0007)\u0002\u0002\u019cj\u0003\u0002\u0002", - "\u0002\u019d\u019e\u0007/\u0002\u0002\u019e\u01a3\u0007/\u0002\u0002", - "\u019f\u01a0\u00071\u0002\u0002\u01a0\u01a3\u00071\u0002\u0002\u01a1", - "\u01a3\u0007%\u0002\u0002\u01a2\u019d\u0003\u0002\u0002\u0002\u01a2", - "\u019f\u0003\u0002\u0002\u0002\u01a2\u01a1\u0003\u0002\u0002\u0002\u01a3", - "\u01a7\u0003\u0002\u0002\u0002\u01a4\u01a6\n\u0005\u0002\u0002\u01a5", - "\u01a4\u0003\u0002\u0002\u0002\u01a6\u01a9\u0003\u0002\u0002\u0002\u01a7", - "\u01a5\u0003\u0002\u0002\u0002\u01a7\u01a8\u0003\u0002\u0002\u0002\u01a8", - "\u01aa\u0003\u0002\u0002\u0002\u01a9\u01a7\u0003\u0002\u0002\u0002\u01aa", - "\u01ab\b6\u0002\u0002\u01abl\u0003\u0002\u0002\u0002\u01ac\u01ad\u0007", - "1\u0002\u0002\u01ad\u01ae\u0007,\u0002\u0002\u01ae\u01b2\u0003\u0002", - "\u0002\u0002\u01af\u01b1\u000b\u0002\u0002\u0002\u01b0\u01af\u0003\u0002", - "\u0002\u0002\u01b1\u01b4\u0003\u0002\u0002\u0002\u01b2\u01b3\u0003\u0002", - "\u0002\u0002\u01b2\u01b0\u0003\u0002\u0002\u0002\u01b3\u01b8\u0003\u0002", - "\u0002\u0002\u01b4\u01b2\u0003\u0002\u0002\u0002\u01b5\u01b6\u0007,", - "\u0002\u0002\u01b6\u01b9\u00071\u0002\u0002\u01b7\u01b9\u0007\u0002", - "\u0002\u0003\u01b8\u01b5\u0003\u0002\u0002\u0002\u01b8\u01b7\u0003\u0002", - 
"\u0002\u0002\u01b9\u01ba\u0003\u0002\u0002\u0002\u01ba\u01bb\b7\u0002", - "\u0002\u01bbn\u0003\u0002\u0002\u0002\u01bc\u01bd\t\u0006\u0002\u0002", - "\u01bd\u01be\u0003\u0002\u0002\u0002\u01be\u01bf\b8\u0002\u0002\u01bf", - "p\u0003\u0002\u0002\u0002\u01c0\u01c4\t\u0007\u0002\u0002\u01c1\u01c3", - "\t\b\u0002\u0002\u01c2\u01c1\u0003\u0002\u0002\u0002\u01c3\u01c6\u0003", - "\u0002\u0002\u0002\u01c4\u01c2\u0003\u0002\u0002\u0002\u01c4\u01c5\u0003", - "\u0002\u0002\u0002\u01c5r\u0003\u0002\u0002\u0002\u01c6\u01c4\u0003", - "\u0002\u0002\u0002\u01c7\u01c8\t\t\u0002\u0002\u01c8t\u0003\u0002\u0002", - "\u0002\u01c9\u01ca\t\n\u0002\u0002\u01cav\u0003\u0002\u0002\u0002\u01cb", - "\u01cc\t\u000b\u0002\u0002\u01ccx\u0003\u0002\u0002\u0002\u01cd\u01ce", - "\t\f\u0002\u0002\u01cez\u0003\u0002\u0002\u0002\u01cf\u01d0\t\r\u0002", - "\u0002\u01d0|\u0003\u0002\u0002\u0002\u01d1\u01d2\t\u000e\u0002\u0002", - "\u01d2~\u0003\u0002\u0002\u0002\u01d3\u01d4\t\u000f\u0002\u0002\u01d4", - "\u0080\u0003\u0002\u0002\u0002\u01d5\u01d6\t\u0010\u0002\u0002\u01d6", - "\u0082\u0003\u0002\u0002\u0002\u01d7\u01d8\t\u0011\u0002\u0002\u01d8", - "\u0084\u0003\u0002\u0002\u0002\u01d9\u01da\t\u0012\u0002\u0002\u01da", - "\u0086\u0003\u0002\u0002\u0002\u01db\u01dc\t\u0013\u0002\u0002\u01dc", - "\u0088\u0003\u0002\u0002\u0002\u01dd\u01de\t\u0014\u0002\u0002\u01de", - "\u008a\u0003\u0002\u0002\u0002\u01df\u01e0\t\u0015\u0002\u0002\u01e0", - "\u008c\u0003\u0002\u0002\u0002\u01e1\u01e2\t\u0016\u0002\u0002\u01e2", - "\u008e\u0003\u0002\u0002\u0002\u01e3\u01e4\t\u0017\u0002\u0002\u01e4", - "\u0090\u0003\u0002\u0002\u0002\u01e5\u01e6\t\u0018\u0002\u0002\u01e6", - "\u0092\u0003\u0002\u0002\u0002\u01e7\u01e8\t\u0019\u0002\u0002\u01e8", - "\u0094\u0003\u0002\u0002\u0002\u01e9\u01ea\t\u001a\u0002\u0002\u01ea", - "\u0096\u0003\u0002\u0002\u0002\u01eb\u01ec\t\u001b\u0002\u0002\u01ec", - "\u0098\u0003\u0002\u0002\u0002\u01ed\u01ee\t\u001c\u0002\u0002\u01ee", - "\u009a\u0003\u0002\u0002\u0002\u01ef\u01f0\t\u001d\u0002\u0002\u01f0", - "\u009c\u0003\u0002\u0002\u0002\u01f1\u01f2\t\u001e\u0002\u0002\u01f2", - "\u009e\u0003\u0002\u0002\u0002\u01f3\u01f4\t\u001f\u0002\u0002\u01f4", - "\u00a0\u0003\u0002\u0002\u0002\u01f5\u01f6\t \u0002\u0002\u01f6\u00a2", - "\u0003\u0002\u0002\u0002\u01f7\u01f8\t!\u0002\u0002\u01f8\u00a4\u0003", - "\u0002\u0002\u0002\u01f9\u01fa\t\"\u0002\u0002\u01fa\u00a6\u0003\u0002", - "\u0002\u0002\u01fb\u01fc\t#\u0002\u0002\u01fc\u00a8\u0003\u0002\u0002", - "\u0002\u001b\u0002\u0144\u014a\u014d\u0151\u0156\u0158\u015e\u0162\u0167", - "\u0169\u016b\u0173\u0175\u017e\u0180\u018b\u018d\u0196\u0198\u01a2\u01a7", - "\u01b2\u01b8\u01c4\u0003\u0002\u0003\u0002"].join(""); + "\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003", + " \u0003 \u0003 \u0003!\u0003!\u0003!\u0003!\u0003!\u0003!\u0003!\u0003", + "\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003#\u0003#\u0003#\u0003#\u0003", + "$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003%\u0003%\u0003", + "%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'", + "\u0003\'\u0003(\u0006(\u0110\n(\r(\u000e(\u0111\u0003(\u0003(\u0007", + "(\u0116\n(\f(\u000e(\u0119\u000b(\u0005(\u011b\n(\u0003(\u0003(\u0005", + "(\u011f\n(\u0003(\u0006(\u0122\n(\r(\u000e(\u0123\u0005(\u0126\n(\u0003", + "(\u0003(\u0006(\u012a\n(\r(\u000e(\u012b\u0003(\u0003(\u0005(\u0130", + "\n(\u0003(\u0006(\u0133\n(\r(\u000e(\u0134\u0005(\u0137\n(\u0005(\u0139", + "\n(\u0003)\u0003)\u0003*\u0003*\u0003*\u0003*\u0007*\u0141\n*\f*\u000e", + 
"*\u0144\u000b*\u0003*\u0003*\u0003+\u0003+\u0003+\u0003+\u0007+\u014c", + "\n+\f+\u000e+\u014f\u000b+\u0003+\u0003+\u0003,\u0003,\u0003-\u0003", + "-\u0003-\u0003-\u0007-\u0159\n-\f-\u000e-\u015c\u000b-\u0003-\u0003", + "-\u0003.\u0003.\u0003.\u0003.\u0007.\u0164\n.\f.\u000e.\u0167\u000b", + ".\u0003.\u0003.\u0003/\u0003/\u0003/\u0003/\u0003/\u0005/\u0170\n/\u0003", + "/\u0007/\u0173\n/\f/\u000e/\u0176\u000b/\u0003/\u0003/\u00030\u0003", + "0\u00030\u00030\u00070\u017e\n0\f0\u000e0\u0181\u000b0\u00030\u0003", + "0\u00030\u00050\u0186\n0\u00030\u00030\u00031\u00031\u00031\u00031\u0003", + "2\u00032\u00072\u0190\n2\f2\u000e2\u0193\u000b2\u00033\u00033\u0003", + "4\u00034\u00035\u00035\u00036\u00036\u00037\u00037\u00038\u00038\u0003", + "9\u00039\u0003:\u0003:\u0003;\u0003;\u0003<\u0003<\u0003=\u0003=\u0003", + ">\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003B\u0003B\u0003", + "C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003G\u0003G\u0003", + "H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003L\u0003L\u0003", + "M\u0003M\u0003\u017f\u0002N\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006", + "\u000b\u0007\r\b\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e", + "\u001b\u000f\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016", + "+\u0017-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!", + "A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]0_1a2c3e\u0002g\u0002i\u0002k\u0002m", + "\u0002o\u0002q\u0002s\u0002u\u0002w\u0002y\u0002{\u0002}\u0002\u007f", + "\u0002\u0081\u0002\u0083\u0002\u0085\u0002\u0087\u0002\u0089\u0002\u008b", + "\u0002\u008d\u0002\u008f\u0002\u0091\u0002\u0093\u0002\u0095\u0002\u0097", + "\u0002\u0099\u0002\u0003\u0002$\u0004\u0002--//\u0003\u0002$$\u0003", + "\u0002))\u0004\u0002\f\f\u000f\u000f\u0005\u0002\u000b\r\u000f\u000f", + "\"\"\u0005\u0002C\\aac|\u0006\u00022;C\\aac|\u0003\u00022;\u0004\u0002", + "CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGg", + "g\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004", + "\u0002LLll\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002", + "PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTt", + "t\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004", + "\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01c7", + "\u0002\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002", + "\u0002\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002", + "\u0002\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002", + "\u0002\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002", + "\u0002\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002", + "\u0002\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002", + "\u0002\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002", + "\u0002\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002\u0002\u0002", + "\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002\u0002\u0002", + "\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003", + "\u0002\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002", + "\u0002\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003\u0002\u0002", + "\u0002\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002\u0002\u0002", + "\u00029\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002\u0002\u0002", + "=\u0003\u0002\u0002\u0002\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003", + "\u0002\u0002\u0002\u0002C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002", + 
"\u0002\u0002\u0002G\u0003\u0002\u0002\u0002\u0002I\u0003\u0002\u0002", + "\u0002\u0002K\u0003\u0002\u0002\u0002\u0002M\u0003\u0002\u0002\u0002", + "\u0002O\u0003\u0002\u0002\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002", + "S\u0003\u0002\u0002\u0002\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003", + "\u0002\u0002\u0002\u0002Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002", + "\u0002\u0002\u0002]\u0003\u0002\u0002\u0002\u0002_\u0003\u0002\u0002", + "\u0002\u0002a\u0003\u0002\u0002\u0002\u0002c\u0003\u0002\u0002\u0002", + "\u0003\u009b\u0003\u0002\u0002\u0002\u0005\u009e\u0003\u0002\u0002\u0002", + "\u0007\u00a1\u0003\u0002\u0002\u0002\t\u00a4\u0003\u0002\u0002\u0002", + "\u000b\u00a7\u0003\u0002\u0002\u0002\r\u00aa\u0003\u0002\u0002\u0002", + "\u000f\u00ad\u0003\u0002\u0002\u0002\u0011\u00b0\u0003\u0002\u0002\u0002", + "\u0013\u00b3\u0003\u0002\u0002\u0002\u0015\u00b6\u0003\u0002\u0002\u0002", + "\u0017\u00b8\u0003\u0002\u0002\u0002\u0019\u00ba\u0003\u0002\u0002\u0002", + "\u001b\u00bc\u0003\u0002\u0002\u0002\u001d\u00be\u0003\u0002\u0002\u0002", + "\u001f\u00c0\u0003\u0002\u0002\u0002!\u00c2\u0003\u0002\u0002\u0002", + "#\u00c4\u0003\u0002\u0002\u0002%\u00c6\u0003\u0002\u0002\u0002\'\u00c8", + "\u0003\u0002\u0002\u0002)\u00ca\u0003\u0002\u0002\u0002+\u00cc\u0003", + "\u0002\u0002\u0002-\u00ce\u0003\u0002\u0002\u0002/\u00d0\u0003\u0002", + "\u0002\u00021\u00d2\u0003\u0002\u0002\u00023\u00d4\u0003\u0002\u0002", + "\u00025\u00d6\u0003\u0002\u0002\u00027\u00d8\u0003\u0002\u0002\u0002", + "9\u00da\u0003\u0002\u0002\u0002;\u00dc\u0003\u0002\u0002\u0002=\u00e0", + "\u0003\u0002\u0002\u0002?\u00e6\u0003\u0002\u0002\u0002A\u00e9\u0003", + "\u0002\u0002\u0002C\u00f0\u0003\u0002\u0002\u0002E\u00f5\u0003\u0002", + "\u0002\u0002G\u00f9\u0003\u0002\u0002\u0002I\u0101\u0003\u0002\u0002", + "\u0002K\u0106\u0003\u0002\u0002\u0002M\u0109\u0003\u0002\u0002\u0002", + "O\u0138\u0003\u0002\u0002\u0002Q\u013a\u0003\u0002\u0002\u0002S\u013c", + "\u0003\u0002\u0002\u0002U\u0147\u0003\u0002\u0002\u0002W\u0152\u0003", + "\u0002\u0002\u0002Y\u0154\u0003\u0002\u0002\u0002[\u015f\u0003\u0002", + "\u0002\u0002]\u016f\u0003\u0002\u0002\u0002_\u0179\u0003\u0002\u0002", + "\u0002a\u0189\u0003\u0002\u0002\u0002c\u018d\u0003\u0002\u0002\u0002", + "e\u0194\u0003\u0002\u0002\u0002g\u0196\u0003\u0002\u0002\u0002i\u0198", + "\u0003\u0002\u0002\u0002k\u019a\u0003\u0002\u0002\u0002m\u019c\u0003", + "\u0002\u0002\u0002o\u019e\u0003\u0002\u0002\u0002q\u01a0\u0003\u0002", + "\u0002\u0002s\u01a2\u0003\u0002\u0002\u0002u\u01a4\u0003\u0002\u0002", + "\u0002w\u01a6\u0003\u0002\u0002\u0002y\u01a8\u0003\u0002\u0002\u0002", + "{\u01aa\u0003\u0002\u0002\u0002}\u01ac\u0003\u0002\u0002\u0002\u007f", + "\u01ae\u0003\u0002\u0002\u0002\u0081\u01b0\u0003\u0002\u0002\u0002\u0083", + "\u01b2\u0003\u0002\u0002\u0002\u0085\u01b4\u0003\u0002\u0002\u0002\u0087", + "\u01b6\u0003\u0002\u0002\u0002\u0089\u01b8\u0003\u0002\u0002\u0002\u008b", + "\u01ba\u0003\u0002\u0002\u0002\u008d\u01bc\u0003\u0002\u0002\u0002\u008f", + "\u01be\u0003\u0002\u0002\u0002\u0091\u01c0\u0003\u0002\u0002\u0002\u0093", + "\u01c2\u0003\u0002\u0002\u0002\u0095\u01c4\u0003\u0002\u0002\u0002\u0097", + "\u01c6\u0003\u0002\u0002\u0002\u0099\u01c8\u0003\u0002\u0002\u0002\u009b", + "\u009c\u0007(\u0002\u0002\u009c\u009d\u0007(\u0002\u0002\u009d\u0004", + "\u0003\u0002\u0002\u0002\u009e\u009f\u0007?\u0002\u0002\u009f\u00a0", + "\u0007?\u0002\u0002\u00a0\u0006\u0003\u0002\u0002\u0002\u00a1\u00a2", + "\u0007@\u0002\u0002\u00a2\u00a3\u0007?\u0002\u0002\u00a3\b\u0003\u0002", + 
"\u0002\u0002\u00a4\u00a5\u0007>\u0002\u0002\u00a5\u00a6\u0007?\u0002", + "\u0002\u00a6\n\u0003\u0002\u0002\u0002\u00a7\u00a8\u0007#\u0002\u0002", + "\u00a8\u00a9\u0007?\u0002\u0002\u00a9\f\u0003\u0002\u0002\u0002\u00aa", + "\u00ab\u0007>\u0002\u0002\u00ab\u00ac\u0007@\u0002\u0002\u00ac\u000e", + "\u0003\u0002\u0002\u0002\u00ad\u00ae\u0007~\u0002\u0002\u00ae\u00af", + "\u0007~\u0002\u0002\u00af\u0010\u0003\u0002\u0002\u0002\u00b0\u00b1", + "\u0007>\u0002\u0002\u00b1\u00b2\u0007>\u0002\u0002\u00b2\u0012\u0003", + "\u0002\u0002\u0002\u00b3\u00b4\u0007@\u0002\u0002\u00b4\u00b5\u0007", + "@\u0002\u0002\u00b5\u0014\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007", + "(\u0002\u0002\u00b7\u0016\u0003\u0002\u0002\u0002\u00b8\u00b9\u0007", + "?\u0002\u0002\u00b9\u0018\u0003\u0002\u0002\u0002\u00ba\u00bb\u0007", + "+\u0002\u0002\u00bb\u001a\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007", + "<\u0002\u0002\u00bd\u001c\u0003\u0002\u0002\u0002\u00be\u00bf\u0007", + ".\u0002\u0002\u00bf\u001e\u0003\u0002\u0002\u0002\u00c0\u00c1\u0007", + "0\u0002\u0002\u00c1 \u0003\u0002\u0002\u0002\u00c2\u00c3\u00071\u0002", + "\u0002\u00c3\"\u0003\u0002\u0002\u0002\u00c4\u00c5\u0007@\u0002\u0002", + "\u00c5$\u0003\u0002\u0002\u0002\u00c6\u00c7\u0007>\u0002\u0002\u00c7", + "&\u0003\u0002\u0002\u0002\u00c8\u00c9\u0007/\u0002\u0002\u00c9(\u0003", + "\u0002\u0002\u0002\u00ca\u00cb\u0007\'\u0002\u0002\u00cb*\u0003\u0002", + "\u0002\u0002\u00cc\u00cd\u0007*\u0002\u0002\u00cd,\u0003\u0002\u0002", + "\u0002\u00ce\u00cf\u0007~\u0002\u0002\u00cf.\u0003\u0002\u0002\u0002", + "\u00d0\u00d1\u0007-\u0002\u0002\u00d10\u0003\u0002\u0002\u0002\u00d2", + "\u00d3\u0007A\u0002\u0002\u00d32\u0003\u0002\u0002\u0002\u00d4\u00d5", + "\u0007=\u0002\u0002\u00d54\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", + ",\u0002\u0002\u00d76\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007\u0080", + "\u0002\u0002\u00d98\u0003\u0002\u0002\u0002\u00da\u00db\u0007a\u0002", + "\u0002\u00db:\u0003\u0002\u0002\u0002\u00dc\u00dd\u0005g4\u0002\u00dd", + "\u00de\u0005\u0081A\u0002\u00de\u00df\u0005m7\u0002\u00df<\u0003\u0002", + "\u0002\u0002\u00e0\u00e1\u0005q9\u0002\u00e1\u00e2\u0005g4\u0002\u00e2", + "\u00e3\u0005}?\u0002\u00e3\u00e4\u0005\u008bF\u0002\u00e4\u00e5\u0005", + "o8\u0002\u00e5>\u0003\u0002\u0002\u0002\u00e6\u00e7\u0005w<\u0002\u00e7", + "\u00e8\u0005\u008bF\u0002\u00e8@\u0003\u0002\u0002\u0002\u00e9\u00ea", + "\u0005w<\u0002\u00ea\u00eb\u0005\u008bF\u0002\u00eb\u00ec\u0005\u0081", + "A\u0002\u00ec\u00ed\u0005\u008fH\u0002\u00ed\u00ee\u0005}?\u0002\u00ee", + "\u00ef\u0005}?\u0002\u00efB\u0003\u0002\u0002\u0002\u00f0\u00f1\u0005", + "}?\u0002\u00f1\u00f2\u0005w<\u0002\u00f2\u00f3\u0005{>\u0002\u00f3\u00f4", + "\u0005o8\u0002\u00f4D\u0003\u0002\u0002\u0002\u00f5\u00f6\u0005\u0081", + "A\u0002\u00f6\u00f7\u0005\u0083B\u0002\u00f7\u00f8\u0005\u008dG\u0002", + "\u00f8F\u0003\u0002\u0002\u0002\u00f9\u00fa\u0005\u0081A\u0002\u00fa", + "\u00fb\u0005\u0083B\u0002\u00fb\u00fc\u0005\u008dG\u0002\u00fc\u00fd", + "\u0005\u0081A\u0002\u00fd\u00fe\u0005\u008fH\u0002\u00fe\u00ff\u0005", + "}?\u0002\u00ff\u0100\u0005}?\u0002\u0100H\u0003\u0002\u0002\u0002\u0101", + "\u0102\u0005\u0081A\u0002\u0102\u0103\u0005\u008fH\u0002\u0103\u0104", + "\u0005}?\u0002\u0104\u0105\u0005}?\u0002\u0105J\u0003\u0002\u0002\u0002", + "\u0106\u0107\u0005\u0083B\u0002\u0107\u0108\u0005\u0089E\u0002\u0108", + "L\u0003\u0002\u0002\u0002\u0109\u010a\u0005\u008dG\u0002\u010a\u010b", + "\u0005\u0089E\u0002\u010b\u010c\u0005\u008fH\u0002\u010c\u010d\u0005", + 
"o8\u0002\u010dN\u0003\u0002\u0002\u0002\u010e\u0110\u0005e3\u0002\u010f", + "\u010e\u0003\u0002\u0002\u0002\u0110\u0111\u0003\u0002\u0002\u0002\u0111", + "\u010f\u0003\u0002\u0002\u0002\u0111\u0112\u0003\u0002\u0002\u0002\u0112", + "\u011a\u0003\u0002\u0002\u0002\u0113\u0117\u00070\u0002\u0002\u0114", + "\u0116\u0005e3\u0002\u0115\u0114\u0003\u0002\u0002\u0002\u0116\u0119", + "\u0003\u0002\u0002\u0002\u0117\u0115\u0003\u0002\u0002\u0002\u0117\u0118", + "\u0003\u0002\u0002\u0002\u0118\u011b\u0003\u0002\u0002\u0002\u0119\u0117", + "\u0003\u0002\u0002\u0002\u011a\u0113\u0003\u0002\u0002\u0002\u011a\u011b", + "\u0003\u0002\u0002\u0002\u011b\u0125\u0003\u0002\u0002\u0002\u011c\u011e", + "\u0005o8\u0002\u011d\u011f\t\u0002\u0002\u0002\u011e\u011d\u0003\u0002", + "\u0002\u0002\u011e\u011f\u0003\u0002\u0002\u0002\u011f\u0121\u0003\u0002", + "\u0002\u0002\u0120\u0122\u0005e3\u0002\u0121\u0120\u0003\u0002\u0002", + "\u0002\u0122\u0123\u0003\u0002\u0002\u0002\u0123\u0121\u0003\u0002\u0002", + "\u0002\u0123\u0124\u0003\u0002\u0002\u0002\u0124\u0126\u0003\u0002\u0002", + "\u0002\u0125\u011c\u0003\u0002\u0002\u0002\u0125\u0126\u0003\u0002\u0002", + "\u0002\u0126\u0139\u0003\u0002\u0002\u0002\u0127\u0129\u00070\u0002", + "\u0002\u0128\u012a\u0005e3\u0002\u0129\u0128\u0003\u0002\u0002\u0002", + "\u012a\u012b\u0003\u0002\u0002\u0002\u012b\u0129\u0003\u0002\u0002\u0002", + "\u012b\u012c\u0003\u0002\u0002\u0002\u012c\u0136\u0003\u0002\u0002\u0002", + "\u012d\u012f\u0005o8\u0002\u012e\u0130\t\u0002\u0002\u0002\u012f\u012e", + "\u0003\u0002\u0002\u0002\u012f\u0130\u0003\u0002\u0002\u0002\u0130\u0132", + "\u0003\u0002\u0002\u0002\u0131\u0133\u0005e3\u0002\u0132\u0131\u0003", + "\u0002\u0002\u0002\u0133\u0134\u0003\u0002\u0002\u0002\u0134\u0132\u0003", + "\u0002\u0002\u0002\u0134\u0135\u0003\u0002\u0002\u0002\u0135\u0137\u0003", + "\u0002\u0002\u0002\u0136\u012d\u0003\u0002\u0002\u0002\u0136\u0137\u0003", + "\u0002\u0002\u0002\u0137\u0139\u0003\u0002\u0002\u0002\u0138\u010f\u0003", + "\u0002\u0002\u0002\u0138\u0127\u0003\u0002\u0002\u0002\u0139P\u0003", + "\u0002\u0002\u0002\u013a\u013b\u0005S*\u0002\u013bR\u0003\u0002\u0002", + "\u0002\u013c\u0142\u0007$\u0002\u0002\u013d\u013e\u0007^\u0002\u0002", + "\u013e\u0141\u0007$\u0002\u0002\u013f\u0141\n\u0003\u0002\u0002\u0140", + "\u013d\u0003\u0002\u0002\u0002\u0140\u013f\u0003\u0002\u0002\u0002\u0141", + "\u0144\u0003\u0002\u0002\u0002\u0142\u0140\u0003\u0002\u0002\u0002\u0142", + "\u0143\u0003\u0002\u0002\u0002\u0143\u0145\u0003\u0002\u0002\u0002\u0144", + "\u0142\u0003\u0002\u0002\u0002\u0145\u0146\u0007$\u0002\u0002\u0146", + "T\u0003\u0002\u0002\u0002\u0147\u014d\u0007$\u0002\u0002\u0148\u0149", + "\u0007$\u0002\u0002\u0149\u014c\u0007$\u0002\u0002\u014a\u014c\n\u0003", + "\u0002\u0002\u014b\u0148\u0003\u0002\u0002\u0002\u014b\u014a\u0003\u0002", + "\u0002\u0002\u014c\u014f\u0003\u0002\u0002\u0002\u014d\u014b\u0003\u0002", + "\u0002\u0002\u014d\u014e\u0003\u0002\u0002\u0002\u014e\u0150\u0003\u0002", + "\u0002\u0002\u014f\u014d\u0003\u0002\u0002\u0002\u0150\u0151\u0007$", + "\u0002\u0002\u0151V\u0003\u0002\u0002\u0002\u0152\u0153\u0005Y-\u0002", + "\u0153X\u0003\u0002\u0002\u0002\u0154\u015a\u0007)\u0002\u0002\u0155", + "\u0156\u0007^\u0002\u0002\u0156\u0159\u0007)\u0002\u0002\u0157\u0159", + "\n\u0004\u0002\u0002\u0158\u0155\u0003\u0002\u0002\u0002\u0158\u0157", + "\u0003\u0002\u0002\u0002\u0159\u015c\u0003\u0002\u0002\u0002\u015a\u0158", + "\u0003\u0002\u0002\u0002\u015a\u015b\u0003\u0002\u0002\u0002\u015b\u015d", + 
"\u0003\u0002\u0002\u0002\u015c\u015a\u0003\u0002\u0002\u0002\u015d\u015e", + "\u0007)\u0002\u0002\u015eZ\u0003\u0002\u0002\u0002\u015f\u0165\u0007", + ")\u0002\u0002\u0160\u0161\u0007)\u0002\u0002\u0161\u0164\u0007)\u0002", + "\u0002\u0162\u0164\n\u0004\u0002\u0002\u0163\u0160\u0003\u0002\u0002", + "\u0002\u0163\u0162\u0003\u0002\u0002\u0002\u0164\u0167\u0003\u0002\u0002", + "\u0002\u0165\u0163\u0003\u0002\u0002\u0002\u0165\u0166\u0003\u0002\u0002", + "\u0002\u0166\u0168\u0003\u0002\u0002\u0002\u0167\u0165\u0003\u0002\u0002", + "\u0002\u0168\u0169\u0007)\u0002\u0002\u0169\\\u0003\u0002\u0002\u0002", + "\u016a\u016b\u0007/\u0002\u0002\u016b\u0170\u0007/\u0002\u0002\u016c", + "\u016d\u00071\u0002\u0002\u016d\u0170\u00071\u0002\u0002\u016e\u0170", + "\u0007%\u0002\u0002\u016f\u016a\u0003\u0002\u0002\u0002\u016f\u016c", + "\u0003\u0002\u0002\u0002\u016f\u016e\u0003\u0002\u0002\u0002\u0170\u0174", + "\u0003\u0002\u0002\u0002\u0171\u0173\n\u0005\u0002\u0002\u0172\u0171", + "\u0003\u0002\u0002\u0002\u0173\u0176\u0003\u0002\u0002\u0002\u0174\u0172", + "\u0003\u0002\u0002\u0002\u0174\u0175\u0003\u0002\u0002\u0002\u0175\u0177", + "\u0003\u0002\u0002\u0002\u0176\u0174\u0003\u0002\u0002\u0002\u0177\u0178", + "\b/\u0002\u0002\u0178^\u0003\u0002\u0002\u0002\u0179\u017a\u00071\u0002", + "\u0002\u017a\u017b\u0007,\u0002\u0002\u017b\u017f\u0003\u0002\u0002", + "\u0002\u017c\u017e\u000b\u0002\u0002\u0002\u017d\u017c\u0003\u0002\u0002", + "\u0002\u017e\u0181\u0003\u0002\u0002\u0002\u017f\u0180\u0003\u0002\u0002", + "\u0002\u017f\u017d\u0003\u0002\u0002\u0002\u0180\u0185\u0003\u0002\u0002", + "\u0002\u0181\u017f\u0003\u0002\u0002\u0002\u0182\u0183\u0007,\u0002", + "\u0002\u0183\u0186\u00071\u0002\u0002\u0184\u0186\u0007\u0002\u0002", + "\u0003\u0185\u0182\u0003\u0002\u0002\u0002\u0185\u0184\u0003\u0002\u0002", + "\u0002\u0186\u0187\u0003\u0002\u0002\u0002\u0187\u0188\b0\u0002\u0002", + "\u0188`\u0003\u0002\u0002\u0002\u0189\u018a\t\u0006\u0002\u0002\u018a", + "\u018b\u0003\u0002\u0002\u0002\u018b\u018c\b1\u0002\u0002\u018cb\u0003", + "\u0002\u0002\u0002\u018d\u0191\t\u0007\u0002\u0002\u018e\u0190\t\b\u0002", + "\u0002\u018f\u018e\u0003\u0002\u0002\u0002\u0190\u0193\u0003\u0002\u0002", + "\u0002\u0191\u018f\u0003\u0002\u0002\u0002\u0191\u0192\u0003\u0002\u0002", + "\u0002\u0192d\u0003\u0002\u0002\u0002\u0193\u0191\u0003\u0002\u0002", + "\u0002\u0194\u0195\t\t\u0002\u0002\u0195f\u0003\u0002\u0002\u0002\u0196", + "\u0197\t\n\u0002\u0002\u0197h\u0003\u0002\u0002\u0002\u0198\u0199\t", + "\u000b\u0002\u0002\u0199j\u0003\u0002\u0002\u0002\u019a\u019b\t\f\u0002", + "\u0002\u019bl\u0003\u0002\u0002\u0002\u019c\u019d\t\r\u0002\u0002\u019d", + "n\u0003\u0002\u0002\u0002\u019e\u019f\t\u000e\u0002\u0002\u019fp\u0003", + "\u0002\u0002\u0002\u01a0\u01a1\t\u000f\u0002\u0002\u01a1r\u0003\u0002", + "\u0002\u0002\u01a2\u01a3\t\u0010\u0002\u0002\u01a3t\u0003\u0002\u0002", + "\u0002\u01a4\u01a5\t\u0011\u0002\u0002\u01a5v\u0003\u0002\u0002\u0002", + "\u01a6\u01a7\t\u0012\u0002\u0002\u01a7x\u0003\u0002\u0002\u0002\u01a8", + "\u01a9\t\u0013\u0002\u0002\u01a9z\u0003\u0002\u0002\u0002\u01aa\u01ab", + "\t\u0014\u0002\u0002\u01ab|\u0003\u0002\u0002\u0002\u01ac\u01ad\t\u0015", + "\u0002\u0002\u01ad~\u0003\u0002\u0002\u0002\u01ae\u01af\t\u0016\u0002", + "\u0002\u01af\u0080\u0003\u0002\u0002\u0002\u01b0\u01b1\t\u0017\u0002", + "\u0002\u01b1\u0082\u0003\u0002\u0002\u0002\u01b2\u01b3\t\u0018\u0002", + "\u0002\u01b3\u0084\u0003\u0002\u0002\u0002\u01b4\u01b5\t\u0019\u0002", + 
"\u0002\u01b5\u0086\u0003\u0002\u0002\u0002\u01b6\u01b7\t\u001a\u0002", + "\u0002\u01b7\u0088\u0003\u0002\u0002\u0002\u01b8\u01b9\t\u001b\u0002", + "\u0002\u01b9\u008a\u0003\u0002\u0002\u0002\u01ba\u01bb\t\u001c\u0002", + "\u0002\u01bb\u008c\u0003\u0002\u0002\u0002\u01bc\u01bd\t\u001d\u0002", + "\u0002\u01bd\u008e\u0003\u0002\u0002\u0002\u01be\u01bf\t\u001e\u0002", + "\u0002\u01bf\u0090\u0003\u0002\u0002\u0002\u01c0\u01c1\t\u001f\u0002", + "\u0002\u01c1\u0092\u0003\u0002\u0002\u0002\u01c2\u01c3\t \u0002\u0002", + "\u01c3\u0094\u0003\u0002\u0002\u0002\u01c4\u01c5\t!\u0002\u0002\u01c5", + "\u0096\u0003\u0002\u0002\u0002\u01c6\u01c7\t\"\u0002\u0002\u01c7\u0098", + "\u0003\u0002\u0002\u0002\u01c8\u01c9\t#\u0002\u0002\u01c9\u009a\u0003", + "\u0002\u0002\u0002\u001b\u0002\u0111\u0117\u011a\u011e\u0123\u0125\u012b", + "\u012f\u0134\u0136\u0138\u0140\u0142\u014b\u014d\u0158\u015a\u0163\u0165", + "\u016f\u0174\u017f\u0185\u0191\u0003\u0002\u0003\u0002"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -340,102 +310,91 @@ Object.defineProperty(PqlLexer.prototype, "atn", { }); PqlLexer.EOF = antlr4.Token.EOF; -PqlLexer.TAXON_TAG_DELIMITER = 1; -PqlLexer.TAXON_OPTIONAL_OPERATOR = 2; -PqlLexer.AND = 3; -PqlLexer.EQ = 4; -PqlLexer.GT_EQ = 5; -PqlLexer.LT_EQ = 6; -PqlLexer.NOT_EQ1 = 7; -PqlLexer.NOT_EQ2 = 8; -PqlLexer.OR = 9; -PqlLexer.SHIFT_LEFT = 10; -PqlLexer.SHIFT_RIGHT = 11; -PqlLexer.AMP = 12; -PqlLexer.ASSIGN = 13; -PqlLexer.CLOSE_PAREN = 14; -PqlLexer.COMMA = 15; -PqlLexer.DOT = 16; -PqlLexer.FORWARD_SLASH = 17; -PqlLexer.GT = 18; -PqlLexer.LT = 19; -PqlLexer.MINUS = 20; -PqlLexer.MOD = 21; -PqlLexer.OPEN_PAREN = 22; -PqlLexer.PIPE = 23; -PqlLexer.PLUS = 24; +PqlLexer.AND = 1; +PqlLexer.EQ = 2; +PqlLexer.GT_EQ = 3; +PqlLexer.LT_EQ = 4; +PqlLexer.NOT_EQ1 = 5; +PqlLexer.NOT_EQ2 = 6; +PqlLexer.OR = 7; +PqlLexer.SHIFT_LEFT = 8; +PqlLexer.SHIFT_RIGHT = 9; +PqlLexer.AMP = 10; +PqlLexer.ASSIGN = 11; +PqlLexer.CLOSE_PAREN = 12; +PqlLexer.COLON = 13; +PqlLexer.COMMA = 14; +PqlLexer.DOT = 15; +PqlLexer.FORWARD_SLASH = 16; +PqlLexer.GT = 17; +PqlLexer.LT = 18; +PqlLexer.MINUS = 19; +PqlLexer.MOD = 20; +PqlLexer.OPEN_PAREN = 21; +PqlLexer.PIPE = 22; +PqlLexer.PLUS = 23; +PqlLexer.QUESTION_MARK = 24; PqlLexer.SCOL = 25; PqlLexer.STAR = 26; PqlLexer.TILDE = 27; PqlLexer.UNDER = 28; PqlLexer.K_AND = 29; -PqlLexer.K_ASC = 30; -PqlLexer.K_BY = 31; -PqlLexer.K_DESC = 32; -PqlLexer.K_FALSE = 33; -PqlLexer.K_IS = 34; -PqlLexer.K_ISNULL = 35; -PqlLexer.K_LIKE = 36; -PqlLexer.K_LIMIT = 37; -PqlLexer.K_NOT = 38; -PqlLexer.K_NOTNULL = 39; -PqlLexer.K_NULL = 40; -PqlLexer.K_OR = 41; -PqlLexer.K_ORDER = 42; -PqlLexer.K_SELECT = 43; -PqlLexer.K_TRUE = 44; -PqlLexer.K_WHERE = 45; -PqlLexer.NUMERIC_LITERAL = 46; -PqlLexer.DOUBLE_QUOTED_STRING = 47; -PqlLexer.DOUBLE_QUOTED_STRING_TEL = 48; -PqlLexer.DOUBLE_QUOTED_STRING_SQL = 49; -PqlLexer.SINGLE_QUOTED_STRING = 50; -PqlLexer.SINGLE_QUOTED_STRING_TEL = 51; -PqlLexer.SINGLE_QUOTED_STRING_SQL = 52; -PqlLexer.SINGLE_LINE_COMMENT = 53; -PqlLexer.MULTILINE_COMMENT = 54; -PqlLexer.SPACES = 55; -PqlLexer.WORD = 56; +PqlLexer.K_FALSE = 30; +PqlLexer.K_IS = 31; +PqlLexer.K_ISNULL = 32; +PqlLexer.K_LIKE = 33; +PqlLexer.K_NOT = 34; +PqlLexer.K_NOTNULL = 35; +PqlLexer.K_NULL = 36; +PqlLexer.K_OR = 37; +PqlLexer.K_TRUE = 38; +PqlLexer.NUMERIC_LITERAL = 39; +PqlLexer.DOUBLE_QUOTED_STRING = 40; +PqlLexer.DOUBLE_QUOTED_STRING_TEL = 41; +PqlLexer.DOUBLE_QUOTED_STRING_SQL = 42; +PqlLexer.SINGLE_QUOTED_STRING = 43; +PqlLexer.SINGLE_QUOTED_STRING_TEL = 44; 
+PqlLexer.SINGLE_QUOTED_STRING_SQL = 45; +PqlLexer.SINGLE_LINE_COMMENT = 46; +PqlLexer.MULTILINE_COMMENT = 47; +PqlLexer.SPACES = 48; +PqlLexer.WORD = 49; PqlLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; PqlLexer.prototype.modeNames = [ "DEFAULT_MODE" ]; -PqlLexer.prototype.literalNames = [ null, "':'", "'?'", "'&&'", "'=='", - "'>='", "'<='", "'!='", "'<>'", "'||'", - "'<<'", "'>>'", "'&'", "'='", "')'", - "','", "'.'", "'/'", "'>'", "'<'", "'-'", - "'%'", "'('", "'|'", "'+'", "';'", "'*'", - "'~'", "'_'" ]; +PqlLexer.prototype.literalNames = [ null, "'&&'", "'=='", "'>='", "'<='", + "'!='", "'<>'", "'||'", "'<<'", "'>>'", + "'&'", "'='", "')'", "':'", "','", "'.'", + "'/'", "'>'", "'<'", "'-'", "'%'", "'('", + "'|'", "'+'", "'?'", "';'", "'*'", "'~'", + "'_'" ]; -PqlLexer.prototype.symbolicNames = [ null, "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", - "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", - "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", - "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", - "DOT", "FORWARD_SLASH", "GT", "LT", - "MINUS", "MOD", "OPEN_PAREN", "PIPE", - "PLUS", "SCOL", "STAR", "TILDE", "UNDER", - "K_AND", "K_ASC", "K_BY", "K_DESC", +PqlLexer.prototype.symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", + "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", + "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", + "COLON", "COMMA", "DOT", "FORWARD_SLASH", + "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", + "PIPE", "PLUS", "QUESTION_MARK", "SCOL", + "STAR", "TILDE", "UNDER", "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", - "K_OR", "K_ORDER", "K_SELECT", "K_TRUE", - "K_WHERE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ]; -PqlLexer.prototype.ruleNames = [ "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", - "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", +PqlLexer.prototype.ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", - "AMP", "ASSIGN", "CLOSE_PAREN", "COMMA", - "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", - "MOD", "OPEN_PAREN", "PIPE", "PLUS", "SCOL", - "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", - "K_LIKE", "K_LIMIT", "K_NOT", "K_NOTNULL", - "K_NULL", "K_OR", "K_ORDER", "K_SELECT", - "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", + "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", + "COMMA", "DOT", "FORWARD_SLASH", "GT", + "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", + "PLUS", "QUESTION_MARK", "SCOL", "STAR", + "TILDE", "UNDER", "K_AND", "K_FALSE", "K_IS", + "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", diff --git a/js-temp/PqlParser.js b/js-temp/PqlParser.js index ff20d0e..0b6e7fa 100644 --- a/js-temp/PqlParser.js +++ b/js-temp/PqlParser.js @@ -8,114 +8,65 @@ var grammarFileName = "PqlParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u0003:\u00b2\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", - "\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004", - 
"\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004\f\t\f\u0004", - "\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0003\u0002\u0003\u0002", - "\u0003\u0002\u0003\u0003\u0007\u0003#\n\u0003\f\u0003\u000e\u0003&\u000b", - "\u0003\u0003\u0003\u0003\u0003\u0003\u0004\u0007\u0004+\n\u0004\f\u0004", - "\u000e\u0004.\u000b\u0004\u0003\u0004\u0003\u0004\u0006\u00042\n\u0004", - "\r\u0004\u000e\u00043\u0003\u0004\u0007\u00047\n\u0004\f\u0004\u000e", - "\u0004:\u000b\u0004\u0003\u0004\u0007\u0004=\n\u0004\f\u0004\u000e\u0004", - "@\u000b\u0004\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006\u0003", - "\u0006\u0005\u0006G\n\u0006\u0003\u0006\u0005\u0006J\n\u0006\u0003\u0006", - "\u0005\u0006M\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007\u0007\u0007", - "R\n\u0007\f\u0007\u000e\u0007U\u000b\u0007\u0003\b\u0003\b\u0003\b\u0003", - "\t\u0003\t\u0003\t\u0003\t\u0003\t\u0007\t_\n\t\f\t\u000e\tb\u000b\t", - "\u0003\n\u0003\n\u0005\nf\n\n\u0003\u000b\u0003\u000b\u0003\u000b\u0003", - "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003", - "\f\u0003\f\u0003\f\u0003\f\u0007\fx\n\f\f\f\u000e\f{\u000b\f\u0005\f", - "}\n\f\u0003\f\u0003\f\u0003\f\u0005\f\u0082\n\f\u0003\f\u0003\f\u0003", - "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003", - "\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0003\f\u0007\f\u0096\n\f", - "\f\f\u000e\f\u0099\u000b\f\u0003\r\u0005\r\u009c\n\r\u0003\r\u0003\r", - "\u0003\r\u0005\r\u00a1\n\r\u0003\r\u0003\r\u0003\r\u0005\r\u00a6\n\r", - "\u0003\u000e\u0003\u000e\u0003\u000e\u0007\u000e\u00ab\n\u000e\f\u000e", - "\u000e\u000e\u00ae\u000b\u000e\u0003\u000f\u0003\u000f\u0003\u000f\u0002", - "\u0003\u0016\u0010\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016", - "\u0018\u001a\u001c\u0002\u000b\u0004\u0002 \"\"\u0005\u0002\u0016\u0016", - "\u001a\u001a((\u0005\u0002\u0013\u0013\u0017\u0017\u001c\u001c\u0004", - "\u0002\u0016\u0016\u001a\u001a\u0004\u0002\u0007\b\u0014\u0015\u0006", - "\u0002\u0006\u0006\t\n\u000f\u000f$$\u0004\u0002\u0005\u0005\u001f\u001f", - "\u0004\u0002\u000b\u000b++\u0007\u0002##**..0144\u0002\u00be\u0002\u001e", - "\u0003\u0002\u0002\u0002\u0004$\u0003\u0002\u0002\u0002\u0006,\u0003", - "\u0002\u0002\u0002\bA\u0003\u0002\u0002\u0002\nC\u0003\u0002\u0002\u0002", - "\fN\u0003\u0002\u0002\u0002\u000eV\u0003\u0002\u0002\u0002\u0010Y\u0003", - "\u0002\u0002\u0002\u0012c\u0003\u0002\u0002\u0002\u0014g\u0003\u0002", - "\u0002\u0002\u0016\u0081\u0003\u0002\u0002\u0002\u0018\u009b\u0003\u0002", - "\u0002\u0002\u001a\u00a7\u0003\u0002\u0002\u0002\u001c\u00af\u0003\u0002", - "\u0002\u0002\u001e\u001f\u0005\u0016\f\u0002\u001f \u0007\u0002\u0002", - "\u0003 \u0003\u0003\u0002\u0002\u0002!#\u0005\u0006\u0004\u0002\"!\u0003", - "\u0002\u0002\u0002#&\u0003\u0002\u0002\u0002$\"\u0003\u0002\u0002\u0002", - "$%\u0003\u0002\u0002\u0002%\'\u0003\u0002\u0002\u0002&$\u0003\u0002", - "\u0002\u0002\'(\u0007\u0002\u0002\u0003(\u0005\u0003\u0002\u0002\u0002", - ")+\u0007\u001b\u0002\u0002*)\u0003\u0002\u0002\u0002+.\u0003\u0002\u0002", - "\u0002,*\u0003\u0002\u0002\u0002,-\u0003\u0002\u0002\u0002-/\u0003\u0002", - "\u0002\u0002.,\u0003\u0002\u0002\u0002/8\u0005\b\u0005\u000202\u0007", - "\u001b\u0002\u000210\u0003\u0002\u0002\u000223\u0003\u0002\u0002\u0002", - "31\u0003\u0002\u0002\u000234\u0003\u0002\u0002\u000245\u0003\u0002\u0002", - "\u000257\u0005\b\u0005\u000261\u0003\u0002\u0002\u00027:\u0003\u0002", - "\u0002\u000286\u0003\u0002\u0002\u000289\u0003\u0002\u0002\u00029>\u0003", - 
"\u0002\u0002\u0002:8\u0003\u0002\u0002\u0002;=\u0007\u001b\u0002\u0002", - "<;\u0003\u0002\u0002\u0002=@\u0003\u0002\u0002\u0002><\u0003\u0002\u0002", - "\u0002>?\u0003\u0002\u0002\u0002?\u0007\u0003\u0002\u0002\u0002@>\u0003", - "\u0002\u0002\u0002AB\u0005\n\u0006\u0002B\t\u0003\u0002\u0002\u0002", - "CD\u0007-\u0002\u0002DF\u0005\f\u0007\u0002EG\u0005\u000e\b\u0002FE", - "\u0003\u0002\u0002\u0002FG\u0003\u0002\u0002\u0002GI\u0003\u0002\u0002", - "\u0002HJ\u0005\u0010\t\u0002IH\u0003\u0002\u0002\u0002IJ\u0003\u0002", - "\u0002\u0002JL\u0003\u0002\u0002\u0002KM\u0005\u0014\u000b\u0002LK\u0003", - "\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002M\u000b\u0003\u0002\u0002", - "\u0002NS\u0005\u0016\f\u0002OP\u0007\u0011\u0002\u0002PR\u0005\u0016", - "\f\u0002QO\u0003\u0002\u0002\u0002RU\u0003\u0002\u0002\u0002SQ\u0003", - "\u0002\u0002\u0002ST\u0003\u0002\u0002\u0002T\r\u0003\u0002\u0002\u0002", - "US\u0003\u0002\u0002\u0002VW\u0007/\u0002\u0002WX\u0005\u0016\f\u0002", - "X\u000f\u0003\u0002\u0002\u0002YZ\u0007,\u0002\u0002Z[\u0007!\u0002", - "\u0002[`\u0005\u0012\n\u0002\\]\u0007\u0011\u0002\u0002]_\u0005\u0012", - "\n\u0002^\\\u0003\u0002\u0002\u0002_b\u0003\u0002\u0002\u0002`^\u0003", - "\u0002\u0002\u0002`a\u0003\u0002\u0002\u0002a\u0011\u0003\u0002\u0002", - "\u0002b`\u0003\u0002\u0002\u0002ce\u0005\u0016\f\u0002df\t\u0002\u0002", - "\u0002ed\u0003\u0002\u0002\u0002ef\u0003\u0002\u0002\u0002f\u0013\u0003", - "\u0002\u0002\u0002gh\u0007\'\u0002\u0002hi\u0005\u0016\f\u0002i\u0015", - "\u0003\u0002\u0002\u0002jk\b\f\u0001\u0002kl\t\u0003\u0002\u0002l\u0082", - "\u0005\u0016\f\rmn\u0007\u0018\u0002\u0002no\u0005\u0016\f\u0002op\u0007", - "\u0010\u0002\u0002p\u0082\u0003\u0002\u0002\u0002q\u0082\u0005\u001c", - "\u000f\u0002rs\u0005\u001a\u000e\u0002s|\u0007\u0018\u0002\u0002ty\u0005", - "\u0016\f\u0002uv\u0007\u0011\u0002\u0002vx\u0005\u0016\f\u0002wu\u0003", - "\u0002\u0002\u0002x{\u0003\u0002\u0002\u0002yw\u0003\u0002\u0002\u0002", - "yz\u0003\u0002\u0002\u0002z}\u0003\u0002\u0002\u0002{y\u0003\u0002\u0002", - "\u0002|t\u0003\u0002\u0002\u0002|}\u0003\u0002\u0002\u0002}~\u0003\u0002", - "\u0002\u0002~\u007f\u0007\u0010\u0002\u0002\u007f\u0082\u0003\u0002", - "\u0002\u0002\u0080\u0082\u0005\u0018\r\u0002\u0081j\u0003\u0002\u0002", - "\u0002\u0081m\u0003\u0002\u0002\u0002\u0081q\u0003\u0002\u0002\u0002", - "\u0081r\u0003\u0002\u0002\u0002\u0081\u0080\u0003\u0002\u0002\u0002", - "\u0082\u0097\u0003\u0002\u0002\u0002\u0083\u0084\f\f\u0002\u0002\u0084", - "\u0085\t\u0004\u0002\u0002\u0085\u0096\u0005\u0016\f\r\u0086\u0087\f", - "\u000b\u0002\u0002\u0087\u0088\t\u0005\u0002\u0002\u0088\u0096\u0005", - "\u0016\f\f\u0089\u008a\f\n\u0002\u0002\u008a\u008b\t\u0006\u0002\u0002", - "\u008b\u0096\u0005\u0016\f\u000b\u008c\u008d\f\t\u0002\u0002\u008d\u008e", - "\t\u0007\u0002\u0002\u008e\u0096\u0005\u0016\f\n\u008f\u0090\f\b\u0002", - "\u0002\u0090\u0091\t\b\u0002\u0002\u0091\u0096\u0005\u0016\f\t\u0092", - "\u0093\f\u0007\u0002\u0002\u0093\u0094\t\t\u0002\u0002\u0094\u0096\u0005", - "\u0016\f\b\u0095\u0083\u0003\u0002\u0002\u0002\u0095\u0086\u0003\u0002", - "\u0002\u0002\u0095\u0089\u0003\u0002\u0002\u0002\u0095\u008c\u0003\u0002", - "\u0002\u0002\u0095\u008f\u0003\u0002\u0002\u0002\u0095\u0092\u0003\u0002", - "\u0002\u0002\u0096\u0099\u0003\u0002\u0002\u0002\u0097\u0095\u0003\u0002", - "\u0002\u0002\u0097\u0098\u0003\u0002\u0002\u0002\u0098\u0017\u0003\u0002", - "\u0002\u0002\u0099\u0097\u0003\u0002\u0002\u0002\u009a\u009c\u0007\u0004", - 
"\u0002\u0002\u009b\u009a\u0003\u0002\u0002\u0002\u009b\u009c\u0003\u0002", - "\u0002\u0002\u009c\u00a0\u0003\u0002\u0002\u0002\u009d\u009e\u0005\u001a", - "\u000e\u0002\u009e\u009f\u0007\u0019\u0002\u0002\u009f\u00a1\u0003\u0002", - "\u0002\u0002\u00a0\u009d\u0003\u0002\u0002\u0002\u00a0\u00a1\u0003\u0002", - "\u0002\u0002\u00a1\u00a2\u0003\u0002\u0002\u0002\u00a2\u00a5\u0005\u001a", - "\u000e\u0002\u00a3\u00a4\u0007\u0003\u0002\u0002\u00a4\u00a6\u0005\u001a", - "\u000e\u0002\u00a5\u00a3\u0003\u0002\u0002\u0002\u00a5\u00a6\u0003\u0002", - "\u0002\u0002\u00a6\u0019\u0003\u0002\u0002\u0002\u00a7\u00ac\u0007:", - "\u0002\u0002\u00a8\u00a9\u0007\u0012\u0002\u0002\u00a9\u00ab\u0007:", - "\u0002\u0002\u00aa\u00a8\u0003\u0002\u0002\u0002\u00ab\u00ae\u0003\u0002", - "\u0002\u0002\u00ac\u00aa\u0003\u0002\u0002\u0002\u00ac\u00ad\u0003\u0002", - "\u0002\u0002\u00ad\u001b\u0003\u0002\u0002\u0002\u00ae\u00ac\u0003\u0002", - "\u0002\u0002\u00af\u00b0\t\n\u0002\u0002\u00b0\u001d\u0003\u0002\u0002", - "\u0002\u0016$,38>FILS`ey|\u0081\u0095\u0097\u009b\u00a0\u00a5\u00ac"].join(""); + "\u00033]\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", + "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004\b", + "\t\b\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0005\u0003\u001e\n\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0007\u00032\n\u0003", + "\f\u0003\u000e\u00035\u000b\u0003\u0003\u0004\u0003\u0004\u0003\u0004", + "\u0005\u0004:\n\u0004\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005", + "\u0003\u0005\u0007\u0005A\n\u0005\f\u0005\u000e\u0005D\u000b\u0005\u0003", + "\u0006\u0005\u0006G\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005", + "\u0006L\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006Q\n\u0006", + "\u0003\u0007\u0003\u0007\u0003\u0007\u0007\u0007V\n\u0007\f\u0007\u000e", + "\u0007Y\u000b\u0007\u0003\b\u0003\b\u0003\b\u0002\u0003\u0004\t\u0002", + "\u0004\u0006\b\n\f\u000e\u0002\n\u0005\u0002\u0015\u0015\u0019\u0019", + "$$\u0005\u0002\u0012\u0012\u0016\u0016\u001c\u001c\u0004\u0002\u0015", + "\u0015\u0019\u0019\u0004\u0002\u0005\u0006\u0013\u0014\u0006\u0002\u0004", + "\u0004\u0007\b\r\r!!\u0004\u0002\u0003\u0003\u001f\u001f\u0004\u0002", + "\t\t\'\'\u0006\u0002 &&(*--\u0002e\u0002\u0010\u0003\u0002\u0002\u0002", + "\u0004\u001d\u0003\u0002\u0002\u0002\u00066\u0003\u0002\u0002\u0002", + "\b=\u0003\u0002\u0002\u0002\nF\u0003\u0002\u0002\u0002\fR\u0003\u0002", + "\u0002\u0002\u000eZ\u0003\u0002\u0002\u0002\u0010\u0011\u0005\u0004", + "\u0003\u0002\u0011\u0012\u0007\u0002\u0002\u0003\u0012\u0003\u0003\u0002", + "\u0002\u0002\u0013\u0014\b\u0003\u0001\u0002\u0014\u0015\t\u0002\u0002", + "\u0002\u0015\u001e\u0005\u0004\u0003\r\u0016\u0017\u0007\u0017\u0002", + "\u0002\u0017\u0018\u0005\u0004\u0003\u0002\u0018\u0019\u0007\u000e\u0002", + "\u0002\u0019\u001e\u0003\u0002\u0002\u0002\u001a\u001e\u0005\u000e\b", + "\u0002\u001b\u001e\u0005\u0006\u0004\u0002\u001c\u001e\u0005\n\u0006", + "\u0002\u001d\u0013\u0003\u0002\u0002\u0002\u001d\u0016\u0003\u0002\u0002", + "\u0002\u001d\u001a\u0003\u0002\u0002\u0002\u001d\u001b\u0003\u0002\u0002", + "\u0002\u001d\u001c\u0003\u0002\u0002\u0002\u001e3\u0003\u0002\u0002", + "\u0002\u001f \f\f\u0002\u0002 
!\t\u0003\u0002\u0002!2\u0005\u0004\u0003", + "\r\"#\f\u000b\u0002\u0002#$\t\u0004\u0002\u0002$2\u0005\u0004\u0003", + "\f%&\f\n\u0002\u0002&\'\t\u0005\u0002\u0002\'2\u0005\u0004\u0003\u000b", + "()\f\t\u0002\u0002)*\t\u0006\u0002\u0002*2\u0005\u0004\u0003\n+,\f\b", + "\u0002\u0002,-\t\u0007\u0002\u0002-2\u0005\u0004\u0003\t./\f\u0007\u0002", + "\u0002/0\t\b\u0002\u000202\u0005\u0004\u0003\b1\u001f\u0003\u0002\u0002", + "\u00021\"\u0003\u0002\u0002\u00021%\u0003\u0002\u0002\u00021(\u0003", + "\u0002\u0002\u00021+\u0003\u0002\u0002\u00021.\u0003\u0002\u0002\u0002", + "25\u0003\u0002\u0002\u000231\u0003\u0002\u0002\u000234\u0003\u0002\u0002", + "\u00024\u0005\u0003\u0002\u0002\u000253\u0003\u0002\u0002\u000267\u0005", + "\f\u0007\u000279\u0007\u0017\u0002\u00028:\u0005\b\u0005\u000298\u0003", + "\u0002\u0002\u00029:\u0003\u0002\u0002\u0002:;\u0003\u0002\u0002\u0002", + ";<\u0007\u000e\u0002\u0002<\u0007\u0003\u0002\u0002\u0002=B\u0005\u0004", + "\u0003\u0002>?\u0007\u0010\u0002\u0002?A\u0005\u0004\u0003\u0002@>\u0003", + "\u0002\u0002\u0002AD\u0003\u0002\u0002\u0002B@\u0003\u0002\u0002\u0002", + "BC\u0003\u0002\u0002\u0002C\t\u0003\u0002\u0002\u0002DB\u0003\u0002", + "\u0002\u0002EG\u0007\u001a\u0002\u0002FE\u0003\u0002\u0002\u0002FG\u0003", + "\u0002\u0002\u0002GK\u0003\u0002\u0002\u0002HI\u0005\f\u0007\u0002I", + "J\u0007\u0018\u0002\u0002JL\u0003\u0002\u0002\u0002KH\u0003\u0002\u0002", + "\u0002KL\u0003\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002MP\u0005\f", + "\u0007\u0002NO\u0007\u000f\u0002\u0002OQ\u0005\f\u0007\u0002PN\u0003", + "\u0002\u0002\u0002PQ\u0003\u0002\u0002\u0002Q\u000b\u0003\u0002\u0002", + "\u0002RW\u00073\u0002\u0002ST\u0007\u0011\u0002\u0002TV\u00073\u0002", + "\u0002US\u0003\u0002\u0002\u0002VY\u0003\u0002\u0002\u0002WU\u0003\u0002", + "\u0002\u0002WX\u0003\u0002\u0002\u0002X\r\u0003\u0002\u0002\u0002YW", + "\u0003\u0002\u0002\u0002Z[\t\t\u0002\u0002[\u000f\u0003\u0002\u0002", + "\u0002\u000b\u001d139BFKPW"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -124,29 +75,25 @@ var decisionsToDFA = atn.decisionToState.map( function(ds, index) { return new a var sharedContextCache = new antlr4.PredictionContextCache(); -var literalNames = [ null, "':'", "'?'", "'&&'", "'=='", "'>='", "'<='", - "'!='", "'<>'", "'||'", "'<<'", "'>>'", "'&'", "'='", - "')'", "','", "'.'", "'/'", "'>'", "'<'", "'-'", "'%'", - "'('", "'|'", "'+'", "';'", "'*'", "'~'", "'_'" ]; +var literalNames = [ null, "'&&'", "'=='", "'>='", "'<='", "'!='", "'<>'", + "'||'", "'<<'", "'>>'", "'&'", "'='", "')'", "':'", + "','", "'.'", "'/'", "'>'", "'<'", "'-'", "'%'", "'('", + "'|'", "'+'", "'?'", "';'", "'*'", "'~'", "'_'" ]; -var symbolicNames = [ null, "TAXON_TAG_DELIMITER", "TAXON_OPTIONAL_OPERATOR", - "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", +var symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", - "CLOSE_PAREN", "COMMA", "DOT", "FORWARD_SLASH", "GT", - "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", - "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_ASC", - "K_BY", "K_DESC", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_LIMIT", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_ORDER", "K_SELECT", "K_TRUE", "K_WHERE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", + "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", + "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", + "UNDER", "K_AND", "K_FALSE", 
"K_IS", "K_ISNULL", "K_LIKE", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", + "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ]; -var ruleNames = [ "parseTel", "parsePql", "sqlStmtList", "sqlStmt", "selectStmt", - "columns", "whereClause", "orderByClause", "orderExpr", - "limitClause", "expr", "taxon", "identifierMultipart", +var ruleNames = [ "parseTel", "expr", "fn", "exprList", "taxon", "identifierMultipart", "literalValue" ]; function PqlParser (input) { @@ -168,77 +115,63 @@ Object.defineProperty(PqlParser.prototype, "atn", { }); PqlParser.EOF = antlr4.Token.EOF; -PqlParser.TAXON_TAG_DELIMITER = 1; -PqlParser.TAXON_OPTIONAL_OPERATOR = 2; -PqlParser.AND = 3; -PqlParser.EQ = 4; -PqlParser.GT_EQ = 5; -PqlParser.LT_EQ = 6; -PqlParser.NOT_EQ1 = 7; -PqlParser.NOT_EQ2 = 8; -PqlParser.OR = 9; -PqlParser.SHIFT_LEFT = 10; -PqlParser.SHIFT_RIGHT = 11; -PqlParser.AMP = 12; -PqlParser.ASSIGN = 13; -PqlParser.CLOSE_PAREN = 14; -PqlParser.COMMA = 15; -PqlParser.DOT = 16; -PqlParser.FORWARD_SLASH = 17; -PqlParser.GT = 18; -PqlParser.LT = 19; -PqlParser.MINUS = 20; -PqlParser.MOD = 21; -PqlParser.OPEN_PAREN = 22; -PqlParser.PIPE = 23; -PqlParser.PLUS = 24; +PqlParser.AND = 1; +PqlParser.EQ = 2; +PqlParser.GT_EQ = 3; +PqlParser.LT_EQ = 4; +PqlParser.NOT_EQ1 = 5; +PqlParser.NOT_EQ2 = 6; +PqlParser.OR = 7; +PqlParser.SHIFT_LEFT = 8; +PqlParser.SHIFT_RIGHT = 9; +PqlParser.AMP = 10; +PqlParser.ASSIGN = 11; +PqlParser.CLOSE_PAREN = 12; +PqlParser.COLON = 13; +PqlParser.COMMA = 14; +PqlParser.DOT = 15; +PqlParser.FORWARD_SLASH = 16; +PqlParser.GT = 17; +PqlParser.LT = 18; +PqlParser.MINUS = 19; +PqlParser.MOD = 20; +PqlParser.OPEN_PAREN = 21; +PqlParser.PIPE = 22; +PqlParser.PLUS = 23; +PqlParser.QUESTION_MARK = 24; PqlParser.SCOL = 25; PqlParser.STAR = 26; PqlParser.TILDE = 27; PqlParser.UNDER = 28; PqlParser.K_AND = 29; -PqlParser.K_ASC = 30; -PqlParser.K_BY = 31; -PqlParser.K_DESC = 32; -PqlParser.K_FALSE = 33; -PqlParser.K_IS = 34; -PqlParser.K_ISNULL = 35; -PqlParser.K_LIKE = 36; -PqlParser.K_LIMIT = 37; -PqlParser.K_NOT = 38; -PqlParser.K_NOTNULL = 39; -PqlParser.K_NULL = 40; -PqlParser.K_OR = 41; -PqlParser.K_ORDER = 42; -PqlParser.K_SELECT = 43; -PqlParser.K_TRUE = 44; -PqlParser.K_WHERE = 45; -PqlParser.NUMERIC_LITERAL = 46; -PqlParser.DOUBLE_QUOTED_STRING = 47; -PqlParser.DOUBLE_QUOTED_STRING_TEL = 48; -PqlParser.DOUBLE_QUOTED_STRING_SQL = 49; -PqlParser.SINGLE_QUOTED_STRING = 50; -PqlParser.SINGLE_QUOTED_STRING_TEL = 51; -PqlParser.SINGLE_QUOTED_STRING_SQL = 52; -PqlParser.SINGLE_LINE_COMMENT = 53; -PqlParser.MULTILINE_COMMENT = 54; -PqlParser.SPACES = 55; -PqlParser.WORD = 56; +PqlParser.K_FALSE = 30; +PqlParser.K_IS = 31; +PqlParser.K_ISNULL = 32; +PqlParser.K_LIKE = 33; +PqlParser.K_NOT = 34; +PqlParser.K_NOTNULL = 35; +PqlParser.K_NULL = 36; +PqlParser.K_OR = 37; +PqlParser.K_TRUE = 38; +PqlParser.NUMERIC_LITERAL = 39; +PqlParser.DOUBLE_QUOTED_STRING = 40; +PqlParser.DOUBLE_QUOTED_STRING_TEL = 41; +PqlParser.DOUBLE_QUOTED_STRING_SQL = 42; +PqlParser.SINGLE_QUOTED_STRING = 43; +PqlParser.SINGLE_QUOTED_STRING_TEL = 44; +PqlParser.SINGLE_QUOTED_STRING_SQL = 45; +PqlParser.SINGLE_LINE_COMMENT = 46; +PqlParser.MULTILINE_COMMENT = 47; +PqlParser.SPACES = 48; +PqlParser.WORD = 49; PqlParser.RULE_parseTel = 0; -PqlParser.RULE_parsePql = 1; -PqlParser.RULE_sqlStmtList = 2; -PqlParser.RULE_sqlStmt = 3; 
-PqlParser.RULE_selectStmt = 4; -PqlParser.RULE_columns = 5; -PqlParser.RULE_whereClause = 6; -PqlParser.RULE_orderByClause = 7; -PqlParser.RULE_orderExpr = 8; -PqlParser.RULE_limitClause = 9; -PqlParser.RULE_expr = 10; -PqlParser.RULE_taxon = 11; -PqlParser.RULE_identifierMultipart = 12; -PqlParser.RULE_literalValue = 13; +PqlParser.RULE_expr = 1; +PqlParser.RULE_fn = 2; +PqlParser.RULE_exprList = 3; +PqlParser.RULE_taxon = 4; +PqlParser.RULE_identifierMultipart = 5; +PqlParser.RULE_literalValue = 6; function ParseTelContext(parser, parent, invokingState) { @@ -296,9 +229,9 @@ PqlParser.prototype.parseTel = function() { this.enterRule(localctx, 0, PqlParser.RULE_parseTel); try { this.enterOuterAlt(localctx, 1); - this.state = 28; + this.state = 14; this.expr(0); - this.state = 29; + this.state = 15; this.match(PqlParser.EOF); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -315,862 +248,6 @@ PqlParser.prototype.parseTel = function() { }; -function ParsePqlContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_parsePql; - return this; -} - -ParsePqlContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -ParsePqlContext.prototype.constructor = ParsePqlContext; - -ParsePqlContext.prototype.EOF = function() { - return this.getToken(PqlParser.EOF, 0); -}; - -ParsePqlContext.prototype.sqlStmtList = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(SqlStmtListContext); - } else { - return this.getTypedRuleContext(SqlStmtListContext,i); - } -}; - -ParsePqlContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterParsePql(this); - } -}; - -ParsePqlContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitParsePql(this); - } -}; - -ParsePqlContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitParsePql(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.ParsePqlContext = ParsePqlContext; - -PqlParser.prototype.parsePql = function() { - - var localctx = new ParsePqlContext(this, this._ctx, this.state); - this.enterRule(localctx, 2, PqlParser.RULE_parsePql); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 34; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===PqlParser.SCOL || _la===PqlParser.K_SELECT) { - this.state = 31; - this.sqlStmtList(); - this.state = 36; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 37; - this.match(PqlParser.EOF); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function SqlStmtListContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_sqlStmtList; - return this; -} - -SqlStmtListContext.prototype = 
Object.create(antlr4.ParserRuleContext.prototype); -SqlStmtListContext.prototype.constructor = SqlStmtListContext; - -SqlStmtListContext.prototype.sqlStmt = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(SqlStmtContext); - } else { - return this.getTypedRuleContext(SqlStmtContext,i); - } -}; - -SqlStmtListContext.prototype.SCOL = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(PqlParser.SCOL); - } else { - return this.getToken(PqlParser.SCOL, i); - } -}; - - -SqlStmtListContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterSqlStmtList(this); - } -}; - -SqlStmtListContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitSqlStmtList(this); - } -}; - -SqlStmtListContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitSqlStmtList(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.SqlStmtListContext = SqlStmtListContext; - -PqlParser.prototype.sqlStmtList = function() { - - var localctx = new SqlStmtListContext(this, this._ctx, this.state); - this.enterRule(localctx, 4, PqlParser.RULE_sqlStmtList); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 42; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===PqlParser.SCOL) { - this.state = 39; - this.match(PqlParser.SCOL); - this.state = 44; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 45; - this.sqlStmt(); - this.state = 54; - this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,3,this._ctx) - while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { - if(_alt===1) { - this.state = 47; - this._errHandler.sync(this); - _la = this._input.LA(1); - do { - this.state = 46; - this.match(PqlParser.SCOL); - this.state = 49; - this._errHandler.sync(this); - _la = this._input.LA(1); - } while(_la===PqlParser.SCOL); - this.state = 51; - this.sqlStmt(); - } - this.state = 56; - this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,3,this._ctx); - } - - this.state = 60; - this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,4,this._ctx) - while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { - if(_alt===1) { - this.state = 57; - this.match(PqlParser.SCOL); - } - this.state = 62; - this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,4,this._ctx); - } - - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function SqlStmtContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_sqlStmt; - return this; -} - -SqlStmtContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -SqlStmtContext.prototype.constructor = SqlStmtContext; - -SqlStmtContext.prototype.selectStmt = function() { - return this.getTypedRuleContext(SelectStmtContext,0); -}; - -SqlStmtContext.prototype.enterRule = function(listener) { 
- if(listener instanceof PqlParserListener ) { - listener.enterSqlStmt(this); - } -}; - -SqlStmtContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitSqlStmt(this); - } -}; - -SqlStmtContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitSqlStmt(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.SqlStmtContext = SqlStmtContext; - -PqlParser.prototype.sqlStmt = function() { - - var localctx = new SqlStmtContext(this, this._ctx, this.state); - this.enterRule(localctx, 6, PqlParser.RULE_sqlStmt); - try { - this.enterOuterAlt(localctx, 1); - this.state = 63; - this.selectStmt(); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function SelectStmtContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_selectStmt; - return this; -} - -SelectStmtContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -SelectStmtContext.prototype.constructor = SelectStmtContext; - -SelectStmtContext.prototype.K_SELECT = function() { - return this.getToken(PqlParser.K_SELECT, 0); -}; - -SelectStmtContext.prototype.columns = function() { - return this.getTypedRuleContext(ColumnsContext,0); -}; - -SelectStmtContext.prototype.whereClause = function() { - return this.getTypedRuleContext(WhereClauseContext,0); -}; - -SelectStmtContext.prototype.orderByClause = function() { - return this.getTypedRuleContext(OrderByClauseContext,0); -}; - -SelectStmtContext.prototype.limitClause = function() { - return this.getTypedRuleContext(LimitClauseContext,0); -}; - -SelectStmtContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterSelectStmt(this); - } -}; - -SelectStmtContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitSelectStmt(this); - } -}; - -SelectStmtContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitSelectStmt(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.SelectStmtContext = SelectStmtContext; - -PqlParser.prototype.selectStmt = function() { - - var localctx = new SelectStmtContext(this, this._ctx, this.state); - this.enterRule(localctx, 8, PqlParser.RULE_selectStmt); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 65; - this.match(PqlParser.K_SELECT); - this.state = 66; - this.columns(); - this.state = 68; - this._errHandler.sync(this); - _la = this._input.LA(1); - if(_la===PqlParser.K_WHERE) { - this.state = 67; - this.whereClause(); - } - - this.state = 71; - this._errHandler.sync(this); - _la = this._input.LA(1); - if(_la===PqlParser.K_ORDER) { - this.state = 70; - this.orderByClause(); - } - - this.state = 74; - this._errHandler.sync(this); - _la = this._input.LA(1); - if(_la===PqlParser.K_LIMIT) { - this.state = 73; - this.limitClause(); - } - - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - 
this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function ColumnsContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_columns; - return this; -} - -ColumnsContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -ColumnsContext.prototype.constructor = ColumnsContext; - -ColumnsContext.prototype.expr = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(ExprContext); - } else { - return this.getTypedRuleContext(ExprContext,i); - } -}; - -ColumnsContext.prototype.COMMA = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(PqlParser.COMMA); - } else { - return this.getToken(PqlParser.COMMA, i); - } -}; - - -ColumnsContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterColumns(this); - } -}; - -ColumnsContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitColumns(this); - } -}; - -ColumnsContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitColumns(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.ColumnsContext = ColumnsContext; - -PqlParser.prototype.columns = function() { - - var localctx = new ColumnsContext(this, this._ctx, this.state); - this.enterRule(localctx, 10, PqlParser.RULE_columns); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 76; - this.expr(0); - this.state = 81; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===PqlParser.COMMA) { - this.state = 77; - this.match(PqlParser.COMMA); - this.state = 78; - this.expr(0); - this.state = 83; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function WhereClauseContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_whereClause; - return this; -} - -WhereClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -WhereClauseContext.prototype.constructor = WhereClauseContext; - -WhereClauseContext.prototype.K_WHERE = function() { - return this.getToken(PqlParser.K_WHERE, 0); -}; - -WhereClauseContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); -}; - -WhereClauseContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterWhereClause(this); - } -}; - -WhereClauseContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitWhereClause(this); - } -}; - -WhereClauseContext.prototype.accept = function(visitor) { - if ( visitor instanceof 
PqlParserVisitor ) { - return visitor.visitWhereClause(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.WhereClauseContext = WhereClauseContext; - -PqlParser.prototype.whereClause = function() { - - var localctx = new WhereClauseContext(this, this._ctx, this.state); - this.enterRule(localctx, 12, PqlParser.RULE_whereClause); - try { - this.enterOuterAlt(localctx, 1); - this.state = 84; - this.match(PqlParser.K_WHERE); - this.state = 85; - this.expr(0); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function OrderByClauseContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_orderByClause; - return this; -} - -OrderByClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -OrderByClauseContext.prototype.constructor = OrderByClauseContext; - -OrderByClauseContext.prototype.K_ORDER = function() { - return this.getToken(PqlParser.K_ORDER, 0); -}; - -OrderByClauseContext.prototype.K_BY = function() { - return this.getToken(PqlParser.K_BY, 0); -}; - -OrderByClauseContext.prototype.orderExpr = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTypedRuleContexts(OrderExprContext); - } else { - return this.getTypedRuleContext(OrderExprContext,i); - } -}; - -OrderByClauseContext.prototype.COMMA = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(PqlParser.COMMA); - } else { - return this.getToken(PqlParser.COMMA, i); - } -}; - - -OrderByClauseContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterOrderByClause(this); - } -}; - -OrderByClauseContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitOrderByClause(this); - } -}; - -OrderByClauseContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitOrderByClause(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.OrderByClauseContext = OrderByClauseContext; - -PqlParser.prototype.orderByClause = function() { - - var localctx = new OrderByClauseContext(this, this._ctx, this.state); - this.enterRule(localctx, 14, PqlParser.RULE_orderByClause); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 87; - this.match(PqlParser.K_ORDER); - this.state = 88; - this.match(PqlParser.K_BY); - this.state = 89; - this.orderExpr(); - this.state = 94; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===PqlParser.COMMA) { - this.state = 90; - this.match(PqlParser.COMMA); - this.state = 91; - this.orderExpr(); - this.state = 96; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function OrderExprContext(parser, parent, invokingState) { - 
if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_orderExpr; - return this; -} - -OrderExprContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -OrderExprContext.prototype.constructor = OrderExprContext; - -OrderExprContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); -}; - -OrderExprContext.prototype.K_ASC = function() { - return this.getToken(PqlParser.K_ASC, 0); -}; - -OrderExprContext.prototype.K_DESC = function() { - return this.getToken(PqlParser.K_DESC, 0); -}; - -OrderExprContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterOrderExpr(this); - } -}; - -OrderExprContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitOrderExpr(this); - } -}; - -OrderExprContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitOrderExpr(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.OrderExprContext = OrderExprContext; - -PqlParser.prototype.orderExpr = function() { - - var localctx = new OrderExprContext(this, this._ctx, this.state); - this.enterRule(localctx, 16, PqlParser.RULE_orderExpr); - var _la = 0; // Token type - try { - this.enterOuterAlt(localctx, 1); - this.state = 97; - this.expr(0); - this.state = 99; - this._errHandler.sync(this); - _la = this._input.LA(1); - if(_la===PqlParser.K_ASC || _la===PqlParser.K_DESC) { - this.state = 98; - _la = this._input.LA(1); - if(!(_la===PqlParser.K_ASC || _la===PqlParser.K_DESC)) { - this._errHandler.recoverInline(this); - } - else { - this._errHandler.reportMatch(this); - this.consume(); - } - } - - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - -function LimitClauseContext(parser, parent, invokingState) { - if(parent===undefined) { - parent = null; - } - if(invokingState===undefined || invokingState===null) { - invokingState = -1; - } - antlr4.ParserRuleContext.call(this, parent, invokingState); - this.parser = parser; - this.ruleIndex = PqlParser.RULE_limitClause; - this.limit = null; // ExprContext - return this; -} - -LimitClauseContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -LimitClauseContext.prototype.constructor = LimitClauseContext; - -LimitClauseContext.prototype.K_LIMIT = function() { - return this.getToken(PqlParser.K_LIMIT, 0); -}; - -LimitClauseContext.prototype.expr = function() { - return this.getTypedRuleContext(ExprContext,0); -}; - -LimitClauseContext.prototype.enterRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.enterLimitClause(this); - } -}; - -LimitClauseContext.prototype.exitRule = function(listener) { - if(listener instanceof PqlParserListener ) { - listener.exitLimitClause(this); - } -}; - -LimitClauseContext.prototype.accept = function(visitor) { - if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitLimitClause(this); - } else { - return visitor.visitChildren(this); - } -}; - - - - -PqlParser.LimitClauseContext = LimitClauseContext; - -PqlParser.prototype.limitClause = 
function() { - - var localctx = new LimitClauseContext(this, this._ctx, this.state); - this.enterRule(localctx, 18, PqlParser.RULE_limitClause); - try { - this.enterOuterAlt(localctx, 1); - this.state = 101; - this.match(PqlParser.K_LIMIT); - this.state = 102; - localctx.limit = this.expr(0); - } catch (re) { - if(re instanceof antlr4.error.RecognitionException) { - localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return localctx; -}; - - function ExprContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; @@ -1185,7 +262,6 @@ function ExprContext(parser, parent, invokingState) { this.unary_operator = null; // Token this.right = null; // ExprContext this.inner = null; // ExprContext - this.function_name = null; // IdentifierMultipartContext this.operator = null; // Token return this; } @@ -1228,22 +304,10 @@ ExprContext.prototype.literalValue = function() { return this.getTypedRuleContext(LiteralValueContext,0); }; -ExprContext.prototype.identifierMultipart = function() { - return this.getTypedRuleContext(IdentifierMultipartContext,0); -}; - -ExprContext.prototype.COMMA = function(i) { - if(i===undefined) { - i = null; - } - if(i===null) { - return this.getTokens(PqlParser.COMMA); - } else { - return this.getToken(PqlParser.COMMA, i); - } +ExprContext.prototype.fn = function() { + return this.getTypedRuleContext(FnContext,0); }; - ExprContext.prototype.taxon = function() { return this.getTypedRuleContext(TaxonContext,0); }; @@ -1342,102 +406,78 @@ PqlParser.prototype.expr = function(_p) { var _parentState = this.state; var localctx = new ExprContext(this, this._ctx, _parentState); var _prevctx = localctx; - var _startState = 20; - this.enterRecursionRule(localctx, 20, PqlParser.RULE_expr, _p); + var _startState = 2; + this.enterRecursionRule(localctx, 2, PqlParser.RULE_expr, _p); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 127; + this.state = 27; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,13,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,0,this._ctx); switch(la_) { case 1: - this.state = 105; + this.state = 18; localctx.unary_operator = this._input.LT(1); _la = this._input.LA(1); - if(!(((((_la - 20)) & ~0x1f) == 0 && ((1 << (_la - 20)) & ((1 << (PqlParser.MINUS - 20)) | (1 << (PqlParser.PLUS - 20)) | (1 << (PqlParser.K_NOT - 20)))) !== 0))) { + if(!(((((_la - 19)) & ~0x1f) == 0 && ((1 << (_la - 19)) & ((1 << (PqlParser.MINUS - 19)) | (1 << (PqlParser.PLUS - 19)) | (1 << (PqlParser.K_NOT - 19)))) !== 0))) { localctx.unary_operator = this._errHandler.recoverInline(this); } else { this._errHandler.reportMatch(this); this.consume(); } - this.state = 106; + this.state = 19; localctx.right = this.expr(11); break; case 2: - this.state = 107; + this.state = 20; this.match(PqlParser.OPEN_PAREN); - this.state = 108; + this.state = 21; localctx.inner = this.expr(0); - this.state = 109; + this.state = 22; this.match(PqlParser.CLOSE_PAREN); break; case 3: - this.state = 111; + this.state = 24; this.literalValue(); break; case 4: - this.state = 112; - localctx.function_name = this.identifierMultipart(); - this.state = 113; - this.match(PqlParser.OPEN_PAREN); - this.state = 122; - this._errHandler.sync(this); - _la = this._input.LA(1); - if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.TAXON_OPTIONAL_OPERATOR) | (1 << PqlParser.MINUS) | (1 << 
PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS))) !== 0) || ((((_la - 33)) & ~0x1f) == 0 && ((1 << (_la - 33)) & ((1 << (PqlParser.K_FALSE - 33)) | (1 << (PqlParser.K_NOT - 33)) | (1 << (PqlParser.K_NULL - 33)) | (1 << (PqlParser.K_TRUE - 33)) | (1 << (PqlParser.NUMERIC_LITERAL - 33)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 33)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 33)) | (1 << (PqlParser.WORD - 33)))) !== 0)) { - this.state = 114; - this.expr(0); - this.state = 119; - this._errHandler.sync(this); - _la = this._input.LA(1); - while(_la===PqlParser.COMMA) { - this.state = 115; - this.match(PqlParser.COMMA); - this.state = 116; - this.expr(0); - this.state = 121; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - - this.state = 124; - this.match(PqlParser.CLOSE_PAREN); + this.state = 25; + this.fn(); break; case 5: - this.state = 126; + this.state = 26; this.taxon(); break; } this._ctx.stop = this._input.LT(-1); - this.state = 149; + this.state = 49; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,15,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,2,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { if(this._parseListeners!==null) { this.triggerExitRuleEvent(); } _prevctx = localctx; - this.state = 147; + this.state = 47; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,14,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,1,this._ctx); switch(la_) { case 1: localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 129; + this.state = 29; if (!( this.precpred(this._ctx, 10))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); } - this.state = 130; + this.state = 30; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) !== 0))) { @@ -1447,7 +487,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 131; + this.state = 31; localctx.right = this.expr(11); break; @@ -1455,11 +495,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 132; + this.state = 32; if (!( this.precpred(this._ctx, 9))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 9)"); } - this.state = 133; + this.state = 33; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.MINUS || _la===PqlParser.PLUS)) { @@ -1469,7 +509,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 134; + this.state = 34; localctx.right = this.expr(10); break; @@ -1477,11 +517,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 135; + this.state = 35; if (!( this.precpred(this._ctx, 8))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); } - this.state = 136; + this.state = 36; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!((((_la) & 
~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) !== 0))) { @@ -1491,7 +531,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 137; + this.state = 37; localctx.right = this.expr(9); break; @@ -1499,21 +539,21 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 138; + this.state = 38; if (!( this.precpred(this._ctx, 7))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); } - this.state = 139; + this.state = 39; localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!(((((_la - 4)) & ~0x1f) == 0 && ((1 << (_la - 4)) & ((1 << (PqlParser.EQ - 4)) | (1 << (PqlParser.NOT_EQ1 - 4)) | (1 << (PqlParser.NOT_EQ2 - 4)) | (1 << (PqlParser.ASSIGN - 4)) | (1 << (PqlParser.K_IS - 4)))) !== 0))) { + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) !== 0))) { localctx.operator = this._errHandler.recoverInline(this); } else { this._errHandler.reportMatch(this); this.consume(); } - this.state = 140; + this.state = 40; localctx.right = this.expr(8); break; @@ -1521,11 +561,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 141; + this.state = 41; if (!( this.precpred(this._ctx, 6))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); } - this.state = 142; + this.state = 42; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.AND || _la===PqlParser.K_AND)) { @@ -1535,7 +575,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 143; + this.state = 43; localctx.right = this.expr(7); break; @@ -1543,11 +583,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 144; + this.state = 44; if (!( this.precpred(this._ctx, 5))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); } - this.state = 145; + this.state = 45; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.OR || _la===PqlParser.K_OR)) { @@ -1557,15 +597,15 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 146; + this.state = 46; localctx.right = this.expr(6); break; } } - this.state = 151; + this.state = 51; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,15,this._ctx); + _alt = this._interp.adaptivePredict(this._input,2,this._ctx); } } catch( error) { @@ -1583,6 +623,201 @@ PqlParser.prototype.expr = function(_p) { }; +function FnContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_fn; + this.function_name = null; // 
IdentifierMultipartContext + this.arguments = null; // ExprListContext + return this; +} + +FnContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +FnContext.prototype.constructor = FnContext; + +FnContext.prototype.OPEN_PAREN = function() { + return this.getToken(PqlParser.OPEN_PAREN, 0); +}; + +FnContext.prototype.CLOSE_PAREN = function() { + return this.getToken(PqlParser.CLOSE_PAREN, 0); +}; + +FnContext.prototype.identifierMultipart = function() { + return this.getTypedRuleContext(IdentifierMultipartContext,0); +}; + +FnContext.prototype.exprList = function() { + return this.getTypedRuleContext(ExprListContext,0); +}; + +FnContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterFn(this); + } +}; + +FnContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitFn(this); + } +}; + +FnContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitFn(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.FnContext = FnContext; + +PqlParser.prototype.fn = function() { + + var localctx = new FnContext(this, this._ctx, this.state); + this.enterRule(localctx, 4, PqlParser.RULE_fn); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 52; + localctx.function_name = this.identifierMultipart(); + this.state = 53; + this.match(PqlParser.OPEN_PAREN); + this.state = 55; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(((((_la - 19)) & ~0x1f) == 0 && ((1 << (_la - 19)) & ((1 << (PqlParser.MINUS - 19)) | (1 << (PqlParser.OPEN_PAREN - 19)) | (1 << (PqlParser.PLUS - 19)) | (1 << (PqlParser.QUESTION_MARK - 19)) | (1 << (PqlParser.K_FALSE - 19)) | (1 << (PqlParser.K_NOT - 19)) | (1 << (PqlParser.K_NULL - 19)) | (1 << (PqlParser.K_TRUE - 19)) | (1 << (PqlParser.NUMERIC_LITERAL - 19)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 19)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 19)) | (1 << (PqlParser.WORD - 19)))) !== 0)) { + this.state = 54; + localctx.arguments = this.exprList(); + } + + this.state = 57; + this.match(PqlParser.CLOSE_PAREN); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + +function ExprListContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_exprList; + return this; +} + +ExprListContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ExprListContext.prototype.constructor = ExprListContext; + +ExprListContext.prototype.expr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(ExprContext); + } else { + return this.getTypedRuleContext(ExprContext,i); + } +}; + +ExprListContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.COMMA); + } else { + return this.getToken(PqlParser.COMMA, i); + } +}; + + +ExprListContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterExprList(this); + } +}; + 
+ExprListContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitExprList(this); + } +}; + +ExprListContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitExprList(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.ExprListContext = ExprListContext; + +PqlParser.prototype.exprList = function() { + + var localctx = new ExprListContext(this, this._ctx, this.state); + this.enterRule(localctx, 6, PqlParser.RULE_exprList); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 59; + this.expr(0); + this.state = 64; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.COMMA) { + this.state = 60; + this.match(PqlParser.COMMA); + this.state = 61; + this.expr(0); + this.state = 66; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + function TaxonContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; @@ -1593,6 +828,7 @@ function TaxonContext(parser, parent, invokingState) { antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; this.ruleIndex = PqlParser.RULE_taxon; + this.is_optional = null; // Token this.namespace = null; // IdentifierMultipartContext this.slug = null; // IdentifierMultipartContext this.tag = null; // IdentifierMultipartContext @@ -1613,16 +849,16 @@ TaxonContext.prototype.identifierMultipart = function(i) { } }; -TaxonContext.prototype.TAXON_OPTIONAL_OPERATOR = function() { - return this.getToken(PqlParser.TAXON_OPTIONAL_OPERATOR, 0); -}; - TaxonContext.prototype.PIPE = function() { return this.getToken(PqlParser.PIPE, 0); }; -TaxonContext.prototype.TAXON_TAG_DELIMITER = function() { - return this.getToken(PqlParser.TAXON_TAG_DELIMITER, 0); +TaxonContext.prototype.COLON = function() { + return this.getToken(PqlParser.COLON, 0); +}; + +TaxonContext.prototype.QUESTION_MARK = function() { + return this.getToken(PqlParser.QUESTION_MARK, 0); }; TaxonContext.prototype.enterRule = function(listener) { @@ -1653,37 +889,37 @@ PqlParser.TaxonContext = TaxonContext; PqlParser.prototype.taxon = function() { var localctx = new TaxonContext(this, this._ctx, this.state); - this.enterRule(localctx, 22, PqlParser.RULE_taxon); + this.enterRule(localctx, 8, PqlParser.RULE_taxon); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 153; + this.state = 68; this._errHandler.sync(this); _la = this._input.LA(1); - if(_la===PqlParser.TAXON_OPTIONAL_OPERATOR) { - this.state = 152; - this.match(PqlParser.TAXON_OPTIONAL_OPERATOR); + if(_la===PqlParser.QUESTION_MARK) { + this.state = 67; + localctx.is_optional = this.match(PqlParser.QUESTION_MARK); } - this.state = 158; + this.state = 73; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,17,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,6,this._ctx); if(la_===1) { - this.state = 155; + this.state = 70; localctx.namespace = this.identifierMultipart(); - this.state = 156; + this.state = 71; this.match(PqlParser.PIPE); } - this.state = 160; + this.state = 75; localctx.slug = this.identifierMultipart(); - this.state = 163; + this.state = 78; 
this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,18,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,7,this._ctx); if(la_===1) { - this.state = 161; - this.match(PqlParser.TAXON_TAG_DELIMITER); - this.state = 162; + this.state = 76; + this.match(PqlParser.COLON); + this.state = 77; localctx.tag = this.identifierMultipart(); } @@ -1770,24 +1006,24 @@ PqlParser.IdentifierMultipartContext = IdentifierMultipartContext; PqlParser.prototype.identifierMultipart = function() { var localctx = new IdentifierMultipartContext(this, this._ctx, this.state); - this.enterRule(localctx, 24, PqlParser.RULE_identifierMultipart); + this.enterRule(localctx, 10, PqlParser.RULE_identifierMultipart); try { this.enterOuterAlt(localctx, 1); - this.state = 165; + this.state = 80; this.match(PqlParser.WORD); - this.state = 170; + this.state = 85; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,19,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,8,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { - this.state = 166; + this.state = 81; this.match(PqlParser.DOT); - this.state = 167; + this.state = 82; this.match(PqlParser.WORD); } - this.state = 172; + this.state = 87; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,19,this._ctx); + _alt = this._interp.adaptivePredict(this._input,8,this._ctx); } } catch (re) { @@ -1873,13 +1109,13 @@ PqlParser.LiteralValueContext = LiteralValueContext; PqlParser.prototype.literalValue = function() { var localctx = new LiteralValueContext(this, this._ctx, this.state); - this.enterRule(localctx, 26, PqlParser.RULE_literalValue); + this.enterRule(localctx, 12, PqlParser.RULE_literalValue); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 173; + this.state = 88; _la = this._input.LA(1); - if(!(((((_la - 33)) & ~0x1f) == 0 && ((1 << (_la - 33)) & ((1 << (PqlParser.K_FALSE - 33)) | (1 << (PqlParser.K_NULL - 33)) | (1 << (PqlParser.K_TRUE - 33)) | (1 << (PqlParser.NUMERIC_LITERAL - 33)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 33)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 33)))) !== 0))) { + if(!(((((_la - 30)) & ~0x1f) == 0 && ((1 << (_la - 30)) & ((1 << (PqlParser.K_FALSE - 30)) | (1 << (PqlParser.K_NULL - 30)) | (1 << (PqlParser.K_TRUE - 30)) | (1 << (PqlParser.NUMERIC_LITERAL - 30)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 30)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 30)))) !== 0))) { this._errHandler.recoverInline(this); } else { @@ -1903,7 +1139,7 @@ PqlParser.prototype.literalValue = function() { PqlParser.prototype.sempred = function(localctx, ruleIndex, predIndex) { switch(ruleIndex) { - case 10: + case 1: return this.expr_sempred(localctx, predIndex); default: throw "No predicate with index:" + ruleIndex; diff --git a/js-temp/PqlParserListener.js b/js-temp/PqlParserListener.js index bd2ae90..a1dff0a 100644 --- a/js-temp/PqlParserListener.js +++ b/js-temp/PqlParserListener.js @@ -20,93 +20,30 @@ PqlParserListener.prototype.exitParseTel = function(ctx) { }; -// Enter a parse tree produced by PqlParser#parsePql. -PqlParserListener.prototype.enterParsePql = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#parsePql. -PqlParserListener.prototype.exitParsePql = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#sqlStmtList. -PqlParserListener.prototype.enterSqlStmtList = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#sqlStmtList. 
-PqlParserListener.prototype.exitSqlStmtList = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#sqlStmt. -PqlParserListener.prototype.enterSqlStmt = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#sqlStmt. -PqlParserListener.prototype.exitSqlStmt = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#selectStmt. -PqlParserListener.prototype.enterSelectStmt = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#selectStmt. -PqlParserListener.prototype.exitSelectStmt = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#columns. -PqlParserListener.prototype.enterColumns = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#columns. -PqlParserListener.prototype.exitColumns = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#whereClause. -PqlParserListener.prototype.enterWhereClause = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#whereClause. -PqlParserListener.prototype.exitWhereClause = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#orderByClause. -PqlParserListener.prototype.enterOrderByClause = function(ctx) { -}; - -// Exit a parse tree produced by PqlParser#orderByClause. -PqlParserListener.prototype.exitOrderByClause = function(ctx) { -}; - - -// Enter a parse tree produced by PqlParser#orderExpr. -PqlParserListener.prototype.enterOrderExpr = function(ctx) { +// Enter a parse tree produced by PqlParser#expr. +PqlParserListener.prototype.enterExpr = function(ctx) { }; -// Exit a parse tree produced by PqlParser#orderExpr. -PqlParserListener.prototype.exitOrderExpr = function(ctx) { +// Exit a parse tree produced by PqlParser#expr. +PqlParserListener.prototype.exitExpr = function(ctx) { }; -// Enter a parse tree produced by PqlParser#limitClause. -PqlParserListener.prototype.enterLimitClause = function(ctx) { +// Enter a parse tree produced by PqlParser#fn. +PqlParserListener.prototype.enterFn = function(ctx) { }; -// Exit a parse tree produced by PqlParser#limitClause. -PqlParserListener.prototype.exitLimitClause = function(ctx) { +// Exit a parse tree produced by PqlParser#fn. +PqlParserListener.prototype.exitFn = function(ctx) { }; -// Enter a parse tree produced by PqlParser#expr. -PqlParserListener.prototype.enterExpr = function(ctx) { +// Enter a parse tree produced by PqlParser#exprList. +PqlParserListener.prototype.enterExprList = function(ctx) { }; -// Exit a parse tree produced by PqlParser#expr. -PqlParserListener.prototype.exitExpr = function(ctx) { +// Exit a parse tree produced by PqlParser#exprList. +PqlParserListener.prototype.exitExprList = function(ctx) { }; diff --git a/js-temp/PqlParserVisitor.js b/js-temp/PqlParserVisitor.js index 6970d4e..e1e457a 100644 --- a/js-temp/PqlParserVisitor.js +++ b/js-temp/PqlParserVisitor.js @@ -18,62 +18,20 @@ PqlParserVisitor.prototype.visitParseTel = function(ctx) { }; -// Visit a parse tree produced by PqlParser#parsePql. -PqlParserVisitor.prototype.visitParsePql = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#sqlStmtList. -PqlParserVisitor.prototype.visitSqlStmtList = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#sqlStmt. -PqlParserVisitor.prototype.visitSqlStmt = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#selectStmt. 
-PqlParserVisitor.prototype.visitSelectStmt = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#columns. -PqlParserVisitor.prototype.visitColumns = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#whereClause. -PqlParserVisitor.prototype.visitWhereClause = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#orderByClause. -PqlParserVisitor.prototype.visitOrderByClause = function(ctx) { - return this.visitChildren(ctx); -}; - - -// Visit a parse tree produced by PqlParser#orderExpr. -PqlParserVisitor.prototype.visitOrderExpr = function(ctx) { +// Visit a parse tree produced by PqlParser#expr. +PqlParserVisitor.prototype.visitExpr = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by PqlParser#limitClause. -PqlParserVisitor.prototype.visitLimitClause = function(ctx) { +// Visit a parse tree produced by PqlParser#fn. +PqlParserVisitor.prototype.visitFn = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by PqlParser#expr. -PqlParserVisitor.prototype.visitExpr = function(ctx) { +// Visit a parse tree produced by PqlParser#exprList. +PqlParserVisitor.prototype.visitExprList = function(ctx) { return this.visitChildren(ctx); }; diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 265cf63..2da7ba1 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -78,14 +78,14 @@ class PqlParser ( Parser ): RULE_parseTel = 0 RULE_expr = 1 - RULE_function = 2 + RULE_fn = 2 RULE_exprList = 3 RULE_taxon = 4 RULE_identifierMultipart = 5 RULE_literalValue = 6 - ruleNames = [ "parseTel", "expr", "function", "exprList", "taxon", - "identifierMultipart", "literalValue" ] + ruleNames = [ "parseTel", "expr", "fn", "exprList", "taxon", "identifierMultipart", + "literalValue" ] EOF = Token.EOF AND=1 @@ -236,8 +236,8 @@ def literalValue(self): return self.getTypedRuleContext(PqlParser.LiteralValueContext,0) - def function(self): - return self.getTypedRuleContext(PqlParser.FunctionContext,0) + def fn(self): + return self.getTypedRuleContext(PqlParser.FnContext,0) def taxon(self): @@ -353,7 +353,7 @@ def expr(self, _p:int=0): elif la_ == 4: self.state = 25 - self.function() + self.fn() pass elif la_ == 5: @@ -508,7 +508,7 @@ def expr(self, _p:int=0): return localctx - class FunctionContext(ParserRuleContext): + class FnContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) @@ -531,29 +531,29 @@ def exprList(self): def getRuleIndex(self): - return PqlParser.RULE_function + return PqlParser.RULE_fn def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterFunction" ): - listener.enterFunction(self) + if hasattr( listener, "enterFn" ): + listener.enterFn(self) def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitFunction" ): - listener.exitFunction(self) + if hasattr( listener, "exitFn" ): + listener.exitFn(self) def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitFunction" ): - return visitor.visitFunction(self) + if hasattr( visitor, "visitFn" ): + return visitor.visitFn(self) else: return visitor.visitChildren(self) - def function(self): + def fn(self): - localctx = PqlParser.FunctionContext(self, self._ctx, self.state) - 
self.enterRule(localctx, 4, self.RULE_function) + localctx = PqlParser.FnContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_fn) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 597effb..4f57470 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -26,12 +26,12 @@ def exitExpr(self, ctx:PqlParser.ExprContext): pass - # Enter a parse tree produced by PqlParser#function. - def enterFunction(self, ctx:PqlParser.FunctionContext): + # Enter a parse tree produced by PqlParser#fn. + def enterFn(self, ctx:PqlParser.FnContext): pass - # Exit a parse tree produced by PqlParser#function. - def exitFunction(self, ctx:PqlParser.FunctionContext): + # Exit a parse tree produced by PqlParser#fn. + def exitFn(self, ctx:PqlParser.FnContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index 12aad10..4caf92d 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -19,8 +19,8 @@ def visitExpr(self, ctx:PqlParser.ExprContext): return self.visitChildren(ctx) - # Visit a parse tree produced by PqlParser#function. - def visitFunction(self, ctx:PqlParser.FunctionContext): + # Visit a parse tree produced by PqlParser#fn. + def visitFn(self, ctx:PqlParser.FnContext): return self.visitChildren(ctx) diff --git a/python/src/pql_grammar/from_pql.py b/python/src/pql_grammar/from_pql.py index 721e434..5e9a98a 100644 --- a/python/src/pql_grammar/from_pql.py +++ b/python/src/pql_grammar/from_pql.py @@ -104,7 +104,7 @@ def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optiona return arg_name, arg_value @classmethod - def parse_function(cls, e: PqlParser.FunctionContext) -> ast.Function: + def parse_function(cls, e: PqlParser.FnContext) -> ast.Function: return ast.Function( full_text(e.function_name), tuple([ @@ -190,7 +190,7 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : if v: return cls.parse_taxon(v) - v: PqlParser.FunctionContext = ctx.function() + v: PqlParser.FunctionContext = ctx.fn() if v: return cls.parse_function(v) From cac4984fb18c8a84b4bd9690a501027a4ad1d76f Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 4 Dec 2020 18:18:25 -0800 Subject: [PATCH 25/32] boilerplate fixup --- Makefile | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 493b108..48c7d08 100644 --- a/Makefile +++ b/Makefile @@ -100,4 +100,4 @@ build-code-js: grammar/PqlLexer.tokens grammar/PqlParser.g4 # image-antlr build-code: build-code-python build-code-js -.PHONY: image.antlr build-code-python build-code-js build-code +.PHONY: image-antlr build-code-python build-code-js build-code diff --git a/README.md b/README.md index acd5214..829a614 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To release a new version of the library, follow these steps: ## Introduction This repository contains formal definition of grammar for TEL written in [ANTLR v4](https://github.com/antlr/antlr4). 
-It can generate following components in both python and JavaScript to handle parsing string expressions: +It can generate following components in Python, JavaScript to handle parsing string expressions: - *lexer* - splits string expression into tokens - *parser* - connects tokens into parse tree (similar to AST) From cadc42ed8decb097e0c85744ba4bb105ab029a5c Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 4 Dec 2020 19:55:24 -0800 Subject: [PATCH 26/32] add .raw_value property to ast.Taxon to standardize taxon value expression --- python/src/pql_grammar/model.py | 9 +++++++++ python/tests/model_test.py | 36 +++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 python/tests/model_test.py diff --git a/python/src/pql_grammar/model.py b/python/src/pql_grammar/model.py index 41aa9c2..6d22612 100644 --- a/python/src/pql_grammar/model.py +++ b/python/src/pql_grammar/model.py @@ -46,6 +46,15 @@ class Taxon(Node): is_optional: Optional[bool] = False tag: Optional[str] = None + @property + def raw_value(self): + is_optional = '?' if self.is_optional else '' + namespace = self.namespace + '|' if self.namespace else '' + slug = self.slug + tag = ':' + self.tag if self.tag else '' + return f'{is_optional}{namespace}{slug}{tag}' + + CallArgs = Tuple[Optional[str],Any] diff --git a/python/tests/model_test.py b/python/tests/model_test.py new file mode 100644 index 0000000..051f29c --- /dev/null +++ b/python/tests/model_test.py @@ -0,0 +1,36 @@ +# fmt: off + +import sys +sys.path.append('./src') + +import pytest + +from pql_grammar import model as ast + + +inputs = ( + ast.Taxon('slug'), + ast.Taxon('slug', 'ns'), + ast.Taxon('slug', 'ns', is_optional=True), + ast.Taxon('slug', None, is_optional=True), + ast.Taxon('slug', 'ns', is_optional=True, tag='Tagggg'), + ast.Taxon('slug', None, is_optional=True, tag='Tagggg'), + ast.Taxon('slug', 'ns', tag='Tagggg'), + ast.Taxon('slug', None, tag='Tagggg'), +) + +outputs = ( + 'slug', + 'ns|slug', + '?ns|slug', + '?slug', + '?ns|slug:Tagggg', + '?slug:Tagggg', + 'ns|slug:Tagggg', + 'slug:Tagggg', +) + + +@pytest.mark.parametrize('input, output', zip(inputs, outputs)) +def test_taxon_raw_value(input: ast.Taxon, output: str): + assert input.raw_value == output From 9a38f235e582b54f95e7864a93392b7dffaf1041 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Fri, 4 Dec 2020 20:34:21 -0800 Subject: [PATCH 27/32] enable LIKE, BETWEEN and IN expression operators --- grammar/PqlLexer.g4 | 2 + grammar/PqlParser.g4 | 7 +- js-temp/PqlLexer.js | 628 +++++++++++----------- js-temp/PqlParser.js | 403 +++++++++----- python/src/pql_grammar/antlr/PqlLexer.py | 425 ++++++++------- python/src/pql_grammar/antlr/PqlParser.py | 351 +++++++----- python/src/pql_grammar/from_pql.py | 132 ++++- python/src/pql_grammar/to_pql.py | 41 +- python/tests/ast_pql_test.py | 154 +++++- python/tests/tel_grammar_test.py | 4 +- 10 files changed, 1341 insertions(+), 806 deletions(-) diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 index 1da2e99..9a0b6c2 100644 --- a/grammar/PqlLexer.g4 +++ b/grammar/PqlLexer.g4 @@ -34,7 +34,9 @@ UNDER: '_'; // SQL keywords we adapt: K_AND : A N D; +K_BETWEEN : B E T W E E N; K_FALSE : F A L S E; +K_IN : I N; K_IS : I S; K_ISNULL : I S N U L L; K_LIKE : L I K E; diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index 5526f28..e1b09bd 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -24,10 +24,13 @@ expr | left=expr operator=( PLUS | MINUS ) right=expr | left=expr operator=( LT | LT_EQ | GT | GT_EQ ) 
right=expr | left=expr operator=( ASSIGN | EQ | NOT_EQ1 | NOT_EQ2 | K_IS ) right=expr -// | left=expr is_negated=K_NOT? operator=( K_LIKE | K_BETWEEN ) right=expr -// | left=expr is_negated=K_NOT? operator=K_IN '(' exprList? ')' + | left=expr is_negated=K_NOT? operator=K_LIKE right=expr + | left=expr is_negated=K_NOT? operator=K_IN OPEN_PAREN right_list=exprList CLOSE_PAREN | left=expr operator=( K_AND | AND ) right=expr | left=expr operator=( K_OR | OR ) right=expr + // BETWEEN must come after AND or risk being parsed before it + // resulting in `a BETWEEN b` where `AND c` fragment is outside of BETWEEN expression + | left=expr is_negated=K_NOT? operator=K_BETWEEN right=expr | OPEN_PAREN inner=expr CLOSE_PAREN | literalValue | fn diff --git a/js-temp/PqlLexer.js b/js-temp/PqlLexer.js index ff6a637..26a3460 100644 --- a/js-temp/PqlLexer.js +++ b/js-temp/PqlLexer.js @@ -5,7 +5,7 @@ var antlr4 = require('antlr4/index'); var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u00023\u01ca\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u00025\u01d9\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", @@ -20,274 +20,284 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "8\t8\u00049\t9\u0004:\t:\u0004;\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004", "?\t?\u0004@\t@\u0004A\tA\u0004B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004", "F\tF\u0004G\tG\u0004H\tH\u0004I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004", - "M\tM\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", - "\u0003\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005\u0003", - "\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007\u0003\u0007\u0003", - "\u0007\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003\t\u0003\n\u0003", - "\n\u0003\n\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003\r\u0003\r\u0003", - "\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010\u0003\u0010\u0003", - "\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013\u0003\u0013\u0003", - "\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016\u0003\u0016\u0003", - "\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019\u0003\u0019\u0003", - "\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c\u0003\u001c\u0003", - "\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001e\u0003\u001e\u0003", - "\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003", - " \u0003 \u0003 \u0003!\u0003!\u0003!\u0003!\u0003!\u0003!\u0003!\u0003", - "\"\u0003\"\u0003\"\u0003\"\u0003\"\u0003#\u0003#\u0003#\u0003#\u0003", - "$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003$\u0003%\u0003%\u0003", - "%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'", - "\u0003\'\u0003(\u0006(\u0110\n(\r(\u000e(\u0111\u0003(\u0003(\u0007", - "(\u0116\n(\f(\u000e(\u0119\u000b(\u0005(\u011b\n(\u0003(\u0003(\u0005", - "(\u011f\n(\u0003(\u0006(\u0122\n(\r(\u000e(\u0123\u0005(\u0126\n(\u0003", - "(\u0003(\u0006(\u012a\n(\r(\u000e(\u012b\u0003(\u0003(\u0005(\u0130", - "\n(\u0003(\u0006(\u0133\n(\r(\u000e(\u0134\u0005(\u0137\n(\u0005(\u0139", - "\n(\u0003)\u0003)\u0003*\u0003*\u0003*\u0003*\u0007*\u0141\n*\f*\u000e", - "*\u0144\u000b*\u0003*\u0003*\u0003+\u0003+\u0003+\u0003+\u0007+\u014c", - "\n+\f+\u000e+\u014f\u000b+\u0003+\u0003+\u0003,\u0003,\u0003-\u0003", - 
"-\u0003-\u0003-\u0007-\u0159\n-\f-\u000e-\u015c\u000b-\u0003-\u0003", - "-\u0003.\u0003.\u0003.\u0003.\u0007.\u0164\n.\f.\u000e.\u0167\u000b", - ".\u0003.\u0003.\u0003/\u0003/\u0003/\u0003/\u0003/\u0005/\u0170\n/\u0003", - "/\u0007/\u0173\n/\f/\u000e/\u0176\u000b/\u0003/\u0003/\u00030\u0003", - "0\u00030\u00030\u00070\u017e\n0\f0\u000e0\u0181\u000b0\u00030\u0003", - "0\u00030\u00050\u0186\n0\u00030\u00030\u00031\u00031\u00031\u00031\u0003", - "2\u00032\u00072\u0190\n2\f2\u000e2\u0193\u000b2\u00033\u00033\u0003", - "4\u00034\u00035\u00035\u00036\u00036\u00037\u00037\u00038\u00038\u0003", - "9\u00039\u0003:\u0003:\u0003;\u0003;\u0003<\u0003<\u0003=\u0003=\u0003", - ">\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003B\u0003B\u0003", - "C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003G\u0003G\u0003", - "H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003L\u0003L\u0003", - "M\u0003M\u0003\u017f\u0002N\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006", - "\u000b\u0007\r\b\u000f\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e", - "\u001b\u000f\u001d\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016", - "+\u0017-\u0018/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!", - "A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]0_1a2c3e\u0002g\u0002i\u0002k\u0002m", - "\u0002o\u0002q\u0002s\u0002u\u0002w\u0002y\u0002{\u0002}\u0002\u007f", - "\u0002\u0081\u0002\u0083\u0002\u0085\u0002\u0087\u0002\u0089\u0002\u008b", - "\u0002\u008d\u0002\u008f\u0002\u0091\u0002\u0093\u0002\u0095\u0002\u0097", - "\u0002\u0099\u0002\u0003\u0002$\u0004\u0002--//\u0003\u0002$$\u0003", - "\u0002))\u0004\u0002\f\f\u000f\u000f\u0005\u0002\u000b\r\u000f\u000f", - "\"\"\u0005\u0002C\\aac|\u0006\u00022;C\\aac|\u0003\u00022;\u0004\u0002", - "CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGg", - "g\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004", - "\u0002LLll\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002", - "PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTt", - "t\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004", - "\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01c7", - "\u0002\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002", - "\u0002\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002", - "\u0002\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002", - "\u0002\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002", - "\u0002\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002", - "\u0002\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002", - "\u0002\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002", - "\u0002\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002\u0002\u0002", - "\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002\u0002\u0002", - "\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003", - "\u0002\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002", - "\u0002\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003\u0002\u0002", - "\u0002\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002\u0002\u0002", - "\u00029\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002\u0002\u0002", - "=\u0003\u0002\u0002\u0002\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003", - "\u0002\u0002\u0002\u0002C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002", - "\u0002\u0002\u0002G\u0003\u0002\u0002\u0002\u0002I\u0003\u0002\u0002", - "\u0002\u0002K\u0003\u0002\u0002\u0002\u0002M\u0003\u0002\u0002\u0002", - 
"\u0002O\u0003\u0002\u0002\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002", - "S\u0003\u0002\u0002\u0002\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003", - "\u0002\u0002\u0002\u0002Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002", - "\u0002\u0002\u0002]\u0003\u0002\u0002\u0002\u0002_\u0003\u0002\u0002", - "\u0002\u0002a\u0003\u0002\u0002\u0002\u0002c\u0003\u0002\u0002\u0002", - "\u0003\u009b\u0003\u0002\u0002\u0002\u0005\u009e\u0003\u0002\u0002\u0002", - "\u0007\u00a1\u0003\u0002\u0002\u0002\t\u00a4\u0003\u0002\u0002\u0002", - "\u000b\u00a7\u0003\u0002\u0002\u0002\r\u00aa\u0003\u0002\u0002\u0002", - "\u000f\u00ad\u0003\u0002\u0002\u0002\u0011\u00b0\u0003\u0002\u0002\u0002", - "\u0013\u00b3\u0003\u0002\u0002\u0002\u0015\u00b6\u0003\u0002\u0002\u0002", - "\u0017\u00b8\u0003\u0002\u0002\u0002\u0019\u00ba\u0003\u0002\u0002\u0002", - "\u001b\u00bc\u0003\u0002\u0002\u0002\u001d\u00be\u0003\u0002\u0002\u0002", - "\u001f\u00c0\u0003\u0002\u0002\u0002!\u00c2\u0003\u0002\u0002\u0002", - "#\u00c4\u0003\u0002\u0002\u0002%\u00c6\u0003\u0002\u0002\u0002\'\u00c8", - "\u0003\u0002\u0002\u0002)\u00ca\u0003\u0002\u0002\u0002+\u00cc\u0003", - "\u0002\u0002\u0002-\u00ce\u0003\u0002\u0002\u0002/\u00d0\u0003\u0002", - "\u0002\u00021\u00d2\u0003\u0002\u0002\u00023\u00d4\u0003\u0002\u0002", - "\u00025\u00d6\u0003\u0002\u0002\u00027\u00d8\u0003\u0002\u0002\u0002", - "9\u00da\u0003\u0002\u0002\u0002;\u00dc\u0003\u0002\u0002\u0002=\u00e0", - "\u0003\u0002\u0002\u0002?\u00e6\u0003\u0002\u0002\u0002A\u00e9\u0003", - "\u0002\u0002\u0002C\u00f0\u0003\u0002\u0002\u0002E\u00f5\u0003\u0002", - "\u0002\u0002G\u00f9\u0003\u0002\u0002\u0002I\u0101\u0003\u0002\u0002", - "\u0002K\u0106\u0003\u0002\u0002\u0002M\u0109\u0003\u0002\u0002\u0002", - "O\u0138\u0003\u0002\u0002\u0002Q\u013a\u0003\u0002\u0002\u0002S\u013c", - "\u0003\u0002\u0002\u0002U\u0147\u0003\u0002\u0002\u0002W\u0152\u0003", - "\u0002\u0002\u0002Y\u0154\u0003\u0002\u0002\u0002[\u015f\u0003\u0002", - "\u0002\u0002]\u016f\u0003\u0002\u0002\u0002_\u0179\u0003\u0002\u0002", - "\u0002a\u0189\u0003\u0002\u0002\u0002c\u018d\u0003\u0002\u0002\u0002", - "e\u0194\u0003\u0002\u0002\u0002g\u0196\u0003\u0002\u0002\u0002i\u0198", - "\u0003\u0002\u0002\u0002k\u019a\u0003\u0002\u0002\u0002m\u019c\u0003", - "\u0002\u0002\u0002o\u019e\u0003\u0002\u0002\u0002q\u01a0\u0003\u0002", - "\u0002\u0002s\u01a2\u0003\u0002\u0002\u0002u\u01a4\u0003\u0002\u0002", - "\u0002w\u01a6\u0003\u0002\u0002\u0002y\u01a8\u0003\u0002\u0002\u0002", - "{\u01aa\u0003\u0002\u0002\u0002}\u01ac\u0003\u0002\u0002\u0002\u007f", - "\u01ae\u0003\u0002\u0002\u0002\u0081\u01b0\u0003\u0002\u0002\u0002\u0083", - "\u01b2\u0003\u0002\u0002\u0002\u0085\u01b4\u0003\u0002\u0002\u0002\u0087", - "\u01b6\u0003\u0002\u0002\u0002\u0089\u01b8\u0003\u0002\u0002\u0002\u008b", - "\u01ba\u0003\u0002\u0002\u0002\u008d\u01bc\u0003\u0002\u0002\u0002\u008f", - "\u01be\u0003\u0002\u0002\u0002\u0091\u01c0\u0003\u0002\u0002\u0002\u0093", - "\u01c2\u0003\u0002\u0002\u0002\u0095\u01c4\u0003\u0002\u0002\u0002\u0097", - "\u01c6\u0003\u0002\u0002\u0002\u0099\u01c8\u0003\u0002\u0002\u0002\u009b", - "\u009c\u0007(\u0002\u0002\u009c\u009d\u0007(\u0002\u0002\u009d\u0004", - "\u0003\u0002\u0002\u0002\u009e\u009f\u0007?\u0002\u0002\u009f\u00a0", - "\u0007?\u0002\u0002\u00a0\u0006\u0003\u0002\u0002\u0002\u00a1\u00a2", - "\u0007@\u0002\u0002\u00a2\u00a3\u0007?\u0002\u0002\u00a3\b\u0003\u0002", - "\u0002\u0002\u00a4\u00a5\u0007>\u0002\u0002\u00a5\u00a6\u0007?\u0002", - "\u0002\u00a6\n\u0003\u0002\u0002\u0002\u00a7\u00a8\u0007#\u0002\u0002", - 
"\u00a8\u00a9\u0007?\u0002\u0002\u00a9\f\u0003\u0002\u0002\u0002\u00aa", - "\u00ab\u0007>\u0002\u0002\u00ab\u00ac\u0007@\u0002\u0002\u00ac\u000e", - "\u0003\u0002\u0002\u0002\u00ad\u00ae\u0007~\u0002\u0002\u00ae\u00af", - "\u0007~\u0002\u0002\u00af\u0010\u0003\u0002\u0002\u0002\u00b0\u00b1", - "\u0007>\u0002\u0002\u00b1\u00b2\u0007>\u0002\u0002\u00b2\u0012\u0003", - "\u0002\u0002\u0002\u00b3\u00b4\u0007@\u0002\u0002\u00b4\u00b5\u0007", - "@\u0002\u0002\u00b5\u0014\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007", - "(\u0002\u0002\u00b7\u0016\u0003\u0002\u0002\u0002\u00b8\u00b9\u0007", - "?\u0002\u0002\u00b9\u0018\u0003\u0002\u0002\u0002\u00ba\u00bb\u0007", - "+\u0002\u0002\u00bb\u001a\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007", - "<\u0002\u0002\u00bd\u001c\u0003\u0002\u0002\u0002\u00be\u00bf\u0007", - ".\u0002\u0002\u00bf\u001e\u0003\u0002\u0002\u0002\u00c0\u00c1\u0007", - "0\u0002\u0002\u00c1 \u0003\u0002\u0002\u0002\u00c2\u00c3\u00071\u0002", - "\u0002\u00c3\"\u0003\u0002\u0002\u0002\u00c4\u00c5\u0007@\u0002\u0002", - "\u00c5$\u0003\u0002\u0002\u0002\u00c6\u00c7\u0007>\u0002\u0002\u00c7", - "&\u0003\u0002\u0002\u0002\u00c8\u00c9\u0007/\u0002\u0002\u00c9(\u0003", - "\u0002\u0002\u0002\u00ca\u00cb\u0007\'\u0002\u0002\u00cb*\u0003\u0002", - "\u0002\u0002\u00cc\u00cd\u0007*\u0002\u0002\u00cd,\u0003\u0002\u0002", - "\u0002\u00ce\u00cf\u0007~\u0002\u0002\u00cf.\u0003\u0002\u0002\u0002", - "\u00d0\u00d1\u0007-\u0002\u0002\u00d10\u0003\u0002\u0002\u0002\u00d2", - "\u00d3\u0007A\u0002\u0002\u00d32\u0003\u0002\u0002\u0002\u00d4\u00d5", - "\u0007=\u0002\u0002\u00d54\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", - ",\u0002\u0002\u00d76\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007\u0080", - "\u0002\u0002\u00d98\u0003\u0002\u0002\u0002\u00da\u00db\u0007a\u0002", - "\u0002\u00db:\u0003\u0002\u0002\u0002\u00dc\u00dd\u0005g4\u0002\u00dd", - "\u00de\u0005\u0081A\u0002\u00de\u00df\u0005m7\u0002\u00df<\u0003\u0002", - "\u0002\u0002\u00e0\u00e1\u0005q9\u0002\u00e1\u00e2\u0005g4\u0002\u00e2", - "\u00e3\u0005}?\u0002\u00e3\u00e4\u0005\u008bF\u0002\u00e4\u00e5\u0005", - "o8\u0002\u00e5>\u0003\u0002\u0002\u0002\u00e6\u00e7\u0005w<\u0002\u00e7", - "\u00e8\u0005\u008bF\u0002\u00e8@\u0003\u0002\u0002\u0002\u00e9\u00ea", - "\u0005w<\u0002\u00ea\u00eb\u0005\u008bF\u0002\u00eb\u00ec\u0005\u0081", - "A\u0002\u00ec\u00ed\u0005\u008fH\u0002\u00ed\u00ee\u0005}?\u0002\u00ee", - "\u00ef\u0005}?\u0002\u00efB\u0003\u0002\u0002\u0002\u00f0\u00f1\u0005", - "}?\u0002\u00f1\u00f2\u0005w<\u0002\u00f2\u00f3\u0005{>\u0002\u00f3\u00f4", - "\u0005o8\u0002\u00f4D\u0003\u0002\u0002\u0002\u00f5\u00f6\u0005\u0081", - "A\u0002\u00f6\u00f7\u0005\u0083B\u0002\u00f7\u00f8\u0005\u008dG\u0002", - "\u00f8F\u0003\u0002\u0002\u0002\u00f9\u00fa\u0005\u0081A\u0002\u00fa", - "\u00fb\u0005\u0083B\u0002\u00fb\u00fc\u0005\u008dG\u0002\u00fc\u00fd", - "\u0005\u0081A\u0002\u00fd\u00fe\u0005\u008fH\u0002\u00fe\u00ff\u0005", - "}?\u0002\u00ff\u0100\u0005}?\u0002\u0100H\u0003\u0002\u0002\u0002\u0101", - "\u0102\u0005\u0081A\u0002\u0102\u0103\u0005\u008fH\u0002\u0103\u0104", - "\u0005}?\u0002\u0104\u0105\u0005}?\u0002\u0105J\u0003\u0002\u0002\u0002", - "\u0106\u0107\u0005\u0083B\u0002\u0107\u0108\u0005\u0089E\u0002\u0108", - "L\u0003\u0002\u0002\u0002\u0109\u010a\u0005\u008dG\u0002\u010a\u010b", - "\u0005\u0089E\u0002\u010b\u010c\u0005\u008fH\u0002\u010c\u010d\u0005", - "o8\u0002\u010dN\u0003\u0002\u0002\u0002\u010e\u0110\u0005e3\u0002\u010f", - "\u010e\u0003\u0002\u0002\u0002\u0110\u0111\u0003\u0002\u0002\u0002\u0111", - 
"\u010f\u0003\u0002\u0002\u0002\u0111\u0112\u0003\u0002\u0002\u0002\u0112", - "\u011a\u0003\u0002\u0002\u0002\u0113\u0117\u00070\u0002\u0002\u0114", - "\u0116\u0005e3\u0002\u0115\u0114\u0003\u0002\u0002\u0002\u0116\u0119", - "\u0003\u0002\u0002\u0002\u0117\u0115\u0003\u0002\u0002\u0002\u0117\u0118", - "\u0003\u0002\u0002\u0002\u0118\u011b\u0003\u0002\u0002\u0002\u0119\u0117", - "\u0003\u0002\u0002\u0002\u011a\u0113\u0003\u0002\u0002\u0002\u011a\u011b", - "\u0003\u0002\u0002\u0002\u011b\u0125\u0003\u0002\u0002\u0002\u011c\u011e", - "\u0005o8\u0002\u011d\u011f\t\u0002\u0002\u0002\u011e\u011d\u0003\u0002", - "\u0002\u0002\u011e\u011f\u0003\u0002\u0002\u0002\u011f\u0121\u0003\u0002", - "\u0002\u0002\u0120\u0122\u0005e3\u0002\u0121\u0120\u0003\u0002\u0002", - "\u0002\u0122\u0123\u0003\u0002\u0002\u0002\u0123\u0121\u0003\u0002\u0002", - "\u0002\u0123\u0124\u0003\u0002\u0002\u0002\u0124\u0126\u0003\u0002\u0002", - "\u0002\u0125\u011c\u0003\u0002\u0002\u0002\u0125\u0126\u0003\u0002\u0002", - "\u0002\u0126\u0139\u0003\u0002\u0002\u0002\u0127\u0129\u00070\u0002", - "\u0002\u0128\u012a\u0005e3\u0002\u0129\u0128\u0003\u0002\u0002\u0002", - "\u012a\u012b\u0003\u0002\u0002\u0002\u012b\u0129\u0003\u0002\u0002\u0002", - "\u012b\u012c\u0003\u0002\u0002\u0002\u012c\u0136\u0003\u0002\u0002\u0002", - "\u012d\u012f\u0005o8\u0002\u012e\u0130\t\u0002\u0002\u0002\u012f\u012e", - "\u0003\u0002\u0002\u0002\u012f\u0130\u0003\u0002\u0002\u0002\u0130\u0132", - "\u0003\u0002\u0002\u0002\u0131\u0133\u0005e3\u0002\u0132\u0131\u0003", - "\u0002\u0002\u0002\u0133\u0134\u0003\u0002\u0002\u0002\u0134\u0132\u0003", - "\u0002\u0002\u0002\u0134\u0135\u0003\u0002\u0002\u0002\u0135\u0137\u0003", - "\u0002\u0002\u0002\u0136\u012d\u0003\u0002\u0002\u0002\u0136\u0137\u0003", - "\u0002\u0002\u0002\u0137\u0139\u0003\u0002\u0002\u0002\u0138\u010f\u0003", - "\u0002\u0002\u0002\u0138\u0127\u0003\u0002\u0002\u0002\u0139P\u0003", - "\u0002\u0002\u0002\u013a\u013b\u0005S*\u0002\u013bR\u0003\u0002\u0002", - "\u0002\u013c\u0142\u0007$\u0002\u0002\u013d\u013e\u0007^\u0002\u0002", - "\u013e\u0141\u0007$\u0002\u0002\u013f\u0141\n\u0003\u0002\u0002\u0140", - "\u013d\u0003\u0002\u0002\u0002\u0140\u013f\u0003\u0002\u0002\u0002\u0141", - "\u0144\u0003\u0002\u0002\u0002\u0142\u0140\u0003\u0002\u0002\u0002\u0142", - "\u0143\u0003\u0002\u0002\u0002\u0143\u0145\u0003\u0002\u0002\u0002\u0144", - "\u0142\u0003\u0002\u0002\u0002\u0145\u0146\u0007$\u0002\u0002\u0146", - "T\u0003\u0002\u0002\u0002\u0147\u014d\u0007$\u0002\u0002\u0148\u0149", - "\u0007$\u0002\u0002\u0149\u014c\u0007$\u0002\u0002\u014a\u014c\n\u0003", - "\u0002\u0002\u014b\u0148\u0003\u0002\u0002\u0002\u014b\u014a\u0003\u0002", - "\u0002\u0002\u014c\u014f\u0003\u0002\u0002\u0002\u014d\u014b\u0003\u0002", - "\u0002\u0002\u014d\u014e\u0003\u0002\u0002\u0002\u014e\u0150\u0003\u0002", - "\u0002\u0002\u014f\u014d\u0003\u0002\u0002\u0002\u0150\u0151\u0007$", - "\u0002\u0002\u0151V\u0003\u0002\u0002\u0002\u0152\u0153\u0005Y-\u0002", - "\u0153X\u0003\u0002\u0002\u0002\u0154\u015a\u0007)\u0002\u0002\u0155", - "\u0156\u0007^\u0002\u0002\u0156\u0159\u0007)\u0002\u0002\u0157\u0159", - "\n\u0004\u0002\u0002\u0158\u0155\u0003\u0002\u0002\u0002\u0158\u0157", - "\u0003\u0002\u0002\u0002\u0159\u015c\u0003\u0002\u0002\u0002\u015a\u0158", - "\u0003\u0002\u0002\u0002\u015a\u015b\u0003\u0002\u0002\u0002\u015b\u015d", - "\u0003\u0002\u0002\u0002\u015c\u015a\u0003\u0002\u0002\u0002\u015d\u015e", - "\u0007)\u0002\u0002\u015eZ\u0003\u0002\u0002\u0002\u015f\u0165\u0007", - 
")\u0002\u0002\u0160\u0161\u0007)\u0002\u0002\u0161\u0164\u0007)\u0002", - "\u0002\u0162\u0164\n\u0004\u0002\u0002\u0163\u0160\u0003\u0002\u0002", - "\u0002\u0163\u0162\u0003\u0002\u0002\u0002\u0164\u0167\u0003\u0002\u0002", - "\u0002\u0165\u0163\u0003\u0002\u0002\u0002\u0165\u0166\u0003\u0002\u0002", - "\u0002\u0166\u0168\u0003\u0002\u0002\u0002\u0167\u0165\u0003\u0002\u0002", - "\u0002\u0168\u0169\u0007)\u0002\u0002\u0169\\\u0003\u0002\u0002\u0002", - "\u016a\u016b\u0007/\u0002\u0002\u016b\u0170\u0007/\u0002\u0002\u016c", - "\u016d\u00071\u0002\u0002\u016d\u0170\u00071\u0002\u0002\u016e\u0170", - "\u0007%\u0002\u0002\u016f\u016a\u0003\u0002\u0002\u0002\u016f\u016c", - "\u0003\u0002\u0002\u0002\u016f\u016e\u0003\u0002\u0002\u0002\u0170\u0174", - "\u0003\u0002\u0002\u0002\u0171\u0173\n\u0005\u0002\u0002\u0172\u0171", - "\u0003\u0002\u0002\u0002\u0173\u0176\u0003\u0002\u0002\u0002\u0174\u0172", - "\u0003\u0002\u0002\u0002\u0174\u0175\u0003\u0002\u0002\u0002\u0175\u0177", - "\u0003\u0002\u0002\u0002\u0176\u0174\u0003\u0002\u0002\u0002\u0177\u0178", - "\b/\u0002\u0002\u0178^\u0003\u0002\u0002\u0002\u0179\u017a\u00071\u0002", - "\u0002\u017a\u017b\u0007,\u0002\u0002\u017b\u017f\u0003\u0002\u0002", - "\u0002\u017c\u017e\u000b\u0002\u0002\u0002\u017d\u017c\u0003\u0002\u0002", - "\u0002\u017e\u0181\u0003\u0002\u0002\u0002\u017f\u0180\u0003\u0002\u0002", - "\u0002\u017f\u017d\u0003\u0002\u0002\u0002\u0180\u0185\u0003\u0002\u0002", - "\u0002\u0181\u017f\u0003\u0002\u0002\u0002\u0182\u0183\u0007,\u0002", - "\u0002\u0183\u0186\u00071\u0002\u0002\u0184\u0186\u0007\u0002\u0002", - "\u0003\u0185\u0182\u0003\u0002\u0002\u0002\u0185\u0184\u0003\u0002\u0002", - "\u0002\u0186\u0187\u0003\u0002\u0002\u0002\u0187\u0188\b0\u0002\u0002", - "\u0188`\u0003\u0002\u0002\u0002\u0189\u018a\t\u0006\u0002\u0002\u018a", - "\u018b\u0003\u0002\u0002\u0002\u018b\u018c\b1\u0002\u0002\u018cb\u0003", - "\u0002\u0002\u0002\u018d\u0191\t\u0007\u0002\u0002\u018e\u0190\t\b\u0002", - "\u0002\u018f\u018e\u0003\u0002\u0002\u0002\u0190\u0193\u0003\u0002\u0002", - "\u0002\u0191\u018f\u0003\u0002\u0002\u0002\u0191\u0192\u0003\u0002\u0002", - "\u0002\u0192d\u0003\u0002\u0002\u0002\u0193\u0191\u0003\u0002\u0002", - "\u0002\u0194\u0195\t\t\u0002\u0002\u0195f\u0003\u0002\u0002\u0002\u0196", - "\u0197\t\n\u0002\u0002\u0197h\u0003\u0002\u0002\u0002\u0198\u0199\t", - "\u000b\u0002\u0002\u0199j\u0003\u0002\u0002\u0002\u019a\u019b\t\f\u0002", - "\u0002\u019bl\u0003\u0002\u0002\u0002\u019c\u019d\t\r\u0002\u0002\u019d", - "n\u0003\u0002\u0002\u0002\u019e\u019f\t\u000e\u0002\u0002\u019fp\u0003", - "\u0002\u0002\u0002\u01a0\u01a1\t\u000f\u0002\u0002\u01a1r\u0003\u0002", - "\u0002\u0002\u01a2\u01a3\t\u0010\u0002\u0002\u01a3t\u0003\u0002\u0002", - "\u0002\u01a4\u01a5\t\u0011\u0002\u0002\u01a5v\u0003\u0002\u0002\u0002", - "\u01a6\u01a7\t\u0012\u0002\u0002\u01a7x\u0003\u0002\u0002\u0002\u01a8", - "\u01a9\t\u0013\u0002\u0002\u01a9z\u0003\u0002\u0002\u0002\u01aa\u01ab", - "\t\u0014\u0002\u0002\u01ab|\u0003\u0002\u0002\u0002\u01ac\u01ad\t\u0015", - "\u0002\u0002\u01ad~\u0003\u0002\u0002\u0002\u01ae\u01af\t\u0016\u0002", - "\u0002\u01af\u0080\u0003\u0002\u0002\u0002\u01b0\u01b1\t\u0017\u0002", - "\u0002\u01b1\u0082\u0003\u0002\u0002\u0002\u01b2\u01b3\t\u0018\u0002", - "\u0002\u01b3\u0084\u0003\u0002\u0002\u0002\u01b4\u01b5\t\u0019\u0002", - "\u0002\u01b5\u0086\u0003\u0002\u0002\u0002\u01b6\u01b7\t\u001a\u0002", - "\u0002\u01b7\u0088\u0003\u0002\u0002\u0002\u01b8\u01b9\t\u001b\u0002", - "\u0002\u01b9\u008a\u0003\u0002\u0002\u0002\u01ba\u01bb\t\u001c\u0002", 
- "\u0002\u01bb\u008c\u0003\u0002\u0002\u0002\u01bc\u01bd\t\u001d\u0002", - "\u0002\u01bd\u008e\u0003\u0002\u0002\u0002\u01be\u01bf\t\u001e\u0002", - "\u0002\u01bf\u0090\u0003\u0002\u0002\u0002\u01c0\u01c1\t\u001f\u0002", - "\u0002\u01c1\u0092\u0003\u0002\u0002\u0002\u01c2\u01c3\t \u0002\u0002", - "\u01c3\u0094\u0003\u0002\u0002\u0002\u01c4\u01c5\t!\u0002\u0002\u01c5", - "\u0096\u0003\u0002\u0002\u0002\u01c6\u01c7\t\"\u0002\u0002\u01c7\u0098", - "\u0003\u0002\u0002\u0002\u01c8\u01c9\t#\u0002\u0002\u01c9\u009a\u0003", - "\u0002\u0002\u0002\u001b\u0002\u0111\u0117\u011a\u011e\u0123\u0125\u012b", - "\u012f\u0134\u0136\u0138\u0140\u0142\u014b\u014d\u0158\u015a\u0163\u0165", - "\u016f\u0174\u017f\u0185\u0191\u0003\u0002\u0003\u0002"].join(""); + "M\tM\u0004N\tN\u0004O\tO\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0005", + "\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007", + "\u0003\u0007\u0003\u0007\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003", + "\t\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003", + "\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010", + "\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013", + "\u0003\u0013\u0003\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016", + "\u0003\u0016\u0003\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019", + "\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c", + "\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001e", + "\u0003\u001e\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f", + "\u0003\u001f\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003 \u0003 \u0003", + " \u0003 \u0003!\u0003!\u0003!\u0003\"\u0003\"\u0003\"\u0003#\u0003#", + "\u0003#\u0003#\u0003#\u0003#\u0003#\u0003$\u0003$\u0003$\u0003$\u0003", + "$\u0003%\u0003%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003&\u0003&\u0003", + "&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'\u0003\'\u0003(\u0003", + "(\u0003(\u0003)\u0003)\u0003)\u0003)\u0003)\u0003*\u0006*\u011f\n*\r", + "*\u000e*\u0120\u0003*\u0003*\u0007*\u0125\n*\f*\u000e*\u0128\u000b*", + "\u0005*\u012a\n*\u0003*\u0003*\u0005*\u012e\n*\u0003*\u0006*\u0131\n", + "*\r*\u000e*\u0132\u0005*\u0135\n*\u0003*\u0003*\u0006*\u0139\n*\r*\u000e", + "*\u013a\u0003*\u0003*\u0005*\u013f\n*\u0003*\u0006*\u0142\n*\r*\u000e", + "*\u0143\u0005*\u0146\n*\u0005*\u0148\n*\u0003+\u0003+\u0003,\u0003,", + "\u0003,\u0003,\u0007,\u0150\n,\f,\u000e,\u0153\u000b,\u0003,\u0003,", + "\u0003-\u0003-\u0003-\u0003-\u0007-\u015b\n-\f-\u000e-\u015e\u000b-", + "\u0003-\u0003-\u0003.\u0003.\u0003/\u0003/\u0003/\u0003/\u0007/\u0168", + "\n/\f/\u000e/\u016b\u000b/\u0003/\u0003/\u00030\u00030\u00030\u0003", + "0\u00070\u0173\n0\f0\u000e0\u0176\u000b0\u00030\u00030\u00031\u0003", + "1\u00031\u00031\u00031\u00051\u017f\n1\u00031\u00071\u0182\n1\f1\u000e", + "1\u0185\u000b1\u00031\u00031\u00032\u00032\u00032\u00032\u00072\u018d", + "\n2\f2\u000e2\u0190\u000b2\u00032\u00032\u00032\u00052\u0195\n2\u0003", + "2\u00032\u00033\u00033\u00033\u00033\u00034\u00034\u00074\u019f\n4\f", + "4\u000e4\u01a2\u000b4\u00035\u00035\u00036\u00036\u00037\u00037\u0003", + "8\u00038\u00039\u00039\u0003:\u0003:\u0003;\u0003;\u0003<\u0003<\u0003", + "=\u0003=\u0003>\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003", + "B\u0003B\u0003C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003", + "G\u0003G\u0003H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003", + 
"L\u0003L\u0003M\u0003M\u0003N\u0003N\u0003O\u0003O\u0003\u018e\u0002", + "P\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b\u000f", + "\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b\u000f\u001d", + "\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+\u0017-\u0018", + "/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!A\"C#E$G%I&K\'", + "M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i\u0002k\u0002m\u0002o\u0002q\u0002s\u0002", + "u\u0002w\u0002y\u0002{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002", + "\u0085\u0002\u0087\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002", + "\u0091\u0002\u0093\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002", + "\u009d\u0002\u0003\u0002$\u0004\u0002--//\u0003\u0002$$\u0003\u0002", + "))\u0004\u0002\f\f\u000f\u000f\u0005\u0002\u000b\r\u000f\u000f\"\"\u0005", + "\u0002C\\aac|\u0006\u00022;C\\aac|\u0003\u00022;\u0004\u0002CCcc\u0004", + "\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGgg\u0004\u0002", + "HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004\u0002LLl", + "l\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002PPpp\u0004", + "\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTtt\u0004\u0002", + "UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004\u0002YYy", + "y\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01d6\u0002", + "\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002\u0002", + "\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002\u0002", + "\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002\u0002", + "\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002\u0002", + "\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002\u0002", + "\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002\u0002", + "\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002\u0002", + "\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002\u0002\u0002\u0002", + "#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002\u0002\u0002\'\u0003", + "\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003\u0002", + "\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002\u0002", + "\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003\u0002\u0002\u0002", + "\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002\u0002\u0002\u0002", + "9\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002\u0002\u0002=\u0003", + "\u0002\u0002\u0002\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003\u0002", + "\u0002\u0002\u0002C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002\u0002", + "\u0002\u0002G\u0003\u0002\u0002\u0002\u0002I\u0003\u0002\u0002\u0002", + "\u0002K\u0003\u0002\u0002\u0002\u0002M\u0003\u0002\u0002\u0002\u0002", + "O\u0003\u0002\u0002\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002S\u0003", + "\u0002\u0002\u0002\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003\u0002", + "\u0002\u0002\u0002Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002\u0002", + "\u0002\u0002]\u0003\u0002\u0002\u0002\u0002_\u0003\u0002\u0002\u0002", + "\u0002a\u0003\u0002\u0002\u0002\u0002c\u0003\u0002\u0002\u0002\u0002", + "e\u0003\u0002\u0002\u0002\u0002g\u0003\u0002\u0002\u0002\u0003\u009f", + "\u0003\u0002\u0002\u0002\u0005\u00a2\u0003\u0002\u0002\u0002\u0007\u00a5", + "\u0003\u0002\u0002\u0002\t\u00a8\u0003\u0002\u0002\u0002\u000b\u00ab", + "\u0003\u0002\u0002\u0002\r\u00ae\u0003\u0002\u0002\u0002\u000f\u00b1", + "\u0003\u0002\u0002\u0002\u0011\u00b4\u0003\u0002\u0002\u0002\u0013\u00b7", + "\u0003\u0002\u0002\u0002\u0015\u00ba\u0003\u0002\u0002\u0002\u0017\u00bc", + 
"\u0003\u0002\u0002\u0002\u0019\u00be\u0003\u0002\u0002\u0002\u001b\u00c0", + "\u0003\u0002\u0002\u0002\u001d\u00c2\u0003\u0002\u0002\u0002\u001f\u00c4", + "\u0003\u0002\u0002\u0002!\u00c6\u0003\u0002\u0002\u0002#\u00c8\u0003", + "\u0002\u0002\u0002%\u00ca\u0003\u0002\u0002\u0002\'\u00cc\u0003\u0002", + "\u0002\u0002)\u00ce\u0003\u0002\u0002\u0002+\u00d0\u0003\u0002\u0002", + "\u0002-\u00d2\u0003\u0002\u0002\u0002/\u00d4\u0003\u0002\u0002\u0002", + "1\u00d6\u0003\u0002\u0002\u00023\u00d8\u0003\u0002\u0002\u00025\u00da", + "\u0003\u0002\u0002\u00027\u00dc\u0003\u0002\u0002\u00029\u00de\u0003", + "\u0002\u0002\u0002;\u00e0\u0003\u0002\u0002\u0002=\u00e4\u0003\u0002", + "\u0002\u0002?\u00ec\u0003\u0002\u0002\u0002A\u00f2\u0003\u0002\u0002", + "\u0002C\u00f5\u0003\u0002\u0002\u0002E\u00f8\u0003\u0002\u0002\u0002", + "G\u00ff\u0003\u0002\u0002\u0002I\u0104\u0003\u0002\u0002\u0002K\u0108", + "\u0003\u0002\u0002\u0002M\u0110\u0003\u0002\u0002\u0002O\u0115\u0003", + "\u0002\u0002\u0002Q\u0118\u0003\u0002\u0002\u0002S\u0147\u0003\u0002", + "\u0002\u0002U\u0149\u0003\u0002\u0002\u0002W\u014b\u0003\u0002\u0002", + "\u0002Y\u0156\u0003\u0002\u0002\u0002[\u0161\u0003\u0002\u0002\u0002", + "]\u0163\u0003\u0002\u0002\u0002_\u016e\u0003\u0002\u0002\u0002a\u017e", + "\u0003\u0002\u0002\u0002c\u0188\u0003\u0002\u0002\u0002e\u0198\u0003", + "\u0002\u0002\u0002g\u019c\u0003\u0002\u0002\u0002i\u01a3\u0003\u0002", + "\u0002\u0002k\u01a5\u0003\u0002\u0002\u0002m\u01a7\u0003\u0002\u0002", + "\u0002o\u01a9\u0003\u0002\u0002\u0002q\u01ab\u0003\u0002\u0002\u0002", + "s\u01ad\u0003\u0002\u0002\u0002u\u01af\u0003\u0002\u0002\u0002w\u01b1", + "\u0003\u0002\u0002\u0002y\u01b3\u0003\u0002\u0002\u0002{\u01b5\u0003", + "\u0002\u0002\u0002}\u01b7\u0003\u0002\u0002\u0002\u007f\u01b9\u0003", + "\u0002\u0002\u0002\u0081\u01bb\u0003\u0002\u0002\u0002\u0083\u01bd\u0003", + "\u0002\u0002\u0002\u0085\u01bf\u0003\u0002\u0002\u0002\u0087\u01c1\u0003", + "\u0002\u0002\u0002\u0089\u01c3\u0003\u0002\u0002\u0002\u008b\u01c5\u0003", + "\u0002\u0002\u0002\u008d\u01c7\u0003\u0002\u0002\u0002\u008f\u01c9\u0003", + "\u0002\u0002\u0002\u0091\u01cb\u0003\u0002\u0002\u0002\u0093\u01cd\u0003", + "\u0002\u0002\u0002\u0095\u01cf\u0003\u0002\u0002\u0002\u0097\u01d1\u0003", + "\u0002\u0002\u0002\u0099\u01d3\u0003\u0002\u0002\u0002\u009b\u01d5\u0003", + "\u0002\u0002\u0002\u009d\u01d7\u0003\u0002\u0002\u0002\u009f\u00a0\u0007", + "(\u0002\u0002\u00a0\u00a1\u0007(\u0002\u0002\u00a1\u0004\u0003\u0002", + "\u0002\u0002\u00a2\u00a3\u0007?\u0002\u0002\u00a3\u00a4\u0007?\u0002", + "\u0002\u00a4\u0006\u0003\u0002\u0002\u0002\u00a5\u00a6\u0007@\u0002", + "\u0002\u00a6\u00a7\u0007?\u0002\u0002\u00a7\b\u0003\u0002\u0002\u0002", + "\u00a8\u00a9\u0007>\u0002\u0002\u00a9\u00aa\u0007?\u0002\u0002\u00aa", + "\n\u0003\u0002\u0002\u0002\u00ab\u00ac\u0007#\u0002\u0002\u00ac\u00ad", + "\u0007?\u0002\u0002\u00ad\f\u0003\u0002\u0002\u0002\u00ae\u00af\u0007", + ">\u0002\u0002\u00af\u00b0\u0007@\u0002\u0002\u00b0\u000e\u0003\u0002", + "\u0002\u0002\u00b1\u00b2\u0007~\u0002\u0002\u00b2\u00b3\u0007~\u0002", + "\u0002\u00b3\u0010\u0003\u0002\u0002\u0002\u00b4\u00b5\u0007>\u0002", + "\u0002\u00b5\u00b6\u0007>\u0002\u0002\u00b6\u0012\u0003\u0002\u0002", + "\u0002\u00b7\u00b8\u0007@\u0002\u0002\u00b8\u00b9\u0007@\u0002\u0002", + "\u00b9\u0014\u0003\u0002\u0002\u0002\u00ba\u00bb\u0007(\u0002\u0002", + "\u00bb\u0016\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007?\u0002\u0002", + "\u00bd\u0018\u0003\u0002\u0002\u0002\u00be\u00bf\u0007+\u0002\u0002", + 
"\u00bf\u001a\u0003\u0002\u0002\u0002\u00c0\u00c1\u0007<\u0002\u0002", + "\u00c1\u001c\u0003\u0002\u0002\u0002\u00c2\u00c3\u0007.\u0002\u0002", + "\u00c3\u001e\u0003\u0002\u0002\u0002\u00c4\u00c5\u00070\u0002\u0002", + "\u00c5 \u0003\u0002\u0002\u0002\u00c6\u00c7\u00071\u0002\u0002\u00c7", + "\"\u0003\u0002\u0002\u0002\u00c8\u00c9\u0007@\u0002\u0002\u00c9$\u0003", + "\u0002\u0002\u0002\u00ca\u00cb\u0007>\u0002\u0002\u00cb&\u0003\u0002", + "\u0002\u0002\u00cc\u00cd\u0007/\u0002\u0002\u00cd(\u0003\u0002\u0002", + "\u0002\u00ce\u00cf\u0007\'\u0002\u0002\u00cf*\u0003\u0002\u0002\u0002", + "\u00d0\u00d1\u0007*\u0002\u0002\u00d1,\u0003\u0002\u0002\u0002\u00d2", + "\u00d3\u0007~\u0002\u0002\u00d3.\u0003\u0002\u0002\u0002\u00d4\u00d5", + "\u0007-\u0002\u0002\u00d50\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", + "A\u0002\u0002\u00d72\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007=\u0002", + "\u0002\u00d94\u0003\u0002\u0002\u0002\u00da\u00db\u0007,\u0002\u0002", + "\u00db6\u0003\u0002\u0002\u0002\u00dc\u00dd\u0007\u0080\u0002\u0002", + "\u00dd8\u0003\u0002\u0002\u0002\u00de\u00df\u0007a\u0002\u0002\u00df", + ":\u0003\u0002\u0002\u0002\u00e0\u00e1\u0005k6\u0002\u00e1\u00e2\u0005", + "\u0085C\u0002\u00e2\u00e3\u0005q9\u0002\u00e3<\u0003\u0002\u0002\u0002", + "\u00e4\u00e5\u0005m7\u0002\u00e5\u00e6\u0005s:\u0002\u00e6\u00e7\u0005", + "\u0091I\u0002\u00e7\u00e8\u0005\u0097L\u0002\u00e8\u00e9\u0005s:\u0002", + "\u00e9\u00ea\u0005s:\u0002\u00ea\u00eb\u0005\u0085C\u0002\u00eb>\u0003", + "\u0002\u0002\u0002\u00ec\u00ed\u0005u;\u0002\u00ed\u00ee\u0005k6\u0002", + "\u00ee\u00ef\u0005\u0081A\u0002\u00ef\u00f0\u0005\u008fH\u0002\u00f0", + "\u00f1\u0005s:\u0002\u00f1@\u0003\u0002\u0002\u0002\u00f2\u00f3\u0005", + "{>\u0002\u00f3\u00f4\u0005\u0085C\u0002\u00f4B\u0003\u0002\u0002\u0002", + "\u00f5\u00f6\u0005{>\u0002\u00f6\u00f7\u0005\u008fH\u0002\u00f7D\u0003", + "\u0002\u0002\u0002\u00f8\u00f9\u0005{>\u0002\u00f9\u00fa\u0005\u008f", + "H\u0002\u00fa\u00fb\u0005\u0085C\u0002\u00fb\u00fc\u0005\u0093J\u0002", + "\u00fc\u00fd\u0005\u0081A\u0002\u00fd\u00fe\u0005\u0081A\u0002\u00fe", + "F\u0003\u0002\u0002\u0002\u00ff\u0100\u0005\u0081A\u0002\u0100\u0101", + "\u0005{>\u0002\u0101\u0102\u0005\u007f@\u0002\u0102\u0103\u0005s:\u0002", + "\u0103H\u0003\u0002\u0002\u0002\u0104\u0105\u0005\u0085C\u0002\u0105", + "\u0106\u0005\u0087D\u0002\u0106\u0107\u0005\u0091I\u0002\u0107J\u0003", + "\u0002\u0002\u0002\u0108\u0109\u0005\u0085C\u0002\u0109\u010a\u0005", + "\u0087D\u0002\u010a\u010b\u0005\u0091I\u0002\u010b\u010c\u0005\u0085", + "C\u0002\u010c\u010d\u0005\u0093J\u0002\u010d\u010e\u0005\u0081A\u0002", + "\u010e\u010f\u0005\u0081A\u0002\u010fL\u0003\u0002\u0002\u0002\u0110", + "\u0111\u0005\u0085C\u0002\u0111\u0112\u0005\u0093J\u0002\u0112\u0113", + "\u0005\u0081A\u0002\u0113\u0114\u0005\u0081A\u0002\u0114N\u0003\u0002", + "\u0002\u0002\u0115\u0116\u0005\u0087D\u0002\u0116\u0117\u0005\u008d", + "G\u0002\u0117P\u0003\u0002\u0002\u0002\u0118\u0119\u0005\u0091I\u0002", + "\u0119\u011a\u0005\u008dG\u0002\u011a\u011b\u0005\u0093J\u0002\u011b", + "\u011c\u0005s:\u0002\u011cR\u0003\u0002\u0002\u0002\u011d\u011f\u0005", + "i5\u0002\u011e\u011d\u0003\u0002\u0002\u0002\u011f\u0120\u0003\u0002", + "\u0002\u0002\u0120\u011e\u0003\u0002\u0002\u0002\u0120\u0121\u0003\u0002", + "\u0002\u0002\u0121\u0129\u0003\u0002\u0002\u0002\u0122\u0126\u00070", + "\u0002\u0002\u0123\u0125\u0005i5\u0002\u0124\u0123\u0003\u0002\u0002", + "\u0002\u0125\u0128\u0003\u0002\u0002\u0002\u0126\u0124\u0003\u0002\u0002", + 
"\u0002\u0126\u0127\u0003\u0002\u0002\u0002\u0127\u012a\u0003\u0002\u0002", + "\u0002\u0128\u0126\u0003\u0002\u0002\u0002\u0129\u0122\u0003\u0002\u0002", + "\u0002\u0129\u012a\u0003\u0002\u0002\u0002\u012a\u0134\u0003\u0002\u0002", + "\u0002\u012b\u012d\u0005s:\u0002\u012c\u012e\t\u0002\u0002\u0002\u012d", + "\u012c\u0003\u0002\u0002\u0002\u012d\u012e\u0003\u0002\u0002\u0002\u012e", + "\u0130\u0003\u0002\u0002\u0002\u012f\u0131\u0005i5\u0002\u0130\u012f", + "\u0003\u0002\u0002\u0002\u0131\u0132\u0003\u0002\u0002\u0002\u0132\u0130", + "\u0003\u0002\u0002\u0002\u0132\u0133\u0003\u0002\u0002\u0002\u0133\u0135", + "\u0003\u0002\u0002\u0002\u0134\u012b\u0003\u0002\u0002\u0002\u0134\u0135", + "\u0003\u0002\u0002\u0002\u0135\u0148\u0003\u0002\u0002\u0002\u0136\u0138", + "\u00070\u0002\u0002\u0137\u0139\u0005i5\u0002\u0138\u0137\u0003\u0002", + "\u0002\u0002\u0139\u013a\u0003\u0002\u0002\u0002\u013a\u0138\u0003\u0002", + "\u0002\u0002\u013a\u013b\u0003\u0002\u0002\u0002\u013b\u0145\u0003\u0002", + "\u0002\u0002\u013c\u013e\u0005s:\u0002\u013d\u013f\t\u0002\u0002\u0002", + "\u013e\u013d\u0003\u0002\u0002\u0002\u013e\u013f\u0003\u0002\u0002\u0002", + "\u013f\u0141\u0003\u0002\u0002\u0002\u0140\u0142\u0005i5\u0002\u0141", + "\u0140\u0003\u0002\u0002\u0002\u0142\u0143\u0003\u0002\u0002\u0002\u0143", + "\u0141\u0003\u0002\u0002\u0002\u0143\u0144\u0003\u0002\u0002\u0002\u0144", + "\u0146\u0003\u0002\u0002\u0002\u0145\u013c\u0003\u0002\u0002\u0002\u0145", + "\u0146\u0003\u0002\u0002\u0002\u0146\u0148\u0003\u0002\u0002\u0002\u0147", + "\u011e\u0003\u0002\u0002\u0002\u0147\u0136\u0003\u0002\u0002\u0002\u0148", + "T\u0003\u0002\u0002\u0002\u0149\u014a\u0005W,\u0002\u014aV\u0003\u0002", + "\u0002\u0002\u014b\u0151\u0007$\u0002\u0002\u014c\u014d\u0007^\u0002", + "\u0002\u014d\u0150\u0007$\u0002\u0002\u014e\u0150\n\u0003\u0002\u0002", + "\u014f\u014c\u0003\u0002\u0002\u0002\u014f\u014e\u0003\u0002\u0002\u0002", + "\u0150\u0153\u0003\u0002\u0002\u0002\u0151\u014f\u0003\u0002\u0002\u0002", + "\u0151\u0152\u0003\u0002\u0002\u0002\u0152\u0154\u0003\u0002\u0002\u0002", + "\u0153\u0151\u0003\u0002\u0002\u0002\u0154\u0155\u0007$\u0002\u0002", + "\u0155X\u0003\u0002\u0002\u0002\u0156\u015c\u0007$\u0002\u0002\u0157", + "\u0158\u0007$\u0002\u0002\u0158\u015b\u0007$\u0002\u0002\u0159\u015b", + "\n\u0003\u0002\u0002\u015a\u0157\u0003\u0002\u0002\u0002\u015a\u0159", + "\u0003\u0002\u0002\u0002\u015b\u015e\u0003\u0002\u0002\u0002\u015c\u015a", + "\u0003\u0002\u0002\u0002\u015c\u015d\u0003\u0002\u0002\u0002\u015d\u015f", + "\u0003\u0002\u0002\u0002\u015e\u015c\u0003\u0002\u0002\u0002\u015f\u0160", + "\u0007$\u0002\u0002\u0160Z\u0003\u0002\u0002\u0002\u0161\u0162\u0005", + "]/\u0002\u0162\\\u0003\u0002\u0002\u0002\u0163\u0169\u0007)\u0002\u0002", + "\u0164\u0165\u0007^\u0002\u0002\u0165\u0168\u0007)\u0002\u0002\u0166", + "\u0168\n\u0004\u0002\u0002\u0167\u0164\u0003\u0002\u0002\u0002\u0167", + "\u0166\u0003\u0002\u0002\u0002\u0168\u016b\u0003\u0002\u0002\u0002\u0169", + "\u0167\u0003\u0002\u0002\u0002\u0169\u016a\u0003\u0002\u0002\u0002\u016a", + "\u016c\u0003\u0002\u0002\u0002\u016b\u0169\u0003\u0002\u0002\u0002\u016c", + "\u016d\u0007)\u0002\u0002\u016d^\u0003\u0002\u0002\u0002\u016e\u0174", + "\u0007)\u0002\u0002\u016f\u0170\u0007)\u0002\u0002\u0170\u0173\u0007", + ")\u0002\u0002\u0171\u0173\n\u0004\u0002\u0002\u0172\u016f\u0003\u0002", + "\u0002\u0002\u0172\u0171\u0003\u0002\u0002\u0002\u0173\u0176\u0003\u0002", + "\u0002\u0002\u0174\u0172\u0003\u0002\u0002\u0002\u0174\u0175\u0003\u0002", + 
"\u0002\u0002\u0175\u0177\u0003\u0002\u0002\u0002\u0176\u0174\u0003\u0002", + "\u0002\u0002\u0177\u0178\u0007)\u0002\u0002\u0178`\u0003\u0002\u0002", + "\u0002\u0179\u017a\u0007/\u0002\u0002\u017a\u017f\u0007/\u0002\u0002", + "\u017b\u017c\u00071\u0002\u0002\u017c\u017f\u00071\u0002\u0002\u017d", + "\u017f\u0007%\u0002\u0002\u017e\u0179\u0003\u0002\u0002\u0002\u017e", + "\u017b\u0003\u0002\u0002\u0002\u017e\u017d\u0003\u0002\u0002\u0002\u017f", + "\u0183\u0003\u0002\u0002\u0002\u0180\u0182\n\u0005\u0002\u0002\u0181", + "\u0180\u0003\u0002\u0002\u0002\u0182\u0185\u0003\u0002\u0002\u0002\u0183", + "\u0181\u0003\u0002\u0002\u0002\u0183\u0184\u0003\u0002\u0002\u0002\u0184", + "\u0186\u0003\u0002\u0002\u0002\u0185\u0183\u0003\u0002\u0002\u0002\u0186", + "\u0187\b1\u0002\u0002\u0187b\u0003\u0002\u0002\u0002\u0188\u0189\u0007", + "1\u0002\u0002\u0189\u018a\u0007,\u0002\u0002\u018a\u018e\u0003\u0002", + "\u0002\u0002\u018b\u018d\u000b\u0002\u0002\u0002\u018c\u018b\u0003\u0002", + "\u0002\u0002\u018d\u0190\u0003\u0002\u0002\u0002\u018e\u018f\u0003\u0002", + "\u0002\u0002\u018e\u018c\u0003\u0002\u0002\u0002\u018f\u0194\u0003\u0002", + "\u0002\u0002\u0190\u018e\u0003\u0002\u0002\u0002\u0191\u0192\u0007,", + "\u0002\u0002\u0192\u0195\u00071\u0002\u0002\u0193\u0195\u0007\u0002", + "\u0002\u0003\u0194\u0191\u0003\u0002\u0002\u0002\u0194\u0193\u0003\u0002", + "\u0002\u0002\u0195\u0196\u0003\u0002\u0002\u0002\u0196\u0197\b2\u0002", + "\u0002\u0197d\u0003\u0002\u0002\u0002\u0198\u0199\t\u0006\u0002\u0002", + "\u0199\u019a\u0003\u0002\u0002\u0002\u019a\u019b\b3\u0002\u0002\u019b", + "f\u0003\u0002\u0002\u0002\u019c\u01a0\t\u0007\u0002\u0002\u019d\u019f", + "\t\b\u0002\u0002\u019e\u019d\u0003\u0002\u0002\u0002\u019f\u01a2\u0003", + "\u0002\u0002\u0002\u01a0\u019e\u0003\u0002\u0002\u0002\u01a0\u01a1\u0003", + "\u0002\u0002\u0002\u01a1h\u0003\u0002\u0002\u0002\u01a2\u01a0\u0003", + "\u0002\u0002\u0002\u01a3\u01a4\t\t\u0002\u0002\u01a4j\u0003\u0002\u0002", + "\u0002\u01a5\u01a6\t\n\u0002\u0002\u01a6l\u0003\u0002\u0002\u0002\u01a7", + "\u01a8\t\u000b\u0002\u0002\u01a8n\u0003\u0002\u0002\u0002\u01a9\u01aa", + "\t\f\u0002\u0002\u01aap\u0003\u0002\u0002\u0002\u01ab\u01ac\t\r\u0002", + "\u0002\u01acr\u0003\u0002\u0002\u0002\u01ad\u01ae\t\u000e\u0002\u0002", + "\u01aet\u0003\u0002\u0002\u0002\u01af\u01b0\t\u000f\u0002\u0002\u01b0", + "v\u0003\u0002\u0002\u0002\u01b1\u01b2\t\u0010\u0002\u0002\u01b2x\u0003", + "\u0002\u0002\u0002\u01b3\u01b4\t\u0011\u0002\u0002\u01b4z\u0003\u0002", + "\u0002\u0002\u01b5\u01b6\t\u0012\u0002\u0002\u01b6|\u0003\u0002\u0002", + "\u0002\u01b7\u01b8\t\u0013\u0002\u0002\u01b8~\u0003\u0002\u0002\u0002", + "\u01b9\u01ba\t\u0014\u0002\u0002\u01ba\u0080\u0003\u0002\u0002\u0002", + "\u01bb\u01bc\t\u0015\u0002\u0002\u01bc\u0082\u0003\u0002\u0002\u0002", + "\u01bd\u01be\t\u0016\u0002\u0002\u01be\u0084\u0003\u0002\u0002\u0002", + "\u01bf\u01c0\t\u0017\u0002\u0002\u01c0\u0086\u0003\u0002\u0002\u0002", + "\u01c1\u01c2\t\u0018\u0002\u0002\u01c2\u0088\u0003\u0002\u0002\u0002", + "\u01c3\u01c4\t\u0019\u0002\u0002\u01c4\u008a\u0003\u0002\u0002\u0002", + "\u01c5\u01c6\t\u001a\u0002\u0002\u01c6\u008c\u0003\u0002\u0002\u0002", + "\u01c7\u01c8\t\u001b\u0002\u0002\u01c8\u008e\u0003\u0002\u0002\u0002", + "\u01c9\u01ca\t\u001c\u0002\u0002\u01ca\u0090\u0003\u0002\u0002\u0002", + "\u01cb\u01cc\t\u001d\u0002\u0002\u01cc\u0092\u0003\u0002\u0002\u0002", + "\u01cd\u01ce\t\u001e\u0002\u0002\u01ce\u0094\u0003\u0002\u0002\u0002", + "\u01cf\u01d0\t\u001f\u0002\u0002\u01d0\u0096\u0003\u0002\u0002\u0002", + "\u01d1\u01d2\t 
\u0002\u0002\u01d2\u0098\u0003\u0002\u0002\u0002\u01d3", + "\u01d4\t!\u0002\u0002\u01d4\u009a\u0003\u0002\u0002\u0002\u01d5\u01d6", + "\t\"\u0002\u0002\u01d6\u009c\u0003\u0002\u0002\u0002\u01d7\u01d8\t#", + "\u0002\u0002\u01d8\u009e\u0003\u0002\u0002\u0002\u001b\u0002\u0120\u0126", + "\u0129\u012d\u0132\u0134\u013a\u013e\u0143\u0145\u0147\u014f\u0151\u015a", + "\u015c\u0167\u0169\u0172\u0174\u017e\u0183\u018e\u0194\u01a0\u0003\u0002", + "\u0003\u0002"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -339,26 +349,28 @@ PqlLexer.STAR = 26; PqlLexer.TILDE = 27; PqlLexer.UNDER = 28; PqlLexer.K_AND = 29; -PqlLexer.K_FALSE = 30; -PqlLexer.K_IS = 31; -PqlLexer.K_ISNULL = 32; -PqlLexer.K_LIKE = 33; -PqlLexer.K_NOT = 34; -PqlLexer.K_NOTNULL = 35; -PqlLexer.K_NULL = 36; -PqlLexer.K_OR = 37; -PqlLexer.K_TRUE = 38; -PqlLexer.NUMERIC_LITERAL = 39; -PqlLexer.DOUBLE_QUOTED_STRING = 40; -PqlLexer.DOUBLE_QUOTED_STRING_TEL = 41; -PqlLexer.DOUBLE_QUOTED_STRING_SQL = 42; -PqlLexer.SINGLE_QUOTED_STRING = 43; -PqlLexer.SINGLE_QUOTED_STRING_TEL = 44; -PqlLexer.SINGLE_QUOTED_STRING_SQL = 45; -PqlLexer.SINGLE_LINE_COMMENT = 46; -PqlLexer.MULTILINE_COMMENT = 47; -PqlLexer.SPACES = 48; -PqlLexer.WORD = 49; +PqlLexer.K_BETWEEN = 30; +PqlLexer.K_FALSE = 31; +PqlLexer.K_IN = 32; +PqlLexer.K_IS = 33; +PqlLexer.K_ISNULL = 34; +PqlLexer.K_LIKE = 35; +PqlLexer.K_NOT = 36; +PqlLexer.K_NOTNULL = 37; +PqlLexer.K_NULL = 38; +PqlLexer.K_OR = 39; +PqlLexer.K_TRUE = 40; +PqlLexer.NUMERIC_LITERAL = 41; +PqlLexer.DOUBLE_QUOTED_STRING = 42; +PqlLexer.DOUBLE_QUOTED_STRING_TEL = 43; +PqlLexer.DOUBLE_QUOTED_STRING_SQL = 44; +PqlLexer.SINGLE_QUOTED_STRING = 45; +PqlLexer.SINGLE_QUOTED_STRING_TEL = 46; +PqlLexer.SINGLE_QUOTED_STRING_SQL = 47; +PqlLexer.SINGLE_LINE_COMMENT = 48; +PqlLexer.MULTILINE_COMMENT = 49; +PqlLexer.SPACES = 50; +PqlLexer.WORD = 51; PqlLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; @@ -378,13 +390,14 @@ PqlLexer.prototype.symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", - "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", - "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", - "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", - "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", - "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", - "MULTILINE_COMMENT", "SPACES", "WORD" ]; + "K_BETWEEN", "K_FALSE", "K_IN", "K_IS", + "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", + "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", + "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", + "SPACES", "WORD" ]; PqlLexer.prototype.ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", @@ -392,17 +405,18 @@ PqlLexer.prototype.ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", - "TILDE", "UNDER", "K_AND", "K_FALSE", "K_IS", - "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", - "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", - "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", - "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", - 
"SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", - "SPACES", "WORD", "DIGIT", "A", "B", "C", - "D", "E", "F", "G", "H", "I", "J", "K", - "L", "M", "N", "O", "P", "Q", "R", "S", - "T", "U", "V", "W", "X", "Y", "Z" ]; + "TILDE", "UNDER", "K_AND", "K_BETWEEN", + "K_FALSE", "K_IN", "K_IS", "K_ISNULL", + "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", + "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD", + "DIGIT", "A", "B", "C", "D", "E", "F", + "G", "H", "I", "J", "K", "L", "M", "N", + "O", "P", "Q", "R", "S", "T", "U", "V", + "W", "X", "Y", "Z" ]; PqlLexer.prototype.grammarFileName = "PqlLexer.g4"; diff --git a/js-temp/PqlParser.js b/js-temp/PqlParser.js index 0b6e7fa..57bad1e 100644 --- a/js-temp/PqlParser.js +++ b/js-temp/PqlParser.js @@ -8,65 +8,78 @@ var grammarFileName = "PqlParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u00033]\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", + "\u00035r\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004\b", "\t\b\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0005\u0003\u001e\n\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0007\u00032\n\u0003", - "\f\u0003\u000e\u00035\u000b\u0003\u0003\u0004\u0003\u0004\u0003\u0004", - "\u0005\u0004:\n\u0004\u0003\u0004\u0003\u0004\u0003\u0005\u0003\u0005", - "\u0003\u0005\u0007\u0005A\n\u0005\f\u0005\u000e\u0005D\u000b\u0005\u0003", - "\u0006\u0005\u0006G\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005", - "\u0006L\n\u0006\u0003\u0006\u0003\u0006\u0003\u0006\u0005\u0006Q\n\u0006", - "\u0003\u0007\u0003\u0007\u0003\u0007\u0007\u0007V\n\u0007\f\u0007\u000e", - "\u0007Y\u000b\u0007\u0003\b\u0003\b\u0003\b\u0002\u0003\u0004\t\u0002", - "\u0004\u0006\b\n\f\u000e\u0002\n\u0005\u0002\u0015\u0015\u0019\u0019", - "$$\u0005\u0002\u0012\u0012\u0016\u0016\u001c\u001c\u0004\u0002\u0015", - "\u0015\u0019\u0019\u0004\u0002\u0005\u0006\u0013\u0014\u0006\u0002\u0004", - "\u0004\u0007\b\r\r!!\u0004\u0002\u0003\u0003\u001f\u001f\u0004\u0002", - "\t\t\'\'\u0006\u0002 &&(*--\u0002e\u0002\u0010\u0003\u0002\u0002\u0002", - "\u0004\u001d\u0003\u0002\u0002\u0002\u00066\u0003\u0002\u0002\u0002", - "\b=\u0003\u0002\u0002\u0002\nF\u0003\u0002\u0002\u0002\fR\u0003\u0002", - "\u0002\u0002\u000eZ\u0003\u0002\u0002\u0002\u0010\u0011\u0005\u0004", - "\u0003\u0002\u0011\u0012\u0007\u0002\u0002\u0003\u0012\u0003\u0003\u0002", - "\u0002\u0002\u0013\u0014\b\u0003\u0001\u0002\u0014\u0015\t\u0002\u0002", - "\u0002\u0015\u001e\u0005\u0004\u0003\r\u0016\u0017\u0007\u0017\u0002", - "\u0002\u0017\u0018\u0005\u0004\u0003\u0002\u0018\u0019\u0007\u000e\u0002", - "\u0002\u0019\u001e\u0003\u0002\u0002\u0002\u001a\u001e\u0005\u000e\b", - "\u0002\u001b\u001e\u0005\u0006\u0004\u0002\u001c\u001e\u0005\n\u0006", - "\u0002\u001d\u0013\u0003\u0002\u0002\u0002\u001d\u0016\u0003\u0002\u0002", - "\u0002\u001d\u001a\u0003\u0002\u0002\u0002\u001d\u001b\u0003\u0002\u0002", - 
"\u0002\u001d\u001c\u0003\u0002\u0002\u0002\u001e3\u0003\u0002\u0002", - "\u0002\u001f \f\f\u0002\u0002 !\t\u0003\u0002\u0002!2\u0005\u0004\u0003", - "\r\"#\f\u000b\u0002\u0002#$\t\u0004\u0002\u0002$2\u0005\u0004\u0003", - "\f%&\f\n\u0002\u0002&\'\t\u0005\u0002\u0002\'2\u0005\u0004\u0003\u000b", - "()\f\t\u0002\u0002)*\t\u0006\u0002\u0002*2\u0005\u0004\u0003\n+,\f\b", - "\u0002\u0002,-\t\u0007\u0002\u0002-2\u0005\u0004\u0003\t./\f\u0007\u0002", - "\u0002/0\t\b\u0002\u000202\u0005\u0004\u0003\b1\u001f\u0003\u0002\u0002", - "\u00021\"\u0003\u0002\u0002\u00021%\u0003\u0002\u0002\u00021(\u0003", - "\u0002\u0002\u00021+\u0003\u0002\u0002\u00021.\u0003\u0002\u0002\u0002", - "25\u0003\u0002\u0002\u000231\u0003\u0002\u0002\u000234\u0003\u0002\u0002", - "\u00024\u0005\u0003\u0002\u0002\u000253\u0003\u0002\u0002\u000267\u0005", - "\f\u0007\u000279\u0007\u0017\u0002\u00028:\u0005\b\u0005\u000298\u0003", - "\u0002\u0002\u00029:\u0003\u0002\u0002\u0002:;\u0003\u0002\u0002\u0002", - ";<\u0007\u000e\u0002\u0002<\u0007\u0003\u0002\u0002\u0002=B\u0005\u0004", - "\u0003\u0002>?\u0007\u0010\u0002\u0002?A\u0005\u0004\u0003\u0002@>\u0003", - "\u0002\u0002\u0002AD\u0003\u0002\u0002\u0002B@\u0003\u0002\u0002\u0002", - "BC\u0003\u0002\u0002\u0002C\t\u0003\u0002\u0002\u0002DB\u0003\u0002", - "\u0002\u0002EG\u0007\u001a\u0002\u0002FE\u0003\u0002\u0002\u0002FG\u0003", - "\u0002\u0002\u0002GK\u0003\u0002\u0002\u0002HI\u0005\f\u0007\u0002I", - "J\u0007\u0018\u0002\u0002JL\u0003\u0002\u0002\u0002KH\u0003\u0002\u0002", - "\u0002KL\u0003\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002MP\u0005\f", - "\u0007\u0002NO\u0007\u000f\u0002\u0002OQ\u0005\f\u0007\u0002PN\u0003", - "\u0002\u0002\u0002PQ\u0003\u0002\u0002\u0002Q\u000b\u0003\u0002\u0002", - "\u0002RW\u00073\u0002\u0002ST\u0007\u0011\u0002\u0002TV\u00073\u0002", - "\u0002US\u0003\u0002\u0002\u0002VY\u0003\u0002\u0002\u0002WU\u0003\u0002", - "\u0002\u0002WX\u0003\u0002\u0002\u0002X\r\u0003\u0002\u0002\u0002YW", - "\u0003\u0002\u0002\u0002Z[\t\t\u0002\u0002[\u000f\u0003\u0002\u0002", - "\u0002\u000b\u001d139BFKPW"].join(""); + "\u0005\u0003.\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0005\u0003:\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0005\u0003@\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0007\u0003G\n\u0003\f\u0003\u000e\u0003J\u000b\u0003\u0003", + "\u0004\u0003\u0004\u0003\u0004\u0005\u0004O\n\u0004\u0003\u0004\u0003", + "\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0007\u0005V\n\u0005\f\u0005", + "\u000e\u0005Y\u000b\u0005\u0003\u0006\u0005\u0006\\\n\u0006\u0003\u0006", + "\u0003\u0006\u0003\u0006\u0005\u0006a\n\u0006\u0003\u0006\u0003\u0006", + "\u0003\u0006\u0005\u0006f\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007", + "\u0007\u0007k\n\u0007\f\u0007\u000e\u0007n\u000b\u0007\u0003\b\u0003", + "\b\u0003\b\u0002\u0003\u0004\t\u0002\u0004\u0006\b\n\f\u000e\u0002\n", + "\u0005\u0002\u0015\u0015\u0019\u0019&&\u0005\u0002\u0012\u0012\u0016", + "\u0016\u001c\u001c\u0004\u0002\u0015\u0015\u0019\u0019\u0004\u0002\u0005", + "\u0006\u0013\u0014\u0006\u0002\u0004\u0004\u0007\b\r\r##\u0004\u0002", + "\u0003\u0003\u001f\u001f\u0004\u0002\t\t))\u0006\u0002!!((*,//\u0002", + "\u0080\u0002\u0010\u0003\u0002\u0002\u0002\u0004\u001d\u0003\u0002\u0002", + "\u0002\u0006K\u0003\u0002\u0002\u0002\bR\u0003\u0002\u0002\u0002\n[", + "\u0003\u0002\u0002\u0002\fg\u0003\u0002\u0002\u0002\u000eo\u0003\u0002", + 
"\u0002\u0002\u0010\u0011\u0005\u0004\u0003\u0002\u0011\u0012\u0007\u0002", + "\u0002\u0003\u0012\u0003\u0003\u0002\u0002\u0002\u0013\u0014\b\u0003", + "\u0001\u0002\u0014\u0015\t\u0002\u0002\u0002\u0015\u001e\u0005\u0004", + "\u0003\u0010\u0016\u0017\u0007\u0017\u0002\u0002\u0017\u0018\u0005\u0004", + "\u0003\u0002\u0018\u0019\u0007\u000e\u0002\u0002\u0019\u001e\u0003\u0002", + "\u0002\u0002\u001a\u001e\u0005\u000e\b\u0002\u001b\u001e\u0005\u0006", + "\u0004\u0002\u001c\u001e\u0005\n\u0006\u0002\u001d\u0013\u0003\u0002", + "\u0002\u0002\u001d\u0016\u0003\u0002\u0002\u0002\u001d\u001a\u0003\u0002", + "\u0002\u0002\u001d\u001b\u0003\u0002\u0002\u0002\u001d\u001c\u0003\u0002", + "\u0002\u0002\u001eH\u0003\u0002\u0002\u0002\u001f \f\u000f\u0002\u0002", + " !\t\u0003\u0002\u0002!G\u0005\u0004\u0003\u0010\"#\f\u000e\u0002\u0002", + "#$\t\u0004\u0002\u0002$G\u0005\u0004\u0003\u000f%&\f\r\u0002\u0002&", + "\'\t\u0005\u0002\u0002\'G\u0005\u0004\u0003\u000e()\f\f\u0002\u0002", + ")*\t\u0006\u0002\u0002*G\u0005\u0004\u0003\r+-\f\u000b\u0002\u0002,", + ".\u0007&\u0002\u0002-,\u0003\u0002\u0002\u0002-.\u0003\u0002\u0002\u0002", + "./\u0003\u0002\u0002\u0002/0\u0007%\u0002\u00020G\u0005\u0004\u0003", + "\f12\f\t\u0002\u000223\t\u0007\u0002\u00023G\u0005\u0004\u0003\n45\f", + "\b\u0002\u000256\t\b\u0002\u00026G\u0005\u0004\u0003\t79\f\u0007\u0002", + "\u00028:\u0007&\u0002\u000298\u0003\u0002\u0002\u00029:\u0003\u0002", + "\u0002\u0002:;\u0003\u0002\u0002\u0002;<\u0007 \u0002\u0002@\u0007&\u0002\u0002?>\u0003\u0002", + "\u0002\u0002?@\u0003\u0002\u0002\u0002@A\u0003\u0002\u0002\u0002AB\u0007", + "\"\u0002\u0002BC\u0007\u0017\u0002\u0002CD\u0005\b\u0005\u0002DE\u0007", + "\u000e\u0002\u0002EG\u0003\u0002\u0002\u0002F\u001f\u0003\u0002\u0002", + "\u0002F\"\u0003\u0002\u0002\u0002F%\u0003\u0002\u0002\u0002F(\u0003", + "\u0002\u0002\u0002F+\u0003\u0002\u0002\u0002F1\u0003\u0002\u0002\u0002", + "F4\u0003\u0002\u0002\u0002F7\u0003\u0002\u0002\u0002F=\u0003\u0002\u0002", + "\u0002GJ\u0003\u0002\u0002\u0002HF\u0003\u0002\u0002\u0002HI\u0003\u0002", + "\u0002\u0002I\u0005\u0003\u0002\u0002\u0002JH\u0003\u0002\u0002\u0002", + "KL\u0005\f\u0007\u0002LN\u0007\u0017\u0002\u0002MO\u0005\b\u0005\u0002", + "NM\u0003\u0002\u0002\u0002NO\u0003\u0002\u0002\u0002OP\u0003\u0002\u0002", + "\u0002PQ\u0007\u000e\u0002\u0002Q\u0007\u0003\u0002\u0002\u0002RW\u0005", + "\u0004\u0003\u0002ST\u0007\u0010\u0002\u0002TV\u0005\u0004\u0003\u0002", + "US\u0003\u0002\u0002\u0002VY\u0003\u0002\u0002\u0002WU\u0003\u0002\u0002", + "\u0002WX\u0003\u0002\u0002\u0002X\t\u0003\u0002\u0002\u0002YW\u0003", + "\u0002\u0002\u0002Z\\\u0007\u001a\u0002\u0002[Z\u0003\u0002\u0002\u0002", + "[\\\u0003\u0002\u0002\u0002\\`\u0003\u0002\u0002\u0002]^\u0005\f\u0007", + "\u0002^_\u0007\u0018\u0002\u0002_a\u0003\u0002\u0002\u0002`]\u0003\u0002", + "\u0002\u0002`a\u0003\u0002\u0002\u0002ab\u0003\u0002\u0002\u0002be\u0005", + "\f\u0007\u0002cd\u0007\u000f\u0002\u0002df\u0005\f\u0007\u0002ec\u0003", + "\u0002\u0002\u0002ef\u0003\u0002\u0002\u0002f\u000b\u0003\u0002\u0002", + "\u0002gl\u00075\u0002\u0002hi\u0007\u0011\u0002\u0002ik\u00075\u0002", + "\u0002jh\u0003\u0002\u0002\u0002kn\u0003\u0002\u0002\u0002lj\u0003\u0002", + "\u0002\u0002lm\u0003\u0002\u0002\u0002m\r\u0003\u0002\u0002\u0002nl", + "\u0003\u0002\u0002\u0002op\t\t\u0002\u0002p\u000f\u0003\u0002\u0002", + "\u0002\u000e\u001d-9?FHNW[`el"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -85,13 +98,13 @@ var symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", 
"NOT_EQ1", "NOT_EQ2", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", - "UNDER", "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", - "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", - "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", - "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", - "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", - "WORD" ]; + "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", + "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ]; var ruleNames = [ "parseTel", "expr", "fn", "exprList", "taxon", "identifierMultipart", "literalValue" ]; @@ -144,26 +157,28 @@ PqlParser.STAR = 26; PqlParser.TILDE = 27; PqlParser.UNDER = 28; PqlParser.K_AND = 29; -PqlParser.K_FALSE = 30; -PqlParser.K_IS = 31; -PqlParser.K_ISNULL = 32; -PqlParser.K_LIKE = 33; -PqlParser.K_NOT = 34; -PqlParser.K_NOTNULL = 35; -PqlParser.K_NULL = 36; -PqlParser.K_OR = 37; -PqlParser.K_TRUE = 38; -PqlParser.NUMERIC_LITERAL = 39; -PqlParser.DOUBLE_QUOTED_STRING = 40; -PqlParser.DOUBLE_QUOTED_STRING_TEL = 41; -PqlParser.DOUBLE_QUOTED_STRING_SQL = 42; -PqlParser.SINGLE_QUOTED_STRING = 43; -PqlParser.SINGLE_QUOTED_STRING_TEL = 44; -PqlParser.SINGLE_QUOTED_STRING_SQL = 45; -PqlParser.SINGLE_LINE_COMMENT = 46; -PqlParser.MULTILINE_COMMENT = 47; -PqlParser.SPACES = 48; -PqlParser.WORD = 49; +PqlParser.K_BETWEEN = 30; +PqlParser.K_FALSE = 31; +PqlParser.K_IN = 32; +PqlParser.K_IS = 33; +PqlParser.K_ISNULL = 34; +PqlParser.K_LIKE = 35; +PqlParser.K_NOT = 36; +PqlParser.K_NOTNULL = 37; +PqlParser.K_NULL = 38; +PqlParser.K_OR = 39; +PqlParser.K_TRUE = 40; +PqlParser.NUMERIC_LITERAL = 41; +PqlParser.DOUBLE_QUOTED_STRING = 42; +PqlParser.DOUBLE_QUOTED_STRING_TEL = 43; +PqlParser.DOUBLE_QUOTED_STRING_SQL = 44; +PqlParser.SINGLE_QUOTED_STRING = 45; +PqlParser.SINGLE_QUOTED_STRING_TEL = 46; +PqlParser.SINGLE_QUOTED_STRING_SQL = 47; +PqlParser.SINGLE_LINE_COMMENT = 48; +PqlParser.MULTILINE_COMMENT = 49; +PqlParser.SPACES = 50; +PqlParser.WORD = 51; PqlParser.RULE_parseTel = 0; PqlParser.RULE_expr = 1; @@ -263,6 +278,8 @@ function ExprContext(parser, parent, invokingState) { this.right = null; // ExprContext this.inner = null; // ExprContext this.operator = null; // Token + this.is_negated = null; // Token + this.right_list = null; // ExprListContext return this; } @@ -360,6 +377,10 @@ ExprContext.prototype.K_IS = function() { return this.getToken(PqlParser.K_IS, 0); }; +ExprContext.prototype.K_LIKE = function() { + return this.getToken(PqlParser.K_LIKE, 0); +}; + ExprContext.prototype.K_AND = function() { return this.getToken(PqlParser.K_AND, 0); }; @@ -376,6 +397,18 @@ ExprContext.prototype.OR = function() { return this.getToken(PqlParser.OR, 0); }; +ExprContext.prototype.K_BETWEEN = function() { + return this.getToken(PqlParser.K_BETWEEN, 0); +}; + +ExprContext.prototype.K_IN = function() { + return this.getToken(PqlParser.K_IN, 0); +}; + +ExprContext.prototype.exprList = function() { + return this.getTypedRuleContext(ExprListContext,0); +}; + ExprContext.prototype.enterRule = function(listener) { if(listener instanceof PqlParserListener ) { 
listener.enterExpr(this); @@ -427,7 +460,7 @@ PqlParser.prototype.expr = function(_p) { this.consume(); } this.state = 19; - localctx.right = this.expr(11); + localctx.right = this.expr(14); break; case 2: @@ -456,26 +489,26 @@ PqlParser.prototype.expr = function(_p) { } this._ctx.stop = this._input.LT(-1); - this.state = 49; + this.state = 70; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,2,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,5,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { if(this._parseListeners!==null) { this.triggerExitRuleEvent(); } _prevctx = localctx; - this.state = 47; + this.state = 68; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,1,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,4,this._ctx); switch(la_) { case 1: localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); this.state = 29; - if (!( this.precpred(this._ctx, 10))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); + if (!( this.precpred(this._ctx, 13))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 13)"); } this.state = 30; localctx.operator = this._input.LT(1); @@ -488,7 +521,7 @@ PqlParser.prototype.expr = function(_p) { this.consume(); } this.state = 31; - localctx.right = this.expr(11); + localctx.right = this.expr(14); break; case 2: @@ -496,8 +529,8 @@ PqlParser.prototype.expr = function(_p) { localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); this.state = 32; - if (!( this.precpred(this._ctx, 9))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 9)"); + if (!( this.precpred(this._ctx, 12))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 12)"); } this.state = 33; localctx.operator = this._input.LT(1); @@ -510,7 +543,7 @@ PqlParser.prototype.expr = function(_p) { this.consume(); } this.state = 34; - localctx.right = this.expr(10); + localctx.right = this.expr(13); break; case 3: @@ -518,8 +551,8 @@ PqlParser.prototype.expr = function(_p) { localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); this.state = 35; - if (!( this.precpred(this._ctx, 8))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); + if (!( this.precpred(this._ctx, 11))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 11)"); } this.state = 36; localctx.operator = this._input.LT(1); @@ -532,7 +565,7 @@ PqlParser.prototype.expr = function(_p) { this.consume(); } this.state = 37; - localctx.right = this.expr(9); + localctx.right = this.expr(12); break; case 4: @@ -540,13 +573,13 @@ PqlParser.prototype.expr = function(_p) { localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); this.state = 38; - if (!( this.precpred(this._ctx, 7))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); + if (!( this.precpred(this._ctx, 10))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); } this.state = 39; localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << 
PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) !== 0))) { + if(!(((((_la - 2)) & ~0x1f) == 0 && ((1 << (_la - 2)) & ((1 << (PqlParser.EQ - 2)) | (1 << (PqlParser.NOT_EQ1 - 2)) | (1 << (PqlParser.NOT_EQ2 - 2)) | (1 << (PqlParser.ASSIGN - 2)) | (1 << (PqlParser.K_IS - 2)))) !== 0))) { localctx.operator = this._errHandler.recoverInline(this); } else { @@ -554,7 +587,7 @@ PqlParser.prototype.expr = function(_p) { this.consume(); } this.state = 40; - localctx.right = this.expr(8); + localctx.right = this.expr(11); break; case 5: @@ -562,10 +595,32 @@ PqlParser.prototype.expr = function(_p) { localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); this.state = 41; - if (!( this.precpred(this._ctx, 6))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); + if (!( this.precpred(this._ctx, 9))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 9)"); + } + this.state = 43; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_NOT) { + this.state = 42; + localctx.is_negated = this.match(PqlParser.K_NOT); + } + + this.state = 45; + localctx.operator = this.match(PqlParser.K_LIKE); + this.state = 46; + localctx.right = this.expr(10); + break; + + case 6: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 47; + if (!( this.precpred(this._ctx, 7))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); } - this.state = 42; + this.state = 48; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.AND || _la===PqlParser.K_AND)) { @@ -575,19 +630,19 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 43; - localctx.right = this.expr(7); + this.state = 49; + localctx.right = this.expr(8); break; - case 6: + case 7: localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 44; - if (!( this.precpred(this._ctx, 5))) { - throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); + this.state = 50; + if (!( this.precpred(this._ctx, 6))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); } - this.state = 45; + this.state = 51; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.OR || _la===PqlParser.K_OR)) { @@ -597,15 +652,63 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 46; + this.state = 52; + localctx.right = this.expr(7); + break; + + case 8: + localctx = new ExprContext(this, _parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 53; + if (!( this.precpred(this._ctx, 5))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); + } + this.state = 55; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_NOT) { + this.state = 54; + localctx.is_negated = this.match(PqlParser.K_NOT); + } + + this.state = 57; + localctx.operator = this.match(PqlParser.K_BETWEEN); + this.state = 58; localctx.right = this.expr(6); break; + case 9: + localctx = new ExprContext(this, 
_parentctx, _parentState); + localctx.left = _prevctx; + this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); + this.state = 59; + if (!( this.precpred(this._ctx, 8))) { + throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); + } + this.state = 61; + this._errHandler.sync(this); + _la = this._input.LA(1); + if(_la===PqlParser.K_NOT) { + this.state = 60; + localctx.is_negated = this.match(PqlParser.K_NOT); + } + + this.state = 63; + localctx.operator = this.match(PqlParser.K_IN); + this.state = 64; + this.match(PqlParser.OPEN_PAREN); + this.state = 65; + localctx.right_list = this.exprList(); + this.state = 66; + this.match(PqlParser.CLOSE_PAREN); + break; + } } - this.state = 51; + this.state = 72; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,2,this._ctx); + _alt = this._interp.adaptivePredict(this._input,5,this._ctx); } } catch( error) { @@ -689,19 +792,19 @@ PqlParser.prototype.fn = function() { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 52; + this.state = 73; localctx.function_name = this.identifierMultipart(); - this.state = 53; + this.state = 74; this.match(PqlParser.OPEN_PAREN); - this.state = 55; + this.state = 76; this._errHandler.sync(this); _la = this._input.LA(1); - if(((((_la - 19)) & ~0x1f) == 0 && ((1 << (_la - 19)) & ((1 << (PqlParser.MINUS - 19)) | (1 << (PqlParser.OPEN_PAREN - 19)) | (1 << (PqlParser.PLUS - 19)) | (1 << (PqlParser.QUESTION_MARK - 19)) | (1 << (PqlParser.K_FALSE - 19)) | (1 << (PqlParser.K_NOT - 19)) | (1 << (PqlParser.K_NULL - 19)) | (1 << (PqlParser.K_TRUE - 19)) | (1 << (PqlParser.NUMERIC_LITERAL - 19)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 19)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 19)) | (1 << (PqlParser.WORD - 19)))) !== 0)) { - this.state = 54; + if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE))) !== 0) || ((((_la - 36)) & ~0x1f) == 0 && ((1 << (_la - 36)) & ((1 << (PqlParser.K_NOT - 36)) | (1 << (PqlParser.K_NULL - 36)) | (1 << (PqlParser.K_TRUE - 36)) | (1 << (PqlParser.NUMERIC_LITERAL - 36)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 36)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 36)) | (1 << (PqlParser.WORD - 36)))) !== 0)) { + this.state = 75; localctx.arguments = this.exprList(); } - this.state = 57; + this.state = 78; this.match(PqlParser.CLOSE_PAREN); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -789,17 +892,17 @@ PqlParser.prototype.exprList = function() { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 59; + this.state = 80; this.expr(0); - this.state = 64; + this.state = 85; this._errHandler.sync(this); _la = this._input.LA(1); while(_la===PqlParser.COMMA) { - this.state = 60; + this.state = 81; this.match(PqlParser.COMMA); - this.state = 61; + this.state = 82; this.expr(0); - this.state = 66; + this.state = 87; this._errHandler.sync(this); _la = this._input.LA(1); } @@ -893,33 +996,33 @@ PqlParser.prototype.taxon = function() { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 68; + this.state = 89; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===PqlParser.QUESTION_MARK) { - this.state = 67; + this.state = 88; localctx.is_optional = this.match(PqlParser.QUESTION_MARK); } - this.state = 73; + this.state = 94; this._errHandler.sync(this); - var la_ = 
this._interp.adaptivePredict(this._input,6,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); if(la_===1) { - this.state = 70; + this.state = 91; localctx.namespace = this.identifierMultipart(); - this.state = 71; + this.state = 92; this.match(PqlParser.PIPE); } - this.state = 75; + this.state = 96; localctx.slug = this.identifierMultipart(); - this.state = 78; + this.state = 99; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,7,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,10,this._ctx); if(la_===1) { - this.state = 76; + this.state = 97; this.match(PqlParser.COLON); - this.state = 77; + this.state = 98; localctx.tag = this.identifierMultipart(); } @@ -1009,21 +1112,21 @@ PqlParser.prototype.identifierMultipart = function() { this.enterRule(localctx, 10, PqlParser.RULE_identifierMultipart); try { this.enterOuterAlt(localctx, 1); - this.state = 80; + this.state = 101; this.match(PqlParser.WORD); - this.state = 85; + this.state = 106; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,8,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,11,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { - this.state = 81; + this.state = 102; this.match(PqlParser.DOT); - this.state = 82; + this.state = 103; this.match(PqlParser.WORD); } - this.state = 87; + this.state = 108; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,8,this._ctx); + _alt = this._interp.adaptivePredict(this._input,11,this._ctx); } } catch (re) { @@ -1113,9 +1216,9 @@ PqlParser.prototype.literalValue = function() { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 88; + this.state = 109; _la = this._input.LA(1); - if(!(((((_la - 30)) & ~0x1f) == 0 && ((1 << (_la - 30)) & ((1 << (PqlParser.K_FALSE - 30)) | (1 << (PqlParser.K_NULL - 30)) | (1 << (PqlParser.K_TRUE - 30)) | (1 << (PqlParser.NUMERIC_LITERAL - 30)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 30)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 30)))) !== 0))) { + if(!(((((_la - 31)) & ~0x1f) == 0 && ((1 << (_la - 31)) & ((1 << (PqlParser.K_FALSE - 31)) | (1 << (PqlParser.K_NULL - 31)) | (1 << (PqlParser.K_TRUE - 31)) | (1 << (PqlParser.NUMERIC_LITERAL - 31)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 31)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 31)))) !== 0))) { this._errHandler.recoverInline(this); } else { @@ -1149,17 +1252,23 @@ PqlParser.prototype.sempred = function(localctx, ruleIndex, predIndex) { PqlParser.prototype.expr_sempred = function(localctx, predIndex) { switch(predIndex) { case 0: - return this.precpred(this._ctx, 10); + return this.precpred(this._ctx, 13); case 1: - return this.precpred(this._ctx, 9); + return this.precpred(this._ctx, 12); case 2: - return this.precpred(this._ctx, 8); + return this.precpred(this._ctx, 11); case 3: - return this.precpred(this._ctx, 7); + return this.precpred(this._ctx, 10); case 4: - return this.precpred(this._ctx, 6); + return this.precpred(this._ctx, 9); case 5: + return this.precpred(this._ctx, 7); + case 6: + return this.precpred(this._ctx, 6); + case 7: return this.precpred(this._ctx, 5); + case 8: + return this.precpred(this._ctx, 8); default: throw "No predicate with index:" + predIndex; } diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index 9649113..0ee8cdb 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py 
@@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\63") - buf.write("\u01ca\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\65") + buf.write("\u01d9\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -20,43 +20,45 @@ def serializedATN(): buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") - buf.write("L\4M\tM\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3") - buf.write("\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\3\t\3\n") - buf.write("\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17") - buf.write("\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25") - buf.write("\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32") - buf.write("\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\36\3\36\3\37") - buf.write("\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3!\3!\3!\3!\3!\3!\3") - buf.write("!\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3") - buf.write("$\3$\3%\3%\3%\3%\3%\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\6") - buf.write("(\u0110\n(\r(\16(\u0111\3(\3(\7(\u0116\n(\f(\16(\u0119") - buf.write("\13(\5(\u011b\n(\3(\3(\5(\u011f\n(\3(\6(\u0122\n(\r(\16") - buf.write("(\u0123\5(\u0126\n(\3(\3(\6(\u012a\n(\r(\16(\u012b\3(") - buf.write("\3(\5(\u0130\n(\3(\6(\u0133\n(\r(\16(\u0134\5(\u0137\n") - buf.write("(\5(\u0139\n(\3)\3)\3*\3*\3*\3*\7*\u0141\n*\f*\16*\u0144") - buf.write("\13*\3*\3*\3+\3+\3+\3+\7+\u014c\n+\f+\16+\u014f\13+\3") - buf.write("+\3+\3,\3,\3-\3-\3-\3-\7-\u0159\n-\f-\16-\u015c\13-\3") - buf.write("-\3-\3.\3.\3.\3.\7.\u0164\n.\f.\16.\u0167\13.\3.\3.\3") - buf.write("/\3/\3/\3/\3/\5/\u0170\n/\3/\7/\u0173\n/\f/\16/\u0176") - buf.write("\13/\3/\3/\3\60\3\60\3\60\3\60\7\60\u017e\n\60\f\60\16") - buf.write("\60\u0181\13\60\3\60\3\60\3\60\5\60\u0186\n\60\3\60\3") - buf.write("\60\3\61\3\61\3\61\3\61\3\62\3\62\7\62\u0190\n\62\f\62") - buf.write("\16\62\u0193\13\62\3\63\3\63\3\64\3\64\3\65\3\65\3\66") - buf.write("\3\66\3\67\3\67\38\38\39\39\3:\3:\3;\3;\3<\3<\3=\3=\3") - buf.write(">\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3") - buf.write("G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3\u017f\2N\3") - buf.write("\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16") - buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61") - buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*") - buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\2g\2i\2k\2m\2o\2q\2s\2u\2") - buf.write("w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085\2\u0087\2\u0089") - buf.write("\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095\2\u0097") - buf.write("\2\u0099\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2") + buf.write("L\4M\tM\4N\tN\4O\tO\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3") + buf.write("\4\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t") + buf.write("\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3") + buf.write("\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23") + buf.write("\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31") + buf.write("\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36") + buf.write("\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3 ") + buf.write("\3 \3 \3 \3 \3 
\3!\3!\3!\3\"\3\"\3\"\3#\3#\3#\3#\3#\3") + buf.write("#\3#\3$\3$\3$\3$\3$\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3&\3") + buf.write("&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3)\3)\3)\3)\3)\3*\6*\u011f") + buf.write("\n*\r*\16*\u0120\3*\3*\7*\u0125\n*\f*\16*\u0128\13*\5") + buf.write("*\u012a\n*\3*\3*\5*\u012e\n*\3*\6*\u0131\n*\r*\16*\u0132") + buf.write("\5*\u0135\n*\3*\3*\6*\u0139\n*\r*\16*\u013a\3*\3*\5*\u013f") + buf.write("\n*\3*\6*\u0142\n*\r*\16*\u0143\5*\u0146\n*\5*\u0148\n") + buf.write("*\3+\3+\3,\3,\3,\3,\7,\u0150\n,\f,\16,\u0153\13,\3,\3") + buf.write(",\3-\3-\3-\3-\7-\u015b\n-\f-\16-\u015e\13-\3-\3-\3.\3") + buf.write(".\3/\3/\3/\3/\7/\u0168\n/\f/\16/\u016b\13/\3/\3/\3\60") + buf.write("\3\60\3\60\3\60\7\60\u0173\n\60\f\60\16\60\u0176\13\60") + buf.write("\3\60\3\60\3\61\3\61\3\61\3\61\3\61\5\61\u017f\n\61\3") + buf.write("\61\7\61\u0182\n\61\f\61\16\61\u0185\13\61\3\61\3\61\3") + buf.write("\62\3\62\3\62\3\62\7\62\u018d\n\62\f\62\16\62\u0190\13") + buf.write("\62\3\62\3\62\3\62\5\62\u0195\n\62\3\62\3\62\3\63\3\63") + buf.write("\3\63\3\63\3\64\3\64\7\64\u019f\n\64\f\64\16\64\u01a2") + buf.write("\13\64\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3:\3") + buf.write(":\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3") + buf.write("C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3") + buf.write("L\3M\3M\3N\3N\3O\3O\3\u018e\2P\3\3\5\4\7\5\t\6\13\7\r") + buf.write("\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!") + buf.write("\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67") + buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61") + buf.write("a\62c\63e\64g\65i\2k\2m\2o\2q\2s\2u\2w\2y\2{\2}\2\177") + buf.write("\2\u0081\2\u0083\2\u0085\2\u0087\2\u0089\2\u008b\2\u008d") + buf.write("\2\u008f\2\u0091\2\u0093\2\u0095\2\u0097\2\u0099\2\u009b") + buf.write("\2\u009d\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2") buf.write("\13\r\17\17\"\"\5\2C\\aac|\6\2\62;C\\aac|\3\2\62;\4\2") buf.write("CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4") buf.write("\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPp") buf.write("p\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2") - buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01c7") + buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01d6") buf.write("\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13") buf.write("\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3") buf.write("\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2") @@ -67,149 +69,155 @@ def serializedATN(): buf.write("A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2") buf.write("\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2") buf.write("\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2") - buf.write("\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\3\u009b\3\2\2\2") - buf.write("\5\u009e\3\2\2\2\7\u00a1\3\2\2\2\t\u00a4\3\2\2\2\13\u00a7") - buf.write("\3\2\2\2\r\u00aa\3\2\2\2\17\u00ad\3\2\2\2\21\u00b0\3\2") - buf.write("\2\2\23\u00b3\3\2\2\2\25\u00b6\3\2\2\2\27\u00b8\3\2\2") - buf.write("\2\31\u00ba\3\2\2\2\33\u00bc\3\2\2\2\35\u00be\3\2\2\2") - buf.write("\37\u00c0\3\2\2\2!\u00c2\3\2\2\2#\u00c4\3\2\2\2%\u00c6") - buf.write("\3\2\2\2\'\u00c8\3\2\2\2)\u00ca\3\2\2\2+\u00cc\3\2\2\2") - buf.write("-\u00ce\3\2\2\2/\u00d0\3\2\2\2\61\u00d2\3\2\2\2\63\u00d4") - buf.write("\3\2\2\2\65\u00d6\3\2\2\2\67\u00d8\3\2\2\29\u00da\3\2") - buf.write("\2\2;\u00dc\3\2\2\2=\u00e0\3\2\2\2?\u00e6\3\2\2\2A\u00e9") - buf.write("\3\2\2\2C\u00f0\3\2\2\2E\u00f5\3\2\2\2G\u00f9\3\2\2\2") - 
buf.write("I\u0101\3\2\2\2K\u0106\3\2\2\2M\u0109\3\2\2\2O\u0138\3") - buf.write("\2\2\2Q\u013a\3\2\2\2S\u013c\3\2\2\2U\u0147\3\2\2\2W\u0152") - buf.write("\3\2\2\2Y\u0154\3\2\2\2[\u015f\3\2\2\2]\u016f\3\2\2\2") - buf.write("_\u0179\3\2\2\2a\u0189\3\2\2\2c\u018d\3\2\2\2e\u0194\3") - buf.write("\2\2\2g\u0196\3\2\2\2i\u0198\3\2\2\2k\u019a\3\2\2\2m\u019c") - buf.write("\3\2\2\2o\u019e\3\2\2\2q\u01a0\3\2\2\2s\u01a2\3\2\2\2") - buf.write("u\u01a4\3\2\2\2w\u01a6\3\2\2\2y\u01a8\3\2\2\2{\u01aa\3") - buf.write("\2\2\2}\u01ac\3\2\2\2\177\u01ae\3\2\2\2\u0081\u01b0\3") - buf.write("\2\2\2\u0083\u01b2\3\2\2\2\u0085\u01b4\3\2\2\2\u0087\u01b6") - buf.write("\3\2\2\2\u0089\u01b8\3\2\2\2\u008b\u01ba\3\2\2\2\u008d") - buf.write("\u01bc\3\2\2\2\u008f\u01be\3\2\2\2\u0091\u01c0\3\2\2\2") - buf.write("\u0093\u01c2\3\2\2\2\u0095\u01c4\3\2\2\2\u0097\u01c6\3") - buf.write("\2\2\2\u0099\u01c8\3\2\2\2\u009b\u009c\7(\2\2\u009c\u009d") - buf.write("\7(\2\2\u009d\4\3\2\2\2\u009e\u009f\7?\2\2\u009f\u00a0") - buf.write("\7?\2\2\u00a0\6\3\2\2\2\u00a1\u00a2\7@\2\2\u00a2\u00a3") - buf.write("\7?\2\2\u00a3\b\3\2\2\2\u00a4\u00a5\7>\2\2\u00a5\u00a6") - buf.write("\7?\2\2\u00a6\n\3\2\2\2\u00a7\u00a8\7#\2\2\u00a8\u00a9") - buf.write("\7?\2\2\u00a9\f\3\2\2\2\u00aa\u00ab\7>\2\2\u00ab\u00ac") - buf.write("\7@\2\2\u00ac\16\3\2\2\2\u00ad\u00ae\7~\2\2\u00ae\u00af") - buf.write("\7~\2\2\u00af\20\3\2\2\2\u00b0\u00b1\7>\2\2\u00b1\u00b2") - buf.write("\7>\2\2\u00b2\22\3\2\2\2\u00b3\u00b4\7@\2\2\u00b4\u00b5") - buf.write("\7@\2\2\u00b5\24\3\2\2\2\u00b6\u00b7\7(\2\2\u00b7\26\3") - buf.write("\2\2\2\u00b8\u00b9\7?\2\2\u00b9\30\3\2\2\2\u00ba\u00bb") - buf.write("\7+\2\2\u00bb\32\3\2\2\2\u00bc\u00bd\7<\2\2\u00bd\34\3") - buf.write("\2\2\2\u00be\u00bf\7.\2\2\u00bf\36\3\2\2\2\u00c0\u00c1") - buf.write("\7\60\2\2\u00c1 \3\2\2\2\u00c2\u00c3\7\61\2\2\u00c3\"") - buf.write("\3\2\2\2\u00c4\u00c5\7@\2\2\u00c5$\3\2\2\2\u00c6\u00c7") - buf.write("\7>\2\2\u00c7&\3\2\2\2\u00c8\u00c9\7/\2\2\u00c9(\3\2\2") - buf.write("\2\u00ca\u00cb\7\'\2\2\u00cb*\3\2\2\2\u00cc\u00cd\7*\2") - buf.write("\2\u00cd,\3\2\2\2\u00ce\u00cf\7~\2\2\u00cf.\3\2\2\2\u00d0") - buf.write("\u00d1\7-\2\2\u00d1\60\3\2\2\2\u00d2\u00d3\7A\2\2\u00d3") - buf.write("\62\3\2\2\2\u00d4\u00d5\7=\2\2\u00d5\64\3\2\2\2\u00d6") - buf.write("\u00d7\7,\2\2\u00d7\66\3\2\2\2\u00d8\u00d9\7\u0080\2\2") - buf.write("\u00d98\3\2\2\2\u00da\u00db\7a\2\2\u00db:\3\2\2\2\u00dc") - buf.write("\u00dd\5g\64\2\u00dd\u00de\5\u0081A\2\u00de\u00df\5m\67") - buf.write("\2\u00df<\3\2\2\2\u00e0\u00e1\5q9\2\u00e1\u00e2\5g\64") - buf.write("\2\u00e2\u00e3\5}?\2\u00e3\u00e4\5\u008bF\2\u00e4\u00e5") - buf.write("\5o8\2\u00e5>\3\2\2\2\u00e6\u00e7\5w<\2\u00e7\u00e8\5") - buf.write("\u008bF\2\u00e8@\3\2\2\2\u00e9\u00ea\5w<\2\u00ea\u00eb") - buf.write("\5\u008bF\2\u00eb\u00ec\5\u0081A\2\u00ec\u00ed\5\u008f") - buf.write("H\2\u00ed\u00ee\5}?\2\u00ee\u00ef\5}?\2\u00efB\3\2\2\2") - buf.write("\u00f0\u00f1\5}?\2\u00f1\u00f2\5w<\2\u00f2\u00f3\5{>\2") - buf.write("\u00f3\u00f4\5o8\2\u00f4D\3\2\2\2\u00f5\u00f6\5\u0081") - buf.write("A\2\u00f6\u00f7\5\u0083B\2\u00f7\u00f8\5\u008dG\2\u00f8") - buf.write("F\3\2\2\2\u00f9\u00fa\5\u0081A\2\u00fa\u00fb\5\u0083B") - buf.write("\2\u00fb\u00fc\5\u008dG\2\u00fc\u00fd\5\u0081A\2\u00fd") - buf.write("\u00fe\5\u008fH\2\u00fe\u00ff\5}?\2\u00ff\u0100\5}?\2") - buf.write("\u0100H\3\2\2\2\u0101\u0102\5\u0081A\2\u0102\u0103\5\u008f") - buf.write("H\2\u0103\u0104\5}?\2\u0104\u0105\5}?\2\u0105J\3\2\2\2") - buf.write("\u0106\u0107\5\u0083B\2\u0107\u0108\5\u0089E\2\u0108L") - 
buf.write("\3\2\2\2\u0109\u010a\5\u008dG\2\u010a\u010b\5\u0089E\2") - buf.write("\u010b\u010c\5\u008fH\2\u010c\u010d\5o8\2\u010dN\3\2\2") - buf.write("\2\u010e\u0110\5e\63\2\u010f\u010e\3\2\2\2\u0110\u0111") - buf.write("\3\2\2\2\u0111\u010f\3\2\2\2\u0111\u0112\3\2\2\2\u0112") - buf.write("\u011a\3\2\2\2\u0113\u0117\7\60\2\2\u0114\u0116\5e\63") - buf.write("\2\u0115\u0114\3\2\2\2\u0116\u0119\3\2\2\2\u0117\u0115") - buf.write("\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2\2\2\u0119") - buf.write("\u0117\3\2\2\2\u011a\u0113\3\2\2\2\u011a\u011b\3\2\2\2") - buf.write("\u011b\u0125\3\2\2\2\u011c\u011e\5o8\2\u011d\u011f\t\2") - buf.write("\2\2\u011e\u011d\3\2\2\2\u011e\u011f\3\2\2\2\u011f\u0121") - buf.write("\3\2\2\2\u0120\u0122\5e\63\2\u0121\u0120\3\2\2\2\u0122") - buf.write("\u0123\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2") - buf.write("\u0124\u0126\3\2\2\2\u0125\u011c\3\2\2\2\u0125\u0126\3") - buf.write("\2\2\2\u0126\u0139\3\2\2\2\u0127\u0129\7\60\2\2\u0128") - buf.write("\u012a\5e\63\2\u0129\u0128\3\2\2\2\u012a\u012b\3\2\2\2") - buf.write("\u012b\u0129\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0136\3") - buf.write("\2\2\2\u012d\u012f\5o8\2\u012e\u0130\t\2\2\2\u012f\u012e") - buf.write("\3\2\2\2\u012f\u0130\3\2\2\2\u0130\u0132\3\2\2\2\u0131") - buf.write("\u0133\5e\63\2\u0132\u0131\3\2\2\2\u0133\u0134\3\2\2\2") - buf.write("\u0134\u0132\3\2\2\2\u0134\u0135\3\2\2\2\u0135\u0137\3") - buf.write("\2\2\2\u0136\u012d\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u0139") - buf.write("\3\2\2\2\u0138\u010f\3\2\2\2\u0138\u0127\3\2\2\2\u0139") - buf.write("P\3\2\2\2\u013a\u013b\5S*\2\u013bR\3\2\2\2\u013c\u0142") - buf.write("\7$\2\2\u013d\u013e\7^\2\2\u013e\u0141\7$\2\2\u013f\u0141") - buf.write("\n\3\2\2\u0140\u013d\3\2\2\2\u0140\u013f\3\2\2\2\u0141") - buf.write("\u0144\3\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2") - buf.write("\u0143\u0145\3\2\2\2\u0144\u0142\3\2\2\2\u0145\u0146\7") - buf.write("$\2\2\u0146T\3\2\2\2\u0147\u014d\7$\2\2\u0148\u0149\7") - buf.write("$\2\2\u0149\u014c\7$\2\2\u014a\u014c\n\3\2\2\u014b\u0148") - buf.write("\3\2\2\2\u014b\u014a\3\2\2\2\u014c\u014f\3\2\2\2\u014d") - buf.write("\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150\3\2\2\2") - buf.write("\u014f\u014d\3\2\2\2\u0150\u0151\7$\2\2\u0151V\3\2\2\2") - buf.write("\u0152\u0153\5Y-\2\u0153X\3\2\2\2\u0154\u015a\7)\2\2\u0155") - buf.write("\u0156\7^\2\2\u0156\u0159\7)\2\2\u0157\u0159\n\4\2\2\u0158") - buf.write("\u0155\3\2\2\2\u0158\u0157\3\2\2\2\u0159\u015c\3\2\2\2") - buf.write("\u015a\u0158\3\2\2\2\u015a\u015b\3\2\2\2\u015b\u015d\3") - buf.write("\2\2\2\u015c\u015a\3\2\2\2\u015d\u015e\7)\2\2\u015eZ\3") - buf.write("\2\2\2\u015f\u0165\7)\2\2\u0160\u0161\7)\2\2\u0161\u0164") - buf.write("\7)\2\2\u0162\u0164\n\4\2\2\u0163\u0160\3\2\2\2\u0163") - buf.write("\u0162\3\2\2\2\u0164\u0167\3\2\2\2\u0165\u0163\3\2\2\2") - buf.write("\u0165\u0166\3\2\2\2\u0166\u0168\3\2\2\2\u0167\u0165\3") - buf.write("\2\2\2\u0168\u0169\7)\2\2\u0169\\\3\2\2\2\u016a\u016b") - buf.write("\7/\2\2\u016b\u0170\7/\2\2\u016c\u016d\7\61\2\2\u016d") - buf.write("\u0170\7\61\2\2\u016e\u0170\7%\2\2\u016f\u016a\3\2\2\2") - buf.write("\u016f\u016c\3\2\2\2\u016f\u016e\3\2\2\2\u0170\u0174\3") - buf.write("\2\2\2\u0171\u0173\n\5\2\2\u0172\u0171\3\2\2\2\u0173\u0176") - buf.write("\3\2\2\2\u0174\u0172\3\2\2\2\u0174\u0175\3\2\2\2\u0175") - buf.write("\u0177\3\2\2\2\u0176\u0174\3\2\2\2\u0177\u0178\b/\2\2") - buf.write("\u0178^\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u017b\7,\2") - buf.write("\2\u017b\u017f\3\2\2\2\u017c\u017e\13\2\2\2\u017d\u017c") - 
buf.write("\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u0180\3\2\2\2\u017f") - buf.write("\u017d\3\2\2\2\u0180\u0185\3\2\2\2\u0181\u017f\3\2\2\2") - buf.write("\u0182\u0183\7,\2\2\u0183\u0186\7\61\2\2\u0184\u0186\7") - buf.write("\2\2\3\u0185\u0182\3\2\2\2\u0185\u0184\3\2\2\2\u0186\u0187") - buf.write("\3\2\2\2\u0187\u0188\b\60\2\2\u0188`\3\2\2\2\u0189\u018a") - buf.write("\t\6\2\2\u018a\u018b\3\2\2\2\u018b\u018c\b\61\2\2\u018c") - buf.write("b\3\2\2\2\u018d\u0191\t\7\2\2\u018e\u0190\t\b\2\2\u018f") - buf.write("\u018e\3\2\2\2\u0190\u0193\3\2\2\2\u0191\u018f\3\2\2\2") - buf.write("\u0191\u0192\3\2\2\2\u0192d\3\2\2\2\u0193\u0191\3\2\2") - buf.write("\2\u0194\u0195\t\t\2\2\u0195f\3\2\2\2\u0196\u0197\t\n") - buf.write("\2\2\u0197h\3\2\2\2\u0198\u0199\t\13\2\2\u0199j\3\2\2") - buf.write("\2\u019a\u019b\t\f\2\2\u019bl\3\2\2\2\u019c\u019d\t\r") - buf.write("\2\2\u019dn\3\2\2\2\u019e\u019f\t\16\2\2\u019fp\3\2\2") - buf.write("\2\u01a0\u01a1\t\17\2\2\u01a1r\3\2\2\2\u01a2\u01a3\t\20") - buf.write("\2\2\u01a3t\3\2\2\2\u01a4\u01a5\t\21\2\2\u01a5v\3\2\2") - buf.write("\2\u01a6\u01a7\t\22\2\2\u01a7x\3\2\2\2\u01a8\u01a9\t\23") - buf.write("\2\2\u01a9z\3\2\2\2\u01aa\u01ab\t\24\2\2\u01ab|\3\2\2") - buf.write("\2\u01ac\u01ad\t\25\2\2\u01ad~\3\2\2\2\u01ae\u01af\t\26") - buf.write("\2\2\u01af\u0080\3\2\2\2\u01b0\u01b1\t\27\2\2\u01b1\u0082") - buf.write("\3\2\2\2\u01b2\u01b3\t\30\2\2\u01b3\u0084\3\2\2\2\u01b4") - buf.write("\u01b5\t\31\2\2\u01b5\u0086\3\2\2\2\u01b6\u01b7\t\32\2") - buf.write("\2\u01b7\u0088\3\2\2\2\u01b8\u01b9\t\33\2\2\u01b9\u008a") - buf.write("\3\2\2\2\u01ba\u01bb\t\34\2\2\u01bb\u008c\3\2\2\2\u01bc") - buf.write("\u01bd\t\35\2\2\u01bd\u008e\3\2\2\2\u01be\u01bf\t\36\2") - buf.write("\2\u01bf\u0090\3\2\2\2\u01c0\u01c1\t\37\2\2\u01c1\u0092") - buf.write("\3\2\2\2\u01c2\u01c3\t \2\2\u01c3\u0094\3\2\2\2\u01c4") - buf.write("\u01c5\t!\2\2\u01c5\u0096\3\2\2\2\u01c6\u01c7\t\"\2\2") - buf.write("\u01c7\u0098\3\2\2\2\u01c8\u01c9\t#\2\2\u01c9\u009a\3") - buf.write("\2\2\2\33\2\u0111\u0117\u011a\u011e\u0123\u0125\u012b") - buf.write("\u012f\u0134\u0136\u0138\u0140\u0142\u014b\u014d\u0158") - buf.write("\u015a\u0163\u0165\u016f\u0174\u017f\u0185\u0191\3\2\3") - buf.write("\2") + buf.write("\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3") + buf.write("\2\2\2\3\u009f\3\2\2\2\5\u00a2\3\2\2\2\7\u00a5\3\2\2\2") + buf.write("\t\u00a8\3\2\2\2\13\u00ab\3\2\2\2\r\u00ae\3\2\2\2\17\u00b1") + buf.write("\3\2\2\2\21\u00b4\3\2\2\2\23\u00b7\3\2\2\2\25\u00ba\3") + buf.write("\2\2\2\27\u00bc\3\2\2\2\31\u00be\3\2\2\2\33\u00c0\3\2") + buf.write("\2\2\35\u00c2\3\2\2\2\37\u00c4\3\2\2\2!\u00c6\3\2\2\2") + buf.write("#\u00c8\3\2\2\2%\u00ca\3\2\2\2\'\u00cc\3\2\2\2)\u00ce") + buf.write("\3\2\2\2+\u00d0\3\2\2\2-\u00d2\3\2\2\2/\u00d4\3\2\2\2") + buf.write("\61\u00d6\3\2\2\2\63\u00d8\3\2\2\2\65\u00da\3\2\2\2\67") + buf.write("\u00dc\3\2\2\29\u00de\3\2\2\2;\u00e0\3\2\2\2=\u00e4\3") + buf.write("\2\2\2?\u00ec\3\2\2\2A\u00f2\3\2\2\2C\u00f5\3\2\2\2E\u00f8") + buf.write("\3\2\2\2G\u00ff\3\2\2\2I\u0104\3\2\2\2K\u0108\3\2\2\2") + buf.write("M\u0110\3\2\2\2O\u0115\3\2\2\2Q\u0118\3\2\2\2S\u0147\3") + buf.write("\2\2\2U\u0149\3\2\2\2W\u014b\3\2\2\2Y\u0156\3\2\2\2[\u0161") + buf.write("\3\2\2\2]\u0163\3\2\2\2_\u016e\3\2\2\2a\u017e\3\2\2\2") + buf.write("c\u0188\3\2\2\2e\u0198\3\2\2\2g\u019c\3\2\2\2i\u01a3\3") + buf.write("\2\2\2k\u01a5\3\2\2\2m\u01a7\3\2\2\2o\u01a9\3\2\2\2q\u01ab") + buf.write("\3\2\2\2s\u01ad\3\2\2\2u\u01af\3\2\2\2w\u01b1\3\2\2\2") + buf.write("y\u01b3\3\2\2\2{\u01b5\3\2\2\2}\u01b7\3\2\2\2\177\u01b9") + 
buf.write("\3\2\2\2\u0081\u01bb\3\2\2\2\u0083\u01bd\3\2\2\2\u0085") + buf.write("\u01bf\3\2\2\2\u0087\u01c1\3\2\2\2\u0089\u01c3\3\2\2\2") + buf.write("\u008b\u01c5\3\2\2\2\u008d\u01c7\3\2\2\2\u008f\u01c9\3") + buf.write("\2\2\2\u0091\u01cb\3\2\2\2\u0093\u01cd\3\2\2\2\u0095\u01cf") + buf.write("\3\2\2\2\u0097\u01d1\3\2\2\2\u0099\u01d3\3\2\2\2\u009b") + buf.write("\u01d5\3\2\2\2\u009d\u01d7\3\2\2\2\u009f\u00a0\7(\2\2") + buf.write("\u00a0\u00a1\7(\2\2\u00a1\4\3\2\2\2\u00a2\u00a3\7?\2\2") + buf.write("\u00a3\u00a4\7?\2\2\u00a4\6\3\2\2\2\u00a5\u00a6\7@\2\2") + buf.write("\u00a6\u00a7\7?\2\2\u00a7\b\3\2\2\2\u00a8\u00a9\7>\2\2") + buf.write("\u00a9\u00aa\7?\2\2\u00aa\n\3\2\2\2\u00ab\u00ac\7#\2\2") + buf.write("\u00ac\u00ad\7?\2\2\u00ad\f\3\2\2\2\u00ae\u00af\7>\2\2") + buf.write("\u00af\u00b0\7@\2\2\u00b0\16\3\2\2\2\u00b1\u00b2\7~\2") + buf.write("\2\u00b2\u00b3\7~\2\2\u00b3\20\3\2\2\2\u00b4\u00b5\7>") + buf.write("\2\2\u00b5\u00b6\7>\2\2\u00b6\22\3\2\2\2\u00b7\u00b8\7") + buf.write("@\2\2\u00b8\u00b9\7@\2\2\u00b9\24\3\2\2\2\u00ba\u00bb") + buf.write("\7(\2\2\u00bb\26\3\2\2\2\u00bc\u00bd\7?\2\2\u00bd\30\3") + buf.write("\2\2\2\u00be\u00bf\7+\2\2\u00bf\32\3\2\2\2\u00c0\u00c1") + buf.write("\7<\2\2\u00c1\34\3\2\2\2\u00c2\u00c3\7.\2\2\u00c3\36\3") + buf.write("\2\2\2\u00c4\u00c5\7\60\2\2\u00c5 \3\2\2\2\u00c6\u00c7") + buf.write("\7\61\2\2\u00c7\"\3\2\2\2\u00c8\u00c9\7@\2\2\u00c9$\3") + buf.write("\2\2\2\u00ca\u00cb\7>\2\2\u00cb&\3\2\2\2\u00cc\u00cd\7") + buf.write("/\2\2\u00cd(\3\2\2\2\u00ce\u00cf\7\'\2\2\u00cf*\3\2\2") + buf.write("\2\u00d0\u00d1\7*\2\2\u00d1,\3\2\2\2\u00d2\u00d3\7~\2") + buf.write("\2\u00d3.\3\2\2\2\u00d4\u00d5\7-\2\2\u00d5\60\3\2\2\2") + buf.write("\u00d6\u00d7\7A\2\2\u00d7\62\3\2\2\2\u00d8\u00d9\7=\2") + buf.write("\2\u00d9\64\3\2\2\2\u00da\u00db\7,\2\2\u00db\66\3\2\2") + buf.write("\2\u00dc\u00dd\7\u0080\2\2\u00dd8\3\2\2\2\u00de\u00df") + buf.write("\7a\2\2\u00df:\3\2\2\2\u00e0\u00e1\5k\66\2\u00e1\u00e2") + buf.write("\5\u0085C\2\u00e2\u00e3\5q9\2\u00e3<\3\2\2\2\u00e4\u00e5") + buf.write("\5m\67\2\u00e5\u00e6\5s:\2\u00e6\u00e7\5\u0091I\2\u00e7") + buf.write("\u00e8\5\u0097L\2\u00e8\u00e9\5s:\2\u00e9\u00ea\5s:\2") + buf.write("\u00ea\u00eb\5\u0085C\2\u00eb>\3\2\2\2\u00ec\u00ed\5u") + buf.write(";\2\u00ed\u00ee\5k\66\2\u00ee\u00ef\5\u0081A\2\u00ef\u00f0") + buf.write("\5\u008fH\2\u00f0\u00f1\5s:\2\u00f1@\3\2\2\2\u00f2\u00f3") + buf.write("\5{>\2\u00f3\u00f4\5\u0085C\2\u00f4B\3\2\2\2\u00f5\u00f6") + buf.write("\5{>\2\u00f6\u00f7\5\u008fH\2\u00f7D\3\2\2\2\u00f8\u00f9") + buf.write("\5{>\2\u00f9\u00fa\5\u008fH\2\u00fa\u00fb\5\u0085C\2\u00fb") + buf.write("\u00fc\5\u0093J\2\u00fc\u00fd\5\u0081A\2\u00fd\u00fe\5") + buf.write("\u0081A\2\u00feF\3\2\2\2\u00ff\u0100\5\u0081A\2\u0100") + buf.write("\u0101\5{>\2\u0101\u0102\5\177@\2\u0102\u0103\5s:\2\u0103") + buf.write("H\3\2\2\2\u0104\u0105\5\u0085C\2\u0105\u0106\5\u0087D") + buf.write("\2\u0106\u0107\5\u0091I\2\u0107J\3\2\2\2\u0108\u0109\5") + buf.write("\u0085C\2\u0109\u010a\5\u0087D\2\u010a\u010b\5\u0091I") + buf.write("\2\u010b\u010c\5\u0085C\2\u010c\u010d\5\u0093J\2\u010d") + buf.write("\u010e\5\u0081A\2\u010e\u010f\5\u0081A\2\u010fL\3\2\2") + buf.write("\2\u0110\u0111\5\u0085C\2\u0111\u0112\5\u0093J\2\u0112") + buf.write("\u0113\5\u0081A\2\u0113\u0114\5\u0081A\2\u0114N\3\2\2") + buf.write("\2\u0115\u0116\5\u0087D\2\u0116\u0117\5\u008dG\2\u0117") + buf.write("P\3\2\2\2\u0118\u0119\5\u0091I\2\u0119\u011a\5\u008dG") + buf.write("\2\u011a\u011b\5\u0093J\2\u011b\u011c\5s:\2\u011cR\3\2") + 
buf.write("\2\2\u011d\u011f\5i\65\2\u011e\u011d\3\2\2\2\u011f\u0120") + buf.write("\3\2\2\2\u0120\u011e\3\2\2\2\u0120\u0121\3\2\2\2\u0121") + buf.write("\u0129\3\2\2\2\u0122\u0126\7\60\2\2\u0123\u0125\5i\65") + buf.write("\2\u0124\u0123\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124") + buf.write("\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u012a\3\2\2\2\u0128") + buf.write("\u0126\3\2\2\2\u0129\u0122\3\2\2\2\u0129\u012a\3\2\2\2") + buf.write("\u012a\u0134\3\2\2\2\u012b\u012d\5s:\2\u012c\u012e\t\2") + buf.write("\2\2\u012d\u012c\3\2\2\2\u012d\u012e\3\2\2\2\u012e\u0130") + buf.write("\3\2\2\2\u012f\u0131\5i\65\2\u0130\u012f\3\2\2\2\u0131") + buf.write("\u0132\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2\2\2") + buf.write("\u0133\u0135\3\2\2\2\u0134\u012b\3\2\2\2\u0134\u0135\3") + buf.write("\2\2\2\u0135\u0148\3\2\2\2\u0136\u0138\7\60\2\2\u0137") + buf.write("\u0139\5i\65\2\u0138\u0137\3\2\2\2\u0139\u013a\3\2\2\2") + buf.write("\u013a\u0138\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u0145\3") + buf.write("\2\2\2\u013c\u013e\5s:\2\u013d\u013f\t\2\2\2\u013e\u013d") + buf.write("\3\2\2\2\u013e\u013f\3\2\2\2\u013f\u0141\3\2\2\2\u0140") + buf.write("\u0142\5i\65\2\u0141\u0140\3\2\2\2\u0142\u0143\3\2\2\2") + buf.write("\u0143\u0141\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0146\3") + buf.write("\2\2\2\u0145\u013c\3\2\2\2\u0145\u0146\3\2\2\2\u0146\u0148") + buf.write("\3\2\2\2\u0147\u011e\3\2\2\2\u0147\u0136\3\2\2\2\u0148") + buf.write("T\3\2\2\2\u0149\u014a\5W,\2\u014aV\3\2\2\2\u014b\u0151") + buf.write("\7$\2\2\u014c\u014d\7^\2\2\u014d\u0150\7$\2\2\u014e\u0150") + buf.write("\n\3\2\2\u014f\u014c\3\2\2\2\u014f\u014e\3\2\2\2\u0150") + buf.write("\u0153\3\2\2\2\u0151\u014f\3\2\2\2\u0151\u0152\3\2\2\2") + buf.write("\u0152\u0154\3\2\2\2\u0153\u0151\3\2\2\2\u0154\u0155\7") + buf.write("$\2\2\u0155X\3\2\2\2\u0156\u015c\7$\2\2\u0157\u0158\7") + buf.write("$\2\2\u0158\u015b\7$\2\2\u0159\u015b\n\3\2\2\u015a\u0157") + buf.write("\3\2\2\2\u015a\u0159\3\2\2\2\u015b\u015e\3\2\2\2\u015c") + buf.write("\u015a\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u015f\3\2\2\2") + buf.write("\u015e\u015c\3\2\2\2\u015f\u0160\7$\2\2\u0160Z\3\2\2\2") + buf.write("\u0161\u0162\5]/\2\u0162\\\3\2\2\2\u0163\u0169\7)\2\2") + buf.write("\u0164\u0165\7^\2\2\u0165\u0168\7)\2\2\u0166\u0168\n\4") + buf.write("\2\2\u0167\u0164\3\2\2\2\u0167\u0166\3\2\2\2\u0168\u016b") + buf.write("\3\2\2\2\u0169\u0167\3\2\2\2\u0169\u016a\3\2\2\2\u016a") + buf.write("\u016c\3\2\2\2\u016b\u0169\3\2\2\2\u016c\u016d\7)\2\2") + buf.write("\u016d^\3\2\2\2\u016e\u0174\7)\2\2\u016f\u0170\7)\2\2") + buf.write("\u0170\u0173\7)\2\2\u0171\u0173\n\4\2\2\u0172\u016f\3") + buf.write("\2\2\2\u0172\u0171\3\2\2\2\u0173\u0176\3\2\2\2\u0174\u0172") + buf.write("\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2\u0176") + buf.write("\u0174\3\2\2\2\u0177\u0178\7)\2\2\u0178`\3\2\2\2\u0179") + buf.write("\u017a\7/\2\2\u017a\u017f\7/\2\2\u017b\u017c\7\61\2\2") + buf.write("\u017c\u017f\7\61\2\2\u017d\u017f\7%\2\2\u017e\u0179\3") + buf.write("\2\2\2\u017e\u017b\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0183") + buf.write("\3\2\2\2\u0180\u0182\n\5\2\2\u0181\u0180\3\2\2\2\u0182") + buf.write("\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2\2\2") + buf.write("\u0184\u0186\3\2\2\2\u0185\u0183\3\2\2\2\u0186\u0187\b") + buf.write("\61\2\2\u0187b\3\2\2\2\u0188\u0189\7\61\2\2\u0189\u018a") + buf.write("\7,\2\2\u018a\u018e\3\2\2\2\u018b\u018d\13\2\2\2\u018c") + buf.write("\u018b\3\2\2\2\u018d\u0190\3\2\2\2\u018e\u018f\3\2\2\2") + buf.write("\u018e\u018c\3\2\2\2\u018f\u0194\3\2\2\2\u0190\u018e\3") + 
buf.write("\2\2\2\u0191\u0192\7,\2\2\u0192\u0195\7\61\2\2\u0193\u0195") + buf.write("\7\2\2\3\u0194\u0191\3\2\2\2\u0194\u0193\3\2\2\2\u0195") + buf.write("\u0196\3\2\2\2\u0196\u0197\b\62\2\2\u0197d\3\2\2\2\u0198") + buf.write("\u0199\t\6\2\2\u0199\u019a\3\2\2\2\u019a\u019b\b\63\2") + buf.write("\2\u019bf\3\2\2\2\u019c\u01a0\t\7\2\2\u019d\u019f\t\b") + buf.write("\2\2\u019e\u019d\3\2\2\2\u019f\u01a2\3\2\2\2\u01a0\u019e") + buf.write("\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1h\3\2\2\2\u01a2\u01a0") + buf.write("\3\2\2\2\u01a3\u01a4\t\t\2\2\u01a4j\3\2\2\2\u01a5\u01a6") + buf.write("\t\n\2\2\u01a6l\3\2\2\2\u01a7\u01a8\t\13\2\2\u01a8n\3") + buf.write("\2\2\2\u01a9\u01aa\t\f\2\2\u01aap\3\2\2\2\u01ab\u01ac") + buf.write("\t\r\2\2\u01acr\3\2\2\2\u01ad\u01ae\t\16\2\2\u01aet\3") + buf.write("\2\2\2\u01af\u01b0\t\17\2\2\u01b0v\3\2\2\2\u01b1\u01b2") + buf.write("\t\20\2\2\u01b2x\3\2\2\2\u01b3\u01b4\t\21\2\2\u01b4z\3") + buf.write("\2\2\2\u01b5\u01b6\t\22\2\2\u01b6|\3\2\2\2\u01b7\u01b8") + buf.write("\t\23\2\2\u01b8~\3\2\2\2\u01b9\u01ba\t\24\2\2\u01ba\u0080") + buf.write("\3\2\2\2\u01bb\u01bc\t\25\2\2\u01bc\u0082\3\2\2\2\u01bd") + buf.write("\u01be\t\26\2\2\u01be\u0084\3\2\2\2\u01bf\u01c0\t\27\2") + buf.write("\2\u01c0\u0086\3\2\2\2\u01c1\u01c2\t\30\2\2\u01c2\u0088") + buf.write("\3\2\2\2\u01c3\u01c4\t\31\2\2\u01c4\u008a\3\2\2\2\u01c5") + buf.write("\u01c6\t\32\2\2\u01c6\u008c\3\2\2\2\u01c7\u01c8\t\33\2") + buf.write("\2\u01c8\u008e\3\2\2\2\u01c9\u01ca\t\34\2\2\u01ca\u0090") + buf.write("\3\2\2\2\u01cb\u01cc\t\35\2\2\u01cc\u0092\3\2\2\2\u01cd") + buf.write("\u01ce\t\36\2\2\u01ce\u0094\3\2\2\2\u01cf\u01d0\t\37\2") + buf.write("\2\u01d0\u0096\3\2\2\2\u01d1\u01d2\t \2\2\u01d2\u0098") + buf.write("\3\2\2\2\u01d3\u01d4\t!\2\2\u01d4\u009a\3\2\2\2\u01d5") + buf.write("\u01d6\t\"\2\2\u01d6\u009c\3\2\2\2\u01d7\u01d8\t#\2\2") + buf.write("\u01d8\u009e\3\2\2\2\33\2\u0120\u0126\u0129\u012d\u0132") + buf.write("\u0134\u013a\u013e\u0143\u0145\u0147\u014f\u0151\u015a") + buf.write("\u015c\u0167\u0169\u0172\u0174\u017e\u0183\u018e\u0194") + buf.write("\u01a0\3\2\3\2") return buf.getvalue() @@ -248,26 +256,28 @@ class PqlLexer(Lexer): TILDE = 27 UNDER = 28 K_AND = 29 - K_FALSE = 30 - K_IS = 31 - K_ISNULL = 32 - K_LIKE = 33 - K_NOT = 34 - K_NOTNULL = 35 - K_NULL = 36 - K_OR = 37 - K_TRUE = 38 - NUMERIC_LITERAL = 39 - DOUBLE_QUOTED_STRING = 40 - DOUBLE_QUOTED_STRING_TEL = 41 - DOUBLE_QUOTED_STRING_SQL = 42 - SINGLE_QUOTED_STRING = 43 - SINGLE_QUOTED_STRING_TEL = 44 - SINGLE_QUOTED_STRING_SQL = 45 - SINGLE_LINE_COMMENT = 46 - MULTILINE_COMMENT = 47 - SPACES = 48 - WORD = 49 + K_BETWEEN = 30 + K_FALSE = 31 + K_IN = 32 + K_IS = 33 + K_ISNULL = 34 + K_LIKE = 35 + K_NOT = 36 + K_NOTNULL = 37 + K_NULL = 38 + K_OR = 39 + K_TRUE = 40 + NUMERIC_LITERAL = 41 + DOUBLE_QUOTED_STRING = 42 + DOUBLE_QUOTED_STRING_TEL = 43 + DOUBLE_QUOTED_STRING_SQL = 44 + SINGLE_QUOTED_STRING = 45 + SINGLE_QUOTED_STRING_TEL = 46 + SINGLE_QUOTED_STRING_SQL = 47 + SINGLE_LINE_COMMENT = 48 + MULTILINE_COMMENT = 49 + SPACES = 50 + WORD = 51 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -284,20 +294,21 @@ class PqlLexer(Lexer): "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", - "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", - "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", - "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", - 
"SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", - "MULTILINE_COMMENT", "SPACES", "WORD" ] + "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", "K_IS", "K_ISNULL", + "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", + "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", + "SPACES", "WORD" ] ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", "AMP", "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", - "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_FALSE", - "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", - "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", - "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_BETWEEN", + "K_FALSE", "K_IN", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", + "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", + "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD", "DIGIT", "A", "B", "C", "D", "E", "F", "G", "H", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 2da7ba1..2736cc5 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,40 +11,46 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\63") - buf.write("]\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\65") + buf.write("r\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") buf.write("\t\b\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") buf.write("\3\5\3\36\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") - buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\7\3\62\n\3\f\3\16\3\65") - buf.write("\13\3\3\4\3\4\3\4\5\4:\n\4\3\4\3\4\3\5\3\5\3\5\7\5A\n") - buf.write("\5\f\5\16\5D\13\5\3\6\5\6G\n\6\3\6\3\6\3\6\5\6L\n\6\3") - buf.write("\6\3\6\3\6\5\6Q\n\6\3\7\3\7\3\7\7\7V\n\7\f\7\16\7Y\13") - buf.write("\7\3\b\3\b\3\b\2\3\4\t\2\4\6\b\n\f\16\2\n\5\2\25\25\31") - buf.write("\31$$\5\2\22\22\26\26\34\34\4\2\25\25\31\31\4\2\5\6\23") - buf.write("\24\6\2\4\4\7\b\r\r!!\4\2\3\3\37\37\4\2\t\t\'\'\6\2 ") - buf.write("&&(*--\2e\2\20\3\2\2\2\4\35\3\2\2\2\6\66\3\2\2\2\b=\3") - buf.write("\2\2\2\nF\3\2\2\2\fR\3\2\2\2\16Z\3\2\2\2\20\21\5\4\3\2") - buf.write("\21\22\7\2\2\3\22\3\3\2\2\2\23\24\b\3\1\2\24\25\t\2\2") - buf.write("\2\25\36\5\4\3\r\26\27\7\27\2\2\27\30\5\4\3\2\30\31\7") - buf.write("\16\2\2\31\36\3\2\2\2\32\36\5\16\b\2\33\36\5\6\4\2\34") - buf.write("\36\5\n\6\2\35\23\3\2\2\2\35\26\3\2\2\2\35\32\3\2\2\2") - buf.write("\35\33\3\2\2\2\35\34\3\2\2\2\36\63\3\2\2\2\37 \f\f\2\2") - buf.write(" !\t\3\2\2!\62\5\4\3\r\"#\f\13\2\2#$\t\4\2\2$\62\5\4\3") - buf.write("\f%&\f\n\2\2&\'\t\5\2\2\'\62\5\4\3\13()\f\t\2\2)*\t\6") - buf.write("\2\2*\62\5\4\3\n+,\f\b\2\2,-\t\7\2\2-\62\5\4\3\t./\f\7") - buf.write("\2\2/\60\t\b\2\2\60\62\5\4\3\b\61\37\3\2\2\2\61\"\3\2") - buf.write("\2\2\61%\3\2\2\2\61(\3\2\2\2\61+\3\2\2\2\61.\3\2\2\2\62") - buf.write("\65\3\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\5\3\2\2\2\65") - 
buf.write("\63\3\2\2\2\66\67\5\f\7\2\679\7\27\2\28:\5\b\5\298\3\2") - buf.write("\2\29:\3\2\2\2:;\3\2\2\2;<\7\16\2\2<\7\3\2\2\2=B\5\4\3") - buf.write("\2>?\7\20\2\2?A\5\4\3\2@>\3\2\2\2AD\3\2\2\2B@\3\2\2\2") - buf.write("BC\3\2\2\2C\t\3\2\2\2DB\3\2\2\2EG\7\32\2\2FE\3\2\2\2F") - buf.write("G\3\2\2\2GK\3\2\2\2HI\5\f\7\2IJ\7\30\2\2JL\3\2\2\2KH\3") - buf.write("\2\2\2KL\3\2\2\2LM\3\2\2\2MP\5\f\7\2NO\7\17\2\2OQ\5\f") - buf.write("\7\2PN\3\2\2\2PQ\3\2\2\2Q\13\3\2\2\2RW\7\63\2\2ST\7\21") - buf.write("\2\2TV\7\63\2\2US\3\2\2\2VY\3\2\2\2WU\3\2\2\2WX\3\2\2") - buf.write("\2X\r\3\2\2\2YW\3\2\2\2Z[\t\t\2\2[\17\3\2\2\2\13\35\61") - buf.write("\639BFKPW") + buf.write("\3\3\3\3\3\3\3\3\5\3.\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\5\3:\n\3\3\3\3\3\3\3\3\3\5\3@\n\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\7\3G\n\3\f\3\16\3J\13\3\3\4\3\4\3\4\5\4O") + buf.write("\n\4\3\4\3\4\3\5\3\5\3\5\7\5V\n\5\f\5\16\5Y\13\5\3\6\5") + buf.write("\6\\\n\6\3\6\3\6\3\6\5\6a\n\6\3\6\3\6\3\6\5\6f\n\6\3\7") + buf.write("\3\7\3\7\7\7k\n\7\f\7\16\7n\13\7\3\b\3\b\3\b\2\3\4\t\2") + buf.write("\4\6\b\n\f\16\2\n\5\2\25\25\31\31&&\5\2\22\22\26\26\34") + buf.write("\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r##\4") + buf.write("\2\3\3\37\37\4\2\t\t))\6\2!!((*,//\2\u0080\2\20\3\2\2") + buf.write("\2\4\35\3\2\2\2\6K\3\2\2\2\bR\3\2\2\2\n[\3\2\2\2\fg\3") + buf.write("\2\2\2\16o\3\2\2\2\20\21\5\4\3\2\21\22\7\2\2\3\22\3\3") + buf.write("\2\2\2\23\24\b\3\1\2\24\25\t\2\2\2\25\36\5\4\3\20\26\27") + buf.write("\7\27\2\2\27\30\5\4\3\2\30\31\7\16\2\2\31\36\3\2\2\2\32") + buf.write("\36\5\16\b\2\33\36\5\6\4\2\34\36\5\n\6\2\35\23\3\2\2\2") + buf.write("\35\26\3\2\2\2\35\32\3\2\2\2\35\33\3\2\2\2\35\34\3\2\2") + buf.write("\2\36H\3\2\2\2\37 \f\17\2\2 !\t\3\2\2!G\5\4\3\20\"#\f") + buf.write("\16\2\2#$\t\4\2\2$G\5\4\3\17%&\f\r\2\2&\'\t\5\2\2\'G\5") + buf.write("\4\3\16()\f\f\2\2)*\t\6\2\2*G\5\4\3\r+-\f\13\2\2,.\7&") + buf.write("\2\2-,\3\2\2\2-.\3\2\2\2./\3\2\2\2/\60\7%\2\2\60G\5\4") + buf.write("\3\f\61\62\f\t\2\2\62\63\t\7\2\2\63G\5\4\3\n\64\65\f\b") + buf.write("\2\2\65\66\t\b\2\2\66G\5\4\3\t\679\f\7\2\28:\7&\2\298") + buf.write("\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7 \2\2@\7&\2\2?>\3\2\2\2?@\3\2\2\2@A\3\2\2\2AB\7\"\2\2") + buf.write("BC\7\27\2\2CD\5\b\5\2DE\7\16\2\2EG\3\2\2\2F\37\3\2\2\2") + buf.write("F\"\3\2\2\2F%\3\2\2\2F(\3\2\2\2F+\3\2\2\2F\61\3\2\2\2") + buf.write("F\64\3\2\2\2F\67\3\2\2\2F=\3\2\2\2GJ\3\2\2\2HF\3\2\2\2") + buf.write("HI\3\2\2\2I\5\3\2\2\2JH\3\2\2\2KL\5\f\7\2LN\7\27\2\2M") + buf.write("O\5\b\5\2NM\3\2\2\2NO\3\2\2\2OP\3\2\2\2PQ\7\16\2\2Q\7") + buf.write("\3\2\2\2RW\5\4\3\2ST\7\20\2\2TV\5\4\3\2US\3\2\2\2VY\3") + buf.write("\2\2\2WU\3\2\2\2WX\3\2\2\2X\t\3\2\2\2YW\3\2\2\2Z\\\7\32") + buf.write("\2\2[Z\3\2\2\2[\\\3\2\2\2\\`\3\2\2\2]^\5\f\7\2^_\7\30") + buf.write("\2\2_a\3\2\2\2`]\3\2\2\2`a\3\2\2\2ab\3\2\2\2be\5\f\7\2") + buf.write("cd\7\17\2\2df\5\f\7\2ec\3\2\2\2ef\3\2\2\2f\13\3\2\2\2") + buf.write("gl\7\65\2\2hi\7\21\2\2ik\7\65\2\2jh\3\2\2\2kn\3\2\2\2") + buf.write("lj\3\2\2\2lm\3\2\2\2m\r\3\2\2\2nl\3\2\2\2op\t\t\2\2p\17") + buf.write("\3\2\2\2\16\35-9?FHNW[`el") return buf.getvalue() @@ -68,13 +74,13 @@ class PqlParser ( Parser ): "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", - "UNDER", "K_AND", "K_FALSE", "K_IS", "K_ISNULL", "K_LIKE", - "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", - "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", - "DOUBLE_QUOTED_STRING_SQL", 
"SINGLE_QUOTED_STRING", - "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", - "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", - "WORD" ] + "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", + "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ] RULE_parseTel = 0 RULE_expr = 1 @@ -117,26 +123,28 @@ class PqlParser ( Parser ): TILDE=27 UNDER=28 K_AND=29 - K_FALSE=30 - K_IS=31 - K_ISNULL=32 - K_LIKE=33 - K_NOT=34 - K_NOTNULL=35 - K_NULL=36 - K_OR=37 - K_TRUE=38 - NUMERIC_LITERAL=39 - DOUBLE_QUOTED_STRING=40 - DOUBLE_QUOTED_STRING_TEL=41 - DOUBLE_QUOTED_STRING_SQL=42 - SINGLE_QUOTED_STRING=43 - SINGLE_QUOTED_STRING_TEL=44 - SINGLE_QUOTED_STRING_SQL=45 - SINGLE_LINE_COMMENT=46 - MULTILINE_COMMENT=47 - SPACES=48 - WORD=49 + K_BETWEEN=30 + K_FALSE=31 + K_IN=32 + K_IS=33 + K_ISNULL=34 + K_LIKE=35 + K_NOT=36 + K_NOTNULL=37 + K_NULL=38 + K_OR=39 + K_TRUE=40 + NUMERIC_LITERAL=41 + DOUBLE_QUOTED_STRING=42 + DOUBLE_QUOTED_STRING_TEL=43 + DOUBLE_QUOTED_STRING_SQL=44 + SINGLE_QUOTED_STRING=45 + SINGLE_QUOTED_STRING_TEL=46 + SINGLE_QUOTED_STRING_SQL=47 + SINGLE_LINE_COMMENT=48 + MULTILINE_COMMENT=49 + SPACES=50 + WORD=51 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -209,6 +217,8 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): self.right = None # ExprContext self.inner = None # ExprContext self.operator = None # Token + self.is_negated = None # Token + self.right_list = None # ExprListContext def expr(self, i:int=None): if i is None: @@ -280,6 +290,9 @@ def NOT_EQ2(self): def K_IS(self): return self.getToken(PqlParser.K_IS, 0) + def K_LIKE(self): + return self.getToken(PqlParser.K_LIKE, 0) + def K_AND(self): return self.getToken(PqlParser.K_AND, 0) @@ -292,6 +305,16 @@ def K_OR(self): def OR(self): return self.getToken(PqlParser.OR, 0) + def K_BETWEEN(self): + return self.getToken(PqlParser.K_BETWEEN, 0) + + def K_IN(self): + return self.getToken(PqlParser.K_IN, 0) + + def exprList(self): + return self.getTypedRuleContext(PqlParser.ExprListContext,0) + + def getRuleIndex(self): return PqlParser.RULE_expr @@ -334,7 +357,7 @@ def expr(self, _p:int=0): self._errHandler.reportMatch(self) self.consume() self.state = 19 - localctx.right = self.expr(11) + localctx.right = self.expr(14) pass elif la_ == 2: @@ -363,25 +386,25 @@ def expr(self, _p:int=0): self._ctx.stop = self._input.LT(-1) - self.state = 49 + self.state = 70 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + _alt = self._interp.adaptivePredict(self._input,5,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 47 + self.state = 68 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,1,self._ctx) + la_ = self._interp.adaptivePredict(self._input,4,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) self.state = 29 - if not self.precpred(self._ctx, 10): + if not self.precpred(self._ctx, 13): from antlr4.error.Errors import FailedPredicateException - raise 
FailedPredicateException(self, "self.precpred(self._ctx, 10)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") self.state = 30 localctx.operator = self._input.LT(1) _la = self._input.LA(1) @@ -391,7 +414,7 @@ def expr(self, _p:int=0): self._errHandler.reportMatch(self) self.consume() self.state = 31 - localctx.right = self.expr(11) + localctx.right = self.expr(14) pass elif la_ == 2: @@ -399,9 +422,9 @@ def expr(self, _p:int=0): localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) self.state = 32 - if not self.precpred(self._ctx, 9): + if not self.precpred(self._ctx, 12): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") self.state = 33 localctx.operator = self._input.LT(1) _la = self._input.LA(1) @@ -411,7 +434,7 @@ def expr(self, _p:int=0): self._errHandler.reportMatch(self) self.consume() self.state = 34 - localctx.right = self.expr(10) + localctx.right = self.expr(13) pass elif la_ == 3: @@ -419,9 +442,9 @@ def expr(self, _p:int=0): localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) self.state = 35 - if not self.precpred(self._ctx, 8): + if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") self.state = 36 localctx.operator = self._input.LT(1) _la = self._input.LA(1) @@ -431,7 +454,7 @@ def expr(self, _p:int=0): self._errHandler.reportMatch(self) self.consume() self.state = 37 - localctx.right = self.expr(9) + localctx.right = self.expr(12) pass elif la_ == 4: @@ -439,9 +462,9 @@ def expr(self, _p:int=0): localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) self.state = 38 - if not self.precpred(self._ctx, 7): + if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") + raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") self.state = 39 localctx.operator = self._input.LT(1) _la = self._input.LA(1) @@ -451,7 +474,7 @@ def expr(self, _p:int=0): self._errHandler.reportMatch(self) self.consume() self.state = 40 - localctx.right = self.expr(8) + localctx.right = self.expr(11) pass elif la_ == 5: @@ -459,10 +482,32 @@ def expr(self, _p:int=0): localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) self.state = 41 - if not self.precpred(self._ctx, 6): + if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 42 + raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") + self.state = 43 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_NOT: + self.state = 42 + localctx.is_negated = self.match(PqlParser.K_NOT) + + + self.state = 45 + localctx.operator = self.match(PqlParser.K_LIKE) + self.state = 46 + localctx.right = self.expr(10) + pass + + elif la_ == 6: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 47 + if not self.precpred(self._ctx, 7): + from antlr4.error.Errors import 
FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") + self.state = 48 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -470,19 +515,19 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 43 - localctx.right = self.expr(7) + self.state = 49 + localctx.right = self.expr(8) pass - elif la_ == 6: + elif la_ == 7: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 44 - if not self.precpred(self._ctx, 5): + self.state = 50 + if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException - raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 45 + raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") + self.state = 51 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -490,14 +535,62 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 46 + self.state = 52 + localctx.right = self.expr(7) + pass + + elif la_ == 8: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 53 + if not self.precpred(self._ctx, 5): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") + self.state = 55 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_NOT: + self.state = 54 + localctx.is_negated = self.match(PqlParser.K_NOT) + + + self.state = 57 + localctx.operator = self.match(PqlParser.K_BETWEEN) + self.state = 58 localctx.right = self.expr(6) pass + elif la_ == 9: + localctx = PqlParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 59 + if not self.precpred(self._ctx, 8): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") + self.state = 61 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==PqlParser.K_NOT: + self.state = 60 + localctx.is_negated = self.match(PqlParser.K_NOT) + + + self.state = 63 + localctx.operator = self.match(PqlParser.K_IN) + self.state = 64 + self.match(PqlParser.OPEN_PAREN) + self.state = 65 + localctx.right_list = self.exprList() + self.state = 66 + self.match(PqlParser.CLOSE_PAREN) + pass + - self.state = 51 + self.state = 72 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,2,self._ctx) + _alt = self._interp.adaptivePredict(self._input,5,self._ctx) except RecognitionException as re: localctx.exception = re @@ -557,19 +650,19 @@ def fn(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 52 + self.state = 73 localctx.function_name = self.identifierMultipart() - self.state = 53 + self.state = 74 self.match(PqlParser.OPEN_PAREN) - self.state = 55 + self.state = 76 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << 
PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 54 + self.state = 75 localctx.arguments = self.exprList() - self.state = 57 + self.state = 78 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -626,17 +719,17 @@ def exprList(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 59 + self.state = 80 self.expr(0) - self.state = 64 + self.state = 85 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 60 + self.state = 81 self.match(PqlParser.COMMA) - self.state = 61 + self.state = 82 self.expr(0) - self.state = 66 + self.state = 87 self._errHandler.sync(self) _la = self._input.LA(1) @@ -702,33 +795,33 @@ def taxon(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 68 + self.state = 89 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 67 + self.state = 88 localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 73 + self.state = 94 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,6,self._ctx) + la_ = self._interp.adaptivePredict(self._input,9,self._ctx) if la_ == 1: - self.state = 70 + self.state = 91 localctx.namespace = self.identifierMultipart() - self.state = 71 + self.state = 92 self.match(PqlParser.PIPE) - self.state = 75 + self.state = 96 localctx.slug = self.identifierMultipart() - self.state = 78 + self.state = 99 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,7,self._ctx) + la_ = self._interp.adaptivePredict(self._input,10,self._ctx) if la_ == 1: - self.state = 76 + self.state = 97 self.match(PqlParser.COLON) - self.state = 77 + self.state = 98 localctx.tag = self.identifierMultipart() @@ -785,20 +878,20 @@ def identifierMultipart(self): self.enterRule(localctx, 10, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 80 + self.state = 101 self.match(PqlParser.WORD) - self.state = 85 + self.state = 106 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + _alt = self._interp.adaptivePredict(self._input,11,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 81 + self.state = 102 self.match(PqlParser.DOT) - self.state = 82 + self.state = 103 self.match(PqlParser.WORD) - self.state = 87 + self.state = 108 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,8,self._ctx) + _alt = self._interp.adaptivePredict(self._input,11,self._ctx) except RecognitionException as re: localctx.exception = re @@ -860,7 +953,7 @@ def literalValue(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 88 + self.state = 109 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) @@ -889,29 +982,41 @@ def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): def expr_sempred(self, localctx:ExprContext, predIndex:int): if predIndex == 0: - return self.precpred(self._ctx, 10) + return self.precpred(self._ctx, 13) if predIndex == 1: - return self.precpred(self._ctx, 9) + return 
self.precpred(self._ctx, 12) if predIndex == 2: - return self.precpred(self._ctx, 8) + return self.precpred(self._ctx, 11) if predIndex == 3: - return self.precpred(self._ctx, 7) + return self.precpred(self._ctx, 10) if predIndex == 4: - return self.precpred(self._ctx, 6) + return self.precpred(self._ctx, 9) if predIndex == 5: + return self.precpred(self._ctx, 7) + + + if predIndex == 6: + return self.precpred(self._ctx, 6) + + + if predIndex == 7: return self.precpred(self._ctx, 5) + if predIndex == 8: + return self.precpred(self._ctx, 8) + + diff --git a/python/src/pql_grammar/from_pql.py b/python/src/pql_grammar/from_pql.py index 5e9a98a..f37eb17 100644 --- a/python/src/pql_grammar/from_pql.py +++ b/python/src/pql_grammar/from_pql.py @@ -127,10 +127,6 @@ def parse_literal_value(e:PqlParser.LiteralValueContext): is_null = bool(e.K_NULL()) is_bool = bool(e.K_TRUE()) or bool(e.K_FALSE()) - # TODO: - # - BLOB_LITERAL - # - CURRENT_[DATE|TIME|TIMESTAMP] - if is_null: return None @@ -173,24 +169,136 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : (cls.parse_expr(ctx.right),) ) + v: Optional[str] = full_text(ctx.operator) if v: # this is super generic expression of type # left OP right - # with a lot of options for OP value. - return ast.Expr( - v.upper(), - ( - cls.parse_expr(ctx.left), - cls.parse_expr(ctx.right) - ) + # with a lot of options for OP values. + # The only exception is IN operator where there no `right` but `right_list` + + # we standardize operator keywords to upper case + # this is to establish a standard expectation for consuming code + # 'and' -> 'AND' + op = v.upper() + + # Let's handle IN-like cases first and fall through left-OP-right for rest. + # IN-like cases are characterized by non-null `.right_list` (instead of .right) + if ctx.right_list: + right = [ + cls.parse_expr(expr) + for expr in ctx.right_list.expr() + ] + else: + right = [cls.parse_expr(ctx.right)] + + is_negated = ctx.is_negated + + # Normally AST parsers should not be in business of + # rewriting the subject matter. + # However, there is one ugly nuance of SQL-like language + # that does not warrant "rewrite" but a "more standard way to express" + # a BETWEEN b AND c + # a NOT BETWEEN b AND c + # It's an ugly wart of SQL that requires very special-cased handling + # in all consumer code if it stays as BETWEEN. + # TO save the children, and humanity, will express BETWEEN as explicit inequality + # a BETWEEN b AND c --> (a >= b) AND (a <= c) + # a NOT BETWEEN b AND c --> (a < b) OR (a > c) + # Dont think of it as "transform". + # Think of it as the only sane way to express what BETWEEN means. + + if op == 'BETWEEN': + left = cls.parse_expr(ctx.left) + + # this one is an Expr('AND', [v1, v2])) + between_and = cls.parse_expr(ctx.right) + + if ( + isinstance(between_and, ast.Expr) and + between_and.operator == 'AND' and + len(between_and.args) == 2 + ): + pass + else: + raise ParseError( + f"Contents of BETWEEN's AND expression - {full_text(ctx.right)} - are not valid. " + "Must be of form `valueA AND valueB`." 
+ ) + + if is_negated: + # a NOT BETWEEN b AND c --> (a < b) OR (a > c) + ex = ast.Expr( + 'OR', + ( + ast.Expr( + '<', + ( + left, # TODO: think about copy + between_and.args[0] + ) + ), + ast.Expr( + '>', + ( + left, # TODO: think about copy + between_and.args[1] + ) + ), + ) + ) + else: + # a BETWEEN b AND c --> (a >= b) AND (a <= c) + ex = ast.Expr( + 'AND', + ( + ast.Expr( + '>=', + ( + left, # TODO: think about copy + between_and.args[0] + ) + ), + ast.Expr( + '<=', + ( + left, # TODO: think about copy + between_and.args[1] + ) + ), + ) + ) + # we internalized NOT into the expression. + # can return without further NOT processing + return ex + + ex = ast.Expr( + op, + tuple([cls.parse_expr(ctx.left)] + right) ) + # lastly, some statements allow NOT before operator + # (if it's before expression, it's captured by Unary operator) + # In this case as opposed to creating of a separate NOT-variant operator + # we just wrap the non-NOT version of the statement into + # a unary NOT + # c not in (1,2,3) + # becomes + # not (c in (1,2,3)) + + if ctx.is_negated: + return ast.Expr( + 'NOT', + (ex,) + ) + else: + return ex + v: PqlParser.TaxonContext = ctx.taxon() if v: return cls.parse_taxon(v) - v: PqlParser.FunctionContext = ctx.fn() + v: PqlParser.FnContext = ctx.fn() if v: return cls.parse_function(v) diff --git a/python/src/pql_grammar/to_pql.py b/python/src/pql_grammar/to_pql.py index f7c40a4..6226b36 100644 --- a/python/src/pql_grammar/to_pql.py +++ b/python/src/pql_grammar/to_pql.py @@ -23,19 +23,50 @@ def __str__(self): raise NotImplementedError(f'Renderer for "{self.n}" is not implemented.') +def _in_expr_render(o, vv): + right = ", ".join([ + str(to_r(v)) + for v in vv[1:] + ]) + return f'{to_r(vv[0])} IN ({right})' + + +def _between_expr_renderer(o, vv): + # `right` is a nested Expr('AND", [left, right]) expression + # as long as all expressions get parens on outside, + # we need to strip them out otherwise we get: + # a BETWEEN (b AND c) + # which does not work as SQL + right = str(to_r(vv[LAST]))[1:-1] + return f'{to_r(vv[FIRST])} BETWEEN {right}' + + +def _default_expr_render(o, vv): + return f' {o} '.join([ + str(to_r(v)) + for v in vv + ]) + + +_expr_renderers = { + 'IN': _in_expr_render, + 'BETWEEN': _between_expr_renderer, +} + + class Expr(Node): n: ast.Expr def __str__(self): op = self.n.operator - if len(self.n.args) == 1: + args_len = len(self.n.args) + if args_len == 1: right = self.n.args[FIRST] # could be something like 'NOT' which needs padding padding = '' if op in ('+','-') else ' ' return f'{op}{padding}{to_r(right)}' - else: - left = self.n.args[FIRST] - right = self.n.args[SECOND] - return f'({to_r(left)} {op} {to_r(right)})' + + renderer = _expr_renderers.get(op, _default_expr_render) + return '(' + renderer(op, self.n.args) + ')' class Literal(Node): diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index 97156b4..92bb515 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -19,7 +19,13 @@ '(ns3|taxon3 + 5)', 'fn_4(fn_1(slug))', 'TypeCast(arg1=\'value1\')', - 'ns6|taxon6 > 1234 and (ns0|taxon10 + 4321) == 0' + 'ns6|taxon6 > 1234 and (ns0|taxon10 + 4321) == 0', + 'slug like \'blah%\'', + 'slug not like \'blah%\'', + 'slug in (1,2)', + 'slug not in (1,2)', + 'a BETWEEN b AND c', ## --> (a >= b) AND (a <= c) + 'a NOT BETWEEN b AND c', ## --> (a < b) OR (a > c) ) @@ -90,6 +96,94 @@ ), ), ), + ast.Expr( + 'LIKE', + ( + ast.Taxon( + 'slug', + ), + ast.Literal('blah%', "'blah%'"), + ), + ), + ast.Expr( + 'NOT', + ( + 
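+            # `slug not like 'blah%'`: negation is represented by wrapping the positive
+            # LIKE expression in a unary NOT (the `not in` case below follows the same pattern)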
ast.Expr( + 'LIKE', + ( + ast.Taxon( + 'slug', + ), + ast.Literal('blah%', "'blah%'"), + ), + ), + ) + ), + ast.Expr( + 'IN', + ( + ast.Taxon( + 'slug', + ), + ast.Literal(1, "1"), + ast.Literal(2, "2"), + ), + ), + ast.Expr( + 'NOT', + ( + ast.Expr( + 'IN', + ( + ast.Taxon( + 'slug', + ), + ast.Literal(1, "1"), + ast.Literal(2, "2"), + ), + ), + ) + ), + # 'a BETWEEN b AND c', ## --> (a >= b) AND (a <= c) + ast.Expr( + 'AND', + ( + ast.Expr( + '>=', + ( + ast.Taxon('a'), + ast.Taxon('b'), + ), + ), + ast.Expr( + '<=', + ( + ast.Taxon('a'), + ast.Taxon('c'), + ), + ), + ), + ), + # 'a NOT BETWEEN b AND c', ## --> (a < b) OR (a > c) + ast.Expr( + 'OR', + ( + ast.Expr( + '<', + ( + ast.Taxon('a'), + ast.Taxon('b'), + ), + ), + ast.Expr( + '>', + ( + ast.Taxon('a'), + ast.Taxon('c'), + ), + ), + ), + ), ) @@ -101,7 +195,13 @@ '(ns3|taxon3 + 5)', 'fn_4(fn_1(slug))', 'TypeCast(arg1=\'value1\')', - '((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0))' + '((ns6|taxon6 > 1234) AND ((ns0|taxon10 + 4321) == 0))', + '(slug LIKE \'blah%\')', + 'NOT (slug LIKE \'blah%\')', + '(slug IN (1, 2))', + 'NOT (slug IN (1, 2))', + '((a >= b) AND (a <= c))', # was: a BETWEEN b AND c + '((a < b) OR (a > c))', # was: a NOT BETWEEN b AND c ) @@ -115,3 +215,53 @@ def test_tel_to_ast_and_back(input: str, ast_should_be: ast.Node, output_should_ output_is = to_tel(ast_is) assert output_is == output_should_be + + + +@pytest.mark.parametrize( + 'ast_input, str_output_should_be', + ( + ( + ast.Expr( + 'AND', + ( + ast.Taxon( + 'slug', + ), + ast.Expr( + 'OR', + ( + ast.Literal(1, "1"), + ast.Literal(2, "2"), + ), + ), + ast.Literal(3, "3"), + ), + ), + '(slug AND (1 OR 2) AND 3)', + ), + ( + ast.Expr( + 'BETWEEN', + ( + ast.Taxon( + 'slug', + ), + ast.Expr( + 'AND', + ( + ast.Literal(2, "2"), + ast.Literal(3, "3"), + ), + ), + ) + ), + '(slug BETWEEN 2 AND 3)', + ), + ) +) +def test_ast_to_tel_special_cases(ast_input: ast.Node, str_output_should_be: str): + output_is = to_tel(ast_input) + assert output_is == str_output_should_be + + diff --git a/python/tests/tel_grammar_test.py b/python/tests/tel_grammar_test.py index b4a6ff1..d5cbc9b 100644 --- a/python/tests/tel_grammar_test.py +++ b/python/tests/tel_grammar_test.py @@ -77,8 +77,10 @@ def test_grammar(test_case): ('ds|sl|ug - sluging',), # Handle nested functions ('slug is',), - ('slug not',), + ('not',), ('',), + ('a BETWEEN e',), + ('a BETWEEN f OR x',), ], ) def test_grammar_bad(test_case): From 5c1afa674223e40eee70ec36db34552571264859 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sat, 5 Dec 2020 12:27:44 -0800 Subject: [PATCH 28/32] FIX - Taxon.raw_value prop renamed to Taxon.value to reflect "processed" properly This is different from raw_value which may have original text, spacing and different quotation. .value is pure synthetically-re-assembed taxon expression value --- python/src/pql_grammar/model.py | 2 +- python/tests/model_test.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/python/src/pql_grammar/model.py b/python/src/pql_grammar/model.py index 6d22612..97ae4b5 100644 --- a/python/src/pql_grammar/model.py +++ b/python/src/pql_grammar/model.py @@ -47,7 +47,7 @@ class Taxon(Node): tag: Optional[str] = None @property - def raw_value(self): + def value(self): is_optional = '?' 
if self.is_optional else '' namespace = self.namespace + '|' if self.namespace else '' slug = self.slug diff --git a/python/tests/model_test.py b/python/tests/model_test.py index 051f29c..30d1d1d 100644 --- a/python/tests/model_test.py +++ b/python/tests/model_test.py @@ -32,5 +32,5 @@ @pytest.mark.parametrize('input, output', zip(inputs, outputs)) -def test_taxon_raw_value(input: ast.Taxon, output: str): - assert input.raw_value == output +def test_taxon_value(input: ast.Taxon, output: str): + assert input.value == output From e5744d6526412f9459f9068a0548ea36c5a3e56b Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Sat, 5 Dec 2020 14:08:37 -0800 Subject: [PATCH 29/32] tone down agreegious unary parsing. Ignore unary + and merge - into numbers. --- python/src/pql_grammar/from_pql.py | 46 ++++++++++++++++++++++++++++-- python/tests/ast_pql_test.py | 32 +++++++++++++++++++-- 2 files changed, 74 insertions(+), 4 deletions(-) diff --git a/python/src/pql_grammar/from_pql.py b/python/src/pql_grammar/from_pql.py index f37eb17..d4034cd 100644 --- a/python/src/pql_grammar/from_pql.py +++ b/python/src/pql_grammar/from_pql.py @@ -2,6 +2,7 @@ from antlr4 import CommonTokenStream, InputStream, ParserRuleContext from antlr4 import ParserRuleContext +from decimal import Decimal from typing import Optional, Tuple, List, Type, Any from .antlr.PqlLexer import PqlLexer @@ -164,12 +165,53 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : v = ctx.unary_operator if v: operator = full_text(v).upper() + right = cls.parse_expr(ctx.right) + + # some unary have no meaning + # and packing them into AST just creates noise for consuming + if operator == '+': + # skip the BS. ignore the plus + # We can do this because we don't support `++a` expressions + return right + + if ( + operator == '-' and + isinstance(right, ast.Literal) and + isinstance(right.value, (int, float, Decimal)) + ): + # right.value will always be positive digit. + # Our syntax parser guarantees that. + return ast.Literal( + right.value * -1, + full_text(ctx) # unary minus with underlying literal value as one string + ) + + if ( + operator == 'NOT' and + isinstance(right, ast.Literal) + ): + # unlikely to ever happen, but still + v = not right.value + return ast.Literal( + v, + 'true' if v else 'false' + ) + + + # else: + # # cannot avoid packaging unary "-" separate. + # # it's in front of a non-literal expression that need to be negated manually later + # TODO: contemplate converting this from unary `-expr` into regular `-1 * expr` + # to escape Unary minus completely. + + # We dealt with '+'. We half-dealt with '-' + # What's left is 'NOT' + # These leftovers we pass through as unary. return ast.Expr( operator, - (cls.parse_expr(ctx.right),) + (right,) ) - v: Optional[str] = full_text(ctx.operator) if v: # this is super generic expression of type diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index 92bb515..d4fb587 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -24,8 +24,22 @@ 'slug not like \'blah%\'', 'slug in (1,2)', 'slug not in (1,2)', + # Special non-literal extractions: + # + # BETWEEN expressions extremely annoying to deal with in consumer code + # Between is expressed as union of simple expressions depicting its meaning. 'a BETWEEN b AND c', ## --> (a >= b) AND (a <= c) 'a NOT BETWEEN b AND c', ## --> (a < b) OR (a > c) + # unary expressions are annoying to deal with in consumer code + # too many edge cases. 
This parser extracts meaning statically where possible. + 'NOT a', ## (NOT (Taxon a)) + '-a', ## ('-' (Taxon a)) + '+a', ## (Taxon a) # skip + + '-2', ## (Literal -2) # negate number if expr is number + 'NOT 3', ## becomes boolean false + 'NOT 0', ## becomes boolean true + 'NOT "text"', ## becomes boolean false + 'NOT ""', ## becomes boolean true ) @@ -184,6 +198,14 @@ ), ), ), + ast.Expr('NOT', (ast.Taxon('a'),)), # 'not a' + ast.Expr('-', (ast.Taxon('a'),)), # '-a' + ast.Taxon('a'), ## '+a' skips + + ast.Literal(-2, '-2'), ## '-2' + ast.Literal(False, 'false'), # NOT 3 + ast.Literal(True, 'true'), # NOT 0 + ast.Literal(False, 'false'), # NOT 'text' + ast.Literal(True, 'true'), # NOT '' ) @@ -202,6 +224,14 @@ 'NOT (slug IN (1, 2))', '((a >= b) AND (a <= c))', # was: a BETWEEN b AND c '((a < b) OR (a > c))', # was: a NOT BETWEEN b AND c + 'NOT a', # ast.Expr('NOT', (ast.Taxon('a'),)), # 'not a' + '-a', # ast.Expr('-', (ast.Taxon('a'),)), # '-a' + 'a', # ast.Taxon('a'), ## '+a' skips + + '-2', # ast.Literal(-2, '-2'), ## '-2' + 'false', # NOT 3 + 'true', # NOT 0 + 'false', # NOT 'text' + 'true', # NOT '' ) @@ -263,5 +293,3 @@ def test_tel_to_ast_and_back(input: str, ast_should_be: ast.Node, output_should_ def test_ast_to_tel_special_cases(ast_input: ast.Node, str_output_should_be: str): output_is = to_tel(ast_input) assert output_is == str_output_should_be - - From 6e346d4f781366bfda2c915d32925bf3c6108d06 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Tue, 8 Dec 2020 01:19:41 -0800 Subject: [PATCH 30/32] change Visitor parser helpers from imperative class methods to chained visit(node) style. More in-line with ANTLR way of doing things. --- grammar/PqlParser.g4 | 7 +- js-temp/PqlParser.js | 560 ++++++++++++------ js-temp/PqlParserListener.js | 26 +- js-temp/PqlParserVisitor.js | 16 +- python/src/pql_grammar/antlr/PqlParser.py | 438 +++++++++----- .../pql_grammar/antlr/PqlParserListener.py | 26 +- .../src/pql_grammar/antlr/PqlParserVisitor.py | 14 +- python/src/pql_grammar/from_pql.py | 252 ++++---- 8 files changed, 886 insertions(+), 453 deletions(-) diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index e1b09bd..f0cd8e9 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -37,6 +37,8 @@ expr | taxon ; +exprList: expr ( COMMA expr )* ; + // Note that function supports optional list of arguments trapped as `expr` // which allows us to have // named (`arg1=value1, arg2=value2'` and @@ -44,8 +46,9 @@ expr // Named ones will come as `expr` with left=expr,operator=ASSIGN,right=expr contents. // You might need to express these as ordered dict / list of tuples to preserve names of args. // Positional will be whatever literal or other single-valued expr content could be. -fn: function_name=identifierMultipart OPEN_PAREN arguments=exprList? CLOSE_PAREN; -exprList: expr ( COMMA expr )* ; +fn: function_name=identifierMultipart OPEN_PAREN arguments=fnArgs? CLOSE_PAREN ; +fnArgs: fnArg ( COMMA fnArg)* ; +fnArg: ( argument_name=WORD ASSIGN)? argument_value=expr ; // TODO: TAXON_TAG_DELIMITER is being killed off. Remove when we migrate out of taxon tags. 
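// A taxon reference has the form `?namespace|slug:tag`, where the leading `?`,
// the `namespace|` prefix and the `:tag` suffix are each optional and every part
// is an identifierMultipart (WORD, optionally dot-separated).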
taxon: diff --git a/js-temp/PqlParser.js b/js-temp/PqlParser.js index 57bad1e..a4ab6ae 100644 --- a/js-temp/PqlParser.js +++ b/js-temp/PqlParser.js @@ -8,78 +8,86 @@ var grammarFileName = "PqlParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u00035r\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t\u0004", - "\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004\b", - "\t\b\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003\u0003\u0003\u0003", + "\u00035\u0084\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", + "\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004", + "\b\t\b\u0004\t\t\t\u0004\n\t\n\u0003\u0002\u0003\u0002\u0003\u0002\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0003\u0003\u0003\u0005\u0003\u001e\n\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0005\u0003\"\n\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0005\u0003.\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0005\u00032\n\u0003\u0003\u0003\u0003\u0003\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0005\u0003:\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0005\u0003@\n\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", - "\u0003\u0003\u0007\u0003G\n\u0003\f\u0003\u000e\u0003J\u000b\u0003\u0003", - "\u0004\u0003\u0004\u0003\u0004\u0005\u0004O\n\u0004\u0003\u0004\u0003", - "\u0004\u0003\u0005\u0003\u0005\u0003\u0005\u0007\u0005V\n\u0005\f\u0005", - "\u000e\u0005Y\u000b\u0005\u0003\u0006\u0005\u0006\\\n\u0006\u0003\u0006", - "\u0003\u0006\u0003\u0006\u0005\u0006a\n\u0006\u0003\u0006\u0003\u0006", - "\u0003\u0006\u0005\u0006f\n\u0006\u0003\u0007\u0003\u0007\u0003\u0007", - "\u0007\u0007k\n\u0007\f\u0007\u000e\u0007n\u000b\u0007\u0003\b\u0003", - "\b\u0003\b\u0002\u0003\u0004\t\u0002\u0004\u0006\b\n\f\u000e\u0002\n", - "\u0005\u0002\u0015\u0015\u0019\u0019&&\u0005\u0002\u0012\u0012\u0016", - "\u0016\u001c\u001c\u0004\u0002\u0015\u0015\u0019\u0019\u0004\u0002\u0005", - "\u0006\u0013\u0014\u0006\u0002\u0004\u0004\u0007\b\r\r##\u0004\u0002", - "\u0003\u0003\u001f\u001f\u0004\u0002\t\t))\u0006\u0002!!((*,//\u0002", - "\u0080\u0002\u0010\u0003\u0002\u0002\u0002\u0004\u001d\u0003\u0002\u0002", - "\u0002\u0006K\u0003\u0002\u0002\u0002\bR\u0003\u0002\u0002\u0002\n[", - "\u0003\u0002\u0002\u0002\fg\u0003\u0002\u0002\u0002\u000eo\u0003\u0002", - "\u0002\u0002\u0010\u0011\u0005\u0004\u0003\u0002\u0011\u0012\u0007\u0002", - "\u0002\u0003\u0012\u0003\u0003\u0002\u0002\u0002\u0013\u0014\b\u0003", - "\u0001\u0002\u0014\u0015\t\u0002\u0002\u0002\u0015\u001e\u0005\u0004", - "\u0003\u0010\u0016\u0017\u0007\u0017\u0002\u0002\u0017\u0018\u0005\u0004", - "\u0003\u0002\u0018\u0019\u0007\u000e\u0002\u0002\u0019\u001e\u0003\u0002", - "\u0002\u0002\u001a\u001e\u0005\u000e\b\u0002\u001b\u001e\u0005\u0006", - "\u0004\u0002\u001c\u001e\u0005\n\u0006\u0002\u001d\u0013\u0003\u0002", - "\u0002\u0002\u001d\u0016\u0003\u0002\u0002\u0002\u001d\u001a\u0003\u0002", - "\u0002\u0002\u001d\u001b\u0003\u0002\u0002\u0002\u001d\u001c\u0003\u0002", - "\u0002\u0002\u001eH\u0003\u0002\u0002\u0002\u001f \f\u000f\u0002\u0002", - " !\t\u0003\u0002\u0002!G\u0005\u0004\u0003\u0010\"#\f\u000e\u0002\u0002", - "#$\t\u0004\u0002\u0002$G\u0005\u0004\u0003\u000f%&\f\r\u0002\u0002&", - 
"\'\t\u0005\u0002\u0002\'G\u0005\u0004\u0003\u000e()\f\f\u0002\u0002", - ")*\t\u0006\u0002\u0002*G\u0005\u0004\u0003\r+-\f\u000b\u0002\u0002,", - ".\u0007&\u0002\u0002-,\u0003\u0002\u0002\u0002-.\u0003\u0002\u0002\u0002", - "./\u0003\u0002\u0002\u0002/0\u0007%\u0002\u00020G\u0005\u0004\u0003", - "\f12\f\t\u0002\u000223\t\u0007\u0002\u00023G\u0005\u0004\u0003\n45\f", - "\b\u0002\u000256\t\b\u0002\u00026G\u0005\u0004\u0003\t79\f\u0007\u0002", - "\u00028:\u0007&\u0002\u000298\u0003\u0002\u0002\u00029:\u0003\u0002", - "\u0002\u0002:;\u0003\u0002\u0002\u0002;<\u0007 \u0002\u0002@\u0007&\u0002\u0002?>\u0003\u0002", - "\u0002\u0002?@\u0003\u0002\u0002\u0002@A\u0003\u0002\u0002\u0002AB\u0007", - "\"\u0002\u0002BC\u0007\u0017\u0002\u0002CD\u0005\b\u0005\u0002DE\u0007", - "\u000e\u0002\u0002EG\u0003\u0002\u0002\u0002F\u001f\u0003\u0002\u0002", - "\u0002F\"\u0003\u0002\u0002\u0002F%\u0003\u0002\u0002\u0002F(\u0003", - "\u0002\u0002\u0002F+\u0003\u0002\u0002\u0002F1\u0003\u0002\u0002\u0002", - "F4\u0003\u0002\u0002\u0002F7\u0003\u0002\u0002\u0002F=\u0003\u0002\u0002", - "\u0002GJ\u0003\u0002\u0002\u0002HF\u0003\u0002\u0002\u0002HI\u0003\u0002", - "\u0002\u0002I\u0005\u0003\u0002\u0002\u0002JH\u0003\u0002\u0002\u0002", - "KL\u0005\f\u0007\u0002LN\u0007\u0017\u0002\u0002MO\u0005\b\u0005\u0002", - "NM\u0003\u0002\u0002\u0002NO\u0003\u0002\u0002\u0002OP\u0003\u0002\u0002", - "\u0002PQ\u0007\u000e\u0002\u0002Q\u0007\u0003\u0002\u0002\u0002RW\u0005", - "\u0004\u0003\u0002ST\u0007\u0010\u0002\u0002TV\u0005\u0004\u0003\u0002", - "US\u0003\u0002\u0002\u0002VY\u0003\u0002\u0002\u0002WU\u0003\u0002\u0002", - "\u0002WX\u0003\u0002\u0002\u0002X\t\u0003\u0002\u0002\u0002YW\u0003", - "\u0002\u0002\u0002Z\\\u0007\u001a\u0002\u0002[Z\u0003\u0002\u0002\u0002", - "[\\\u0003\u0002\u0002\u0002\\`\u0003\u0002\u0002\u0002]^\u0005\f\u0007", - "\u0002^_\u0007\u0018\u0002\u0002_a\u0003\u0002\u0002\u0002`]\u0003\u0002", - "\u0002\u0002`a\u0003\u0002\u0002\u0002ab\u0003\u0002\u0002\u0002be\u0005", - "\f\u0007\u0002cd\u0007\u000f\u0002\u0002df\u0005\f\u0007\u0002ec\u0003", - "\u0002\u0002\u0002ef\u0003\u0002\u0002\u0002f\u000b\u0003\u0002\u0002", - "\u0002gl\u00075\u0002\u0002hi\u0007\u0011\u0002\u0002ik\u00075\u0002", - "\u0002jh\u0003\u0002\u0002\u0002kn\u0003\u0002\u0002\u0002lj\u0003\u0002", - "\u0002\u0002lm\u0003\u0002\u0002\u0002m\r\u0003\u0002\u0002\u0002nl", - "\u0003\u0002\u0002\u0002op\t\t\u0002\u0002p\u000f\u0003\u0002\u0002", - "\u0002\u000e\u001d-9?FHNW[`el"].join(""); + "\u0003\u0003\u0003\u0005\u0003>\n\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0005\u0003D\n\u0003\u0003\u0003\u0003\u0003\u0003", + "\u0003\u0003\u0003\u0003\u0003\u0007\u0003K\n\u0003\f\u0003\u000e\u0003", + "N\u000b\u0003\u0003\u0004\u0003\u0004\u0003\u0004\u0007\u0004S\n\u0004", + "\f\u0004\u000e\u0004V\u000b\u0004\u0003\u0005\u0003\u0005\u0003\u0005", + "\u0005\u0005[\n\u0005\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006", + "\u0003\u0006\u0007\u0006b\n\u0006\f\u0006\u000e\u0006e\u000b\u0006\u0003", + "\u0007\u0003\u0007\u0005\u0007i\n\u0007\u0003\u0007\u0003\u0007\u0003", + "\b\u0005\bn\n\b\u0003\b\u0003\b\u0003\b\u0005\bs\n\b\u0003\b\u0003\b", + "\u0003\b\u0005\bx\n\b\u0003\t\u0003\t\u0003\t\u0007\t}\n\t\f\t\u000e", + "\t\u0080\u000b\t\u0003\n\u0003\n\u0003\n\u0002\u0003\u0004\u000b\u0002", + "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0002\n\u0005\u0002\u0015\u0015", + "\u0019\u0019&&\u0005\u0002\u0012\u0012\u0016\u0016\u001c\u001c\u0004", + "\u0002\u0015\u0015\u0019\u0019\u0004\u0002\u0005\u0006\u0013\u0014\u0006", + 
"\u0002\u0004\u0004\u0007\b\r\r##\u0004\u0002\u0003\u0003\u001f\u001f", + "\u0004\u0002\t\t))\u0006\u0002!!((*,//\u0002\u0092\u0002\u0014\u0003", + "\u0002\u0002\u0002\u0004!\u0003\u0002\u0002\u0002\u0006O\u0003\u0002", + "\u0002\u0002\bW\u0003\u0002\u0002\u0002\n^\u0003\u0002\u0002\u0002\f", + "h\u0003\u0002\u0002\u0002\u000em\u0003\u0002\u0002\u0002\u0010y\u0003", + "\u0002\u0002\u0002\u0012\u0081\u0003\u0002\u0002\u0002\u0014\u0015\u0005", + "\u0004\u0003\u0002\u0015\u0016\u0007\u0002\u0002\u0003\u0016\u0003\u0003", + "\u0002\u0002\u0002\u0017\u0018\b\u0003\u0001\u0002\u0018\u0019\t\u0002", + "\u0002\u0002\u0019\"\u0005\u0004\u0003\u0010\u001a\u001b\u0007\u0017", + "\u0002\u0002\u001b\u001c\u0005\u0004\u0003\u0002\u001c\u001d\u0007\u000e", + "\u0002\u0002\u001d\"\u0003\u0002\u0002\u0002\u001e\"\u0005\u0012\n\u0002", + "\u001f\"\u0005\b\u0005\u0002 \"\u0005\u000e\b\u0002!\u0017\u0003\u0002", + "\u0002\u0002!\u001a\u0003\u0002\u0002\u0002!\u001e\u0003\u0002\u0002", + "\u0002!\u001f\u0003\u0002\u0002\u0002! \u0003\u0002\u0002\u0002\"L\u0003", + "\u0002\u0002\u0002#$\f\u000f\u0002\u0002$%\t\u0003\u0002\u0002%K\u0005", + "\u0004\u0003\u0010&\'\f\u000e\u0002\u0002\'(\t\u0004\u0002\u0002(K\u0005", + "\u0004\u0003\u000f)*\f\r\u0002\u0002*+\t\u0005\u0002\u0002+K\u0005\u0004", + "\u0003\u000e,-\f\f\u0002\u0002-.\t\u0006\u0002\u0002.K\u0005\u0004\u0003", + "\r/1\f\u000b\u0002\u000202\u0007&\u0002\u000210\u0003\u0002\u0002\u0002", + "12\u0003\u0002\u0002\u000223\u0003\u0002\u0002\u000234\u0007%\u0002", + "\u00024K\u0005\u0004\u0003\f56\f\t\u0002\u000267\t\u0007\u0002\u0002", + "7K\u0005\u0004\u0003\n89\f\b\u0002\u00029:\t\b\u0002\u0002:K\u0005\u0004", + "\u0003\t;=\f\u0007\u0002\u0002<>\u0007&\u0002\u0002=<\u0003\u0002\u0002", + "\u0002=>\u0003\u0002\u0002\u0002>?\u0003\u0002\u0002\u0002?@\u0007 ", + "\u0002\u0002@K\u0005\u0004\u0003\bAC\f\n\u0002\u0002BD\u0007&\u0002", + "\u0002CB\u0003\u0002\u0002\u0002CD\u0003\u0002\u0002\u0002DE\u0003\u0002", + "\u0002\u0002EF\u0007\"\u0002\u0002FG\u0007\u0017\u0002\u0002GH\u0005", + "\u0006\u0004\u0002HI\u0007\u000e\u0002\u0002IK\u0003\u0002\u0002\u0002", + "J#\u0003\u0002\u0002\u0002J&\u0003\u0002\u0002\u0002J)\u0003\u0002\u0002", + "\u0002J,\u0003\u0002\u0002\u0002J/\u0003\u0002\u0002\u0002J5\u0003\u0002", + "\u0002\u0002J8\u0003\u0002\u0002\u0002J;\u0003\u0002\u0002\u0002JA\u0003", + "\u0002\u0002\u0002KN\u0003\u0002\u0002\u0002LJ\u0003\u0002\u0002\u0002", + "LM\u0003\u0002\u0002\u0002M\u0005\u0003\u0002\u0002\u0002NL\u0003\u0002", + "\u0002\u0002OT\u0005\u0004\u0003\u0002PQ\u0007\u0010\u0002\u0002QS\u0005", + "\u0004\u0003\u0002RP\u0003\u0002\u0002\u0002SV\u0003\u0002\u0002\u0002", + "TR\u0003\u0002\u0002\u0002TU\u0003\u0002\u0002\u0002U\u0007\u0003\u0002", + "\u0002\u0002VT\u0003\u0002\u0002\u0002WX\u0005\u0010\t\u0002XZ\u0007", + "\u0017\u0002\u0002Y[\u0005\n\u0006\u0002ZY\u0003\u0002\u0002\u0002Z", + "[\u0003\u0002\u0002\u0002[\\\u0003\u0002\u0002\u0002\\]\u0007\u000e", + "\u0002\u0002]\t\u0003\u0002\u0002\u0002^c\u0005\f\u0007\u0002_`\u0007", + "\u0010\u0002\u0002`b\u0005\f\u0007\u0002a_\u0003\u0002\u0002\u0002b", + "e\u0003\u0002\u0002\u0002ca\u0003\u0002\u0002\u0002cd\u0003\u0002\u0002", + "\u0002d\u000b\u0003\u0002\u0002\u0002ec\u0003\u0002\u0002\u0002fg\u0007", + "5\u0002\u0002gi\u0007\r\u0002\u0002hf\u0003\u0002\u0002\u0002hi\u0003", + "\u0002\u0002\u0002ij\u0003\u0002\u0002\u0002jk\u0005\u0004\u0003\u0002", + "k\r\u0003\u0002\u0002\u0002ln\u0007\u001a\u0002\u0002ml\u0003\u0002", + 
"\u0002\u0002mn\u0003\u0002\u0002\u0002nr\u0003\u0002\u0002\u0002op\u0005", + "\u0010\t\u0002pq\u0007\u0018\u0002\u0002qs\u0003\u0002\u0002\u0002r", + "o\u0003\u0002\u0002\u0002rs\u0003\u0002\u0002\u0002st\u0003\u0002\u0002", + "\u0002tw\u0005\u0010\t\u0002uv\u0007\u000f\u0002\u0002vx\u0005\u0010", + "\t\u0002wu\u0003\u0002\u0002\u0002wx\u0003\u0002\u0002\u0002x\u000f", + "\u0003\u0002\u0002\u0002y~\u00075\u0002\u0002z{\u0007\u0011\u0002\u0002", + "{}\u00075\u0002\u0002|z\u0003\u0002\u0002\u0002}\u0080\u0003\u0002\u0002", + "\u0002~|\u0003\u0002\u0002\u0002~\u007f\u0003\u0002\u0002\u0002\u007f", + "\u0011\u0003\u0002\u0002\u0002\u0080~\u0003\u0002\u0002\u0002\u0081", + "\u0082\t\t\u0002\u0002\u0082\u0013\u0003\u0002\u0002\u0002\u0010!1=", + "CJLTZchmrw~"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -106,8 +114,8 @@ var symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ]; -var ruleNames = [ "parseTel", "expr", "fn", "exprList", "taxon", "identifierMultipart", - "literalValue" ]; +var ruleNames = [ "parseTel", "expr", "exprList", "fn", "fnArgs", "fnArg", + "taxon", "identifierMultipart", "literalValue" ]; function PqlParser (input) { antlr4.Parser.call(this, input); @@ -182,11 +190,13 @@ PqlParser.WORD = 51; PqlParser.RULE_parseTel = 0; PqlParser.RULE_expr = 1; -PqlParser.RULE_fn = 2; -PqlParser.RULE_exprList = 3; -PqlParser.RULE_taxon = 4; -PqlParser.RULE_identifierMultipart = 5; -PqlParser.RULE_literalValue = 6; +PqlParser.RULE_exprList = 2; +PqlParser.RULE_fn = 3; +PqlParser.RULE_fnArgs = 4; +PqlParser.RULE_fnArg = 5; +PqlParser.RULE_taxon = 6; +PqlParser.RULE_identifierMultipart = 7; +PqlParser.RULE_literalValue = 8; function ParseTelContext(parser, parent, invokingState) { @@ -244,9 +254,9 @@ PqlParser.prototype.parseTel = function() { this.enterRule(localctx, 0, PqlParser.RULE_parseTel); try { this.enterOuterAlt(localctx, 1); - this.state = 14; + this.state = 18; this.expr(0); - this.state = 15; + this.state = 19; this.match(PqlParser.EOF); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -444,12 +454,12 @@ PqlParser.prototype.expr = function(_p) { var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 27; + this.state = 31; this._errHandler.sync(this); var la_ = this._interp.adaptivePredict(this._input,0,this._ctx); switch(la_) { case 1: - this.state = 18; + this.state = 22; localctx.unary_operator = this._input.LT(1); _la = this._input.LA(1); if(!(((((_la - 19)) & ~0x1f) == 0 && ((1 << (_la - 19)) & ((1 << (PqlParser.MINUS - 19)) | (1 << (PqlParser.PLUS - 19)) | (1 << (PqlParser.K_NOT - 19)))) !== 0))) { @@ -459,37 +469,37 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 19; + this.state = 23; localctx.right = this.expr(14); break; case 2: - this.state = 20; + this.state = 24; this.match(PqlParser.OPEN_PAREN); - this.state = 21; + this.state = 25; localctx.inner = this.expr(0); - this.state = 22; + this.state = 26; this.match(PqlParser.CLOSE_PAREN); break; case 3: - this.state = 24; + this.state = 28; this.literalValue(); break; case 4: - this.state = 25; + this.state = 29; this.fn(); break; case 5: - this.state = 26; + this.state = 30; this.taxon(); break; } this._ctx.stop = this._input.LT(-1); - this.state = 70; + this.state = 74; this._errHandler.sync(this); var _alt = 
this._interp.adaptivePredict(this._input,5,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { @@ -498,7 +508,7 @@ PqlParser.prototype.expr = function(_p) { this.triggerExitRuleEvent(); } _prevctx = localctx; - this.state = 68; + this.state = 72; this._errHandler.sync(this); var la_ = this._interp.adaptivePredict(this._input,4,this._ctx); switch(la_) { @@ -506,11 +516,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 29; + this.state = 33; if (!( this.precpred(this._ctx, 13))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 13)"); } - this.state = 30; + this.state = 34; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) !== 0))) { @@ -520,7 +530,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 31; + this.state = 35; localctx.right = this.expr(14); break; @@ -528,11 +538,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 32; + this.state = 36; if (!( this.precpred(this._ctx, 12))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 12)"); } - this.state = 33; + this.state = 37; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.MINUS || _la===PqlParser.PLUS)) { @@ -542,7 +552,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 34; + this.state = 38; localctx.right = this.expr(13); break; @@ -550,11 +560,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 35; + this.state = 39; if (!( this.precpred(this._ctx, 11))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 11)"); } - this.state = 36; + this.state = 40; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) !== 0))) { @@ -564,7 +574,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 37; + this.state = 41; localctx.right = this.expr(12); break; @@ -572,11 +582,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 38; + this.state = 42; if (!( this.precpred(this._ctx, 10))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 10)"); } - this.state = 39; + this.state = 43; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(((((_la - 2)) & ~0x1f) == 0 && ((1 << (_la - 2)) & ((1 << (PqlParser.EQ - 2)) | (1 << (PqlParser.NOT_EQ1 - 2)) | (1 << (PqlParser.NOT_EQ2 - 2)) | (1 << (PqlParser.ASSIGN - 2)) | (1 << (PqlParser.K_IS - 2)))) !== 0))) { @@ -586,7 +596,7 @@ PqlParser.prototype.expr = function(_p) { 
this._errHandler.reportMatch(this); this.consume(); } - this.state = 40; + this.state = 44; localctx.right = this.expr(11); break; @@ -594,21 +604,21 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 41; + this.state = 45; if (!( this.precpred(this._ctx, 9))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 9)"); } - this.state = 43; + this.state = 47; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===PqlParser.K_NOT) { - this.state = 42; + this.state = 46; localctx.is_negated = this.match(PqlParser.K_NOT); } - this.state = 45; + this.state = 49; localctx.operator = this.match(PqlParser.K_LIKE); - this.state = 46; + this.state = 50; localctx.right = this.expr(10); break; @@ -616,11 +626,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 47; + this.state = 51; if (!( this.precpred(this._ctx, 7))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 7)"); } - this.state = 48; + this.state = 52; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.AND || _la===PqlParser.K_AND)) { @@ -630,7 +640,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 49; + this.state = 53; localctx.right = this.expr(8); break; @@ -638,11 +648,11 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 50; + this.state = 54; if (!( this.precpred(this._ctx, 6))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 6)"); } - this.state = 51; + this.state = 55; localctx.operator = this._input.LT(1); _la = this._input.LA(1); if(!(_la===PqlParser.OR || _la===PqlParser.K_OR)) { @@ -652,7 +662,7 @@ PqlParser.prototype.expr = function(_p) { this._errHandler.reportMatch(this); this.consume(); } - this.state = 52; + this.state = 56; localctx.right = this.expr(7); break; @@ -660,21 +670,21 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 53; + this.state = 57; if (!( this.precpred(this._ctx, 5))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 5)"); } - this.state = 55; + this.state = 59; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===PqlParser.K_NOT) { - this.state = 54; + this.state = 58; localctx.is_negated = this.match(PqlParser.K_NOT); } - this.state = 57; + this.state = 61; localctx.operator = this.match(PqlParser.K_BETWEEN); - this.state = 58; + this.state = 62; localctx.right = this.expr(6); break; @@ -682,31 +692,31 @@ PqlParser.prototype.expr = function(_p) { localctx = new ExprContext(this, _parentctx, _parentState); localctx.left = _prevctx; this.pushNewRecursionContext(localctx, _startState, PqlParser.RULE_expr); - this.state = 59; + this.state = 63; if (!( this.precpred(this._ctx, 8))) { throw new antlr4.error.FailedPredicateException(this, "this.precpred(this._ctx, 8)"); } - this.state = 61; + 
this.state = 65; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===PqlParser.K_NOT) { - this.state = 60; + this.state = 64; localctx.is_negated = this.match(PqlParser.K_NOT); } - this.state = 63; + this.state = 67; localctx.operator = this.match(PqlParser.K_IN); - this.state = 64; + this.state = 68; this.match(PqlParser.OPEN_PAREN); - this.state = 65; + this.state = 69; localctx.right_list = this.exprList(); - this.state = 66; + this.state = 70; this.match(PqlParser.CLOSE_PAREN); break; } } - this.state = 72; + this.state = 76; this._errHandler.sync(this); _alt = this._interp.adaptivePredict(this._input,5,this._ctx); } @@ -726,6 +736,106 @@ PqlParser.prototype.expr = function(_p) { }; +function ExprListContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_exprList; + return this; +} + +ExprListContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +ExprListContext.prototype.constructor = ExprListContext; + +ExprListContext.prototype.expr = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTypedRuleContexts(ExprContext); + } else { + return this.getTypedRuleContext(ExprContext,i); + } +}; + +ExprListContext.prototype.COMMA = function(i) { + if(i===undefined) { + i = null; + } + if(i===null) { + return this.getTokens(PqlParser.COMMA); + } else { + return this.getToken(PqlParser.COMMA, i); + } +}; + + +ExprListContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterExprList(this); + } +}; + +ExprListContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitExprList(this); + } +}; + +ExprListContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitExprList(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.ExprListContext = ExprListContext; + +PqlParser.prototype.exprList = function() { + + var localctx = new ExprListContext(this, this._ctx, this.state); + this.enterRule(localctx, 4, PqlParser.RULE_exprList); + var _la = 0; // Token type + try { + this.enterOuterAlt(localctx, 1); + this.state = 77; + this.expr(0); + this.state = 82; + this._errHandler.sync(this); + _la = this._input.LA(1); + while(_la===PqlParser.COMMA) { + this.state = 78; + this.match(PqlParser.COMMA); + this.state = 79; + this.expr(0); + this.state = 84; + this._errHandler.sync(this); + _la = this._input.LA(1); + } + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + function FnContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; @@ -737,7 +847,7 @@ function FnContext(parser, parent, invokingState) { this.parser = parser; this.ruleIndex = PqlParser.RULE_fn; this.function_name = null; // IdentifierMultipartContext - this.arguments = null; // ExprListContext + this.arguments = null; // FnArgsContext return this; } @@ -756,8 +866,8 @@ FnContext.prototype.identifierMultipart = function() { return this.getTypedRuleContext(IdentifierMultipartContext,0); }; -FnContext.prototype.exprList = 
function() { - return this.getTypedRuleContext(ExprListContext,0); +FnContext.prototype.fnArgs = function() { + return this.getTypedRuleContext(FnArgsContext,0); }; FnContext.prototype.enterRule = function(listener) { @@ -788,23 +898,23 @@ PqlParser.FnContext = FnContext; PqlParser.prototype.fn = function() { var localctx = new FnContext(this, this._ctx, this.state); - this.enterRule(localctx, 4, PqlParser.RULE_fn); + this.enterRule(localctx, 6, PqlParser.RULE_fn); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 73; + this.state = 85; localctx.function_name = this.identifierMultipart(); - this.state = 74; + this.state = 86; this.match(PqlParser.OPEN_PAREN); - this.state = 76; + this.state = 88; this._errHandler.sync(this); _la = this._input.LA(1); if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE))) !== 0) || ((((_la - 36)) & ~0x1f) == 0 && ((1 << (_la - 36)) & ((1 << (PqlParser.K_NOT - 36)) | (1 << (PqlParser.K_NULL - 36)) | (1 << (PqlParser.K_TRUE - 36)) | (1 << (PqlParser.NUMERIC_LITERAL - 36)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 36)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 36)) | (1 << (PqlParser.WORD - 36)))) !== 0)) { - this.state = 75; - localctx.arguments = this.exprList(); + this.state = 87; + localctx.arguments = this.fnArgs(); } - this.state = 78; + this.state = 90; this.match(PqlParser.CLOSE_PAREN); } catch (re) { if(re instanceof antlr4.error.RecognitionException) { @@ -821,7 +931,7 @@ PqlParser.prototype.fn = function() { }; -function ExprListContext(parser, parent, invokingState) { +function FnArgsContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; } @@ -830,25 +940,25 @@ function ExprListContext(parser, parent, invokingState) { } antlr4.ParserRuleContext.call(this, parent, invokingState); this.parser = parser; - this.ruleIndex = PqlParser.RULE_exprList; + this.ruleIndex = PqlParser.RULE_fnArgs; return this; } -ExprListContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); -ExprListContext.prototype.constructor = ExprListContext; +FnArgsContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +FnArgsContext.prototype.constructor = FnArgsContext; -ExprListContext.prototype.expr = function(i) { +FnArgsContext.prototype.fnArg = function(i) { if(i===undefined) { i = null; } if(i===null) { - return this.getTypedRuleContexts(ExprContext); + return this.getTypedRuleContexts(FnArgContext); } else { - return this.getTypedRuleContext(ExprContext,i); + return this.getTypedRuleContext(FnArgContext,i); } }; -ExprListContext.prototype.COMMA = function(i) { +FnArgsContext.prototype.COMMA = function(i) { if(i===undefined) { i = null; } @@ -860,21 +970,21 @@ ExprListContext.prototype.COMMA = function(i) { }; -ExprListContext.prototype.enterRule = function(listener) { +FnArgsContext.prototype.enterRule = function(listener) { if(listener instanceof PqlParserListener ) { - listener.enterExprList(this); + listener.enterFnArgs(this); } }; -ExprListContext.prototype.exitRule = function(listener) { +FnArgsContext.prototype.exitRule = function(listener) { if(listener instanceof PqlParserListener ) { - listener.exitExprList(this); + listener.exitFnArgs(this); } }; -ExprListContext.prototype.accept = function(visitor) { +FnArgsContext.prototype.accept = function(visitor) { if ( visitor instanceof PqlParserVisitor ) { - return visitor.visitExprList(this); + return 
visitor.visitFnArgs(this); } else { return visitor.visitChildren(this); } @@ -883,26 +993,26 @@ ExprListContext.prototype.accept = function(visitor) { -PqlParser.ExprListContext = ExprListContext; +PqlParser.FnArgsContext = FnArgsContext; -PqlParser.prototype.exprList = function() { +PqlParser.prototype.fnArgs = function() { - var localctx = new ExprListContext(this, this._ctx, this.state); - this.enterRule(localctx, 6, PqlParser.RULE_exprList); + var localctx = new FnArgsContext(this, this._ctx, this.state); + this.enterRule(localctx, 8, PqlParser.RULE_fnArgs); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 80; - this.expr(0); - this.state = 85; + this.state = 92; + this.fnArg(); + this.state = 97; this._errHandler.sync(this); _la = this._input.LA(1); while(_la===PqlParser.COMMA) { - this.state = 81; + this.state = 93; this.match(PqlParser.COMMA); - this.state = 82; - this.expr(0); - this.state = 87; + this.state = 94; + this.fnArg(); + this.state = 99; this._errHandler.sync(this); _la = this._input.LA(1); } @@ -921,6 +1031,94 @@ PqlParser.prototype.exprList = function() { }; +function FnArgContext(parser, parent, invokingState) { + if(parent===undefined) { + parent = null; + } + if(invokingState===undefined || invokingState===null) { + invokingState = -1; + } + antlr4.ParserRuleContext.call(this, parent, invokingState); + this.parser = parser; + this.ruleIndex = PqlParser.RULE_fnArg; + this.argument_name = null; // Token + this.argument_value = null; // ExprContext + return this; +} + +FnArgContext.prototype = Object.create(antlr4.ParserRuleContext.prototype); +FnArgContext.prototype.constructor = FnArgContext; + +FnArgContext.prototype.expr = function() { + return this.getTypedRuleContext(ExprContext,0); +}; + +FnArgContext.prototype.ASSIGN = function() { + return this.getToken(PqlParser.ASSIGN, 0); +}; + +FnArgContext.prototype.WORD = function() { + return this.getToken(PqlParser.WORD, 0); +}; + +FnArgContext.prototype.enterRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.enterFnArg(this); + } +}; + +FnArgContext.prototype.exitRule = function(listener) { + if(listener instanceof PqlParserListener ) { + listener.exitFnArg(this); + } +}; + +FnArgContext.prototype.accept = function(visitor) { + if ( visitor instanceof PqlParserVisitor ) { + return visitor.visitFnArg(this); + } else { + return visitor.visitChildren(this); + } +}; + + + + +PqlParser.FnArgContext = FnArgContext; + +PqlParser.prototype.fnArg = function() { + + var localctx = new FnArgContext(this, this._ctx, this.state); + this.enterRule(localctx, 10, PqlParser.RULE_fnArg); + try { + this.enterOuterAlt(localctx, 1); + this.state = 102; + this._errHandler.sync(this); + var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); + if(la_===1) { + this.state = 100; + localctx.argument_name = this.match(PqlParser.WORD); + this.state = 101; + this.match(PqlParser.ASSIGN); + + } + this.state = 104; + localctx.argument_value = this.expr(0); + } catch (re) { + if(re instanceof antlr4.error.RecognitionException) { + localctx.exception = re; + this._errHandler.reportError(this, re); + this._errHandler.recover(this, re); + } else { + throw re; + } + } finally { + this.exitRule(); + } + return localctx; +}; + + function TaxonContext(parser, parent, invokingState) { if(parent===undefined) { parent = null; @@ -992,37 +1190,37 @@ PqlParser.TaxonContext = TaxonContext; PqlParser.prototype.taxon = function() { var localctx = new TaxonContext(this, this._ctx, 
this.state); - this.enterRule(localctx, 8, PqlParser.RULE_taxon); + this.enterRule(localctx, 12, PqlParser.RULE_taxon); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 89; + this.state = 107; this._errHandler.sync(this); _la = this._input.LA(1); if(_la===PqlParser.QUESTION_MARK) { - this.state = 88; + this.state = 106; localctx.is_optional = this.match(PqlParser.QUESTION_MARK); } - this.state = 94; + this.state = 112; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,9,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,11,this._ctx); if(la_===1) { - this.state = 91; + this.state = 109; localctx.namespace = this.identifierMultipart(); - this.state = 92; + this.state = 110; this.match(PqlParser.PIPE); } - this.state = 96; + this.state = 114; localctx.slug = this.identifierMultipart(); - this.state = 99; + this.state = 117; this._errHandler.sync(this); - var la_ = this._interp.adaptivePredict(this._input,10,this._ctx); + var la_ = this._interp.adaptivePredict(this._input,12,this._ctx); if(la_===1) { - this.state = 97; + this.state = 115; this.match(PqlParser.COLON); - this.state = 98; + this.state = 116; localctx.tag = this.identifierMultipart(); } @@ -1109,24 +1307,24 @@ PqlParser.IdentifierMultipartContext = IdentifierMultipartContext; PqlParser.prototype.identifierMultipart = function() { var localctx = new IdentifierMultipartContext(this, this._ctx, this.state); - this.enterRule(localctx, 10, PqlParser.RULE_identifierMultipart); + this.enterRule(localctx, 14, PqlParser.RULE_identifierMultipart); try { this.enterOuterAlt(localctx, 1); - this.state = 101; + this.state = 119; this.match(PqlParser.WORD); - this.state = 106; + this.state = 124; this._errHandler.sync(this); - var _alt = this._interp.adaptivePredict(this._input,11,this._ctx) + var _alt = this._interp.adaptivePredict(this._input,13,this._ctx) while(_alt!=2 && _alt!=antlr4.atn.ATN.INVALID_ALT_NUMBER) { if(_alt===1) { - this.state = 102; + this.state = 120; this.match(PqlParser.DOT); - this.state = 103; + this.state = 121; this.match(PqlParser.WORD); } - this.state = 108; + this.state = 126; this._errHandler.sync(this); - _alt = this._interp.adaptivePredict(this._input,11,this._ctx); + _alt = this._interp.adaptivePredict(this._input,13,this._ctx); } } catch (re) { @@ -1212,11 +1410,11 @@ PqlParser.LiteralValueContext = LiteralValueContext; PqlParser.prototype.literalValue = function() { var localctx = new LiteralValueContext(this, this._ctx, this.state); - this.enterRule(localctx, 12, PqlParser.RULE_literalValue); + this.enterRule(localctx, 16, PqlParser.RULE_literalValue); var _la = 0; // Token type try { this.enterOuterAlt(localctx, 1); - this.state = 109; + this.state = 127; _la = this._input.LA(1); if(!(((((_la - 31)) & ~0x1f) == 0 && ((1 << (_la - 31)) & ((1 << (PqlParser.K_FALSE - 31)) | (1 << (PqlParser.K_NULL - 31)) | (1 << (PqlParser.K_TRUE - 31)) | (1 << (PqlParser.NUMERIC_LITERAL - 31)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 31)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 31)))) !== 0))) { this._errHandler.recoverInline(this); diff --git a/js-temp/PqlParserListener.js b/js-temp/PqlParserListener.js index a1dff0a..c3d76e8 100644 --- a/js-temp/PqlParserListener.js +++ b/js-temp/PqlParserListener.js @@ -29,6 +29,15 @@ PqlParserListener.prototype.exitExpr = function(ctx) { }; +// Enter a parse tree produced by PqlParser#exprList. 
+PqlParserListener.prototype.enterExprList = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#exprList. +PqlParserListener.prototype.exitExprList = function(ctx) { +}; + + // Enter a parse tree produced by PqlParser#fn. PqlParserListener.prototype.enterFn = function(ctx) { }; @@ -38,12 +47,21 @@ PqlParserListener.prototype.exitFn = function(ctx) { }; -// Enter a parse tree produced by PqlParser#exprList. -PqlParserListener.prototype.enterExprList = function(ctx) { +// Enter a parse tree produced by PqlParser#fnArgs. +PqlParserListener.prototype.enterFnArgs = function(ctx) { }; -// Exit a parse tree produced by PqlParser#exprList. -PqlParserListener.prototype.exitExprList = function(ctx) { +// Exit a parse tree produced by PqlParser#fnArgs. +PqlParserListener.prototype.exitFnArgs = function(ctx) { +}; + + +// Enter a parse tree produced by PqlParser#fnArg. +PqlParserListener.prototype.enterFnArg = function(ctx) { +}; + +// Exit a parse tree produced by PqlParser#fnArg. +PqlParserListener.prototype.exitFnArg = function(ctx) { }; diff --git a/js-temp/PqlParserVisitor.js b/js-temp/PqlParserVisitor.js index e1e457a..5755bc1 100644 --- a/js-temp/PqlParserVisitor.js +++ b/js-temp/PqlParserVisitor.js @@ -24,14 +24,26 @@ PqlParserVisitor.prototype.visitExpr = function(ctx) { }; +// Visit a parse tree produced by PqlParser#exprList. +PqlParserVisitor.prototype.visitExprList = function(ctx) { + return this.visitChildren(ctx); +}; + + // Visit a parse tree produced by PqlParser#fn. PqlParserVisitor.prototype.visitFn = function(ctx) { return this.visitChildren(ctx); }; -// Visit a parse tree produced by PqlParser#exprList. -PqlParserVisitor.prototype.visitExprList = function(ctx) { +// Visit a parse tree produced by PqlParser#fnArgs. +PqlParserVisitor.prototype.visitFnArgs = function(ctx) { + return this.visitChildren(ctx); +}; + + +// Visit a parse tree produced by PqlParser#fnArg. 
+PqlParserVisitor.prototype.visitFnArg = function(ctx) { return this.visitChildren(ctx); }; diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index 2736cc5..f6d6e66 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -12,45 +12,51 @@ def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\65") - buf.write("r\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b") - buf.write("\t\b\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") - buf.write("\3\5\3\36\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") - buf.write("\3\3\3\3\3\3\3\3\5\3.\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3") - buf.write("\3\3\3\3\3\3\5\3:\n\3\3\3\3\3\3\3\3\3\5\3@\n\3\3\3\3\3") - buf.write("\3\3\3\3\3\3\7\3G\n\3\f\3\16\3J\13\3\3\4\3\4\3\4\5\4O") - buf.write("\n\4\3\4\3\4\3\5\3\5\3\5\7\5V\n\5\f\5\16\5Y\13\5\3\6\5") - buf.write("\6\\\n\6\3\6\3\6\3\6\5\6a\n\6\3\6\3\6\3\6\5\6f\n\6\3\7") - buf.write("\3\7\3\7\7\7k\n\7\f\7\16\7n\13\7\3\b\3\b\3\b\2\3\4\t\2") - buf.write("\4\6\b\n\f\16\2\n\5\2\25\25\31\31&&\5\2\22\22\26\26\34") - buf.write("\34\4\2\25\25\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r##\4") - buf.write("\2\3\3\37\37\4\2\t\t))\6\2!!((*,//\2\u0080\2\20\3\2\2") - buf.write("\2\4\35\3\2\2\2\6K\3\2\2\2\bR\3\2\2\2\n[\3\2\2\2\fg\3") - buf.write("\2\2\2\16o\3\2\2\2\20\21\5\4\3\2\21\22\7\2\2\3\22\3\3") - buf.write("\2\2\2\23\24\b\3\1\2\24\25\t\2\2\2\25\36\5\4\3\20\26\27") - buf.write("\7\27\2\2\27\30\5\4\3\2\30\31\7\16\2\2\31\36\3\2\2\2\32") - buf.write("\36\5\16\b\2\33\36\5\6\4\2\34\36\5\n\6\2\35\23\3\2\2\2") - buf.write("\35\26\3\2\2\2\35\32\3\2\2\2\35\33\3\2\2\2\35\34\3\2\2") - buf.write("\2\36H\3\2\2\2\37 \f\17\2\2 !\t\3\2\2!G\5\4\3\20\"#\f") - buf.write("\16\2\2#$\t\4\2\2$G\5\4\3\17%&\f\r\2\2&\'\t\5\2\2\'G\5") - buf.write("\4\3\16()\f\f\2\2)*\t\6\2\2*G\5\4\3\r+-\f\13\2\2,.\7&") - buf.write("\2\2-,\3\2\2\2-.\3\2\2\2./\3\2\2\2/\60\7%\2\2\60G\5\4") - buf.write("\3\f\61\62\f\t\2\2\62\63\t\7\2\2\63G\5\4\3\n\64\65\f\b") - buf.write("\2\2\65\66\t\b\2\2\66G\5\4\3\t\679\f\7\2\28:\7&\2\298") - buf.write("\3\2\2\29:\3\2\2\2:;\3\2\2\2;<\7 \2\2@\7&\2\2?>\3\2\2\2?@\3\2\2\2@A\3\2\2\2AB\7\"\2\2") - buf.write("BC\7\27\2\2CD\5\b\5\2DE\7\16\2\2EG\3\2\2\2F\37\3\2\2\2") - buf.write("F\"\3\2\2\2F%\3\2\2\2F(\3\2\2\2F+\3\2\2\2F\61\3\2\2\2") - buf.write("F\64\3\2\2\2F\67\3\2\2\2F=\3\2\2\2GJ\3\2\2\2HF\3\2\2\2") - buf.write("HI\3\2\2\2I\5\3\2\2\2JH\3\2\2\2KL\5\f\7\2LN\7\27\2\2M") - buf.write("O\5\b\5\2NM\3\2\2\2NO\3\2\2\2OP\3\2\2\2PQ\7\16\2\2Q\7") - buf.write("\3\2\2\2RW\5\4\3\2ST\7\20\2\2TV\5\4\3\2US\3\2\2\2VY\3") - buf.write("\2\2\2WU\3\2\2\2WX\3\2\2\2X\t\3\2\2\2YW\3\2\2\2Z\\\7\32") - buf.write("\2\2[Z\3\2\2\2[\\\3\2\2\2\\`\3\2\2\2]^\5\f\7\2^_\7\30") - buf.write("\2\2_a\3\2\2\2`]\3\2\2\2`a\3\2\2\2ab\3\2\2\2be\5\f\7\2") - buf.write("cd\7\17\2\2df\5\f\7\2ec\3\2\2\2ef\3\2\2\2f\13\3\2\2\2") - buf.write("gl\7\65\2\2hi\7\21\2\2ik\7\65\2\2jh\3\2\2\2kn\3\2\2\2") - buf.write("lj\3\2\2\2lm\3\2\2\2m\r\3\2\2\2nl\3\2\2\2op\t\t\2\2p\17") - buf.write("\3\2\2\2\16\35-9?FHNW[`el") + buf.write("\u0084\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\5\3\"\n\3\3\3\3\3\3\3\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\62\n\3\3\3\3\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3>\n\3\3\3\3\3\3\3") + buf.write("\3\3\5\3D\n\3\3\3\3\3\3\3\3\3\3\3\7\3K\n\3\f\3\16\3N\13") + 
buf.write("\3\3\4\3\4\3\4\7\4S\n\4\f\4\16\4V\13\4\3\5\3\5\3\5\5\5") + buf.write("[\n\5\3\5\3\5\3\6\3\6\3\6\7\6b\n\6\f\6\16\6e\13\6\3\7") + buf.write("\3\7\5\7i\n\7\3\7\3\7\3\b\5\bn\n\b\3\b\3\b\3\b\5\bs\n") + buf.write("\b\3\b\3\b\3\b\5\bx\n\b\3\t\3\t\3\t\7\t}\n\t\f\t\16\t") + buf.write("\u0080\13\t\3\n\3\n\3\n\2\3\4\13\2\4\6\b\n\f\16\20\22") + buf.write("\2\n\5\2\25\25\31\31&&\5\2\22\22\26\26\34\34\4\2\25\25") + buf.write("\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r##\4\2\3\3\37\37\4") + buf.write("\2\t\t))\6\2!!((*,//\2\u0092\2\24\3\2\2\2\4!\3\2\2\2\6") + buf.write("O\3\2\2\2\bW\3\2\2\2\n^\3\2\2\2\fh\3\2\2\2\16m\3\2\2\2") + buf.write("\20y\3\2\2\2\22\u0081\3\2\2\2\24\25\5\4\3\2\25\26\7\2") + buf.write("\2\3\26\3\3\2\2\2\27\30\b\3\1\2\30\31\t\2\2\2\31\"\5\4") + buf.write("\3\20\32\33\7\27\2\2\33\34\5\4\3\2\34\35\7\16\2\2\35\"") + buf.write("\3\2\2\2\36\"\5\22\n\2\37\"\5\b\5\2 \"\5\16\b\2!\27\3") + buf.write("\2\2\2!\32\3\2\2\2!\36\3\2\2\2!\37\3\2\2\2! \3\2\2\2\"") + buf.write("L\3\2\2\2#$\f\17\2\2$%\t\3\2\2%K\5\4\3\20&\'\f\16\2\2") + buf.write("\'(\t\4\2\2(K\5\4\3\17)*\f\r\2\2*+\t\5\2\2+K\5\4\3\16") + buf.write(",-\f\f\2\2-.\t\6\2\2.K\5\4\3\r/\61\f\13\2\2\60\62\7&\2") + buf.write("\2\61\60\3\2\2\2\61\62\3\2\2\2\62\63\3\2\2\2\63\64\7%") + buf.write("\2\2\64K\5\4\3\f\65\66\f\t\2\2\66\67\t\7\2\2\67K\5\4\3") + buf.write("\n89\f\b\2\29:\t\b\2\2:K\5\4\3\t;=\f\7\2\2<>\7&\2\2=<") + buf.write("\3\2\2\2=>\3\2\2\2>?\3\2\2\2?@\7 \2\2@K\5\4\3\bAC\f\n") + buf.write("\2\2BD\7&\2\2CB\3\2\2\2CD\3\2\2\2DE\3\2\2\2EF\7\"\2\2") + buf.write("FG\7\27\2\2GH\5\6\4\2HI\7\16\2\2IK\3\2\2\2J#\3\2\2\2J") + buf.write("&\3\2\2\2J)\3\2\2\2J,\3\2\2\2J/\3\2\2\2J\65\3\2\2\2J8") + buf.write("\3\2\2\2J;\3\2\2\2JA\3\2\2\2KN\3\2\2\2LJ\3\2\2\2LM\3\2") + buf.write("\2\2M\5\3\2\2\2NL\3\2\2\2OT\5\4\3\2PQ\7\20\2\2QS\5\4\3") + buf.write("\2RP\3\2\2\2SV\3\2\2\2TR\3\2\2\2TU\3\2\2\2U\7\3\2\2\2") + buf.write("VT\3\2\2\2WX\5\20\t\2XZ\7\27\2\2Y[\5\n\6\2ZY\3\2\2\2Z") + buf.write("[\3\2\2\2[\\\3\2\2\2\\]\7\16\2\2]\t\3\2\2\2^c\5\f\7\2") + buf.write("_`\7\20\2\2`b\5\f\7\2a_\3\2\2\2be\3\2\2\2ca\3\2\2\2cd") + buf.write("\3\2\2\2d\13\3\2\2\2ec\3\2\2\2fg\7\65\2\2gi\7\r\2\2hf") + buf.write("\3\2\2\2hi\3\2\2\2ij\3\2\2\2jk\5\4\3\2k\r\3\2\2\2ln\7") + buf.write("\32\2\2ml\3\2\2\2mn\3\2\2\2nr\3\2\2\2op\5\20\t\2pq\7\30") + buf.write("\2\2qs\3\2\2\2ro\3\2\2\2rs\3\2\2\2st\3\2\2\2tw\5\20\t") + buf.write("\2uv\7\17\2\2vx\5\20\t\2wu\3\2\2\2wx\3\2\2\2x\17\3\2\2") + buf.write("\2y~\7\65\2\2z{\7\21\2\2{}\7\65\2\2|z\3\2\2\2}\u0080\3") + buf.write("\2\2\2~|\3\2\2\2~\177\3\2\2\2\177\21\3\2\2\2\u0080~\3") + buf.write("\2\2\2\u0081\u0082\t\t\2\2\u0082\23\3\2\2\2\20!\61=CJ") + buf.write("LTZchmrw~") return buf.getvalue() @@ -84,14 +90,16 @@ class PqlParser ( Parser ): RULE_parseTel = 0 RULE_expr = 1 - RULE_fn = 2 - RULE_exprList = 3 - RULE_taxon = 4 - RULE_identifierMultipart = 5 - RULE_literalValue = 6 + RULE_exprList = 2 + RULE_fn = 3 + RULE_fnArgs = 4 + RULE_fnArg = 5 + RULE_taxon = 6 + RULE_identifierMultipart = 7 + RULE_literalValue = 8 - ruleNames = [ "parseTel", "expr", "fn", "exprList", "taxon", "identifierMultipart", - "literalValue" ] + ruleNames = [ "parseTel", "expr", "exprList", "fn", "fnArgs", "fnArg", + "taxon", "identifierMultipart", "literalValue" ] EOF = Token.EOF AND=1 @@ -194,9 +202,9 @@ def parseTel(self): self.enterRule(localctx, 0, self.RULE_parseTel) try: self.enterOuterAlt(localctx, 1) - self.state = 14 + self.state = 18 self.expr(0) - self.state = 15 + self.state = 19 self.match(PqlParser.EOF) except RecognitionException as re: localctx.exception = 
re @@ -344,11 +352,11 @@ def expr(self, _p:int=0): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 27 + self.state = 31 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,0,self._ctx) if la_ == 1: - self.state = 18 + self.state = 22 localctx.unary_operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.PLUS) | (1 << PqlParser.K_NOT))) != 0)): @@ -356,37 +364,37 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 19 + self.state = 23 localctx.right = self.expr(14) pass elif la_ == 2: - self.state = 20 + self.state = 24 self.match(PqlParser.OPEN_PAREN) - self.state = 21 + self.state = 25 localctx.inner = self.expr(0) - self.state = 22 + self.state = 26 self.match(PqlParser.CLOSE_PAREN) pass elif la_ == 3: - self.state = 24 + self.state = 28 self.literalValue() pass elif la_ == 4: - self.state = 25 + self.state = 29 self.fn() pass elif la_ == 5: - self.state = 26 + self.state = 30 self.taxon() pass self._ctx.stop = self._input.LT(-1) - self.state = 70 + self.state = 74 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,5,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: @@ -394,18 +402,18 @@ def expr(self, _p:int=0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 68 + self.state = 72 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input,4,self._ctx) if la_ == 1: localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 29 + self.state = 33 if not self.precpred(self._ctx, 13): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") - self.state = 30 + self.state = 34 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.FORWARD_SLASH) | (1 << PqlParser.MOD) | (1 << PqlParser.STAR))) != 0)): @@ -413,7 +421,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 31 + self.state = 35 localctx.right = self.expr(14) pass @@ -421,11 +429,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 32 + self.state = 36 if not self.precpred(self._ctx, 12): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") - self.state = 33 + self.state = 37 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.MINUS or _la==PqlParser.PLUS): @@ -433,7 +441,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 34 + self.state = 38 localctx.right = self.expr(13) pass @@ -441,11 +449,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 35 + self.state = 39 if not self.precpred(self._ctx, 11): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 11)") - self.state = 36 + self.state = 40 localctx.operator = self._input.LT(1) _la 
= self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.GT_EQ) | (1 << PqlParser.LT_EQ) | (1 << PqlParser.GT) | (1 << PqlParser.LT))) != 0)): @@ -453,7 +461,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 37 + self.state = 41 localctx.right = self.expr(12) pass @@ -461,11 +469,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 38 + self.state = 42 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 39 + self.state = 43 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN) | (1 << PqlParser.K_IS))) != 0)): @@ -473,7 +481,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 40 + self.state = 44 localctx.right = self.expr(11) pass @@ -481,21 +489,21 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 41 + self.state = 45 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 43 + self.state = 47 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_NOT: - self.state = 42 + self.state = 46 localctx.is_negated = self.match(PqlParser.K_NOT) - self.state = 45 + self.state = 49 localctx.operator = self.match(PqlParser.K_LIKE) - self.state = 46 + self.state = 50 localctx.right = self.expr(10) pass @@ -503,11 +511,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 47 + self.state = 51 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 48 + self.state = 52 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.AND or _la==PqlParser.K_AND): @@ -515,7 +523,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 49 + self.state = 53 localctx.right = self.expr(8) pass @@ -523,11 +531,11 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 50 + self.state = 54 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 51 + self.state = 55 localctx.operator = self._input.LT(1) _la = self._input.LA(1) if not(_la==PqlParser.OR or _la==PqlParser.K_OR): @@ -535,7 +543,7 @@ def expr(self, _p:int=0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 52 + self.state = 56 localctx.right = self.expr(7) pass @@ -543,21 +551,21 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx 
self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 53 + self.state = 57 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 55 + self.state = 59 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_NOT: - self.state = 54 + self.state = 58 localctx.is_negated = self.match(PqlParser.K_NOT) - self.state = 57 + self.state = 61 localctx.operator = self.match(PqlParser.K_BETWEEN) - self.state = 58 + self.state = 62 localctx.right = self.expr(6) pass @@ -565,30 +573,30 @@ def expr(self, _p:int=0): localctx = PqlParser.ExprContext(self, _parentctx, _parentState) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 59 + self.state = 63 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 61 + self.state = 65 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.K_NOT: - self.state = 60 + self.state = 64 localctx.is_negated = self.match(PqlParser.K_NOT) - self.state = 63 + self.state = 67 localctx.operator = self.match(PqlParser.K_IN) - self.state = 64 + self.state = 68 self.match(PqlParser.OPEN_PAREN) - self.state = 65 + self.state = 69 localctx.right_list = self.exprList() - self.state = 66 + self.state = 70 self.match(PqlParser.CLOSE_PAREN) pass - self.state = 72 + self.state = 76 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input,5,self._ctx) @@ -601,13 +609,82 @@ def expr(self, _p:int=0): return localctx + class ExprListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PqlParser.ExprContext) + else: + return self.getTypedRuleContext(PqlParser.ExprContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PqlParser.COMMA) + else: + return self.getToken(PqlParser.COMMA, i) + + def getRuleIndex(self): + return PqlParser.RULE_exprList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExprList" ): + listener.enterExprList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExprList" ): + listener.exitExprList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExprList" ): + return visitor.visitExprList(self) + else: + return visitor.visitChildren(self) + + + + + def exprList(self): + + localctx = PqlParser.ExprListContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_exprList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 77 + self.expr(0) + self.state = 82 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==PqlParser.COMMA: + self.state = 78 + self.match(PqlParser.COMMA) + self.state = 79 + self.expr(0) + self.state = 84 + self._errHandler.sync(self) + _la = self._input.LA(1) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class FnContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): 
super().__init__(parent, invokingState) self.parser = parser self.function_name = None # IdentifierMultipartContext - self.arguments = None # ExprListContext + self.arguments = None # FnArgsContext def OPEN_PAREN(self): return self.getToken(PqlParser.OPEN_PAREN, 0) @@ -619,8 +696,8 @@ def identifierMultipart(self): return self.getTypedRuleContext(PqlParser.IdentifierMultipartContext,0) - def exprList(self): - return self.getTypedRuleContext(PqlParser.ExprListContext,0) + def fnArgs(self): + return self.getTypedRuleContext(PqlParser.FnArgsContext,0) def getRuleIndex(self): @@ -646,23 +723,23 @@ def accept(self, visitor:ParseTreeVisitor): def fn(self): localctx = PqlParser.FnContext(self, self._ctx, self.state) - self.enterRule(localctx, 4, self.RULE_fn) + self.enterRule(localctx, 6, self.RULE_fn) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 73 + self.state = 85 localctx.function_name = self.identifierMultipart() - self.state = 74 + self.state = 86 self.match(PqlParser.OPEN_PAREN) - self.state = 76 + self.state = 88 self._errHandler.sync(self) _la = self._input.LA(1) if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NOT) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING) | (1 << PqlParser.WORD))) != 0): - self.state = 75 - localctx.arguments = self.exprList() + self.state = 87 + localctx.arguments = self.fnArgs() - self.state = 78 + self.state = 90 self.match(PqlParser.CLOSE_PAREN) except RecognitionException as re: localctx.exception = re @@ -673,17 +750,17 @@ def fn(self): return localctx - class ExprListContext(ParserRuleContext): + class FnArgsContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser - def expr(self, i:int=None): + def fnArg(self, i:int=None): if i is None: - return self.getTypedRuleContexts(PqlParser.ExprContext) + return self.getTypedRuleContexts(PqlParser.FnArgContext) else: - return self.getTypedRuleContext(PqlParser.ExprContext,i) + return self.getTypedRuleContext(PqlParser.FnArgContext,i) def COMMA(self, i:int=None): @@ -693,43 +770,43 @@ def COMMA(self, i:int=None): return self.getToken(PqlParser.COMMA, i) def getRuleIndex(self): - return PqlParser.RULE_exprList + return PqlParser.RULE_fnArgs def enterRule(self, listener:ParseTreeListener): - if hasattr( listener, "enterExprList" ): - listener.enterExprList(self) + if hasattr( listener, "enterFnArgs" ): + listener.enterFnArgs(self) def exitRule(self, listener:ParseTreeListener): - if hasattr( listener, "exitExprList" ): - listener.exitExprList(self) + if hasattr( listener, "exitFnArgs" ): + listener.exitFnArgs(self) def accept(self, visitor:ParseTreeVisitor): - if hasattr( visitor, "visitExprList" ): - return visitor.visitExprList(self) + if hasattr( visitor, "visitFnArgs" ): + return visitor.visitFnArgs(self) else: return visitor.visitChildren(self) - def exprList(self): + def fnArgs(self): - localctx = PqlParser.ExprListContext(self, self._ctx, self.state) - self.enterRule(localctx, 6, self.RULE_exprList) + localctx = PqlParser.FnArgsContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_fnArgs) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 80 - 
self.expr(0) - self.state = 85 + self.state = 92 + self.fnArg() + self.state = 97 self._errHandler.sync(self) _la = self._input.LA(1) while _la==PqlParser.COMMA: - self.state = 81 + self.state = 93 self.match(PqlParser.COMMA) - self.state = 82 - self.expr(0) - self.state = 87 + self.state = 94 + self.fnArg() + self.state = 99 self._errHandler.sync(self) _la = self._input.LA(1) @@ -742,6 +819,71 @@ def exprList(self): return localctx + class FnArgContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.argument_name = None # Token + self.argument_value = None # ExprContext + + def expr(self): + return self.getTypedRuleContext(PqlParser.ExprContext,0) + + + def ASSIGN(self): + return self.getToken(PqlParser.ASSIGN, 0) + + def WORD(self): + return self.getToken(PqlParser.WORD, 0) + + def getRuleIndex(self): + return PqlParser.RULE_fnArg + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterFnArg" ): + listener.enterFnArg(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitFnArg" ): + listener.exitFnArg(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitFnArg" ): + return visitor.visitFnArg(self) + else: + return visitor.visitChildren(self) + + + + + def fnArg(self): + + localctx = PqlParser.FnArgContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_fnArg) + try: + self.enterOuterAlt(localctx, 1) + self.state = 102 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,9,self._ctx) + if la_ == 1: + self.state = 100 + localctx.argument_name = self.match(PqlParser.WORD) + self.state = 101 + self.match(PqlParser.ASSIGN) + + + self.state = 104 + localctx.argument_value = self.expr(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class TaxonContext(ParserRuleContext): def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): @@ -791,37 +933,37 @@ def accept(self, visitor:ParseTreeVisitor): def taxon(self): localctx = PqlParser.TaxonContext(self, self._ctx, self.state) - self.enterRule(localctx, 8, self.RULE_taxon) + self.enterRule(localctx, 12, self.RULE_taxon) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 89 + self.state = 107 self._errHandler.sync(self) _la = self._input.LA(1) if _la==PqlParser.QUESTION_MARK: - self.state = 88 + self.state = 106 localctx.is_optional = self.match(PqlParser.QUESTION_MARK) - self.state = 94 + self.state = 112 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,9,self._ctx) + la_ = self._interp.adaptivePredict(self._input,11,self._ctx) if la_ == 1: - self.state = 91 + self.state = 109 localctx.namespace = self.identifierMultipart() - self.state = 92 + self.state = 110 self.match(PqlParser.PIPE) - self.state = 96 + self.state = 114 localctx.slug = self.identifierMultipart() - self.state = 99 + self.state = 117 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input,10,self._ctx) + la_ = self._interp.adaptivePredict(self._input,12,self._ctx) if la_ == 1: - self.state = 97 + self.state = 115 self.match(PqlParser.COLON) - self.state = 98 + self.state = 116 localctx.tag = self.identifierMultipart() @@ -875,23 +1017,23 @@ def accept(self, 
visitor:ParseTreeVisitor): def identifierMultipart(self): localctx = PqlParser.IdentifierMultipartContext(self, self._ctx, self.state) - self.enterRule(localctx, 10, self.RULE_identifierMultipart) + self.enterRule(localctx, 14, self.RULE_identifierMultipart) try: self.enterOuterAlt(localctx, 1) - self.state = 101 + self.state = 119 self.match(PqlParser.WORD) - self.state = 106 + self.state = 124 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,11,self._ctx) + _alt = self._interp.adaptivePredict(self._input,13,self._ctx) while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: if _alt==1: - self.state = 102 + self.state = 120 self.match(PqlParser.DOT) - self.state = 103 + self.state = 121 self.match(PqlParser.WORD) - self.state = 108 + self.state = 126 self._errHandler.sync(self) - _alt = self._interp.adaptivePredict(self._input,11,self._ctx) + _alt = self._interp.adaptivePredict(self._input,13,self._ctx) except RecognitionException as re: localctx.exception = re @@ -949,11 +1091,11 @@ def accept(self, visitor:ParseTreeVisitor): def literalValue(self): localctx = PqlParser.LiteralValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 12, self.RULE_literalValue) + self.enterRule(localctx, 16, self.RULE_literalValue) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 109 + self.state = 127 _la = self._input.LA(1) if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << PqlParser.K_FALSE) | (1 << PqlParser.K_NULL) | (1 << PqlParser.K_TRUE) | (1 << PqlParser.NUMERIC_LITERAL) | (1 << PqlParser.DOUBLE_QUOTED_STRING) | (1 << PqlParser.SINGLE_QUOTED_STRING))) != 0)): self._errHandler.recoverInline(self) diff --git a/python/src/pql_grammar/antlr/PqlParserListener.py b/python/src/pql_grammar/antlr/PqlParserListener.py index 4f57470..c133297 100644 --- a/python/src/pql_grammar/antlr/PqlParserListener.py +++ b/python/src/pql_grammar/antlr/PqlParserListener.py @@ -26,6 +26,15 @@ def exitExpr(self, ctx:PqlParser.ExprContext): pass + # Enter a parse tree produced by PqlParser#exprList. + def enterExprList(self, ctx:PqlParser.ExprListContext): + pass + + # Exit a parse tree produced by PqlParser#exprList. + def exitExprList(self, ctx:PqlParser.ExprListContext): + pass + + # Enter a parse tree produced by PqlParser#fn. def enterFn(self, ctx:PqlParser.FnContext): pass @@ -35,12 +44,21 @@ def exitFn(self, ctx:PqlParser.FnContext): pass - # Enter a parse tree produced by PqlParser#exprList. - def enterExprList(self, ctx:PqlParser.ExprListContext): + # Enter a parse tree produced by PqlParser#fnArgs. + def enterFnArgs(self, ctx:PqlParser.FnArgsContext): pass - # Exit a parse tree produced by PqlParser#exprList. - def exitExprList(self, ctx:PqlParser.ExprListContext): + # Exit a parse tree produced by PqlParser#fnArgs. + def exitFnArgs(self, ctx:PqlParser.FnArgsContext): + pass + + + # Enter a parse tree produced by PqlParser#fnArg. + def enterFnArg(self, ctx:PqlParser.FnArgContext): + pass + + # Exit a parse tree produced by PqlParser#fnArg. + def exitFnArg(self, ctx:PqlParser.FnArgContext): pass diff --git a/python/src/pql_grammar/antlr/PqlParserVisitor.py b/python/src/pql_grammar/antlr/PqlParserVisitor.py index 4caf92d..764036f 100644 --- a/python/src/pql_grammar/antlr/PqlParserVisitor.py +++ b/python/src/pql_grammar/antlr/PqlParserVisitor.py @@ -19,13 +19,23 @@ def visitExpr(self, ctx:PqlParser.ExprContext): return self.visitChildren(ctx) + # Visit a parse tree produced by PqlParser#exprList. 
+    def visitExprList(self, ctx:PqlParser.ExprListContext):
+        return self.visitChildren(ctx)
+
+
     # Visit a parse tree produced by PqlParser#fn.
     def visitFn(self, ctx:PqlParser.FnContext):
         return self.visitChildren(ctx)
-    # Visit a parse tree produced by PqlParser#exprList.
-    def visitExprList(self, ctx:PqlParser.ExprListContext):
+    # Visit a parse tree produced by PqlParser#fnArgs.
+    def visitFnArgs(self, ctx:PqlParser.FnArgsContext):
+        return self.visitChildren(ctx)
+
+
+    # Visit a parse tree produced by PqlParser#fnArg.
+    def visitFnArg(self, ctx:PqlParser.FnArgContext):
         return self.visitChildren(ctx)
diff --git a/python/src/pql_grammar/from_pql.py b/python/src/pql_grammar/from_pql.py
index d4034cd..6087c7d 100644
--- a/python/src/pql_grammar/from_pql.py
+++ b/python/src/pql_grammar/from_pql.py
@@ -1,9 +1,16 @@
 # fmt: off
-from antlr4 import CommonTokenStream, InputStream, ParserRuleContext
-from antlr4 import ParserRuleContext
+from antlr4 import (
+    CommonTokenStream,
+    InputStream,
+    ParserRuleContext,
+    RecognitionException,
+    Recognizer,
+    Token,
+)
+from antlr4.error.ErrorListener import ErrorListener
 from decimal import Decimal
-from typing import Optional, Tuple, List, Type, Any
+from typing import Optional, Tuple, List, Type, Any, Union
 from .antlr.PqlLexer import PqlLexer
 from .antlr.PqlParser import PqlParser
@@ -16,6 +23,59 @@ class ParseError(ValueError):
     pass
+
+class PqlErrorListener(ErrorListener):
+    # TODO: Contemplate DiagnosticErrorListener as base class for richer error reporting
+
+    def syntaxError(
+        self,
+        recognizer: Recognizer,
+        offending_symbol: Token,
+        line: int,
+        column: int,
+        msg: str,
+        e: RecognitionException
+    ):
+        # See chapter 9.2 "Altering and Redirecting ANTLR Error Messages"
+        # http://books.killf.info/%E7%BC%96%E8%AF%91%E5%8E%9F%E7%90%86/The%20Definitive%20ANTLR4%20Reference.pdf
+
+        tokens = recognizer.getInputStream()
+        input = full_text(tokens.tokenSource.inputStream)
+        # when input == '' splitlines makes it [] - empty. Need at least one line.
+        lines = input.splitlines() or ['']
+        error_line = lines[line - 1]
+        start = offending_symbol.start
+        stop = offending_symbol.stop
+
+        base_msg = f'Unexpected "{full_text(offending_symbol)}"' if offending_symbol else msg
+        base_msg = base_msg.replace('<EOF>', '')
+
+        if len(lines) > 1:
+            line_msg = f'line {line}, '
+        else:
+            line_msg = ''
+
+        # "unexpected end of line" errors have index reversed
+        # stop is smaller than start.
+        if start < stop:
+            pos_msg = f'positions {start+1} to {stop+1}'
+        else:
+            pos_msg = f'position {start+1}'
+
+
+        if len(error_line) <= start + 1:
+            error_line_focus = error_line
+        else:
+            error_line_focus = (
+                error_line[:start]
+                + '-->'
+                + error_line[start:stop+1]
+                + '<--'
+                + error_line[stop+1:]
+            )
+        msg = f'{base_msg} ({line_msg}{pos_msg}) in fragment "{error_line_focus}"'
+        raise ParseError(msg)
+
+
 def full_text(ctx: ParserRuleContext) -> str:
     # extracts full text from a tree of nodes,
     # including white space.
@@ -68,76 +128,50 @@ def unquote(s: str):
     # return s.replace('""', '"').replace("''", "'")
-class PqlAntlrToAstParser:
-
-    @classmethod
-    def unwrap_expr_parens(cls, e: PqlParser.ExprContext) -> PqlParser.ExprContext:
-        # it's allowed to wrap expressions into superflous amounts of parens
-        # (((column > 5)))
-        # These come across as triple-nested [TerminalNodeImpl('('), expr, TerminalNodeImpl(')')]
-        # Here we check for len == 3 and if last and first Terminals are (), return middle element - expression,
-        # Run this recursively.
- # inner attribute is enabled only on cleanly-paren-wrapped expressions - if e.inner: - return cls.unwrap_expr_parens(e.inner) - else: - return e - - @classmethod - def parse_taxon(cls, e: PqlParser.TaxonContext) -> ast.Taxon: - return ast.Taxon( - full_text(e.slug), - full_text(e.namespace), - bool(e.is_optional), - full_text(e.tag) - ) +class PqlVisitor(_PqlParserVisitor): - @classmethod - def parse_function_argument_pair(cls, e: PqlParser.ExprContext) -> Tuple[Optional[str],Any]: - e = cls.unwrap_expr_parens(e) - o = full_text(e.operator) - if o == '=': - arg_name = full_text(e.left) - arg_value = cls.parse_expr(e.right) - else: - arg_name = None - arg_value = cls.parse_expr(e) - return arg_name, arg_value + def visitErrorNode(self, node): + """ + Override this with no-op if you don't want automatic syntax errors emitted + """ + wrong_symbol = node.symbol.text + line = node.symbol.line + column = node.symbol.column + 1 + details = f'Unexpected symbol "{wrong_symbol}" on line {line}, position {column}' + raise ParseError(details) - @classmethod - def parse_function(cls, e: PqlParser.FnContext) -> ast.Function: - return ast.Function( - full_text(e.function_name), - tuple([ - cls.parse_function_argument_pair(expr) - for expr in e.arguments.expr() - ]) if e.arguments else None - ) + def visit_from_tel_string(self, tel: str): + inp_stream = InputStream(tel) + error_listener = PqlErrorListener() + lexer = PqlLexer(inp_stream) + lexer.removeErrorListeners() # default is PrintToConsole + lexer.addErrorListener(error_listener) + stream = CommonTokenStream(lexer) + parser = PqlParser(stream) + parser.removeErrorListeners() # default is PrintToConsole + parser.addErrorListener(error_listener) + tree = parser.parseTel() + return self.visit(tree) - @classmethod - def parse_literal(cls, e:PqlParser.LiteralValueContext): - return ast.Literal( - cls.parse_literal_value(e), - full_text(e) - ) + def visitParseTel(self, ctx:PqlParser.ParseTelContext): + return self.visitExpr(ctx.expr()) - @staticmethod - def parse_literal_value(e:PqlParser.LiteralValueContext): - is_number = bool(e.NUMERIC_LITERAL()) - is_string = bool(e.DOUBLE_QUOTED_STRING()) or bool(e.SINGLE_QUOTED_STRING()) - is_null = bool(e.K_NULL()) - is_bool = bool(e.K_TRUE()) or bool(e.K_FALSE()) + def _parse_literal(self, ctx: PqlParser.LiteralValueContext): + is_number = bool(ctx.NUMERIC_LITERAL()) + is_string = bool(ctx.DOUBLE_QUOTED_STRING()) or bool(ctx.SINGLE_QUOTED_STRING()) + is_null = bool(ctx.K_NULL()) + is_bool = bool(ctx.K_TRUE()) or bool(ctx.K_FALSE()) if is_null: return None if is_bool: - return bool(e.K_TRUE()) + return bool(ctx.K_TRUE()) try: - v = full_text(e) + v = full_text(ctx) except IndexError: - raise ParseError(f"Could not extract literal value node from '{e.getText()}'.") + raise ParseError(f"Could not extract literal value node from '{ctx.getText()}'.") if is_number: # TODO: contemplate decimal type instead @@ -154,20 +188,50 @@ def parse_literal_value(e:PqlParser.LiteralValueContext): return v - @classmethod - def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : - ctx = cls.unwrap_expr_parens(ctx) + def visitLiteralValue(self, ctx:PqlParser.LiteralValueContext): + return ast.Literal( + self._parse_literal(ctx), + full_text(ctx) + ) + + def visitTaxon(self, ctx:PqlParser.TaxonContext): + return ast.Taxon( + full_text(ctx.slug), + full_text(ctx.namespace), + bool(ctx.is_optional), + full_text(ctx.tag) + ) + + def visitFn(self, ctx:PqlParser.FnContext): + return ast.Function( + full_text(ctx.function_name), + 
tuple([ + # argument_name may be undefined, returning Null for name. + # that's fine. + # Null for name value means it's not named, but positional argument. + # positional args are stored as (None, arg_value) tuples. + (full_text(fn_arg.argument_name), self.visitExpr(fn_arg.argument_value)) + for fn_arg in ctx.arguments.fnArg() + ]) if ctx.arguments else None + ) + + def visitExpr(self, ctx:PqlParser.ExprContext): + # unpack parens + if ctx.inner: + return self.visitExpr(ctx.inner) v = ctx.literalValue() if v: - return cls.parse_literal(v) + return self.visitLiteralValue(v) v = ctx.unary_operator if v: operator = full_text(v).upper() - right = cls.parse_expr(ctx.right) - # some unary have no meaning + # expr + right = self.visitExpr(ctx.right) + + # some unary operators have no meaning # and packing them into AST just creates noise for consuming if operator == '+': # skip the BS. ignore the plus @@ -191,13 +255,12 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : isinstance(right, ast.Literal) ): # unlikely to ever happen, but still - v = not right.value + _v = not right.value return ast.Literal( - v, - 'true' if v else 'false' + _v, + 'true' if _v else 'false' ) - # else: # # cannot avoid packaging unary "-" separate. # # it's in front of a non-literal expression that need to be negated manually later @@ -228,11 +291,11 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : # IN-like cases are characterized by non-null `.right_list` (instead of .right) if ctx.right_list: right = [ - cls.parse_expr(expr) + self.visitExpr(expr) for expr in ctx.right_list.expr() ] else: - right = [cls.parse_expr(ctx.right)] + right = [self.visitExpr(ctx.right)] is_negated = ctx.is_negated @@ -251,10 +314,10 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : # Think of it as the only sane way to express what BETWEEN means. 
if op == 'BETWEEN': - left = cls.parse_expr(ctx.left) + left = self.visitExpr(ctx.left) # this one is an Expr('AND', [v1, v2])) - between_and = cls.parse_expr(ctx.right) + between_and = self.visitExpr(ctx.right) if ( isinstance(between_and, ast.Expr) and @@ -316,7 +379,7 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : ex = ast.Expr( op, - tuple([cls.parse_expr(ctx.left)] + right) + tuple([self.visitExpr(ctx.left)] + right) ) # lastly, some statements allow NOT before operator @@ -338,43 +401,12 @@ def parse_expr(cls, ctx: PqlParser.ExprContext) -> ast.Node : v: PqlParser.TaxonContext = ctx.taxon() if v: - return cls.parse_taxon(v) + return self.visitTaxon(v) v: PqlParser.FnContext = ctx.fn() if v: - return cls.parse_function(v) - - raise ParseError(f'Where expression "{full_text(ctx)}" is not supported yet.') - - -class PqlVisitor(_PqlParserVisitor): - - def visitErrorNode(self, node): - wrong_symbol = node.symbol.text - line = node.symbol.line - column = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" on line {line}, position {column}' - raise ParseError(details) - - def visit_from_tel_string(self, tel: str): - inp_stream = InputStream(tel) - lexer = PqlLexer(inp_stream) - stream = CommonTokenStream(lexer) - parser = PqlParser(stream) - tree = parser.parseTel() - self.visit(tree) + return self.visitFn(v) def from_tel(tel: str, cls:Type[PqlVisitor] = PqlVisitor) -> ast.Node: - - statements = [] - - class V(cls): - def visitExpr(self, ctx:PqlParser.ExprContext): - statements.append( - PqlAntlrToAstParser.parse_expr(ctx) - ) - - V().visit_from_tel_string(tel) - - return statements[0] if statements else None + return cls().visit_from_tel_string(tel) From 0bcf50f81257d9966f0c9294c183e4537e57448b Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Tue, 8 Dec 2020 01:49:07 -0800 Subject: [PATCH 31/32] add ILIKE support --- grammar/PqlLexer.g4 | 1 + grammar/PqlParser.g4 | 2 +- js-temp/PqlLexer.js | 615 +++++++++++----------- js-temp/PqlParser.js | 183 ++++--- python/src/pql_grammar/antlr/PqlLexer.py | 454 ++++++++-------- python/src/pql_grammar/antlr/PqlParser.py | 126 +++-- python/tests/tel_grammar_test.py | 3 +- 7 files changed, 709 insertions(+), 675 deletions(-) diff --git a/grammar/PqlLexer.g4 b/grammar/PqlLexer.g4 index 9a0b6c2..299f1ca 100644 --- a/grammar/PqlLexer.g4 +++ b/grammar/PqlLexer.g4 @@ -36,6 +36,7 @@ UNDER: '_'; K_AND : A N D; K_BETWEEN : B E T W E E N; K_FALSE : F A L S E; +K_ILIKE: I L I K E ; K_IN : I N; K_IS : I S; K_ISNULL : I S N U L L; diff --git a/grammar/PqlParser.g4 b/grammar/PqlParser.g4 index f0cd8e9..022a714 100644 --- a/grammar/PqlParser.g4 +++ b/grammar/PqlParser.g4 @@ -24,7 +24,7 @@ expr | left=expr operator=( PLUS | MINUS ) right=expr | left=expr operator=( LT | LT_EQ | GT | GT_EQ ) right=expr | left=expr operator=( ASSIGN | EQ | NOT_EQ1 | NOT_EQ2 | K_IS ) right=expr - | left=expr is_negated=K_NOT? operator=K_LIKE right=expr + | left=expr is_negated=K_NOT? operator=(K_LIKE | K_ILIKE) right=expr | left=expr is_negated=K_NOT? 
operator=K_IN OPEN_PAREN right_list=exprList CLOSE_PAREN | left=expr operator=( K_AND | AND ) right=expr | left=expr operator=( K_OR | OR ) right=expr diff --git a/js-temp/PqlLexer.js b/js-temp/PqlLexer.js index 26a3460..c75b504 100644 --- a/js-temp/PqlLexer.js +++ b/js-temp/PqlLexer.js @@ -5,7 +5,7 @@ var antlr4 = require('antlr4/index'); var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u00025\u01d9\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", + "\u00026\u01e1\b\u0001\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004", "\u0004\t\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t", "\u0007\u0004\b\t\b\u0004\t\t\t\u0004\n\t\n\u0004\u000b\t\u000b\u0004", "\f\t\f\u0004\r\t\r\u0004\u000e\t\u000e\u0004\u000f\t\u000f\u0004\u0010", @@ -20,284 +20,288 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "8\t8\u00049\t9\u0004:\t:\u0004;\t;\u0004<\t<\u0004=\t=\u0004>\t>\u0004", "?\t?\u0004@\t@\u0004A\tA\u0004B\tB\u0004C\tC\u0004D\tD\u0004E\tE\u0004", "F\tF\u0004G\tG\u0004H\tH\u0004I\tI\u0004J\tJ\u0004K\tK\u0004L\tL\u0004", - "M\tM\u0004N\tN\u0004O\tO\u0003\u0002\u0003\u0002\u0003\u0002\u0003\u0003", - "\u0003\u0003\u0003\u0003\u0003\u0004\u0003\u0004\u0003\u0004\u0003\u0005", - "\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006\u0003\u0006\u0003\u0007", - "\u0003\u0007\u0003\u0007\u0003\b\u0003\b\u0003\b\u0003\t\u0003\t\u0003", - "\t\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003\f\u0003\f\u0003", - "\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003\u000f\u0003\u0010", - "\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0012\u0003\u0012\u0003\u0013", - "\u0003\u0013\u0003\u0014\u0003\u0014\u0003\u0015\u0003\u0015\u0003\u0016", - "\u0003\u0016\u0003\u0017\u0003\u0017\u0003\u0018\u0003\u0018\u0003\u0019", - "\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003\u001b\u0003\u001c", - "\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003\u001e\u0003\u001e", - "\u0003\u001e\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f", - "\u0003\u001f\u0003\u001f\u0003\u001f\u0003 \u0003 \u0003 \u0003 \u0003", - " \u0003 \u0003!\u0003!\u0003!\u0003\"\u0003\"\u0003\"\u0003#\u0003#", - "\u0003#\u0003#\u0003#\u0003#\u0003#\u0003$\u0003$\u0003$\u0003$\u0003", - "$\u0003%\u0003%\u0003%\u0003%\u0003&\u0003&\u0003&\u0003&\u0003&\u0003", - "&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'\u0003\'\u0003(\u0003", - "(\u0003(\u0003)\u0003)\u0003)\u0003)\u0003)\u0003*\u0006*\u011f\n*\r", - "*\u000e*\u0120\u0003*\u0003*\u0007*\u0125\n*\f*\u000e*\u0128\u000b*", - "\u0005*\u012a\n*\u0003*\u0003*\u0005*\u012e\n*\u0003*\u0006*\u0131\n", - "*\r*\u000e*\u0132\u0005*\u0135\n*\u0003*\u0003*\u0006*\u0139\n*\r*\u000e", - "*\u013a\u0003*\u0003*\u0005*\u013f\n*\u0003*\u0006*\u0142\n*\r*\u000e", - "*\u0143\u0005*\u0146\n*\u0005*\u0148\n*\u0003+\u0003+\u0003,\u0003,", - "\u0003,\u0003,\u0007,\u0150\n,\f,\u000e,\u0153\u000b,\u0003,\u0003,", - "\u0003-\u0003-\u0003-\u0003-\u0007-\u015b\n-\f-\u000e-\u015e\u000b-", - "\u0003-\u0003-\u0003.\u0003.\u0003/\u0003/\u0003/\u0003/\u0007/\u0168", - "\n/\f/\u000e/\u016b\u000b/\u0003/\u0003/\u00030\u00030\u00030\u0003", - "0\u00070\u0173\n0\f0\u000e0\u0176\u000b0\u00030\u00030\u00031\u0003", - "1\u00031\u00031\u00031\u00051\u017f\n1\u00031\u00071\u0182\n1\f1\u000e", - "1\u0185\u000b1\u00031\u00031\u00032\u00032\u00032\u00032\u00072\u018d", - "\n2\f2\u000e2\u0190\u000b2\u00032\u00032\u00032\u00052\u0195\n2\u0003", - "2\u00032\u00033\u00033\u00033\u00033\u00034\u00034\u00074\u019f\n4\f", - 
"4\u000e4\u01a2\u000b4\u00035\u00035\u00036\u00036\u00037\u00037\u0003", - "8\u00038\u00039\u00039\u0003:\u0003:\u0003;\u0003;\u0003<\u0003<\u0003", - "=\u0003=\u0003>\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003", - "B\u0003B\u0003C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003", - "G\u0003G\u0003H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003", - "L\u0003L\u0003M\u0003M\u0003N\u0003N\u0003O\u0003O\u0003\u018e\u0002", - "P\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b\u000f", + "M\tM\u0004N\tN\u0004O\tO\u0004P\tP\u0003\u0002\u0003\u0002\u0003\u0002", + "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0004\u0003\u0004\u0003\u0004", + "\u0003\u0005\u0003\u0005\u0003\u0005\u0003\u0006\u0003\u0006\u0003\u0006", + "\u0003\u0007\u0003\u0007\u0003\u0007\u0003\b\u0003\b\u0003\b\u0003\t", + "\u0003\t\u0003\t\u0003\n\u0003\n\u0003\n\u0003\u000b\u0003\u000b\u0003", + "\f\u0003\f\u0003\r\u0003\r\u0003\u000e\u0003\u000e\u0003\u000f\u0003", + "\u000f\u0003\u0010\u0003\u0010\u0003\u0011\u0003\u0011\u0003\u0012\u0003", + "\u0012\u0003\u0013\u0003\u0013\u0003\u0014\u0003\u0014\u0003\u0015\u0003", + "\u0015\u0003\u0016\u0003\u0016\u0003\u0017\u0003\u0017\u0003\u0018\u0003", + "\u0018\u0003\u0019\u0003\u0019\u0003\u001a\u0003\u001a\u0003\u001b\u0003", + "\u001b\u0003\u001c\u0003\u001c\u0003\u001d\u0003\u001d\u0003\u001e\u0003", + "\u001e\u0003\u001e\u0003\u001e\u0003\u001f\u0003\u001f\u0003\u001f\u0003", + "\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003\u001f\u0003 \u0003", + " \u0003 \u0003 \u0003 \u0003 \u0003!\u0003!\u0003!\u0003!\u0003!\u0003", + "!\u0003\"\u0003\"\u0003\"\u0003#\u0003#\u0003#\u0003$\u0003$\u0003$", + "\u0003$\u0003$\u0003$\u0003$\u0003%\u0003%\u0003%\u0003%\u0003%\u0003", + "&\u0003&\u0003&\u0003&\u0003\'\u0003\'\u0003\'\u0003\'\u0003\'\u0003", + "\'\u0003\'\u0003\'\u0003(\u0003(\u0003(\u0003(\u0003(\u0003)\u0003)", + "\u0003)\u0003*\u0003*\u0003*\u0003*\u0003*\u0003+\u0006+\u0127\n+\r", + "+\u000e+\u0128\u0003+\u0003+\u0007+\u012d\n+\f+\u000e+\u0130\u000b+", + "\u0005+\u0132\n+\u0003+\u0003+\u0005+\u0136\n+\u0003+\u0006+\u0139\n", + "+\r+\u000e+\u013a\u0005+\u013d\n+\u0003+\u0003+\u0006+\u0141\n+\r+\u000e", + "+\u0142\u0003+\u0003+\u0005+\u0147\n+\u0003+\u0006+\u014a\n+\r+\u000e", + "+\u014b\u0005+\u014e\n+\u0005+\u0150\n+\u0003,\u0003,\u0003-\u0003-", + "\u0003-\u0003-\u0007-\u0158\n-\f-\u000e-\u015b\u000b-\u0003-\u0003-", + "\u0003.\u0003.\u0003.\u0003.\u0007.\u0163\n.\f.\u000e.\u0166\u000b.", + "\u0003.\u0003.\u0003/\u0003/\u00030\u00030\u00030\u00030\u00070\u0170", + "\n0\f0\u000e0\u0173\u000b0\u00030\u00030\u00031\u00031\u00031\u0003", + "1\u00071\u017b\n1\f1\u000e1\u017e\u000b1\u00031\u00031\u00032\u0003", + "2\u00032\u00032\u00032\u00052\u0187\n2\u00032\u00072\u018a\n2\f2\u000e", + "2\u018d\u000b2\u00032\u00032\u00033\u00033\u00033\u00033\u00073\u0195", + "\n3\f3\u000e3\u0198\u000b3\u00033\u00033\u00033\u00053\u019d\n3\u0003", + "3\u00033\u00034\u00034\u00034\u00034\u00035\u00035\u00075\u01a7\n5\f", + "5\u000e5\u01aa\u000b5\u00036\u00036\u00037\u00037\u00038\u00038\u0003", + "9\u00039\u0003:\u0003:\u0003;\u0003;\u0003<\u0003<\u0003=\u0003=\u0003", + ">\u0003>\u0003?\u0003?\u0003@\u0003@\u0003A\u0003A\u0003B\u0003B\u0003", + "C\u0003C\u0003D\u0003D\u0003E\u0003E\u0003F\u0003F\u0003G\u0003G\u0003", + "H\u0003H\u0003I\u0003I\u0003J\u0003J\u0003K\u0003K\u0003L\u0003L\u0003", + "M\u0003M\u0003N\u0003N\u0003O\u0003O\u0003P\u0003P\u0003\u0196\u0002", + "Q\u0003\u0003\u0005\u0004\u0007\u0005\t\u0006\u000b\u0007\r\b\u000f", 
"\t\u0011\n\u0013\u000b\u0015\f\u0017\r\u0019\u000e\u001b\u000f\u001d", "\u0010\u001f\u0011!\u0012#\u0013%\u0014\'\u0015)\u0016+\u0017-\u0018", "/\u00191\u001a3\u001b5\u001c7\u001d9\u001e;\u001f= ?!A\"C#E$G%I&K\'", - "M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i\u0002k\u0002m\u0002o\u0002q\u0002s\u0002", - "u\u0002w\u0002y\u0002{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002", - "\u0085\u0002\u0087\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002", - "\u0091\u0002\u0093\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002", - "\u009d\u0002\u0003\u0002$\u0004\u0002--//\u0003\u0002$$\u0003\u0002", - "))\u0004\u0002\f\f\u000f\u000f\u0005\u0002\u000b\r\u000f\u000f\"\"\u0005", - "\u0002C\\aac|\u0006\u00022;C\\aac|\u0003\u00022;\u0004\u0002CCcc\u0004", - "\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGgg\u0004\u0002", - "HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004\u0002LLl", - "l\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002PPpp\u0004", - "\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTtt\u0004\u0002", - "UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004\u0002YYy", - "y\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01d6\u0002", - "\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002\u0002", - "\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002\u0002", - "\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002\u0002", - "\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002\u0002", - "\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002\u0002", - "\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002\u0002", - "\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002\u0002", - "\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002\u0002\u0002\u0002", - "#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002\u0002\u0002\'\u0003", - "\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003\u0002", - "\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002\u0002", - "\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003\u0002\u0002\u0002", - "\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002\u0002\u0002\u0002", - "9\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002\u0002\u0002=\u0003", - "\u0002\u0002\u0002\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003\u0002", - "\u0002\u0002\u0002C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002\u0002", - "\u0002\u0002G\u0003\u0002\u0002\u0002\u0002I\u0003\u0002\u0002\u0002", - "\u0002K\u0003\u0002\u0002\u0002\u0002M\u0003\u0002\u0002\u0002\u0002", - "O\u0003\u0002\u0002\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002S\u0003", - "\u0002\u0002\u0002\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003\u0002", - "\u0002\u0002\u0002Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002\u0002", - "\u0002\u0002]\u0003\u0002\u0002\u0002\u0002_\u0003\u0002\u0002\u0002", - "\u0002a\u0003\u0002\u0002\u0002\u0002c\u0003\u0002\u0002\u0002\u0002", - "e\u0003\u0002\u0002\u0002\u0002g\u0003\u0002\u0002\u0002\u0003\u009f", - "\u0003\u0002\u0002\u0002\u0005\u00a2\u0003\u0002\u0002\u0002\u0007\u00a5", - "\u0003\u0002\u0002\u0002\t\u00a8\u0003\u0002\u0002\u0002\u000b\u00ab", - "\u0003\u0002\u0002\u0002\r\u00ae\u0003\u0002\u0002\u0002\u000f\u00b1", - "\u0003\u0002\u0002\u0002\u0011\u00b4\u0003\u0002\u0002\u0002\u0013\u00b7", - "\u0003\u0002\u0002\u0002\u0015\u00ba\u0003\u0002\u0002\u0002\u0017\u00bc", - "\u0003\u0002\u0002\u0002\u0019\u00be\u0003\u0002\u0002\u0002\u001b\u00c0", - "\u0003\u0002\u0002\u0002\u001d\u00c2\u0003\u0002\u0002\u0002\u001f\u00c4", - 
"\u0003\u0002\u0002\u0002!\u00c6\u0003\u0002\u0002\u0002#\u00c8\u0003", - "\u0002\u0002\u0002%\u00ca\u0003\u0002\u0002\u0002\'\u00cc\u0003\u0002", - "\u0002\u0002)\u00ce\u0003\u0002\u0002\u0002+\u00d0\u0003\u0002\u0002", - "\u0002-\u00d2\u0003\u0002\u0002\u0002/\u00d4\u0003\u0002\u0002\u0002", - "1\u00d6\u0003\u0002\u0002\u00023\u00d8\u0003\u0002\u0002\u00025\u00da", - "\u0003\u0002\u0002\u00027\u00dc\u0003\u0002\u0002\u00029\u00de\u0003", - "\u0002\u0002\u0002;\u00e0\u0003\u0002\u0002\u0002=\u00e4\u0003\u0002", - "\u0002\u0002?\u00ec\u0003\u0002\u0002\u0002A\u00f2\u0003\u0002\u0002", - "\u0002C\u00f5\u0003\u0002\u0002\u0002E\u00f8\u0003\u0002\u0002\u0002", - "G\u00ff\u0003\u0002\u0002\u0002I\u0104\u0003\u0002\u0002\u0002K\u0108", - "\u0003\u0002\u0002\u0002M\u0110\u0003\u0002\u0002\u0002O\u0115\u0003", - "\u0002\u0002\u0002Q\u0118\u0003\u0002\u0002\u0002S\u0147\u0003\u0002", - "\u0002\u0002U\u0149\u0003\u0002\u0002\u0002W\u014b\u0003\u0002\u0002", - "\u0002Y\u0156\u0003\u0002\u0002\u0002[\u0161\u0003\u0002\u0002\u0002", - "]\u0163\u0003\u0002\u0002\u0002_\u016e\u0003\u0002\u0002\u0002a\u017e", - "\u0003\u0002\u0002\u0002c\u0188\u0003\u0002\u0002\u0002e\u0198\u0003", - "\u0002\u0002\u0002g\u019c\u0003\u0002\u0002\u0002i\u01a3\u0003\u0002", - "\u0002\u0002k\u01a5\u0003\u0002\u0002\u0002m\u01a7\u0003\u0002\u0002", - "\u0002o\u01a9\u0003\u0002\u0002\u0002q\u01ab\u0003\u0002\u0002\u0002", - "s\u01ad\u0003\u0002\u0002\u0002u\u01af\u0003\u0002\u0002\u0002w\u01b1", - "\u0003\u0002\u0002\u0002y\u01b3\u0003\u0002\u0002\u0002{\u01b5\u0003", - "\u0002\u0002\u0002}\u01b7\u0003\u0002\u0002\u0002\u007f\u01b9\u0003", - "\u0002\u0002\u0002\u0081\u01bb\u0003\u0002\u0002\u0002\u0083\u01bd\u0003", - "\u0002\u0002\u0002\u0085\u01bf\u0003\u0002\u0002\u0002\u0087\u01c1\u0003", - "\u0002\u0002\u0002\u0089\u01c3\u0003\u0002\u0002\u0002\u008b\u01c5\u0003", - "\u0002\u0002\u0002\u008d\u01c7\u0003\u0002\u0002\u0002\u008f\u01c9\u0003", - "\u0002\u0002\u0002\u0091\u01cb\u0003\u0002\u0002\u0002\u0093\u01cd\u0003", - "\u0002\u0002\u0002\u0095\u01cf\u0003\u0002\u0002\u0002\u0097\u01d1\u0003", - "\u0002\u0002\u0002\u0099\u01d3\u0003\u0002\u0002\u0002\u009b\u01d5\u0003", - "\u0002\u0002\u0002\u009d\u01d7\u0003\u0002\u0002\u0002\u009f\u00a0\u0007", - "(\u0002\u0002\u00a0\u00a1\u0007(\u0002\u0002\u00a1\u0004\u0003\u0002", - "\u0002\u0002\u00a2\u00a3\u0007?\u0002\u0002\u00a3\u00a4\u0007?\u0002", - "\u0002\u00a4\u0006\u0003\u0002\u0002\u0002\u00a5\u00a6\u0007@\u0002", - "\u0002\u00a6\u00a7\u0007?\u0002\u0002\u00a7\b\u0003\u0002\u0002\u0002", - "\u00a8\u00a9\u0007>\u0002\u0002\u00a9\u00aa\u0007?\u0002\u0002\u00aa", - "\n\u0003\u0002\u0002\u0002\u00ab\u00ac\u0007#\u0002\u0002\u00ac\u00ad", - "\u0007?\u0002\u0002\u00ad\f\u0003\u0002\u0002\u0002\u00ae\u00af\u0007", - ">\u0002\u0002\u00af\u00b0\u0007@\u0002\u0002\u00b0\u000e\u0003\u0002", - "\u0002\u0002\u00b1\u00b2\u0007~\u0002\u0002\u00b2\u00b3\u0007~\u0002", - "\u0002\u00b3\u0010\u0003\u0002\u0002\u0002\u00b4\u00b5\u0007>\u0002", - "\u0002\u00b5\u00b6\u0007>\u0002\u0002\u00b6\u0012\u0003\u0002\u0002", - "\u0002\u00b7\u00b8\u0007@\u0002\u0002\u00b8\u00b9\u0007@\u0002\u0002", - "\u00b9\u0014\u0003\u0002\u0002\u0002\u00ba\u00bb\u0007(\u0002\u0002", - "\u00bb\u0016\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007?\u0002\u0002", - "\u00bd\u0018\u0003\u0002\u0002\u0002\u00be\u00bf\u0007+\u0002\u0002", - "\u00bf\u001a\u0003\u0002\u0002\u0002\u00c0\u00c1\u0007<\u0002\u0002", - "\u00c1\u001c\u0003\u0002\u0002\u0002\u00c2\u00c3\u0007.\u0002\u0002", - 
"\u00c3\u001e\u0003\u0002\u0002\u0002\u00c4\u00c5\u00070\u0002\u0002", - "\u00c5 \u0003\u0002\u0002\u0002\u00c6\u00c7\u00071\u0002\u0002\u00c7", - "\"\u0003\u0002\u0002\u0002\u00c8\u00c9\u0007@\u0002\u0002\u00c9$\u0003", - "\u0002\u0002\u0002\u00ca\u00cb\u0007>\u0002\u0002\u00cb&\u0003\u0002", - "\u0002\u0002\u00cc\u00cd\u0007/\u0002\u0002\u00cd(\u0003\u0002\u0002", - "\u0002\u00ce\u00cf\u0007\'\u0002\u0002\u00cf*\u0003\u0002\u0002\u0002", - "\u00d0\u00d1\u0007*\u0002\u0002\u00d1,\u0003\u0002\u0002\u0002\u00d2", - "\u00d3\u0007~\u0002\u0002\u00d3.\u0003\u0002\u0002\u0002\u00d4\u00d5", - "\u0007-\u0002\u0002\u00d50\u0003\u0002\u0002\u0002\u00d6\u00d7\u0007", - "A\u0002\u0002\u00d72\u0003\u0002\u0002\u0002\u00d8\u00d9\u0007=\u0002", - "\u0002\u00d94\u0003\u0002\u0002\u0002\u00da\u00db\u0007,\u0002\u0002", - "\u00db6\u0003\u0002\u0002\u0002\u00dc\u00dd\u0007\u0080\u0002\u0002", - "\u00dd8\u0003\u0002\u0002\u0002\u00de\u00df\u0007a\u0002\u0002\u00df", - ":\u0003\u0002\u0002\u0002\u00e0\u00e1\u0005k6\u0002\u00e1\u00e2\u0005", - "\u0085C\u0002\u00e2\u00e3\u0005q9\u0002\u00e3<\u0003\u0002\u0002\u0002", - "\u00e4\u00e5\u0005m7\u0002\u00e5\u00e6\u0005s:\u0002\u00e6\u00e7\u0005", - "\u0091I\u0002\u00e7\u00e8\u0005\u0097L\u0002\u00e8\u00e9\u0005s:\u0002", - "\u00e9\u00ea\u0005s:\u0002\u00ea\u00eb\u0005\u0085C\u0002\u00eb>\u0003", - "\u0002\u0002\u0002\u00ec\u00ed\u0005u;\u0002\u00ed\u00ee\u0005k6\u0002", - "\u00ee\u00ef\u0005\u0081A\u0002\u00ef\u00f0\u0005\u008fH\u0002\u00f0", - "\u00f1\u0005s:\u0002\u00f1@\u0003\u0002\u0002\u0002\u00f2\u00f3\u0005", - "{>\u0002\u00f3\u00f4\u0005\u0085C\u0002\u00f4B\u0003\u0002\u0002\u0002", - "\u00f5\u00f6\u0005{>\u0002\u00f6\u00f7\u0005\u008fH\u0002\u00f7D\u0003", - "\u0002\u0002\u0002\u00f8\u00f9\u0005{>\u0002\u00f9\u00fa\u0005\u008f", - "H\u0002\u00fa\u00fb\u0005\u0085C\u0002\u00fb\u00fc\u0005\u0093J\u0002", - "\u00fc\u00fd\u0005\u0081A\u0002\u00fd\u00fe\u0005\u0081A\u0002\u00fe", - "F\u0003\u0002\u0002\u0002\u00ff\u0100\u0005\u0081A\u0002\u0100\u0101", - "\u0005{>\u0002\u0101\u0102\u0005\u007f@\u0002\u0102\u0103\u0005s:\u0002", - "\u0103H\u0003\u0002\u0002\u0002\u0104\u0105\u0005\u0085C\u0002\u0105", - "\u0106\u0005\u0087D\u0002\u0106\u0107\u0005\u0091I\u0002\u0107J\u0003", - "\u0002\u0002\u0002\u0108\u0109\u0005\u0085C\u0002\u0109\u010a\u0005", - "\u0087D\u0002\u010a\u010b\u0005\u0091I\u0002\u010b\u010c\u0005\u0085", - "C\u0002\u010c\u010d\u0005\u0093J\u0002\u010d\u010e\u0005\u0081A\u0002", - "\u010e\u010f\u0005\u0081A\u0002\u010fL\u0003\u0002\u0002\u0002\u0110", - "\u0111\u0005\u0085C\u0002\u0111\u0112\u0005\u0093J\u0002\u0112\u0113", - "\u0005\u0081A\u0002\u0113\u0114\u0005\u0081A\u0002\u0114N\u0003\u0002", - "\u0002\u0002\u0115\u0116\u0005\u0087D\u0002\u0116\u0117\u0005\u008d", - "G\u0002\u0117P\u0003\u0002\u0002\u0002\u0118\u0119\u0005\u0091I\u0002", - "\u0119\u011a\u0005\u008dG\u0002\u011a\u011b\u0005\u0093J\u0002\u011b", - "\u011c\u0005s:\u0002\u011cR\u0003\u0002\u0002\u0002\u011d\u011f\u0005", - "i5\u0002\u011e\u011d\u0003\u0002\u0002\u0002\u011f\u0120\u0003\u0002", - "\u0002\u0002\u0120\u011e\u0003\u0002\u0002\u0002\u0120\u0121\u0003\u0002", - "\u0002\u0002\u0121\u0129\u0003\u0002\u0002\u0002\u0122\u0126\u00070", - "\u0002\u0002\u0123\u0125\u0005i5\u0002\u0124\u0123\u0003\u0002\u0002", - "\u0002\u0125\u0128\u0003\u0002\u0002\u0002\u0126\u0124\u0003\u0002\u0002", - "\u0002\u0126\u0127\u0003\u0002\u0002\u0002\u0127\u012a\u0003\u0002\u0002", - "\u0002\u0128\u0126\u0003\u0002\u0002\u0002\u0129\u0122\u0003\u0002\u0002", - 
"\u0002\u0129\u012a\u0003\u0002\u0002\u0002\u012a\u0134\u0003\u0002\u0002", - "\u0002\u012b\u012d\u0005s:\u0002\u012c\u012e\t\u0002\u0002\u0002\u012d", - "\u012c\u0003\u0002\u0002\u0002\u012d\u012e\u0003\u0002\u0002\u0002\u012e", - "\u0130\u0003\u0002\u0002\u0002\u012f\u0131\u0005i5\u0002\u0130\u012f", - "\u0003\u0002\u0002\u0002\u0131\u0132\u0003\u0002\u0002\u0002\u0132\u0130", - "\u0003\u0002\u0002\u0002\u0132\u0133\u0003\u0002\u0002\u0002\u0133\u0135", - "\u0003\u0002\u0002\u0002\u0134\u012b\u0003\u0002\u0002\u0002\u0134\u0135", - "\u0003\u0002\u0002\u0002\u0135\u0148\u0003\u0002\u0002\u0002\u0136\u0138", - "\u00070\u0002\u0002\u0137\u0139\u0005i5\u0002\u0138\u0137\u0003\u0002", - "\u0002\u0002\u0139\u013a\u0003\u0002\u0002\u0002\u013a\u0138\u0003\u0002", - "\u0002\u0002\u013a\u013b\u0003\u0002\u0002\u0002\u013b\u0145\u0003\u0002", - "\u0002\u0002\u013c\u013e\u0005s:\u0002\u013d\u013f\t\u0002\u0002\u0002", - "\u013e\u013d\u0003\u0002\u0002\u0002\u013e\u013f\u0003\u0002\u0002\u0002", - "\u013f\u0141\u0003\u0002\u0002\u0002\u0140\u0142\u0005i5\u0002\u0141", - "\u0140\u0003\u0002\u0002\u0002\u0142\u0143\u0003\u0002\u0002\u0002\u0143", - "\u0141\u0003\u0002\u0002\u0002\u0143\u0144\u0003\u0002\u0002\u0002\u0144", - "\u0146\u0003\u0002\u0002\u0002\u0145\u013c\u0003\u0002\u0002\u0002\u0145", - "\u0146\u0003\u0002\u0002\u0002\u0146\u0148\u0003\u0002\u0002\u0002\u0147", - "\u011e\u0003\u0002\u0002\u0002\u0147\u0136\u0003\u0002\u0002\u0002\u0148", - "T\u0003\u0002\u0002\u0002\u0149\u014a\u0005W,\u0002\u014aV\u0003\u0002", - "\u0002\u0002\u014b\u0151\u0007$\u0002\u0002\u014c\u014d\u0007^\u0002", - "\u0002\u014d\u0150\u0007$\u0002\u0002\u014e\u0150\n\u0003\u0002\u0002", - "\u014f\u014c\u0003\u0002\u0002\u0002\u014f\u014e\u0003\u0002\u0002\u0002", - "\u0150\u0153\u0003\u0002\u0002\u0002\u0151\u014f\u0003\u0002\u0002\u0002", - "\u0151\u0152\u0003\u0002\u0002\u0002\u0152\u0154\u0003\u0002\u0002\u0002", - "\u0153\u0151\u0003\u0002\u0002\u0002\u0154\u0155\u0007$\u0002\u0002", - "\u0155X\u0003\u0002\u0002\u0002\u0156\u015c\u0007$\u0002\u0002\u0157", - "\u0158\u0007$\u0002\u0002\u0158\u015b\u0007$\u0002\u0002\u0159\u015b", - "\n\u0003\u0002\u0002\u015a\u0157\u0003\u0002\u0002\u0002\u015a\u0159", - "\u0003\u0002\u0002\u0002\u015b\u015e\u0003\u0002\u0002\u0002\u015c\u015a", - "\u0003\u0002\u0002\u0002\u015c\u015d\u0003\u0002\u0002\u0002\u015d\u015f", - "\u0003\u0002\u0002\u0002\u015e\u015c\u0003\u0002\u0002\u0002\u015f\u0160", - "\u0007$\u0002\u0002\u0160Z\u0003\u0002\u0002\u0002\u0161\u0162\u0005", - "]/\u0002\u0162\\\u0003\u0002\u0002\u0002\u0163\u0169\u0007)\u0002\u0002", - "\u0164\u0165\u0007^\u0002\u0002\u0165\u0168\u0007)\u0002\u0002\u0166", - "\u0168\n\u0004\u0002\u0002\u0167\u0164\u0003\u0002\u0002\u0002\u0167", - "\u0166\u0003\u0002\u0002\u0002\u0168\u016b\u0003\u0002\u0002\u0002\u0169", - "\u0167\u0003\u0002\u0002\u0002\u0169\u016a\u0003\u0002\u0002\u0002\u016a", - "\u016c\u0003\u0002\u0002\u0002\u016b\u0169\u0003\u0002\u0002\u0002\u016c", - "\u016d\u0007)\u0002\u0002\u016d^\u0003\u0002\u0002\u0002\u016e\u0174", - "\u0007)\u0002\u0002\u016f\u0170\u0007)\u0002\u0002\u0170\u0173\u0007", - ")\u0002\u0002\u0171\u0173\n\u0004\u0002\u0002\u0172\u016f\u0003\u0002", - "\u0002\u0002\u0172\u0171\u0003\u0002\u0002\u0002\u0173\u0176\u0003\u0002", - "\u0002\u0002\u0174\u0172\u0003\u0002\u0002\u0002\u0174\u0175\u0003\u0002", - "\u0002\u0002\u0175\u0177\u0003\u0002\u0002\u0002\u0176\u0174\u0003\u0002", - "\u0002\u0002\u0177\u0178\u0007)\u0002\u0002\u0178`\u0003\u0002\u0002", - 
"\u0002\u0179\u017a\u0007/\u0002\u0002\u017a\u017f\u0007/\u0002\u0002", - "\u017b\u017c\u00071\u0002\u0002\u017c\u017f\u00071\u0002\u0002\u017d", - "\u017f\u0007%\u0002\u0002\u017e\u0179\u0003\u0002\u0002\u0002\u017e", - "\u017b\u0003\u0002\u0002\u0002\u017e\u017d\u0003\u0002\u0002\u0002\u017f", - "\u0183\u0003\u0002\u0002\u0002\u0180\u0182\n\u0005\u0002\u0002\u0181", - "\u0180\u0003\u0002\u0002\u0002\u0182\u0185\u0003\u0002\u0002\u0002\u0183", - "\u0181\u0003\u0002\u0002\u0002\u0183\u0184\u0003\u0002\u0002\u0002\u0184", - "\u0186\u0003\u0002\u0002\u0002\u0185\u0183\u0003\u0002\u0002\u0002\u0186", - "\u0187\b1\u0002\u0002\u0187b\u0003\u0002\u0002\u0002\u0188\u0189\u0007", - "1\u0002\u0002\u0189\u018a\u0007,\u0002\u0002\u018a\u018e\u0003\u0002", - "\u0002\u0002\u018b\u018d\u000b\u0002\u0002\u0002\u018c\u018b\u0003\u0002", - "\u0002\u0002\u018d\u0190\u0003\u0002\u0002\u0002\u018e\u018f\u0003\u0002", - "\u0002\u0002\u018e\u018c\u0003\u0002\u0002\u0002\u018f\u0194\u0003\u0002", - "\u0002\u0002\u0190\u018e\u0003\u0002\u0002\u0002\u0191\u0192\u0007,", - "\u0002\u0002\u0192\u0195\u00071\u0002\u0002\u0193\u0195\u0007\u0002", - "\u0002\u0003\u0194\u0191\u0003\u0002\u0002\u0002\u0194\u0193\u0003\u0002", - "\u0002\u0002\u0195\u0196\u0003\u0002\u0002\u0002\u0196\u0197\b2\u0002", - "\u0002\u0197d\u0003\u0002\u0002\u0002\u0198\u0199\t\u0006\u0002\u0002", - "\u0199\u019a\u0003\u0002\u0002\u0002\u019a\u019b\b3\u0002\u0002\u019b", - "f\u0003\u0002\u0002\u0002\u019c\u01a0\t\u0007\u0002\u0002\u019d\u019f", - "\t\b\u0002\u0002\u019e\u019d\u0003\u0002\u0002\u0002\u019f\u01a2\u0003", - "\u0002\u0002\u0002\u01a0\u019e\u0003\u0002\u0002\u0002\u01a0\u01a1\u0003", - "\u0002\u0002\u0002\u01a1h\u0003\u0002\u0002\u0002\u01a2\u01a0\u0003", - "\u0002\u0002\u0002\u01a3\u01a4\t\t\u0002\u0002\u01a4j\u0003\u0002\u0002", - "\u0002\u01a5\u01a6\t\n\u0002\u0002\u01a6l\u0003\u0002\u0002\u0002\u01a7", - "\u01a8\t\u000b\u0002\u0002\u01a8n\u0003\u0002\u0002\u0002\u01a9\u01aa", - "\t\f\u0002\u0002\u01aap\u0003\u0002\u0002\u0002\u01ab\u01ac\t\r\u0002", - "\u0002\u01acr\u0003\u0002\u0002\u0002\u01ad\u01ae\t\u000e\u0002\u0002", - "\u01aet\u0003\u0002\u0002\u0002\u01af\u01b0\t\u000f\u0002\u0002\u01b0", - "v\u0003\u0002\u0002\u0002\u01b1\u01b2\t\u0010\u0002\u0002\u01b2x\u0003", - "\u0002\u0002\u0002\u01b3\u01b4\t\u0011\u0002\u0002\u01b4z\u0003\u0002", - "\u0002\u0002\u01b5\u01b6\t\u0012\u0002\u0002\u01b6|\u0003\u0002\u0002", - "\u0002\u01b7\u01b8\t\u0013\u0002\u0002\u01b8~\u0003\u0002\u0002\u0002", - "\u01b9\u01ba\t\u0014\u0002\u0002\u01ba\u0080\u0003\u0002\u0002\u0002", - "\u01bb\u01bc\t\u0015\u0002\u0002\u01bc\u0082\u0003\u0002\u0002\u0002", - "\u01bd\u01be\t\u0016\u0002\u0002\u01be\u0084\u0003\u0002\u0002\u0002", - "\u01bf\u01c0\t\u0017\u0002\u0002\u01c0\u0086\u0003\u0002\u0002\u0002", - "\u01c1\u01c2\t\u0018\u0002\u0002\u01c2\u0088\u0003\u0002\u0002\u0002", - "\u01c3\u01c4\t\u0019\u0002\u0002\u01c4\u008a\u0003\u0002\u0002\u0002", - "\u01c5\u01c6\t\u001a\u0002\u0002\u01c6\u008c\u0003\u0002\u0002\u0002", - "\u01c7\u01c8\t\u001b\u0002\u0002\u01c8\u008e\u0003\u0002\u0002\u0002", - "\u01c9\u01ca\t\u001c\u0002\u0002\u01ca\u0090\u0003\u0002\u0002\u0002", - "\u01cb\u01cc\t\u001d\u0002\u0002\u01cc\u0092\u0003\u0002\u0002\u0002", - "\u01cd\u01ce\t\u001e\u0002\u0002\u01ce\u0094\u0003\u0002\u0002\u0002", - "\u01cf\u01d0\t\u001f\u0002\u0002\u01d0\u0096\u0003\u0002\u0002\u0002", - "\u01d1\u01d2\t \u0002\u0002\u01d2\u0098\u0003\u0002\u0002\u0002\u01d3", - "\u01d4\t!\u0002\u0002\u01d4\u009a\u0003\u0002\u0002\u0002\u01d5\u01d6", - 
"\t\"\u0002\u0002\u01d6\u009c\u0003\u0002\u0002\u0002\u01d7\u01d8\t#", - "\u0002\u0002\u01d8\u009e\u0003\u0002\u0002\u0002\u001b\u0002\u0120\u0126", - "\u0129\u012d\u0132\u0134\u013a\u013e\u0143\u0145\u0147\u014f\u0151\u015a", - "\u015c\u0167\u0169\u0172\u0174\u017e\u0183\u018e\u0194\u01a0\u0003\u0002", - "\u0003\u0002"].join(""); + "M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k\u0002m\u0002o\u0002q\u0002s\u0002u\u0002", + "w\u0002y\u0002{\u0002}\u0002\u007f\u0002\u0081\u0002\u0083\u0002\u0085", + "\u0002\u0087\u0002\u0089\u0002\u008b\u0002\u008d\u0002\u008f\u0002\u0091", + "\u0002\u0093\u0002\u0095\u0002\u0097\u0002\u0099\u0002\u009b\u0002\u009d", + "\u0002\u009f\u0002\u0003\u0002$\u0004\u0002--//\u0003\u0002$$\u0003", + "\u0002))\u0004\u0002\f\f\u000f\u000f\u0005\u0002\u000b\r\u000f\u000f", + "\"\"\u0005\u0002C\\aac|\u0006\u00022;C\\aac|\u0003\u00022;\u0004\u0002", + "CCcc\u0004\u0002DDdd\u0004\u0002EEee\u0004\u0002FFff\u0004\u0002GGg", + "g\u0004\u0002HHhh\u0004\u0002IIii\u0004\u0002JJjj\u0004\u0002KKkk\u0004", + "\u0002LLll\u0004\u0002MMmm\u0004\u0002NNnn\u0004\u0002OOoo\u0004\u0002", + "PPpp\u0004\u0002QQqq\u0004\u0002RRrr\u0004\u0002SSss\u0004\u0002TTt", + "t\u0004\u0002UUuu\u0004\u0002VVvv\u0004\u0002WWww\u0004\u0002XXxx\u0004", + "\u0002YYyy\u0004\u0002ZZzz\u0004\u0002[[{{\u0004\u0002\\\\||\u0002\u01de", + "\u0002\u0003\u0003\u0002\u0002\u0002\u0002\u0005\u0003\u0002\u0002\u0002", + "\u0002\u0007\u0003\u0002\u0002\u0002\u0002\t\u0003\u0002\u0002\u0002", + "\u0002\u000b\u0003\u0002\u0002\u0002\u0002\r\u0003\u0002\u0002\u0002", + "\u0002\u000f\u0003\u0002\u0002\u0002\u0002\u0011\u0003\u0002\u0002\u0002", + "\u0002\u0013\u0003\u0002\u0002\u0002\u0002\u0015\u0003\u0002\u0002\u0002", + "\u0002\u0017\u0003\u0002\u0002\u0002\u0002\u0019\u0003\u0002\u0002\u0002", + "\u0002\u001b\u0003\u0002\u0002\u0002\u0002\u001d\u0003\u0002\u0002\u0002", + "\u0002\u001f\u0003\u0002\u0002\u0002\u0002!\u0003\u0002\u0002\u0002", + "\u0002#\u0003\u0002\u0002\u0002\u0002%\u0003\u0002\u0002\u0002\u0002", + "\'\u0003\u0002\u0002\u0002\u0002)\u0003\u0002\u0002\u0002\u0002+\u0003", + "\u0002\u0002\u0002\u0002-\u0003\u0002\u0002\u0002\u0002/\u0003\u0002", + "\u0002\u0002\u00021\u0003\u0002\u0002\u0002\u00023\u0003\u0002\u0002", + "\u0002\u00025\u0003\u0002\u0002\u0002\u00027\u0003\u0002\u0002\u0002", + "\u00029\u0003\u0002\u0002\u0002\u0002;\u0003\u0002\u0002\u0002\u0002", + "=\u0003\u0002\u0002\u0002\u0002?\u0003\u0002\u0002\u0002\u0002A\u0003", + "\u0002\u0002\u0002\u0002C\u0003\u0002\u0002\u0002\u0002E\u0003\u0002", + "\u0002\u0002\u0002G\u0003\u0002\u0002\u0002\u0002I\u0003\u0002\u0002", + "\u0002\u0002K\u0003\u0002\u0002\u0002\u0002M\u0003\u0002\u0002\u0002", + "\u0002O\u0003\u0002\u0002\u0002\u0002Q\u0003\u0002\u0002\u0002\u0002", + "S\u0003\u0002\u0002\u0002\u0002U\u0003\u0002\u0002\u0002\u0002W\u0003", + "\u0002\u0002\u0002\u0002Y\u0003\u0002\u0002\u0002\u0002[\u0003\u0002", + "\u0002\u0002\u0002]\u0003\u0002\u0002\u0002\u0002_\u0003\u0002\u0002", + "\u0002\u0002a\u0003\u0002\u0002\u0002\u0002c\u0003\u0002\u0002\u0002", + "\u0002e\u0003\u0002\u0002\u0002\u0002g\u0003\u0002\u0002\u0002\u0002", + "i\u0003\u0002\u0002\u0002\u0003\u00a1\u0003\u0002\u0002\u0002\u0005", + "\u00a4\u0003\u0002\u0002\u0002\u0007\u00a7\u0003\u0002\u0002\u0002\t", + "\u00aa\u0003\u0002\u0002\u0002\u000b\u00ad\u0003\u0002\u0002\u0002\r", + "\u00b0\u0003\u0002\u0002\u0002\u000f\u00b3\u0003\u0002\u0002\u0002\u0011", + "\u00b6\u0003\u0002\u0002\u0002\u0013\u00b9\u0003\u0002\u0002\u0002\u0015", + 
"\u00bc\u0003\u0002\u0002\u0002\u0017\u00be\u0003\u0002\u0002\u0002\u0019", + "\u00c0\u0003\u0002\u0002\u0002\u001b\u00c2\u0003\u0002\u0002\u0002\u001d", + "\u00c4\u0003\u0002\u0002\u0002\u001f\u00c6\u0003\u0002\u0002\u0002!", + "\u00c8\u0003\u0002\u0002\u0002#\u00ca\u0003\u0002\u0002\u0002%\u00cc", + "\u0003\u0002\u0002\u0002\'\u00ce\u0003\u0002\u0002\u0002)\u00d0\u0003", + "\u0002\u0002\u0002+\u00d2\u0003\u0002\u0002\u0002-\u00d4\u0003\u0002", + "\u0002\u0002/\u00d6\u0003\u0002\u0002\u00021\u00d8\u0003\u0002\u0002", + "\u00023\u00da\u0003\u0002\u0002\u00025\u00dc\u0003\u0002\u0002\u0002", + "7\u00de\u0003\u0002\u0002\u00029\u00e0\u0003\u0002\u0002\u0002;\u00e2", + "\u0003\u0002\u0002\u0002=\u00e6\u0003\u0002\u0002\u0002?\u00ee\u0003", + "\u0002\u0002\u0002A\u00f4\u0003\u0002\u0002\u0002C\u00fa\u0003\u0002", + "\u0002\u0002E\u00fd\u0003\u0002\u0002\u0002G\u0100\u0003\u0002\u0002", + "\u0002I\u0107\u0003\u0002\u0002\u0002K\u010c\u0003\u0002\u0002\u0002", + "M\u0110\u0003\u0002\u0002\u0002O\u0118\u0003\u0002\u0002\u0002Q\u011d", + "\u0003\u0002\u0002\u0002S\u0120\u0003\u0002\u0002\u0002U\u014f\u0003", + "\u0002\u0002\u0002W\u0151\u0003\u0002\u0002\u0002Y\u0153\u0003\u0002", + "\u0002\u0002[\u015e\u0003\u0002\u0002\u0002]\u0169\u0003\u0002\u0002", + "\u0002_\u016b\u0003\u0002\u0002\u0002a\u0176\u0003\u0002\u0002\u0002", + "c\u0186\u0003\u0002\u0002\u0002e\u0190\u0003\u0002\u0002\u0002g\u01a0", + "\u0003\u0002\u0002\u0002i\u01a4\u0003\u0002\u0002\u0002k\u01ab\u0003", + "\u0002\u0002\u0002m\u01ad\u0003\u0002\u0002\u0002o\u01af\u0003\u0002", + "\u0002\u0002q\u01b1\u0003\u0002\u0002\u0002s\u01b3\u0003\u0002\u0002", + "\u0002u\u01b5\u0003\u0002\u0002\u0002w\u01b7\u0003\u0002\u0002\u0002", + "y\u01b9\u0003\u0002\u0002\u0002{\u01bb\u0003\u0002\u0002\u0002}\u01bd", + "\u0003\u0002\u0002\u0002\u007f\u01bf\u0003\u0002\u0002\u0002\u0081\u01c1", + "\u0003\u0002\u0002\u0002\u0083\u01c3\u0003\u0002\u0002\u0002\u0085\u01c5", + "\u0003\u0002\u0002\u0002\u0087\u01c7\u0003\u0002\u0002\u0002\u0089\u01c9", + "\u0003\u0002\u0002\u0002\u008b\u01cb\u0003\u0002\u0002\u0002\u008d\u01cd", + "\u0003\u0002\u0002\u0002\u008f\u01cf\u0003\u0002\u0002\u0002\u0091\u01d1", + "\u0003\u0002\u0002\u0002\u0093\u01d3\u0003\u0002\u0002\u0002\u0095\u01d5", + "\u0003\u0002\u0002\u0002\u0097\u01d7\u0003\u0002\u0002\u0002\u0099\u01d9", + "\u0003\u0002\u0002\u0002\u009b\u01db\u0003\u0002\u0002\u0002\u009d\u01dd", + "\u0003\u0002\u0002\u0002\u009f\u01df\u0003\u0002\u0002\u0002\u00a1\u00a2", + "\u0007(\u0002\u0002\u00a2\u00a3\u0007(\u0002\u0002\u00a3\u0004\u0003", + "\u0002\u0002\u0002\u00a4\u00a5\u0007?\u0002\u0002\u00a5\u00a6\u0007", + "?\u0002\u0002\u00a6\u0006\u0003\u0002\u0002\u0002\u00a7\u00a8\u0007", + "@\u0002\u0002\u00a8\u00a9\u0007?\u0002\u0002\u00a9\b\u0003\u0002\u0002", + "\u0002\u00aa\u00ab\u0007>\u0002\u0002\u00ab\u00ac\u0007?\u0002\u0002", + "\u00ac\n\u0003\u0002\u0002\u0002\u00ad\u00ae\u0007#\u0002\u0002\u00ae", + "\u00af\u0007?\u0002\u0002\u00af\f\u0003\u0002\u0002\u0002\u00b0\u00b1", + "\u0007>\u0002\u0002\u00b1\u00b2\u0007@\u0002\u0002\u00b2\u000e\u0003", + "\u0002\u0002\u0002\u00b3\u00b4\u0007~\u0002\u0002\u00b4\u00b5\u0007", + "~\u0002\u0002\u00b5\u0010\u0003\u0002\u0002\u0002\u00b6\u00b7\u0007", + ">\u0002\u0002\u00b7\u00b8\u0007>\u0002\u0002\u00b8\u0012\u0003\u0002", + "\u0002\u0002\u00b9\u00ba\u0007@\u0002\u0002\u00ba\u00bb\u0007@\u0002", + "\u0002\u00bb\u0014\u0003\u0002\u0002\u0002\u00bc\u00bd\u0007(\u0002", + "\u0002\u00bd\u0016\u0003\u0002\u0002\u0002\u00be\u00bf\u0007?\u0002", + 
"\u0002\u00bf\u0018\u0003\u0002\u0002\u0002\u00c0\u00c1\u0007+\u0002", + "\u0002\u00c1\u001a\u0003\u0002\u0002\u0002\u00c2\u00c3\u0007<\u0002", + "\u0002\u00c3\u001c\u0003\u0002\u0002\u0002\u00c4\u00c5\u0007.\u0002", + "\u0002\u00c5\u001e\u0003\u0002\u0002\u0002\u00c6\u00c7\u00070\u0002", + "\u0002\u00c7 \u0003\u0002\u0002\u0002\u00c8\u00c9\u00071\u0002\u0002", + "\u00c9\"\u0003\u0002\u0002\u0002\u00ca\u00cb\u0007@\u0002\u0002\u00cb", + "$\u0003\u0002\u0002\u0002\u00cc\u00cd\u0007>\u0002\u0002\u00cd&\u0003", + "\u0002\u0002\u0002\u00ce\u00cf\u0007/\u0002\u0002\u00cf(\u0003\u0002", + "\u0002\u0002\u00d0\u00d1\u0007\'\u0002\u0002\u00d1*\u0003\u0002\u0002", + "\u0002\u00d2\u00d3\u0007*\u0002\u0002\u00d3,\u0003\u0002\u0002\u0002", + "\u00d4\u00d5\u0007~\u0002\u0002\u00d5.\u0003\u0002\u0002\u0002\u00d6", + "\u00d7\u0007-\u0002\u0002\u00d70\u0003\u0002\u0002\u0002\u00d8\u00d9", + "\u0007A\u0002\u0002\u00d92\u0003\u0002\u0002\u0002\u00da\u00db\u0007", + "=\u0002\u0002\u00db4\u0003\u0002\u0002\u0002\u00dc\u00dd\u0007,\u0002", + "\u0002\u00dd6\u0003\u0002\u0002\u0002\u00de\u00df\u0007\u0080\u0002", + "\u0002\u00df8\u0003\u0002\u0002\u0002\u00e0\u00e1\u0007a\u0002\u0002", + "\u00e1:\u0003\u0002\u0002\u0002\u00e2\u00e3\u0005m7\u0002\u00e3\u00e4", + "\u0005\u0087D\u0002\u00e4\u00e5\u0005s:\u0002\u00e5<\u0003\u0002\u0002", + "\u0002\u00e6\u00e7\u0005o8\u0002\u00e7\u00e8\u0005u;\u0002\u00e8\u00e9", + "\u0005\u0093J\u0002\u00e9\u00ea\u0005\u0099M\u0002\u00ea\u00eb\u0005", + "u;\u0002\u00eb\u00ec\u0005u;\u0002\u00ec\u00ed\u0005\u0087D\u0002\u00ed", + ">\u0003\u0002\u0002\u0002\u00ee\u00ef\u0005w<\u0002\u00ef\u00f0\u0005", + "m7\u0002\u00f0\u00f1\u0005\u0083B\u0002\u00f1\u00f2\u0005\u0091I\u0002", + "\u00f2\u00f3\u0005u;\u0002\u00f3@\u0003\u0002\u0002\u0002\u00f4\u00f5", + "\u0005}?\u0002\u00f5\u00f6\u0005\u0083B\u0002\u00f6\u00f7\u0005}?\u0002", + "\u00f7\u00f8\u0005\u0081A\u0002\u00f8\u00f9\u0005u;\u0002\u00f9B\u0003", + "\u0002\u0002\u0002\u00fa\u00fb\u0005}?\u0002\u00fb\u00fc\u0005\u0087", + "D\u0002\u00fcD\u0003\u0002\u0002\u0002\u00fd\u00fe\u0005}?\u0002\u00fe", + "\u00ff\u0005\u0091I\u0002\u00ffF\u0003\u0002\u0002\u0002\u0100\u0101", + "\u0005}?\u0002\u0101\u0102\u0005\u0091I\u0002\u0102\u0103\u0005\u0087", + "D\u0002\u0103\u0104\u0005\u0095K\u0002\u0104\u0105\u0005\u0083B\u0002", + "\u0105\u0106\u0005\u0083B\u0002\u0106H\u0003\u0002\u0002\u0002\u0107", + "\u0108\u0005\u0083B\u0002\u0108\u0109\u0005}?\u0002\u0109\u010a\u0005", + "\u0081A\u0002\u010a\u010b\u0005u;\u0002\u010bJ\u0003\u0002\u0002\u0002", + "\u010c\u010d\u0005\u0087D\u0002\u010d\u010e\u0005\u0089E\u0002\u010e", + "\u010f\u0005\u0093J\u0002\u010fL\u0003\u0002\u0002\u0002\u0110\u0111", + "\u0005\u0087D\u0002\u0111\u0112\u0005\u0089E\u0002\u0112\u0113\u0005", + "\u0093J\u0002\u0113\u0114\u0005\u0087D\u0002\u0114\u0115\u0005\u0095", + "K\u0002\u0115\u0116\u0005\u0083B\u0002\u0116\u0117\u0005\u0083B\u0002", + "\u0117N\u0003\u0002\u0002\u0002\u0118\u0119\u0005\u0087D\u0002\u0119", + "\u011a\u0005\u0095K\u0002\u011a\u011b\u0005\u0083B\u0002\u011b\u011c", + "\u0005\u0083B\u0002\u011cP\u0003\u0002\u0002\u0002\u011d\u011e\u0005", + "\u0089E\u0002\u011e\u011f\u0005\u008fH\u0002\u011fR\u0003\u0002\u0002", + "\u0002\u0120\u0121\u0005\u0093J\u0002\u0121\u0122\u0005\u008fH\u0002", + "\u0122\u0123\u0005\u0095K\u0002\u0123\u0124\u0005u;\u0002\u0124T\u0003", + "\u0002\u0002\u0002\u0125\u0127\u0005k6\u0002\u0126\u0125\u0003\u0002", + "\u0002\u0002\u0127\u0128\u0003\u0002\u0002\u0002\u0128\u0126\u0003\u0002", + 
"\u0002\u0002\u0128\u0129\u0003\u0002\u0002\u0002\u0129\u0131\u0003\u0002", + "\u0002\u0002\u012a\u012e\u00070\u0002\u0002\u012b\u012d\u0005k6\u0002", + "\u012c\u012b\u0003\u0002\u0002\u0002\u012d\u0130\u0003\u0002\u0002\u0002", + "\u012e\u012c\u0003\u0002\u0002\u0002\u012e\u012f\u0003\u0002\u0002\u0002", + "\u012f\u0132\u0003\u0002\u0002\u0002\u0130\u012e\u0003\u0002\u0002\u0002", + "\u0131\u012a\u0003\u0002\u0002\u0002\u0131\u0132\u0003\u0002\u0002\u0002", + "\u0132\u013c\u0003\u0002\u0002\u0002\u0133\u0135\u0005u;\u0002\u0134", + "\u0136\t\u0002\u0002\u0002\u0135\u0134\u0003\u0002\u0002\u0002\u0135", + "\u0136\u0003\u0002\u0002\u0002\u0136\u0138\u0003\u0002\u0002\u0002\u0137", + "\u0139\u0005k6\u0002\u0138\u0137\u0003\u0002\u0002\u0002\u0139\u013a", + "\u0003\u0002\u0002\u0002\u013a\u0138\u0003\u0002\u0002\u0002\u013a\u013b", + "\u0003\u0002\u0002\u0002\u013b\u013d\u0003\u0002\u0002\u0002\u013c\u0133", + "\u0003\u0002\u0002\u0002\u013c\u013d\u0003\u0002\u0002\u0002\u013d\u0150", + "\u0003\u0002\u0002\u0002\u013e\u0140\u00070\u0002\u0002\u013f\u0141", + "\u0005k6\u0002\u0140\u013f\u0003\u0002\u0002\u0002\u0141\u0142\u0003", + "\u0002\u0002\u0002\u0142\u0140\u0003\u0002\u0002\u0002\u0142\u0143\u0003", + "\u0002\u0002\u0002\u0143\u014d\u0003\u0002\u0002\u0002\u0144\u0146\u0005", + "u;\u0002\u0145\u0147\t\u0002\u0002\u0002\u0146\u0145\u0003\u0002\u0002", + "\u0002\u0146\u0147\u0003\u0002\u0002\u0002\u0147\u0149\u0003\u0002\u0002", + "\u0002\u0148\u014a\u0005k6\u0002\u0149\u0148\u0003\u0002\u0002\u0002", + "\u014a\u014b\u0003\u0002\u0002\u0002\u014b\u0149\u0003\u0002\u0002\u0002", + "\u014b\u014c\u0003\u0002\u0002\u0002\u014c\u014e\u0003\u0002\u0002\u0002", + "\u014d\u0144\u0003\u0002\u0002\u0002\u014d\u014e\u0003\u0002\u0002\u0002", + "\u014e\u0150\u0003\u0002\u0002\u0002\u014f\u0126\u0003\u0002\u0002\u0002", + "\u014f\u013e\u0003\u0002\u0002\u0002\u0150V\u0003\u0002\u0002\u0002", + "\u0151\u0152\u0005Y-\u0002\u0152X\u0003\u0002\u0002\u0002\u0153\u0159", + "\u0007$\u0002\u0002\u0154\u0155\u0007^\u0002\u0002\u0155\u0158\u0007", + "$\u0002\u0002\u0156\u0158\n\u0003\u0002\u0002\u0157\u0154\u0003\u0002", + "\u0002\u0002\u0157\u0156\u0003\u0002\u0002\u0002\u0158\u015b\u0003\u0002", + "\u0002\u0002\u0159\u0157\u0003\u0002\u0002\u0002\u0159\u015a\u0003\u0002", + "\u0002\u0002\u015a\u015c\u0003\u0002\u0002\u0002\u015b\u0159\u0003\u0002", + "\u0002\u0002\u015c\u015d\u0007$\u0002\u0002\u015dZ\u0003\u0002\u0002", + "\u0002\u015e\u0164\u0007$\u0002\u0002\u015f\u0160\u0007$\u0002\u0002", + "\u0160\u0163\u0007$\u0002\u0002\u0161\u0163\n\u0003\u0002\u0002\u0162", + "\u015f\u0003\u0002\u0002\u0002\u0162\u0161\u0003\u0002\u0002\u0002\u0163", + "\u0166\u0003\u0002\u0002\u0002\u0164\u0162\u0003\u0002\u0002\u0002\u0164", + "\u0165\u0003\u0002\u0002\u0002\u0165\u0167\u0003\u0002\u0002\u0002\u0166", + "\u0164\u0003\u0002\u0002\u0002\u0167\u0168\u0007$\u0002\u0002\u0168", + "\\\u0003\u0002\u0002\u0002\u0169\u016a\u0005_0\u0002\u016a^\u0003\u0002", + "\u0002\u0002\u016b\u0171\u0007)\u0002\u0002\u016c\u016d\u0007^\u0002", + "\u0002\u016d\u0170\u0007)\u0002\u0002\u016e\u0170\n\u0004\u0002\u0002", + "\u016f\u016c\u0003\u0002\u0002\u0002\u016f\u016e\u0003\u0002\u0002\u0002", + "\u0170\u0173\u0003\u0002\u0002\u0002\u0171\u016f\u0003\u0002\u0002\u0002", + "\u0171\u0172\u0003\u0002\u0002\u0002\u0172\u0174\u0003\u0002\u0002\u0002", + "\u0173\u0171\u0003\u0002\u0002\u0002\u0174\u0175\u0007)\u0002\u0002", + "\u0175`\u0003\u0002\u0002\u0002\u0176\u017c\u0007)\u0002\u0002\u0177", + 
"\u0178\u0007)\u0002\u0002\u0178\u017b\u0007)\u0002\u0002\u0179\u017b", + "\n\u0004\u0002\u0002\u017a\u0177\u0003\u0002\u0002\u0002\u017a\u0179", + "\u0003\u0002\u0002\u0002\u017b\u017e\u0003\u0002\u0002\u0002\u017c\u017a", + "\u0003\u0002\u0002\u0002\u017c\u017d\u0003\u0002\u0002\u0002\u017d\u017f", + "\u0003\u0002\u0002\u0002\u017e\u017c\u0003\u0002\u0002\u0002\u017f\u0180", + "\u0007)\u0002\u0002\u0180b\u0003\u0002\u0002\u0002\u0181\u0182\u0007", + "/\u0002\u0002\u0182\u0187\u0007/\u0002\u0002\u0183\u0184\u00071\u0002", + "\u0002\u0184\u0187\u00071\u0002\u0002\u0185\u0187\u0007%\u0002\u0002", + "\u0186\u0181\u0003\u0002\u0002\u0002\u0186\u0183\u0003\u0002\u0002\u0002", + "\u0186\u0185\u0003\u0002\u0002\u0002\u0187\u018b\u0003\u0002\u0002\u0002", + "\u0188\u018a\n\u0005\u0002\u0002\u0189\u0188\u0003\u0002\u0002\u0002", + "\u018a\u018d\u0003\u0002\u0002\u0002\u018b\u0189\u0003\u0002\u0002\u0002", + "\u018b\u018c\u0003\u0002\u0002\u0002\u018c\u018e\u0003\u0002\u0002\u0002", + "\u018d\u018b\u0003\u0002\u0002\u0002\u018e\u018f\b2\u0002\u0002\u018f", + "d\u0003\u0002\u0002\u0002\u0190\u0191\u00071\u0002\u0002\u0191\u0192", + "\u0007,\u0002\u0002\u0192\u0196\u0003\u0002\u0002\u0002\u0193\u0195", + "\u000b\u0002\u0002\u0002\u0194\u0193\u0003\u0002\u0002\u0002\u0195\u0198", + "\u0003\u0002\u0002\u0002\u0196\u0197\u0003\u0002\u0002\u0002\u0196\u0194", + "\u0003\u0002\u0002\u0002\u0197\u019c\u0003\u0002\u0002\u0002\u0198\u0196", + "\u0003\u0002\u0002\u0002\u0199\u019a\u0007,\u0002\u0002\u019a\u019d", + "\u00071\u0002\u0002\u019b\u019d\u0007\u0002\u0002\u0003\u019c\u0199", + "\u0003\u0002\u0002\u0002\u019c\u019b\u0003\u0002\u0002\u0002\u019d\u019e", + "\u0003\u0002\u0002\u0002\u019e\u019f\b3\u0002\u0002\u019ff\u0003\u0002", + "\u0002\u0002\u01a0\u01a1\t\u0006\u0002\u0002\u01a1\u01a2\u0003\u0002", + "\u0002\u0002\u01a2\u01a3\b4\u0002\u0002\u01a3h\u0003\u0002\u0002\u0002", + "\u01a4\u01a8\t\u0007\u0002\u0002\u01a5\u01a7\t\b\u0002\u0002\u01a6\u01a5", + "\u0003\u0002\u0002\u0002\u01a7\u01aa\u0003\u0002\u0002\u0002\u01a8\u01a6", + "\u0003\u0002\u0002\u0002\u01a8\u01a9\u0003\u0002\u0002\u0002\u01a9j", + "\u0003\u0002\u0002\u0002\u01aa\u01a8\u0003\u0002\u0002\u0002\u01ab\u01ac", + "\t\t\u0002\u0002\u01acl\u0003\u0002\u0002\u0002\u01ad\u01ae\t\n\u0002", + "\u0002\u01aen\u0003\u0002\u0002\u0002\u01af\u01b0\t\u000b\u0002\u0002", + "\u01b0p\u0003\u0002\u0002\u0002\u01b1\u01b2\t\f\u0002\u0002\u01b2r\u0003", + "\u0002\u0002\u0002\u01b3\u01b4\t\r\u0002\u0002\u01b4t\u0003\u0002\u0002", + "\u0002\u01b5\u01b6\t\u000e\u0002\u0002\u01b6v\u0003\u0002\u0002\u0002", + "\u01b7\u01b8\t\u000f\u0002\u0002\u01b8x\u0003\u0002\u0002\u0002\u01b9", + "\u01ba\t\u0010\u0002\u0002\u01baz\u0003\u0002\u0002\u0002\u01bb\u01bc", + "\t\u0011\u0002\u0002\u01bc|\u0003\u0002\u0002\u0002\u01bd\u01be\t\u0012", + "\u0002\u0002\u01be~\u0003\u0002\u0002\u0002\u01bf\u01c0\t\u0013\u0002", + "\u0002\u01c0\u0080\u0003\u0002\u0002\u0002\u01c1\u01c2\t\u0014\u0002", + "\u0002\u01c2\u0082\u0003\u0002\u0002\u0002\u01c3\u01c4\t\u0015\u0002", + "\u0002\u01c4\u0084\u0003\u0002\u0002\u0002\u01c5\u01c6\t\u0016\u0002", + "\u0002\u01c6\u0086\u0003\u0002\u0002\u0002\u01c7\u01c8\t\u0017\u0002", + "\u0002\u01c8\u0088\u0003\u0002\u0002\u0002\u01c9\u01ca\t\u0018\u0002", + "\u0002\u01ca\u008a\u0003\u0002\u0002\u0002\u01cb\u01cc\t\u0019\u0002", + "\u0002\u01cc\u008c\u0003\u0002\u0002\u0002\u01cd\u01ce\t\u001a\u0002", + "\u0002\u01ce\u008e\u0003\u0002\u0002\u0002\u01cf\u01d0\t\u001b\u0002", + "\u0002\u01d0\u0090\u0003\u0002\u0002\u0002\u01d1\u01d2\t\u001c\u0002", + 
"\u0002\u01d2\u0092\u0003\u0002\u0002\u0002\u01d3\u01d4\t\u001d\u0002", + "\u0002\u01d4\u0094\u0003\u0002\u0002\u0002\u01d5\u01d6\t\u001e\u0002", + "\u0002\u01d6\u0096\u0003\u0002\u0002\u0002\u01d7\u01d8\t\u001f\u0002", + "\u0002\u01d8\u0098\u0003\u0002\u0002\u0002\u01d9\u01da\t \u0002\u0002", + "\u01da\u009a\u0003\u0002\u0002\u0002\u01db\u01dc\t!\u0002\u0002\u01dc", + "\u009c\u0003\u0002\u0002\u0002\u01dd\u01de\t\"\u0002\u0002\u01de\u009e", + "\u0003\u0002\u0002\u0002\u01df\u01e0\t#\u0002\u0002\u01e0\u00a0\u0003", + "\u0002\u0002\u0002\u001b\u0002\u0128\u012e\u0131\u0135\u013a\u013c\u0142", + "\u0146\u014b\u014d\u014f\u0157\u0159\u0162\u0164\u016f\u0171\u017a\u017c", + "\u0186\u018b\u0196\u019c\u01a8\u0003\u0002\u0003\u0002"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -351,26 +355,27 @@ PqlLexer.UNDER = 28; PqlLexer.K_AND = 29; PqlLexer.K_BETWEEN = 30; PqlLexer.K_FALSE = 31; -PqlLexer.K_IN = 32; -PqlLexer.K_IS = 33; -PqlLexer.K_ISNULL = 34; -PqlLexer.K_LIKE = 35; -PqlLexer.K_NOT = 36; -PqlLexer.K_NOTNULL = 37; -PqlLexer.K_NULL = 38; -PqlLexer.K_OR = 39; -PqlLexer.K_TRUE = 40; -PqlLexer.NUMERIC_LITERAL = 41; -PqlLexer.DOUBLE_QUOTED_STRING = 42; -PqlLexer.DOUBLE_QUOTED_STRING_TEL = 43; -PqlLexer.DOUBLE_QUOTED_STRING_SQL = 44; -PqlLexer.SINGLE_QUOTED_STRING = 45; -PqlLexer.SINGLE_QUOTED_STRING_TEL = 46; -PqlLexer.SINGLE_QUOTED_STRING_SQL = 47; -PqlLexer.SINGLE_LINE_COMMENT = 48; -PqlLexer.MULTILINE_COMMENT = 49; -PqlLexer.SPACES = 50; -PqlLexer.WORD = 51; +PqlLexer.K_ILIKE = 32; +PqlLexer.K_IN = 33; +PqlLexer.K_IS = 34; +PqlLexer.K_ISNULL = 35; +PqlLexer.K_LIKE = 36; +PqlLexer.K_NOT = 37; +PqlLexer.K_NOTNULL = 38; +PqlLexer.K_NULL = 39; +PqlLexer.K_OR = 40; +PqlLexer.K_TRUE = 41; +PqlLexer.NUMERIC_LITERAL = 42; +PqlLexer.DOUBLE_QUOTED_STRING = 43; +PqlLexer.DOUBLE_QUOTED_STRING_TEL = 44; +PqlLexer.DOUBLE_QUOTED_STRING_SQL = 45; +PqlLexer.SINGLE_QUOTED_STRING = 46; +PqlLexer.SINGLE_QUOTED_STRING_TEL = 47; +PqlLexer.SINGLE_QUOTED_STRING_SQL = 48; +PqlLexer.SINGLE_LINE_COMMENT = 49; +PqlLexer.MULTILINE_COMMENT = 50; +PqlLexer.SPACES = 51; +PqlLexer.WORD = 52; PqlLexer.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; @@ -390,14 +395,14 @@ PqlLexer.prototype.symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", - "K_BETWEEN", "K_FALSE", "K_IN", "K_IS", - "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", - "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", - "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", - "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", - "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", - "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", - "SPACES", "WORD" ]; + "K_BETWEEN", "K_FALSE", "K_ILIKE", + "K_IN", "K_IS", "K_ISNULL", "K_LIKE", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", + "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", + "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", + "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", + "MULTILINE_COMMENT", "SPACES", "WORD" ]; PqlLexer.prototype.ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "OR", "SHIFT_LEFT", "SHIFT_RIGHT", @@ -406,7 +411,7 @@ PqlLexer.prototype.ruleNames = [ "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_BETWEEN", - "K_FALSE", "K_IN", "K_IS", "K_ISNULL", 
+ "K_FALSE", "K_ILIKE", "K_IN", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", diff --git a/js-temp/PqlParser.js b/js-temp/PqlParser.js index a4ab6ae..3d96615 100644 --- a/js-temp/PqlParser.js +++ b/js-temp/PqlParser.js @@ -8,7 +8,7 @@ var grammarFileName = "PqlParser.g4"; var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", - "\u00035\u0084\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", + "\u00036\u0084\u0004\u0002\t\u0002\u0004\u0003\t\u0003\u0004\u0004\t", "\u0004\u0004\u0005\t\u0005\u0004\u0006\t\u0006\u0004\u0007\t\u0007\u0004", "\b\t\b\u0004\t\t\t\u0004\n\t\n\u0003\u0002\u0003\u0002\u0003\u0002\u0003", "\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003\u0003", @@ -28,66 +28,66 @@ var serializedATN = ["\u0003\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964", "\b\u0005\bn\n\b\u0003\b\u0003\b\u0003\b\u0005\bs\n\b\u0003\b\u0003\b", "\u0003\b\u0005\bx\n\b\u0003\t\u0003\t\u0003\t\u0007\t}\n\t\f\t\u000e", "\t\u0080\u000b\t\u0003\n\u0003\n\u0003\n\u0002\u0003\u0004\u000b\u0002", - "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0002\n\u0005\u0002\u0015\u0015", - "\u0019\u0019&&\u0005\u0002\u0012\u0012\u0016\u0016\u001c\u001c\u0004", + "\u0004\u0006\b\n\f\u000e\u0010\u0012\u0002\u000b\u0005\u0002\u0015\u0015", + "\u0019\u0019\'\'\u0005\u0002\u0012\u0012\u0016\u0016\u001c\u001c\u0004", "\u0002\u0015\u0015\u0019\u0019\u0004\u0002\u0005\u0006\u0013\u0014\u0006", - "\u0002\u0004\u0004\u0007\b\r\r##\u0004\u0002\u0003\u0003\u001f\u001f", - "\u0004\u0002\t\t))\u0006\u0002!!((*,//\u0002\u0092\u0002\u0014\u0003", - "\u0002\u0002\u0002\u0004!\u0003\u0002\u0002\u0002\u0006O\u0003\u0002", - "\u0002\u0002\bW\u0003\u0002\u0002\u0002\n^\u0003\u0002\u0002\u0002\f", - "h\u0003\u0002\u0002\u0002\u000em\u0003\u0002\u0002\u0002\u0010y\u0003", - "\u0002\u0002\u0002\u0012\u0081\u0003\u0002\u0002\u0002\u0014\u0015\u0005", - "\u0004\u0003\u0002\u0015\u0016\u0007\u0002\u0002\u0003\u0016\u0003\u0003", - "\u0002\u0002\u0002\u0017\u0018\b\u0003\u0001\u0002\u0018\u0019\t\u0002", - "\u0002\u0002\u0019\"\u0005\u0004\u0003\u0010\u001a\u001b\u0007\u0017", - "\u0002\u0002\u001b\u001c\u0005\u0004\u0003\u0002\u001c\u001d\u0007\u000e", - "\u0002\u0002\u001d\"\u0003\u0002\u0002\u0002\u001e\"\u0005\u0012\n\u0002", - "\u001f\"\u0005\b\u0005\u0002 \"\u0005\u000e\b\u0002!\u0017\u0003\u0002", - "\u0002\u0002!\u001a\u0003\u0002\u0002\u0002!\u001e\u0003\u0002\u0002", - "\u0002!\u001f\u0003\u0002\u0002\u0002! 
\u0003\u0002\u0002\u0002\"L\u0003", - "\u0002\u0002\u0002#$\f\u000f\u0002\u0002$%\t\u0003\u0002\u0002%K\u0005", - "\u0004\u0003\u0010&\'\f\u000e\u0002\u0002\'(\t\u0004\u0002\u0002(K\u0005", - "\u0004\u0003\u000f)*\f\r\u0002\u0002*+\t\u0005\u0002\u0002+K\u0005\u0004", - "\u0003\u000e,-\f\f\u0002\u0002-.\t\u0006\u0002\u0002.K\u0005\u0004\u0003", - "\r/1\f\u000b\u0002\u000202\u0007&\u0002\u000210\u0003\u0002\u0002\u0002", - "12\u0003\u0002\u0002\u000223\u0003\u0002\u0002\u000234\u0007%\u0002", - "\u00024K\u0005\u0004\u0003\f56\f\t\u0002\u000267\t\u0007\u0002\u0002", - "7K\u0005\u0004\u0003\n89\f\b\u0002\u00029:\t\b\u0002\u0002:K\u0005\u0004", - "\u0003\t;=\f\u0007\u0002\u0002<>\u0007&\u0002\u0002=<\u0003\u0002\u0002", - "\u0002=>\u0003\u0002\u0002\u0002>?\u0003\u0002\u0002\u0002?@\u0007 ", - "\u0002\u0002@K\u0005\u0004\u0003\bAC\f\n\u0002\u0002BD\u0007&\u0002", - "\u0002CB\u0003\u0002\u0002\u0002CD\u0003\u0002\u0002\u0002DE\u0003\u0002", - "\u0002\u0002EF\u0007\"\u0002\u0002FG\u0007\u0017\u0002\u0002GH\u0005", - "\u0006\u0004\u0002HI\u0007\u000e\u0002\u0002IK\u0003\u0002\u0002\u0002", - "J#\u0003\u0002\u0002\u0002J&\u0003\u0002\u0002\u0002J)\u0003\u0002\u0002", - "\u0002J,\u0003\u0002\u0002\u0002J/\u0003\u0002\u0002\u0002J5\u0003\u0002", - "\u0002\u0002J8\u0003\u0002\u0002\u0002J;\u0003\u0002\u0002\u0002JA\u0003", - "\u0002\u0002\u0002KN\u0003\u0002\u0002\u0002LJ\u0003\u0002\u0002\u0002", - "LM\u0003\u0002\u0002\u0002M\u0005\u0003\u0002\u0002\u0002NL\u0003\u0002", - "\u0002\u0002OT\u0005\u0004\u0003\u0002PQ\u0007\u0010\u0002\u0002QS\u0005", - "\u0004\u0003\u0002RP\u0003\u0002\u0002\u0002SV\u0003\u0002\u0002\u0002", - "TR\u0003\u0002\u0002\u0002TU\u0003\u0002\u0002\u0002U\u0007\u0003\u0002", - "\u0002\u0002VT\u0003\u0002\u0002\u0002WX\u0005\u0010\t\u0002XZ\u0007", - "\u0017\u0002\u0002Y[\u0005\n\u0006\u0002ZY\u0003\u0002\u0002\u0002Z", - "[\u0003\u0002\u0002\u0002[\\\u0003\u0002\u0002\u0002\\]\u0007\u000e", - "\u0002\u0002]\t\u0003\u0002\u0002\u0002^c\u0005\f\u0007\u0002_`\u0007", - "\u0010\u0002\u0002`b\u0005\f\u0007\u0002a_\u0003\u0002\u0002\u0002b", - "e\u0003\u0002\u0002\u0002ca\u0003\u0002\u0002\u0002cd\u0003\u0002\u0002", - "\u0002d\u000b\u0003\u0002\u0002\u0002ec\u0003\u0002\u0002\u0002fg\u0007", - "5\u0002\u0002gi\u0007\r\u0002\u0002hf\u0003\u0002\u0002\u0002hi\u0003", - "\u0002\u0002\u0002ij\u0003\u0002\u0002\u0002jk\u0005\u0004\u0003\u0002", - "k\r\u0003\u0002\u0002\u0002ln\u0007\u001a\u0002\u0002ml\u0003\u0002", - "\u0002\u0002mn\u0003\u0002\u0002\u0002nr\u0003\u0002\u0002\u0002op\u0005", - "\u0010\t\u0002pq\u0007\u0018\u0002\u0002qs\u0003\u0002\u0002\u0002r", - "o\u0003\u0002\u0002\u0002rs\u0003\u0002\u0002\u0002st\u0003\u0002\u0002", - "\u0002tw\u0005\u0010\t\u0002uv\u0007\u000f\u0002\u0002vx\u0005\u0010", - "\t\u0002wu\u0003\u0002\u0002\u0002wx\u0003\u0002\u0002\u0002x\u000f", - "\u0003\u0002\u0002\u0002y~\u00075\u0002\u0002z{\u0007\u0011\u0002\u0002", - "{}\u00075\u0002\u0002|z\u0003\u0002\u0002\u0002}\u0080\u0003\u0002\u0002", - "\u0002~|\u0003\u0002\u0002\u0002~\u007f\u0003\u0002\u0002\u0002\u007f", - "\u0011\u0003\u0002\u0002\u0002\u0080~\u0003\u0002\u0002\u0002\u0081", - "\u0082\t\t\u0002\u0002\u0082\u0013\u0003\u0002\u0002\u0002\u0010!1=", - "CJLTZchmrw~"].join(""); + "\u0002\u0004\u0004\u0007\b\r\r$$\u0004\u0002\"\"&&\u0004\u0002\u0003", + "\u0003\u001f\u001f\u0004\u0002\t\t**\u0006\u0002!!))+-00\u0002\u0092", + "\u0002\u0014\u0003\u0002\u0002\u0002\u0004!\u0003\u0002\u0002\u0002", + "\u0006O\u0003\u0002\u0002\u0002\bW\u0003\u0002\u0002\u0002\n^\u0003", + 
"\u0002\u0002\u0002\fh\u0003\u0002\u0002\u0002\u000em\u0003\u0002\u0002", + "\u0002\u0010y\u0003\u0002\u0002\u0002\u0012\u0081\u0003\u0002\u0002", + "\u0002\u0014\u0015\u0005\u0004\u0003\u0002\u0015\u0016\u0007\u0002\u0002", + "\u0003\u0016\u0003\u0003\u0002\u0002\u0002\u0017\u0018\b\u0003\u0001", + "\u0002\u0018\u0019\t\u0002\u0002\u0002\u0019\"\u0005\u0004\u0003\u0010", + "\u001a\u001b\u0007\u0017\u0002\u0002\u001b\u001c\u0005\u0004\u0003\u0002", + "\u001c\u001d\u0007\u000e\u0002\u0002\u001d\"\u0003\u0002\u0002\u0002", + "\u001e\"\u0005\u0012\n\u0002\u001f\"\u0005\b\u0005\u0002 \"\u0005\u000e", + "\b\u0002!\u0017\u0003\u0002\u0002\u0002!\u001a\u0003\u0002\u0002\u0002", + "!\u001e\u0003\u0002\u0002\u0002!\u001f\u0003\u0002\u0002\u0002! \u0003", + "\u0002\u0002\u0002\"L\u0003\u0002\u0002\u0002#$\f\u000f\u0002\u0002", + "$%\t\u0003\u0002\u0002%K\u0005\u0004\u0003\u0010&\'\f\u000e\u0002\u0002", + "\'(\t\u0004\u0002\u0002(K\u0005\u0004\u0003\u000f)*\f\r\u0002\u0002", + "*+\t\u0005\u0002\u0002+K\u0005\u0004\u0003\u000e,-\f\f\u0002\u0002-", + ".\t\u0006\u0002\u0002.K\u0005\u0004\u0003\r/1\f\u000b\u0002\u000202", + "\u0007\'\u0002\u000210\u0003\u0002\u0002\u000212\u0003\u0002\u0002\u0002", + "23\u0003\u0002\u0002\u000234\t\u0007\u0002\u00024K\u0005\u0004\u0003", + "\f56\f\t\u0002\u000267\t\b\u0002\u00027K\u0005\u0004\u0003\n89\f\b\u0002", + "\u00029:\t\t\u0002\u0002:K\u0005\u0004\u0003\t;=\f\u0007\u0002\u0002", + "<>\u0007\'\u0002\u0002=<\u0003\u0002\u0002\u0002=>\u0003\u0002\u0002", + "\u0002>?\u0003\u0002\u0002\u0002?@\u0007 \u0002\u0002@K\u0005\u0004", + "\u0003\bAC\f\n\u0002\u0002BD\u0007\'\u0002\u0002CB\u0003\u0002\u0002", + "\u0002CD\u0003\u0002\u0002\u0002DE\u0003\u0002\u0002\u0002EF\u0007#", + "\u0002\u0002FG\u0007\u0017\u0002\u0002GH\u0005\u0006\u0004\u0002HI\u0007", + "\u000e\u0002\u0002IK\u0003\u0002\u0002\u0002J#\u0003\u0002\u0002\u0002", + "J&\u0003\u0002\u0002\u0002J)\u0003\u0002\u0002\u0002J,\u0003\u0002\u0002", + "\u0002J/\u0003\u0002\u0002\u0002J5\u0003\u0002\u0002\u0002J8\u0003\u0002", + "\u0002\u0002J;\u0003\u0002\u0002\u0002JA\u0003\u0002\u0002\u0002KN\u0003", + "\u0002\u0002\u0002LJ\u0003\u0002\u0002\u0002LM\u0003\u0002\u0002\u0002", + "M\u0005\u0003\u0002\u0002\u0002NL\u0003\u0002\u0002\u0002OT\u0005\u0004", + "\u0003\u0002PQ\u0007\u0010\u0002\u0002QS\u0005\u0004\u0003\u0002RP\u0003", + "\u0002\u0002\u0002SV\u0003\u0002\u0002\u0002TR\u0003\u0002\u0002\u0002", + "TU\u0003\u0002\u0002\u0002U\u0007\u0003\u0002\u0002\u0002VT\u0003\u0002", + "\u0002\u0002WX\u0005\u0010\t\u0002XZ\u0007\u0017\u0002\u0002Y[\u0005", + "\n\u0006\u0002ZY\u0003\u0002\u0002\u0002Z[\u0003\u0002\u0002\u0002[", + "\\\u0003\u0002\u0002\u0002\\]\u0007\u000e\u0002\u0002]\t\u0003\u0002", + "\u0002\u0002^c\u0005\f\u0007\u0002_`\u0007\u0010\u0002\u0002`b\u0005", + "\f\u0007\u0002a_\u0003\u0002\u0002\u0002be\u0003\u0002\u0002\u0002c", + "a\u0003\u0002\u0002\u0002cd\u0003\u0002\u0002\u0002d\u000b\u0003\u0002", + "\u0002\u0002ec\u0003\u0002\u0002\u0002fg\u00076\u0002\u0002gi\u0007", + "\r\u0002\u0002hf\u0003\u0002\u0002\u0002hi\u0003\u0002\u0002\u0002i", + "j\u0003\u0002\u0002\u0002jk\u0005\u0004\u0003\u0002k\r\u0003\u0002\u0002", + "\u0002ln\u0007\u001a\u0002\u0002ml\u0003\u0002\u0002\u0002mn\u0003\u0002", + "\u0002\u0002nr\u0003\u0002\u0002\u0002op\u0005\u0010\t\u0002pq\u0007", + "\u0018\u0002\u0002qs\u0003\u0002\u0002\u0002ro\u0003\u0002\u0002\u0002", + "rs\u0003\u0002\u0002\u0002st\u0003\u0002\u0002\u0002tw\u0005\u0010\t", + "\u0002uv\u0007\u000f\u0002\u0002vx\u0005\u0010\t\u0002wu\u0003\u0002", + 
"\u0002\u0002wx\u0003\u0002\u0002\u0002x\u000f\u0003\u0002\u0002\u0002", + "y~\u00076\u0002\u0002z{\u0007\u0011\u0002\u0002{}\u00076\u0002\u0002", + "|z\u0003\u0002\u0002\u0002}\u0080\u0003\u0002\u0002\u0002~|\u0003\u0002", + "\u0002\u0002~\u007f\u0003\u0002\u0002\u0002\u007f\u0011\u0003\u0002", + "\u0002\u0002\u0080~\u0003\u0002\u0002\u0002\u0081\u0082\t\n\u0002\u0002", + "\u0082\u0013\u0003\u0002\u0002\u0002\u0010!1=CJLTZchmrw~"].join(""); var atn = new antlr4.atn.ATNDeserializer().deserialize(serializedATN); @@ -106,8 +106,8 @@ var symbolicNames = [ null, "AND", "EQ", "GT_EQ", "LT_EQ", "NOT_EQ1", "NOT_EQ2", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", - "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", - "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_ILIKE", + "K_IN", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", @@ -167,26 +167,27 @@ PqlParser.UNDER = 28; PqlParser.K_AND = 29; PqlParser.K_BETWEEN = 30; PqlParser.K_FALSE = 31; -PqlParser.K_IN = 32; -PqlParser.K_IS = 33; -PqlParser.K_ISNULL = 34; -PqlParser.K_LIKE = 35; -PqlParser.K_NOT = 36; -PqlParser.K_NOTNULL = 37; -PqlParser.K_NULL = 38; -PqlParser.K_OR = 39; -PqlParser.K_TRUE = 40; -PqlParser.NUMERIC_LITERAL = 41; -PqlParser.DOUBLE_QUOTED_STRING = 42; -PqlParser.DOUBLE_QUOTED_STRING_TEL = 43; -PqlParser.DOUBLE_QUOTED_STRING_SQL = 44; -PqlParser.SINGLE_QUOTED_STRING = 45; -PqlParser.SINGLE_QUOTED_STRING_TEL = 46; -PqlParser.SINGLE_QUOTED_STRING_SQL = 47; -PqlParser.SINGLE_LINE_COMMENT = 48; -PqlParser.MULTILINE_COMMENT = 49; -PqlParser.SPACES = 50; -PqlParser.WORD = 51; +PqlParser.K_ILIKE = 32; +PqlParser.K_IN = 33; +PqlParser.K_IS = 34; +PqlParser.K_ISNULL = 35; +PqlParser.K_LIKE = 36; +PqlParser.K_NOT = 37; +PqlParser.K_NOTNULL = 38; +PqlParser.K_NULL = 39; +PqlParser.K_OR = 40; +PqlParser.K_TRUE = 41; +PqlParser.NUMERIC_LITERAL = 42; +PqlParser.DOUBLE_QUOTED_STRING = 43; +PqlParser.DOUBLE_QUOTED_STRING_TEL = 44; +PqlParser.DOUBLE_QUOTED_STRING_SQL = 45; +PqlParser.SINGLE_QUOTED_STRING = 46; +PqlParser.SINGLE_QUOTED_STRING_TEL = 47; +PqlParser.SINGLE_QUOTED_STRING_SQL = 48; +PqlParser.SINGLE_LINE_COMMENT = 49; +PqlParser.MULTILINE_COMMENT = 50; +PqlParser.SPACES = 51; +PqlParser.WORD = 52; PqlParser.RULE_parseTel = 0; PqlParser.RULE_expr = 1; @@ -391,6 +392,10 @@ ExprContext.prototype.K_LIKE = function() { return this.getToken(PqlParser.K_LIKE, 0); }; +ExprContext.prototype.K_ILIKE = function() { + return this.getToken(PqlParser.K_ILIKE, 0); +}; + ExprContext.prototype.K_AND = function() { return this.getToken(PqlParser.K_AND, 0); }; @@ -589,7 +594,7 @@ PqlParser.prototype.expr = function(_p) { this.state = 43; localctx.operator = this._input.LT(1); _la = this._input.LA(1); - if(!(((((_la - 2)) & ~0x1f) == 0 && ((1 << (_la - 2)) & ((1 << (PqlParser.EQ - 2)) | (1 << (PqlParser.NOT_EQ1 - 2)) | (1 << (PqlParser.NOT_EQ2 - 2)) | (1 << (PqlParser.ASSIGN - 2)) | (1 << (PqlParser.K_IS - 2)))) !== 0))) { + if(!((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.EQ) | (1 << PqlParser.NOT_EQ1) | (1 << PqlParser.NOT_EQ2) | (1 << PqlParser.ASSIGN))) !== 0) || _la===PqlParser.K_IS)) { localctx.operator = this._errHandler.recoverInline(this); } else { @@ -617,7 +622,15 @@ PqlParser.prototype.expr = 
function(_p) { } this.state = 49; - localctx.operator = this.match(PqlParser.K_LIKE); + localctx.operator = this._input.LT(1); + _la = this._input.LA(1); + if(!(_la===PqlParser.K_ILIKE || _la===PqlParser.K_LIKE)) { + localctx.operator = this._errHandler.recoverInline(this); + } + else { + this._errHandler.reportMatch(this); + this.consume(); + } this.state = 50; localctx.right = this.expr(10); break; @@ -909,7 +922,7 @@ PqlParser.prototype.fn = function() { this.state = 88; this._errHandler.sync(this); _la = this._input.LA(1); - if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE))) !== 0) || ((((_la - 36)) & ~0x1f) == 0 && ((1 << (_la - 36)) & ((1 << (PqlParser.K_NOT - 36)) | (1 << (PqlParser.K_NULL - 36)) | (1 << (PqlParser.K_TRUE - 36)) | (1 << (PqlParser.NUMERIC_LITERAL - 36)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 36)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 36)) | (1 << (PqlParser.WORD - 36)))) !== 0)) { + if((((_la) & ~0x1f) == 0 && ((1 << _la) & ((1 << PqlParser.MINUS) | (1 << PqlParser.OPEN_PAREN) | (1 << PqlParser.PLUS) | (1 << PqlParser.QUESTION_MARK) | (1 << PqlParser.K_FALSE))) !== 0) || ((((_la - 37)) & ~0x1f) == 0 && ((1 << (_la - 37)) & ((1 << (PqlParser.K_NOT - 37)) | (1 << (PqlParser.K_NULL - 37)) | (1 << (PqlParser.K_TRUE - 37)) | (1 << (PqlParser.NUMERIC_LITERAL - 37)) | (1 << (PqlParser.DOUBLE_QUOTED_STRING - 37)) | (1 << (PqlParser.SINGLE_QUOTED_STRING - 37)) | (1 << (PqlParser.WORD - 37)))) !== 0)) { this.state = 87; localctx.arguments = this.fnArgs(); } diff --git a/python/src/pql_grammar/antlr/PqlLexer.py b/python/src/pql_grammar/antlr/PqlLexer.py index 0ee8cdb..67c5c15 100644 --- a/python/src/pql_grammar/antlr/PqlLexer.py +++ b/python/src/pql_grammar/antlr/PqlLexer.py @@ -8,8 +8,8 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\65") - buf.write("\u01d9\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\66") + buf.write("\u01e1\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -20,204 +20,207 @@ def serializedATN(): buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") - buf.write("L\4M\tM\4N\tN\4O\tO\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3") - buf.write("\4\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t") - buf.write("\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3") - buf.write("\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23") - buf.write("\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31") - buf.write("\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36") - buf.write("\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3 ") - buf.write("\3 \3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3#\3#\3#\3#\3#\3") - buf.write("#\3#\3$\3$\3$\3$\3$\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3&\3") - buf.write("&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3)\3)\3)\3)\3)\3*\6*\u011f") - buf.write("\n*\r*\16*\u0120\3*\3*\7*\u0125\n*\f*\16*\u0128\13*\5") - buf.write("*\u012a\n*\3*\3*\5*\u012e\n*\3*\6*\u0131\n*\r*\16*\u0132") - 
buf.write("\5*\u0135\n*\3*\3*\6*\u0139\n*\r*\16*\u013a\3*\3*\5*\u013f") - buf.write("\n*\3*\6*\u0142\n*\r*\16*\u0143\5*\u0146\n*\5*\u0148\n") - buf.write("*\3+\3+\3,\3,\3,\3,\7,\u0150\n,\f,\16,\u0153\13,\3,\3") - buf.write(",\3-\3-\3-\3-\7-\u015b\n-\f-\16-\u015e\13-\3-\3-\3.\3") - buf.write(".\3/\3/\3/\3/\7/\u0168\n/\f/\16/\u016b\13/\3/\3/\3\60") - buf.write("\3\60\3\60\3\60\7\60\u0173\n\60\f\60\16\60\u0176\13\60") - buf.write("\3\60\3\60\3\61\3\61\3\61\3\61\3\61\5\61\u017f\n\61\3") - buf.write("\61\7\61\u0182\n\61\f\61\16\61\u0185\13\61\3\61\3\61\3") - buf.write("\62\3\62\3\62\3\62\7\62\u018d\n\62\f\62\16\62\u0190\13") - buf.write("\62\3\62\3\62\3\62\5\62\u0195\n\62\3\62\3\62\3\63\3\63") - buf.write("\3\63\3\63\3\64\3\64\7\64\u019f\n\64\f\64\16\64\u01a2") - buf.write("\13\64\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3:\3") - buf.write(":\3;\3;\3<\3<\3=\3=\3>\3>\3?\3?\3@\3@\3A\3A\3B\3B\3C\3") - buf.write("C\3D\3D\3E\3E\3F\3F\3G\3G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3") - buf.write("L\3M\3M\3N\3N\3O\3O\3\u018e\2P\3\3\5\4\7\5\t\6\13\7\r") - buf.write("\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!") - buf.write("\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67") - buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61") - buf.write("a\62c\63e\64g\65i\2k\2m\2o\2q\2s\2u\2w\2y\2{\2}\2\177") - buf.write("\2\u0081\2\u0083\2\u0085\2\u0087\2\u0089\2\u008b\2\u008d") - buf.write("\2\u008f\2\u0091\2\u0093\2\u0095\2\u0097\2\u0099\2\u009b") - buf.write("\2\u009d\2\3\2$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2") - buf.write("\13\r\17\17\"\"\5\2C\\aac|\6\2\62;C\\aac|\3\2\62;\4\2") - buf.write("CCcc\4\2DDdd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4") - buf.write("\2JJjj\4\2KKkk\4\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPp") - buf.write("p\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2") - buf.write("WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01d6") - buf.write("\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13") - buf.write("\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3") - buf.write("\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2") - buf.write("\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2") - buf.write("%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2") - buf.write("\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67") - buf.write("\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2") - buf.write("A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2") - buf.write("\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2") - buf.write("\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2") - buf.write("\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3") - buf.write("\2\2\2\3\u009f\3\2\2\2\5\u00a2\3\2\2\2\7\u00a5\3\2\2\2") - buf.write("\t\u00a8\3\2\2\2\13\u00ab\3\2\2\2\r\u00ae\3\2\2\2\17\u00b1") - buf.write("\3\2\2\2\21\u00b4\3\2\2\2\23\u00b7\3\2\2\2\25\u00ba\3") - buf.write("\2\2\2\27\u00bc\3\2\2\2\31\u00be\3\2\2\2\33\u00c0\3\2") - buf.write("\2\2\35\u00c2\3\2\2\2\37\u00c4\3\2\2\2!\u00c6\3\2\2\2") - buf.write("#\u00c8\3\2\2\2%\u00ca\3\2\2\2\'\u00cc\3\2\2\2)\u00ce") - buf.write("\3\2\2\2+\u00d0\3\2\2\2-\u00d2\3\2\2\2/\u00d4\3\2\2\2") - buf.write("\61\u00d6\3\2\2\2\63\u00d8\3\2\2\2\65\u00da\3\2\2\2\67") - buf.write("\u00dc\3\2\2\29\u00de\3\2\2\2;\u00e0\3\2\2\2=\u00e4\3") - buf.write("\2\2\2?\u00ec\3\2\2\2A\u00f2\3\2\2\2C\u00f5\3\2\2\2E\u00f8") - buf.write("\3\2\2\2G\u00ff\3\2\2\2I\u0104\3\2\2\2K\u0108\3\2\2\2") - buf.write("M\u0110\3\2\2\2O\u0115\3\2\2\2Q\u0118\3\2\2\2S\u0147\3") - 
buf.write("\2\2\2U\u0149\3\2\2\2W\u014b\3\2\2\2Y\u0156\3\2\2\2[\u0161") - buf.write("\3\2\2\2]\u0163\3\2\2\2_\u016e\3\2\2\2a\u017e\3\2\2\2") - buf.write("c\u0188\3\2\2\2e\u0198\3\2\2\2g\u019c\3\2\2\2i\u01a3\3") - buf.write("\2\2\2k\u01a5\3\2\2\2m\u01a7\3\2\2\2o\u01a9\3\2\2\2q\u01ab") - buf.write("\3\2\2\2s\u01ad\3\2\2\2u\u01af\3\2\2\2w\u01b1\3\2\2\2") - buf.write("y\u01b3\3\2\2\2{\u01b5\3\2\2\2}\u01b7\3\2\2\2\177\u01b9") - buf.write("\3\2\2\2\u0081\u01bb\3\2\2\2\u0083\u01bd\3\2\2\2\u0085") - buf.write("\u01bf\3\2\2\2\u0087\u01c1\3\2\2\2\u0089\u01c3\3\2\2\2") - buf.write("\u008b\u01c5\3\2\2\2\u008d\u01c7\3\2\2\2\u008f\u01c9\3") - buf.write("\2\2\2\u0091\u01cb\3\2\2\2\u0093\u01cd\3\2\2\2\u0095\u01cf") - buf.write("\3\2\2\2\u0097\u01d1\3\2\2\2\u0099\u01d3\3\2\2\2\u009b") - buf.write("\u01d5\3\2\2\2\u009d\u01d7\3\2\2\2\u009f\u00a0\7(\2\2") - buf.write("\u00a0\u00a1\7(\2\2\u00a1\4\3\2\2\2\u00a2\u00a3\7?\2\2") - buf.write("\u00a3\u00a4\7?\2\2\u00a4\6\3\2\2\2\u00a5\u00a6\7@\2\2") - buf.write("\u00a6\u00a7\7?\2\2\u00a7\b\3\2\2\2\u00a8\u00a9\7>\2\2") - buf.write("\u00a9\u00aa\7?\2\2\u00aa\n\3\2\2\2\u00ab\u00ac\7#\2\2") - buf.write("\u00ac\u00ad\7?\2\2\u00ad\f\3\2\2\2\u00ae\u00af\7>\2\2") - buf.write("\u00af\u00b0\7@\2\2\u00b0\16\3\2\2\2\u00b1\u00b2\7~\2") - buf.write("\2\u00b2\u00b3\7~\2\2\u00b3\20\3\2\2\2\u00b4\u00b5\7>") - buf.write("\2\2\u00b5\u00b6\7>\2\2\u00b6\22\3\2\2\2\u00b7\u00b8\7") - buf.write("@\2\2\u00b8\u00b9\7@\2\2\u00b9\24\3\2\2\2\u00ba\u00bb") - buf.write("\7(\2\2\u00bb\26\3\2\2\2\u00bc\u00bd\7?\2\2\u00bd\30\3") - buf.write("\2\2\2\u00be\u00bf\7+\2\2\u00bf\32\3\2\2\2\u00c0\u00c1") - buf.write("\7<\2\2\u00c1\34\3\2\2\2\u00c2\u00c3\7.\2\2\u00c3\36\3") - buf.write("\2\2\2\u00c4\u00c5\7\60\2\2\u00c5 \3\2\2\2\u00c6\u00c7") - buf.write("\7\61\2\2\u00c7\"\3\2\2\2\u00c8\u00c9\7@\2\2\u00c9$\3") - buf.write("\2\2\2\u00ca\u00cb\7>\2\2\u00cb&\3\2\2\2\u00cc\u00cd\7") - buf.write("/\2\2\u00cd(\3\2\2\2\u00ce\u00cf\7\'\2\2\u00cf*\3\2\2") - buf.write("\2\u00d0\u00d1\7*\2\2\u00d1,\3\2\2\2\u00d2\u00d3\7~\2") - buf.write("\2\u00d3.\3\2\2\2\u00d4\u00d5\7-\2\2\u00d5\60\3\2\2\2") - buf.write("\u00d6\u00d7\7A\2\2\u00d7\62\3\2\2\2\u00d8\u00d9\7=\2") - buf.write("\2\u00d9\64\3\2\2\2\u00da\u00db\7,\2\2\u00db\66\3\2\2") - buf.write("\2\u00dc\u00dd\7\u0080\2\2\u00dd8\3\2\2\2\u00de\u00df") - buf.write("\7a\2\2\u00df:\3\2\2\2\u00e0\u00e1\5k\66\2\u00e1\u00e2") - buf.write("\5\u0085C\2\u00e2\u00e3\5q9\2\u00e3<\3\2\2\2\u00e4\u00e5") - buf.write("\5m\67\2\u00e5\u00e6\5s:\2\u00e6\u00e7\5\u0091I\2\u00e7") - buf.write("\u00e8\5\u0097L\2\u00e8\u00e9\5s:\2\u00e9\u00ea\5s:\2") - buf.write("\u00ea\u00eb\5\u0085C\2\u00eb>\3\2\2\2\u00ec\u00ed\5u") - buf.write(";\2\u00ed\u00ee\5k\66\2\u00ee\u00ef\5\u0081A\2\u00ef\u00f0") - buf.write("\5\u008fH\2\u00f0\u00f1\5s:\2\u00f1@\3\2\2\2\u00f2\u00f3") - buf.write("\5{>\2\u00f3\u00f4\5\u0085C\2\u00f4B\3\2\2\2\u00f5\u00f6") - buf.write("\5{>\2\u00f6\u00f7\5\u008fH\2\u00f7D\3\2\2\2\u00f8\u00f9") - buf.write("\5{>\2\u00f9\u00fa\5\u008fH\2\u00fa\u00fb\5\u0085C\2\u00fb") - buf.write("\u00fc\5\u0093J\2\u00fc\u00fd\5\u0081A\2\u00fd\u00fe\5") - buf.write("\u0081A\2\u00feF\3\2\2\2\u00ff\u0100\5\u0081A\2\u0100") - buf.write("\u0101\5{>\2\u0101\u0102\5\177@\2\u0102\u0103\5s:\2\u0103") - buf.write("H\3\2\2\2\u0104\u0105\5\u0085C\2\u0105\u0106\5\u0087D") - buf.write("\2\u0106\u0107\5\u0091I\2\u0107J\3\2\2\2\u0108\u0109\5") - buf.write("\u0085C\2\u0109\u010a\5\u0087D\2\u010a\u010b\5\u0091I") - buf.write("\2\u010b\u010c\5\u0085C\2\u010c\u010d\5\u0093J\2\u010d") - 
buf.write("\u010e\5\u0081A\2\u010e\u010f\5\u0081A\2\u010fL\3\2\2") - buf.write("\2\u0110\u0111\5\u0085C\2\u0111\u0112\5\u0093J\2\u0112") - buf.write("\u0113\5\u0081A\2\u0113\u0114\5\u0081A\2\u0114N\3\2\2") - buf.write("\2\u0115\u0116\5\u0087D\2\u0116\u0117\5\u008dG\2\u0117") - buf.write("P\3\2\2\2\u0118\u0119\5\u0091I\2\u0119\u011a\5\u008dG") - buf.write("\2\u011a\u011b\5\u0093J\2\u011b\u011c\5s:\2\u011cR\3\2") - buf.write("\2\2\u011d\u011f\5i\65\2\u011e\u011d\3\2\2\2\u011f\u0120") - buf.write("\3\2\2\2\u0120\u011e\3\2\2\2\u0120\u0121\3\2\2\2\u0121") - buf.write("\u0129\3\2\2\2\u0122\u0126\7\60\2\2\u0123\u0125\5i\65") - buf.write("\2\u0124\u0123\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124") - buf.write("\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u012a\3\2\2\2\u0128") - buf.write("\u0126\3\2\2\2\u0129\u0122\3\2\2\2\u0129\u012a\3\2\2\2") - buf.write("\u012a\u0134\3\2\2\2\u012b\u012d\5s:\2\u012c\u012e\t\2") - buf.write("\2\2\u012d\u012c\3\2\2\2\u012d\u012e\3\2\2\2\u012e\u0130") - buf.write("\3\2\2\2\u012f\u0131\5i\65\2\u0130\u012f\3\2\2\2\u0131") - buf.write("\u0132\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2\2\2") - buf.write("\u0133\u0135\3\2\2\2\u0134\u012b\3\2\2\2\u0134\u0135\3") - buf.write("\2\2\2\u0135\u0148\3\2\2\2\u0136\u0138\7\60\2\2\u0137") - buf.write("\u0139\5i\65\2\u0138\u0137\3\2\2\2\u0139\u013a\3\2\2\2") - buf.write("\u013a\u0138\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u0145\3") - buf.write("\2\2\2\u013c\u013e\5s:\2\u013d\u013f\t\2\2\2\u013e\u013d") - buf.write("\3\2\2\2\u013e\u013f\3\2\2\2\u013f\u0141\3\2\2\2\u0140") - buf.write("\u0142\5i\65\2\u0141\u0140\3\2\2\2\u0142\u0143\3\2\2\2") - buf.write("\u0143\u0141\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0146\3") - buf.write("\2\2\2\u0145\u013c\3\2\2\2\u0145\u0146\3\2\2\2\u0146\u0148") - buf.write("\3\2\2\2\u0147\u011e\3\2\2\2\u0147\u0136\3\2\2\2\u0148") - buf.write("T\3\2\2\2\u0149\u014a\5W,\2\u014aV\3\2\2\2\u014b\u0151") - buf.write("\7$\2\2\u014c\u014d\7^\2\2\u014d\u0150\7$\2\2\u014e\u0150") - buf.write("\n\3\2\2\u014f\u014c\3\2\2\2\u014f\u014e\3\2\2\2\u0150") - buf.write("\u0153\3\2\2\2\u0151\u014f\3\2\2\2\u0151\u0152\3\2\2\2") - buf.write("\u0152\u0154\3\2\2\2\u0153\u0151\3\2\2\2\u0154\u0155\7") - buf.write("$\2\2\u0155X\3\2\2\2\u0156\u015c\7$\2\2\u0157\u0158\7") - buf.write("$\2\2\u0158\u015b\7$\2\2\u0159\u015b\n\3\2\2\u015a\u0157") - buf.write("\3\2\2\2\u015a\u0159\3\2\2\2\u015b\u015e\3\2\2\2\u015c") - buf.write("\u015a\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u015f\3\2\2\2") - buf.write("\u015e\u015c\3\2\2\2\u015f\u0160\7$\2\2\u0160Z\3\2\2\2") - buf.write("\u0161\u0162\5]/\2\u0162\\\3\2\2\2\u0163\u0169\7)\2\2") - buf.write("\u0164\u0165\7^\2\2\u0165\u0168\7)\2\2\u0166\u0168\n\4") - buf.write("\2\2\u0167\u0164\3\2\2\2\u0167\u0166\3\2\2\2\u0168\u016b") - buf.write("\3\2\2\2\u0169\u0167\3\2\2\2\u0169\u016a\3\2\2\2\u016a") - buf.write("\u016c\3\2\2\2\u016b\u0169\3\2\2\2\u016c\u016d\7)\2\2") - buf.write("\u016d^\3\2\2\2\u016e\u0174\7)\2\2\u016f\u0170\7)\2\2") - buf.write("\u0170\u0173\7)\2\2\u0171\u0173\n\4\2\2\u0172\u016f\3") - buf.write("\2\2\2\u0172\u0171\3\2\2\2\u0173\u0176\3\2\2\2\u0174\u0172") - buf.write("\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2\u0176") - buf.write("\u0174\3\2\2\2\u0177\u0178\7)\2\2\u0178`\3\2\2\2\u0179") - buf.write("\u017a\7/\2\2\u017a\u017f\7/\2\2\u017b\u017c\7\61\2\2") - buf.write("\u017c\u017f\7\61\2\2\u017d\u017f\7%\2\2\u017e\u0179\3") - buf.write("\2\2\2\u017e\u017b\3\2\2\2\u017e\u017d\3\2\2\2\u017f\u0183") - buf.write("\3\2\2\2\u0180\u0182\n\5\2\2\u0181\u0180\3\2\2\2\u0182") - 
buf.write("\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2\2\2") - buf.write("\u0184\u0186\3\2\2\2\u0185\u0183\3\2\2\2\u0186\u0187\b") - buf.write("\61\2\2\u0187b\3\2\2\2\u0188\u0189\7\61\2\2\u0189\u018a") - buf.write("\7,\2\2\u018a\u018e\3\2\2\2\u018b\u018d\13\2\2\2\u018c") - buf.write("\u018b\3\2\2\2\u018d\u0190\3\2\2\2\u018e\u018f\3\2\2\2") - buf.write("\u018e\u018c\3\2\2\2\u018f\u0194\3\2\2\2\u0190\u018e\3") - buf.write("\2\2\2\u0191\u0192\7,\2\2\u0192\u0195\7\61\2\2\u0193\u0195") - buf.write("\7\2\2\3\u0194\u0191\3\2\2\2\u0194\u0193\3\2\2\2\u0195") - buf.write("\u0196\3\2\2\2\u0196\u0197\b\62\2\2\u0197d\3\2\2\2\u0198") - buf.write("\u0199\t\6\2\2\u0199\u019a\3\2\2\2\u019a\u019b\b\63\2") - buf.write("\2\u019bf\3\2\2\2\u019c\u01a0\t\7\2\2\u019d\u019f\t\b") - buf.write("\2\2\u019e\u019d\3\2\2\2\u019f\u01a2\3\2\2\2\u01a0\u019e") - buf.write("\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1h\3\2\2\2\u01a2\u01a0") - buf.write("\3\2\2\2\u01a3\u01a4\t\t\2\2\u01a4j\3\2\2\2\u01a5\u01a6") - buf.write("\t\n\2\2\u01a6l\3\2\2\2\u01a7\u01a8\t\13\2\2\u01a8n\3") - buf.write("\2\2\2\u01a9\u01aa\t\f\2\2\u01aap\3\2\2\2\u01ab\u01ac") - buf.write("\t\r\2\2\u01acr\3\2\2\2\u01ad\u01ae\t\16\2\2\u01aet\3") - buf.write("\2\2\2\u01af\u01b0\t\17\2\2\u01b0v\3\2\2\2\u01b1\u01b2") - buf.write("\t\20\2\2\u01b2x\3\2\2\2\u01b3\u01b4\t\21\2\2\u01b4z\3") - buf.write("\2\2\2\u01b5\u01b6\t\22\2\2\u01b6|\3\2\2\2\u01b7\u01b8") - buf.write("\t\23\2\2\u01b8~\3\2\2\2\u01b9\u01ba\t\24\2\2\u01ba\u0080") - buf.write("\3\2\2\2\u01bb\u01bc\t\25\2\2\u01bc\u0082\3\2\2\2\u01bd") - buf.write("\u01be\t\26\2\2\u01be\u0084\3\2\2\2\u01bf\u01c0\t\27\2") - buf.write("\2\u01c0\u0086\3\2\2\2\u01c1\u01c2\t\30\2\2\u01c2\u0088") - buf.write("\3\2\2\2\u01c3\u01c4\t\31\2\2\u01c4\u008a\3\2\2\2\u01c5") - buf.write("\u01c6\t\32\2\2\u01c6\u008c\3\2\2\2\u01c7\u01c8\t\33\2") - buf.write("\2\u01c8\u008e\3\2\2\2\u01c9\u01ca\t\34\2\2\u01ca\u0090") - buf.write("\3\2\2\2\u01cb\u01cc\t\35\2\2\u01cc\u0092\3\2\2\2\u01cd") - buf.write("\u01ce\t\36\2\2\u01ce\u0094\3\2\2\2\u01cf\u01d0\t\37\2") - buf.write("\2\u01d0\u0096\3\2\2\2\u01d1\u01d2\t \2\2\u01d2\u0098") - buf.write("\3\2\2\2\u01d3\u01d4\t!\2\2\u01d4\u009a\3\2\2\2\u01d5") - buf.write("\u01d6\t\"\2\2\u01d6\u009c\3\2\2\2\u01d7\u01d8\t#\2\2") - buf.write("\u01d8\u009e\3\2\2\2\33\2\u0120\u0126\u0129\u012d\u0132") - buf.write("\u0134\u013a\u013e\u0143\u0145\u0147\u014f\u0151\u015a") - buf.write("\u015c\u0167\u0169\u0172\u0174\u017e\u0183\u018e\u0194") - buf.write("\u01a0\3\2\3\2") + buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\3\2\3\2\3\2\3\3\3\3\3\3\3\4") + buf.write("\3\4\3\4\3\5\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\7\3\b\3\b\3") + buf.write("\b\3\t\3\t\3\t\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3") + buf.write("\16\3\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23") + buf.write("\3\23\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30") + buf.write("\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36") + buf.write("\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37") + buf.write("\3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3#\3") + buf.write("#\3#\3$\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3&\3&\3&\3&\3") + buf.write("\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3") + buf.write(")\3*\3*\3*\3*\3*\3+\6+\u0127\n+\r+\16+\u0128\3+\3+\7+") + buf.write("\u012d\n+\f+\16+\u0130\13+\5+\u0132\n+\3+\3+\5+\u0136") + buf.write("\n+\3+\6+\u0139\n+\r+\16+\u013a\5+\u013d\n+\3+\3+\6+\u0141") + buf.write("\n+\r+\16+\u0142\3+\3+\5+\u0147\n+\3+\6+\u014a\n+\r+\16") + buf.write("+\u014b\5+\u014e\n+\5+\u0150\n+\3,\3,\3-\3-\3-\3-\7-\u0158") + 
buf.write("\n-\f-\16-\u015b\13-\3-\3-\3.\3.\3.\3.\7.\u0163\n.\f.") + buf.write("\16.\u0166\13.\3.\3.\3/\3/\3\60\3\60\3\60\3\60\7\60\u0170") + buf.write("\n\60\f\60\16\60\u0173\13\60\3\60\3\60\3\61\3\61\3\61") + buf.write("\3\61\7\61\u017b\n\61\f\61\16\61\u017e\13\61\3\61\3\61") + buf.write("\3\62\3\62\3\62\3\62\3\62\5\62\u0187\n\62\3\62\7\62\u018a") + buf.write("\n\62\f\62\16\62\u018d\13\62\3\62\3\62\3\63\3\63\3\63") + buf.write("\3\63\7\63\u0195\n\63\f\63\16\63\u0198\13\63\3\63\3\63") + buf.write("\3\63\5\63\u019d\n\63\3\63\3\63\3\64\3\64\3\64\3\64\3") + buf.write("\65\3\65\7\65\u01a7\n\65\f\65\16\65\u01aa\13\65\3\66\3") + buf.write("\66\3\67\3\67\38\38\39\39\3:\3:\3;\3;\3<\3<\3=\3=\3>\3") + buf.write(">\3?\3?\3@\3@\3A\3A\3B\3B\3C\3C\3D\3D\3E\3E\3F\3F\3G\3") + buf.write("G\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3N\3N\3O\3O\3P\3") + buf.write("P\3\u0196\2Q\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13") + buf.write("\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26") + buf.write("+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#") + buf.write("E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66") + buf.write("k\2m\2o\2q\2s\2u\2w\2y\2{\2}\2\177\2\u0081\2\u0083\2\u0085") + buf.write("\2\u0087\2\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093") + buf.write("\2\u0095\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f\2\3\2") + buf.write("$\4\2--//\3\2$$\3\2))\4\2\f\f\17\17\5\2\13\r\17\17\"\"") + buf.write("\5\2C\\aac|\6\2\62;C\\aac|\3\2\62;\4\2CCcc\4\2DDdd\4\2") + buf.write("EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4") + buf.write("\2LLll\4\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRr") + buf.write("r\4\2SSss\4\2TTtt\4\2UUuu\4\2VVvv\4\2WWww\4\2XXxx\4\2") + buf.write("YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\2\u01de\2\3\3\2\2\2\2\5") + buf.write("\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2") + buf.write("\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2") + buf.write("\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2") + buf.write("\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2") + buf.write("\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61") + buf.write("\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2") + buf.write("\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3") + buf.write("\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M") + buf.write("\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2") + buf.write("W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2") + buf.write("\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2") + buf.write("\2\3\u00a1\3\2\2\2\5\u00a4\3\2\2\2\7\u00a7\3\2\2\2\t\u00aa") + buf.write("\3\2\2\2\13\u00ad\3\2\2\2\r\u00b0\3\2\2\2\17\u00b3\3\2") + buf.write("\2\2\21\u00b6\3\2\2\2\23\u00b9\3\2\2\2\25\u00bc\3\2\2") + buf.write("\2\27\u00be\3\2\2\2\31\u00c0\3\2\2\2\33\u00c2\3\2\2\2") + buf.write("\35\u00c4\3\2\2\2\37\u00c6\3\2\2\2!\u00c8\3\2\2\2#\u00ca") + buf.write("\3\2\2\2%\u00cc\3\2\2\2\'\u00ce\3\2\2\2)\u00d0\3\2\2\2") + buf.write("+\u00d2\3\2\2\2-\u00d4\3\2\2\2/\u00d6\3\2\2\2\61\u00d8") + buf.write("\3\2\2\2\63\u00da\3\2\2\2\65\u00dc\3\2\2\2\67\u00de\3") + buf.write("\2\2\29\u00e0\3\2\2\2;\u00e2\3\2\2\2=\u00e6\3\2\2\2?\u00ee") + buf.write("\3\2\2\2A\u00f4\3\2\2\2C\u00fa\3\2\2\2E\u00fd\3\2\2\2") + buf.write("G\u0100\3\2\2\2I\u0107\3\2\2\2K\u010c\3\2\2\2M\u0110\3") + buf.write("\2\2\2O\u0118\3\2\2\2Q\u011d\3\2\2\2S\u0120\3\2\2\2U\u014f") + buf.write("\3\2\2\2W\u0151\3\2\2\2Y\u0153\3\2\2\2[\u015e\3\2\2\2") + buf.write("]\u0169\3\2\2\2_\u016b\3\2\2\2a\u0176\3\2\2\2c\u0186\3") + 
buf.write("\2\2\2e\u0190\3\2\2\2g\u01a0\3\2\2\2i\u01a4\3\2\2\2k\u01ab") + buf.write("\3\2\2\2m\u01ad\3\2\2\2o\u01af\3\2\2\2q\u01b1\3\2\2\2") + buf.write("s\u01b3\3\2\2\2u\u01b5\3\2\2\2w\u01b7\3\2\2\2y\u01b9\3") + buf.write("\2\2\2{\u01bb\3\2\2\2}\u01bd\3\2\2\2\177\u01bf\3\2\2\2") + buf.write("\u0081\u01c1\3\2\2\2\u0083\u01c3\3\2\2\2\u0085\u01c5\3") + buf.write("\2\2\2\u0087\u01c7\3\2\2\2\u0089\u01c9\3\2\2\2\u008b\u01cb") + buf.write("\3\2\2\2\u008d\u01cd\3\2\2\2\u008f\u01cf\3\2\2\2\u0091") + buf.write("\u01d1\3\2\2\2\u0093\u01d3\3\2\2\2\u0095\u01d5\3\2\2\2") + buf.write("\u0097\u01d7\3\2\2\2\u0099\u01d9\3\2\2\2\u009b\u01db\3") + buf.write("\2\2\2\u009d\u01dd\3\2\2\2\u009f\u01df\3\2\2\2\u00a1\u00a2") + buf.write("\7(\2\2\u00a2\u00a3\7(\2\2\u00a3\4\3\2\2\2\u00a4\u00a5") + buf.write("\7?\2\2\u00a5\u00a6\7?\2\2\u00a6\6\3\2\2\2\u00a7\u00a8") + buf.write("\7@\2\2\u00a8\u00a9\7?\2\2\u00a9\b\3\2\2\2\u00aa\u00ab") + buf.write("\7>\2\2\u00ab\u00ac\7?\2\2\u00ac\n\3\2\2\2\u00ad\u00ae") + buf.write("\7#\2\2\u00ae\u00af\7?\2\2\u00af\f\3\2\2\2\u00b0\u00b1") + buf.write("\7>\2\2\u00b1\u00b2\7@\2\2\u00b2\16\3\2\2\2\u00b3\u00b4") + buf.write("\7~\2\2\u00b4\u00b5\7~\2\2\u00b5\20\3\2\2\2\u00b6\u00b7") + buf.write("\7>\2\2\u00b7\u00b8\7>\2\2\u00b8\22\3\2\2\2\u00b9\u00ba") + buf.write("\7@\2\2\u00ba\u00bb\7@\2\2\u00bb\24\3\2\2\2\u00bc\u00bd") + buf.write("\7(\2\2\u00bd\26\3\2\2\2\u00be\u00bf\7?\2\2\u00bf\30\3") + buf.write("\2\2\2\u00c0\u00c1\7+\2\2\u00c1\32\3\2\2\2\u00c2\u00c3") + buf.write("\7<\2\2\u00c3\34\3\2\2\2\u00c4\u00c5\7.\2\2\u00c5\36\3") + buf.write("\2\2\2\u00c6\u00c7\7\60\2\2\u00c7 \3\2\2\2\u00c8\u00c9") + buf.write("\7\61\2\2\u00c9\"\3\2\2\2\u00ca\u00cb\7@\2\2\u00cb$\3") + buf.write("\2\2\2\u00cc\u00cd\7>\2\2\u00cd&\3\2\2\2\u00ce\u00cf\7") + buf.write("/\2\2\u00cf(\3\2\2\2\u00d0\u00d1\7\'\2\2\u00d1*\3\2\2") + buf.write("\2\u00d2\u00d3\7*\2\2\u00d3,\3\2\2\2\u00d4\u00d5\7~\2") + buf.write("\2\u00d5.\3\2\2\2\u00d6\u00d7\7-\2\2\u00d7\60\3\2\2\2") + buf.write("\u00d8\u00d9\7A\2\2\u00d9\62\3\2\2\2\u00da\u00db\7=\2") + buf.write("\2\u00db\64\3\2\2\2\u00dc\u00dd\7,\2\2\u00dd\66\3\2\2") + buf.write("\2\u00de\u00df\7\u0080\2\2\u00df8\3\2\2\2\u00e0\u00e1") + buf.write("\7a\2\2\u00e1:\3\2\2\2\u00e2\u00e3\5m\67\2\u00e3\u00e4") + buf.write("\5\u0087D\2\u00e4\u00e5\5s:\2\u00e5<\3\2\2\2\u00e6\u00e7") + buf.write("\5o8\2\u00e7\u00e8\5u;\2\u00e8\u00e9\5\u0093J\2\u00e9") + buf.write("\u00ea\5\u0099M\2\u00ea\u00eb\5u;\2\u00eb\u00ec\5u;\2") + buf.write("\u00ec\u00ed\5\u0087D\2\u00ed>\3\2\2\2\u00ee\u00ef\5w") + buf.write("<\2\u00ef\u00f0\5m\67\2\u00f0\u00f1\5\u0083B\2\u00f1\u00f2") + buf.write("\5\u0091I\2\u00f2\u00f3\5u;\2\u00f3@\3\2\2\2\u00f4\u00f5") + buf.write("\5}?\2\u00f5\u00f6\5\u0083B\2\u00f6\u00f7\5}?\2\u00f7") + buf.write("\u00f8\5\u0081A\2\u00f8\u00f9\5u;\2\u00f9B\3\2\2\2\u00fa") + buf.write("\u00fb\5}?\2\u00fb\u00fc\5\u0087D\2\u00fcD\3\2\2\2\u00fd") + buf.write("\u00fe\5}?\2\u00fe\u00ff\5\u0091I\2\u00ffF\3\2\2\2\u0100") + buf.write("\u0101\5}?\2\u0101\u0102\5\u0091I\2\u0102\u0103\5\u0087") + buf.write("D\2\u0103\u0104\5\u0095K\2\u0104\u0105\5\u0083B\2\u0105") + buf.write("\u0106\5\u0083B\2\u0106H\3\2\2\2\u0107\u0108\5\u0083B") + buf.write("\2\u0108\u0109\5}?\2\u0109\u010a\5\u0081A\2\u010a\u010b") + buf.write("\5u;\2\u010bJ\3\2\2\2\u010c\u010d\5\u0087D\2\u010d\u010e") + buf.write("\5\u0089E\2\u010e\u010f\5\u0093J\2\u010fL\3\2\2\2\u0110") + buf.write("\u0111\5\u0087D\2\u0111\u0112\5\u0089E\2\u0112\u0113\5") + buf.write("\u0093J\2\u0113\u0114\5\u0087D\2\u0114\u0115\5\u0095K") + 
buf.write("\2\u0115\u0116\5\u0083B\2\u0116\u0117\5\u0083B\2\u0117") + buf.write("N\3\2\2\2\u0118\u0119\5\u0087D\2\u0119\u011a\5\u0095K") + buf.write("\2\u011a\u011b\5\u0083B\2\u011b\u011c\5\u0083B\2\u011c") + buf.write("P\3\2\2\2\u011d\u011e\5\u0089E\2\u011e\u011f\5\u008fH") + buf.write("\2\u011fR\3\2\2\2\u0120\u0121\5\u0093J\2\u0121\u0122\5") + buf.write("\u008fH\2\u0122\u0123\5\u0095K\2\u0123\u0124\5u;\2\u0124") + buf.write("T\3\2\2\2\u0125\u0127\5k\66\2\u0126\u0125\3\2\2\2\u0127") + buf.write("\u0128\3\2\2\2\u0128\u0126\3\2\2\2\u0128\u0129\3\2\2\2") + buf.write("\u0129\u0131\3\2\2\2\u012a\u012e\7\60\2\2\u012b\u012d") + buf.write("\5k\66\2\u012c\u012b\3\2\2\2\u012d\u0130\3\2\2\2\u012e") + buf.write("\u012c\3\2\2\2\u012e\u012f\3\2\2\2\u012f\u0132\3\2\2\2") + buf.write("\u0130\u012e\3\2\2\2\u0131\u012a\3\2\2\2\u0131\u0132\3") + buf.write("\2\2\2\u0132\u013c\3\2\2\2\u0133\u0135\5u;\2\u0134\u0136") + buf.write("\t\2\2\2\u0135\u0134\3\2\2\2\u0135\u0136\3\2\2\2\u0136") + buf.write("\u0138\3\2\2\2\u0137\u0139\5k\66\2\u0138\u0137\3\2\2\2") + buf.write("\u0139\u013a\3\2\2\2\u013a\u0138\3\2\2\2\u013a\u013b\3") + buf.write("\2\2\2\u013b\u013d\3\2\2\2\u013c\u0133\3\2\2\2\u013c\u013d") + buf.write("\3\2\2\2\u013d\u0150\3\2\2\2\u013e\u0140\7\60\2\2\u013f") + buf.write("\u0141\5k\66\2\u0140\u013f\3\2\2\2\u0141\u0142\3\2\2\2") + buf.write("\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u014d\3") + buf.write("\2\2\2\u0144\u0146\5u;\2\u0145\u0147\t\2\2\2\u0146\u0145") + buf.write("\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0149\3\2\2\2\u0148") + buf.write("\u014a\5k\66\2\u0149\u0148\3\2\2\2\u014a\u014b\3\2\2\2") + buf.write("\u014b\u0149\3\2\2\2\u014b\u014c\3\2\2\2\u014c\u014e\3") + buf.write("\2\2\2\u014d\u0144\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150") + buf.write("\3\2\2\2\u014f\u0126\3\2\2\2\u014f\u013e\3\2\2\2\u0150") + buf.write("V\3\2\2\2\u0151\u0152\5Y-\2\u0152X\3\2\2\2\u0153\u0159") + buf.write("\7$\2\2\u0154\u0155\7^\2\2\u0155\u0158\7$\2\2\u0156\u0158") + buf.write("\n\3\2\2\u0157\u0154\3\2\2\2\u0157\u0156\3\2\2\2\u0158") + buf.write("\u015b\3\2\2\2\u0159\u0157\3\2\2\2\u0159\u015a\3\2\2\2") + buf.write("\u015a\u015c\3\2\2\2\u015b\u0159\3\2\2\2\u015c\u015d\7") + buf.write("$\2\2\u015dZ\3\2\2\2\u015e\u0164\7$\2\2\u015f\u0160\7") + buf.write("$\2\2\u0160\u0163\7$\2\2\u0161\u0163\n\3\2\2\u0162\u015f") + buf.write("\3\2\2\2\u0162\u0161\3\2\2\2\u0163\u0166\3\2\2\2\u0164") + buf.write("\u0162\3\2\2\2\u0164\u0165\3\2\2\2\u0165\u0167\3\2\2\2") + buf.write("\u0166\u0164\3\2\2\2\u0167\u0168\7$\2\2\u0168\\\3\2\2") + buf.write("\2\u0169\u016a\5_\60\2\u016a^\3\2\2\2\u016b\u0171\7)\2") + buf.write("\2\u016c\u016d\7^\2\2\u016d\u0170\7)\2\2\u016e\u0170\n") + buf.write("\4\2\2\u016f\u016c\3\2\2\2\u016f\u016e\3\2\2\2\u0170\u0173") + buf.write("\3\2\2\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172") + buf.write("\u0174\3\2\2\2\u0173\u0171\3\2\2\2\u0174\u0175\7)\2\2") + buf.write("\u0175`\3\2\2\2\u0176\u017c\7)\2\2\u0177\u0178\7)\2\2") + buf.write("\u0178\u017b\7)\2\2\u0179\u017b\n\4\2\2\u017a\u0177\3") + buf.write("\2\2\2\u017a\u0179\3\2\2\2\u017b\u017e\3\2\2\2\u017c\u017a") + buf.write("\3\2\2\2\u017c\u017d\3\2\2\2\u017d\u017f\3\2\2\2\u017e") + buf.write("\u017c\3\2\2\2\u017f\u0180\7)\2\2\u0180b\3\2\2\2\u0181") + buf.write("\u0182\7/\2\2\u0182\u0187\7/\2\2\u0183\u0184\7\61\2\2") + buf.write("\u0184\u0187\7\61\2\2\u0185\u0187\7%\2\2\u0186\u0181\3") + buf.write("\2\2\2\u0186\u0183\3\2\2\2\u0186\u0185\3\2\2\2\u0187\u018b") + buf.write("\3\2\2\2\u0188\u018a\n\5\2\2\u0189\u0188\3\2\2\2\u018a") + 
buf.write("\u018d\3\2\2\2\u018b\u0189\3\2\2\2\u018b\u018c\3\2\2\2") + buf.write("\u018c\u018e\3\2\2\2\u018d\u018b\3\2\2\2\u018e\u018f\b") + buf.write("\62\2\2\u018fd\3\2\2\2\u0190\u0191\7\61\2\2\u0191\u0192") + buf.write("\7,\2\2\u0192\u0196\3\2\2\2\u0193\u0195\13\2\2\2\u0194") + buf.write("\u0193\3\2\2\2\u0195\u0198\3\2\2\2\u0196\u0197\3\2\2\2") + buf.write("\u0196\u0194\3\2\2\2\u0197\u019c\3\2\2\2\u0198\u0196\3") + buf.write("\2\2\2\u0199\u019a\7,\2\2\u019a\u019d\7\61\2\2\u019b\u019d") + buf.write("\7\2\2\3\u019c\u0199\3\2\2\2\u019c\u019b\3\2\2\2\u019d") + buf.write("\u019e\3\2\2\2\u019e\u019f\b\63\2\2\u019ff\3\2\2\2\u01a0") + buf.write("\u01a1\t\6\2\2\u01a1\u01a2\3\2\2\2\u01a2\u01a3\b\64\2") + buf.write("\2\u01a3h\3\2\2\2\u01a4\u01a8\t\7\2\2\u01a5\u01a7\t\b") + buf.write("\2\2\u01a6\u01a5\3\2\2\2\u01a7\u01aa\3\2\2\2\u01a8\u01a6") + buf.write("\3\2\2\2\u01a8\u01a9\3\2\2\2\u01a9j\3\2\2\2\u01aa\u01a8") + buf.write("\3\2\2\2\u01ab\u01ac\t\t\2\2\u01acl\3\2\2\2\u01ad\u01ae") + buf.write("\t\n\2\2\u01aen\3\2\2\2\u01af\u01b0\t\13\2\2\u01b0p\3") + buf.write("\2\2\2\u01b1\u01b2\t\f\2\2\u01b2r\3\2\2\2\u01b3\u01b4") + buf.write("\t\r\2\2\u01b4t\3\2\2\2\u01b5\u01b6\t\16\2\2\u01b6v\3") + buf.write("\2\2\2\u01b7\u01b8\t\17\2\2\u01b8x\3\2\2\2\u01b9\u01ba") + buf.write("\t\20\2\2\u01baz\3\2\2\2\u01bb\u01bc\t\21\2\2\u01bc|\3") + buf.write("\2\2\2\u01bd\u01be\t\22\2\2\u01be~\3\2\2\2\u01bf\u01c0") + buf.write("\t\23\2\2\u01c0\u0080\3\2\2\2\u01c1\u01c2\t\24\2\2\u01c2") + buf.write("\u0082\3\2\2\2\u01c3\u01c4\t\25\2\2\u01c4\u0084\3\2\2") + buf.write("\2\u01c5\u01c6\t\26\2\2\u01c6\u0086\3\2\2\2\u01c7\u01c8") + buf.write("\t\27\2\2\u01c8\u0088\3\2\2\2\u01c9\u01ca\t\30\2\2\u01ca") + buf.write("\u008a\3\2\2\2\u01cb\u01cc\t\31\2\2\u01cc\u008c\3\2\2") + buf.write("\2\u01cd\u01ce\t\32\2\2\u01ce\u008e\3\2\2\2\u01cf\u01d0") + buf.write("\t\33\2\2\u01d0\u0090\3\2\2\2\u01d1\u01d2\t\34\2\2\u01d2") + buf.write("\u0092\3\2\2\2\u01d3\u01d4\t\35\2\2\u01d4\u0094\3\2\2") + buf.write("\2\u01d5\u01d6\t\36\2\2\u01d6\u0096\3\2\2\2\u01d7\u01d8") + buf.write("\t\37\2\2\u01d8\u0098\3\2\2\2\u01d9\u01da\t \2\2\u01da") + buf.write("\u009a\3\2\2\2\u01db\u01dc\t!\2\2\u01dc\u009c\3\2\2\2") + buf.write("\u01dd\u01de\t\"\2\2\u01de\u009e\3\2\2\2\u01df\u01e0\t") + buf.write("#\2\2\u01e0\u00a0\3\2\2\2\33\2\u0128\u012e\u0131\u0135") + buf.write("\u013a\u013c\u0142\u0146\u014b\u014d\u014f\u0157\u0159") + buf.write("\u0162\u0164\u016f\u0171\u017a\u017c\u0186\u018b\u0196") + buf.write("\u019c\u01a8\3\2\3\2") return buf.getvalue() @@ -258,26 +261,27 @@ class PqlLexer(Lexer): K_AND = 29 K_BETWEEN = 30 K_FALSE = 31 - K_IN = 32 - K_IS = 33 - K_ISNULL = 34 - K_LIKE = 35 - K_NOT = 36 - K_NOTNULL = 37 - K_NULL = 38 - K_OR = 39 - K_TRUE = 40 - NUMERIC_LITERAL = 41 - DOUBLE_QUOTED_STRING = 42 - DOUBLE_QUOTED_STRING_TEL = 43 - DOUBLE_QUOTED_STRING_SQL = 44 - SINGLE_QUOTED_STRING = 45 - SINGLE_QUOTED_STRING_TEL = 46 - SINGLE_QUOTED_STRING_SQL = 47 - SINGLE_LINE_COMMENT = 48 - MULTILINE_COMMENT = 49 - SPACES = 50 - WORD = 51 + K_ILIKE = 32 + K_IN = 33 + K_IS = 34 + K_ISNULL = 35 + K_LIKE = 36 + K_NOT = 37 + K_NOTNULL = 38 + K_NULL = 39 + K_OR = 40 + K_TRUE = 41 + NUMERIC_LITERAL = 42 + DOUBLE_QUOTED_STRING = 43 + DOUBLE_QUOTED_STRING_TEL = 44 + DOUBLE_QUOTED_STRING_SQL = 45 + SINGLE_QUOTED_STRING = 46 + SINGLE_QUOTED_STRING_TEL = 47 + SINGLE_QUOTED_STRING_SQL = 48 + SINGLE_LINE_COMMENT = 49 + MULTILINE_COMMENT = 50 + SPACES = 51 + WORD = 52 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] @@ -294,9 +298,9 @@ class PqlLexer(Lexer): "SHIFT_RIGHT", "AMP", 
"ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", - "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", "K_IS", "K_ISNULL", - "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", - "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", + "K_AND", "K_BETWEEN", "K_FALSE", "K_ILIKE", "K_IN", "K_IS", + "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", + "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "WORD" ] @@ -306,8 +310,8 @@ class PqlLexer(Lexer): "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", "UNDER", "K_AND", "K_BETWEEN", - "K_FALSE", "K_IN", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", - "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", + "K_FALSE", "K_ILIKE", "K_IN", "K_IS", "K_ISNULL", "K_LIKE", + "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", "SINGLE_QUOTED_STRING_SQL", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", diff --git a/python/src/pql_grammar/antlr/PqlParser.py b/python/src/pql_grammar/antlr/PqlParser.py index f6d6e66..77f86d8 100644 --- a/python/src/pql_grammar/antlr/PqlParser.py +++ b/python/src/pql_grammar/antlr/PqlParser.py @@ -11,7 +11,7 @@ def serializedATN(): with StringIO() as buf: - buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\65") + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\66") buf.write("\u0084\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3") buf.write("\3\3\3\3\3\3\3\3\3\3\3\5\3\"\n\3\3\3\3\3\3\3\3\3\3\3\3") @@ -23,40 +23,40 @@ def serializedATN(): buf.write("\3\7\5\7i\n\7\3\7\3\7\3\b\5\bn\n\b\3\b\3\b\3\b\5\bs\n") buf.write("\b\3\b\3\b\3\b\5\bx\n\b\3\t\3\t\3\t\7\t}\n\t\f\t\16\t") buf.write("\u0080\13\t\3\n\3\n\3\n\2\3\4\13\2\4\6\b\n\f\16\20\22") - buf.write("\2\n\5\2\25\25\31\31&&\5\2\22\22\26\26\34\34\4\2\25\25") - buf.write("\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r##\4\2\3\3\37\37\4") - buf.write("\2\t\t))\6\2!!((*,//\2\u0092\2\24\3\2\2\2\4!\3\2\2\2\6") - buf.write("O\3\2\2\2\bW\3\2\2\2\n^\3\2\2\2\fh\3\2\2\2\16m\3\2\2\2") - buf.write("\20y\3\2\2\2\22\u0081\3\2\2\2\24\25\5\4\3\2\25\26\7\2") - buf.write("\2\3\26\3\3\2\2\2\27\30\b\3\1\2\30\31\t\2\2\2\31\"\5\4") - buf.write("\3\20\32\33\7\27\2\2\33\34\5\4\3\2\34\35\7\16\2\2\35\"") - buf.write("\3\2\2\2\36\"\5\22\n\2\37\"\5\b\5\2 \"\5\16\b\2!\27\3") - buf.write("\2\2\2!\32\3\2\2\2!\36\3\2\2\2!\37\3\2\2\2! 
\3\2\2\2\"") - buf.write("L\3\2\2\2#$\f\17\2\2$%\t\3\2\2%K\5\4\3\20&\'\f\16\2\2") - buf.write("\'(\t\4\2\2(K\5\4\3\17)*\f\r\2\2*+\t\5\2\2+K\5\4\3\16") - buf.write(",-\f\f\2\2-.\t\6\2\2.K\5\4\3\r/\61\f\13\2\2\60\62\7&\2") - buf.write("\2\61\60\3\2\2\2\61\62\3\2\2\2\62\63\3\2\2\2\63\64\7%") - buf.write("\2\2\64K\5\4\3\f\65\66\f\t\2\2\66\67\t\7\2\2\67K\5\4\3") - buf.write("\n89\f\b\2\29:\t\b\2\2:K\5\4\3\t;=\f\7\2\2<>\7&\2\2=<") - buf.write("\3\2\2\2=>\3\2\2\2>?\3\2\2\2?@\7 \2\2@K\5\4\3\bAC\f\n") - buf.write("\2\2BD\7&\2\2CB\3\2\2\2CD\3\2\2\2DE\3\2\2\2EF\7\"\2\2") - buf.write("FG\7\27\2\2GH\5\6\4\2HI\7\16\2\2IK\3\2\2\2J#\3\2\2\2J") - buf.write("&\3\2\2\2J)\3\2\2\2J,\3\2\2\2J/\3\2\2\2J\65\3\2\2\2J8") - buf.write("\3\2\2\2J;\3\2\2\2JA\3\2\2\2KN\3\2\2\2LJ\3\2\2\2LM\3\2") - buf.write("\2\2M\5\3\2\2\2NL\3\2\2\2OT\5\4\3\2PQ\7\20\2\2QS\5\4\3") - buf.write("\2RP\3\2\2\2SV\3\2\2\2TR\3\2\2\2TU\3\2\2\2U\7\3\2\2\2") - buf.write("VT\3\2\2\2WX\5\20\t\2XZ\7\27\2\2Y[\5\n\6\2ZY\3\2\2\2Z") - buf.write("[\3\2\2\2[\\\3\2\2\2\\]\7\16\2\2]\t\3\2\2\2^c\5\f\7\2") - buf.write("_`\7\20\2\2`b\5\f\7\2a_\3\2\2\2be\3\2\2\2ca\3\2\2\2cd") - buf.write("\3\2\2\2d\13\3\2\2\2ec\3\2\2\2fg\7\65\2\2gi\7\r\2\2hf") - buf.write("\3\2\2\2hi\3\2\2\2ij\3\2\2\2jk\5\4\3\2k\r\3\2\2\2ln\7") - buf.write("\32\2\2ml\3\2\2\2mn\3\2\2\2nr\3\2\2\2op\5\20\t\2pq\7\30") - buf.write("\2\2qs\3\2\2\2ro\3\2\2\2rs\3\2\2\2st\3\2\2\2tw\5\20\t") - buf.write("\2uv\7\17\2\2vx\5\20\t\2wu\3\2\2\2wx\3\2\2\2x\17\3\2\2") - buf.write("\2y~\7\65\2\2z{\7\21\2\2{}\7\65\2\2|z\3\2\2\2}\u0080\3") - buf.write("\2\2\2~|\3\2\2\2~\177\3\2\2\2\177\21\3\2\2\2\u0080~\3") - buf.write("\2\2\2\u0081\u0082\t\t\2\2\u0082\23\3\2\2\2\20!\61=CJ") - buf.write("LTZchmrw~") + buf.write("\2\13\5\2\25\25\31\31\'\'\5\2\22\22\26\26\34\34\4\2\25") + buf.write("\25\31\31\4\2\5\6\23\24\6\2\4\4\7\b\r\r$$\4\2\"\"&&\4") + buf.write("\2\3\3\37\37\4\2\t\t**\6\2!!))+-\60\60\2\u0092\2\24\3") + buf.write("\2\2\2\4!\3\2\2\2\6O\3\2\2\2\bW\3\2\2\2\n^\3\2\2\2\fh") + buf.write("\3\2\2\2\16m\3\2\2\2\20y\3\2\2\2\22\u0081\3\2\2\2\24\25") + buf.write("\5\4\3\2\25\26\7\2\2\3\26\3\3\2\2\2\27\30\b\3\1\2\30\31") + buf.write("\t\2\2\2\31\"\5\4\3\20\32\33\7\27\2\2\33\34\5\4\3\2\34") + buf.write("\35\7\16\2\2\35\"\3\2\2\2\36\"\5\22\n\2\37\"\5\b\5\2 ") + buf.write("\"\5\16\b\2!\27\3\2\2\2!\32\3\2\2\2!\36\3\2\2\2!\37\3") + buf.write("\2\2\2! 
\3\2\2\2\"L\3\2\2\2#$\f\17\2\2$%\t\3\2\2%K\5\4") + buf.write("\3\20&\'\f\16\2\2\'(\t\4\2\2(K\5\4\3\17)*\f\r\2\2*+\t") + buf.write("\5\2\2+K\5\4\3\16,-\f\f\2\2-.\t\6\2\2.K\5\4\3\r/\61\f") + buf.write("\13\2\2\60\62\7\'\2\2\61\60\3\2\2\2\61\62\3\2\2\2\62\63") + buf.write("\3\2\2\2\63\64\t\7\2\2\64K\5\4\3\f\65\66\f\t\2\2\66\67") + buf.write("\t\b\2\2\67K\5\4\3\n89\f\b\2\29:\t\t\2\2:K\5\4\3\t;=\f") + buf.write("\7\2\2<>\7\'\2\2=<\3\2\2\2=>\3\2\2\2>?\3\2\2\2?@\7 \2") + buf.write("\2@K\5\4\3\bAC\f\n\2\2BD\7\'\2\2CB\3\2\2\2CD\3\2\2\2D") + buf.write("E\3\2\2\2EF\7#\2\2FG\7\27\2\2GH\5\6\4\2HI\7\16\2\2IK\3") + buf.write("\2\2\2J#\3\2\2\2J&\3\2\2\2J)\3\2\2\2J,\3\2\2\2J/\3\2\2") + buf.write("\2J\65\3\2\2\2J8\3\2\2\2J;\3\2\2\2JA\3\2\2\2KN\3\2\2\2") + buf.write("LJ\3\2\2\2LM\3\2\2\2M\5\3\2\2\2NL\3\2\2\2OT\5\4\3\2PQ") + buf.write("\7\20\2\2QS\5\4\3\2RP\3\2\2\2SV\3\2\2\2TR\3\2\2\2TU\3") + buf.write("\2\2\2U\7\3\2\2\2VT\3\2\2\2WX\5\20\t\2XZ\7\27\2\2Y[\5") + buf.write("\n\6\2ZY\3\2\2\2Z[\3\2\2\2[\\\3\2\2\2\\]\7\16\2\2]\t\3") + buf.write("\2\2\2^c\5\f\7\2_`\7\20\2\2`b\5\f\7\2a_\3\2\2\2be\3\2") + buf.write("\2\2ca\3\2\2\2cd\3\2\2\2d\13\3\2\2\2ec\3\2\2\2fg\7\66") + buf.write("\2\2gi\7\r\2\2hf\3\2\2\2hi\3\2\2\2ij\3\2\2\2jk\5\4\3\2") + buf.write("k\r\3\2\2\2ln\7\32\2\2ml\3\2\2\2mn\3\2\2\2nr\3\2\2\2o") + buf.write("p\5\20\t\2pq\7\30\2\2qs\3\2\2\2ro\3\2\2\2rs\3\2\2\2st") + buf.write("\3\2\2\2tw\5\20\t\2uv\7\17\2\2vx\5\20\t\2wu\3\2\2\2wx") + buf.write("\3\2\2\2x\17\3\2\2\2y~\7\66\2\2z{\7\21\2\2{}\7\66\2\2") + buf.write("|z\3\2\2\2}\u0080\3\2\2\2~|\3\2\2\2~\177\3\2\2\2\177\21") + buf.write("\3\2\2\2\u0080~\3\2\2\2\u0081\u0082\t\n\2\2\u0082\23\3") + buf.write("\2\2\2\20!\61=CJLTZchmrw~") return buf.getvalue() @@ -80,8 +80,8 @@ class PqlParser ( Parser ): "ASSIGN", "CLOSE_PAREN", "COLON", "COMMA", "DOT", "FORWARD_SLASH", "GT", "LT", "MINUS", "MOD", "OPEN_PAREN", "PIPE", "PLUS", "QUESTION_MARK", "SCOL", "STAR", "TILDE", - "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_IN", - "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", + "UNDER", "K_AND", "K_BETWEEN", "K_FALSE", "K_ILIKE", + "K_IN", "K_IS", "K_ISNULL", "K_LIKE", "K_NOT", "K_NOTNULL", "K_NULL", "K_OR", "K_TRUE", "NUMERIC_LITERAL", "DOUBLE_QUOTED_STRING", "DOUBLE_QUOTED_STRING_TEL", "DOUBLE_QUOTED_STRING_SQL", "SINGLE_QUOTED_STRING", "SINGLE_QUOTED_STRING_TEL", @@ -133,26 +133,27 @@ class PqlParser ( Parser ): K_AND=29 K_BETWEEN=30 K_FALSE=31 - K_IN=32 - K_IS=33 - K_ISNULL=34 - K_LIKE=35 - K_NOT=36 - K_NOTNULL=37 - K_NULL=38 - K_OR=39 - K_TRUE=40 - NUMERIC_LITERAL=41 - DOUBLE_QUOTED_STRING=42 - DOUBLE_QUOTED_STRING_TEL=43 - DOUBLE_QUOTED_STRING_SQL=44 - SINGLE_QUOTED_STRING=45 - SINGLE_QUOTED_STRING_TEL=46 - SINGLE_QUOTED_STRING_SQL=47 - SINGLE_LINE_COMMENT=48 - MULTILINE_COMMENT=49 - SPACES=50 - WORD=51 + K_ILIKE=32 + K_IN=33 + K_IS=34 + K_ISNULL=35 + K_LIKE=36 + K_NOT=37 + K_NOTNULL=38 + K_NULL=39 + K_OR=40 + K_TRUE=41 + NUMERIC_LITERAL=42 + DOUBLE_QUOTED_STRING=43 + DOUBLE_QUOTED_STRING_TEL=44 + DOUBLE_QUOTED_STRING_SQL=45 + SINGLE_QUOTED_STRING=46 + SINGLE_QUOTED_STRING_TEL=47 + SINGLE_QUOTED_STRING_SQL=48 + SINGLE_LINE_COMMENT=49 + MULTILINE_COMMENT=50 + SPACES=51 + WORD=52 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -301,6 +302,9 @@ def K_IS(self): def K_LIKE(self): return self.getToken(PqlParser.K_LIKE, 0) + def K_ILIKE(self): + return self.getToken(PqlParser.K_ILIKE, 0) + def K_AND(self): return self.getToken(PqlParser.K_AND, 0) @@ -502,7 +506,13 @@ def expr(self, _p:int=0): self.state = 49 - 
localctx.operator = self.match(PqlParser.K_LIKE) + localctx.operator = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==PqlParser.K_ILIKE or _la==PqlParser.K_LIKE): + localctx.operator = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() self.state = 50 localctx.right = self.expr(10) pass diff --git a/python/tests/tel_grammar_test.py b/python/tests/tel_grammar_test.py index d5cbc9b..478dd2b 100644 --- a/python/tests/tel_grammar_test.py +++ b/python/tests/tel_grammar_test.py @@ -59,6 +59,8 @@ ('slug IS NOT NULL',), ('slug is NULL',), ('slug is NOT NULL',), + ("a like 'asdf'",), + ("a ilike 'asdf'",), ], ) def test_grammar(test_case): @@ -77,7 +79,6 @@ def test_grammar(test_case): ('ds|sl|ug - sluging',), # Handle nested functions ('slug is',), - ('not',), ('',), ('a BETWEEN e',), ('a BETWEEN f OR x',), From 6e73986f6d0665acea80b4cfb2888b8c321bb689 Mon Sep 17 00:00:00 2001 From: Daniel Dotsenko Date: Tue, 8 Dec 2020 02:05:40 -0800 Subject: [PATCH 32/32] move ANTLR visitor responsible for TEL-to-AST extraction to seprate file Reduces noise in the file, allows better reuse of visitor --- .../pql_grammar/antlr_tel_to_ast_visitor.py | 384 ++++++++++++++++ python/src/pql_grammar/from_pql.py | 414 +----------------- python/tests/ast_pql_test.py | 3 +- 3 files changed, 402 insertions(+), 399 deletions(-) create mode 100644 python/src/pql_grammar/antlr_tel_to_ast_visitor.py diff --git a/python/src/pql_grammar/antlr_tel_to_ast_visitor.py b/python/src/pql_grammar/antlr_tel_to_ast_visitor.py new file mode 100644 index 0000000..be725ae --- /dev/null +++ b/python/src/pql_grammar/antlr_tel_to_ast_visitor.py @@ -0,0 +1,384 @@ +# fmt: off + +from antlr4 import ( + CommonTokenStream, + InputStream, + ParserRuleContext, + RecognitionException, + Recognizer, + Token, +) +from antlr4.error.ErrorListener import ErrorListener +from decimal import Decimal +from typing import Optional + +from .antlr.PqlParser import PqlParser +from .antlr.PqlParserVisitor import PqlParserVisitor as _PqlParserVisitor + +from . import model as ast + + +class ParseError(ValueError): + pass + + +class TelErrorListener(ErrorListener): + # TODO: Contemplate DiagnosticErrorListener as base class for richer error reporting + + def syntaxError( + self, + recognizer: Recognizer, + offending_symbol: Token, + line: int, + column: int, + msg: str, + e: RecognitionException + ): + # See chapter 9.2 "Altering and Redirecting ANTLR Error Messages" + # http://books.killf.info/%E7%BC%96%E8%AF%91%E5%8E%9F%E7%90%86/The%20Definitive%20ANTLR4%20Reference.pdf + + tokens = recognizer.getInputStream() + input = full_text(tokens.tokenSource.inputStream) + # when input == '' splitlines makes it [] - empty. Need at last one line. + lines = input.splitlines() or [''] + error_line = lines[line - 1] + start = offending_symbol.start + stop = offending_symbol.stop + + base_msg = f'Unexpected "{full_text(offending_symbol)}"' if offending_symbol else msg + base_msg = base_msg.replace('', '') + + if len(lines) > 1: + line_msg = f'line {line}, ' + else: + line_msg = '' + + # "unexpected end of line" errors have index reversed + # stop is smaller than start. 
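        # Illustration (hypothetical input, not part of this patch): if the offending
        # token were "xyz" at character indexes 4..6 of a one-line input "a + xyz + b",
        # the code below would raise roughly
        #   ParseError('Unexpected "xyz" (positions 5 to 7) in fragment "a + -->xyz<-- + b"')
        # i.e. the offending span gets wrapped in -->...<-- markers.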
+ if start < stop: + pos_msg = f'positions {start+1} to {stop+1}' + else: + pos_msg = f'position {start+1}' + + + if len(error_line) <= start + 1: + error_line_focus = error_line + else: + error_line_focus = ( + error_line[:start] + + '-->' + + error_line[start:stop+1] + + '<--' + + error_line[stop+1:] + ) + msg = f'{base_msg} ({line_msg}{pos_msg}) in fragment "{error_line_focus}"' + raise ParseError(msg) + + +def full_text(ctx: ParserRuleContext) -> str: + # extracts full text from a tree of nodes, + # including white space. + if ctx: + if isinstance(ctx, ParserRuleContext): + try: + start = ctx.start.start + except AttributeError: + start = None + try: + stop = ctx.stop.stop + except AttributeError: + stop = None + + if not(start is None) and stop is None: + stop = start + + if start is None: + return str(ctx) + + return ctx.start.getInputStream().getText(start, stop) + else: + try: + # some primitive context object + return ctx.text + except AttributeError: + # Terminal Node of some sort + return str(ctx) + else: + return None + + + +def unquote(s: str): + # Quoted schema, table, column names come in Postgres style - double-quotes + # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. + # https://docs.oracle.com/goldengate/1212/gg-winux/GWURF/gg_parameters183.htm#GWURF728 + # Example: + # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' + if not s: + return s + wrapper = (s[0], s[-1]) + if wrapper == ('"', '"') or wrapper == ("'", "'"): + s = s[1:-1] + + # TODO: decide which one we want to support + # TEL style escapes + return s.replace('\\"', '"').replace("\\'", "'") + # # SQL style escapes + # return s.replace('""', '"').replace("''", "'") + + +class TelVisitor(_PqlParserVisitor): + + def visitParseTel(self, ctx:PqlParser.ParseTelContext): + return self.visitExpr(ctx.expr()) + + def _parse_literal(self, ctx: PqlParser.LiteralValueContext): + is_number = bool(ctx.NUMERIC_LITERAL()) + is_string = bool(ctx.DOUBLE_QUOTED_STRING()) or bool(ctx.SINGLE_QUOTED_STRING()) + is_null = bool(ctx.K_NULL()) + is_bool = bool(ctx.K_TRUE()) or bool(ctx.K_FALSE()) + + if is_null: + return None + + if is_bool: + return bool(ctx.K_TRUE()) + + try: + v = full_text(ctx) + except IndexError: + raise ParseError(f"Could not extract literal value node from '{ctx.getText()}'.") + + if is_number: + # TODO: contemplate decimal type instead + try: + return int(v) + except ValueError: + try: + return float(v) + except Exception: + raise ParseError(f"Could not convert SQL number {v} to native number representation.") + + if is_string: + return unquote(v) + + return v + + def visitLiteralValue(self, ctx:PqlParser.LiteralValueContext): + return ast.Literal( + self._parse_literal(ctx), + full_text(ctx) + ) + + def visitTaxon(self, ctx:PqlParser.TaxonContext): + return ast.Taxon( + full_text(ctx.slug), + full_text(ctx.namespace), + bool(ctx.is_optional), + full_text(ctx.tag) + ) + + def visitFn(self, ctx:PqlParser.FnContext): + return ast.Function( + full_text(ctx.function_name), + tuple([ + # argument_name may be undefined, returning Null for name. + # that's fine. + # Null for name value means it's not named, but positional argument. + # positional args are stored as (None, arg_value) tuples. 
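                # Illustration (hypothetical shapes, not from this patch): a call with
                # two positional arguments and one named argument produces args like
                #   ((None, <expr>), (None, <expr>), ('argname', <expr>))
                # so consumers can distinguish positional from named arguments by whether
                # the first element of the pair is None.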
+ (full_text(fn_arg.argument_name), self.visitExpr(fn_arg.argument_value)) + for fn_arg in ctx.arguments.fnArg() + ]) if ctx.arguments else None + ) + + def visitExpr(self, ctx:PqlParser.ExprContext): + # unpack parens + if ctx.inner: + return self.visitExpr(ctx.inner) + + v = ctx.literalValue() + if v: + return self.visitLiteralValue(v) + + v = ctx.unary_operator + if v: + operator = full_text(v).upper() + + # expr + right = self.visitExpr(ctx.right) + + # some unary operators have no meaning + # and packing them into AST just creates noise for consuming + if operator == '+': + # skip the BS. ignore the plus + # We can do this because we don't support `++a` expressions + return right + + if ( + operator == '-' and + isinstance(right, ast.Literal) and + isinstance(right.value, (int, float, Decimal)) + ): + # right.value will always be positive digit. + # Our syntax parser guarantees that. + return ast.Literal( + right.value * -1, + full_text(ctx) # unary minus with underlying literal value as one string + ) + + if ( + operator == 'NOT' and + isinstance(right, ast.Literal) + ): + # unlikely to ever happen, but still + _v = not right.value + return ast.Literal( + _v, + 'true' if _v else 'false' + ) + + # else: + # # cannot avoid packaging unary "-" separate. + # # it's in front of a non-literal expression that need to be negated manually later + # TODO: contemplate converting this from unary `-expr` into regular `-1 * expr` + # to escape Unary minus completely. + + # We dealt with '+'. We half-dealt with '-' + # What's left is 'NOT' + # These leftovers we pass through as unary. + return ast.Expr( + operator, + (right,) + ) + + v: Optional[str] = full_text(ctx.operator) + if v: + # this is super generic expression of type + # left OP right + # with a lot of options for OP values. + # The only exception is IN operator where there no `right` but `right_list` + + # we standardize operator keywords to upper case + # this is to establish a standard expectation for consuming code + # 'and' -> 'AND' + op = v.upper() + + # Let's handle IN-like cases first and fall through left-OP-right for rest. + # IN-like cases are characterized by non-null `.right_list` (instead of .right) + if ctx.right_list: + right = [ + self.visitExpr(expr) + for expr in ctx.right_list.expr() + ] + else: + right = [self.visitExpr(ctx.right)] + + is_negated = ctx.is_negated + + # Normally AST parsers should not be in business of + # rewriting the subject matter. + # However, there is one ugly nuance of SQL-like language + # that does not warrant "rewrite" but a "more standard way to express" + # a BETWEEN b AND c + # a NOT BETWEEN b AND c + # It's an ugly wart of SQL that requires very special-cased handling + # in all consumer code if it stays as BETWEEN. + # TO save the children, and humanity, will express BETWEEN as explicit inequality + # a BETWEEN b AND c --> (a >= b) AND (a <= c) + # a NOT BETWEEN b AND c --> (a < b) OR (a > c) + # Dont think of it as "transform". + # Think of it as the only sane way to express what BETWEEN means. + + if op == 'BETWEEN': + left = self.visitExpr(ctx.left) + + # this one is an Expr('AND', [v1, v2])) + between_and = self.visitExpr(ctx.right) + + if ( + isinstance(between_and, ast.Expr) and + between_and.operator == 'AND' and + len(between_and.args) == 2 + ): + pass + else: + raise ParseError( + f"Contents of BETWEEN's AND expression - {full_text(ctx.right)} - are not valid. " + "Must be of form `valueA AND valueB`." 
+ ) + + if is_negated: + # a NOT BETWEEN b AND c --> (a < b) OR (a > c) + ex = ast.Expr( + 'OR', + ( + ast.Expr( + '<', + ( + left, # TODO: think about copy + between_and.args[0] + ) + ), + ast.Expr( + '>', + ( + left, # TODO: think about copy + between_and.args[1] + ) + ), + ) + ) + else: + # a BETWEEN b AND c --> (a >= b) AND (a <= c) + ex = ast.Expr( + 'AND', + ( + ast.Expr( + '>=', + ( + left, # TODO: think about copy + between_and.args[0] + ) + ), + ast.Expr( + '<=', + ( + left, # TODO: think about copy + between_and.args[1] + ) + ), + ) + ) + # we internalized NOT into the expression. + # can return without further NOT processing + return ex + + ex = ast.Expr( + op, + tuple([self.visitExpr(ctx.left)] + right) + ) + + # lastly, some statements allow NOT before operator + # (if it's before expression, it's captured by Unary operator) + # In this case as opposed to creating of a separate NOT-variant operator + # we just wrap the non-NOT version of the statement into + # a unary NOT + # c not in (1,2,3) + # becomes + # not (c in (1,2,3)) + + if ctx.is_negated: + return ast.Expr( + 'NOT', + (ex,) + ) + else: + return ex + + v: PqlParser.TaxonContext = ctx.taxon() + if v: + return self.visitTaxon(v) + + v: PqlParser.FnContext = ctx.fn() + if v: + return self.visitFn(v) diff --git a/python/src/pql_grammar/from_pql.py b/python/src/pql_grammar/from_pql.py index 6087c7d..6792f73 100644 --- a/python/src/pql_grammar/from_pql.py +++ b/python/src/pql_grammar/from_pql.py @@ -8,405 +8,25 @@ Recognizer, Token, ) -from antlr4.error.ErrorListener import ErrorListener -from decimal import Decimal -from typing import Optional, Tuple, List, Type, Any, Union + +from typing import Type from .antlr.PqlLexer import PqlLexer from .antlr.PqlParser import PqlParser -from .antlr.PqlParserVisitor import PqlParserVisitor as _PqlParserVisitor from . import model as ast - - -class ParseError(ValueError): - pass - - -class PqlErrorListener(ErrorListener): - # TODO: Contemplate DiagnosticErrorListener as base class for richer error reporting - - def syntaxError( - self, - recognizer: Recognizer, - offending_symbol: Token, - line: int, - column: int, - msg: str, - e: RecognitionException - ): - # See chapter 9.2 "Altering and Redirecting ANTLR Error Messages" - # http://books.killf.info/%E7%BC%96%E8%AF%91%E5%8E%9F%E7%90%86/The%20Definitive%20ANTLR4%20Reference.pdf - - tokens = recognizer.getInputStream() - input = full_text(tokens.tokenSource.inputStream) - # when input == '' splitlines makes it [] - empty. Need at last one line. - lines = input.splitlines() or [''] - error_line = lines[line - 1] - start = offending_symbol.start - stop = offending_symbol.stop - - base_msg = f'Unexpected "{full_text(offending_symbol)}"' if offending_symbol else msg - base_msg = base_msg.replace('', '') - - if len(lines) > 1: - line_msg = f'line {line}, ' - else: - line_msg = '' - - # "unexpected end of line" errors have index reversed - # stop is smaller than start. - if start < stop: - pos_msg = f'positions {start+1} to {stop+1}' - else: - pos_msg = f'position {start+1}' - - - if len(error_line) <= start + 1: - error_line_focus = error_line - else: - error_line_focus = ( - error_line[:start] + - '-->' + - error_line[start:stop+1] + - '<--' + - error_line[stop+1:] - ) - msg = f'{base_msg} ({line_msg}{pos_msg}) in fragment "{error_line_focus}"' - raise ParseError(msg) - - -def full_text(ctx: ParserRuleContext) -> str: - # extracts full text from a tree of nodes, - # including white space. 
- if ctx: - if isinstance(ctx, ParserRuleContext): - try: - start = ctx.start.start - except AttributeError: - start = None - try: - stop = ctx.stop.stop - except AttributeError: - stop = None - - if not(start is None) and stop is None: - stop = start - - if start is None: - return str(ctx) - - return ctx.start.getInputStream().getText(start, stop) - else: - try: - # some primitive context object - return ctx.text - except AttributeError: - # Terminal Node of some sort - return str(ctx) - else: - return None - - - -def unquote(s: str): - # Quoted schema, table, column names come in Postgres style - double-quotes - # in-string double-quotes are escaped by doubling the double-quotes ANSI SQL style. - # https://docs.oracle.com/goldengate/1212/gg-winux/GWURF/gg_parameters183.htm#GWURF728 - # Example: - # '"table name ""with quoted portion"""' becomes 'table name "with quoted portion"' - if not s: - return s - wrapper = (s[0], s[-1]) - if wrapper == ('"', '"') or wrapper == ("'", "'"): - s = s[1:-1] - - # TODO: decide which one we want to support - # TEL style escapes - return s.replace('\\"', '"').replace("\\'", "'") - # # SQL style escapes - # return s.replace('""', '"').replace("''", "'") - - -class PqlVisitor(_PqlParserVisitor): - - def visitErrorNode(self, node): - """ - Override this with no-op if you don't want automatic syntax errors emitted - """ - wrong_symbol = node.symbol.text - line = node.symbol.line - column = node.symbol.column + 1 - details = f'Unexpected symbol "{wrong_symbol}" on line {line}, position {column}' - raise ParseError(details) - - def visit_from_tel_string(self, tel: str): - inp_stream = InputStream(tel) - error_listener = PqlErrorListener() - lexer = PqlLexer(inp_stream) - lexer.removeErrorListeners() # default is PrintToConsole - lexer.addErrorListener(error_listener) - stream = CommonTokenStream(lexer) - parser = PqlParser(stream) - parser.removeErrorListeners() # default is PrintToConsole - parser.addErrorListener(error_listener) - tree = parser.parseTel() - return self.visit(tree) - - def visitParseTel(self, ctx:PqlParser.ParseTelContext): - return self.visitExpr(ctx.expr()) - - def _parse_literal(self, ctx: PqlParser.LiteralValueContext): - is_number = bool(ctx.NUMERIC_LITERAL()) - is_string = bool(ctx.DOUBLE_QUOTED_STRING()) or bool(ctx.SINGLE_QUOTED_STRING()) - is_null = bool(ctx.K_NULL()) - is_bool = bool(ctx.K_TRUE()) or bool(ctx.K_FALSE()) - - if is_null: - return None - - if is_bool: - return bool(ctx.K_TRUE()) - - try: - v = full_text(ctx) - except IndexError: - raise ParseError(f"Could not extract literal value node from '{ctx.getText()}'.") - - if is_number: - # TODO: contemplate decimal type instead - try: - return int(v) - except ValueError: - try: - return float(v) - except Exception: - raise ParseError(f"Could not convert SQL number {v} to native number representation.") - - if is_string: - return unquote(v) - - return v - - def visitLiteralValue(self, ctx:PqlParser.LiteralValueContext): - return ast.Literal( - self._parse_literal(ctx), - full_text(ctx) - ) - - def visitTaxon(self, ctx:PqlParser.TaxonContext): - return ast.Taxon( - full_text(ctx.slug), - full_text(ctx.namespace), - bool(ctx.is_optional), - full_text(ctx.tag) - ) - - def visitFn(self, ctx:PqlParser.FnContext): - return ast.Function( - full_text(ctx.function_name), - tuple([ - # argument_name may be undefined, returning Null for name. - # that's fine. - # Null for name value means it's not named, but positional argument. - # positional args are stored as (None, arg_value) tuples. 
- (full_text(fn_arg.argument_name), self.visitExpr(fn_arg.argument_value)) - for fn_arg in ctx.arguments.fnArg() - ]) if ctx.arguments else None - ) - - def visitExpr(self, ctx:PqlParser.ExprContext): - # unpack parens - if ctx.inner: - return self.visitExpr(ctx.inner) - - v = ctx.literalValue() - if v: - return self.visitLiteralValue(v) - - v = ctx.unary_operator - if v: - operator = full_text(v).upper() - - # expr - right = self.visitExpr(ctx.right) - - # some unary operators have no meaning - # and packing them into AST just creates noise for consuming - if operator == '+': - # skip the BS. ignore the plus - # We can do this because we don't support `++a` expressions - return right - - if ( - operator == '-' and - isinstance(right, ast.Literal) and - isinstance(right.value, (int, float, Decimal)) - ): - # right.value will always be positive digit. - # Our syntax parser guarantees that. - return ast.Literal( - right.value * -1, - full_text(ctx) # unary minus with underlying literal value as one string - ) - - if ( - operator == 'NOT' and - isinstance(right, ast.Literal) - ): - # unlikely to ever happen, but still - _v = not right.value - return ast.Literal( - _v, - 'true' if _v else 'false' - ) - - # else: - # # cannot avoid packaging unary "-" separate. - # # it's in front of a non-literal expression that need to be negated manually later - # TODO: contemplate converting this from unary `-expr` into regular `-1 * expr` - # to escape Unary minus completely. - - # We dealt with '+'. We half-dealt with '-' - # What's left is 'NOT' - # These leftovers we pass through as unary. - return ast.Expr( - operator, - (right,) - ) - - v: Optional[str] = full_text(ctx.operator) - if v: - # this is super generic expression of type - # left OP right - # with a lot of options for OP values. - # The only exception is IN operator where there no `right` but `right_list` - - # we standardize operator keywords to upper case - # this is to establish a standard expectation for consuming code - # 'and' -> 'AND' - op = v.upper() - - # Let's handle IN-like cases first and fall through left-OP-right for rest. - # IN-like cases are characterized by non-null `.right_list` (instead of .right) - if ctx.right_list: - right = [ - self.visitExpr(expr) - for expr in ctx.right_list.expr() - ] - else: - right = [self.visitExpr(ctx.right)] - - is_negated = ctx.is_negated - - # Normally AST parsers should not be in business of - # rewriting the subject matter. - # However, there is one ugly nuance of SQL-like language - # that does not warrant "rewrite" but a "more standard way to express" - # a BETWEEN b AND c - # a NOT BETWEEN b AND c - # It's an ugly wart of SQL that requires very special-cased handling - # in all consumer code if it stays as BETWEEN. - # TO save the children, and humanity, will express BETWEEN as explicit inequality - # a BETWEEN b AND c --> (a >= b) AND (a <= c) - # a NOT BETWEEN b AND c --> (a < b) OR (a > c) - # Dont think of it as "transform". - # Think of it as the only sane way to express what BETWEEN means. - - if op == 'BETWEEN': - left = self.visitExpr(ctx.left) - - # this one is an Expr('AND', [v1, v2])) - between_and = self.visitExpr(ctx.right) - - if ( - isinstance(between_and, ast.Expr) and - between_and.operator == 'AND' and - len(between_and.args) == 2 - ): - pass - else: - raise ParseError( - f"Contents of BETWEEN's AND expression - {full_text(ctx.right)} - are not valid. " - "Must be of form `valueA AND valueB`." 
- ) - - if is_negated: - # a NOT BETWEEN b AND c --> (a < b) OR (a > c) - ex = ast.Expr( - 'OR', - ( - ast.Expr( - '<', - ( - left, # TODO: think about copy - between_and.args[0] - ) - ), - ast.Expr( - '>', - ( - left, # TODO: think about copy - between_and.args[1] - ) - ), - ) - ) - else: - # a BETWEEN b AND c --> (a >= b) AND (a <= c) - ex = ast.Expr( - 'AND', - ( - ast.Expr( - '>=', - ( - left, # TODO: think about copy - between_and.args[0] - ) - ), - ast.Expr( - '<=', - ( - left, # TODO: think about copy - between_and.args[1] - ) - ), - ) - ) - # we internalized NOT into the expression. - # can return without further NOT processing - return ex - - ex = ast.Expr( - op, - tuple([self.visitExpr(ctx.left)] + right) - ) - - # lastly, some statements allow NOT before operator - # (if it's before expression, it's captured by Unary operator) - # In this case as opposed to creating of a separate NOT-variant operator - # we just wrap the non-NOT version of the statement into - # a unary NOT - # c not in (1,2,3) - # becomes - # not (c in (1,2,3)) - - if ctx.is_negated: - return ast.Expr( - 'NOT', - (ex,) - ) - else: - return ex - - v: PqlParser.TaxonContext = ctx.taxon() - if v: - return self.visitTaxon(v) - - v: PqlParser.FnContext = ctx.fn() - if v: - return self.visitFn(v) - - -def from_tel(tel: str, cls:Type[PqlVisitor] = PqlVisitor) -> ast.Node: - return cls().visit_from_tel_string(tel) +from .antlr_tel_to_ast_visitor import ParseError, TelVisitor, TelErrorListener + + +def from_tel(tel: str, cls:Type[TelVisitor] = TelVisitor) -> ast.Node: + inp_stream = InputStream(tel) + error_listener = TelErrorListener() + lexer = PqlLexer(inp_stream) + lexer.removeErrorListeners() # default is PrintToConsole + lexer.addErrorListener(error_listener) + stream = CommonTokenStream(lexer) + parser = PqlParser(stream) + parser.removeErrorListeners() # default is PrintToConsole + parser.addErrorListener(error_listener) + tree = parser.parseTel() + return cls().visit(tree) diff --git a/python/tests/ast_pql_test.py b/python/tests/ast_pql_test.py index d4fb587..fc82e92 100644 --- a/python/tests/ast_pql_test.py +++ b/python/tests/ast_pql_test.py @@ -7,8 +7,7 @@ from pql_grammar import model as ast from pql_grammar.to_pql import to_tel -from pql_grammar.from_pql import PqlVisitor, from_tel -from pql_grammar.tools import find_all +from pql_grammar.from_pql import from_tel inputs = (
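A minimal usage sketch of the refactored entry point (module, class and function names are taken from the patch above; the sample TEL string is an existing test case, while TracingVisitor and its print() hook are illustrative assumptions, not part of the patch):

    from pql_grammar.from_pql import from_tel
    from pql_grammar.antlr_tel_to_ast_visitor import TelVisitor

    # default visitor: parse a TEL expression string into the AST model
    node = from_tel("slug IS NOT NULL")
    print(node)

    # the reuse enabled by moving the visitor to its own module: subclass
    # TelVisitor and pass it back in through from_tel's `cls` argument
    class TracingVisitor(TelVisitor):
        def visitTaxon(self, ctx):
            taxon = super().visitTaxon(ctx)
            print("saw taxon:", taxon)  # hypothetical hook, for illustration only
            return taxon

    node = from_tel("slug IS NOT NULL", cls=TracingVisitor)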