`` elements with the individual token styles.'),
diff --git a/vendor/pygments-main/pygments/formatters/html.py b/vendor/pygments-main/pygments/formatters/html.py
index 3ba104f3..3029c1cc 100644
--- a/vendor/pygments-main/pygments/formatters/html.py
+++ b/vendor/pygments-main/pygments/formatters/html.py
@@ -272,7 +272,9 @@ class HtmlFormatter(Formatter):
125%``).
`hl_lines`
- Specify a list of lines to be highlighted.
+ Specify a list of lines to be highlighted. The line numbers are always
+ relative to the input (i.e. the first line is line 1) and are
+ independent of `linenostart`.
.. versionadded:: 0.11
@@ -303,7 +305,7 @@ class ``"special"`` (default: ``0``).
`lineanchors`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in an anchor tag with a ``name`` of ``foo-linenumber``.
+ output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
This allows easy linking to certain lines.
.. versionadded:: 0.9
@@ -335,7 +337,9 @@ class ``"special"`` (default: ``0``).
`filename`
A string used to generate a filename when rendering ```` blocks,
- for example if displaying source code.
+ for example if displaying source code. If `linenos` is set to
+ ``'table'`` then the filename will be rendered in an initial row
+ containing a single `| ` which spans both columns.
.. versionadded:: 2.1
@@ -691,11 +695,20 @@ def _wrap_tablelinenos(self, inner):
ls = '\n'.join(lines)
+ # If a filename was specified, we can't put it into the code table as it
+ # would misalign the line numbers. Hence we emit a separate row for it.
+ filename_tr = ""
+ if self.filename:
+ filename_tr = (
+ ' | '
+ '' + self.filename + ' '
+ ' |
')
+
# in case you wonder about the seemingly redundant here: since the
# content in the other cell also is wrapped in a div, some browsers in
# some configurations seem to mess up the formatting...
yield 0, (
- '
' % self.cssclass +
+ '' % self.cssclass + filename_tr +
' | '
)
@@ -752,7 +765,7 @@ def _wrap_lineanchors(self, inner):
for t, line in inner:
if t:
i += 1
- yield 1, '' % (s, i) + line
+ yield 1, '' % (s, i, s, i) + line
else:
yield 0, line
@@ -788,7 +801,7 @@ def _wrap_pre(self, inner):
style.append(self._pre_style)
style = '; '.join(style)
- if self.filename:
+ if self.filename and self.linenos != 1:
yield 0, ('' + self.filename + '')
# the empty span here is to keep leading empty lines from being
@@ -877,7 +890,7 @@ def _format_lines(self, tokensource):
def _lookup_ctag(self, token):
entry = ctags.TagEntry()
- if self._ctags.find(entry, token, 0):
+ if self._ctags.find(entry, token.encode(), 0):
return entry['file'], entry['lineNumber']
else:
return None, None
diff --git a/vendor/pygments-main/pygments/formatters/pangomarkup.py b/vendor/pygments-main/pygments/formatters/pangomarkup.py
new file mode 100644
index 00000000..926ccc6d
--- /dev/null
+++ b/vendor/pygments-main/pygments/formatters/pangomarkup.py
@@ -0,0 +1,83 @@
+"""
+ pygments.formatters.pangomarkup
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for Pango markup output.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+
+
+__all__ = ['PangoMarkupFormatter']
+
+
+_escape_table = {
+ ord('&'): '&',
+ ord('<'): '<',
+}
+
+
+def escape_special_chars(text, table=_escape_table):
+ """Escape & and < for Pango Markup."""
+ return text.translate(table)
+
+
+class PangoMarkupFormatter(Formatter):
+ """
+ Format tokens as Pango Markup code. It can then be rendered to an SVG.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'Pango Markup'
+ aliases = ['pango', 'pangomarkup']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+
+ self.styles = {}
+
+ for token, style in self.style:
+ start = ''
+ end = ''
+ if style['color']:
+ start += '' % style['color']
+ end = '' + end
+ if style['bold']:
+ start += ''
+ end = '' + end
+ if style['italic']:
+ start += ''
+ end = '' + end
+ if style['underline']:
+ start += ''
+ end = '' + end
+ self.styles[token] = (start, end)
+
+ def format_unencoded(self, tokensource, outfile):
+ lastval = ''
+ lasttype = None
+
+ outfile.write('')
+
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ if ttype == lasttype:
+ lastval += escape_special_chars(value)
+ else:
+ if lastval:
+ stylebegin, styleend = self.styles[lasttype]
+ outfile.write(stylebegin + lastval + styleend)
+ lastval = escape_special_chars(value)
+ lasttype = ttype
+
+ if lastval:
+ stylebegin, styleend = self.styles[lasttype]
+ outfile.write(stylebegin + lastval + styleend)
+
+ outfile.write('')
diff --git a/vendor/pygments-main/pygments/lexers/_csound_builtins.py b/vendor/pygments-main/pygments/lexers/_csound_builtins.py
index 89fcb66b..e7e395dc 100644
--- a/vendor/pygments-main/pygments/lexers/_csound_builtins.py
+++ b/vendor/pygments-main/pygments/lexers/_csound_builtins.py
@@ -6,7 +6,35 @@
:license: BSD, see LICENSE for details.
"""
-# Opcodes in Csound 6.14.0 using:
+REMOVED_OPCODES = set('''
+OSCsendA
+beadsynt
+beosc
+buchla
+getrowlin
+lua_exec
+lua_iaopcall
+lua_iaopcall_off
+lua_ikopcall
+lua_ikopcall_off
+lua_iopcall
+lua_iopcall_off
+lua_opdef
+mp3scal_check
+mp3scal_load
+mp3scal_load2
+mp3scal_play
+mp3scal_play2
+pvsgendy
+socksend_k
+signalflowgraph
+sumTableFilter
+systime
+tabrowlin
+vbap1move
+'''.split())
+
+# Opcodes in Csound 6.16.0 using:
# python3 -c "
# import re
# from subprocess import Popen, PIPE
@@ -209,6 +237,7 @@
ampmidid
apoleparams
arduinoRead
+arduinoReadF
arduinoStart
arduinoStop
areson
@@ -216,6 +245,7 @@
atone
atonek
atonex
+autocorr
babo
balance
balance2
@@ -223,8 +253,6 @@
barmodel
bbcutm
bbcuts
-beadsynt
-beosc
betarand
bexprnd
bformdec1
@@ -259,6 +287,7 @@
ceps
cepsinv
chanctrl
+changed
changed2
chani
chano
@@ -293,6 +322,8 @@
cmplxprod
cntCreate
cntCycles
+cntDelete
+cntDelete_i
cntRead
cntReset
cntState
@@ -342,6 +373,11 @@
ctrl21
ctrl7
ctrlinit
+ctrlpreset
+ctrlprint
+ctrlprintpresets
+ctrlsave
+ctrlselect
cuserrnd
dam
date
@@ -523,7 +559,6 @@
getcol
getftargs
getrow
-getrowlin
getseed
gogobel
grain
@@ -776,6 +811,7 @@
lastcycle
lenarray
lfo
+lfsr
limit
limit1
lincos
@@ -836,14 +872,6 @@
lpshold
lpsholdp
lpslot
-lua_exec
-lua_iaopcall
-lua_iaopcall_off
-lua_ikopcall
-lua_ikopcall_off
-lua_iopcall
-lua_iopcall_off
-lua_opdef
lufs
mac
maca
@@ -920,6 +948,7 @@
mp3sr
mpulse
mrtmsg
+ms2st
mtof
mton
multitap
@@ -929,6 +958,7 @@
mvclpf2
mvclpf3
mvclpf4
+mvmfilter
mxadsr
nchnls_hw
nestedap
@@ -972,6 +1002,7 @@
oscilx
out
out32
+outall
outc
outch
outh
@@ -1279,11 +1310,13 @@
sc_phasor
sc_trig
scale
+scale2
scalearray
scanhammer
scans
scantable
scanu
+scanu2
schedkwhen
schedkwhennamed
schedule
@@ -1333,6 +1366,7 @@
sinh
sininv
sinsyn
+skf
sleighbells
slicearray
slicearray_i
@@ -1368,12 +1402,14 @@
spat3di
spat3dt
spdist
+spf
splitrig
sprintf
sprintfk
spsend
sqrt
squinewave
+st2ms
statevar
sterrain
stix
@@ -1414,6 +1450,7 @@
sum
sumarray
svfilter
+svn
syncgrain
syncloop
syncphasor
@@ -1454,7 +1491,6 @@
tabmorphi
tabplay
tabrec
-tabrowlin
tabsum
tabw
tabw_i
@@ -1486,6 +1522,8 @@
trcross
trfilter
trhighest
+trigExpseg
+trigLinseg
trigger
trighold
trigphasor
@@ -1500,6 +1538,8 @@
trsplit
turnoff
turnoff2
+turnoff2_i
+turnoff3
turnon
tvconv
unirand
@@ -1523,6 +1563,7 @@
vbapz
vbapzmove
vcella
+vclpf
vco
vco2
vco2ft
@@ -1611,6 +1652,7 @@
wrap
writescratch
wterrain
+wterrain2
xadsr
xin
xout
@@ -1647,7 +1689,6 @@
array
bformdec
bformenc
-changed
copy2ftab
copy2ttab
hrtfer
diff --git a/vendor/pygments-main/pygments/lexers/_julia_builtins.py b/vendor/pygments-main/pygments/lexers/_julia_builtins.py
new file mode 100644
index 00000000..c1dcca09
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/_julia_builtins.py
@@ -0,0 +1,401 @@
+# operators
+# see https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
+# Julia v1.6.0-rc1
+OPERATORS_LIST = [
+ # other
+ '->',
+ # prec-assignment
+ ':=', '$=',
+ # prec-conditional, prec-lazy-or, prec-lazy-and
+ '?', '||', '&&',
+ # prec-colon
+ ':',
+ # prec-plus
+ '$',
+ # prec-decl
+ '::',
+]
+DOTTED_OPERATORS_LIST = [
+ # prec-assignment
+ r'=', r'+=', r'-=', r'*=', r'/=', r'//=', r'\=', r'^=', r'÷=', r'%=', r'<<=',
+ r'>>=', r'>>>=', r'|=', r'&=', r'⊻=', r'≔', r'⩴', r"≕'", r'~',
+ # prec-pair
+ '=>',
+ # prec-arrow
+ r'→', r'↔', r'↚', r'↛', r'↞', r'↠', r'↢', r'↣', r'↦', r'↤', r'↮', r'⇎', r'⇍', r'⇏',
+ r'⇐', r'⇒', r'⇔', r'⇴', r'⇶', r'⇷', r'⇸', r'⇹', r'⇺', r'⇻', r'⇼', r'⇽', r'⇾', r'⇿',
+ r'⟵', r'⟶', r'⟷', r'⟹', r'⟺', r'⟻', r'⟼', r'⟽', r'⟾', r'⟿', r'⤀', r'⤁', r'⤂', r'⤃',
+ r'⤄', r'⤅', r'⤆', r'⤇', r'⤌', r'⤍', r'⤎', r'⤏', r'⤐', r'⤑', r'⤔', r'⤕', r'⤖', r'⤗',
+ r'⤘', r'⤝', r'⤞', r'⤟', r'⤠', r'⥄', r'⥅', r'⥆', r'⥇', r'⥈', r'⥊', r'⥋', r'⥎', r'⥐',
+ r'⥒', r'⥓', r'⥖', r'⥗', r'⥚', r'⥛', r'⥞', r'⥟', r'⥢', r'⥤', r'⥦', r'⥧', r'⥨', r'⥩',
+ r'⥪', r'⥫', r'⥬', r'⥭', r'⥰', r'⧴', r'⬱', r'⬰', r'⬲', r'⬳', r'⬴', r'⬵', r'⬶', r'⬷',
+ r'⬸', r'⬹', r'⬺', r'⬻', r'⬼', r'⬽', r'⬾', r'⬿', r'⭀', r'⭁', r'⭂', r'⭃', r'⭄', r'⭇',
+ r'⭈', r'⭉', r'⭊', r'⭋', r'⭌', r'←', r'→', r'⇜', r'⇝', r'↜', r'↝', r'↩', r'↪', r'↫',
+ r'↬', r'↼', r'↽', r'⇀', r'⇁', r'⇄', r'⇆', r'⇇', r'⇉', r'⇋', r'⇌', r'⇚', r'⇛', r'⇠',
+ r'⇢', r'↷', r'↶', r'↺', r'↻', r'-->', r'<--', r'<-->',
+ # prec-comparison
+ r'>', r'<', r'>=', r'≥', r'<=', r'≤', r'==', r'===', r'≡', r'!=', r'≠', r'!==',
+ r'≢', r'∈', r'∉', r'∋', r'∌', r'⊆', r'⊈', r'⊂', r'⊄', r'⊊', r'∝', r'∊', r'∍', r'∥',
+ r'∦', r'∷', r'∺', r'∻', r'∽', r'∾', r'≁', r'≃', r'≂', r'≄', r'≅', r'≆', r'≇', r'≈',
+ r'≉', r'≊', r'≋', r'≌', r'≍', r'≎', r'≐', r'≑', r'≒', r'≓', r'≖', r'≗', r'≘', r'≙',
+ r'≚', r'≛', r'≜', r'≝', r'≞', r'≟', r'≣', r'≦', r'≧', r'≨', r'≩', r'≪', r'≫', r'≬',
+ r'≭', r'≮', r'≯', r'≰', r'≱', r'≲', r'≳', r'≴', r'≵', r'≶', r'≷', r'≸', r'≹', r'≺',
+ r'≻', r'≼', r'≽', r'≾', r'≿', r'⊀', r'⊁', r'⊃', r'⊅', r'⊇', r'⊉', r'⊋', r'⊏', r'⊐',
+ r'⊑', r'⊒', r'⊜', r'⊩', r'⊬', r'⊮', r'⊰', r'⊱', r'⊲', r'⊳', r'⊴', r'⊵', r'⊶', r'⊷',
+ r'⋍', r'⋐', r'⋑', r'⋕', r'⋖', r'⋗', r'⋘', r'⋙', r'⋚', r'⋛', r'⋜', r'⋝', r'⋞', r'⋟',
+ r'⋠', r'⋡', r'⋢', r'⋣', r'⋤', r'⋥', r'⋦', r'⋧', r'⋨', r'⋩', r'⋪', r'⋫', r'⋬', r'⋭',
+ r'⋲', r'⋳', r'⋴', r'⋵', r'⋶', r'⋷', r'⋸', r'⋹', r'⋺', r'⋻', r'⋼', r'⋽', r'⋾', r'⋿',
+ r'⟈', r'⟉', r'⟒', r'⦷', r'⧀', r'⧁', r'⧡', r'⧣', r'⧤', r'⧥', r'⩦', r'⩧', r'⩪', r'⩫',
+ r'⩬', r'⩭', r'⩮', r'⩯', r'⩰', r'⩱', r'⩲', r'⩳', r'⩵', r'⩶', r'⩷', r'⩸', r'⩹', r'⩺',
+ r'⩻', r'⩼', r'⩽', r'⩾', r'⩿', r'⪀', r'⪁', r'⪂', r'⪃', r'⪄', r'⪅', r'⪆', r'⪇', r'⪈',
+ r'⪉', r'⪊', r'⪋', r'⪌', r'⪍', r'⪎', r'⪏', r'⪐', r'⪑', r'⪒', r'⪓', r'⪔', r'⪕', r'⪖',
+ r'⪗', r'⪘', r'⪙', r'⪚', r'⪛', r'⪜', r'⪝', r'⪞', r'⪟', r'⪠', r'⪡', r'⪢', r'⪣', r'⪤',
+ r'⪥', r'⪦', r'⪧', r'⪨', r'⪩', r'⪪', r'⪫', r'⪬', r'⪭', r'⪮', r'⪯', r'⪰', r'⪱', r'⪲',
+ r'⪳', r'⪴', r'⪵', r'⪶', r'⪷', r'⪸', r'⪹', r'⪺', r'⪻', r'⪼', r'⪽', r'⪾', r'⪿', r'⫀',
+ r'⫁', r'⫂', r'⫃', r'⫄', r'⫅', r'⫆', r'⫇', r'⫈', r'⫉', r'⫊', r'⫋', r'⫌', r'⫍', r'⫎',
+ r'⫏', r'⫐', r'⫑', r'⫒', r'⫓', r'⫔', r'⫕', r'⫖', r'⫗', r'⫘', r'⫙', r'⫷', r'⫸', r'⫹',
+ r'⫺', r'⊢', r'⊣', r'⟂', r'<:', r'>:',
+ # prec-pipe
+ '<|', '|>',
+ # prec-colon
+ r'…', r'⁝', r'⋮', r'⋱', r'⋰', r'⋯',
+ # prec-plus
+ r'+', r'-', r'¦', r'|', r'⊕', r'⊖', r'⊞', r'⊟', r'++', r'∪', r'∨', r'⊔', r'±', r'∓',
+ r'∔', r'∸', r'≏', r'⊎', r'⊻', r'⊽', r'⋎', r'⋓', r'⧺', r'⧻', r'⨈', r'⨢', r'⨣', r'⨤',
+ r'⨥', r'⨦', r'⨧', r'⨨', r'⨩', r'⨪', r'⨫', r'⨬', r'⨭', r'⨮', r'⨹', r'⨺', r'⩁', r'⩂',
+ r'⩅', r'⩊', r'⩌', r'⩏', r'⩐', r'⩒', r'⩔', r'⩖', r'⩗', r'⩛', r'⩝', r'⩡', r'⩢', r'⩣',
+ # prec-times
+ r'*', r'/', r'⌿', r'÷', r'%', r'&', r'⋅', r'∘', r'×', '\\', r'∩', r'∧', r'⊗', r'⊘',
+ r'⊙', r'⊚', r'⊛', r'⊠', r'⊡', r'⊓', r'∗', r'∙', r'∤', r'⅋', r'≀', r'⊼', r'⋄', r'⋆',
+ r'⋇', r'⋉', r'⋊', r'⋋', r'⋌', r'⋏', r'⋒', r'⟑', r'⦸', r'⦼', r'⦾', r'⦿', r'⧶', r'⧷',
+ r'⨇', r'⨰', r'⨱', r'⨲', r'⨳', r'⨴', r'⨵', r'⨶', r'⨷', r'⨸', r'⨻', r'⨼', r'⨽', r'⩀',
+ r'⩃', r'⩄', r'⩋', r'⩍', r'⩎', r'⩑', r'⩓', r'⩕', r'⩘', r'⩚', r'⩜', r'⩞', r'⩟', r'⩠',
+ r'⫛', r'⊍', r'▷', r'⨝', r'⟕', r'⟖', r'⟗', r'⨟',
+ # prec-rational, prec-bitshift
+ '//', '>>', '<<', '>>>',
+ # prec-power
+ r'^', r'↑', r'↓', r'⇵', r'⟰', r'⟱', r'⤈', r'⤉', r'⤊', r'⤋', r'⤒', r'⤓', r'⥉', r'⥌',
+ r'⥍', r'⥏', r'⥑', r'⥔', r'⥕', r'⥘', r'⥙', r'⥜', r'⥝', r'⥠', r'⥡', r'⥣', r'⥥', r'⥮',
+ r'⥯', r'↑', r'↓',
+ # unary-ops, excluding unary-and-binary-ops
+ '!', r'¬', r'√', r'∛', r'∜'
+]
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String["in", "isa", "where"]
+for kw in collect(x.keyword for x in REPLCompletions.complete_keyword(""))
+ if !(contains(kw, " ") || kw == "struct")
+ push!(res, kw)
+ end
+end
+sort!(unique!(setdiff!(res, ["true", "false"])))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+KEYWORD_LIST = (
+ 'baremodule',
+ 'begin',
+ 'break',
+ 'catch',
+ 'ccall',
+ 'const',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'export',
+ 'finally',
+ 'for',
+ 'function',
+ 'global',
+ 'if',
+ 'import',
+ 'in',
+ 'isa',
+ 'let',
+ 'local',
+ 'macro',
+ 'module',
+ 'quote',
+ 'return',
+ 'try',
+ 'using',
+ 'where',
+ 'while',
+)
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String[]
+for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
+ REPLCompletions.completions("", 0)[1])
+ try
+ v = eval(Symbol(compl.mod))
+ if (v isa Type || v isa TypeVar) && (compl.mod != "=>")
+ push!(res, compl.mod)
+ end
+ catch e
+ end
+end
+sort!(unique!(res))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+BUILTIN_LIST = (
+ 'AbstractArray',
+ 'AbstractChannel',
+ 'AbstractChar',
+ 'AbstractDict',
+ 'AbstractDisplay',
+ 'AbstractFloat',
+ 'AbstractIrrational',
+ 'AbstractMatch',
+ 'AbstractMatrix',
+ 'AbstractPattern',
+ 'AbstractRange',
+ 'AbstractSet',
+ 'AbstractString',
+ 'AbstractUnitRange',
+ 'AbstractVecOrMat',
+ 'AbstractVector',
+ 'Any',
+ 'ArgumentError',
+ 'Array',
+ 'AssertionError',
+ 'BigFloat',
+ 'BigInt',
+ 'BitArray',
+ 'BitMatrix',
+ 'BitSet',
+ 'BitVector',
+ 'Bool',
+ 'BoundsError',
+ 'CapturedException',
+ 'CartesianIndex',
+ 'CartesianIndices',
+ 'Cchar',
+ 'Cdouble',
+ 'Cfloat',
+ 'Channel',
+ 'Char',
+ 'Cint',
+ 'Cintmax_t',
+ 'Clong',
+ 'Clonglong',
+ 'Cmd',
+ 'Colon',
+ 'Complex',
+ 'ComplexF16',
+ 'ComplexF32',
+ 'ComplexF64',
+ 'ComposedFunction',
+ 'CompositeException',
+ 'Condition',
+ 'Cptrdiff_t',
+ 'Cshort',
+ 'Csize_t',
+ 'Cssize_t',
+ 'Cstring',
+ 'Cuchar',
+ 'Cuint',
+ 'Cuintmax_t',
+ 'Culong',
+ 'Culonglong',
+ 'Cushort',
+ 'Cvoid',
+ 'Cwchar_t',
+ 'Cwstring',
+ 'DataType',
+ 'DenseArray',
+ 'DenseMatrix',
+ 'DenseVecOrMat',
+ 'DenseVector',
+ 'Dict',
+ 'DimensionMismatch',
+ 'Dims',
+ 'DivideError',
+ 'DomainError',
+ 'EOFError',
+ 'Enum',
+ 'ErrorException',
+ 'Exception',
+ 'ExponentialBackOff',
+ 'Expr',
+ 'Float16',
+ 'Float32',
+ 'Float64',
+ 'Function',
+ 'GlobalRef',
+ 'HTML',
+ 'IO',
+ 'IOBuffer',
+ 'IOContext',
+ 'IOStream',
+ 'IdDict',
+ 'IndexCartesian',
+ 'IndexLinear',
+ 'IndexStyle',
+ 'InexactError',
+ 'InitError',
+ 'Int',
+ 'Int128',
+ 'Int16',
+ 'Int32',
+ 'Int64',
+ 'Int8',
+ 'Integer',
+ 'InterruptException',
+ 'InvalidStateException',
+ 'Irrational',
+ 'KeyError',
+ 'LinRange',
+ 'LineNumberNode',
+ 'LinearIndices',
+ 'LoadError',
+ 'MIME',
+ 'Matrix',
+ 'Method',
+ 'MethodError',
+ 'Missing',
+ 'MissingException',
+ 'Module',
+ 'NTuple',
+ 'NamedTuple',
+ 'Nothing',
+ 'Number',
+ 'OrdinalRange',
+ 'OutOfMemoryError',
+ 'OverflowError',
+ 'Pair',
+ 'PartialQuickSort',
+ 'PermutedDimsArray',
+ 'Pipe',
+ 'ProcessFailedException',
+ 'Ptr',
+ 'QuoteNode',
+ 'Rational',
+ 'RawFD',
+ 'ReadOnlyMemoryError',
+ 'Real',
+ 'ReentrantLock',
+ 'Ref',
+ 'Regex',
+ 'RegexMatch',
+ 'RoundingMode',
+ 'SegmentationFault',
+ 'Set',
+ 'Signed',
+ 'Some',
+ 'StackOverflowError',
+ 'StepRange',
+ 'StepRangeLen',
+ 'StridedArray',
+ 'StridedMatrix',
+ 'StridedVecOrMat',
+ 'StridedVector',
+ 'String',
+ 'StringIndexError',
+ 'SubArray',
+ 'SubString',
+ 'SubstitutionString',
+ 'Symbol',
+ 'SystemError',
+ 'Task',
+ 'TaskFailedException',
+ 'Text',
+ 'TextDisplay',
+ 'Timer',
+ 'Tuple',
+ 'Type',
+ 'TypeError',
+ 'TypeVar',
+ 'UInt',
+ 'UInt128',
+ 'UInt16',
+ 'UInt32',
+ 'UInt64',
+ 'UInt8',
+ 'UndefInitializer',
+ 'UndefKeywordError',
+ 'UndefRefError',
+ 'UndefVarError',
+ 'Union',
+ 'UnionAll',
+ 'UnitRange',
+ 'Unsigned',
+ 'Val',
+ 'Vararg',
+ 'VecElement',
+ 'VecOrMat',
+ 'Vector',
+ 'VersionNumber',
+ 'WeakKeyDict',
+ 'WeakRef',
+)
+
+# Generated with the following in Julia v1.6.0-rc1
+'''
+#!/usr/bin/env julia
+
+import REPL.REPLCompletions
+res = String["true", "false"]
+for compl in filter!(x -> isa(x, REPLCompletions.ModuleCompletion) && (x.parent === Base || x.parent === Core),
+ REPLCompletions.completions("", 0)[1])
+ try
+ v = eval(Symbol(compl.mod))
+ if !(v isa Function || v isa Type || v isa TypeVar || v isa Module || v isa Colon)
+ push!(res, compl.mod)
+ end
+ catch e
+ end
+end
+sort!(unique!(res))
+foreach(x -> println("\'", x, "\',"), res)
+'''
+LITERAL_LIST = (
+ 'ARGS',
+ 'C_NULL',
+ 'DEPOT_PATH',
+ 'ENDIAN_BOM',
+ 'ENV',
+ 'Inf',
+ 'Inf16',
+ 'Inf32',
+ 'Inf64',
+ 'InsertionSort',
+ 'LOAD_PATH',
+ 'MergeSort',
+ 'NaN',
+ 'NaN16',
+ 'NaN32',
+ 'NaN64',
+ 'PROGRAM_FILE',
+ 'QuickSort',
+ 'RoundDown',
+ 'RoundFromZero',
+ 'RoundNearest',
+ 'RoundNearestTiesAway',
+ 'RoundNearestTiesUp',
+ 'RoundToZero',
+ 'RoundUp',
+ 'VERSION',
+ 'devnull',
+ 'false',
+ 'im',
+ 'missing',
+ 'nothing',
+ 'pi',
+ 'stderr',
+ 'stdin',
+ 'stdout',
+ 'true',
+ 'undef',
+ 'π',
+ 'ℯ',
+)
diff --git a/vendor/pygments-main/pygments/lexers/_mapping.py b/vendor/pygments-main/pygments/lexers/_mapping.py
index 45bbde26..24e3668d 100644
--- a/vendor/pygments-main/pygments/lexers/_mapping.py
+++ b/vendor/pygments-main/pygments/lexers/_mapping.py
@@ -15,20 +15,20 @@
LEXERS = {
'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
'AMDGPULexer': ('pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
- 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
+ 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
- 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
+ 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
+ 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
- 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
+ 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
- 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
+ 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
@@ -41,11 +41,12 @@
'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'ArrowLexer': ('pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
+ 'AscLexer': ('pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+ 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
@@ -55,9 +56,9 @@
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
- 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)),
+ 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
@@ -81,11 +82,11 @@
'CddlLexer': ('pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
+ 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
- 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
+ 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
@@ -95,7 +96,7 @@
'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
@@ -112,7 +113,7 @@
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
- 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
+ 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
@@ -125,7 +126,7 @@
'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
- 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
+ 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
@@ -144,9 +145,9 @@
'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs'), ('text/x-elixir',)),
+ 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
- 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
+ 'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
@@ -175,13 +176,15 @@
'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
'GDScriptLexer': ('pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
+ 'GSQLLexer': ('pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
+ 'GcodeLexer': ('pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
+ 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+ 'GoLexer': ('pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
@@ -194,7 +197,7 @@
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
@@ -214,28 +217,29 @@
'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
- 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')),
+ 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ('text/x-ini', 'text/inf')),
'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
+ 'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
- 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
- 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
+ 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
+ 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm', '*.mjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
- 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
- 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
+ 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', (), (), ()),
'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
+ 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
@@ -243,22 +247,23 @@
'KernelLogLexer': ('pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
+ 'KuinLexer': ('pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
- 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
+ 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
- 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
+ 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
- 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
- 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
- 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
+ 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
+ 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
+ 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LlvmMirBodyLexer': ('pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
'LlvmMirLexer': ('pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
@@ -271,24 +276,25 @@
'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
- 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
+ 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
- 'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md', 'markdown'), ('*.md', '*.markdown'), ('text/x-markdown',)),
+ 'MarkdownLexer': ('pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
+ 'MesonLexer': ('pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
- 'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('ms', 'miniscript'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
+ 'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
- 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
+ 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
'MoselLexer': ('pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
@@ -302,7 +308,7 @@
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
- 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
+ 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
@@ -311,12 +317,14 @@
'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
+ 'NestedTextLexer': ('pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NodeConsoleLexer': ('pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
@@ -327,9 +335,11 @@
'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
+ 'OmgIdlLexer': ('pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
@@ -348,9 +358,10 @@
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
- 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()),
+ 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
+ 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
+ 'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
@@ -368,7 +379,7 @@
'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
- 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()),
+ 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
@@ -381,12 +392,12 @@
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reason', 'reasonml'), ('*.re', '*.rei'), ('text/x-reasonml',)),
+ 'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()),
+ 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RideLexer': ('pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
@@ -395,10 +406,10 @@
'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
- 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
+ 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
+ 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
- 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
+ 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
@@ -423,11 +434,12 @@
'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
+ 'SmithyLexer': ('pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
+ 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
@@ -435,7 +447,7 @@
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
- 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
+ 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
@@ -448,12 +460,14 @@
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
- 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)),
+ 'TealLexer': ('pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
+ 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
+ 'ThingsDBLexer': ('pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
'TiddlyWiki5Lexer': ('pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
@@ -462,7 +476,7 @@
'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
- 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)),
+ 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
@@ -485,12 +499,13 @@
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
+ 'WatLexer': ('pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
'WebIDLLexer': ('pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
- 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
+ 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
@@ -504,6 +519,7 @@
'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
+ 'apdlexer': ('pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
}
if __name__ == '__main__': # pragma: no cover
diff --git a/vendor/pygments-main/pygments/lexers/actionscript.py b/vendor/pygments-main/pygments/lexers/actionscript.py
index 3d6df13e..f0ab6524 100644
--- a/vendor/pygments-main/pygments/lexers/actionscript.py
+++ b/vendor/pygments-main/pygments/lexers/actionscript.py
@@ -25,7 +25,7 @@ class ActionScriptLexer(RegexLexer):
"""
name = 'ActionScript'
- aliases = ['as', 'actionscript']
+ aliases = ['actionscript', 'as']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
@@ -123,7 +123,7 @@ class ActionScript3Lexer(RegexLexer):
"""
name = 'ActionScript 3'
- aliases = ['as3', 'actionscript3']
+ aliases = ['actionscript3', 'as3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
diff --git a/vendor/pygments-main/pygments/lexers/ambient.py b/vendor/pygments-main/pygments/lexers/ambient.py
index d27d126f..f5d8bd26 100644
--- a/vendor/pygments-main/pygments/lexers/ambient.py
+++ b/vendor/pygments-main/pygments/lexers/ambient.py
@@ -25,7 +25,7 @@ class AmbientTalkLexer(RegexLexer):
"""
name = 'AmbientTalk'
filenames = ['*.at']
- aliases = ['at', 'ambienttalk', 'ambienttalk/2']
+ aliases = ['ambienttalk', 'ambienttalk/2', 'at']
mimetypes = ['text/x-ambienttalk']
flags = re.MULTILINE | re.DOTALL
diff --git a/vendor/pygments-main/pygments/lexers/amdgpu.py b/vendor/pygments-main/pygments/lexers/amdgpu.py
index 529a687b..756b7731 100644
--- a/vendor/pygments-main/pygments/lexers/amdgpu.py
+++ b/vendor/pygments-main/pygments/lexers/amdgpu.py
@@ -8,13 +8,14 @@
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import RegexLexer
+from pygments.lexer import RegexLexer, words
from pygments.token import Name, Text, Keyword, Whitespace, Number, Comment
import re
__all__ = ['AMDGPULexer']
+
class AMDGPULexer(RegexLexer):
"""
For AMD GPU assembly.
@@ -24,7 +25,7 @@ class AMDGPULexer(RegexLexer):
name = 'AMDGPU'
aliases = ['amdgpu']
filenames = ['*.isa']
-
+
flags = re.IGNORECASE
tokens = {
@@ -36,7 +37,11 @@ class AMDGPULexer(RegexLexer):
(r'([;#]|//).*?\n', Comment.Single),
(r'((s_)?(ds|buffer|flat|image)_[a-z0-9_]+)', Keyword.Reserved),
(r'(_lo|_hi)', Name.Variable),
- (r'(vmcnt|lgkmcnt|expcnt|vmcnt|lit|unorm|glc)', Name.Attribute),
+ (r'(vmcnt|lgkmcnt|expcnt)', Name.Attribute),
+ (words((
+ 'op', 'vaddr', 'vdata', 'soffset', 'srsrc', 'format',
+ 'offset', 'offen', 'idxen', 'glc', 'dlc', 'slc', 'tfe', 'lds',
+ 'lit', 'unorm'), suffix=r'\b'), Name.Attribute),
(r'(label_[a-z0-9]+)', Keyword),
(r'(_L[0-9]*)', Name.Variable),
(r'(s|v)_[a-z0-9_]+', Keyword),
@@ -45,4 +50,4 @@ class AMDGPULexer(RegexLexer):
(r'[0-9]+\.[^0-9]+', Number.Float),
(r'(0[xX][a-z0-9]+)|([0-9]+)', Number.Integer)
]
- }
\ No newline at end of file
+ }
diff --git a/vendor/pygments-main/pygments/lexers/apdlexer.py b/vendor/pygments-main/pygments/lexers/apdlexer.py
new file mode 100644
index 00000000..f670198d
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/apdlexer.py
@@ -0,0 +1,448 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.apdlexer
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ANSYS Parametric Design Language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Comment, Keyword, Name, Text, Number, Operator, \
+ String, Generic, Punctuation
+
+__all__ = ['apdlexer']
+
+
+class apdlexer(RegexLexer):
+ """
+ For APDL source code.
+
+ .. versionadded:: 2.9
+ """
+ name = 'ANSYS parametric design language'
+ aliases = ['ansys', 'apdl']
+ filenames = ['*.ans']
+ flags = re.IGNORECASE
+
+ # list of elements
+ elafunb = ("SURF152", "SURF153", "SURF154", "SURF156", "SHELL157",
+ "SURF159", "LINK160", "BEAM161", "PLANE162",
+ "SHELL163", "SOLID164", "COMBI165", "MASS166",
+ "LINK167", "SOLID168", "TARGE169", "TARGE170",
+ "CONTA171", "CONTA172", "CONTA173", "CONTA174",
+ "CONTA175", "CONTA176", "CONTA177", "CONTA178",
+ "PRETS179", "LINK180", "SHELL181", "PLANE182",
+ "PLANE183", "MPC184", "SOLID185", "SOLID186",
+ "SOLID187", "BEAM188", "BEAM189", "SOLSH190",
+ "INTER192", "INTER193", "INTER194", "INTER195",
+ "MESH200", "FOLLW201", "INTER202", "INTER203",
+ "INTER204", "INTER205", "SHELL208", "SHELL209",
+ "CPT212", "CPT213", "COMBI214", "CPT215", "CPT216",
+ "CPT217", "FLUID220", "FLUID221", "PLANE223",
+ "SOLID226", "SOLID227", "PLANE230", "SOLID231",
+ "SOLID232", "PLANE233", "SOLID236", "SOLID237",
+ "PLANE238", "SOLID239", "SOLID240", "HSFLD241",
+ "HSFLD242", "SURF251", "SURF252", "REINF263",
+ "REINF264", "REINF265", "SOLID272", "SOLID273",
+ "SOLID278", "SOLID279", "SHELL281", "SOLID285",
+ "PIPE288", "PIPE289", "ELBOW290", "USER300", "BEAM3",
+ "BEAM4", "BEAM23", "BEAM24", "BEAM44", "BEAM54",
+ "COMBIN7", "FLUID79", "FLUID80", "FLUID81", "FLUID141",
+ "FLUID142", "INFIN9", "INFIN47", "PLANE13", "PLANE25",
+ "PLANE42", "PLANE53", "PLANE67", "PLANE82", "PLANE83",
+ "PLANE145", "PLANE146", "CONTAC12", "CONTAC52",
+ "LINK1", "LINK8", "LINK10", "LINK32", "PIPE16",
+ "PIPE17", "PIPE18", "PIPE20", "PIPE59", "PIPE60",
+ "SHELL41", "SHELL43", "SHELL57", "SHELL63", "SHELL91",
+ "SHELL93", "SHELL99", "SHELL150", "SOLID5", "SOLID45",
+ "SOLID46", "SOLID65", "SOLID69", "SOLID92", "SOLID95",
+ "SOLID117", "SOLID127", "SOLID128", "SOLID147",
+ "SOLID148", "SOLID191", "VISCO88", "VISCO89",
+ "VISCO106", "VISCO107", "VISCO108", "TRANS109")
+
+ elafunc = ("PGRAPH", "/VT", "VTIN", "VTRFIL", "VTTEMP", "PGRSET",
+ "VTCLR", "VTMETH", "VTRSLT", "VTVMOD", "PGSELE",
+ "VTDISC", "VTMP", "VTSEC", "PGWRITE", "VTEVAL", "VTOP",
+ "VTSFE", "POUTRES", "VTFREQ", "VTPOST", "VTSL",
+ "FLDATA1-40", "HFPCSWP", "MSDATA", "MSVARY", "QFACT",
+ "FLOCHECK", "HFPOWER", "MSMASS", "PERI", "SPADP",
+ "FLREAD", "HFPORT", "MSMETH", "PLFSS", "SPARM",
+ "FLOTRAN", "HFSCAT", "MSMIR", "PLSCH", "SPFSS",
+ "HFADP", "ICE", "MSNOMF", "PLSYZ", "SPICE", "HFARRAY",
+ "ICEDELE", "MSPROP", "PLTD", "SPSCAN", "HFDEEM",
+ "ICELIST", "MSQUAD", "PLTLINE", "SPSWP", "HFEIGOPT",
+ "ICVFRC", "MSRELAX", "PLVFRC", "HFEREFINE", "LPRT",
+ "MSSOLU", "/PICE", "HFMODPRT", "MSADV", "MSSPEC",
+ "PLWAVE", "HFPA", "MSCAP", "MSTERM", "PRSYZ")
+
+ elafund = ("*VOPER", "VOVLAP", "*VPLOT", "VPLOT", "VPTN", "*VPUT",
+ "VPUT", "*VREAD", "VROTAT", "VSBA", "VSBV", "VSBW",
+ "/VSCALE", "*VSCFUN", "VSEL", "VSLA", "*VSTAT", "VSUM",
+ "VSWEEP", "VSYMM", "VTRAN", "VTYPE", "/VUP", "*VWRITE",
+ "/WAIT", "WAVES", "WERASE", "WFRONT", "/WINDOW",
+ "WMID", "WMORE", "WPAVE", "WPCSYS", "WPLANE", "WPOFFS",
+ "WPROTA", "WPSTYL", "WRFULL", "WRITE", "WRITEMAP",
+ "*WRK", "WSORT", "WSPRINGS", "WSTART", "WTBCREATE",
+ "XFDATA", "XFENRICH", "XFLIST", "/XFRM", "/XRANGE",
+ "XVAR", "/YRANGE", "/ZOOM", "/WB", "XMLO", "/XML",
+ "CNTR", "EBLOCK", "CMBLOCK", "NBLOCK", "/TRACK",
+ "CWZPLOT", "~EUI", "NELE", "EALL", "NALL", "FLITEM",
+ "LSLN", "PSOLVE", "ASLN", "/VERIFY", "/SSS", "~CFIN",
+ "*EVAL", "*MOONEY", "/RUNSTAT", "ALPFILL",
+ "ARCOLLAPSE", "ARDETACH", "ARFILL", "ARMERGE",
+ "ARSPLIT", "FIPLOT", "GAPFINISH", "GAPLIST",
+ "GAPMERGE", "GAPOPT", "GAPPLOT", "LNCOLLAPSE",
+ "LNDETACH", "LNFILL", "LNMERGE", "LNSPLIT", "PCONV",
+ "PLCONV", "PEMOPTS", "PEXCLUDE", "PINCLUDE", "PMETH",
+ "/PMETH", "PMOPTS", "PPLOT", "PPRANGE", "PRCONV",
+ "PRECISION", "RALL", "RFILSZ", "RITER", "RMEMRY",
+ "RSPEED", "RSTAT", "RTIMST", "/RUNST", "RWFRNT",
+ "SARPLOT", "SHSD", "SLPPLOT", "SLSPLOT", "VCVFILL",
+ "/OPT", "OPEQN", "OPFACT", "OPFRST", "OPGRAD",
+ "OPKEEP", "OPLOOP", "OPPRNT", "OPRAND", "OPSUBP",
+ "OPSWEEP", "OPTYPE", "OPUSER", "OPVAR", "OPADD",
+ "OPCLR", "OPDEL", "OPMAKE", "OPSEL", "OPANL", "OPDATA",
+ "OPRESU", "OPSAVE", "OPEXE", "OPLFA", "OPLGR",
+ "OPLIST", "OPLSW", "OPRFA", "OPRGR", "OPRSW",
+ "PILECALC", "PILEDISPSET", "PILEGEN", "PILELOAD",
+ "PILEMASS", "PILERUN", "PILESEL", "PILESTIF",
+ "PLVAROPT", "PRVAROPT", "TOCOMP", "TODEF", "TOFREQ",
+ "TOTYPE", "TOVAR", "TOEXE", "TOLOOP", "TOGRAPH",
+ "TOLIST", "TOPLOT", "TOPRINT", "TOSTAT", "TZAMESH",
+ "TZDELE", "TZEGEN", "XVAROPT", "PGSAVE", "SOLCONTROL",
+ "TOTAL", "VTGEOM", "VTREAL", "VTSTAT")
+
+ elafune = ("/ANUM", "AOFFST", "AOVLAP", "APLOT", "APPEND", "APTN",
+ "ARCLEN", "ARCTRM", "AREAS", "AREFINE", "AREMESH",
+ "AREVERSE", "AROTAT", "ARSCALE", "ARSYM", "ASBA",
+ "ASBL", "ASBV", "ASBW", "ASCRES", "ASEL", "ASIFILE",
+ "*ASK", "ASKIN", "ASLL", "ASLV", "ASOL", "/ASSIGN",
+ "ASUB", "ASUM", "ATAN", "ATRAN", "ATYPE", "/AUTO",
+ "AUTOTS", "/AUX2", "/AUX3", "/AUX12", "/AUX15",
+ "AVPRIN", "AVRES", "AWAVE", "/AXLAB", "*AXPY",
+ "/BATCH", "BCSOPTION", "BETAD", "BF", "BFA", "BFADELE",
+ "BFALIST", "BFCUM", "BFDELE", "BFE", "BFECUM",
+ "BFEDELE", "BFELIST", "BFESCAL", "BFINT", "BFK",
+ "BFKDELE", "BFKLIST", "BFL", "BFLDELE", "BFLIST",
+ "BFLLIST", "BFSCALE", "BFTRAN", "BFUNIF", "BFV",
+ "BFVDELE", "BFVLIST", "BIOOPT", "BIOT", "BLC4", "BLC5",
+ "BLOCK", "BOOL", "BOPTN", "BSAX", "BSMD", "BSM1",
+ "BSM2", "BSPLIN", "BSS1", "BSS2", "BSTE", "BSTQ",
+ "BTOL", "BUCOPT", "C", "CALC", "CAMPBELL", "CBDOF",
+ "CBMD", "CBMX", "CBTE", "CBTMP", "CDOPT", "CDREAD",
+ "CDWRITE", "CE", "CECHECK", "CECMOD", "CECYC",
+ "CEDELE", "CEINTF", "CELIST", "CENTER", "CEQN",
+ "CERIG", "CESGEN", "CFACT", "*CFCLOS", "*CFOPEN",
+ "*CFWRITE", "/CFORMAT", "CGLOC", "CGOMGA", "CGROW",
+ "CHECK", "CHKMSH", "CINT", "CIRCLE", "CISOL",
+ "/CLABEL", "/CLEAR", "CLOCAL", "CLOG", "/CLOG",
+ "CLRMSHLN", "CM", "CMACEL", "/CMAP", "CMATRIX",
+ "CMDELE", "CMDOMEGA", "CMEDIT", "CMGRP", "CMLIST",
+ "CMMOD", "CMOMEGA", "CMPLOT", "CMROTATE", "CMSEL",
+ "CMSFILE", "CMSOPT", "CMWRITE", "CNCHECK", "CNKMOD",
+ "CNTR", "CNVTOL", "/COLOR", "/COM", "*COMP", "COMBINE",
+ "COMPRESS", "CON4", "CONE", "/CONFIG", "CONJUG",
+ "/CONTOUR", "/COPY", "CORIOLIS", "COUPLE", "COVAL",
+ "CP", "CPCYC", "CPDELE", "CPINTF", "/CPLANE", "CPLGEN",
+ "CPLIST", "CPMERGE", "CPNGEN", "CPSGEN", "CQC",
+ "*CREATE", "CRPLIM", "CS", "CSCIR", "CSDELE", "CSKP",
+ "CSLIST", "CSWPLA", "CSYS", "/CTYPE", "CURR2D",
+ "CUTCONTROL", "/CVAL", "CVAR", "/CWD", "CYCCALC",
+ "/CYCEXPAND", "CYCFILES", "CYCFREQ", "*CYCLE",
+ "CYCLIC", "CYCOPT", "CYCPHASE", "CYCSPEC", "CYL4",
+ "CYL5", "CYLIND", "CZDEL", "CZMESH", "D", "DA",
+ "DADELE", "DALIST", "DAMORPH", "DATA", "DATADEF",
+ "DCGOMG", "DCUM", "DCVSWP", "DDASPEC", "DDELE",
+ "DDOPTION", "DEACT", "DEFINE", "*DEL", "DELETE",
+ "/DELETE", "DELTIM", "DEMORPH", "DERIV", "DESIZE",
+ "DESOL", "DETAB", "/DEVDISP", "/DEVICE", "/DFLAB",
+ "DFLX", "DFSWAVE", "DIG", "DIGIT", "*DIM",
+ "/DIRECTORY", "DISPLAY", "/DIST", "DJ", "DJDELE",
+ "DJLIST", "DK", "DKDELE", "DKLIST", "DL", "DLDELE",
+ "DLIST", "DLLIST", "*DMAT", "DMOVE", "DMPEXT",
+ "DMPOPTION", "DMPRAT", "DMPSTR", "DNSOL", "*DO", "DOF",
+ "DOFSEL", "DOMEGA", "*DOT", "*DOWHILE", "DSCALE",
+ "/DSCALE", "DSET", "DSPOPTION", "DSUM", "DSURF",
+ "DSYM", "DSYS", "DTRAN", "DUMP", "/DV3D", "DVAL",
+ "DVMORPH", "DYNOPT", "E", "EALIVE", "EDADAPT", "EDALE",
+ "EDASMP", "EDBOUND", "EDBX", "EDBVIS", "EDCADAPT",
+ "EDCGEN", "EDCLIST", "EDCMORE", "EDCNSTR", "EDCONTACT",
+ "EDCPU", "EDCRB", "EDCSC", "EDCTS", "EDCURVE",
+ "EDDAMP", "EDDBL", "EDDC", "EDDRELAX", "EDDUMP",
+ "EDELE", "EDENERGY", "EDFPLOT", "EDGCALE", "/EDGE",
+ "EDHGLS", "EDHIST", "EDHTIME", "EDINT", "EDIPART",
+ "EDIS", "EDLCS", "EDLOAD", "EDMP", "EDNB", "EDNDTSD",
+ "EDNROT", "EDOPT", "EDOUT", "EDPART", "EDPC", "EDPL",
+ "EDPVEL", "EDRC", "EDRD", "EDREAD", "EDRI", "EDRST",
+ "EDRUN", "EDSHELL", "EDSOLV", "EDSP", "EDSTART",
+ "EDTERM", "EDTP", "EDVEL", "EDWELD", "EDWRITE",
+ "EEXTRUDE", "/EFACET", "EGEN", "*EIGEN", "EINFIN",
+ "EINTF", "EKILL", "ELBOW", "ELEM", "ELIST", "*ELSE",
+ "*ELSEIF", "EMAGERR", "EMATWRITE", "EMF", "EMFT",
+ "EMID", "EMIS", "EMODIF", "EMORE", "EMSYM", "EMTGEN",
+ "EMUNIT", "EN", "*END", "*ENDDO", "*ENDIF",
+ "ENDRELEASE", "ENERSOL", "ENGEN", "ENORM", "ENSYM",
+ "EORIENT", "EPLOT", "EQSLV", "ERASE", "/ERASE",
+ "EREAD", "EREFINE", "EREINF", "ERESX", "ERNORM",
+ "ERRANG", "ESCHECK", "ESEL", "/ESHAPE", "ESIZE",
+ "ESLA", "ESLL", "ESLN", "ESLV", "ESOL", "ESORT",
+ "ESSOLV", "ESTIF", "ESURF", "ESYM", "ESYS", "ET",
+ "ETABLE", "ETCHG", "ETCONTROL", "ETDELE", "ETLIST",
+ "ETYPE", "EUSORT", "EWRITE", "*EXIT", "/EXIT", "EXP",
+ "EXPAND", "/EXPAND", "EXPASS", "*EXPORT", "EXPROFILE",
+ "EXPSOL", "EXTOPT", "EXTREM", "EXUNIT", "F", "/FACET",
+ "FATIGUE", "FC", "FCCHECK", "FCDELE", "FCLIST", "FCUM",
+ "FCTYP", "FDELE", "/FDELE", "FE", "FEBODY", "FECONS",
+ "FEFOR", "FELIST", "FESURF", "*FFT", "FILE",
+ "FILEAUX2", "FILEAUX3", "FILEDISP", "FILL", "FILLDATA",
+ "/FILNAME", "FINISH", "FITEM", "FJ", "FJDELE",
+ "FJLIST", "FK", "FKDELE", "FKLIST", "FL", "FLIST",
+ "FLLIST", "FLST", "FLUXV", "FLUREAD", "FMAGBC",
+ "FMAGSUM", "/FOCUS", "FOR2D", "FORCE", "FORM",
+ "/FORMAT", "FP", "FPLIST", "*FREE", "FREQ", "FRQSCL",
+ "FS", "FSCALE", "FSDELE", "FSLIST", "FSNODE", "FSPLOT",
+ "FSSECT", "FSSPARM", "FSUM", "FTCALC", "FTRAN",
+ "FTSIZE", "FTWRITE", "FTYPE", "FVMESH", "GAP", "GAPF",
+ "GAUGE", "GCDEF", "GCGEN", "/GCMD", "/GCOLUMN",
+ "GENOPT", "GEOM", "GEOMETRY", "*GET", "/GFILE",
+ "/GFORMAT", "/GLINE", "/GMARKER", "GMATRIX", "GMFACE",
+ "*GO", "/GO", "/GOLIST", "/GOPR", "GP", "GPDELE",
+ "GPLIST", "GPLOT", "/GRAPHICS", "/GRESUME", "/GRID",
+ "/GROPT", "GRP", "/GRTYP", "/GSAVE", "GSBDATA",
+ "GSGDATA", "GSLIST", "GSSOL", "/GST", "GSUM", "/GTHK",
+ "/GTYPE", "HARFRQ", "/HBC", "HBMAT", "/HEADER", "HELP",
+ "HELPDISP", "HEMIOPT", "HFANG", "HFSYM", "HMAGSOLV",
+ "HPGL", "HPTCREATE", "HPTDELETE", "HRCPLX", "HREXP",
+ "HROPT", "HROCEAN", "HROUT", "IC", "ICDELE", "ICLIST",
+ "/ICLWID", "/ICSCALE", "*IF", "IGESIN", "IGESOUT",
+ "/IMAGE", "IMAGIN", "IMESH", "IMMED", "IMPD",
+ "INISTATE", "*INIT", "/INPUT", "/INQUIRE", "INRES",
+ "INRTIA", "INT1", "INTSRF", "IOPTN", "IRLF", "IRLIST",
+ "*ITENGINE", "JPEG", "JSOL", "K", "KATT", "KBC",
+ "KBETW", "KCALC", "KCENTER", "KCLEAR", "KDELE",
+ "KDIST", "KEEP", "KESIZE", "KEYOPT", "KEYPTS", "KEYW",
+ "KFILL", "KGEN", "KL", "KLIST", "KMESH", "KMODIF",
+ "KMOVE", "KNODE", "KPLOT", "KPSCALE", "KREFINE",
+ "KSCALE", "KSCON", "KSEL", "KSLL", "KSLN", "KSUM",
+ "KSYMM", "KTRAN", "KUSE", "KWPAVE", "KWPLAN", "L",
+ "L2ANG", "L2TAN", "LANG", "LARC", "/LARC", "LAREA",
+ "LARGE", "LATT", "LAYER", "LAYERP26", "LAYLIST",
+ "LAYPLOT", "LCABS", "LCASE", "LCCALC", "LCCAT",
+ "LCDEF", "LCFACT", "LCFILE", "LCLEAR", "LCOMB",
+ "LCOPER", "LCSEL", "LCSL", "LCSUM", "LCWRITE",
+ "LCZERO", "LDELE", "LDIV", "LDRAG", "LDREAD", "LESIZE",
+ "LEXTND", "LFILLT", "LFSURF", "LGEN", "LGLUE",
+ "LGWRITE", "/LIGHT", "LINA", "LINE", "/LINE", "LINES",
+ "LINL", "LINP", "LINV", "LIST", "*LIST", "LLIST",
+ "LMATRIX", "LMESH", "LNSRCH", "LOCAL", "LOVLAP",
+ "LPLOT", "LPTN", "LREFINE", "LREVERSE", "LROTAT",
+ "LSBA", "*LSBAC", "LSBL", "LSBV", "LSBW", "LSCLEAR",
+ "LSDELE", "*LSDUMP", "LSEL", "*LSENGINE", "*LSFACTOR",
+ "LSLA", "LSLK", "LSOPER", "/LSPEC", "LSREAD",
+ "*LSRESTORE", "LSSCALE", "LSSOLVE", "LSTR", "LSUM",
+ "LSWRITE", "/LSYMBOL", "LSYMM", "LTAN", "LTRAN",
+ "LUMPM", "LVSCALE", "LWPLAN", "M", "MADAPT", "MAGOPT",
+ "MAGSOLV", "/MAIL", "MAP", "/MAP", "MAP2DTO3D",
+ "MAPSOLVE", "MAPVAR", "MASTER", "MAT", "MATER",
+ "MCHECK", "MDAMP", "MDELE", "MDPLOT", "MEMM", "/MENU",
+ "MESHING", "MFANALYSIS", "MFBUCKET", "MFCALC", "MFCI",
+ "MFCLEAR", "MFCMMAND", "MFCONV", "MFDTIME", "MFELEM",
+ "MFEM", "MFEXTER", "MFFNAME", "MFFR", "MFIMPORT",
+ "MFINTER", "MFITER", "MFLCOMM", "MFLIST", "MFMAP",
+ "MFORDER", "MFOUTPUT", "*MFOURI", "MFPSIMUL", "MFRC",
+ "MFRELAX", "MFRSTART", "MFSORDER", "MFSURFACE",
+ "MFTIME", "MFTOL", "*MFUN", "MFVOLUME", "MFWRITE",
+ "MGEN", "MIDTOL", "/MKDIR", "MLIST", "MMASS", "MMF",
+ "MODCONT", "MODE", "MODIFY", "MODMSH", "MODSELOPTION",
+ "MODOPT", "MONITOR", "*MOPER", "MOPT", "MORPH", "MOVE",
+ "MP", "MPAMOD", "MPCHG", "MPCOPY", "MPDATA", "MPDELE",
+ "MPDRES", "/MPLIB", "MPLIST", "MPPLOT", "MPREAD",
+ "MPRINT", "MPTEMP", "MPTGEN", "MPTRES", "MPWRITE",
+ "/MREP", "MSAVE", "*MSG", "MSHAPE", "MSHCOPY",
+ "MSHKEY", "MSHMID", "MSHPATTERN", "MSOLVE", "/MSTART",
+ "MSTOLE", "*MULT", "*MWRITE", "MXPAND", "N", "NANG",
+ "NAXIS", "NCNV", "NDELE", "NDIST", "NDSURF", "NEQIT",
+ "/NERR", "NFORCE", "NGEN", "NKPT", "NLADAPTIVE",
+ "NLDIAG", "NLDPOST", "NLGEOM", "NLHIST", "NLIST",
+ "NLMESH", "NLOG", "NLOPT", "NMODIF", "NOCOLOR",
+ "NODES", "/NOERASE", "/NOLIST", "NOOFFSET", "NOORDER",
+ "/NOPR", "NORA", "NORL", "/NORMAL", "NPLOT", "NPRINT",
+ "NREAD", "NREFINE", "NRLSUM", "*NRM", "NROPT",
+ "NROTAT", "NRRANG", "NSCALE", "NSEL", "NSLA", "NSLE",
+ "NSLK", "NSLL", "NSLV", "NSMOOTH", "NSOL", "NSORT",
+ "NSTORE", "NSUBST", "NSVR", "NSYM", "/NUMBER",
+ "NUMCMP", "NUMEXP", "NUMMRG", "NUMOFF", "NUMSTR",
+ "NUMVAR", "NUSORT", "NWPAVE", "NWPLAN", "NWRITE",
+ "OCDATA", "OCDELETE", "OCLIST", "OCREAD", "OCTABLE",
+ "OCTYPE", "OCZONE", "OMEGA", "OPERATE", "OPNCONTROL",
+ "OUTAERO", "OUTOPT", "OUTPR", "/OUTPUT", "OUTRES",
+ "OVCHECK", "PADELE", "/PAGE", "PAGET", "PAPUT",
+ "PARESU", "PARTSEL", "PARRES", "PARSAV", "PASAVE",
+ "PATH", "PAUSE", "/PBC", "/PBF", "PCALC", "PCGOPT",
+ "PCIRC", "/PCIRCLE", "/PCOPY", "PCROSS", "PDANL",
+ "PDCDF", "PDCFLD", "PDCLR", "PDCMAT", "PDCORR",
+ "PDDMCS", "PDDOEL", "PDEF", "PDEXE", "PDHIST",
+ "PDINQR", "PDLHS", "PDMETH", "PDOT", "PDPINV",
+ "PDPLOT", "PDPROB", "PDRESU", "PDROPT", "/PDS",
+ "PDSAVE", "PDSCAT", "PDSENS", "PDSHIS", "PDUSER",
+ "PDVAR", "PDWRITE", "PERBC2D", "PERTURB", "PFACT",
+ "PHYSICS", "PIVCHECK", "PLCAMP", "PLCFREQ", "PLCHIST",
+ "PLCINT", "PLCPLX", "PLCRACK", "PLDISP", "PLESOL",
+ "PLETAB", "PLFAR", "PLF2D", "PLGEOM", "PLLS", "PLMAP",
+ "PLMC", "PLNEAR", "PLNSOL", "/PLOPTS", "PLORB", "PLOT",
+ "PLOTTING", "PLPAGM", "PLPATH", "PLSECT", "PLST",
+ "PLTIME", "PLTRAC", "PLVAR", "PLVECT", "PLZZ",
+ "/PMACRO", "PMAP", "PMGTRAN", "PMLOPT", "PMLSIZE",
+ "/PMORE", "PNGR", "/PNUM", "POINT", "POLY", "/POLYGON",
+ "/POST1", "/POST26", "POWERH", "PPATH", "PRANGE",
+ "PRAS", "PRCAMP", "PRCINT", "PRCPLX", "PRED",
+ "PRENERGY", "/PREP7", "PRERR", "PRESOL", "PRETAB",
+ "PRFAR", "PRI2", "PRIM", "PRINT", "*PRINT", "PRISM",
+ "PRITER", "PRJSOL", "PRNEAR", "PRNLD", "PRNSOL",
+ "PROD", "PRORB", "PRPATH", "PRRFOR", "PRRSOL",
+ "PRSCONTROL", "PRSECT", "PRTIME", "PRVAR", "PRVECT",
+ "PSCONTROL", "PSCR", "PSDCOM", "PSDFRQ", "PSDGRAPH",
+ "PSDRES", "PSDSPL", "PSDUNIT", "PSDVAL", "PSDWAV",
+ "/PSEARCH", "PSEL", "/PSF", "PSMAT", "PSMESH",
+ "/PSPEC", "/PSTATUS", "PSTRES", "/PSYMB", "PTR",
+ "PTXY", "PVECT", "/PWEDGE", "QDVAL", "QRDOPT", "QSOPT",
+ "QUAD", "/QUIT", "QUOT", "R", "RACE", "RADOPT",
+ "RAPPND", "RATE", "/RATIO", "RBE3", "RCON", "RCYC",
+ "RDEC", "RDELE", "READ", "REAL", "REALVAR", "RECTNG",
+ "REMESH", "/RENAME", "REORDER", "*REPEAT", "/REPLOT",
+ "RESCOMBINE", "RESCONTROL", "RESET", "/RESET", "RESP",
+ "RESUME", "RESVEC", "RESWRITE", "*RETURN", "REXPORT",
+ "REZONE", "RFORCE", "/RGB", "RIGID", "RIGRESP",
+ "RIMPORT", "RLIST", "RMALIST", "RMANL", "RMASTER",
+ "RMCAP", "RMCLIST", "/RMDIR", "RMFLVEC", "RMLVSCALE",
+ "RMMLIST", "RMMRANGE", "RMMSELECT", "RMNDISP",
+ "RMNEVEC", "RMODIF", "RMORE", "RMPORDER", "RMRESUME",
+ "RMRGENERATE", "RMROPTIONS", "RMRPLOT", "RMRSTATUS",
+ "RMSAVE", "RMSMPLE", "RMUSE", "RMXPORT", "ROCK",
+ "ROSE", "RPOLY", "RPR4", "RPRISM", "RPSD", "RSFIT",
+ "RSOPT", "RSPLIT", "RSPLOT", "RSPRNT", "RSSIMS",
+ "RSTMAC", "RSTOFF", "RSURF", "RSYMM", "RSYS", "RTHICK",
+ "SABS", "SADD", "SALLOW", "SAVE", "SBCLIST", "SBCTRAN",
+ "SDELETE", "SE", "SECCONTROL", "SECDATA",
+ "SECFUNCTION", "SECJOINT", "/SECLIB", "SECLOCK",
+ "SECMODIF", "SECNUM", "SECOFFSET", "SECPLOT",
+ "SECREAD", "SECSTOP", "SECTYPE", "SECWRITE", "SED",
+ "SEDLIST", "SEEXP", "/SEG", "SEGEN", "SELIST", "SELM",
+ "SELTOL", "SENERGY", "SEOPT", "SESYMM", "*SET", "SET",
+ "SETFGAP", "SETRAN", "SEXP", "SF", "SFA", "SFACT",
+ "SFADELE", "SFALIST", "SFBEAM", "SFCALC", "SFCUM",
+ "SFDELE", "SFE", "SFEDELE", "SFELIST", "SFFUN",
+ "SFGRAD", "SFL", "SFLDELE", "SFLEX", "SFLIST",
+ "SFLLIST", "SFSCALE", "SFTRAN", "/SHADE", "SHELL",
+ "/SHOW", "/SHOWDISP", "SHPP", "/SHRINK", "SLIST",
+ "SLOAD", "SMALL", "*SMAT", "SMAX", "/SMBC", "SMBODY",
+ "SMCONS", "SMFOR", "SMIN", "SMOOTH", "SMRTSIZE",
+ "SMSURF", "SMULT", "SNOPTION", "SOLU", "/SOLU",
+ "SOLUOPT", "SOLVE", "SORT", "SOURCE", "SPACE",
+ "SPCNOD", "SPCTEMP", "SPDAMP", "SPEC", "SPFREQ",
+ "SPGRAPH", "SPH4", "SPH5", "SPHERE", "SPLINE", "SPLOT",
+ "SPMWRITE", "SPOINT", "SPOPT", "SPREAD", "SPTOPT",
+ "SPOWER", "SPUNIT", "SPVAL", "SQRT", "*SREAD", "SRSS",
+ "SSBT", "/SSCALE", "SSLN", "SSMT", "SSPA", "SSPB",
+ "SSPD", "SSPE", "SSPM", "SSUM", "SSTATE", "STABILIZE",
+ "STAOPT", "STAT", "*STATUS", "/STATUS", "STEF",
+ "/STITLE", "STORE", "SUBOPT", "SUBSET", "SUCALC",
+ "SUCR", "SUDEL", "SUEVAL", "SUGET", "SUMAP", "SUMTYPE",
+ "SUPL", "SUPR", "SURESU", "SUSAVE", "SUSEL", "SUVECT",
+ "SV", "SVPLOT", "SVTYP", "SWADD", "SWDEL", "SWGEN",
+ "SWLIST", "SYNCHRO", "/SYP", "/SYS", "TALLOW",
+ "TARGET", "*TAXIS", "TB", "TBCOPY", "TBDATA", "TBDELE",
+ "TBEO", "TBIN", "TBFIELD", "TBFT", "TBLE", "TBLIST",
+ "TBMODIF", "TBPLOT", "TBPT", "TBTEMP", "TCHG", "/TEE",
+ "TERM", "THEXPAND", "THOPT", "TIFF", "TIME",
+ "TIMERANGE", "TIMINT", "TIMP", "TINTP", "/TITLE",
+ "/TLABEL", "TOFFST", "*TOPER", "TORQ2D", "TORQC2D",
+ "TORQSUM", "TORUS", "TRANS", "TRANSFER", "*TREAD",
+ "TREF", "/TRIAD", "/TRLCY", "TRNOPT", "TRPDEL",
+ "TRPLIS", "TRPOIN", "TRTIME", "TSHAP", "/TSPEC",
+ "TSRES", "TUNIF", "TVAR", "/TXTRE", "/TYPE", "TYPE",
+ "/UCMD", "/UDOC", "/UI", "UIMP", "/UIS", "*ULIB",
+ "UNDELETE", "UNDO", "/UNITS", "UNPAUSE", "UPCOORD",
+ "UPGEOM", "*USE", "/USER", "USRCAL", "USRDOF",
+ "USRELEM", "V", "V2DOPT", "VA", "*VABS", "VADD",
+ "VARDEL", "VARNAM", "VATT", "VCLEAR", "*VCOL",
+ "/VCONE", "VCROSS", "*VCUM", "VDDAM", "VDELE", "VDGL",
+ "VDOT", "VDRAG", "*VEC", "*VEDIT", "VEORIENT", "VEXT",
+ "*VFACT", "*VFILL", "VFOPT", "VFQUERY", "VFSM",
+ "*VFUN", "VGEN", "*VGET", "VGET", "VGLUE", "/VIEW",
+ "VIMP", "VINP", "VINV", "*VITRP", "*VLEN", "VLIST",
+ "VLSCALE", "*VMASK", "VMESH", "VOFFST", "VOLUMES")
+
+ # list of in-built () functions
+ elafunf = ("NX()", "NY()", "NZ()", "KX()", "KY()", "KZ()", "LX()",
+ "LY()", "LZ()", "LSX()", "LSY()", "LSZ()", "NODE()",
+ "KP()", "DISTND()", "DISTKP()", "DISTEN()", "ANGLEN()",
+ "ANGLEK()", "NNEAR()", "KNEAR()", "ENEARN()",
+ "AREAND()", "AREAKP()", "ARNODE()", "NORMNX()",
+ "NORMNY()", "NORMNZ()", "NORMKX()", "NORMKY()",
+ "NORMKZ()", "ENEXTN()", "NELEM()", "NODEDOF()",
+ "ELADJ()", "NDFACE()", "NMFACE()", "ARFACE()", "UX()",
+ "UY()", "UZ()", "ROTX()", "ROTY()", "ROTZ()", "TEMP()",
+ "PRES()", "VX()", "VY()", "VZ()", "ENKE()", "ENDS()",
+ "VOLT()", "MAG()", "AX()", "AY()", "AZ()",
+ "VIRTINQR()", "KWGET()", "VALCHR()", "VALHEX()",
+ "CHRHEX()", "STRFILL()", "STRCOMP()", "STRPOS()",
+ "STRLENG()", "UPCASE()", "LWCASE()", "JOIN()",
+ "SPLIT()", "ABS()", "SIGN()", "CXABS()", "EXP()",
+ "LOG()", "LOG10()", "SQRT()", "NINT()", "MOD()",
+ "RAND()", "GDIS()", "SIN()", "COS()", "TAN()",
+ "SINH()", "COSH()", "TANH()", "ASIN()", "ACOS()",
+ "ATAN()", "ATAN2()")
+
+ elafung = ("NSEL()", "ESEL()", "KSEL()", "LSEL()", "ASEL()",
+ "VSEL()", "NDNEXT()", "ELNEXT()", "KPNEXT()",
+ "LSNEXT()", "ARNEXT()", "VLNEXT()", "CENTRX()",
+ "CENTRY()", "CENTRZ()")
+
+ elafunh = ("~CAT5IN", "~CATIAIN", "~PARAIN", "~PROEIN", "~SATIN",
+ "~UGIN", "A", "AADD", "AATT", "ABEXTRACT", "*ABBR",
+ "ABBRES", "ABBSAV", "ABS", "ACCAT", "ACCOPTION",
+ "ACEL", "ACLEAR", "ADAMS", "ADAPT", "ADD", "ADDAM",
+ "ADELE", "ADGL", "ADRAG", "AESIZE", "AFILLT", "AFLIST",
+ "AFSURF", "*AFUN", "AGEN", "AGLUE", "AINA", "AINP",
+ "AINV", "AL", "ALIST", "ALLSEL", "ALPHAD", "AMAP",
+ "AMESH", "/AN3D", "ANCNTR", "ANCUT", "ANCYC", "ANDATA",
+ "ANDSCL", "ANDYNA", "/ANFILE", "ANFLOW", "/ANGLE",
+ "ANHARM", "ANIM", "ANISOS", "ANMODE", "ANMRES",
+ "/ANNOT", "ANORM", "ANPRES", "ANSOL", "ANSTOAQWA",
+ "ANSTOASAS", "ANTIME", "ANTYPE")
+
+ tokens = {
+ 'root': [
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (words((elafunb+elafunc+elafund+elafune+elafunh), suffix=r'\b'), Keyword),
+ (words((elafunf+elafung), suffix=r'\b'), Name.Builtin),
+ (r'AR[0-9]+', Name.Variable.Instance),
+ (r'[a-z][a-z0-9_]*', Name.Variable),
+ (r'[\s]+', Text),
+ ],
+ 'core': [
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+ (r'/EOF', Generic.Emph),
+ (r'[(),:&;]', Punctuation),
+ ],
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'[$%]', String.Symbol),
+ ],
+ 'nums': [
+ (r'\d+(?![.ef])', Number.Integer),
+ (r'[+-]?\d*\.?\d+([ef][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.?\d*([ef][-+]?\d+)?', Number.Float),
+ ]
+ }
diff --git a/vendor/pygments-main/pygments/lexers/apl.py b/vendor/pygments-main/pygments/lexers/apl.py
index b6811164..8c5eee7a 100644
--- a/vendor/pygments-main/pygments/lexers/apl.py
+++ b/vendor/pygments-main/pygments/lexers/apl.py
@@ -23,7 +23,10 @@ class APLLexer(RegexLexer):
"""
name = 'APL'
aliases = ['apl']
- filenames = ['*.apl']
+ filenames = [
+ '*.apl', '*.aplf', '*.aplo', '*.apln',
+ '*.aplc', '*.apli', '*.dyalog',
+ ]
tokens = {
'root': [
@@ -65,8 +68,8 @@ class APLLexer(RegexLexer):
#
# Variables
# =========
- # following IBM APL2 standard
- (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
+ # following IBM APL2 standard (with a leading _ ok for GNU APL and Dyalog)
+ (r'[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
#
# Numbers
# =======
diff --git a/vendor/pygments-main/pygments/lexers/asc.py b/vendor/pygments-main/pygments/lexers/asc.py
new file mode 100644
index 00000000..f775fd07
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/asc.py
@@ -0,0 +1,51 @@
+"""
+ pygments.lexers.asc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for various ASCII armored files.
+
+ :copyright: Copyright 2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Generic, Name, Operator, String, Whitespace
+
+__all__ = ['AscLexer']
+
+
+class AscLexer(RegexLexer):
+ """
+ Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped base64 data.
+
+ .. versionadded:: 2.10
+ """
+ name = 'ASCII armored'
+ aliases = ['asc', 'pem']
+ filenames = [
+ '*.asc', # PGP; *.gpg, *.pgp, and *.sig too, but those can be binary
+ '*.pem', # X.509; *.cer, *.crt, *.csr, and key etc too, but those can be binary
+ 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa', # SSH private keys
+ ]
+ mimetypes = ['application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature']
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'^-----BEGIN [^\n]+-----$', Generic.Heading, 'data'),
+ (r'\S+', Comment),
+ ],
+ 'data': [
+ (r'\s+', Whitespace),
+ (r'^([^:]+)(:)([ \t]+)(.*)', bygroups(Name.Attribute, Operator, Whitespace, String)),
+ (r'^-----END [^\n]+-----$', Generic.Heading, 'root'),
+ (r'\S+', String),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'^-----BEGIN [^\n]+-----\r?\n', text):
+ return True
diff --git a/vendor/pygments-main/pygments/lexers/asm.py b/vendor/pygments-main/pygments/lexers/asm.py
index d7e3e4ba..f83abdef 100644
--- a/vendor/pygments-main/pygments/lexers/asm.py
+++ b/vendor/pygments-main/pygments/lexers/asm.py
@@ -353,13 +353,14 @@ class LlvmLexer(RegexLexer):
#: optional Comment or Whitespace
string = r'"[^"]*?"'
identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
+ block_label = r'(' + identifier + r'|(\d+))'
tokens = {
'root': [
include('whitespace'),
# Before keywords, because keywords are valid label names :(...
- (identifier + r'\s*:', Name.Label),
+ (block_label + r'\s*:', Name.Label),
include('keyword'),
@@ -384,68 +385,95 @@ class LlvmLexer(RegexLexer):
'keyword': [
# Regular keywords
(words((
- 'acq_rel', 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
- 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca', 'allocsize', 'allOnes',
- 'alwaysinline', 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gs', 'amdgpu_hs',
- 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps', 'amdgpu_vs', 'and', 'any',
- 'anyregcc', 'appending', 'arcp', 'argmemonly', 'args', 'arm_aapcs_vfpcc',
- 'arm_aapcscc', 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw', 'attributes',
- 'available_externally', 'avr_intrcc', 'avr_signalcc', 'bit', 'bitcast',
- 'bitMask', 'blockaddress', 'br', 'branchFunnel', 'builtin', 'byArg', 'byte',
- 'byteArray', 'byval', 'c', 'call', 'callee', 'caller', 'calls', 'catch',
- 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc', 'cleanup', 'cleanuppad',
- 'cleanupret', 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
- 'contract', 'convergent', 'critical', 'cxx_fast_tlscc', 'datalayout', 'declare',
- 'default', 'define', 'deplibs', 'dereferenceable', 'dereferenceable_or_null',
- 'distinct', 'dllexport', 'dllimport', 'dso_local', 'dso_preemptable',
- 'dsoLocal', 'eq', 'exact', 'exactmatch', 'extern_weak', 'external',
- 'externally_initialized', 'extractelement', 'extractvalue', 'fadd', 'false',
- 'fast', 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
- 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze', 'frem', 'from', 'fsub',
- 'funcFlags', 'function', 'gc', 'getelementptr', 'ghccc', 'global', 'guid', 'gv',
- 'hash', 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
- 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly', 'inalloca',
- 'inbounds', 'indir', 'indirectbr', 'info', 'initialexec', 'inline',
- 'inlineBits', 'inlinehint', 'inrange', 'inreg', 'insertelement', 'insertvalue',
- 'insts', 'intel_ocl_bicc', 'inteldialect', 'internal', 'inttoptr', 'invoke',
- 'jumptable', 'kind', 'landingpad', 'largest', 'linkage', 'linkonce',
- 'linkonce_odr', 'live', 'load', 'local_unnamed_addr', 'localdynamic',
- 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize', 'module', 'monotonic',
- 'msp430_intrcc', 'mul', 'musttail', 'naked', 'name', 'nand', 'ne', 'nest',
- 'ninf', 'nnan', 'noalias', 'nobuiltin', 'nocapture', 'nocf_check',
- 'noduplicate', 'noduplicates', 'noimplicitfloat', 'noinline', 'none',
- 'nonlazybind', 'nonnull', 'norecurse', 'noRecurse', 'noredzone', 'noreturn',
- 'notail', 'notEligibleToImport', 'nounwind', 'nsw', 'nsz', 'null', 'nuw', 'oeq',
- 'offset', 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
- 'optnone', 'optsize', 'or', 'ord', 'path', 'personality', 'phi', 'poison',
- 'prefix', 'preserve_allcc', 'preserve_mostcc', 'private', 'prologue',
- 'protected', 'ptrtoint', 'ptx_device', 'ptx_kernel', 'readnone', 'readNone',
- 'readonly', 'readOnly', 'reassoc', 'refs', 'relbf', 'release', 'resByArg',
- 'resume', 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice', 'safestack',
- 'samesize', 'sanitize_address', 'sanitize_hwaddress', 'sanitize_memory',
- 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst', 'sext', 'sge', 'sgt',
- 'shadowcallstack', 'shl', 'shufflevector', 'sideeffect', 'signext', 'single',
- 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1', 'sizeM1BitWidth', 'sle',
- 'slt', 'source_filename', 'speculatable', 'spir_func', 'spir_kernel', 'srem',
- 'sret', 'ssp', 'sspreq', 'sspstrong', 'store', 'strictfp', 'sub', 'summaries',
- 'summary', 'swiftcc', 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
- 'target', 'thread_local', 'to', 'token', 'triple', 'true', 'trunc', 'type',
- 'typeCheckedLoadConstVCalls', 'typeCheckedLoadVCalls', 'typeid', 'typeIdInfo',
- 'typeTestAssumeConstVCalls', 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests',
- 'udiv', 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin', 'undef',
- 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown', 'unnamed_addr', 'uno',
- 'unordered', 'unreachable', 'unsat', 'unwind', 'urem', 'uselistorder',
- 'uselistorder_bb', 'uwtable', 'va_arg', 'variable', 'vFuncId',
- 'virtualConstProp', 'void', 'volatile', 'weak', 'weak_odr', 'webkit_jscc',
+ 'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
+ 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
+ 'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca',
+ 'allocsize', 'allOnes', 'alwaysinline', 'alwaysInline',
+ 'amdgpu_cs', 'amdgpu_es', 'amdgpu_gfx', 'amdgpu_gs',
+ 'amdgpu_hs', 'amdgpu_kernel', 'amdgpu_ls', 'amdgpu_ps',
+ 'amdgpu_vs', 'and', 'any', 'anyregcc', 'appending', 'arcp',
+ 'argmemonly', 'args', 'arm_aapcs_vfpcc', 'arm_aapcscc',
+ 'arm_apcscc', 'ashr', 'asm', 'atomic', 'atomicrmw',
+ 'attributes', 'available_externally', 'avr_intrcc',
+ 'avr_signalcc', 'bit', 'bitcast', 'bitMask', 'blockaddress',
+ 'blockcount', 'br', 'branchFunnel', 'builtin', 'byArg',
+ 'byref', 'byte', 'byteArray', 'byval', 'c', 'call', 'callbr',
+ 'callee', 'caller', 'calls', 'canAutoHide', 'catch',
+ 'catchpad', 'catchret', 'catchswitch', 'cc', 'ccc',
+ 'cfguard_checkcc', 'cleanup', 'cleanuppad', 'cleanupret',
+ 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant',
+ 'contract', 'convergent', 'critical', 'cxx_fast_tlscc',
+ 'datalayout', 'declare', 'default', 'define', 'deplibs',
+ 'dereferenceable', 'dereferenceable_or_null', 'distinct',
+ 'dllexport', 'dllimport', 'dso_local', 'dso_local_equivalent',
+ 'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch',
+ 'extern_weak', 'external', 'externally_initialized',
+ 'extractelement', 'extractvalue', 'fadd', 'false', 'fast',
+ 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul',
+ 'fneg', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze',
+ 'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc',
+ 'getelementptr', 'ghccc', 'global', 'guid', 'gv', 'hash',
+ 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp',
+ 'ifunc', 'inaccessiblemem_or_argmemonly',
+ 'inaccessiblememonly', 'inalloca', 'inbounds', 'indir',
+ 'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits',
+ 'inlinehint', 'inrange', 'inreg', 'insertelement',
+ 'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect',
+ 'internal', 'inttoptr', 'invoke', 'jumptable', 'kind',
+ 'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr',
+ 'live', 'load', 'local_unnamed_addr', 'localdynamic',
+ 'localexec', 'lshr', 'max', 'metadata', 'min', 'minsize',
+ 'module', 'monotonic', 'msp430_intrcc', 'mul', 'mustprogress',
+ 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', 'ninf',
+ 'nnan', 'noalias', 'nobuiltin', 'nocallback', 'nocapture',
+ 'nocf_check', 'noduplicate', 'noduplicates', 'nofree',
+ 'noimplicitfloat', 'noinline', 'noInline', 'nomerge', 'none',
+ 'nonlazybind', 'nonnull', 'noprofile', 'norecurse',
+ 'noRecurse', 'noredzone', 'noreturn', 'nosync', 'notail',
+ 'notEligibleToImport', 'noundef', 'nounwind', 'nsw',
+ 'nsz', 'null', 'null_pointer_is_valid', 'nuw', 'oeq', 'offset',
+ 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing',
+ 'optnone', 'optsize', 'or', 'ord', 'param', 'params',
+ 'partition', 'path', 'personality', 'phi', 'poison',
+ 'preallocated', 'prefix', 'preserve_allcc', 'preserve_mostcc',
+ 'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device',
+ 'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly',
+ 'reassoc', 'refs', 'relbf', 'release', 'resByArg', 'resume',
+ 'ret', 'returnDoesNotAlias', 'returned', 'returns_twice',
+ 'safestack', 'samesize', 'sanitize_address',
+ 'sanitize_hwaddress', 'sanitize_memory', 'sanitize_memtag',
+ 'sanitize_thread', 'sdiv', 'section', 'select', 'seq_cst',
+ 'sext', 'sge', 'sgt', 'shadowcallstack', 'shl',
+ 'shufflevector', 'sideeffect', 'signext', 'single',
+ 'singleImpl', 'singleImplName', 'sitofp', 'sizeM1',
+ 'sizeM1BitWidth', 'sle', 'slt', 'source_filename',
+ 'speculatable', 'speculative_load_hardening', 'spir_func',
+ 'spir_kernel', 'srem', 'sret', 'ssp', 'sspreq', 'sspstrong',
+ 'store', 'strictfp', 'sub', 'summaries', 'summary', 'swiftcc',
+ 'swifterror', 'swiftself', 'switch', 'syncscope', 'tail',
+ 'tailcc', 'target', 'thread_local', 'to', 'token', 'triple',
+ 'true', 'trunc', 'type', 'typeCheckedLoadConstVCalls',
+ 'typeCheckedLoadVCalls', 'typeid', 'typeidCompatibleVTable',
+ 'typeIdInfo', 'typeTestAssumeConstVCalls',
+ 'typeTestAssumeVCalls', 'typeTestRes', 'typeTests', 'udiv',
+ 'ueq', 'uge', 'ugt', 'uitofp', 'ule', 'ult', 'umax', 'umin',
+ 'undef', 'une', 'uniformRetVal', 'uniqueRetVal', 'unknown',
+ 'unnamed_addr', 'uno', 'unordered', 'unreachable', 'unsat',
+ 'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable',
+ 'va_arg', 'varFlags', 'variable', 'vcall_visibility',
+ 'vFuncId', 'virtFunc', 'virtualConstProp', 'void', 'volatile',
+ 'vscale', 'vTableFuncs', 'weak', 'weak_odr', 'webkit_jscc',
'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly',
- 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc', 'x86_mmx',
- 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc', 'x86_vectorcallcc', 'xchg',
- 'xor', 'zeroext', 'zeroinitializer', 'zext', 'immarg', 'willreturn'),
+ 'x', 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc',
+ 'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc',
+ 'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
+ 'zeroinitializer', 'zext', 'immarg', 'willreturn'),
suffix=r'\b'), Keyword),
# Types
(words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
- 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'token')),
+ 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
+ 'x86_amx', 'token')),
Keyword.Type),
# Integer types
diff --git a/vendor/pygments-main/pygments/lexers/automation.py b/vendor/pygments-main/pygments/lexers/automation.py
index e3b2ad85..7b03e39a 100644
--- a/vendor/pygments-main/pygments/lexers/automation.py
+++ b/vendor/pygments-main/pygments/lexers/automation.py
@@ -22,7 +22,7 @@ class AutohotkeyLexer(RegexLexer):
.. versionadded:: 1.4
"""
name = 'autohotkey'
- aliases = ['ahk', 'autohotkey']
+ aliases = ['autohotkey', 'ahk']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
diff --git a/vendor/pygments-main/pygments/lexers/bibtex.py b/vendor/pygments-main/pygments/lexers/bibtex.py
index 56b5218d..bb572d6e 100644
--- a/vendor/pygments-main/pygments/lexers/bibtex.py
+++ b/vendor/pygments-main/pygments/lexers/bibtex.py
@@ -26,7 +26,7 @@ class BibTeXLexer(ExtendedRegexLexer):
"""
name = 'BibTeX'
- aliases = ['bib', 'bibtex']
+ aliases = ['bibtex', 'bib']
filenames = ['*.bib']
mimetypes = ["text/x-bibtex"]
flags = re.IGNORECASE
@@ -154,6 +154,6 @@ class BSTLexer(RegexLexer):
],
'whitespace': [
(r'\s+', Text),
- ('%.*?$', Comment.SingleLine),
+ ('%.*?$', Comment.Single),
],
}
diff --git a/vendor/pygments-main/pygments/lexers/c_cpp.py b/vendor/pygments-main/pygments/lexers/c_cpp.py
index 1bdebd2b..cf9a3d26 100644
--- a/vendor/pygments-main/pygments/lexers/c_cpp.py
+++ b/vendor/pygments-main/pygments/lexers/c_cpp.py
@@ -14,7 +14,7 @@
this, inherit, default, words
from pygments.util import get_bool_opt
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation, Whitespace
__all__ = ['CLexer', 'CppLexer']
@@ -43,6 +43,7 @@ class CFamilyLexer(RegexLexer):
# Identifier regex with C and C++ Universal Character Name (UCN) support.
_ident = r'(?:[a-zA-Z_$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})*'
+ _namespaced_ident = r'(?:[a-zA-Z_$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)*'
tokens = {
'whitespace': [
@@ -54,8 +55,8 @@ class CFamilyLexer(RegexLexer):
bygroups(using(this), Comment.Preproc), 'if0'),
('^(' + _ws1 + ')(#)',
bygroups(using(this), Comment.Preproc), 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
+ (r'\n', Whitespace),
+ (r'[^\S\n]+', Whitespace),
(r'\\\n', Text), # line continuation
(r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
@@ -63,6 +64,8 @@ class CFamilyLexer(RegexLexer):
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
],
'statements': [
+ include('keywords'),
+ include('types'),
(r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
(r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
bygroups(String.Affix, String.Char, String.Char, String.Char)),
@@ -78,6 +81,17 @@ class CFamilyLexer(RegexLexer):
(r'(-)?' + _decpart + _intsuffix, Number.Integer),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r'[()\[\],.]', Punctuation),
+ (r'(true|false|NULL)\b', Name.Builtin),
+ (r'(' + _ident + r')(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
+ (_ident, Name)
+ ],
+ 'types': [
+ (words(('int8', 'int16', 'int32', 'int64', 'wchar_t'), prefix=r'__',
+ suffix=r'\b'), Keyword.Reserved),
+ (words(('bool', 'int', 'long', 'float', 'short', 'double', 'char',
+ 'unsigned', 'signed', 'void'), suffix=r'\b'), Keyword.Type)
+ ],
+ 'keywords': [
(r'(struct|union)(\s+)', bygroups(Keyword, Text), 'classname'),
(words(('asm', 'auto', 'break', 'case', 'const', 'continue',
'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
@@ -85,36 +99,32 @@ class CFamilyLexer(RegexLexer):
'static', 'switch', 'typedef', 'volatile', 'while', 'union',
'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
suffix=r'\b'), Keyword),
- (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
- Keyword.Type),
(words(('inline', '_inline', '__inline', 'naked', 'restrict',
'thread'), suffix=r'\b'), Keyword.Reserved),
# Vector intrinsics
(r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
# Microsoft-isms
(words((
- 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
- 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
- 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop',
+ 'asm', 'based', 'except', 'stdcall', 'cdecl',
+ 'fastcall', 'declspec', 'finally', 'try',
+ 'leave', 'w64', 'unaligned', 'raise', 'noop',
'identifier', 'forceinline', 'assume'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved),
- (r'(true|false|NULL)\b', Name.Builtin),
- (r'(' + _ident + r')(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
- (_ident, Name)
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved)
],
'root': [
include('whitespace'),
+ include('keywords'),
# functions
- (r'((?:' + _ident + r'(?:[&*\s])+))' # return arguments
- r'(' + _ident + r')' # method name
+ (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
+ r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
r'([^;{]*)(\{)',
bygroups(using(this), Name.Function, using(this), using(this),
Punctuation),
'function'),
# function declarations
- (r'((?:' + _ident + r'(?:[&*\s])+))' # return arguments
- r'(' + _ident + r')' # method name
+ (r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
+ r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
r'([^;]*)(;)',
bygroups(using(this), Name.Function, using(this), using(this),
@@ -143,8 +153,10 @@ class CFamilyLexer(RegexLexer):
(r'\\', String), # stray backslash
],
'macro': [
- (r'(include)('+_ws1+r')("[^"]+")([^\n]*)', bygroups(Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
- (r'(include)('+_ws1+r')(<[^>]+>)([^\n]*)', bygroups(Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
+ (r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
+ bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
+ (r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
+ bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
(r'[^/\n]+', Comment.Preproc),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'//.*?\n', Comment.Single, '#pop'),
@@ -245,11 +257,14 @@ class CLexer(CFamilyLexer):
priority = 0.1
tokens = {
- 'statements': [
+ 'keywords': [
(words((
'_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local',
'_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'),
suffix=r'\b'), Keyword),
+ inherit
+ ],
+ 'types': [
(words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type),
inherit
]
@@ -295,20 +310,6 @@ class CppLexer(CFamilyLexer):
tokens = {
'statements': [
- (r'(class|concept|typename)(\s+)', bygroups(Keyword, Text), 'classname'),
- (words((
- 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
- 'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
- 'private', 'protected', 'public', 'reinterpret_cast', 'class',
- 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
- 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
- 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
- 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
- 'typename'),
- suffix=r'\b'), Keyword),
- (r'char(16_t|32_t|8_t)\b', Keyword.Type),
- (r'(enum)(\s+)', bygroups(Keyword, Text), 'enumname'),
-
# C++11 raw strings
(r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
@@ -332,6 +333,32 @@ class CppLexer(CFamilyLexer):
# template specification
(r'\s*(?=>)', Text, '#pop'),
default('#pop')
+ ],
+ 'keywords': [
+ (r'(class|concept|typename)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (words((
+ 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'export', 'friend', 'mutable', 'new', 'operator',
+ 'private', 'protected', 'public', 'reinterpret_cast', 'class',
+ 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
+ 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept',
+ 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval',
+ 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module',
+ 'typename'),
+ suffix=r'\b'), Keyword),
+ (r'namespace\b', Keyword, 'namespace'),
+ (r'(enum)(\s+)', bygroups(Keyword, Text), 'enumname'),
+ inherit
+ ],
+ 'types': [
+ (r'char(16_t|32_t|8_t)\b', Keyword.Type),
+ inherit
+ ],
+ 'namespace': [
+ (r'[;{]', Punctuation, ('#pop', 'root')),
+ (r'inline\b', Keyword.Reserved),
+ (CFamilyLexer._ident, Name.Namespace),
+ include('statement')
]
}
diff --git a/vendor/pygments-main/pygments/lexers/c_like.py b/vendor/pygments-main/pygments/lexers/c_like.py
index 41af0359..39967832 100644
--- a/vendor/pygments-main/pygments/lexers/c_like.py
+++ b/vendor/pygments-main/pygments/lexers/c_like.py
@@ -13,13 +13,14 @@
from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation, Whitespace
from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers import _mql_builtins
__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
- 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer']
+ 'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
+ 'OmgIdlLexer']
class PikeLexer(CppLexer):
@@ -334,6 +335,11 @@ class SwigLexer(CppLexer):
priority = 0.04 # Lower than C/C++ and Objective C/C++
tokens = {
+ 'root': [
+ # Match it here so it won't be matched as a function in the rest of root
+ (r'\$\**\&?\w+', Name),
+ inherit
+ ],
'statements': [
# SWIG directives
(r'(%[a-z_][a-z0-9_]*)', Name.Function),
@@ -549,7 +555,7 @@ class CharmciLexer(CppLexer):
mimetypes = []
tokens = {
- 'statements': [
+ 'keywords': [
(r'(module)(\s+)', bygroups(Keyword, Text), 'classname'),
(words(('mainmodule', 'mainchare', 'chare', 'array', 'group',
'nodegroup', 'message', 'conditional')), Keyword),
@@ -563,3 +569,96 @@ class CharmciLexer(CppLexer):
inherit,
],
}
+
+
+class OmgIdlLexer(CLexer):
+ """
+    Lexer for `Object Management Group Interface Definition Language <https://www.omg.org/spec/IDL/About-IDL/>`_.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'OMG Interface Definition Language'
+ aliases = ['omg-idl']
+ filenames = ['*.idl', '*.pidl']
+ mimetypes = []
+
+ scoped_name = r'((::)?\w+)+'
+
+ tokens = {
+ 'values': [
+ (words(('true', 'false'), prefix=r'(?i)', suffix=r'\b'), Number),
+ (r'([Ll]?)(")', bygroups(String.Affix, String.Double), 'string'),
+ (r'([Ll]?)(\')(\\[^\']+)(\')',
+ bygroups(String.Affix, String.Char, String.Escape, String.Char)),
+ (r'([Ll]?)(\')(\\\')(\')',
+ bygroups(String.Affix, String.Char, String.Escape, String.Char)),
+ (r'([Ll]?)(\'.\')', bygroups(String.Affix, String.Char)),
+ (r'[+-]?\d+(\.\d*)?[Ee][+-]?\d+', Number.Float),
+ (r'[+-]?(\d+\.\d*)|(\d*\.\d+)([Ee][+-]?\d+)?', Number.Float),
+ (r'(?i)[+-]?0x[0-9a-f]+', Number.Hex),
+ (r'[+-]?[1-9]\d*', Number.Integer),
+ (r'[+-]?0[0-7]*', Number.Oct),
+ (r'[\+\-\*\/%^&\|~]', Operator),
+ (words(('<<', '>>')), Operator),
+ (scoped_name, Name),
+ (r'[{};:,<>\[\]]', Punctuation),
+ ],
+ 'annotation_params': [
+ include('whitespace'),
+ (r'\(', Punctuation, '#push'),
+ include('values'),
+ (r'=', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'annotation_params_maybe': [
+ (r'\(', Punctuation, 'annotation_params'),
+ include('whitespace'),
+ default('#pop'),
+ ],
+ 'annotation_appl': [
+ (r'@' + scoped_name, Name.Decorator, 'annotation_params_maybe'),
+ ],
+ 'enum': [
+ include('whitespace'),
+ (r'[{,]', Punctuation),
+ (r'\w+', Name.Constant),
+ include('annotation_appl'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (words((
+ 'typedef', 'const',
+ 'in', 'out', 'inout', 'local',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword.Declaration),
+ (words((
+ 'void', 'any', 'native', 'bitfield',
+ 'unsigned', 'boolean', 'char', 'wchar', 'octet', 'short', 'long',
+ 'int8', 'uint8', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'fixed',
+ 'sequence', 'string', 'wstring', 'map',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword.Type),
+ (words((
+ '@annotation', 'struct', 'union', 'bitset', 'interface',
+ 'exception', 'valuetype', 'eventtype', 'component',
+ ), prefix=r'(?i)', suffix=r'(\s+)(\w+)'), bygroups(Keyword, Whitespace, Name.Class)),
+ (words((
+ 'abstract', 'alias', 'attribute', 'case', 'connector',
+ 'consumes', 'context', 'custom', 'default', 'emits', 'factory',
+ 'finder', 'getraises', 'home', 'import', 'manages', 'mirrorport',
+ 'multiple', 'Object', 'oneway', 'primarykey', 'private', 'port',
+ 'porttype', 'provides', 'public', 'publishes', 'raises',
+ 'readonly', 'setraises', 'supports', 'switch', 'truncatable',
+ 'typeid', 'typename', 'typeprefix', 'uses', 'ValueBase',
+ ), prefix=r'(?i)', suffix=r'\b'), Keyword),
+ (r'(?i)(enum|bitmask)(\s+)(\w+)',
+ bygroups(Keyword, Whitespace, Name.Class), 'enum'),
+ (r'(?i)(module)(\s+)(\w+)',
+ bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
+ (r'(\w+)(\s*)(=)', bygroups(Name.Constant, Whitespace, Operator)),
+ (r'[\(\)]', Punctuation),
+ include('values'),
+ include('annotation_appl'),
+ ],
+ }
diff --git a/vendor/pygments-main/pygments/lexers/cddl.py b/vendor/pygments-main/pygments/lexers/cddl.py
index 09bb5839..99eb17c9 100644
--- a/vendor/pygments-main/pygments/lexers/cddl.py
+++ b/vendor/pygments-main/pygments/lexers/cddl.py
@@ -119,7 +119,7 @@ class CddlLexer(RegexLexer):
"root": [
include("commentsandwhitespace"),
# tag types
- (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
+ (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
# occurence
(
r"({uint})?(\*)({uint})?".format(uint=_re_uint),
@@ -164,7 +164,7 @@ class CddlLexer(RegexLexer):
Number.Float,
),
# Int
- (_re_int, Number.Int),
+ (_re_int, Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
],
"bstrb64url": [
diff --git a/vendor/pygments-main/pygments/lexers/chapel.py b/vendor/pygments-main/pygments/lexers/chapel.py
index a3f8ff7e..73068626 100644
--- a/vendor/pygments-main/pygments/lexers/chapel.py
+++ b/vendor/pygments-main/pygments/lexers/chapel.py
@@ -26,6 +26,38 @@ class ChapelLexer(RegexLexer):
aliases = ['chapel', 'chpl']
# mimetypes = ['text/x-chapel']
+ known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
+ 'nothing', 'opaque', 'range', 'real', 'string', 'uint',
+ 'void')
+
+ type_modifiers_par = ('atomic', 'single', 'sync')
+ type_modifiers_mem = ('borrowed', 'owned', 'shared', 'unmanaged')
+ type_modifiers = (*type_modifiers_par, *type_modifiers_mem)
+
+ declarations = ('config', 'const', 'in', 'inout', 'out', 'param', 'ref',
+ 'type', 'var')
+
+ constants = ('false', 'nil', 'none', 'true')
+
+ other_keywords = ('align', 'as',
+ 'begin', 'break', 'by',
+ 'catch', 'cobegin', 'coforall', 'continue',
+ 'defer', 'delete', 'dmapped', 'do', 'domain',
+ 'else', 'enum', 'except', 'export', 'extern',
+ 'for', 'forall', 'foreach', 'forwarding',
+ 'if', 'implements', 'import', 'index', 'init', 'inline',
+ 'label', 'lambda', 'let', 'lifetime', 'local',
+ 'new', 'noinit',
+ 'on', 'only', 'otherwise', 'override',
+ 'pragma', 'primitive', 'private', 'prototype', 'public',
+ 'reduce', 'require', 'return',
+ 'scan', 'select', 'serial', 'sparse', 'subdomain',
+ 'then', 'this', 'throw', 'throws', 'try',
+ 'use',
+ 'when', 'where', 'while', 'with',
+ 'yield',
+ 'zip')
+
tokens = {
'root': [
(r'\n', Text),
@@ -35,34 +67,15 @@ class ChapelLexer(RegexLexer):
(r'//(.*?)\n', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(config|const|in|inout|out|param|ref|type|var)\b',
- Keyword.Declaration),
- (r'(false|nil|none|true)\b', Keyword.Constant),
- (r'(bool|bytes|complex|imag|int|nothing|opaque|range|real|string|uint|void)\b',
- Keyword.Type),
- (words((
- 'align', 'as', 'atomic',
- 'begin', 'borrowed', 'break', 'by',
- 'catch', 'cobegin', 'coforall', 'continue',
- 'defer', 'delete', 'dmapped', 'do', 'domain',
- 'else', 'enum', 'except', 'export', 'extern',
- 'for', 'forall', 'forwarding',
- 'if', 'import', 'index', 'init', 'inline',
- 'label', 'lambda', 'let', 'lifetime', 'local', 'locale'
- 'new', 'noinit',
- 'on', 'only', 'otherwise', 'override', 'owned',
- 'pragma', 'private', 'prototype', 'public',
- 'reduce', 'require', 'return',
- 'scan', 'select', 'serial', 'shared', 'single', 'sparse', 'subdomain', 'sync',
- 'then', 'this', 'throw', 'throws', 'try',
- 'unmanaged', 'use',
- 'when', 'where', 'while', 'with',
- 'yield',
- 'zip'), suffix=r'\b'),
- Keyword),
+ (words(declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(constants, suffix=r'\b'), Keyword.Constant),
+ (words(known_types, suffix=r'\b'), Keyword.Type),
+ (words((*type_modifiers, *other_keywords), suffix=r'\b'), Keyword),
+
(r'(iter)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
(r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
- (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
+ (r'(operator)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(class|interface|module|record|union)(\s+)', bygroups(Keyword, Text),
'classname'),
# imaginary integers
@@ -87,8 +100,8 @@ class ChapelLexer(RegexLexer):
(r'[0-9]+', Number.Integer),
# strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
# tokens
(r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
@@ -105,7 +118,18 @@ class ChapelLexer(RegexLexer):
(r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
],
'procname': [
- (r'([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-]{1,2})',
+ (r'([a-zA-Z_][.\w$]*|' # regular function name, including secondary
+ r'\~[a-zA-Z_][.\w$]*|' # support for legacy destructors
+ r'[+*/!~%<>=&^|\-:]{1,2})', # operators
Name.Function, '#pop'),
+
+ # allow `proc (atomic T).foo`
+ (r'\(', Punctuation, "receivertype"),
+ (r'\)+\.', Punctuation),
+ ],
+ 'receivertype': [
+ (words(type_modifiers, suffix=r'\b'), Keyword),
+ (words(known_types, suffix=r'\b'), Keyword.Type),
+ (r'[^()]*', Name.Other, '#pop'),
],
}
diff --git a/vendor/pygments-main/pygments/lexers/clean.py b/vendor/pygments-main/pygments/lexers/clean.py
index bf1d4099..579cf7c3 100644
--- a/vendor/pygments-main/pygments/lexers/clean.py
+++ b/vendor/pygments-main/pygments/lexers/clean.py
@@ -59,15 +59,15 @@ class CleanLexer(ExtendedRegexLexer):
],
'comments': [
(r'//.*\n', Comment.Single),
- (r'/\*', Comment.Multi, 'comments.in'),
+ (r'/\*', Comment.Multiline, 'comments.in'),
(r'/\*\*', Comment.Special, 'comments.in'),
],
'comments.in': [
- (r'\*\/', Comment.Multi, '#pop'),
- (r'/\*', Comment.Multi, '#push'),
- (r'[^*/]+', Comment.Multi),
- (r'\*(?!/)', Comment.Multi),
- (r'/', Comment.Multi),
+ (r'\*\/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'\*(?!/)', Comment.Multiline),
+ (r'/', Comment.Multiline),
],
'keywords': [
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
diff --git a/vendor/pygments-main/pygments/lexers/configs.py b/vendor/pygments-main/pygments/lexers/configs.py
index 16484287..c728e74a 100644
--- a/vendor/pygments-main/pygments/lexers/configs.py
+++ b/vendor/pygments-main/pygments/lexers/configs.py
@@ -21,7 +21,7 @@
'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
- 'SingularityLexer']
+ 'NestedTextLexer', 'SingularityLexer']
class IniLexer(RegexLexer):
@@ -31,7 +31,13 @@ class IniLexer(RegexLexer):
name = 'INI'
aliases = ['ini', 'cfg', 'dosini']
- filenames = ['*.ini', '*.cfg', '*.inf']
+ filenames = [
+ '*.ini', '*.cfg', '*.inf',
+ # systemd unit files
+ # https://www.freedesktop.org/software/systemd/man/systemd.unit.html
+ '*.service', '*.socket', '*.device', '*.mount', '*.automount',
+ '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
+ ]
mimetypes = ['text/x-ini', 'text/inf']
tokens = {
@@ -349,7 +355,7 @@ class SquidConfLexer(RegexLexer):
"cache_effective_user", "cache_host", "cache_host_acl",
"cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
"cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
+ "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
"cache_stoplist_pattern", "cache_store_log", "cache_swap",
"cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
"client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
@@ -509,8 +515,8 @@ class LighttpdConfLexer(RegexLexer):
.. versionadded:: 0.11
"""
name = 'Lighttpd configuration file'
- aliases = ['lighty', 'lighttpd']
- filenames = []
+ aliases = ['lighttpd', 'lighty']
+ filenames = ['lighttpd.conf']
mimetypes = ['text/x-lighttpd-conf']
tokens = {
@@ -577,56 +583,113 @@ class TerraformLexer(RegexLexer):
filenames = ['*.tf']
mimetypes = ['application/x-tf', 'application/x-terraform']
- embedded_keywords = ('ingress', 'egress', 'listener', 'default',
- 'connection', 'alias', 'terraform', 'tags', 'vars',
- 'config', 'lifecycle', 'timeouts')
+ classes = ('backend', 'data', 'module', 'output', 'provider',
+ 'provisioner', 'resource', 'variable')
+ classes_re = "({})".format(('|').join(classes))
+
+ types = ('string', 'number', 'bool', 'list', 'tuple', 'map', 'object', 'null')
+
+ numeric_functions = ('abs', 'ceil', 'floor', 'log', 'max',
+                         'min', 'parseint', 'pow', 'signum')
+
+ string_functions = ('chomp', 'format', 'formatlist', 'indent',
+ 'join', 'lower', 'regex', 'regexall', 'replace',
+ 'split', 'strrev', 'substr', 'title', 'trim',
+ 'trimprefix', 'trimsuffix', 'trimspace', 'upper'
+ )
+
+ collection_functions = ('alltrue', 'anytrue', 'chunklist', 'coalesce',
+ 'coalescelist', 'compact', 'concat', 'contains',
+ 'distinct', 'element', 'flatten', 'index', 'keys',
+ 'length', 'list', 'lookup', 'map', 'matchkeys',
+ 'merge', 'range', 'reverse', 'setintersection',
+ 'setproduct', 'setsubtract', 'setunion', 'slice',
+ 'sort', 'sum', 'transpose', 'values', 'zipmap'
+ )
+
+ encoding_functions = ('base64decode', 'base64encode', 'base64gzip',
+ 'csvdecode', 'jsondecode', 'jsonencode', 'textdecodebase64',
+ 'textencodebase64', 'urlencode', 'yamldecode', 'yamlencode')
+
+
+ filesystem_functions = ('abspath', 'dirname', 'pathexpand', 'basename',
+ 'file', 'fileexists', 'fileset', 'filebase64', 'templatefile')
+
+ date_time_functions = ('formatdate', 'timeadd', 'timestamp')
+
+ hash_crypto_functions = ('base64sha256', 'base64sha512', 'bcrypt', 'filebase64sha256',
+ 'filebase64sha512', 'filemd5', 'filesha1', 'filesha256', 'filesha512',
+ 'md5', 'rsadecrypt', 'sha1', 'sha256', 'sha512', 'uuid', 'uuidv5')
+
+ ip_network_functions = ('cidrhost', 'cidrnetmask', 'cidrsubnet', 'cidrsubnets')
+
+ type_conversion_functions = ('can', 'defaults', 'tobool', 'tolist', 'tomap',
+ 'tonumber', 'toset', 'tostring', 'try')
+
+ builtins = numeric_functions + string_functions + collection_functions + encoding_functions +\
+ filesystem_functions + date_time_functions + hash_crypto_functions + ip_network_functions +\
+ type_conversion_functions
+ builtins_re = "({})".format(('|').join(builtins))
tokens = {
'root': [
- include('string'),
- include('punctuation'),
- include('curly'),
include('basic'),
include('whitespace'),
+
+ # Strings
+ (r'(".*")', bygroups(String.Double)),
+
+ # Constants
+ (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Name.Constant),
+
+ # Types
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ include('identifier'),
+ include('punctuation'),
(r'[0-9]+', Number),
],
'basic': [
- (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
(r'\s*/\*', Comment.Multiline, 'comment'),
(r'\s*#.*\n', Comment.Single),
- (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)),
- (words(('variable', 'resource', 'provider', 'provisioner', 'module',
- 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved, 'function'),
- (words(embedded_keywords, prefix=r'\b', suffix=r'\b'),
- Keyword.Declaration),
- (r'\$\{', String.Interpol, 'var_builtin'),
- ],
- 'function': [
- (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
- include('punctuation'),
- include('curly'),
+ include('whitespace'),
+
+ # e.g. terraform {
+ # e.g. egress {
+ (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=?)(\s*)(\{)',
+ bygroups(Text, Name.Builtin, Text, Operator, Text, Punctuation)),
+
+ # Assignment with attributes, e.g. something = ...
+ (r'(\s*)([0-9a-zA-Z-_]+)(\s*)(=)(\s*)',
+ bygroups(Text, Name.Attribute, Text, Operator, Text)),
+
+ # Assignment with environment variables and similar, e.g. "something" = ...
+ # or key value assignment, e.g. "SlotName" : ...
+ (r'(\s*)("\S+")(\s*)([=:])(\s*)',
+ bygroups(Text, Literal.String.Double, Text, Operator, Text)),
+
+ # Functions, e.g. jsonencode(element("value"))
+ (builtins_re + r'(\()', bygroups(Name.Function, Punctuation)),
+
+ # List of attributes, e.g. ignore_changes = [last_modified, filename]
+ (r'(\[)([a-z_,\s]+)(\])', bygroups(Punctuation, Name.Builtin, Punctuation)),
+
+ # e.g. resource "aws_security_group" "allow_tls" {
+ # e.g. backend "consul" {
+ (classes_re + r'(\s+)', bygroups(Keyword.Reserved, Text), 'blockname'),
],
- 'var_builtin': [
- (r'\$\{', String.Interpol, '#push'),
- (words(('concat', 'file', 'join', 'lookup', 'element'),
- prefix=r'\b', suffix=r'\b'), Name.Builtin),
- include('string'),
- include('punctuation'),
- (r'\s+', Text),
- (r'\}', String.Interpol, '#pop'),
+ 'blockname': [
+ # e.g. resource "aws_security_group" "allow_tls" {
+ # e.g. backend "consul" {
+ (r'(\s*)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
+ bygroups(Text, Name.Class, Text, Name.Variable, Text, Punctuation)),
],
- 'string': [
- (r'(".*")', bygroups(String.Double)),
+ 'identifier': [
+ (r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
+ (r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
],
'punctuation': [
- (r'[\[\](),.]', Punctuation),
- ],
- # Keep this seperate from punctuation - we sometimes want to use different
- # Tokens for { }
- 'curly': [
- (r'\{', Text.Punctuation),
- (r'\}', Text.Punctuation),
+ (r'[\[\]()\{\},.?:!=]', Punctuation),
],
'comment': [
(r'[^*/]', Comment.Multiline),
@@ -905,9 +968,12 @@ class TOMLLexer(RegexLexer):
tokens = {
'root': [
+ # Table
+ (r'^(\s*)(\[.*?\])$', bygroups(Text, Keyword)),
# Basics, comments, strings
- (r'\s+', Text),
+ (r'[ \t]+', Text),
+ (r'\n', Text),
(r'#.*?$', Comment.Single),
# Basic string
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
@@ -917,7 +983,6 @@ class TOMLLexer(RegexLexer):
(r'(true|false)$', Keyword.Constant),
(r'[a-zA-Z_][\w\-]*', Name),
- (r'\[.*?\]$', Keyword),
# Datetime
# TODO this needs to be expanded, as TOML is rather flexible:
# https://github.com/toml-lang/toml#offset-date-time
@@ -940,6 +1005,31 @@ class TOMLLexer(RegexLexer):
]
}
+class NestedTextLexer(RegexLexer):
+ """
+    Lexer for `NestedText <https://nestedtext.org>`_, a human-friendly data
+ format.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'NestedText'
+ aliases = ['nestedtext', 'nt']
+ filenames = ['*.nt']
+
+ _quoted_dict_item = r'^(\s*)({0})(.*?)({0}: ?)(.*?)(\s*)$'
+
+ tokens = {
+ 'root': [
+ (r'^(\s*)(#.*?)$', bygroups(Text, Comment)),
+ (r'^(\s*)(> ?)(.*?)(\s*)$', bygroups(Text, Punctuation, String, Whitespace)),
+ (r'^(\s*)(- ?)(.*?)(\s*)$', bygroups(Text, Punctuation, String, Whitespace)),
+ (_quoted_dict_item.format("'"), bygroups(Text, Punctuation, Name, Punctuation, String, Whitespace)),
+ (_quoted_dict_item.format('"'), bygroups(Text, Punctuation, Name, Punctuation, String, Whitespace)),
+ (r'^(\s*)(.*?)(: ?)(.*?)(\s*)$', bygroups(Text, Name, Punctuation, String, Whitespace)),
+ ],
+ }
+
class SingularityLexer(RegexLexer):
"""
diff --git a/vendor/pygments-main/pygments/lexers/csound.py b/vendor/pygments-main/pygments/lexers/csound.py
index b969aa65..7b49dc29 100644
--- a/vendor/pygments-main/pygments/lexers/csound.py
+++ b/vendor/pygments-main/pygments/lexers/csound.py
@@ -13,7 +13,7 @@
from pygments.lexer import RegexLexer, bygroups, default, include, using, words
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
String, Text, Whitespace
-from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES
+from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES
from pygments.lexers.html import HtmlLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.scripting import LuaLexer
@@ -219,7 +219,7 @@ def name_callback(lexer, match):
type_annotation_token = Keyword.Type
name = match.group(1)
- if name in OPCODES or name in DEPRECATED_OPCODES:
+ if name in OPCODES or name in DEPRECATED_OPCODES or name in REMOVED_OPCODES:
yield match.start(), Name.Builtin, name
elif name in lexer.user_defined_opcodes:
yield match.start(), Name.Function, name
diff --git a/vendor/pygments-main/pygments/lexers/devicetree.py b/vendor/pygments-main/pygments/lexers/devicetree.py
index 9eca82a0..240a4516 100644
--- a/vendor/pygments-main/pygments/lexers/devicetree.py
+++ b/vendor/pygments-main/pygments/lexers/devicetree.py
@@ -58,7 +58,7 @@ class DevicetreeLexer(RegexLexer):
(r'(L?)(")', bygroups(String.Affix, String), 'string'),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
- (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation)),
+ (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation), '#pop'),
(words(('compatible', 'model', 'phandle', 'status', '#address-cells',
'#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges',
'device_type', 'name'), suffix=r'\b'), Keyword.Reserved),
diff --git a/vendor/pygments-main/pygments/lexers/dotnet.py b/vendor/pygments-main/pygments/lexers/dotnet.py
index 541e3eae..4a7e354b 100644
--- a/vendor/pygments-main/pygments/lexers/dotnet.py
+++ b/vendor/pygments-main/pygments/lexers/dotnet.py
@@ -87,7 +87,7 @@ class CSharpLexer(RegexLexer):
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
+ (r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
diff --git a/vendor/pygments-main/pygments/lexers/erlang.py b/vendor/pygments-main/pygments/lexers/erlang.py
index 74b52c8a..35ea718f 100644
--- a/vendor/pygments-main/pygments/lexers/erlang.py
+++ b/vendor/pygments-main/pygments/lexers/erlang.py
@@ -230,7 +230,7 @@ class ElixirLexer(RegexLexer):
name = 'Elixir'
aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.eex', '*.exs']
+ filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
mimetypes = ['text/x-elixir']
KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
diff --git a/vendor/pygments-main/pygments/lexers/floscript.py b/vendor/pygments-main/pygments/lexers/floscript.py
index 82ebc0c3..d6d3a20c 100644
--- a/vendor/pygments-main/pygments/lexers/floscript.py
+++ b/vendor/pygments-main/pygments/lexers/floscript.py
@@ -59,7 +59,7 @@ def innerstring_rules(ttype):
include('name'),
include('numbers'),
- (r'#.+$', Comment.Singleline),
+ (r'#.+$', Comment.Single),
],
'string': [
('[^"]+', String),
diff --git a/vendor/pygments-main/pygments/lexers/futhark.py b/vendor/pygments-main/pygments/lexers/futhark.py
index 8892bb79..f580ea72 100644
--- a/vendor/pygments-main/pygments/lexers/futhark.py
+++ b/vendor/pygments-main/pygments/lexers/futhark.py
@@ -10,9 +10,9 @@
import re
-from pygments.lexer import RegexLexer, include, bygroups, default, words
+from pygments.lexer import RegexLexer
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
+ Number, Punctuation
from pygments import unistring as uni
__all__ = ['FutharkLexer']
@@ -25,7 +25,7 @@ class FutharkLexer(RegexLexer):
"""
A Futhark lexer
- .. versionadded:: 2.8.0
+ .. versionadded:: 2.8
"""
name = 'Futhark'
aliases = ['futhark']
@@ -70,7 +70,8 @@ class FutharkLexer(RegexLexer):
(r'[][(),:;`{}]', Punctuation),
# Numbers
- (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix, Number.Float),
+ (r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*' + num_postfix,
+ Number.Float),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
r'(_*[pP][+-]?\d(_*\d)*)?' + num_postfix, Number.Float),
(r'\d(_*\d)*_*[eE][+-]?\d(_*\d)*' + num_postfix, Number.Float),
diff --git a/vendor/pygments-main/pygments/lexers/gcodelexer.py b/vendor/pygments-main/pygments/lexers/gcodelexer.py
new file mode 100644
index 00000000..1a76df72
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/gcodelexer.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.gcodelexer
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the G Code Language.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Comment, Name, Text, Keyword, Number
+
+__all__ = ['GcodeLexer']
+
+
+class GcodeLexer(RegexLexer):
+ """
+ For gcode source code.
+
+ .. versionadded:: 2.9
+ """
+ name = 'g-code'
+ aliases = ['gcode']
+ filenames = ['*.gcode']
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment),
+ (r'^[gmGM]\d{1,4}\s', Name.Builtin), # M or G commands
+ (r'([^gGmM])([+-]?\d*[.]?\d+)', bygroups(Keyword, Number)),
+ (r'\s', Text.Whitespace),
+ (r'.*\n', Text),
+ ]
+ }
diff --git a/vendor/pygments-main/pygments/lexers/go.py b/vendor/pygments-main/pygments/lexers/go.py
index b37a6e16..4e287caf 100644
--- a/vendor/pygments-main/pygments/lexers/go.py
+++ b/vendor/pygments-main/pygments/lexers/go.py
@@ -25,7 +25,7 @@ class GoLexer(RegexLexer):
"""
name = 'Go'
filenames = ['*.go']
- aliases = ['go']
+ aliases = ['go', 'golang']
mimetypes = ['text/x-gosrc']
flags = re.MULTILINE | re.UNICODE
diff --git a/vendor/pygments-main/pygments/lexers/graphics.py b/vendor/pygments-main/pygments/lexers/graphics.py
index 7e944bab..f0b36b0c 100644
--- a/vendor/pygments-main/pygments/lexers/graphics.py
+++ b/vendor/pygments-main/pygments/lexers/graphics.py
@@ -407,7 +407,7 @@ class AsymptoteLexer(RegexLexer):
.. versionadded:: 1.2
"""
name = 'Asymptote'
- aliases = ['asy', 'asymptote']
+ aliases = ['asymptote', 'asy']
filenames = ['*.asy']
mimetypes = ['text/x-asymptote']
diff --git a/vendor/pygments-main/pygments/lexers/graphviz.py b/vendor/pygments-main/pygments/lexers/graphviz.py
index 55cf2e4c..a6859337 100644
--- a/vendor/pygments-main/pygments/lexers/graphviz.py
+++ b/vendor/pygments-main/pygments/lexers/graphviz.py
@@ -9,7 +9,8 @@
"""
from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Keyword, Operator, Name, String, Number, Punctuation, Whitespace
+from pygments.token import Comment, Keyword, Operator, Name, String, Number, \
+ Punctuation, Whitespace
__all__ = ['GraphvizLexer']
@@ -37,9 +38,9 @@ class GraphvizLexer(RegexLexer):
bygroups(Name.Attribute, Whitespace, Punctuation, Whitespace),
'attr_id'),
(r'\b(n|ne|e|se|s|sw|w|nw|c|_)\b', Name.Builtin),
- (r'\b\D\w*', Name.Tag), # node
+ (r'\b\D\w*', Name.Tag), # node
(r'[-]?((\.[0-9]+)|([0-9]+(\.[0-9]*)?))', Number),
- (r'"(\\"|[^"])*?"', Name.Tag), # quoted node
+ (r'"(\\"|[^"])*?"', Name.Tag), # quoted node
(r'<', Punctuation, 'xml'),
],
'attr_id': [
@@ -54,4 +55,4 @@ class GraphvizLexer(RegexLexer):
(r'\s+', Whitespace),
(r'[^<>\s]', Name.Tag),
]
- }
\ No newline at end of file
+ }
diff --git a/vendor/pygments-main/pygments/lexers/gsql.py b/vendor/pygments-main/pygments/lexers/gsql.py
new file mode 100644
index 00000000..914df261
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/gsql.py
@@ -0,0 +1,92 @@
+"""
+ pygments.lexers.gsql
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for TigerGraph GSQL graph query language
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace, Token
+
+
+__all__ = ["GSQLLexer"]
+
+class GSQLLexer(RegexLexer):
+
+ """
+ For `GSQL `_ queries (version 3.x).
+ .. versionadded:: 2.10
+ """
+
+ name = 'GSQL'
+ aliases = ['gsql']
+ filenames = ['*.gsql']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('accums'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ include('operators'),
+ ],
+ 'comment': [
+ (r'.*\#.*\n', Comment.Single),
+ (r'.*\/\*\s*.*\s*\*\/', Comment.Multiline),
+ ],
+ 'keywords': [
+ (words((
+ 'ACCUM', 'AND', 'ANY', 'API', 'AS', 'ASC', 'AVG', 'BAG', 'BATCH', 'BETWEEN', 'BOOL', 'BOTH',
+ 'BREAK', 'BY', 'CASE', 'CATCH', 'COALESCE', 'COMPRESS', 'CONTINUE', 'COUNT',
+ 'CREATE', 'DATETIME', 'DATETIME_ADD', 'DATETIME_SUB', 'DELETE', 'DESC', 'DISTRIBUTED', 'DO',
+ 'DOUBLE', 'EDGE', 'ELSE', 'END', 'ESCAPE', 'EXCEPTION', 'FALSE', 'FILE', 'FILTER', 'FLOAT', 'FOREACH', 'FOR',
+ 'FROM', 'GRAPH', 'GROUP', 'GSQL_INT_MAX', 'GSQL_INT_MIN', 'GSQL_UINT_MAX', 'HAVING', 'IF',
+ 'IN', 'INSERT', 'INT', 'INTERPRET', 'INTERSECT', 'INTERVAL', 'INTO', 'IS', 'ISEMPTY', 'JSONARRAY', 'JSONOBJECT', 'LASTHOP',
+ 'LEADING', 'LIKE', 'LIMIT', 'LIST', 'LOAD_ACCUM', 'LOG', 'MAP', 'MATCH', 'MAX', 'MIN', 'MINUS', 'NOT',
+ 'NOW', 'NULL', 'OFFSET', 'OR', 'ORDER', 'PATH', 'PER', 'PINNED', 'POST_ACCUM', 'POST-ACCUM', 'PRIMARY_ID', 'PRINT',
+ 'QUERY', 'RAISE', 'RANGE', 'REPLACE', 'RESET_COLLECTION_ACCUM', 'RETURN', 'RETURNS', 'RUN', 'SAMPLE', 'SELECT', 'SELECT_VERTEX',
+ 'SET', 'SRC', 'STATIC', 'STRING', 'SUM', 'SYNTAX', 'TARGET', 'TAGSTGT', 'THEN', 'TO', 'TO_CSV', 'TO_DATETIME', 'TRAILING', 'TRIM', 'TRUE',
+ 'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE', 'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'), prefix=r'(?|<-', Operator),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
+ (r'@{1,2}\w+', Name.Variable),
+ (r'(\<\w+)?\<(\w+\>?\,?\s?)+\>+', Name.Constant),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z]\w*', Name),
+ (r'(\d+\.\d+|\d+)', Number),
+ ],
+ 'operators': [
+ (r'[^0-9|\/|\-](\-\=|\+\=|\*\=|\\\=|\=|\=\=|\=\=\=|\+|\-|\*|\\|\+\=|\>|\<)[^\>|\/]', Operator),
+ (r'(\(|\)|\,|\;|\=|\-|\+|\*|\/|\>|\<|\:)', Operator),
+ ],
+ }
diff --git a/vendor/pygments-main/pygments/lexers/haskell.py b/vendor/pygments-main/pygments/lexers/haskell.py
index bc4b2011..74cd8dcc 100644
--- a/vendor/pygments-main/pygments/lexers/haskell.py
+++ b/vendor/pygments-main/pygments/lexers/haskell.py
@@ -575,7 +575,7 @@ class LiterateHaskellLexer(LiterateLexer):
.. versionadded:: 0.9
"""
name = 'Literate Haskell'
- aliases = ['lhs', 'literate-haskell', 'lhaskell']
+ aliases = ['literate-haskell', 'lhaskell', 'lhs']
filenames = ['*.lhs']
mimetypes = ['text/x-literate-haskell']
@@ -598,7 +598,7 @@ class LiterateIdrisLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Idris'
- aliases = ['lidr', 'literate-idris', 'lidris']
+ aliases = ['literate-idris', 'lidris', 'lidr']
filenames = ['*.lidr']
mimetypes = ['text/x-literate-idris']
@@ -621,7 +621,7 @@ class LiterateAgdaLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Agda'
- aliases = ['lagda', 'literate-agda']
+ aliases = ['literate-agda', 'lagda']
filenames = ['*.lagda']
mimetypes = ['text/x-literate-agda']
@@ -644,7 +644,7 @@ class LiterateCryptolLexer(LiterateLexer):
.. versionadded:: 2.0
"""
name = 'Literate Cryptol'
- aliases = ['lcry', 'literate-cryptol', 'lcryptol']
+ aliases = ['literate-cryptol', 'lcryptol', 'lcry']
filenames = ['*.lcry']
mimetypes = ['text/x-literate-cryptol']
diff --git a/vendor/pygments-main/pygments/lexers/haxe.py b/vendor/pygments-main/pygments/lexers/haxe.py
index d970eacb..69e35693 100644
--- a/vendor/pygments-main/pygments/lexers/haxe.py
+++ b/vendor/pygments-main/pygments/lexers/haxe.py
@@ -26,7 +26,7 @@ class HaxeLexer(ExtendedRegexLexer):
"""
name = 'Haxe'
- aliases = ['hx', 'haxe', 'hxsl']
+ aliases = ['haxe', 'hxsl', 'hx']
filenames = ['*.hx', '*.hxsl']
mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
diff --git a/vendor/pygments-main/pygments/lexers/installers.py b/vendor/pygments-main/pygments/lexers/installers.py
index a4bc0774..ab333202 100644
--- a/vendor/pygments-main/pygments/lexers/installers.py
+++ b/vendor/pygments-main/pygments/lexers/installers.py
@@ -223,7 +223,7 @@ class SourcesListLexer(RegexLexer):
"""
name = 'Debian Sourcelist'
- aliases = ['sourceslist', 'sources.list', 'debsources']
+ aliases = ['debsources', 'sourceslist', 'sources.list']
filenames = ['sources.list']
mimetype = ['application/x-debian-sourceslist']
@@ -270,7 +270,7 @@ class DebianControlLexer(RegexLexer):
.. versionadded:: 0.9
"""
name = 'Debian Control file'
- aliases = ['control', 'debcontrol']
+ aliases = ['debcontrol', 'control']
filenames = ['control']
tokens = {
diff --git a/vendor/pygments-main/pygments/lexers/javascript.py b/vendor/pygments-main/pygments/lexers/javascript.py
index a06272b4..7ddd1148 100644
--- a/vendor/pygments-main/pygments/lexers/javascript.py
+++ b/vendor/pygments-main/pygments/lexers/javascript.py
@@ -10,16 +10,17 @@
import re
-from pygments.lexer import RegexLexer, include, bygroups, default, using, \
- this, words, combined
+from pygments.lexer import bygroups, combined, default, do_insertions, include, \
+ inherit, Lexer, RegexLexer, this, using, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other
+ Number, Punctuation, Other, Generic
from pygments.util import get_bool_opt
import pygments.unistring as uni
__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
- 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer']
+ 'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer',
+ 'NodeConsoleLexer']
JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
']|\\\\u[a-fA-F0-9]{4})')
@@ -28,6 +29,7 @@
'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
+line_re = re.compile('.*?\n')
class JavascriptLexer(RegexLexer):
"""
@@ -35,8 +37,8 @@ class JavascriptLexer(RegexLexer):
"""
name = 'JavaScript'
- aliases = ['js', 'javascript']
- filenames = ['*.js', '*.jsm', '*.mjs']
+ aliases = ['javascript', 'js']
+ filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript']
@@ -52,7 +54,7 @@ class JavascriptLexer(RegexLexer):
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuys]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop')
],
@@ -75,23 +77,43 @@ class JavascriptLexer(RegexLexer):
(r'(\.[0-9]+|[0-9]+\.[0-9]*|[0-9]+)([eE][-+]?[0-9]+)?', Number.Float),
(r'\.\.\.|=>', Punctuation),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
+
+ (r'(typeof|instanceof|in|void|delete|new)\b', Operator.Word, 'slashstartsregex'),
+
+ # Match stuff like: constructor
+ (r'\b(constructor|from|as)\b', Keyword.Reserved),
+
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|await|async|'
- r'this|of|static|export|import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
+ r'throw|try|catch|finally|yield|await|async|this|of|static|export|'
+ r'import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
(r'(var|let|const|with|function|class)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|double|enum|final|float|goto'
- r'implements|int|interface|long|native|package|private|protected'
+
+ (r'(abstract|boolean|byte|char|double|enum|final|float|goto|'
+ r'implements|int|interface|long|native|package|private|protected|'
r'public|short|synchronized|throws|transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|BigInt|Error|Function|Math|'
+
+ (r'(Array|Boolean|Date|BigInt|Function|Math|ArrayBuffer|'
r'Number|Object|RegExp|String|Promise|Proxy|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
- r'document|this|window|globalThis|Symbol)\b', Name.Builtin),
+ r'eval|isFinite|isNaN|parseFloat|parseInt|DataView|'
+ r'document|window|globalThis|global|Symbol|Intl|'
+ r'WeakSet|WeakMap|Set|Map|Reflect|JSON|Atomics|'
+ r'Int(?:8|16|32)Array|BigInt64Array|Float32Array|Float64Array|'
+ r'Uint8ClampedArray|Uint(?:8|16|32)Array|BigUint64Array)\b', Name.Builtin),
+
+ (r'((?:Eval|Internal|Range|Reference|Syntax|Type|URI)?Error)\b', Name.Exception),
+
+ # Match stuff like: super(argument, list)
+ (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
+ bygroups(Keyword, Text), 'slashstartsregex'),
+ # Match stuff like: function() {...}
+ (r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
+
(JS_IDENT, Name.Other),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
@@ -112,6 +134,43 @@ class JavascriptLexer(RegexLexer):
}
+class TypeScriptLexer(JavascriptLexer):
+ """
+ For `TypeScript `_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'TypeScript'
+ aliases = ['typescript', 'ts']
+ filenames = ['*.ts']
+ mimetypes = ['application/x-typescript', 'text/x-typescript']
+
+ # Higher priority than the TypoScriptLexer, as TypeScript is far more
+ # common these days
+ priority = 0.5
+
+ tokens = {
+ 'root': [
+ (r'(abstract|implements|private|protected|public|readonly)\b',
+ Keyword, 'slashstartsregex'),
+ (r'(enum|interface|override)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'\b(declare|type)\b', Keyword.Reserved),
+ # Match variable type keywords
+ (r'\b(string|boolean|number)\b', Keyword.Type),
+ # Match stuff like: module name {...}
+ (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
+ bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
+ # Match stuff like: (function: return type)
+ (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
+ bygroups(Name.Other, Text, Keyword.Type)),
+ # Match stuff like: Decorators
+ (r'@' + JS_IDENT, Keyword.Declaration),
+ inherit,
+ ],
+ }
+
+
class KalLexer(RegexLexer):
"""
For `Kal`_ source code.
@@ -157,7 +216,7 @@ class KalLexer(RegexLexer):
'root': [
include('commentsandwhitespace'),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuys]+\b|\B)', String.Regex),
+ r'([gimuysd]+\b|\B)', String.Regex),
(r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
Operator),
(r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
@@ -231,13 +290,13 @@ class LiveScriptLexer(RegexLexer):
"""
For `LiveScript`_ source code.
- .. _LiveScript: http://gkz.github.com/LiveScript/
+ .. _LiveScript: https://livescript.net/
.. versionadded:: 1.6
"""
name = 'LiveScript'
- aliases = ['live-script', 'livescript']
+ aliases = ['livescript', 'live-script']
filenames = ['*.ls']
mimetypes = ['text/livescript']
@@ -250,7 +309,7 @@ class LiveScriptLexer(RegexLexer):
],
'multilineregex': [
include('commentsandwhitespace'),
- (r'//([gimuys]+\b|\B)', String.Regex, '#pop'),
+ (r'//([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'/', String.Regex),
(r'[^/#]+', String.Regex)
],
@@ -258,7 +317,7 @@ class LiveScriptLexer(RegexLexer):
include('commentsandwhitespace'),
(r'//', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gimuys]+\b|\B)', String.Regex, '#pop'),
+ r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'/', Operator, '#pop'),
default('#pop'),
],
@@ -441,108 +500,6 @@ class DartLexer(RegexLexer):
}
-class TypeScriptLexer(RegexLexer):
- """
- For `TypeScript `_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'TypeScript'
- aliases = ['ts', 'typescript']
- filenames = ['*.ts', '*.tsx']
- mimetypes = ['text/x-typescript']
-
- flags = re.DOTALL | re.MULTILINE
-
- # Higher priority than the TypoScriptLexer, as TypeScript is far more
- # common these days
- priority = 0.5
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'',
- # prec-lazy-or
- '||',
- # prec-lazy-and
- '&&',
- # prec-comparison
- '>', '<', '>=', '≥', '<=', '≤', '==', '===', '≡', '!=', '≠',
- '!==', '≢', '.>', '.<', '.>=', '.≥', '.<=', '.≤', '.==', '.!=',
- '.≠', '.=', '.!', '<:', '>:', '∈', '∉', '∋', '∌', '⊆',
- '⊈', '⊂',
- '⊄', '⊊',
- # prec-pipe
- '|>', '<|',
- # prec-colon
- ':',
- # prec-plus
- '.+', '.-', '|', '∪', '$',
- # prec-bitshift
- '<<', '>>', '>>>', '.<<', '.>>', '.>>>',
- # prec-times
- '*', '/', './', '÷', '.÷', '%', '⋅', '.%', '.*', '\\', '.\\', '&', '∩',
- # prec-rational
- '//', './/',
- # prec-power
- '^', '.^',
- # prec-decl
- '::',
- # prec-dot
- '.',
- # unary op
- '+', '-', '!', '√', '∛', '∜',
- )), Operator),
# chars
(r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
# try to match trailing transpose
- (r'(?<=[.\w)\]])\'+', Operator),
-
- # strings
- (r'"""', String, 'tqstring'),
- (r'"', String, 'string'),
+ (r'(?<=[.\w)\]])(\'' + operator_suffixes + ')+', Operator),
+ # raw strings
+ (r'(raw)(""")', bygroups(String.Affix, String), 'tqrawstring'),
+ (r'(raw)(")', bygroups(String.Affix, String), 'rawstring'),
# regular expressions
- (r'r"""', String.Regex, 'tqregex'),
- (r'r"', String.Regex, 'regex'),
+ (r'(r)(""")', bygroups(String.Affix, String.Regex), 'tqregex'),
+ (r'(r)(")', bygroups(String.Affix, String.Regex), 'regex'),
+ # other strings
+ (r'(' + allowed_variable + ')?(""")', bygroups(String.Affix, String), 'tqstring'),
+ (r'(' + allowed_variable + ')?(")', bygroups(String.Affix, String), 'string'),
# backticks
- (r'`', String.Backtick, 'command'),
+ (r'(' + allowed_variable + ')?(```)', bygroups(String.Affix, String.Backtick), 'tqcommand'),
+ (r'(' + allowed_variable + ')?(`)', bygroups(String.Affix, String.Backtick), 'command'),
+
+ # type names
+ # - names that begin a curly expression
+ ('(' + allowed_variable + r')(\{)',
+ bygroups(Keyword.Type, Punctuation), 'curly'),
+ # - names as part of bare 'where'
+ (r'(where)(\s+)(' + allowed_variable + ')',
+ bygroups(Keyword, Text, Keyword.Type)),
+ # - curly expressions in general
+ (r'(\{)', Punctuation, 'curly'),
+ # - names as part of type declaration
+ (r'(abstract[ \t]+type|primitive[ \t]+type|mutable[ \t]+struct|struct)([\s()]+)(' +
+ allowed_variable + r')', bygroups(Keyword, Text, Keyword.Type)),
+
+ # macros
+ (r'@' + allowed_variable, Name.Decorator),
+ (words([*OPERATORS_LIST, '..', '.', *DOTTED_OPERATORS_LIST],
+ prefix='@', suffix=operator_suffixes), Name.Decorator),
+
+ # keywords
+ (words(KEYWORD_LIST, suffix=r'\b'), Keyword),
+ # builtin types
+ (words(BUILTIN_LIST, suffix=r'\b'), Keyword.Type),
+ # builtin literals
+ (words(LITERAL_LIST, suffix=r'\b'), Name.Builtin),
# names
(allowed_variable, Name),
- (r'@' + allowed_variable, Name.Decorator),
# numbers
- (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'0b[01]+(_[01]+)+', Number.Bin),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+(_\d+)+', Number.Integer),
- (r'\d+', Number.Integer)
+ (r'(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+((_\d+)+)?[eEf][+-]?[0-9]+', Number.Float),
+ (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+', Number.Float),
+ (r'0b[01]+((_[01]+)+)?', Number.Bin),
+ (r'0o[0-7]+((_[0-7]+)+)?', Number.Oct),
+ (r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?', Number.Hex),
+ (r'\d+((_\d+)+)?', Number.Integer),
+
+ # single dot operator matched last to permit e.g. ".1" as a float
+ (words(['.']), Operator),
],
"blockcomment": [
@@ -227,53 +152,80 @@ class JuliaLexer(RegexLexer):
(r'[=#]', Comment.Multiline),
],
- 'string': [
+ 'curly': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (allowed_variable, Keyword.Type),
+ include('root'),
+ ],
+
+ 'tqrawstring': [
+ (r'"""', String, '#pop'),
+ (r'([^"]|"[^"][^"])+', String),
+ ],
+ 'rawstring': [
(r'"', String, '#pop'),
- # FIXME: This escape pattern is not perfect.
- (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- # Interpolation is defined as "$" followed by the shortest full
- # expression, which is something we can't parse.
- # Include the most common cases here: $word, and $(paren'd expr).
+ (r'\\"', String.Escape),
+ (r'([^"\\]|\\[^"])+', String),
+ ],
+
+ # Interpolation is defined as "$" followed by the shortest full expression, which is
+ # something we can't parse.
+ # Include the most common cases here: $word, and $(paren'd expr).
+ 'interp': [
(r'\$' + allowed_variable, String.Interpol),
- # (r'\$[a-zA-Z_]+', String.Interpol),
(r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
+ ],
+ 'in-intp': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+
+ 'string': [
+ (r'(")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
+ # FIXME: This escape pattern is not perfect.
+ (r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
+ include('interp'),
# @printf and @sprintf formats
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
String.Interpol),
- (r'.|\s', String),
+ (r'[^"$%\\]+', String),
+ (r'.', String),
],
-
'tqstring': [
- (r'"""', String, '#pop'),
+ (r'(""")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
- (r'\$' + allowed_variable, String.Interpol),
- (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
- (r'.|\s', String),
+ include('interp'),
+ (r'[^"$%\\]+', String),
+ (r'.', String),
],
'regex': [
- (r'"', String.Regex, '#pop'),
+ (r'(")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
(r'\\"', String.Regex),
- (r'.|\s', String.Regex),
+ (r'[^\\"]+', String.Regex),
],
'tqregex': [
- (r'"""', String.Regex, '#pop'),
- (r'.|\s', String.Regex),
+ (r'(""")([imsxa]*)?', bygroups(String.Regex, String.Affix), '#pop'),
+ (r'[^"]+', String.Regex),
],
'command': [
- (r'`', String.Backtick, '#pop'),
- (r'\$' + allowed_variable, String.Interpol),
- (r'(\$)(\()', bygroups(String.Interpol, Punctuation), 'in-intp'),
- (r'.|\s', String.Backtick)
+ (r'(`)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
+ (r'\\[`$]', String.Escape),
+ include('interp'),
+ (r'[^\\`$]+', String.Backtick),
+ (r'.', String.Backtick),
+ ],
+ 'tqcommand': [
+ (r'(```)(' + allowed_variable + r'|\d+)?', bygroups(String.Backtick, String.Affix), '#pop'),
+ (r'\\\$', String.Escape),
+ include('interp'),
+ (r'[^\\`$]+', String.Backtick),
+ (r'.', String.Backtick),
],
-
- 'in-intp': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ]
}
def analyse_text(text):
@@ -287,7 +239,7 @@ class JuliaConsoleLexer(Lexer):
.. versionadded:: 1.6
"""
name = 'Julia console'
- aliases = ['jlcon']
+ aliases = ['jlcon', 'julia-repl']
def get_tokens_unprocessed(self, text):
jllexer = JuliaLexer(**self.options)
diff --git a/vendor/pygments-main/pygments/lexers/jvm.py b/vendor/pygments-main/pygments/lexers/jvm.py
index f6e12031..3500e266 100644
--- a/vendor/pygments-main/pygments/lexers/jvm.py
+++ b/vendor/pygments-main/pygments/lexers/jvm.py
@@ -68,7 +68,9 @@ class JavaLexer(RegexLexer):
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
Name.Attribute)),
- (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
+ (r'^(\s*)(default)(:)', bygroups(Text, Keyword, Punctuation)),
+ (r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Text, Name.Label,
+ Punctuation)),
(r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
r'\.[0-9][0-9_]*)'
@@ -156,226 +158,278 @@ class ScalaLexer(RegexLexer):
flags = re.MULTILINE | re.DOTALL
- # don't use raw unicode strings!
- op = ('[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
- '\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
- '\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
- '\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
- '\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
- '\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
- '\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
- '\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
- '\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
- '\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
- '\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
- '\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
- '\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
- '\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
- '\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
-
- letter = ('[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
- '\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
- '\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
- '\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
- '\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
- '\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
- '\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
- '\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
- '\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
- '\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
- '\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
- '\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
- '\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
- '\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
- '\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
- '\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
- '\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
- '\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
- '\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
- '\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
- '\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
- '\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
- '\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
- '\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
- '\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
- '\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
- '\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
- '\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
- '\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
- '\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
- '\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
- '\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
- '\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
-
- upper = ('[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
- '\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
- '\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
- '\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
- '\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
- '\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
- '\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
- '\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
- '\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
- '\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
- '\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
- '\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
- '\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
- '\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
- '\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
- '\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
- '\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
- '\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
- '\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
- '\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
- '\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
- '\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
- '\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
- '\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
- '\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
- '\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
- '\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
- '\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
- '\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
- '\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
- '\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
- '\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
- '\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
- '\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
- '\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
- '\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
- '\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
- '\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
- '\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
- '\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
- '\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
- '\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
- '\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
- '\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
- '\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
- '\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
- '\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
- '\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
- '\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
- '\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
- '\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
- '\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
- '\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
- '\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
- '\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
- '\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
- '\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
- '\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
- '\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
- '\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
- '\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
-
- idrest = '%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
- letter_letter_digit = '%s(?:%s|\\d)*' % (letter, letter)
+ opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
+ letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
+ upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
+ letterOrDigit = '(?:%s|[0-9])' % letter
+ letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
+ alphaId = '%s+' % letter
+ simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
+ idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
+ idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
+ plainid = '(?:%s|%s+)' % (idrest, opchar)
+ backQuotedId = r'`[^`]+`'
+ anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
+ notStartOfComment = r'(?!//|/\*)'
+ endOfLineMaybeWithComment = r'(?=\s*(//|$))'
+
+ keywords = (
+ 'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
+ 'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
+ 'catch', 'finally', 'try'
+ )
+
+ operators = (
+ '<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
+ '<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
+ '+', '*', '%', '~', '\\'
+ )
+
+ storage_modifiers = (
+ 'private', 'protected', 'synchronized', '@volatile', 'abstract',
+ 'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
+ '@native'
+ )
tokens = {
'root': [
- # method names
- (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'[^\S\n]+', Text),
- include('comments'),
- (r'@%s' % idrest, Name.Decorator),
- (r'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
- r'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
- r'lazy|match|new|override|pr(?:ivate|otected)'
- r'|re(?:quires|turn)|s(?:ealed|uper)|'
- r't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|'
- r'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])\b', Keyword),
- (r':(?!%s)' % op, Keyword, 'type'),
- (r'%s%s\b' % (upper, idrest), Name.Class),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
- (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
- (r'""".*?"""(?!")', String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r"'%s" % idrest, Text.Symbol),
- (r'[fs]"""', String, 'interptriplestring'), # interpolated strings
- (r'[fs]"', String, 'interpstring'), # interpolated strings
- (r'raw"(\\\\|\\[^\\]|[^"\\])*"', String), # raw strings
- # (r'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
- # Name.Attribute)),
- (idrest, Name),
- (r'`[^`]+`', Name),
- (r'\[', Operator, 'typeparam'),
- (r'[(){};,.#]', Operator),
- (op, Operator),
- (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
- Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op),
- bygroups(Name.Class, Text, Operator), ('#pop', 'typeparam')),
- (r'\s+', Text),
+ include('whitespace'),
include('comments'),
- (r'\{', Operator, '#pop'),
- (r'\(', Operator, '#pop'),
- (r'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
+ include('script-header'),
+ include('imports'),
+ include('exports'),
+ include('storage-modifiers'),
+ include('annotations'),
+ include('using'),
+ include('declarations'),
+ include('inheritance'),
+ include('extension'),
+ include('end'),
+ include('constants'),
+ include('strings'),
+ include('symbols'),
+ include('singleton-type'),
+ include('inline'),
+ include('quoted'),
+ include('keywords'),
+ include('operators'),
+ include('punctuation'),
+ include('names'),
],
- 'type': [
- (r'\s+', Text),
- include('comments'),
- (r'<[%:]|>:|[#_]|\bforSome\b|\btype\b', Keyword),
- (r'([,);}]|=>|=|\u21d2)(\s*)', bygroups(Operator, Text), '#pop'),
- (r'[({]', Operator, '#push'),
- (r'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
- (r'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text), '#pop'),
- (r'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
- ],
- 'typeparam': [
+
+ # Includes:
+ 'whitespace': [
(r'\s+', Text),
- include('comments'),
- (r',+', Punctuation),
- (r'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
- (r'([\])}])', Operator, '#pop'),
- (r'[(\[{]', Operator, '#push'),
- (r'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
],
'comments': [
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
],
+ 'script-header': [
+ (r'^#!([^\n]*)$', Comment.Hashbang),
+ ],
+ 'imports': [
+ (r'\b(import)(\s+)', bygroups(Keyword, Text), 'import-path'),
+ ],
+ 'exports': [
+ (r'\b(export)(\s+)(given)(\s+)',
+ bygroups(Keyword, Text, Keyword, Text), 'export-path'),
+ (r'\b(export)(\s+)', bygroups(Keyword, Text), 'export-path'),
+ ],
+ 'storage-modifiers': [
+ (words(storage_modifiers, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Only highlight soft modifiers if they are eventually followed by
+ # the correct keyword. Note that soft modifiers can be followed by a
+ # sequence of regular modifiers; [a-z\s]* skips those, and we just
+ # check that the soft modifier is applied to a supported statement.
+ (r'\b(transparent|opaque|infix|open|inline)\b(?=[a-z\s]*\b'
+ r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
+ ],
+ 'annotations': [
+ (r'@%s' % idrest, Name.Decorator),
+ ],
+ 'using': [
+ # using is a soft keyword, can only be used in the first position of
+ # a parameter or argument list.
+ (r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Text, Keyword, Text)),
+ ],
+ 'declarations': [
+ (r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
+ (notStartOfComment, anyId),
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(?)|(?=%s)|(?="))?' %
+ (idUpper, backQuotedId, plainid),
+ bygroups(Keyword, Text, Name.Class)),
+ ],
+ 'extension': [
+ (r'\b(extension)(\s+)(?=[\[\(])', bygroups(Keyword, Text)),
+ ],
+ 'end': [
+ # end is a soft keyword, should only be highlighted in certain cases
+ (r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
+ bygroups(Keyword, Text, Keyword)),
+ (r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(end)(\s+)(%s|%s)?%s' %
+ (backQuotedId, plainid, endOfLineMaybeWithComment),
+ bygroups(Keyword, Text, Name.Namespace)),
+ ],
+ 'punctuation': [
+ (r'[{}()\[\];,.]', Punctuation),
+ (r'(?', Operator),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[\[\]]', Punctuation),
+ include('qualified-name'),
+ ],
+ 'export-path': [
+ (r'(?<=[\n;:])', Text, '#pop'),
+ include('comments'),
+ include('qualified-name'),
+ (r'\{', Punctuation, 'export-path-curly-brace'),
],
- 'interptriplestring': [
+ 'export-path-curly-brace': [
+ include('whitespace'),
+ include('comments'),
+ (r'=>', Operator),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation),
+ include('qualified-name'),
+ ],
+ 'package': [
+ (r'(?<=[\n;])', Text, '#pop'),
+ (r':', Punctuation, '#pop'),
+ include('comments'),
+ include('qualified-name'),
+ ],
+ 'interpolated-string-triple': [
(r'"""(?!")', String, '#pop'),
(r'"', String),
- include('interpstringcommon'),
+ include('interpolated-string-common'),
],
- 'interpstring': [
+ 'interpolated-string': [
(r'"', String, '#pop'),
- include('interpstringcommon'),
+ include('interpolated-string-common'),
],
- 'interpbrace': [
+ 'interpolated-string-brace': [
(r'\}', String.Interpol, '#pop'),
- (r'\{', String.Interpol, '#push'),
+ (r'\{', Punctuation, 'interpolated-string-nested-brace'),
include('root'),
],
+ 'interpolated-string-nested-brace': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+
+ # Helpers
+ 'qualified-name': [
+ (idUpper, Name.Class),
+ (r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
+ (r'\.', Punctuation),
+ (anyId, Name),
+ (r'[^\S\n]+', Text),
+ ],
+ 'interpolated-string-common': [
+ (r'[^"$\\]+', String),
+ (r'\$\$', String.Escape),
+ (r'(\$)(%s)' % simpleInterpolatedVariable,
+ bygroups(String.Interpol, Name)),
+ (r'\$\{', String.Interpol, 'interpolated-string-brace'),
+ (r'\\.', String),
+ ],
}
@@ -497,18 +551,24 @@ class GroovyLexer(RegexLexer):
default('base'),
],
'base': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ # keywords: go before method names to avoid lexing "throw new XYZ"
+ # as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'('
+ r'[a-zA-Z_]\w*' # method name
+ r'|"(?:\\\\|\\[^\\]|[^"\\])*"' # or double-quoted method name
+ r"|'(?:\\\\|\\[^\\]|[^'\\])*'" # or single-quoted method name
+ r')'
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
@@ -1677,7 +1737,7 @@ def analyse_text(text):
r'inner|interface|limit|set|signature|stack)\b', text,
re.MULTILINE):
score += 0.6
- return score
+ return min(score, 1.0)
class SarlLexer(RegexLexer):
diff --git a/vendor/pygments-main/pygments/lexers/kuin.py b/vendor/pygments-main/pygments/lexers/kuin.py
new file mode 100644
index 00000000..5800db9c
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/kuin.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.kuin
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Kuin language.
+"""
+
+from pygments.lexer import RegexLexer, include, using, inherit, this, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, Number, Punctuation
+
+__all__ = ['KuinLexer']
+
+class KuinLexer(RegexLexer):
+ """
+ For `Kuin `_ source code
+
+ .. versionadded:: 2.9
+ """
+ name = 'Kuin'
+ aliases = ['kuin']
+ filenames = ['*.kn']
+
+ tokens = {
+ 'root': [
+ include('statement'),
+ ],
+ 'statement': [
+ # Whitespace / Comment
+ include('whitespace'),
+
+ # Block-statement
+ (r'(\+?[ \t]*\*?[ \t]*\bfunc)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Function), 'func_'),
+ (r'\b(class)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Class), 'class_'),
+ (r'\b(enum)([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Constant), 'enum_'),
+ (r'\b(block)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'block_'),
+ (r'\b(ifdef)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'ifdef_'),
+ (r'\b(if)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'if_'),
+ (r'\b(switch)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'switch_'),
+ (r'\b(while)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'while_'),
+ (r'\b(for)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'for_'),
+ (r'\b(foreach)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'foreach_'),
+ (r'\b(try)\b(?:([ \t]+(?:\n\s*\|)*[ \t]*)([a-zA-Z_][0-9a-zA-Z_]*))?', bygroups(Keyword, using(this), Name.Other), 'try_'),
+
+ # Line-statement
+ (r'\b(do)\b', Keyword, 'do'),
+ (r'(\+?[ \t]*\bvar)\b', Keyword, 'var'),
+ (r'\b(const)\b', Keyword, 'const'),
+ (r'\b(ret)\b', Keyword, 'ret'),
+ (r'\b(throw)\b', Keyword, 'throw'),
+ (r'\b(alias)\b', Keyword, 'alias'),
+ (r'\b(assert)\b', Keyword, 'assert'),
+ (r'\|', Text, 'continued_line'),
+ (r'[ \t]*\n', Text),
+ ],
+
+ # Whitespace / Comment
+ 'whitespace': [
+ (r'^[ \t]*;.*', Comment.Single),
+ (r'[ \t]+(?![; \t])', Text),
+ (r'\{', Comment.Multiline, 'multiline_comment'),
+ ],
+ 'multiline_comment': [
+ (r'\{', Comment.Multiline, 'multiline_comment'),
+ (r'(?:\s*;.*|[^{}\n]+)', Comment.Multiline),
+ (r'\n', Comment.Multiline),
+ (r'\}', Comment.Multiline, '#pop'),
+ ],
+
+ # Block-statement
+ 'func_': [
+ include('expr'),
+ (r'\n', Text, 'func'),
+ ],
+ 'func': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(func)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ ],
+ 'class_': [
+ include('expr'),
+ (r'\n', Text, 'class'),
+ ],
+ 'class': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(class)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ ],
+ 'enum_': [
+ include('expr'),
+ (r'\n', Text, 'enum'),
+ ],
+ 'enum': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(enum)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('expr'),
+ (r'\n', Text),
+ ],
+ 'block_': [
+ include('expr'),
+ (r'\n', Text, 'block'),
+ ],
+ 'block': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(block)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'ifdef_': [
+ include('expr'),
+ (r'\n', Text, 'ifdef'),
+ ],
+ 'ifdef': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(ifdef)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('rls', 'dbg'), prefix=r'\b', suffix=r'\b'), Keyword.Constant, 'ifdef_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'ifdef_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'if_': [
+ include('expr'),
+ (r'\n', Text, 'if'),
+ ],
+ 'if': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(if)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('elif', 'else'), prefix=r'\b', suffix=r'\b'), Keyword, 'if_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'if_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'switch_': [
+ include('expr'),
+ (r'\n', Text, 'switch'),
+ ],
+ 'switch': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(switch)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('case', 'default', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'switch_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'switch_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'while_': [
+ include('expr'),
+ (r'\n', Text, 'while'),
+ ],
+ 'while': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(while)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'for_': [
+ include('expr'),
+ (r'\n', Text, 'for'),
+ ],
+ 'for': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(for)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'foreach_': [
+ include('expr'),
+ (r'\n', Text, 'foreach'),
+ ],
+ 'foreach': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(foreach)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'try_': [
+ include('expr'),
+ (r'\n', Text, 'try'),
+ ],
+ 'try': [
+ (r'\b(end)([ \t]+(?:\n\s*\|)*[ \t]*)(try)\b', bygroups(Keyword, using(this), Keyword), '#pop:2'),
+ (words(('catch', 'finally', 'to'), prefix=r'\b', suffix=r'\b'), Keyword, 'try_sp'),
+ include('statement'),
+ include('break'),
+ include('skip'),
+ ],
+ 'try_sp': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+
+ # Line-statement
+ 'break': [
+ (r'\b(break)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
+ ],
+ 'skip': [
+ (r'\b(skip)\b([ \t]+)([a-zA-Z_][0-9a-zA-Z_]*)', bygroups(Keyword, using(this), Name.Other)),
+ ],
+ 'alias': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'assert': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'const': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'do': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'ret': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'throw': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'var': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'continued_line': [
+ include('expr'),
+ (r'\n', Text, '#pop'),
+ ],
+
+ 'expr': [
+ # Whitespace / Comment
+ include('whitespace'),
+
+ # Punctuation
+ (r'\(', Punctuation,),
+ (r'\)', Punctuation,),
+ (r'\[', Punctuation,),
+ (r'\]', Punctuation,),
+ (r',', Punctuation),
+
+ # Keyword
+ (words((
+ 'true', 'false', 'null', 'inf'
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
+ (words((
+ 'me'
+ ), prefix=r'\b', suffix=r'\b'), Keyword),
+ (words((
+ 'bit16', 'bit32', 'bit64', 'bit8', 'bool',
+ 'char', 'class', 'dict', 'enum', 'float', 'func',
+ 'int', 'list', 'queue', 'stack'
+ ), prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ # Number
+ (r'\b[0-9]\.[0-9]+(?!\.)(:?e[\+-][0-9]+)?\b', Number.Float),
+ (r'\b2#[01]+(?:b(?:8|16|32|64))?\b', Number.Bin),
+ (r'\b8#[0-7]+(?:b(?:8|16|32|64))?\b', Number.Oct),
+ (r'\b16#[0-9A-F]+(?:b(?:8|16|32|64))?\b', Number.Hex),
+ (r'\b[0-9]+(?:b(?:8|16|32|64))?\b', Number.Decimal),
+
+ # String / Char
+ (r'"', String.Double, 'string'),
+ (r"'(?:\\.|.)+?'", String.Char),
+
+ # Operator
+ (r'(?:\.|\$(?:>|<)?)', Operator),
+ (r'(?:\^)', Operator),
+ (r'(?:\+|-|!|##?)', Operator),
+ (r'(?:\*|/|%)', Operator),
+ (r'(?:~)', Operator),
+ (r'(?:(?:=|<>)(?:&|\$)?|<=?|>=?)', Operator),
+ (r'(?:&)', Operator),
+ (r'(?:\|)', Operator),
+ (r'(?:\?)', Operator),
+ (r'(?::(?::|\+|-|\*|/|%|\^|~)?)', Operator),
+
+ # Identifier
+ (r"\b([a-zA-Z_][0-9a-zA-Z_]*)(?=@)\b", Name),
+ (r"(@)?\b([a-zA-Z_][0-9a-zA-Z_]*)\b", bygroups(Name.Other, Name.Variable)),
+ ],
+
+ # String
+ 'string': [
+ (r'(?:\\[^{\n]|[^"\\])+', String.Double),
+ (r'\\\{', String.Double, 'toStrInString'),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'toStrInString': [
+ include('expr'),
+ (r'\}', String.Double, '#pop'),
+ ],
+ }
diff --git a/vendor/pygments-main/pygments/lexers/lisp.py b/vendor/pygments-main/pygments/lexers/lisp.py
index 531c66a0..f8814058 100644
--- a/vendor/pygments-main/pygments/lexers/lisp.py
+++ b/vendor/pygments-main/pygments/lexers/lisp.py
@@ -1536,7 +1536,7 @@ class EmacsLispLexer(RegexLexer):
.. versionadded:: 2.1
"""
name = 'EmacsLisp'
- aliases = ['emacs', 'elisp', 'emacs-lisp']
+ aliases = ['emacs-lisp', 'elisp', 'emacs']
filenames = ['*.el']
mimetypes = ['text/x-elisp', 'application/x-elisp']
@@ -2631,23 +2631,22 @@ class FennelLexer(RegexLexer):
aliases = ['fennel', 'fnl']
filenames = ['*.fnl']
- # these two lists are taken from fennel-mode.el:
- # https://gitlab.com/technomancy/fennel-mode
- # this list is current as of Fennel version 0.6.0.
+ # this list is current as of Fennel version 0.10.0.
special_forms = (
- 'require-macros', 'eval-compiler', 'doc', 'lua', 'hashfn',
- 'macro', 'macros', 'import-macros', 'pick-args', 'pick-values',
- 'macroexpand', 'macrodebug', 'do', 'values', 'if', 'when',
- 'each', 'for', 'fn', 'lambda', 'λ', 'partial', 'while',
- 'set', 'global', 'var', 'local', 'let', 'tset', 'set-forcibly!',
- 'doto', 'match', 'or', 'and', 'true', 'false', 'nil', 'not',
- 'not=', '.', '+', '..', '^', '-', '*', '%', '/', '>',
- '<', '>=', '<=', '=', '...', ':', '->', '->>', '-?>',
- '-?>>', 'rshift', 'lshift', 'bor', 'band', 'bnot', 'bxor',
- 'with-open', 'length'
+ '#', '%', '*', '+', '-', '->', '->>', '-?>', '-?>>', '.', '..',
+ '/', '//', ':', '<', '<=', '=', '>', '>=', '?.', '^', 'accumulate',
+ 'and', 'band', 'bnot', 'bor', 'bxor', 'collect', 'comment', 'do', 'doc',
+ 'doto', 'each', 'eval-compiler', 'for', 'hashfn', 'icollect', 'if',
+ 'import-macros', 'include', 'length', 'let', 'lshift', 'lua',
+ 'macrodebug', 'match', 'not', 'not=', 'or', 'partial', 'pick-args',
+ 'pick-values', 'quote', 'require-macros', 'rshift', 'set',
+ 'set-forcibly!', 'tset', 'values', 'when', 'while', 'with-open', '~='
+ )
+
+ declarations = (
+ 'fn', 'global', 'lambda', 'local', 'macro', 'macros', 'var', 'λ'
)
- # Might be nicer to use the list from _lua_builtins.py but it's unclear how?
builtins = (
'_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage',
'coroutine', 'debug', 'dofile', 'error', 'getfenv',
@@ -2673,6 +2672,8 @@ class FennelLexer(RegexLexer):
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'(true|false|nil)', Name.Constant),
+
# these are technically strings, but it's worth visually
# distinguishing them because their intent is different
# from regular strings.
@@ -2680,6 +2681,8 @@ class FennelLexer(RegexLexer):
# special forms are keywords
(words(special_forms, suffix=' '), Keyword),
+ # these are ... even more special!
+ (words(declarations, suffix=' '), Keyword.Declaration),
# lua standard library are builtins
(words(builtins, suffix=' '), Name.Builtin),
# special-case the vararg symbol
diff --git a/vendor/pygments-main/pygments/lexers/markup.py b/vendor/pygments-main/pygments/lexers/markup.py
index e3cc8171..3317d1b9 100644
--- a/vendor/pygments-main/pygments/lexers/markup.py
+++ b/vendor/pygments-main/pygments/lexers/markup.py
@@ -121,7 +121,7 @@ class RstLexer(RegexLexer):
.. versionadded:: 0.8
"""
name = 'reStructuredText'
- aliases = ['rst', 'rest', 'restructuredtext']
+ aliases = ['restructuredtext', 'rst', 'rest']
filenames = ['*.rst', '*.rest']
mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
flags = re.MULTILINE
@@ -501,8 +501,8 @@ class MarkdownLexer(RegexLexer):
.. versionadded:: 2.2
"""
- name = 'markdown'
- aliases = ['md', 'markdown']
+ name = 'Markdown'
+ aliases = ['markdown', 'md']
filenames = ['*.md', '*.markdown']
mimetypes = ["text/x-markdown"]
flags = re.MULTILINE
diff --git a/vendor/pygments-main/pygments/lexers/matlab.py b/vendor/pygments-main/pygments/lexers/matlab.py
index 7a72eedc..e12303dd 100644
--- a/vendor/pygments-main/pygments/lexers/matlab.py
+++ b/vendor/pygments-main/pygments/lexers/matlab.py
@@ -20,7 +20,6 @@
__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
-
class MatlabLexer(RegexLexer):
"""
For Matlab source code.
@@ -2783,8 +2782,8 @@ def get_tokens_unprocessed(self, text):
# Set leading spaces with the length of the prompt to be a generic prompt
# This keeps code aligned when prompts are removed, say with some Javascript
if line.startswith(' '*line_start):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:line_start])]))
+ insertions.append(
+ (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
curcode += line[line_start:]
else:
curcode += line
@@ -3146,18 +3145,21 @@ class OctaveLexer(RegexLexer):
tokens = {
'root': [
- # We should look into multiline comments
+ (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
+ (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
(r'[%#].*$', Comment),
(r'^\s*function\b', Keyword, 'deffunc'),
# from 'iskeyword' on hg changeset 8cc154f45e37
(words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
- 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
- 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
- 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
- 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
- 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
+ 'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
+ 'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
+ 'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
+ 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
+ 'methods', 'otherwise', 'persistent', 'properties', 'return',
+ 'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
+ 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
Keyword),
(words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
@@ -3191,8 +3193,19 @@ class OctaveLexer(RegexLexer):
(r'(?`_ language lexer.
+    The grammar definition used to transcribe the syntax was retrieved from
+ https://mesonbuild.com/Syntax.html#grammar for version 0.58
+ Some of those definitions are improperly transcribed so the Meson++
+ implementation was also checked: https://github.com/dcbaker/meson-plus-plus
+
+ .. versionadded:: 2.10
+ """
+
+ # TODO String interpolation @VARNAME@ inner matches
+ # TODO keyword_arg: value inner matches
+
+ name = 'Meson'
+ aliases = ['meson', 'meson.build']
+ filenames = ['meson.build', 'meson_options.txt']
+ mimetypes = ['text/x-meson']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment),
+ (r"'''.*'''", String.Single),
+ (r'[1-9][0-9]*', Number.Integer),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ include('string'),
+ include('keywords'),
+ include('expr'),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+ (r'\s+', Whitespace),
+ ],
+ 'string': [
+ (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
+ (r"'.*?(?>|:=|[-~+/*%=<>&^|.]', Operator),
(r'[]{}:(),;[]', Punctuation),
(r'(in|is|and|or|not)\b', Operator.Word),
@@ -168,7 +169,6 @@ def fstring_rules(ttype):
include('magicfuncs'),
include('magicvars'),
include('name'),
- include('numbers'),
],
'expr-inside-fstring': [
(r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
@@ -727,7 +727,7 @@ class PythonTracebackLexer(RegexLexer):
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
bygroups(Text, Name.Builtin, Text, Number, Text)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text)),
+ bygroups(Text, using(PythonLexer), Text), 'markers'),
(r'^([ \t]*)(\.\.\.)(\n)',
bygroups(Text, Comment, Text)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
@@ -735,6 +735,15 @@ class PythonTracebackLexer(RegexLexer):
(r'^([a-zA-Z_][\w.]*)(:?\n)',
bygroups(Generic.Error, Text), '#pop')
],
+ 'markers': [
+ # Either `PEP 657 `
+ # error locations in Python 3.11+, or single-caret markers
+ # for syntax errors before that.
+ (r'^( {4,})(\^+)(\n)',
+ bygroups(Text, Punctuation.Marker, Text),
+ '#pop'),
+ default('#pop'),
+ ],
}
@@ -773,7 +782,7 @@ class Python2TracebackLexer(RegexLexer):
(r'^( File )("[^"]+")(, line )(\d+)(\n)',
bygroups(Text, Name.Builtin, Text, Number, Text)),
(r'^( )(.+)(\n)',
- bygroups(Text, using(Python2Lexer), Text)),
+ bygroups(Text, using(Python2Lexer), Text), 'marker'),
(r'^([ \t]*)(\.\.\.)(\n)',
bygroups(Text, Comment, Text)), # for doctests...
(r'^([^:]+)(: )(.+)(\n)',
@@ -781,6 +790,11 @@ class Python2TracebackLexer(RegexLexer):
(r'^([a-zA-Z_]\w*)(:?\n)',
bygroups(Generic.Error, Text), '#pop')
],
+ 'marker': [
+ # For syntax errors.
+ (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
+ default('#pop'),
+ ],
}
@@ -845,14 +859,14 @@ class CythonLexer(RegexLexer):
],
'builtins': [
(words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
diff --git a/vendor/pygments-main/pygments/lexers/resource.py b/vendor/pygments-main/pygments/lexers/resource.py
index 2cbacbcc..3ed176a1 100644
--- a/vendor/pygments-main/pygments/lexers/resource.py
+++ b/vendor/pygments-main/pygments/lexers/resource.py
@@ -24,7 +24,7 @@ class ResourceLexer(RegexLexer):
.. versionadded:: 2.0
"""
name = 'ResourceBundle'
- aliases = ['resource', 'resourcebundle']
+ aliases = ['resourcebundle', 'resource']
filenames = []
_types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
diff --git a/vendor/pygments-main/pygments/lexers/rnc.py b/vendor/pygments-main/pygments/lexers/rnc.py
index ffb95f3f..cc8950a0 100644
--- a/vendor/pygments-main/pygments/lexers/rnc.py
+++ b/vendor/pygments-main/pygments/lexers/rnc.py
@@ -23,7 +23,7 @@ class RNCCompactLexer(RegexLexer):
"""
name = 'Relax-NG Compact'
- aliases = ['rnc', 'rng-compact']
+ aliases = ['rng-compact', 'rnc']
filenames = ['*.rnc']
tokens = {
diff --git a/vendor/pygments-main/pygments/lexers/ruby.py b/vendor/pygments-main/pygments/lexers/ruby.py
index 71b5a89b..2c3e7bd3 100644
--- a/vendor/pygments-main/pygments/lexers/ruby.py
+++ b/vendor/pygments-main/pygments/lexers/ruby.py
@@ -33,7 +33,7 @@ class RubyLexer(ExtendedRegexLexer):
"""
name = 'Ruby'
- aliases = ['rb', 'ruby', 'duby']
+ aliases = ['ruby', 'rb', 'duby']
filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby', 'Gemfile']
mimetypes = ['text/x-ruby', 'application/x-ruby']
diff --git a/vendor/pygments-main/pygments/lexers/rust.py b/vendor/pygments-main/pygments/lexers/rust.py
index 3a162b00..d01f73e4 100644
--- a/vendor/pygments-main/pygments/lexers/rust.py
+++ b/vendor/pygments-main/pygments/lexers/rust.py
@@ -109,7 +109,7 @@ class RustLexer(RegexLexer):
# Types in positions.
(r'(?::|->)', Text, 'typename'),
# Labels
- (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
+ (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character literals
@@ -135,7 +135,7 @@ class RustLexer(RegexLexer):
# String literals
(r'b"', String, 'bytestring'),
(r'"', String, 'string'),
- (r'b?r(#*)".*?"\1', String),
+ (r'(?s)b?r(#*)".*?"\1', String),
# Lifetime names
(r"'", Operator, 'lifetime'),
@@ -152,6 +152,11 @@ class RustLexer(RegexLexer):
# Attributes
(r'#!?\[', Comment.Preproc, 'attribute['),
+
+ # Misc
+ # Lone hashes: not used in Rust syntax, but allowed in macro
+ # arguments, most famously for quote::quote!()
+ (r'#', Text),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
@@ -208,16 +213,10 @@ class RustLexer(RegexLexer):
'attribute_common': [
(r'"', String, 'string'),
(r'\[', Comment.Preproc, 'attribute['),
- (r'\(', Comment.Preproc, 'attribute('),
],
'attribute[': [
include('attribute_common'),
- (r'\];?', Comment.Preproc, '#pop'),
- (r'[^"\]]+', Comment.Preproc),
- ],
- 'attribute(': [
- include('attribute_common'),
- (r'\);?', Comment.Preproc, '#pop'),
- (r'[^")]+', Comment.Preproc),
+ (r'\]', Comment.Preproc, '#pop'),
+ (r'[^"\]\[]+', Comment.Preproc),
],
}
diff --git a/vendor/pygments-main/pygments/lexers/scripting.py b/vendor/pygments-main/pygments/lexers/scripting.py
index 7f92eb16..47d066fc 100644
--- a/vendor/pygments-main/pygments/lexers/scripting.py
+++ b/vendor/pygments-main/pygments/lexers/scripting.py
@@ -168,9 +168,9 @@ class MoonScriptLexer(LuaLexer):
.. versionadded:: 1.5
"""
- name = "MoonScript"
- aliases = ["moon", "moonscript"]
- filenames = ["*.moon"]
+ name = 'MoonScript'
+ aliases = ['moonscript', 'moon']
+ filenames = ['*.moon']
mimetypes = ['text/x-moonscript', 'application/x-moonscript']
tokens = {
@@ -237,7 +237,7 @@ class ChaiscriptLexer(RegexLexer):
"""
name = 'ChaiScript'
- aliases = ['chai', 'chaiscript']
+ aliases = ['chaiscript', 'chai']
filenames = ['*.chai']
mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
@@ -1236,9 +1236,9 @@ class MiniScriptLexer(RegexLexer):
.. versionadded:: 2.6
"""
- name = "MiniScript"
- aliases = ["ms", "miniscript"]
- filenames = ["*.ms"]
+ name = 'MiniScript'
+ aliases = ['miniscript', 'ms']
+ filenames = ['*.ms']
mimetypes = ['text/x-minicript', 'application/x-miniscript']
tokens = {
diff --git a/vendor/pygments-main/pygments/lexers/shell.py b/vendor/pygments-main/pygments/lexers/shell.py
index 6283a11a..713b50a1 100644
--- a/vendor/pygments-main/pygments/lexers/shell.py
+++ b/vendor/pygments-main/pygments/lexers/shell.py
@@ -192,10 +192,13 @@ def get_tokens_unprocessed(self, text):
[(0, Generic.Prompt, m.group(1))]))
curcode += m.group(2)
backslash_continuation = curcode.endswith('\\\n')
- elif line.startswith(self._ps2) and backslash_continuation:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:len(self._ps2)])]))
- curcode += line[len(self._ps2):]
+ elif backslash_continuation:
+ if line.startswith(self._ps2):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:len(self._ps2)])]))
+ curcode += line[len(self._ps2):]
+ else:
+ curcode += line
backslash_continuation = curcode.endswith('\\\n')
else:
if insertions:
@@ -238,7 +241,7 @@ class BatchLexer(RegexLexer):
.. versionadded:: 0.7
"""
name = 'Batchfile'
- aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
+ aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
@@ -652,7 +655,7 @@ class PowerShellLexer(RegexLexer):
.. versionadded:: 1.5
"""
name = 'PowerShell'
- aliases = ['powershell', 'posh', 'ps1', 'psm1']
+ aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1', '*.psm1']
mimetypes = ['text/x-powershell']
@@ -730,7 +733,7 @@ class PowerShellLexer(RegexLexer):
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
- (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
+ (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
@@ -768,7 +771,7 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
"""
name = 'PowerShell Session'
- aliases = ['ps1con']
+ aliases = ['pwsh-session', 'ps1con']
filenames = []
mimetypes = []
diff --git a/vendor/pygments-main/pygments/lexers/smithy.py b/vendor/pygments-main/pygments/lexers/smithy.py
new file mode 100644
index 00000000..0f0a9120
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/smithy.py
@@ -0,0 +1,79 @@
+"""
+ pygments.lexers.smithy
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Smithy IDL.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Whitespace, Punctuation
+
+__all__ = ['SmithyLexer']
+
+
+class SmithyLexer(RegexLexer):
+ """
+ For Smithy IDL
+
+ .. versionadded:: 2.10
+ """
+ name = 'Smithy'
+ filenames = ['*.smithy']
+ aliases = ['smithy']
+
+ flags = re.MULTILINE | re.UNICODE
+ unquoted = r'[A-Za-z0-9_\.#$-]+'
+ identifier = r"[A-Za-z0-9_\.#$-]+"
+
+ simple_shapes = (
+ 'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
+ 'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
+ 'timestamp',
+ )
+
+ aggregate_shapes = (
+ 'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
+ 'operation', 'service', 'trait'
+ )
+
+ tokens = {
+ 'root': [
+ (r'///.*$', Comment.Multiline),
+ (r'//.*$', Comment),
+ (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
+ (r'(=)', Name.Decorator),
+ (r'^(\$version)(:)(.+)',
+ bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
+ (r'^(namespace)(\s+' + identifier + r')\b',
+ bygroups(Keyword.Declaration, Name.Class)),
+ (words(simple_shapes,
+ prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
+ bygroups(Keyword.Declaration, Name.Class)),
+ (words(aggregate_shapes,
+ prefix=r'^', suffix=r'(\s+' + identifier + r')'),
+ bygroups(Keyword.Declaration, Name.Class)),
+ (r'^(metadata)(\s+.+)(\s*)(=)',
+ bygroups(Keyword.Declaration, Name.Class, Whitespace, Name.Decorator)),
+ (r"(true|false|null)", Keyword.Constant),
+ (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
+ (identifier + ":", Name.Label),
+ (identifier, Name.Variable.Class),
+ (r'\[', Text, "#push"),
+ (r'\]', Text, "#pop"),
+ (r'\(', Text, "#push"),
+ (r'\)', Text, "#pop"),
+ (r'\{', Text, "#push"),
+ (r'\}', Text, "#pop"),
+ (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
+ (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
+ (r'[:,]+', Punctuation),
+ (r'\s+', Whitespace),
+ ]
+ }
diff --git a/vendor/pygments-main/pygments/lexers/smv.py b/vendor/pygments-main/pygments/lexers/smv.py
index 7db2070f..a4cbf945 100644
--- a/vendor/pygments-main/pygments/lexers/smv.py
+++ b/vendor/pygments-main/pygments/lexers/smv.py
@@ -68,7 +68,7 @@ class NuSMVLexer(RegexLexer):
(r'\-?\d+\b', Number.Integer),
(r'0[su][bB]\d*_[01_]+', Number.Bin),
(r'0[su][oO]\d*_[0-7_]+', Number.Oct),
- (r'0[su][dD]\d*_[\d_]+', Number.Dec),
+ (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
(r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
# Whitespace, punctuation and the rest
diff --git a/vendor/pygments-main/pygments/lexers/special.py b/vendor/pygments-main/pygments/lexers/special.py
index b901e5d3..bff6652c 100644
--- a/vendor/pygments-main/pygments/lexers/special.py
+++ b/vendor/pygments-main/pygments/lexers/special.py
@@ -12,11 +12,11 @@
import re
from pygments.lexer import Lexer
-from pygments.token import Token, Error, Text
+from pygments.token import Token, Error, Text, Generic
from pygments.util import get_choice_opt
-__all__ = ['TextLexer', 'RawTokenLexer']
+__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
class TextLexer(Lexer):
@@ -36,6 +36,19 @@ def analyse_text(text):
return TextLexer.priority
+class OutputLexer(Lexer):
+ """
+ Simple lexer that highlights everything as ``Token.Generic.Output``.
+
+ .. versionadded:: 2.10
+ """
+ name = 'Text output'
+ aliases = ['output']
+
+ def get_tokens_unprocessed(self, text):
+ yield 0, Generic.Output, text
+
+
_ttype_cache = {}
line_re = re.compile('.*?\n')
diff --git a/vendor/pygments-main/pygments/lexers/supercollider.py b/vendor/pygments-main/pygments/lexers/supercollider.py
index 5129e9b4..724674f5 100644
--- a/vendor/pygments-main/pygments/lexers/supercollider.py
+++ b/vendor/pygments-main/pygments/lexers/supercollider.py
@@ -25,7 +25,7 @@ class SuperColliderLexer(RegexLexer):
"""
name = 'SuperCollider'
- aliases = ['sc', 'supercollider']
+ aliases = ['supercollider', 'sc']
filenames = ['*.sc', '*.scd']
mimetypes = ['application/supercollider', 'text/supercollider', ]
diff --git a/vendor/pygments-main/pygments/lexers/tcl.py b/vendor/pygments-main/pygments/lexers/tcl.py
index 9fb50901..7be07357 100644
--- a/vendor/pygments-main/pygments/lexers/tcl.py
+++ b/vendor/pygments-main/pygments/lexers/tcl.py
@@ -10,7 +10,7 @@
from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number
+ Number, Whitespace
from pygments.util import shebang_matches
__all__ = ['TclLexer']
@@ -79,13 +79,13 @@ def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
(r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
],
'data': [
- (r'\s+', Text),
+ (r'\s+', Whitespace),
(r'0x[a-fA-F0-9]+', Number.Hex),
(r'0[0-7]+', Number.Oct),
(r'\d+\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\$([\w.:-]+)', Name.Variable),
- (r'([\w.:-]+)', Text),
+ (r'([\w.,@:-]+)', Text),
],
'params': [
(r';', Keyword, '#pop'),
diff --git a/vendor/pygments-main/pygments/lexers/teal.py b/vendor/pygments-main/pygments/lexers/teal.py
new file mode 100644
index 00000000..393000a2
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/teal.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.teal
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for TEAL.
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import Comment, Name, Number, String, Text, Keyword
+
+__all__ = ['TealLexer']
+
+class TealLexer(RegexLexer):
+ """
+ For the `Transaction Execution Approval Language (TEAL)
+ `
+
+ For more information about the grammar, see:
+ https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go
+
+ .. versionadded:: 2.9
+ """
+ name = 'teal'
+ aliases = ['teal']
+ filenames = ['*.teal']
+
+ keywords = words({
+ 'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
+ 'Lease', 'Receiver', 'Amount', 'CloseRemainderTo', 'VotePK',
+ 'SelectionPK', 'VoteFirst', 'VoteLast', 'VoteKeyDilution', 'Type',
+ 'TypeEnum', 'XferAsset', 'AssetAmount', 'AssetSender', 'AssetReceiver',
+ 'AssetCloseTo', 'GroupIndex', 'TxID', 'ApplicationID', 'OnCompletion',
+ 'ApplicationArgs', 'NumAppArgs', 'Accounts', 'NumAccounts',
+ 'ApprovalProgram', 'ClearStateProgram', 'RekeyTo', 'ConfigAsset',
+ 'ConfigAssetTotal', 'ConfigAssetDecimals', 'ConfigAssetDefaultFrozen',
+ 'ConfigAssetUnitName', 'ConfigAssetName', 'ConfigAssetURL',
+ 'ConfigAssetMetadataHash', 'ConfigAssetManager', 'ConfigAssetReserve',
+ 'ConfigAssetFreeze', 'ConfigAssetClawback', 'FreezeAsset',
+ 'FreezeAssetAccount', 'FreezeAssetFrozen',
+ 'NoOp', 'OptIn', 'CloseOut', 'ClearState', 'UpdateApplication',
+ 'DeleteApplication',
+ 'MinTxnFee', 'MinBalance', 'MaxTxnLife', 'ZeroAddress', 'GroupSize',
+ 'LogicSigVersion', 'Round', 'LatestTimestamp', 'CurrentApplicationID',
+ 'AssetBalance', 'AssetFrozen',
+ 'AssetTotal', 'AssetDecimals', 'AssetDefaultFrozen', 'AssetUnitName',
+ 'AssetName', 'AssetURL', 'AssetMetadataHash', 'AssetManager',
+ 'AssetReserve', 'AssetFreeze', 'AssetClawback',
+ }, suffix = r'\b')
+
+ identifier = r'[^ \t\n]+(?=\/\/)|[^ \t\n]+'
+ newline = r'\r?\n'
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ # pragmas match specifically on the space character
+ (r'^#pragma .*' + newline, Comment.Directive),
+ # labels must be followed by a space,
+ # but anything after that is ignored
+ ('(' + identifier + ':' + ')' + '([ \t].*)',
+ bygroups(Name.Label, Comment.Single)),
+ (identifier, Name.Function, 'function-args'),
+ ],
+ 'function-args': [
+ include('whitespace'),
+ (r'"', String, 'string'),
+ (r'(b(?:ase)?(?:32|64) ?)(\(?[a-zA-Z0-9+/=]+\)?)',
+ bygroups(String.Affix, String.Other)),
+ (r'[A-Z2-7]{58}', Number), # address
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (keywords, Keyword),
+ (identifier, Name.Attributes), # branch targets
+ (newline, Text, '#pop'),
+ ],
+ 'string': [
+ (r'\\(?:["nrt\\]|x\d\d)', String.Escape),
+ (r'[^\\\"\n]+', String),
+ (r'"', String, '#pop'),
+ ],
+ 'whitespace': [
+ (r'[ \t]+', Text),
+ (r'//[^\n]+', Comment.Single),
+ ],
+ }
diff --git a/vendor/pygments-main/pygments/lexers/templates.py b/vendor/pygments-main/pygments/lexers/templates.py
index 15ea0ec9..548e14af 100644
--- a/vendor/pygments-main/pygments/lexers/templates.py
+++ b/vendor/pygments-main/pygments/lexers/templates.py
@@ -267,11 +267,11 @@ class VelocityLexer(RegexLexer):
def analyse_text(text):
rv = 0.0
- if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
+ if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.25
- if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
+ if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
- if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
+ if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
rv += 0.15
if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
r'(\.\w+(\([^)]*\))?)*\}?', text):
@@ -489,7 +489,7 @@ class MyghtyJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Myghty'
- aliases = ['js+myghty', 'javascript+myghty']
+ aliases = ['javascript+myghty', 'js+myghty']
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
'text/javascript+mygthy']
@@ -588,12 +588,12 @@ class MakoLexer(RegexLexer):
tokens = {
'root': [
(r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
- bygroups(Text, Comment.Preproc, Keyword, Other)),
+ bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
(r'(\s*)(%)([^\n]*)(\n|\Z)',
- bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
+ bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
- bygroups(Text, Comment.Preproc, Other)),
- (r'(?s)<%doc>.*?%doc>', Comment.Preproc),
+ bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
+ (r'(?s)<%doc>.*?%doc>', Comment.Multiline),
(r'(<%)([\w.:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(%)([\w.:]+)(>)',
@@ -679,7 +679,7 @@ class MakoJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Mako'
- aliases = ['js+mako', 'javascript+mako']
+ aliases = ['javascript+mako', 'js+mako']
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
@@ -798,8 +798,8 @@ class CheetahJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Cheetah'
- aliases = ['js+cheetah', 'javascript+cheetah',
- 'js+spitfire', 'javascript+spitfire']
+ aliases = ['javascript+cheetah', 'js+cheetah',
+ 'javascript+spitfire', 'js+spitfire']
mimetypes = ['application/x-javascript+cheetah',
'text/x-javascript+cheetah',
'text/javascript+cheetah',
@@ -1026,7 +1026,7 @@ class XmlErbLexer(DelegatingLexer):
"""
name = 'XML+Ruby'
- aliases = ['xml+erb', 'xml+ruby']
+ aliases = ['xml+ruby', 'xml+erb']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
@@ -1046,7 +1046,7 @@ class CssErbLexer(DelegatingLexer):
"""
name = 'CSS+Ruby'
- aliases = ['css+erb', 'css+ruby']
+ aliases = ['css+ruby', 'css+erb']
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
@@ -1064,7 +1064,7 @@ class JavascriptErbLexer(DelegatingLexer):
"""
name = 'JavaScript+Ruby'
- aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
+ aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
@@ -1147,7 +1147,7 @@ class JavascriptPhpLexer(DelegatingLexer):
"""
name = 'JavaScript+PHP'
- aliases = ['js+php', 'javascript+php']
+ aliases = ['javascript+php', 'js+php']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
@@ -1229,7 +1229,7 @@ class JavascriptSmartyLexer(DelegatingLexer):
"""
name = 'JavaScript+Smarty'
- aliases = ['js+smarty', 'javascript+smarty']
+ aliases = ['javascript+smarty', 'js+smarty']
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
@@ -1311,8 +1311,8 @@ class JavascriptDjangoLexer(DelegatingLexer):
"""
name = 'JavaScript+Django/Jinja'
- aliases = ['js+django', 'javascript+django',
- 'js+jinja', 'javascript+jinja']
+ aliases = ['javascript+django', 'js+django',
+ 'javascript+jinja', 'js+jinja']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
@@ -1750,7 +1750,7 @@ class LassoJavascriptLexer(DelegatingLexer):
"""
name = 'JavaScript+Lasso'
- aliases = ['js+lasso', 'javascript+lasso']
+ aliases = ['javascript+lasso', 'js+lasso']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
diff --git a/vendor/pygments-main/pygments/lexers/teraterm.py b/vendor/pygments-main/pygments/lexers/teraterm.py
index d09c66f6..feb552d3 100644
--- a/vendor/pygments-main/pygments/lexers/teraterm.py
+++ b/vendor/pygments-main/pygments/lexers/teraterm.py
@@ -24,7 +24,7 @@ class TeraTermLexer(RegexLexer):
.. versionadded:: 2.4
"""
name = 'Tera Term macro'
- aliases = ['ttl', 'teraterm', 'teratermmacro']
+ aliases = ['teratermmacro', 'teraterm', 'ttl']
filenames = ['*.ttl']
mimetypes = ['text/x-teratermmacro']
diff --git a/vendor/pygments-main/pygments/lexers/testing.py b/vendor/pygments-main/pygments/lexers/testing.py
index 81c42971..9111a1ce 100644
--- a/vendor/pygments-main/pygments/lexers/testing.py
+++ b/vendor/pygments-main/pygments/lexers/testing.py
@@ -21,7 +21,7 @@ class GherkinLexer(RegexLexer):
.. versionadded:: 1.2
"""
name = 'Gherkin'
- aliases = ['cucumber', 'gherkin']
+ aliases = ['gherkin', 'cucumber']
filenames = ['*.feature']
mimetypes = ['text/x-gherkin']
diff --git a/vendor/pygments-main/pygments/lexers/theorem.py b/vendor/pygments-main/pygments/lexers/theorem.py
index eee11269..ec55a32e 100644
--- a/vendor/pygments-main/pygments/lexers/theorem.py
+++ b/vendor/pygments-main/pygments/lexers/theorem.py
@@ -43,6 +43,7 @@ class CoqLexer(RegexLexer):
'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
+ 'Abort', 'Admitted',
'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
'Universe', 'Polymorphic', 'Monomorphic', 'Context'
@@ -55,7 +56,7 @@ class CoqLexer(RegexLexer):
)
keywords3 = (
# Sorts
- 'Type', 'Prop',
+ 'Type', 'Prop', 'SProp',
)
keywords4 = (
# Tactics
@@ -94,7 +95,8 @@ class CoqLexer(RegexLexer):
'<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
r'/\\', r'\\/', r'\{\|', r'\|\}',
- 'Π', 'λ',
+ # 'Π', 'Σ', # Not defined in the standard library
+ 'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
)
operators = r'[!$%&*+\./:<=>?@^|~-]'
prefix_syms = r'[!?~]'
diff --git a/vendor/pygments-main/pygments/lexers/thingsdb.py b/vendor/pygments-main/pygments/lexers/thingsdb.py
new file mode 100644
index 00000000..ffb1825c
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/thingsdb.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.thingsdb
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the ThingsDB language.
+
+    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Comment, Keyword, Name, Number, String, Text, \
+ Operator, Punctuation, Whitespace
+
+__all__ = ['ThingsDBLexer']
+
+
+class ThingsDBLexer(RegexLexer):
+ """
+ Lexer for the ThingsDB programming language.
+
+ .. versionadded:: 2.9
+ """
+ name = 'ThingsDB'
+ aliases = ['ti', 'thingsdb']
+ filenames = ['*.ti']
+
+ tokens = {
+ 'root': [
+ include('expression'),
+ ],
+ 'expression': [
+ include('comments'),
+ include('whitespace'),
+
+ # numbers
+ (r'[-+]?0b[01]+', Number.Bin),
+ (r'[-+]?0o[0-8]+', Number.Oct),
+ (r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
+ (r'[-+]?[0-9]+', Number.Integer),
+ (r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
+ Number.Float),
+
+ # strings
+ (r'(?:"(?:[^"]*)")+', String.Double),
+ (r"(?:'(?:[^']*)')+", String.Single),
+
+ # literals
+ (r'(true|false|nil)\b', Keyword.Constant),
+
+ # regular expressions
+ (r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
+
+ # thing id's
+ (r'#[0-9]+', Comment.Preproc),
+
+ # name, assignments and functions
+ include('names'),
+
+ (r'[(){}\[\],;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+ ],
+ 'names': [
+ (r'(\.)'
+ r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
+ r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
+ r'splice|startswith|test|unwrap|upper|values|wrap)'
+ r'(\()',
+ bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
+ (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
+ r'bad_data_err|bool|closure|collection_info|collections_info|'
+ r'counters|deep|del_backup|del_collection|del_expired|del_node|'
+ r'del_procedure|del_token|del_type|del_user|err|float|'
+ r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
+ r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
+ r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
+ r'new_backup|new_collection|new_node|new_procedure|new_token|'
+ r'new_type|new_user|node_err|node_info|nodes_info|now|'
+ r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
+ r'procedure_info|procedures_info|raise|refs|rename_collection|'
+ r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
+ r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
+ r'type_err|type_count|type_info|types_info|user_info|users_info|'
+ r'value_err|wse|zero_div_err)'
+ r'(\()',
+ bygroups(Name.Function, Punctuation),
+ 'arguments'),
+ (r'(\.[A-Za-z_][0-9A-Za-z_]*)'
+ r'(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
+ (r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
+ bygroups(Name.Variable, Text, Operator)),
+ (r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ ],
+ 'comments': [
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'arguments': [
+ include('expression'),
+ (',', Punctuation),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
diff --git a/vendor/pygments-main/pygments/lexers/tnt.py b/vendor/pygments-main/pygments/lexers/tnt.py
index 50fa3335..e6e71961 100644
--- a/vendor/pygments-main/pygments/lexers/tnt.py
+++ b/vendor/pygments-main/pygments/lexers/tnt.py
@@ -12,7 +12,7 @@
from pygments.lexer import Lexer
from pygments.token import Text, Comment, Operator, Keyword, Name, Number, \
- Punctuation, Error
+ Punctuation, Error
__all__ = ['TNTLexer']
@@ -66,15 +66,16 @@ def whitespace(self, start, text, required=False):
end += 1
except IndexError:
end = len(text)
- if required:
- assert end != start
+ if required and end == start:
+ raise AssertionError
if end != start:
self.cur.append((start, Text, text[start:end]))
return end
def variable(self, start, text):
"""Tokenize a variable."""
- assert text[start] in self.VARIABLES
+ if text[start] not in self.VARIABLES:
+ raise AssertionError
end = start+1
while text[end] in self.PRIMES:
end += 1
@@ -97,10 +98,12 @@ def term(self, start, text):
if text[start] == '(': # (...+...)
self.cur.append((start, Punctuation, text[start]))
start = self.term(start+1, text)
- assert text[start] in self.OPERATORS
+ if text[start] not in self.OPERATORS:
+ raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
- assert text[start] == ')'
+ if text[start] != ')':
+ raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return start+1
raise AssertionError # no matches
@@ -116,21 +119,25 @@ def formula(self, start, text):
if text[start] in self.QUANTIFIERS: # Aa:<...>
self.cur.append((start, Keyword.Declaration, text[start]))
start = self.variable(start+1, text)
- assert text[start] == ':'
+ if text[start] != ':':
+ raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return self.formula(start+1, text)
if text[start] == '<': # <...&...>
self.cur.append((start, Punctuation, text[start]))
start = self.formula(start+1, text)
- assert text[start] in self.LOGIC
+ if text[start] not in self.LOGIC:
+ raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.formula(start+1, text)
- assert text[start] == '>'
+ if text[start] != '>':
+ raise AssertionError
self.cur.append((start, Punctuation, text[start]))
return start+1
# ...=...
start = self.term(start, text)
- assert text[start] == '='
+ if text[start] != '=':
+ raise AssertionError
self.cur.append((start, Operator, text[start]))
start = self.term(start+1, text)
return start
@@ -138,7 +145,8 @@ def formula(self, start, text):
def rule(self, start, text):
"""Tokenize a rule."""
match = self.RULES.match(text, start)
- assert match is not None
+ if match is None:
+ raise AssertionError
groups = sorted(match.regs[1:]) # exclude whole match
for group in groups:
if group[0] >= 0: # this group matched
@@ -162,8 +170,10 @@ def lineno(self, start, text):
self.cur.append((start+1, Text, text[start+1:end]))
start = end
match = self.LINENOS.match(text, start)
- assert match is not None
- assert text[match.end()] == ')'
+ if match is None:
+ raise AssertionError
+ if text[match.end()] != ')':
+ raise AssertionError
self.cur.append((match.start(), Number.Integer, match.group(0)))
self.cur.append((match.end(), Punctuation, text[match.end()]))
return match.end() + 1
@@ -219,7 +229,7 @@ def get_tokens_unprocessed(self, text):
orig = len(self.cur)
try:
start = end = self.formula(start, text)
- except AssertionError: # not well-formed
+ except (AssertionError, RecursionError): # not well-formed
del self.cur[orig:]
while text[end] not in self.WHITESPACE:
end += 1
@@ -257,6 +267,6 @@ def get_tokens_unprocessed(self, text):
try:
del self.cur[orig:]
except NameError:
- pass # if orig was never defined, fine
+ pass # if orig was never defined, fine
self.error_till_line_end(start, text)
return self.cur
diff --git a/vendor/pygments-main/pygments/lexers/trafficscript.py b/vendor/pygments-main/pygments/lexers/trafficscript.py
index 088258ae..e796a275 100644
--- a/vendor/pygments-main/pygments/lexers/trafficscript.py
+++ b/vendor/pygments-main/pygments/lexers/trafficscript.py
@@ -23,7 +23,7 @@ class RtsLexer(RegexLexer):
.. versionadded:: 2.1
"""
name = 'TrafficScript'
- aliases = ['rts','trafficscript']
+ aliases = ['trafficscript', 'rts']
filenames = ['*.rts']
tokens = {
diff --git a/vendor/pygments-main/pygments/lexers/webassembly.py b/vendor/pygments-main/pygments/lexers/webassembly.py
new file mode 100644
index 00000000..d162a46f
--- /dev/null
+++ b/vendor/pygments-main/pygments/lexers/webassembly.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.webassembly
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the WebAssembly text format.
+
+ The grammar can be found at https://github.com/WebAssembly/spec/blob/master/interpreter/README.md
+ and https://webassembly.github.io/spec/core/text/.
+
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, String, Number, Punctuation, Literal, Error, Name
+
+__all__ = ['WatLexer']
+
+keywords = (
+ 'module', 'import', 'func', 'funcref', 'start', 'param', 'local', 'type',
+ 'result', 'export', 'memory', 'global', 'mut', 'data', 'table', 'elem',
+ 'if', 'then', 'else', 'end', 'block', 'loop'
+)
+
+builtins = (
+ 'unreachable', 'nop', 'block', 'loop', 'if', 'else', 'end', 'br', 'br_if',
+ 'br_table', 'return', 'call', 'call_indirect', 'drop', 'select',
+ 'local.get', 'local.set', 'local.tee', 'global.get', 'global.set',
+ 'i32.load', 'i64.load', 'f32.load', 'f64.load', 'i32.load8_s',
+ 'i32.load8_u', 'i32.load16_s', 'i32.load16_u', 'i64.load8_s',
+ 'i64.load8_u', 'i64.load16_s', 'i64.load16_u', 'i64.load32_s',
+ 'i64.load32_u', 'i32.store', 'i64.store', 'f32.store', 'f64.store',
+ 'i32.store8', 'i32.store16', 'i64.store8', 'i64.store16', 'i64.store32',
+ 'memory.size', 'memory.grow', 'i32.const', 'i64.const', 'f32.const',
+ 'f64.const', 'i32.eqz', 'i32.eq', 'i32.ne', 'i32.lt_s', 'i32.lt_u',
+ 'i32.gt_s', 'i32.gt_u', 'i32.le_s', 'i32.le_u', 'i32.ge_s', 'i32.ge_u',
+ 'i64.eqz', 'i64.eq', 'i64.ne', 'i64.lt_s', 'i64.lt_u', 'i64.gt_s',
+ 'i64.gt_u', 'i64.le_s', 'i64.le_u', 'i64.ge_s', 'i64.ge_u', 'f32.eq',
+ 'f32.ne', 'f32.lt', 'f32.gt', 'f32.le', 'f32.ge', 'f64.eq', 'f64.ne',
+ 'f64.lt', 'f64.gt', 'f64.le', 'f64.ge', 'i32.clz', 'i32.ctz', 'i32.popcnt',
+ 'i32.add', 'i32.sub', 'i32.mul', 'i32.div_s', 'i32.div_u', 'i32.rem_s',
+ 'i32.rem_u', 'i32.and', 'i32.or', 'i32.xor', 'i32.shl', 'i32.shr_s',
+ 'i32.shr_u', 'i32.rotl', 'i32.rotr', 'i64.clz', 'i64.ctz', 'i64.popcnt',
+ 'i64.add', 'i64.sub', 'i64.mul', 'i64.div_s', 'i64.div_u', 'i64.rem_s',
+ 'i64.rem_u', 'i64.and', 'i64.or', 'i64.xor', 'i64.shl', 'i64.shr_s',
+ 'i64.shr_u', 'i64.rotl', 'i64.rotr', 'f32.abs', 'f32.neg', 'f32.ceil',
+ 'f32.floor', 'f32.trunc', 'f32.nearest', 'f32.sqrt', 'f32.add', 'f32.sub',
+ 'f32.mul', 'f32.div', 'f32.min', 'f32.max', 'f32.copysign', 'f64.abs',
+ 'f64.neg', 'f64.ceil', 'f64.floor', 'f64.trunc', 'f64.nearest', 'f64.sqrt',
+ 'f64.add', 'f64.sub', 'f64.mul', 'f64.div', 'f64.min', 'f64.max',
+ 'f64.copysign', 'i32.wrap_i64', 'i32.trunc_f32_s', 'i32.trunc_f32_u',
+ 'i32.trunc_f64_s', 'i32.trunc_f64_u', 'i64.extend_i32_s',
+ 'i64.extend_i32_u', 'i64.trunc_f32_s', 'i64.trunc_f32_u',
+ 'i64.trunc_f64_s', 'i64.trunc_f64_u', 'f32.convert_i32_s',
+ 'f32.convert_i32_u', 'f32.convert_i64_s', 'f32.convert_i64_u',
+ 'f32.demote_f64', 'f64.convert_i32_s', 'f64.convert_i32_u',
+ 'f64.convert_i64_s', 'f64.convert_i64_u', 'f64.promote_f32',
+ 'i32.reinterpret_f32', 'i64.reinterpret_f64', 'f32.reinterpret_i32',
+ 'f64.reinterpret_i64',
+)
+
+
+class WatLexer(RegexLexer):
+    """Lexer for the `WebAssembly text format <https://webassembly.github.io/spec/core/text/>`_.
+
+ .. versionadded:: 2.9
+ """
+
+ name = 'WebAssembly'
+ aliases = ['wast', 'wat']
+ filenames = ['*.wat', '*.wast']
+
+ tokens = {
+ 'root': [
+ (words(keywords, suffix=r'(?=[^a-z_\.])'), Keyword),
+ (words(builtins), Name.Builtin, 'arguments'),
+ (words(['i32', 'i64', 'f32', 'f64']), Keyword.Type),
+            (r'\$[A-Za-z0-9!#$%&\'*+./:<=>?@\\^_`|~-]+', Name.Variable), # yes, all of these are valid in identifiers
+ (r';;.*?$', Comment.Single),
+ (r'\(;', Comment.Multiline, 'nesting_comment'),
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*(.([\dA-Fa-f](_?[\dA-Fa-f])*)?)?([pP][+-]?[\dA-Fa-f](_?[\dA-Fa-f])*)?', Number.Float),
+ (r'[+-]?\d.\d(_?\d)*[eE][+-]?\d(_?\d)*', Number.Float),
+ (r'[+-]?\d.\d(_?\d)*', Number.Float),
+ (r'[+-]?\d.[eE][+-]?\d(_?\d)*', Number.Float),
+ (r'[+-]?(inf|nan:0x[\dA-Fa-f](_?[\dA-Fa-f])*|nan)', Number.Float),
+ (r'[+-]?0x[\dA-Fa-f](_?[\dA-Fa-f])*', Number.Hex),
+ (r'[+-]?\d(_?\d)*', Number.Integer),
+ (r'[\(\)]', Punctuation),
+ (r'"', String.Double, 'string'),
+ (r'\s+', Text),
+ ],
+ 'nesting_comment': [
+ (r'\(;', Comment.Multiline, '#push'),
+ (r';\)', Comment.Multiline, '#pop'),
+ (r'[^;(]+', Comment.Multiline),
+ (r'[;(]', Comment.Multiline),
+ ],
+ 'string': [
+ (r'\\[\dA-Fa-f][\dA-Fa-f]', String.Escape), # must have exactly two hex digits
+ (r'\\t', String.Escape),
+ (r'\\n', String.Escape),
+ (r'\\r', String.Escape),
+ (r'\\"', String.Escape),
+ (r"\\'", String.Escape),
+ (r'\\u\{[\dA-Fa-f](_?[\dA-Fa-f])*\}', String.Escape),
+ (r'\\\\', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ ],
+ 'arguments': [
+ (r'\s+', Text),
+ (r'(offset)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
+ (r'(offset)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
+ (r'(align)(=)(0x[\dA-Fa-f](_?[\dA-Fa-f])*)', bygroups(Keyword, Operator, Number.Hex)),
+ (r'(align)(=)(\d(_?\d)*)', bygroups(Keyword, Operator, Number.Integer)),
+ default('#pop'),
+ ]
+ }
diff --git a/vendor/pygments-main/pygments/regexopt.py b/vendor/pygments-main/pygments/regexopt.py
index bcbe829d..cb2c8e21 100644
--- a/vendor/pygments-main/pygments/regexopt.py
+++ b/vendor/pygments-main/pygments/regexopt.py
@@ -15,7 +15,7 @@
from itertools import groupby
from operator import itemgetter
-CS_ESCAPE = re.compile(r'[\^\\\-\]]')
+CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
FIRST_ELEMENT = itemgetter(0)
diff --git a/vendor/pygments-main/pygments/style.py b/vendor/pygments-main/pygments/style.py
index 3bf2b156..8ed05940 100644
--- a/vendor/pygments-main/pygments/style.py
+++ b/vendor/pygments-main/pygments/style.py
@@ -134,7 +134,7 @@ def style_for_token(cls, token):
color = _ansimap[color]
bgcolor = t[4]
if bgcolor in _deprecated_ansicolors:
- bgcolor = _deprecated_ansicolors[color]
+ bgcolor = _deprecated_ansicolors[bgcolor]
if bgcolor in ansicolors:
bgansicolor = bgcolor
bgcolor = _ansimap[bgcolor]
diff --git a/vendor/pygments-main/pygments/styles/__init__.py b/vendor/pygments-main/pygments/styles/__init__.py
index e0cd1961..50f6cb68 100644
--- a/vendor/pygments-main/pygments/styles/__init__.py
+++ b/vendor/pygments-main/pygments/styles/__init__.py
@@ -52,6 +52,8 @@
'stata-dark': 'stata_dark::StataDarkStyle',
'inkpot': 'inkpot::InkPotStyle',
'zenburn': 'zenburn::ZenburnStyle',
+ 'gruvbox-dark': 'gruvbox::GruvboxDarkStyle',
+ 'gruvbox-light': 'gruvbox::GruvboxLightStyle',
}
diff --git a/vendor/pygments-main/pygments/styles/friendly.py b/vendor/pygments-main/pygments/styles/friendly.py
index 86dd514d..82f24794 100644
--- a/vendor/pygments-main/pygments/styles/friendly.py
+++ b/vendor/pygments-main/pygments/styles/friendly.py
@@ -20,6 +20,7 @@ class FriendlyStyle(Style):
background_color = "#f0f0f0"
default_style = ""
+ line_number_color = "#666666"
styles = {
Whitespace: "#bbbbbb",
diff --git a/vendor/pygments-main/pygments/styles/gruvbox.py b/vendor/pygments-main/pygments/styles/gruvbox.py
new file mode 100644
index 00000000..ddf05ea9
--- /dev/null
+++ b/vendor/pygments-main/pygments/styles/gruvbox.py
@@ -0,0 +1,107 @@
+"""
+ pygments.styles.gruvbox
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of the "gruvbox" vim theme.
+ https://github.com/morhetz/gruvbox
+
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic
+
+
+class GruvboxDarkStyle(Style):
+ """
+ Pygments version of the "gruvbox" dark vim theme.
+ """
+
+ background_color = '#282828'
+ highlight_color = '#ebdbb2'
+
+ styles = {
+ Comment: 'italic #928374',
+ Comment.PreProc: '#8ec07c',
+ Comment.Special: 'bold italic #ebdbb2',
+
+ Keyword: '#fb4934',
+ Operator.Word: '#fb4934',
+
+ String: '#b8bb26',
+ String.Escape: '#fe8019',
+
+ Number: '#d3869b',
+
+ Name.Builtin: '#fe8019',
+ Name.Variable: '#83a598',
+ Name.Constant: '#d3869b',
+ Name.Class: '#8ec07c',
+ Name.Function: '#8ec07c',
+ Name.Namespace: '#8ec07c',
+ Name.Exception: '#fb4934',
+ Name.Tag: '#8ec07c',
+ Name.Attribute: '#fabd2f',
+ Name.Decorator: '#fb4934',
+
+ Generic.Heading: 'bold #ebdbb2',
+ Generic.Subheading: 'underline #ebdbb2',
+ Generic.Deleted: 'bg:#fb4934 #282828',
+ Generic.Inserted: 'bg:#b8bb26 #282828',
+ Generic.Error: '#fb4934',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#a89984',
+ Generic.Output: '#f2e5bc',
+ Generic.Traceback: '#fb4934',
+
+ Error: 'bg:#fb4934 #282828'
+ }
+
+class GruvboxLightStyle(Style):
+ """
+ Pygments version of the "gruvbox" Light vim theme.
+ """
+
+ background_color = '#fbf1c7'
+ highlight_color = '#3c3836'
+
+ styles = {
+ Comment: 'italic #928374',
+ Comment.PreProc: '#427b58',
+ Comment.Special: 'bold italic #3c3836',
+
+ Keyword: '#9d0006',
+ Operator.Word: '#9d0006',
+
+ String: '#79740e',
+ String.Escape: '#af3a03',
+
+ Number: '#8f3f71',
+
+ Name.Builtin: '#af3a03',
+ Name.Variable: '#076678',
+ Name.Constant: '#8f3f71',
+ Name.Class: '#427b58',
+ Name.Function: '#427b58',
+ Name.Namespace: '#427b58',
+ Name.Exception: '#9d0006',
+ Name.Tag: '#427b58',
+ Name.Attribute: '#b57614',
+ Name.Decorator: '#9d0006',
+
+ Generic.Heading: 'bold #3c3836',
+ Generic.Subheading: 'underline #3c3836',
+ Generic.Deleted: 'bg:#9d0006 #fbf1c7',
+ Generic.Inserted: 'bg:#79740e #fbf1c7',
+ Generic.Error: '#9d0006',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#7c6f64',
+ Generic.Output: '#32302f',
+ Generic.Traceback: '#9d0006',
+
+ Error: 'bg:#9d0006 #fbf1c7'
+ }
diff --git a/vendor/pygments-main/pygments/styles/native.py b/vendor/pygments-main/pygments/styles/native.py
index d990389b..9c2f2c16 100644
--- a/vendor/pygments-main/pygments/styles/native.py
+++ b/vendor/pygments-main/pygments/styles/native.py
@@ -20,6 +20,7 @@ class NativeStyle(Style):
background_color = '#202020'
highlight_color = '#404040'
+ line_number_color = '#aaaaaa'
styles = {
Token: '#d0d0d0',
|