Skip to content

Commit

Permalink
Reorganise location of Pygments lexer. [ci skip]
Browse files Browse the repository at this point in the history
The name has to be src/misc/pygments/custom_lexers so
that Python doesn't get confused with the installed pygments/lexers
when doing an import from sphinx.
  • Loading branch information
skaller committed Nov 26, 2017
1 parent 46155a7 commit f6a432f
Show file tree
Hide file tree
Showing 6 changed files with 58 additions and 263 deletions.
2 changes: 2 additions & 0 deletions doc/tutorial/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,12 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Put the docs' static helpers and the custom lexer package on sys.path.
# NOTE(review): the path '../../src/misc/pygments' makes the package
# 'custom_lexers' importable without colliding with the installed
# 'pygments.lexers' package — presumably why it is not named 'lexers'.
sys.path.insert(0, os.path.abspath('./_static'))
sys.path.insert(0, os.path.abspath('../../src/misc/pygments'))
import custom_lexers.xfelix

from sphinx.highlighting import lexers
# Register the custom lexers so ".. code-block:: xfelix" and
# ".. code-block:: felix" directives highlight with them.
lexers['xfelix']= custom_lexers.xfelix.XFelixLexer()
lexers['felix']= custom_lexers.xfelix.FelixLexer()

# -- General configuration ------------------------------------------------

Expand Down
15 changes: 15 additions & 0 deletions doc/tutorial/hello.rst
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,18 @@ into C++, compiles the program, and runs it. All the generated files
are cached in the .felix/cache subdirectory of your $HOME directory
on Unix like systems, and $USERPROFILE on Windows.

Test felix:

.. code-block:: felix

   var x = \alpha \(x^2\)

Test xfelix:

.. code-block:: xfelix

   var x = \alpha \(x^2\)
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -14,33 +14,11 @@
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Token

__all__ = ['XFelixLexer']
__all__ = ['XFelixLexer','FelixLexer']

XMath = Token.XMath

class XFelixLexer(RegexLexer):
"""
For `Felix <http://felix-lang.org>`_ source code.
.. versionadded:: 1.2
"""

# screws up latex! Keep for reference. Testing HACK only!
def get_tokens_unprocessed(self,text):
for index,token,value in RegexLexer.get_tokens_unprocessed(self,text):
if token == Generic:
value = "\\(" + value + "\\)"
yield (index,token,value)
elif token == XMath:
yield (index,Generic,value)
else:
yield (index,token,value)


name = 'XFelix'
aliases = ['xfelix', 'xflx']
filenames = ['*.flx', '*.flxh']
mimetypes = ['text/x-felix']
class FelixBase(RegexLexer):

keywords = (
'_', '_deref', 'all', 'as','begin','end','while','connector','pin',
Expand Down Expand Up @@ -107,7 +85,7 @@ def get_tokens_unprocessed(self,text):

# Keywords
(words(('axiom', 'ctor', 'chip', 'fun', 'gen', 'proc', 'reduce','regdef',
'var','val','typedef',
'var','val','typedef','device',
'union'), suffix=r'\b'),
Keyword, 'funcname'),
(words(('class', 'cclass', 'cstruct', 'obj', 'struct', 'object'), suffix=r'\b'),
Expand Down Expand Up @@ -234,3 +212,41 @@ def get_tokens_unprocessed(self,text):
include('nl')
],
}

class XFelixLexer(FelixBase):
    """Felix lexer variant for LaTeX-oriented output.

    Post-processes the base tokenization: every ``Generic`` span is
    wrapped in ``\\(`` ... ``\\)`` math delimiters, and ``XMath``
    tokens are re-labelled as ``Generic``.
    """

    name = 'XFelix'
    aliases = ['xfelix', 'xflx']
    filenames = ['*.flx', '*.flxh']
    mimetypes = ['text/x-felix']

    def get_tokens_unprocessed(self, text):
        # Filter the stream produced by the regex machinery in RegexLexer.
        for pos, tok, val in RegexLexer.get_tokens_unprocessed(self, text):
            if tok == Generic:
                # Surround with LaTeX inline-math delimiters.
                yield (pos, tok, "\\(" + val + "\\)")
            elif tok == XMath:
                # Downgrade the private XMath token to Generic.
                yield (pos, Generic, val)
            else:
                yield (pos, tok, val)

class FelixLexer(FelixBase):
    """Plain Felix lexer.

    ``Generic`` tokens pass through untouched; each ``XMath`` token is
    expanded into explicit ``\\(`` / ``\\)`` punctuation around its body
    (the first and last two characters — presumably the delimiters
    themselves — are stripped from the value).
    """

    name = 'Felix'
    aliases = ['felix', 'flx']
    filenames = ['*.flx', '*.flxh']
    mimetypes = ['text/x-felix']

    def get_tokens_unprocessed(self, text):
        for pos, tok, val in RegexLexer.get_tokens_unprocessed(self, text):
            if tok == XMath:
                # Emit delimiters as punctuation, inner text as Generic.
                yield (pos, Punctuation, u"\\")
                yield (pos, Punctuation, u"(")
                yield (pos, Generic, val[2:-2])
                yield (pos, Punctuation, u"\\")
                yield (pos, Punctuation, u")")
            else:
                # Generic and every other token are forwarded unchanged.
                yield (pos, tok, val)


238 changes: 0 additions & 238 deletions src/misc/pygments/lexers/xfelix.py

This file was deleted.

0 comments on commit f6a432f

Please sign in to comment.