Skip to content

Commit

Permalink
Merge pull request #251 from enthought/fix/250-trait-documenter-fails…
Browse files Browse the repository at this point in the history
…-multiline

Fix for issue 250: Trait documenter failure for multiline traits.
  • Loading branch information
mdickinson committed Jul 22, 2015
2 parents 78eabfe + dc97156 commit 48c4b80
Show file tree
Hide file tree
Showing 2 changed files with 90 additions and 10 deletions.
52 changes: 52 additions & 0 deletions traits/util/tests/test_trait_documenter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
""" Tests for the trait documenter. """

import StringIO
import sys
import tokenize

from traits.testing.unittest_tools import unittest


def _sphinx_present():
try:
import sphinx # noqa
except ImportError:
return False

return True


def _python_version_is_32():
return sys.version_info[:2] == (3, 2)


# Skipping for python 3.2 because sphinx does not work on it.
@unittest.skipIf(not _sphinx_present() or _python_version_is_32(),
                 "Sphinx not available. Cannot test documenter")
class TestTraitDocumenter(unittest.TestCase):
    """ Tests for the trait documenter. """

    def setUp(self):
        """ Tokenize a multi-line trait definition for the tests to use. """
        self.source = """
    depth_interval = Property(Tuple(Float, Float),
                              depends_on="_depth_interval")
"""
        # The StringIO module was removed in Python 3; import the class
        # locally so that this test works on both Python 2 and Python 3
        # (the skip decorator above already anticipates running on 3.x).
        try:
            from StringIO import StringIO  # Python 2
        except ImportError:
            from io import StringIO  # Python 3
        string_io = StringIO(self.source)
        tokens = tokenize.generate_tokens(string_io.readline)
        self.tokens = tokens

    def test_get_definition_tokens(self):
        from traits.util.trait_documenter import _get_definition_tokens

        definition_tokens = _get_definition_tokens(self.tokens)

        # Check if they are correctly untokenized. This should not raise.
        string = tokenize.untokenize(definition_tokens)

        self.assertEqual(self.source.rstrip(), string)

# Allow running this test module directly with ``python``.
if __name__ == '__main__':
    unittest.main()

# ## EOF ######################################################################
48 changes: 38 additions & 10 deletions traits/util/trait_documenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class TraitDocumenter(ClassLevelDocumenter):
"""

### ClassLevelDocumenter interface #####################################
# ClassLevelDocumenter interface #####################################

objtype = 'traitattribute'
directivetype = 'attribute'
Expand Down Expand Up @@ -94,7 +94,7 @@ def import_object(self):
msg = ('autodoc can\'t import/find {0} {r1}, it reported error: '
'"{2}", please check your spelling and sys.path')
self.directive.warn(msg.format(self.objtype, str(self.fullname),
err))
err))
self.env.note_reread()
return False

Expand All @@ -108,7 +108,7 @@ def add_directive_header(self, sig):
self.add_line(u' :annotation: = {0}'.format(definition),
'<autodoc>')

### Private Interface #####################################################
# Private Interface #####################################################

def _get_trait_definition(self):
""" Retrieve the Trait attribute definition
Expand All @@ -131,16 +131,44 @@ def _get_trait_definition(self):
name_found = True

# Retrieve the trait definition.
definition_tokens = []
for type, name, start, stop, line in tokens:
if type == token.NEWLINE:
break
item = (type, name, (1, start[1]), (1, stop[1]), line)
definition_tokens.append(item)

definition_tokens = _get_definition_tokens(tokens)
return tokenize.untokenize(definition_tokens).strip()


def _get_definition_tokens(tokens):
""" Given the tokens, extracts the definition tokens.
Parameters
----------
tokens : iterator
An iterator producing tokens.
Returns
-------
A list of tokens for the definition.
"""
# Retrieve the trait definition.
definition_tokens = []
first_line = None

for type, name, start, stop, line_text in tokens:
if first_line is None:
first_line = start[0]

if type == token.NEWLINE:
break

item = (type,
name,
(start[0] - first_line + 1, start[1]),
(stop[0] - first_line + 1, stop[1]),
line_text)

definition_tokens.append(item)

return definition_tokens


def setup(app):
    """ Add the TraitDocumenter in the current sphinx autodoc instance. """
    # Registering the documenter lets autodoc dispatch trait attributes
    # to ``TraitDocumenter`` when building the documentation.
    app.add_autodocumenter(TraitDocumenter)

0 comments on commit 48c4b80

Please sign in to comment.