all document transformations now use --preserve-tabs (#18)
gpoore committed Jun 17, 2020
1 parent c5cf9d4 commit e36893c
Showing 3 changed files with 19 additions and 7 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,9 @@
Markdown. Code output in raw format (interpreted as Markdown) is no longer
lost when converting to document formats other than Markdown (#26).
* Added support for SageMath (#5).
* All document transformations now use `--preserve-tabs`, so code indentation
is maintained without change and tabs no longer cause errors in syncing code
to input line numbers (#18).
* Added support for remaining unsupported Pandoc command-line options,
including `--defaults` (#14).
* Julia now uses `--project=@.` (#10).
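For context, Pandoc by default converts tabs to spaces (with a tab stop of 4) while reading its input, which rewrites tab-indented code and shifts the character positions Codebraid depends on when syncing output back to input line numbers. The following is a minimal sketch, not Codebraid code, illustrating the round-trip difference that `--preserve-tabs` makes; it assumes a `pandoc` executable on `PATH`, and the helper name `roundtrip` is hypothetical.

```python
# Minimal illustration (not part of Codebraid) of Pandoc's tab handling.
# Without --preserve-tabs, Pandoc expands tabs to spaces before parsing,
# so tab-indented code comes back altered; with the flag, tabs survive.
import subprocess

TAB_INDENTED_DOC = "~~~\ndef f():\n\treturn 1\n~~~\n"

def roundtrip(preserve_tabs: bool) -> str:
    cmd = ["pandoc", "--from", "markdown", "--to", "markdown"]
    if preserve_tabs:
        cmd.append("--preserve-tabs")
    proc = subprocess.run(cmd, input=TAB_INDENTED_DOC.encode("utf8"),
                          capture_output=True, check=True)
    return proc.stdout.decode("utf8")

print("\t" in roundtrip(preserve_tabs=False))  # expected: False (tab became spaces)
print("\t" in roundtrip(preserve_tabs=True))   # expected: True  (tab preserved)
```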
21 changes: 15 additions & 6 deletions codebraid/converters/pandoc.py
@@ -654,6 +654,7 @@ def _run_pandoc(self, *,
standalone: bool=False,
trace: bool=False,
newline_lf: bool=False,
preserve_tabs: bool=False,
other_pandoc_args: Optional[List[str]]=None):
'''
Convert between formats using Pandoc.
@@ -678,6 +679,8 @@
cmd_list.append('--trace')
if file_scope:
cmd_list.append('--file-scope')
if preserve_tabs:
cmd_list.append('--preserve-tabs')
if output_path:
cmd_list.extend(['--output', output_path.as_posix()])
if other_pandoc_args:
@@ -902,7 +905,8 @@ def _load_and_process_initial_ast(self, *,
from_format_pandoc_extensions=from_format_pandoc_extensions,
to_format='json',
trace=True,
newline_lf=True)
newline_lf=True,
preserve_tabs=True)
try:
if sys.version_info < (3, 6):
ast = json.loads(stdout_bytes.decode('utf8'))
@@ -1187,7 +1191,8 @@ def _postprocess_code_chunks(self):
to_format='markdown',
to_format_pandoc_extensions=processed_to_format_extensions,
standalone=True,
newline_lf=True)
newline_lf=True,
preserve_tabs=True)
if stderr_bytes:
sys.stderr.buffer.write(stderr_bytes)
processed_markup[source_name] = markup_bytes
@@ -1198,7 +1203,8 @@
from_format='markdown',
from_format_pandoc_extensions=self.from_format_pandoc_extensions,
to_format='json',
newline_lf=True)
newline_lf=True,
preserve_tabs=True)
if stderr_bytes:
sys.stderr.buffer.write(stderr_bytes)
else:
@@ -1214,7 +1220,8 @@
from_format='markdown',
from_format_pandoc_extensions=self.from_format_pandoc_extensions,
to_format='json',
newline_lf=True)
newline_lf=True,
preserve_tabs=True)
if stderr_bytes:
sys.stderr.buffer.write(stderr_bytes)
if sys.version_info < (3, 6):
@@ -1276,7 +1283,8 @@ def convert(self, *, to_format, output_path=None, overwrite=False,
to_format_pandoc_extensions=to_format_pandoc_extensions,
standalone=standalone,
output_path=output_path,
other_pandoc_args=other_pandoc_args)
other_pandoc_args=other_pandoc_args,
preserve_tabs=True)
if stderr_bytes:
sys.stderr.buffer.write(stderr_bytes)
if output_path is None:
@@ -1290,7 +1298,8 @@
to_format_pandoc_extensions=to_format_pandoc_extensions,
standalone=standalone,
newline_lf=True,
other_pandoc_args=other_pandoc_args)
other_pandoc_args=other_pandoc_args,
preserve_tabs=True)
if stderr_bytes:
sys.stderr.buffer.write(stderr_bytes)
converted_lines = util.splitlines_lf(converted_bytes.decode(encoding='utf8')) or ['']
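The pandoc.py changes above all follow one pattern: `_run_pandoc` gains a keyword-only `preserve_tabs` flag that appends `--preserve-tabs` to the Pandoc command line, and every internal transformation (loading the initial AST, the postprocessing round trips, and the final conversion in `convert`) now passes `preserve_tabs=True`. The sketch below condenses that pattern with simplified, hypothetical names; it is not the actual Codebraid implementation.

```python
# Condensed sketch of the pattern in pandoc.py; names are simplified and
# hypothetical, not the real Codebraid API.
import subprocess
from typing import List, Optional

def run_pandoc(*, from_format: str, to_format: str,
               preserve_tabs: bool = False,
               other_pandoc_args: Optional[List[str]] = None,
               doc: bytes = b"") -> bytes:
    cmd_list = ["pandoc", "--from", from_format, "--to", to_format]
    if preserve_tabs:
        # Keep tabs exactly as written instead of expanding them to spaces,
        # so code indentation and source offsets survive the conversion.
        cmd_list.append("--preserve-tabs")
    if other_pandoc_args:
        cmd_list.extend(other_pandoc_args)
    proc = subprocess.run(cmd_list, input=doc, capture_output=True, check=True)
    return proc.stdout

# Each internal transformation now opts in, mirroring the call sites above:
ast_json = run_pandoc(from_format="markdown", to_format="json",
                      preserve_tabs=True,
                      doc=b"~~~\ndef f():\n\treturn 1\n~~~\n")
```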
2 changes: 1 addition & 1 deletion codebraid/version.py
@@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-

from .fmtversion import get_version_plus_info
__version__, __version_info__ = get_version_plus_info(0, 5, 0, 'dev', 5)
__version__, __version_info__ = get_version_plus_info(0, 5, 0, 'dev', 6)
