Skip to content

Commit

Permalink
Merge branch 'master' into catch-up
Browse files Browse the repository at this point in the history
  • Loading branch information
chrisjsewell committed Jan 14, 2021
2 parents 0ba72e6 + 9f826bd commit c4dba05
Show file tree
Hide file tree
Showing 10 changed files with 106 additions and 80 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Change Log

## 0.6.1 - 2021-01-01

This release provides some improvements to the code base:

- 🐛 FIX: Do not resolve backslash escapes inside auto-links
- 🐛 FIX: Add content to image tokens
- 👌 IMPROVE: Add more type annotations, thanks to [@hukkinj1](https://github.com/hukkinj1)

## 0.6.0 - 2020-12-15

🗑 DEPRECATE: Move plugins to `mdit_py_plugins`
Expand Down
2 changes: 1 addition & 1 deletion markdown_it/__init__.py
@@ -1,4 +1,4 @@
from .main import MarkdownIt # noqa: F401


__version__ = "0.6.0"
__version__ = "0.6.1"
13 changes: 7 additions & 6 deletions markdown_it/cli/parse.py
Expand Up @@ -6,6 +6,7 @@
"""
import argparse
import sys
from typing import Iterable, Optional, Sequence

from markdown_it import __version__
from markdown_it.main import MarkdownIt
Expand All @@ -14,7 +15,7 @@
version_str = "markdown-it-py [version {}]".format(__version__)


def main(args=None):
def main(args: Optional[Sequence[str]] = None) -> bool:
namespace = parse_args(args)
if namespace.filenames:
convert(namespace.filenames)
Expand All @@ -23,12 +24,12 @@ def main(args=None):
return True


def convert(filenames):
def convert(filenames: Iterable[str]) -> None:
    """Convert each Markdown file in *filenames*, dumping output to stdout.

    Delegates the per-file work to :func:`convert_file`.
    """
    for path in filenames:
        convert_file(path)


def convert_file(filename):
def convert_file(filename: str) -> None:
"""
Parse a Markdown file and dump the output to stdout.
"""
Expand All @@ -40,7 +41,7 @@ def convert_file(filename):
sys.exit('Cannot open file "{}".'.format(filename))


def interactive():
def interactive() -> None:
"""
Parse user input, dump to stdout, rinse and repeat.
Python REPL style.
Expand All @@ -61,7 +62,7 @@ def interactive():
break


def parse_args(args):
def parse_args(args: Optional[Sequence[str]]) -> argparse.Namespace:
"""Parse input CLI arguments."""
parser = argparse.ArgumentParser(
description="Parse one or more markdown files, "
Expand Down Expand Up @@ -96,7 +97,7 @@ def parse_args(args):
return parser.parse_args(args)


def print_heading():
def print_heading() -> None:
    """Print the interactive-mode banner and the exit instructions."""
    banner = "{} (interactive)".format(version_str)
    print(banner)
    print("Type Ctrl-D to complete input, or Ctrl-C to exit.")

Expand Down
22 changes: 16 additions & 6 deletions markdown_it/main.py
@@ -1,5 +1,15 @@
from contextlib import contextmanager
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Union
from typing import (
Any,
Callable,
Dict,
Generator,
Iterable,
List,
Mapping,
Optional,
Union,
)

from . import helpers, presets # noqa F401
from .common import utils # noqa F401
Expand Down Expand Up @@ -48,10 +58,10 @@ def __init__(

self.linkify = linkify_it.LinkifyIt() if linkify_it else None

def __repr__(self):
def __repr__(self) -> str:
    """Return an unambiguous ``module.ClassName()`` representation."""
    cls = self.__class__
    return f"{cls.__module__}.{cls.__name__}()"

def __getitem__(self, name):
def __getitem__(self, name: str) -> Any:
return {
"inline": self.inline,
"block": self.block,
Expand All @@ -69,7 +79,7 @@ def set(self, options: AttrDict) -> None:
"""
self.options = options

def configure(self, presets: Union[str, Mapping]):
def configure(self, presets: Union[str, Mapping]) -> "MarkdownIt":
"""Batch load of all options and component settings.
This is an internal method, and you probably will not need it.
But if you will - see available presets and data structure
Expand Down Expand Up @@ -177,7 +187,7 @@ def disable(
return self

@contextmanager
def reset_rules(self):
def reset_rules(self) -> Generator[None, None, None]:
"""A context manager, that will reset the current enabled rules on exit."""
chain_rules = self.get_active_rules()
yield
Expand All @@ -186,7 +196,7 @@ def reset_rules(self):
self[chain].ruler.enableOnly(rules)
self.inline.ruler2.enableOnly(chain_rules["inline2"])

def add_render_rule(self, name: str, function: Callable, fmt="html"):
def add_render_rule(self, name: str, function: Callable, fmt: str = "html") -> None:
"""Add a rule for rendering a particular Token type.
Only applied when ``renderer.__output__ == fmt``
Expand Down
36 changes: 18 additions & 18 deletions markdown_it/parser_block.py
@@ -1,6 +1,6 @@
"""Block-level tokenizer."""
import logging
from typing import List, Optional
from typing import List, Optional, Tuple

from .ruler import Ruler
from .token import Token
Expand All @@ -10,24 +10,24 @@
LOGGER = logging.getLogger(__name__)


_rules = [
_rules: List[Tuple] = [
# First 2 params - rule name & source. Secondary array - list of rules,
# which can be terminated by this one.
["table", rules_block.table, ["paragraph", "reference"]],
["code", rules_block.code],
["fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]],
[
("table", rules_block.table, ["paragraph", "reference"]),
("code", rules_block.code),
("fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]),
(
"blockquote",
rules_block.blockquote,
["paragraph", "reference", "blockquote", "list"],
],
["hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]],
["list", rules_block.list_block, ["paragraph", "reference", "blockquote"]],
["reference", rules_block.reference],
["heading", rules_block.heading, ["paragraph", "reference", "blockquote"]],
["lheading", rules_block.lheading],
["html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]],
["paragraph", rules_block.paragraph],
),
("hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]),
("list", rules_block.list_block, ["paragraph", "reference", "blockquote"]),
("reference", rules_block.reference),
("heading", rules_block.heading, ["paragraph", "reference", "blockquote"]),
("lheading", rules_block.lheading),
("html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]),
("paragraph", rules_block.paragraph),
]


Expand All @@ -47,7 +47,7 @@ def __init__(self):

def tokenize(
self, state: StateBlock, startLine: int, endLine: int, silent: bool = False
):
) -> None:
"""Generate tokens for input range."""
rules = self.ruler.getRules("")
line = startLine
Expand Down Expand Up @@ -98,11 +98,11 @@ def parse(
md,
env,
outTokens: List[Token],
ords: Optional[List[int]] = None,
):
ords: Optional[Tuple[int, ...]] = None,
) -> Optional[List[Token]]:
"""Process input string and push block tokens into `outTokens`."""
if not src:
return
return None
state = StateBlock(src, md, env, outTokens, ords)
self.tokenize(state, state.line, state.lineMax)
return state.tokens
20 changes: 10 additions & 10 deletions markdown_it/parser_core.py
Expand Up @@ -4,20 +4,20 @@
* Top-level rules executor. Glues block/inline parsers and does intermediate
* transformations.
"""
from typing import List, Tuple


from .ruler import Ruler
from .ruler import Ruler, RuleFunc
from .rules_core.state_core import StateCore
from .rules_core import normalize, block, inline, replace, smartquotes, linkify


_rules = [
["normalize", normalize],
["block", block],
["inline", inline],
["linkify", linkify],
["replacements", replace],
["smartquotes", smartquotes],
_rules: List[Tuple[str, RuleFunc]] = [
("normalize", normalize),
("block", block),
("inline", inline),
("linkify", linkify),
("replacements", replace),
("smartquotes", smartquotes),
]


Expand All @@ -27,7 +27,7 @@ def __init__(self):
for name, rule in _rules:
self.ruler.push(name, rule)

def process(self, state: StateCore):
def process(self, state: StateCore) -> None:
    """Run every enabled core-chain rule on *state*, in registration order."""
    active_rules = self.ruler.getRules("")
    for apply_rule in active_rules:
        apply_rule(state)
44 changes: 22 additions & 22 deletions markdown_it/parser_inline.py
@@ -1,32 +1,32 @@
"""Tokenizes paragraph content.
"""
from typing import List
from typing import List, Tuple

from .ruler import Ruler
from .ruler import Ruler, RuleFunc
from .token import Token
from .rules_inline.state_inline import StateInline
from . import rules_inline

# Parser rules
_rules = [
["text", rules_inline.text],
["newline", rules_inline.newline],
["escape", rules_inline.escape],
["backticks", rules_inline.backtick],
["strikethrough", rules_inline.strikethrough.tokenize],
["emphasis", rules_inline.emphasis.tokenize],
["link", rules_inline.link],
["image", rules_inline.image],
["autolink", rules_inline.autolink],
["html_inline", rules_inline.html_inline],
["entity", rules_inline.entity],
_rules: List[Tuple[str, RuleFunc]] = [
("text", rules_inline.text),
("newline", rules_inline.newline),
("escape", rules_inline.escape),
("backticks", rules_inline.backtick),
("strikethrough", rules_inline.strikethrough.tokenize),
("emphasis", rules_inline.emphasis.tokenize),
("link", rules_inline.link),
("image", rules_inline.image),
("autolink", rules_inline.autolink),
("html_inline", rules_inline.html_inline),
("entity", rules_inline.entity),
]

_rules2 = [
["balance_pairs", rules_inline.link_pairs],
["strikethrough", rules_inline.strikethrough.postProcess],
["emphasis", rules_inline.emphasis.postProcess],
["text_collapse", rules_inline.text_collapse],
_rules2: List[Tuple[str, RuleFunc]] = [
("balance_pairs", rules_inline.link_pairs),
("strikethrough", rules_inline.strikethrough.postProcess),
("emphasis", rules_inline.emphasis.postProcess),
("text_collapse", rules_inline.text_collapse),
]


Expand All @@ -40,7 +40,7 @@ def __init__(self):
for name, rule2 in _rules2:
self.ruler2.push(name, rule2)

def skipToken(self, state: StateInline):
def skipToken(self, state: StateInline) -> None:
"""Skip single token by running all rules in validation mode;
returns `True` if any rule reported success
"""
Expand Down Expand Up @@ -82,7 +82,7 @@ def skipToken(self, state: StateInline):
state.pos += 1
cache[pos] = state.pos

def tokenize(self, state: StateInline):
def tokenize(self, state: StateInline) -> None:
"""Generate tokens for input range."""
ok = False
rules = self.ruler.getRules("")
Expand Down Expand Up @@ -114,7 +114,7 @@ def tokenize(self, state: StateInline):
if state.pending:
state.pushPending()

def parse(self, src: str, md, env, tokens: List[Token]):
def parse(self, src: str, md, env, tokens: List[Token]) -> List[Token]:
"""Process input string and push inline tokens into `tokens`"""
state = StateInline(src, md, env, tokens)
self.tokenize(state)
Expand Down

0 comments on commit c4dba05

Please sign in to comment.