
Commit 4f1346c

👌 IMPROVE: Add more typing (#116)
1 parent: e303bd4

7 files changed (+87, -73 lines)


markdown_it/cli/parse.py

Lines changed: 7 additions & 6 deletions
@@ -6,6 +6,7 @@
 """
 import argparse
 import sys
+from typing import Iterable, Optional, Sequence

 from markdown_it import __version__
 from markdown_it.main import MarkdownIt
@@ -14,7 +15,7 @@
 version_str = "markdown-it-py [version {}]".format(__version__)


-def main(args=None):
+def main(args: Optional[Sequence[str]] = None) -> bool:
     namespace = parse_args(args)
     if namespace.filenames:
         convert(namespace.filenames)
@@ -23,12 +24,12 @@ def main(args=None):
     return True


-def convert(filenames):
+def convert(filenames: Iterable[str]) -> None:
     for filename in filenames:
         convert_file(filename)


-def convert_file(filename):
+def convert_file(filename: str) -> None:
     """
     Parse a Markdown file and dump the output to stdout.
     """
@@ -40,7 +41,7 @@ def convert_file(filename):
         sys.exit('Cannot open file "{}".'.format(filename))


-def interactive():
+def interactive() -> None:
     """
     Parse user input, dump to stdout, rinse and repeat.
     Python REPL style.
@@ -61,7 +62,7 @@ def interactive():
            break


-def parse_args(args):
+def parse_args(args: Optional[Sequence[str]]) -> argparse.Namespace:
     """Parse input CLI arguments."""
     parser = argparse.ArgumentParser(
         description="Parse one or more markdown files, "
@@ -96,7 +97,7 @@ def parse_args(args):
     return parser.parse_args(args)


-def print_heading():
+def print_heading() -> None:
     print("{} (interactive)".format(version_str))
     print("Type Ctrl-D to complete input, or Ctrl-C to exit.")
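With the entry point annotated, the CLI is straightforward to exercise from tests or other Python code. A minimal usage sketch, assuming a hypothetical file path (not part of the commit):

from markdown_it.cli.parse import main, parse_args

# Pass an explicit argument list, as Optional[Sequence[str]] suggests;
# with args=None, parse_args falls back to sys.argv.
namespace = parse_args(["some_file.md"])
ok = main(["some_file.md"])  # returns a bool per the new annotation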

markdown_it/main.py

Lines changed: 16 additions & 6 deletions
@@ -1,5 +1,15 @@
 from contextlib import contextmanager
-from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Union
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generator,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Union,
+)

 from . import helpers, presets  # noqa F401
 from .common import utils  # noqa F401
@@ -48,10 +58,10 @@ def __init__(

        self.linkify = linkify_it.LinkifyIt() if linkify_it else None

-    def __repr__(self):
+    def __repr__(self) -> str:
        return f"{self.__class__.__module__}.{self.__class__.__name__}()"

-    def __getitem__(self, name):
+    def __getitem__(self, name: str) -> Any:
        return {
            "inline": self.inline,
            "block": self.block,
@@ -69,7 +79,7 @@ def set(self, options: AttrDict) -> None:
        """
        self.options = options

-    def configure(self, presets: Union[str, Mapping]):
+    def configure(self, presets: Union[str, Mapping]) -> "MarkdownIt":
        """Batch load of all options and component settings.
        This is an internal method, and you probably will not need it.
        But if you will - see available presets and data structure
@@ -177,7 +187,7 @@ def disable(
        return self

    @contextmanager
-    def reset_rules(self):
+    def reset_rules(self) -> Generator[None, None, None]:
        """A context manager, that will reset the current enabled rules on exit."""
        chain_rules = self.get_active_rules()
        yield
@@ -186,7 +196,7 @@ def reset_rules(self):
                self[chain].ruler.enableOnly(rules)
        self.inline.ruler2.enableOnly(chain_rules["inline2"])

-    def add_render_rule(self, name: str, function: Callable, fmt="html"):
+    def add_render_rule(self, name: str, function: Callable, fmt: str = "html") -> None:
        """Add a rule for rendering a particular Token type.

        Only applied when ``renderer.__output__ == fmt``
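A minimal sketch of how the annotated methods read from the caller's side; the preset and rule names are only illustrative, assuming the public API the project documents:

from markdown_it import MarkdownIt

md = MarkdownIt("commonmark")   # configure() returning "MarkdownIt" keeps chained calls typed
with md.reset_rules():          # annotated as Generator[None, None, None] for @contextmanager
    md.enable("table")
    html = md.render("# heading")
# whatever rules were active before the block are restored on exit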

markdown_it/parser_block.py

Lines changed: 16 additions & 16 deletions
@@ -10,24 +10,24 @@
 LOGGER = logging.getLogger(__name__)


-_rules = [
+_rules: List[Tuple] = [
     # First 2 params - rule name & source. Secondary array - list of rules,
     # which can be terminated by this one.
-    ["table", rules_block.table, ["paragraph", "reference"]],
-    ["code", rules_block.code],
-    ["fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]],
-    [
+    ("table", rules_block.table, ["paragraph", "reference"]),
+    ("code", rules_block.code),
+    ("fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]),
+    (
         "blockquote",
         rules_block.blockquote,
         ["paragraph", "reference", "blockquote", "list"],
-    ],
-    ["hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]],
-    ["list", rules_block.list_block, ["paragraph", "reference", "blockquote"]],
-    ["reference", rules_block.reference],
-    ["heading", rules_block.heading, ["paragraph", "reference", "blockquote"]],
-    ["lheading", rules_block.lheading],
-    ["html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]],
-    ["paragraph", rules_block.paragraph],
+    ),
+    ("hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]),
+    ("list", rules_block.list_block, ["paragraph", "reference", "blockquote"]),
+    ("reference", rules_block.reference),
+    ("heading", rules_block.heading, ["paragraph", "reference", "blockquote"]),
+    ("lheading", rules_block.lheading),
+    ("html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]),
+    ("paragraph", rules_block.paragraph),
 ]


@@ -47,7 +47,7 @@ def __init__(self):

    def tokenize(
        self, state: StateBlock, startLine: int, endLine: int, silent: bool = False
-    ):
+    ) -> None:
        """Generate tokens for input range."""
        rules = self.ruler.getRules("")
        line = startLine
@@ -99,10 +99,10 @@ def parse(
        env,
        outTokens: List[Token],
        ords: Optional[Tuple[int, ...]] = None,
-    ):
+    ) -> Optional[List[Token]]:
        """Process input string and push block tokens into `outTokens`."""
        if not src:
-            return
+            return None
        state = StateBlock(src, md, env, outTokens, ords)
        self.tokenize(state, state.line, state.lineMax)
        return state.tokens
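The annotation is left as a bare List[Tuple], presumably because the entries have mixed arity (two or three items). A tighter alternative is sketched below purely as an illustration; BlockRule and the local RuleFunc stand-in are hypothetical and not something this commit introduces:

from typing import Callable, List, Tuple, Union

RuleFunc = Callable  # stand-in for the block-rule callables
BlockRule = Union[
    Tuple[str, RuleFunc],             # (name, rule)
    Tuple[str, RuleFunc, List[str]],  # (name, rule, rules it can terminate)
]
_rules: List[BlockRule] = []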

markdown_it/parser_core.py

Lines changed: 10 additions & 10 deletions
@@ -4,20 +4,20 @@
 * Top-level rules executor. Glues block/inline parsers and does intermediate
 * transformations.
 """
+from typing import List, Tuple

-
-from .ruler import Ruler
+from .ruler import Ruler, RuleFunc
 from .rules_core.state_core import StateCore
 from .rules_core import normalize, block, inline, replace, smartquotes, linkify


-_rules = [
-    ["normalize", normalize],
-    ["block", block],
-    ["inline", inline],
-    ["linkify", linkify],
-    ["replacements", replace],
-    ["smartquotes", smartquotes],
+_rules: List[Tuple[str, RuleFunc]] = [
+    ("normalize", normalize),
+    ("block", block),
+    ("inline", inline),
+    ("linkify", linkify),
+    ("replacements", replace),
+    ("smartquotes", smartquotes),
 ]


@@ -27,7 +27,7 @@ def __init__(self):
        for name, rule in _rules:
            self.ruler.push(name, rule)

-    def process(self, state: StateCore):
+    def process(self, state: StateCore) -> None:
        """Executes core chain rules."""
        for rule in self.ruler.getRules(""):
            rule(state)
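RuleFunc comes from markdown_it/ruler.py, whose definition is not part of this diff; judging by how the rules are invoked, it is presumably a plain callable alias, roughly:

from typing import Callable

# Assumed shape of the alias exported by markdown_it.ruler; each core rule
# is a callable invoked as rule(state), so a bare Callable is sufficient here.
RuleFunc = Callable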

markdown_it/parser_inline.py

Lines changed: 22 additions & 22 deletions
@@ -1,32 +1,32 @@
 """Tokenizes paragraph content.
 """
-from typing import List
+from typing import List, Tuple

-from .ruler import Ruler
+from .ruler import Ruler, RuleFunc
 from .token import Token
 from .rules_inline.state_inline import StateInline
 from . import rules_inline

 # Parser rules
-_rules = [
-    ["text", rules_inline.text],
-    ["newline", rules_inline.newline],
-    ["escape", rules_inline.escape],
-    ["backticks", rules_inline.backtick],
-    ["strikethrough", rules_inline.strikethrough.tokenize],
-    ["emphasis", rules_inline.emphasis.tokenize],
-    ["link", rules_inline.link],
-    ["image", rules_inline.image],
-    ["autolink", rules_inline.autolink],
-    ["html_inline", rules_inline.html_inline],
-    ["entity", rules_inline.entity],
+_rules: List[Tuple[str, RuleFunc]] = [
+    ("text", rules_inline.text),
+    ("newline", rules_inline.newline),
+    ("escape", rules_inline.escape),
+    ("backticks", rules_inline.backtick),
+    ("strikethrough", rules_inline.strikethrough.tokenize),
+    ("emphasis", rules_inline.emphasis.tokenize),
+    ("link", rules_inline.link),
+    ("image", rules_inline.image),
+    ("autolink", rules_inline.autolink),
+    ("html_inline", rules_inline.html_inline),
+    ("entity", rules_inline.entity),
 ]

-_rules2 = [
-    ["balance_pairs", rules_inline.link_pairs],
-    ["strikethrough", rules_inline.strikethrough.postProcess],
-    ["emphasis", rules_inline.emphasis.postProcess],
-    ["text_collapse", rules_inline.text_collapse],
+_rules2: List[Tuple[str, RuleFunc]] = [
+    ("balance_pairs", rules_inline.link_pairs),
+    ("strikethrough", rules_inline.strikethrough.postProcess),
+    ("emphasis", rules_inline.emphasis.postProcess),
+    ("text_collapse", rules_inline.text_collapse),
 ]


@@ -40,7 +40,7 @@ def __init__(self):
        for name, rule2 in _rules2:
            self.ruler2.push(name, rule2)

-    def skipToken(self, state: StateInline):
+    def skipToken(self, state: StateInline) -> None:
        """Skip single token by running all rules in validation mode;
        returns `True` if any rule reported success
        """
@@ -82,7 +82,7 @@ def skipToken(self, state: StateInline):
            state.pos += 1
        cache[pos] = state.pos

-    def tokenize(self, state: StateInline):
+    def tokenize(self, state: StateInline) -> None:
        """Generate tokens for input range."""
        ok = False
        rules = self.ruler.getRules("")
@@ -114,7 +114,7 @@ def tokenize(self, state: StateInline):
        if state.pending:
            state.pushPending()

-    def parse(self, src: str, md, env, tokens: List[Token]):
+    def parse(self, src: str, md, env, tokens: List[Token]) -> List[Token]:
        """Process input string and push inline tokens into `tokens`"""
        state = StateInline(src, md, env, tokens)
        self.tokenize(state)
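The inline rules annotated here with RuleFunc all follow the same calling convention used across markdown-it ports. A hedged sketch of a do-nothing rule that would fit one of these slots; the (state, silent) -> bool signature is assumed from that convention and is not spelled out in this diff:

from markdown_it.rules_inline.state_inline import StateInline

def noop_rule(state: StateInline, silent: bool) -> bool:
    # Report "no match" so the next rule in the chain gets a chance to run.
    return False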

markdown_it/renderer.py

Lines changed: 15 additions & 12 deletions
@@ -6,7 +6,7 @@ class Renderer
 rules if you create plugin and adds new token types.
 """
 import inspect
-from typing import Sequence
+from typing import Optional, Sequence

 from .common.utils import unescapeAll, escapeHtml
 from .token import Token
@@ -151,7 +151,7 @@ def renderToken(
        return result

    @staticmethod
-    def renderAttrs(token):
+    def renderAttrs(token: Token) -> str:
        """Render token attributes to string."""
        if not token.attrs:
            return ""
@@ -169,7 +169,9 @@ def renderAttrs(token):

        return result

-    def renderInlineAsText(self, tokens: Sequence[Token], options, env) -> str:
+    def renderInlineAsText(
+        self, tokens: Optional[Sequence[Token]], options, env
+    ) -> str:
        """Special kludge for image `alt` attributes to conform CommonMark spec.

        Don't try to use it! Spec requires to show `alt` content with stripped markup,
@@ -192,7 +194,7 @@ def renderInlineAsText(self, tokens: Sequence[Token], options, env) -> str:

    ###################################################

-    def code_inline(self, tokens: Sequence[Token], idx, options, env):
+    def code_inline(self, tokens: Sequence[Token], idx: int, options, env) -> str:
        token = tokens[idx]
        return (
            "<code"
@@ -202,7 +204,7 @@ def code_inline(self, tokens: Sequence[Token], idx, options, env):
            + "</code>"
        )

-    def code_block(self, tokens: Sequence[Token], idx, options, env):
+    def code_block(self, tokens: Sequence[Token], idx: int, options, env) -> str:
        token = tokens[idx]

        return (
@@ -213,7 +215,7 @@ def code_block(self, tokens: Sequence[Token], idx, options, env):
            + "</code></pre>\n"
        )

-    def fence(self, tokens: Sequence[Token], idx, options, env):
+    def fence(self, tokens: Sequence[Token], idx: int, options, env) -> str:
        token = tokens[idx]
        info = unescapeAll(token.info).strip() if token.info else ""
        langName = ""
@@ -262,8 +264,9 @@ def fence(self, tokens: Sequence[Token], idx, options, env):
            + "</code></pre>\n"
        )

-    def image(self, tokens: Sequence[Token], idx, options, env):
+    def image(self, tokens: Sequence[Token], idx: int, options, env) -> str:
        token = tokens[idx]
+        assert token.attrs is not None, '"image" token\'s attrs must not be `None`'

        # "alt" attr MUST be set, even if empty. Because it's mandatory and
        # should be placed on proper position for tests.
@@ -276,19 +279,19 @@ def image(self, tokens: Sequence[Token], idx, options, env):

        return self.renderToken(tokens, idx, options, env)

-    def hardbreak(self, tokens: Sequence[Token], idx, options, *args):
+    def hardbreak(self, tokens: Sequence[Token], idx: int, options, *args) -> str:
        return "<br />\n" if options.xhtmlOut else "<br>\n"

-    def softbreak(self, tokens: Sequence[Token], idx, options, *args):
+    def softbreak(self, tokens: Sequence[Token], idx: int, options, *args) -> str:
        return (
            ("<br />\n" if options.xhtmlOut else "<br>\n") if options.breaks else "\n"
        )

-    def text(self, tokens: Sequence[Token], idx, *args):
+    def text(self, tokens: Sequence[Token], idx: int, *args) -> str:
        return escapeHtml(tokens[idx].content)

-    def html_block(self, tokens: Sequence[Token], idx, *args):
+    def html_block(self, tokens: Sequence[Token], idx: int, *args) -> str:
        return tokens[idx].content

-    def html_inline(self, tokens: Sequence[Token], idx, *args):
+    def html_inline(self, tokens: Sequence[Token], idx: int, *args) -> str:
        return tokens[idx].content
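The added assert in image() is the standard way to narrow an Optional attribute for a type checker before using it. A minimal standalone sketch of the pattern; the Item class is illustrative only and not part of the library:

from typing import Dict, Optional

class Item:
    attrs: Optional[Dict[str, str]] = None

def alt_text(item: Item) -> str:
    assert item.attrs is not None, "attrs must be populated before rendering"
    # After the assert, a type checker treats item.attrs as Dict[str, str].
    return item.attrs.get("alt", "")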

markdown_it/ruler.py

Lines changed: 1 addition & 1 deletion
@@ -84,7 +84,7 @@ def __find__(self, name: str) -> int:
                return i
        return -1

-    def __compile__(self):
+    def __compile__(self) -> None:
        """Build rules lookup cache"""
        chains = {""}
        # collect unique names
