
Commit ec07b24

pygments: add type annotations for missing base files of pygments

1 parent 6547ec1 commit ec07b24

File tree: 14 files changed, +193 -124 lines

stubs/Pygments/pygments/__init__.pyi

Lines changed: 4 additions & 4 deletions

@@ -13,10 +13,10 @@ __all__ = ["lex", "format", "highlight"]
 
 def lex(code: str, lexer: Lexer) -> Iterator[tuple[_TokenType, str]]: ...
 @overload
-def format(tokens, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
+def format(tokens: Iterator[tuple[_TokenType, str]], formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def format(tokens, formatter: Formatter[_T], outfile: None = None) -> _T: ...
+def format(tokens: Iterator[tuple[_TokenType, str]], formatter: Formatter[_T], outfile: None = None) -> _T: ...
 @overload
-def highlight(code, lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
+def highlight(code: str, lexer: Lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ...
 @overload
-def highlight(code, lexer, formatter: Formatter[_T], outfile: None = None) -> _T: ...
+def highlight(code: str, lexer: Lexer, formatter: Formatter[_T], outfile: None = None) -> _T: ...
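These overloads tie the return type of format and highlight to the outfile argument: with a writable outfile they return None, without one they return the formatted result. A minimal sketch of what a checker can now infer (the lexer and formatter choices are illustrative):

    import sys

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import PythonLexer

    out: str = highlight("print(1)", PythonLexer(), TerminalFormatter())   # no outfile -> _T (str)
    highlight("print(1)", PythonLexer(), TerminalFormatter(), sys.stdout)  # outfile given -> None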
stubs/Pygments/pygments/cmdline.pyi

Lines changed: 5 additions & 3 deletions

@@ -1,8 +1,10 @@
 import argparse
+import sys
+from collections.abc import Sequence
 
-def main_inner(parser, argns): ...
+def main_inner(parser: argparse.ArgumentParser, argns: argparse.Namespace) -> int: ...
 
 class HelpFormatter(argparse.HelpFormatter):
-    def __init__(self, prog, indent_increment: int = 2, max_help_position: int = 16, width=None) -> None: ...
+    def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 16, width: int | None = None) -> None: ...
 
-def main(args=...): ...
+def main(args: Sequence[str] = sys.argv) -> int: ...
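main is the entry point behind the pygmentize script; the annotated sys.argv default makes clear that args includes the program name. A hedged usage sketch (the flags mirror pygmentize's documented -l/-f options, and the input file is illustrative):

    import sys

    from pygments.cmdline import main

    # args[0] is the program name, matching the sys.argv default.
    sys.exit(main(["pygmentize", "-l", "python", "-f", "terminal", "example.py"]))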
stubs/Pygments/pygments/console.pyi

Lines changed: 6 additions & 8 deletions

@@ -1,10 +1,8 @@
-from typing import Any
-
 esc: str
-codes: Any
-dark_colors: Any
-light_colors: Any
+codes: dict[str, str]
+dark_colors: list[str]
+light_colors: list[str]
 
-def reset_color(): ...
-def colorize(color_key, text): ...
-def ansiformat(attr, text): ...
+def reset_color() -> str: ...
+def colorize(color_key: str, text: str) -> str: ...
+def ansiformat(attr: str, text: str) -> str: ...
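With codes typed as dict[str, str], the helpers read as plain string transformers: colorize wraps text in the escape sequence for a named color, and ansiformat additionally understands attribute markup. Roughly:

    from pygments.console import ansiformat, colorize, reset_color

    print(colorize("red", "error"))      # codes["red"] + text + codes["reset"]
    print(ansiformat("*blue*", "note"))  # "*color*" means bold, per the module's docstring
    print(repr(reset_color()))           # the raw reset escape string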

stubs/Pygments/pygments/filter.pyi

Lines changed: 8 additions & 6 deletions

@@ -1,18 +1,20 @@
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Generator, Iterable, Iterator
 from typing import Any
 
 from pygments.lexer import Lexer
 from pygments.token import _TokenType
 
-def apply_filters(stream, filters, lexer=None): ...
-def simplefilter(f): ...
+def apply_filters(
+    stream: Callable[[], Iterator[tuple[_TokenType, str]]], filters: list[Filter], lexer: Lexer | None = None
+) -> Generator[tuple[_TokenType, str], None, tuple[_TokenType, str]]: ...
+def simplefilter(f: Callable[..., Any]) -> type[FunctionFilter]: ...
 
 class Filter:
     options: Any
-    def __init__(self, **options) -> None: ...
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
 
 class FunctionFilter(Filter):
-    function: Any
-    def __init__(self, **options) -> None: ...
+    function: Callable[..., Any] | None = None
+    def __init__(self, **options: Any) -> None: ...
     def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
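The type[FunctionFilter] return type says simplefilter is a decorator that manufactures a Filter subclass from a generator function. A minimal sketch (the filter body itself is illustrative):

    from pygments.filter import simplefilter
    from pygments.lexers import PythonLexer
    from pygments.token import Comment

    @simplefilter
    def drop_comments(self, lexer, stream, options):
        # The four-argument signature is the one simplefilter expects.
        for ttype, value in stream:
            if ttype not in Comment:
                yield ttype, value

    lexer = PythonLexer()
    lexer.add_filter(drop_comments())  # instantiating the generated FunctionFilter subclass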
stubs/Pygments/pygments/formatter.pyi

Lines changed: 18 additions & 13 deletions

@@ -1,22 +1,27 @@
+from _io import _TextIOBase
+from collections.abc import Iterator
 from typing import Any, Generic, TypeVar, overload
 
+from pygments.style import Style
+from pygments.token import _TokenType
+
 _T = TypeVar("_T", str, bytes)
 
 class Formatter(Generic[_T]):
-    name: Any
-    aliases: Any
-    filenames: Any
+    name: str | None = None
+    aliases: list[str]
+    filenames: list[str]
     unicodeoutput: bool
-    style: Any
-    full: Any
-    title: Any
-    encoding: Any
-    options: Any
+    style: type[Style]
+    full: bool
+    title: str
+    encoding: str
+    options: dict[str, Any]
     @overload
-    def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options) -> None: ...
+    def __init__(self: Formatter[str], *, encoding: None = None, outencoding: None = None, **options: Any) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = None, **options) -> None: ...
+    def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = None, **options: Any) -> None: ...
     @overload
-    def __init__(self: Formatter[bytes], *, encoding: None = None, outencoding: str, **options) -> None: ...
-    def get_style_defs(self, arg: str = ""): ...
-    def format(self, tokensource, outfile): ...
+    def __init__(self: Formatter[bytes], *, encoding: None = None, outencoding: str, **options: Any) -> None: ...
+    def get_style_defs(self, arg: str = "") -> str: ...
+    def format(self, tokensource: Iterator[tuple[_TokenType, str]], outfile: _TextIOBase) -> None: ...
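The constrained TypeVar _T = TypeVar("_T", str, bytes) plus the self-typed __init__ overloads encode Pygments' rule that a formatter emits bytes exactly when an encoding (or outencoding) is given. Roughly, with HtmlFormatter standing in for any concrete subclass:

    from pygments.formatters import HtmlFormatter

    text_fmt = HtmlFormatter()                  # first overload: Formatter[str]
    byte_fmt = HtmlFormatter(encoding="utf-8")  # second overload: Formatter[bytes]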

stubs/Pygments/pygments/formatters/img.pyi

Lines changed: 3 additions & 2 deletions

@@ -1,4 +1,4 @@
-from typing import Any, TypeVar
+from typing import Any, NoReturn, TypeVar
 
 from pygments.formatter import Formatter
 
@@ -46,7 +46,8 @@ class ImageFormatter(Formatter[_T]):
     hl_lines: Any
     hl_color: Any
     drawables: Any
-    def get_style_defs(self, arg: str = "") -> None: ...
+    # raises NotImplementedError
+    def get_style_defs(self, arg: str = "") -> NoReturn: ...
     def format(self, tokensource, outfile) -> None: ...
 
 class GifImageFormatter(ImageFormatter[_T]):
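NoReturn is more precise than None here: it tells the checker the call always raises, so code after it is unreachable. A minimal illustration of the pattern the stub now documents:

    from typing import NoReturn

    def get_style_defs(arg: str = "") -> NoReturn:
        # Mirrors the stub: image formatters have no style sheet to emit.
        raise NotImplementedError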

stubs/Pygments/pygments/lexer.pyi

Lines changed: 94 additions & 44 deletions

@@ -1,13 +1,17 @@
+import re
 from _typeshed import Incomplete
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from re import RegexFlag
-from typing import ClassVar
+from typing import Any, ClassVar
 
+from pygments.filter import Filter
 from pygments.token import _TokenType
 from pygments.util import Future
 
+line_re: re.Pattern[str]
+
 class LexerMeta(type):
-    def __new__(cls, name, bases, d): ...
+    def __new__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]): ...
     def analyse_text(self, text: str) -> float: ...  # actually defined in class Lexer
     # ClassVars of Lexer, but same situation as with StyleMeta and Style
     name: str
@@ -19,83 +23,129 @@ class LexerMeta(type):
     url: str | None
 
 class Lexer(metaclass=LexerMeta):
-    options: Incomplete
-    stripnl: Incomplete
-    stripall: Incomplete
-    ensurenl: Incomplete
-    tabsize: Incomplete
-    encoding: Incomplete
-    filters: Incomplete
-    def __init__(self, **options) -> None: ...
-    def add_filter(self, filter_, **options) -> None: ...
+    options: dict[str, Any]
+    stripnl: bool
+    stripall: bool
+    ensurenl: bool
+    tabsize: int
+    encoding: str
+    filters: list[Filter]
+    def __init__(self, **options: Any) -> None: ...
+    def add_filter(self, filter_: Filter, **options: Any) -> None: ...
     def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class DelegatingLexer(Lexer):
-    root_lexer: Incomplete
-    language_lexer: Incomplete
+    root_lexer: Lexer
+    language_lexer: Lexer
     needle: Incomplete
-    def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
+    def __init__(
+        self, _root_lexer: type[Lexer], _language_lexer: type[Lexer], _needle: _TokenType = ..., **options: Any
+    ) -> None: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class include(str): ...
 class _inherit: ...
 
-inherit: Incomplete
+inherit: _inherit
 
-class combined(tuple[Incomplete, ...]):
-    def __new__(cls, *args): ...
-    def __init__(self, *args) -> None: ...
+class combined(tuple[str, ...]):
+    def __new__(cls, *args: str): ...
+    def __init__(self, *args: str) -> None: ...
 
 class _PseudoMatch:
-    def __init__(self, start, text) -> None: ...
-    def start(self, arg=None): ...
-    def end(self, arg=None): ...
-    def group(self, arg=None): ...
-    def groups(self): ...
-    def groupdict(self): ...
+    def __init__(self, start: int, text: str) -> None: ...
+    def start(self, arg=None) -> int: ...
+    def end(self, arg=None) -> int: ...
+    def group(self, arg=None) -> str: ...
+    def groups(self) -> tuple[str]: ...
+    def groupdict(self) -> dict[str, Any]: ...
 
-def bygroups(*args): ...
+def bygroups(
+    *args: _TokenType | Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class _This: ...
 
-this: Incomplete
+this: _This
 
-def using(_other, **kwargs): ...
+def using(
    _other: _This | Lexer, **kwargs: Any
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class default:
-    state: Incomplete
-    def __init__(self, state) -> None: ...
+    state: str
+    def __init__(self, state: str) -> None: ...
 
 class words(Future):
-    words: Incomplete
-    prefix: Incomplete
-    suffix: Incomplete
-    def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
-    def get(self): ...
+    words: Sequence[str]
+    prefix: str
+    suffix: str
+    def __init__(self, words: Sequence[str], prefix: str = "", suffix: str = "") -> None: ...
+    def get(self) -> str: ...
 
 class RegexLexerMeta(LexerMeta):
-    def process_tokendef(cls, name, tokendefs=None): ...
-    def get_tokendefs(cls): ...
-    def __call__(cls, *args, **kwds): ...
+    def process_tokendef(
+        cls,
+        name: str,
+        tokendefs: (
+            dict[
+                str,
+                list[
+                    tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                    | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+                ],
+            ]
+            | None
+        ) = None,
+    ): ...
+    def get_tokendefs(
+        cls,
+    ) -> dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ]: ...
+    def __call__(cls, *args: Any, **kwds: Any) -> Any: ...
 
 class RegexLexer(Lexer, metaclass=RegexLexerMeta):
     flags: ClassVar[RegexFlag]
-    tokens: ClassVar[dict[str, list[Incomplete]]]
+    tokens: ClassVar[
+        dict[
+            str,
+            list[
+                tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+            ],
+        ]
+    ]
     def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class LexerContext:
-    text: Incomplete
-    pos: Incomplete
-    end: Incomplete
-    stack: Incomplete
-    def __init__(self, text, pos, stack=None, end=None) -> None: ...
+    text: str
+    pos: int
+    end: int
+    stack: list[str]
+    def __init__(self, text: str, pos: int, stack: list[str] | None = None, end: int | None = None) -> None: ...
 
 class ExtendedRegexLexer(RegexLexer):
     def get_tokens_unprocessed(  # type: ignore[override]
         self, text: str | None = None, context: LexerContext | None = None
     ) -> Iterator[tuple[int, _TokenType, str]]: ...
 
+def do_insertions(
+    insertions: list[tuple[int, list[tuple[int, _TokenType, str]]]],
+    tokens: dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ],
+) -> Iterator[tuple[int, _TokenType, str]]: ...
+
 class ProfilingRegexLexerMeta(RegexLexerMeta): ...
 
 class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
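The expanded tokens type describes the (regex, action) and (regex, action, new-state) rule tuples a RegexLexer carries. A small lexer that fits that shape (the language it tokenizes is made up):

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Keyword, Name, Text, Whitespace

    class ToyLexer(RegexLexer):
        name = "Toy"
        tokens = {
            "root": [
                (r"(def)(\s+)(\w+)", bygroups(Keyword, Whitespace, Name.Function)),  # 2-tuple rule
                (r"#", Text, "comment"),  # 3-tuple rule: the third element pushes a state
                (r".|\n", Text),
            ],
            "comment": [(r"[^\n]*\n?", Text, "#pop")],
        }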
stubs/Pygments/pygments/modeline.pyi

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-def get_filetype_from_buffer(buf, max_lines: int = 5): ...
+def get_filetype_from_buffer(buf: str, max_lines: int = 5) -> str: ...
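get_filetype_from_buffer scans the last max_lines lines of a buffer for a Vim modeline and returns the declared filetype. A quick sketch (the sample output is an assumption about the modeline regex):

    from pygments.modeline import get_filetype_from_buffer

    buf = "#!/usr/bin/env python\nprint('hi')\n# vim: set ft=python:\n"
    print(get_filetype_from_buffer(buf))  # expected: "python"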
stubs/Pygments/pygments/regexopt.pyi

Lines changed: 8 additions & 6 deletions

@@ -1,8 +1,10 @@
-from typing import Any
+import re
+from collections.abc import Sequence
+from operator import itemgetter
 
-CS_ESCAPE: Any
-FIRST_ELEMENT: Any
+CS_ESCAPE: re.Pattern[str]
+FIRST_ELEMENT: itemgetter[int]
 
-def make_charset(letters): ...
-def regex_opt_inner(strings, open_paren): ...
-def regex_opt(strings, prefix: str = "", suffix: str = ""): ...
+def make_charset(letters: Sequence[str]) -> str: ...
+def regex_opt_inner(strings: Sequence[str], open_paren: str) -> str: ...
+def regex_opt(strings: Sequence[str], prefix: str = "", suffix: str = "") -> str: ...
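regex_opt returns a single grouped pattern string that matches any of the input strings, factoring out common prefixes; the exact pattern text is an implementation detail. For example:

    import re

    from pygments.regexopt import regex_opt

    pattern = regex_opt(["if", "in", "import"], prefix=r"\b", suffix=r"\b")
    assert re.match(pattern, "import")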
stubs/Pygments/pygments/scanner.pyi

Lines changed: 11 additions & 11 deletions

@@ -1,19 +1,19 @@
-from typing import Any
+import re
 
 class EndOfText(RuntimeError): ...
 
 class Scanner:
-    data: Any
-    data_length: Any
+    data: str
+    data_length: int
     start_pos: int
     pos: int
-    flags: Any
-    last: Any
-    match: Any
-    def __init__(self, text, flags: int = 0) -> None: ...
+    flags: int
+    last: str
+    match: str
+    def __init__(self, text: str, flags: int = 0) -> None: ...
     @property
-    def eos(self): ...
-    def check(self, pattern): ...
-    def test(self, pattern): ...
-    def scan(self, pattern): ...
+    def eos(self) -> bool: ...
+    def check(self, pattern: str) -> re.Match[str] | None: ...
+    def test(self, pattern: str) -> bool: ...
+    def scan(self, pattern: str) -> bool: ...
     def get_char(self) -> None: ...
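The annotations spell out Scanner's cursor-over-string protocol: check peeks and returns a match object, while scan consumes input and stashes the matched text in match (hence the str annotation). Roughly:

    from pygments.scanner import Scanner

    s = Scanner("123 abc")
    if s.scan(r"\d+"):          # consumes "123", advancing pos
        print(s.match)          # -> "123"
    print(s.check(r"\s+"))      # peeks without moving pos: re.Match or None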
