+ import re
from _typeshed import Incomplete
- from collections.abc import Iterable, Iterator, Sequence
+ from collections.abc import Callable, Iterable, Iterator, Sequence
from re import RegexFlag
- from typing import ClassVar
+ from typing import Any, ClassVar

+ from pygments.filter import Filter
from pygments.token import _TokenType
from pygments.util import Future

+ line_re: re.Pattern[str]
+
class LexerMeta(type):
-     def __new__(cls, name, bases, d): ...
+     def __new__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]): ...
    def analyse_text(self, text: str) -> float: ...  # actually defined in class Lexer
    # ClassVars of Lexer, but same situation as with StyleMeta and Style
    name: str
@@ -19,83 +23,129 @@ class LexerMeta(type):
    url: str | None

class Lexer(metaclass=LexerMeta):
-     options: Incomplete
-     stripnl: Incomplete
-     stripall: Incomplete
-     ensurenl: Incomplete
-     tabsize: Incomplete
-     encoding: Incomplete
-     filters: Incomplete
-     def __init__(self, **options) -> None: ...
-     def add_filter(self, filter_, **options) -> None: ...
+     options: dict[str, Any]
+     stripnl: bool
+     stripall: bool
+     ensurenl: bool
+     tabsize: int
+     encoding: str
+     filters: list[Filter]
+     def __init__(self, **options: Any) -> None: ...
+     def add_filter(self, filter_: Filter, **options: Any) -> None: ...
    def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
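
# A minimal usage sketch for the two Lexer methods above, assuming standard
# pygments behavior (PythonLexer is just an arbitrary concrete Lexer subclass):
from pygments.lexers import PythonLexer

lexer = PythonLexer(stripall=True)  # **options feed the attributes typed above
for ttype, value in lexer.get_tokens("x = 1\n"):
    print(ttype, repr(value))  # filtered (_TokenType, str) pairs
for index, ttype, value in lexer.get_tokens_unprocessed("x = 1\n"):
    print(index, ttype, repr(value))  # raw triples with source offsets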

class DelegatingLexer(Lexer):
-     root_lexer: Incomplete
-     language_lexer: Incomplete
+     root_lexer: Lexer
+     language_lexer: Lexer
    needle: Incomplete
-     def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
+     def __init__(
+         self, _root_lexer: type[Lexer], _language_lexer: type[Lexer], _needle: _TokenType = ..., **options: Any
+     ) -> None: ...
    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...

class include(str): ...
class _inherit: ...

- inherit: Incomplete
+ inherit: _inherit

- class combined(tuple[Incomplete, ...]):
-     def __new__(cls, *args): ...
-     def __init__(self, *args) -> None: ...
+ class combined(tuple[str, ...]):
+     def __new__(cls, *args: str): ...
+     def __init__(self, *args: str) -> None: ...

class _PseudoMatch:
-     def __init__(self, start, text) -> None: ...
-     def start(self, arg=None): ...
-     def end(self, arg=None): ...
-     def group(self, arg=None): ...
-     def groups(self): ...
-     def groupdict(self): ...
+     def __init__(self, start: int, text: str) -> None: ...
+     def start(self, arg=None) -> int: ...
+     def end(self, arg=None) -> int: ...
+     def group(self, arg=None) -> str: ...
+     def groups(self) -> tuple[str]: ...
+     def groupdict(self) -> dict[str, Any]: ...

- def bygroups(*args): ...
+ def bygroups(
+     *args: _TokenType | Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]
+ ) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...

class _This: ...

- this: Incomplete
+ this: _This

- def using(_other, **kwargs): ...
+ def using(
+     _other: _This | type[Lexer], **kwargs: Any
+ ) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
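
# A sketch of how bygroups and using are wired into a RegexLexer tokens table,
# assuming standard pygments semantics; the lexer and patterns are made up:
from pygments.lexer import RegexLexer, bygroups, this, using
from pygments.token import Keyword, Name, Punctuation, Text, Whitespace

class FuncDefLexer(RegexLexer):
    name = "funcdef-example"
    tokens = {
        "root": [
            # bygroups assigns one token type (or callback) per regex group
            (r"(def)(\s+)([a-zA-Z_]\w*)", bygroups(Keyword, Whitespace, Name.Function)),
            # using(this) re-lexes the backtick contents with this same lexer
            (r"(`)([^`]*)(`)", bygroups(Punctuation, using(this), Punctuation)),
            (r"\s+", Whitespace),
            (r".", Text),  # catch-all so stray characters don't become Error tokens
        ],
    }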

class default:
-     state: Incomplete
-     def __init__(self, state) -> None: ...
+     state: str
+     def __init__(self, state: str) -> None: ...

class words(Future):
-     words: Incomplete
-     prefix: Incomplete
-     suffix: Incomplete
-     def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
-     def get(self): ...
+     words: Sequence[str]
+     prefix: str
+     suffix: str
+     def __init__(self, words: Sequence[str], prefix: str = "", suffix: str = "") -> None: ...
+     def get(self) -> str: ...
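
# A sketch of words(), a Future that expands to one optimized regex built from
# literal strings; suffix=r"\b" is the usual guard against matching inside
# longer identifiers (standard pygments usage; the lexer itself is made up):
from pygments.lexer import RegexLexer, words
from pygments.token import Keyword, Name, Whitespace

class KeywordLexer(RegexLexer):
    name = "keyword-example"
    tokens = {
        "root": [
            (words(("if", "elif", "else", "while"), suffix=r"\b"), Keyword),
            (r"[a-zA-Z_]\w*", Name),
            (r"\s+", Whitespace),
        ],
    }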

class RegexLexerMeta(LexerMeta):
-     def process_tokendef(cls, name, tokendefs=None): ...
-     def get_tokendefs(cls): ...
-     def __call__(cls, *args, **kwds): ...
+     def process_tokendef(
+         cls,
+         name: str,
+         tokendefs: (
+             dict[
+                 str,
+                 list[
+                     tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                     | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+                 ],
+             ]
+             | None
+         ) = None,
+     ): ...
+     def get_tokendefs(
+         cls,
+     ) -> dict[
+         str,
+         list[
+             tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+             | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+         ],
+     ]: ...
+     def __call__(cls, *args: Any, **kwds: Any) -> Any: ...

class RegexLexer(Lexer, metaclass=RegexLexerMeta):
    flags: ClassVar[RegexFlag]
-     tokens: ClassVar[dict[str, list[Incomplete]]]
+     tokens: ClassVar[
+         dict[
+             str,
+             list[
+                 tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                 | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+             ],
+         ]
+     ]
    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
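
# A minimal sketch exercising the rule shapes typed above: 2-tuples
# (pattern, action) and 3-tuples (pattern, action, new_state), plus include,
# default, and "#pop" (standard pygments semantics; the lexer is made up):
from pygments.lexer import RegexLexer, default, include
from pygments.token import Comment, String, Text

class TinyLexer(RegexLexer):
    name = "tiny-example"
    tokens = {
        "whitespace": [(r"\s+", Text)],
        "root": [
            include("whitespace"),     # splice in another state's rules
            (r'"', String, "string"),  # 3-tuple: push the "string" state
            (r"#", Comment, "comment"),
            (r".", Text),
        ],
        "string": [
            (r'[^"]+', String),
            (r'"', String, "#pop"),    # "#pop" returns to the previous state
        ],
        "comment": [
            (r"[^\n]+", Comment),
            default("#pop"),           # leave the state without consuming input
        ],
    }

for pos, ttype, value in TinyLexer().get_tokens_unprocessed('"hi" # bye\n'):
    print(pos, ttype, repr(value))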

class LexerContext:
-     text: Incomplete
-     pos: Incomplete
-     end: Incomplete
-     stack: Incomplete
-     def __init__(self, text, pos, stack=None, end=None) -> None: ...
+     text: str
+     pos: int
+     end: int
+     stack: list[str]
+     def __init__(self, text: str, pos: int, stack: list[str] | None = None, end: int | None = None) -> None: ...

class ExtendedRegexLexer(RegexLexer):
    def get_tokens_unprocessed(  # type: ignore[override]
        self, text: str | None = None, context: LexerContext | None = None
    ) -> Iterator[tuple[int, _TokenType, str]]: ...

+ def do_insertions(
+     insertions: list[tuple[int, list[tuple[int, _TokenType, str]]]],
+     tokens: Iterator[tuple[int, _TokenType, str]],
+ ) -> Iterator[tuple[int, _TokenType, str]]: ...
+
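
# A sketch of do_insertions, the helper console-style lexers use to splice
# sub-lexer output into a surrounding token stream (standard pygments
# semantics assumed; the sample tokens are made up):
from pygments.lexer import do_insertions
from pygments.lexers import PythonLexer
from pygments.token import Generic

base = iter([(0, Generic.Prompt, ">>> ")])  # stream carrying only the prompt
code = list(PythonLexer().get_tokens_unprocessed("x = 1\n"))
for index, ttype, value in do_insertions([(4, code)], base):
    print(index, ttype, repr(value))  # one merged, offset-adjusted stream
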
class ProfilingRegexLexerMeta(RegexLexerMeta): ...

class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):