path: root/.venv/lib/python3.12/site-packages/lark-stubs/lexer.pyi
# -*- coding: utf-8 -*-
from types import ModuleType
from typing import (
    TypeVar, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any,
    Pattern as REPattern,
)
from abc import abstractmethod, ABC

_T = TypeVar('_T')


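# Abstract base class for a terminal's pattern; the concrete subclasses below
# are PatternStr (literal strings) and PatternRE (regular expressions).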
class Pattern(ABC):
    value: str
    flags: Collection[str]
    raw: str
    type: str

    def __init__(self, value: str, flags: Collection[str] = (), raw: Optional[str] = None) -> None:
        ...

    @abstractmethod
    def to_regexp(self) -> str:
        ...

    @property
    @abstractmethod
    def min_width(self) -> int:
        ...

    @property
    @abstractmethod
    def max_width(self) -> int:
        ...


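# Pattern for a terminal defined by a literal string.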
class PatternStr(Pattern):
    type: str = ...

    def to_regexp(self) -> str:
        ...

    @property
    def min_width(self) -> int:
        ...

    @property
    def max_width(self) -> int:
        ...


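# Pattern for a terminal defined by a regular expression.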
class PatternRE(Pattern):
    type: str = ...

    def to_regexp(self) -> str:
        ...

    @property
    def min_width(self) -> int:
        ...

    @property
    def max_width(self) -> int:
        ...


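# Definition of a single terminal: its name, its pattern, and its match priority.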
class TerminalDef:
    name: str
    pattern: Pattern
    priority: int

    def __init__(self, name: str, pattern: Pattern, priority: int = ...) -> None:
        ...

    def user_repr(self) -> str: ...


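# A token produced by the lexer. It subclasses str, so it can be used directly
# as its matched text while also carrying the terminal type and position info.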
class Token(str):
    type: str
    start_pos: int
    value: Any
    line: int
    column: int
    end_line: int
    end_column: int
    end_pos: int

    def __init__(self, type_: str, value: Any, start_pos: Optional[int] = None, line: Optional[int] = None, column: Optional[int] = None, end_line: Optional[int] = None, end_column: Optional[int] = None, end_pos: Optional[int] = None) -> None:
        ...

    def update(self, type_: Optional[str] = None, value: Optional[Any] = None) -> Token:
        ...

    @classmethod
    def new_borrow_pos(cls: Type[_T], type_: str, value: Any, borrow_t: Token) -> _T:
        ...


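# A token callback: receives a Token and returns a (possibly modified) Token.
# Registered per terminal name in the callbacks dicts below.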
_Callback = Callable[[Token], Token]


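# Abstract lexer interface: concrete lexers expose lex(), yielding Tokens.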
class Lexer(ABC):
    lex: Callable[..., Iterator[Token]]


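# Configuration handed to a lexer: the terminal definitions plus lexing options.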
class LexerConf:
    tokens: Collection[TerminalDef]
    re_module: ModuleType
    ignore: Collection[str] = ()
    postlex: Any = None
    callbacks: Optional[Dict[str, _Callback]] = None
    g_regex_flags: int = 0
    skip_validation: bool = False
    use_bytes: bool = False


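# Standard lexer: compiles the terminal patterns into regular expressions
# (the `mres` list) and scans the input stream with them.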
class TraditionalLexer(Lexer):
    terminals: Collection[TerminalDef]
    ignore_types: FrozenSet[str]
    newline_types: FrozenSet[str]
    user_callbacks: Dict[str, _Callback]
    callback: Dict[str, _Callback]
    mres: List[Tuple[REPattern, Dict[int, str]]]
    re: ModuleType

    def __init__(
        self,
        conf: LexerConf
    ) -> None:
        ...

    def build(self) -> None:
        ...

    def match(self, stream: str, pos: int) -> Optional[Tuple[str, str]]:
        ...

    def lex(self, stream: str) -> Iterator[Token]:
        ...

    def next_token(self, lex_state: Any, parser_state: Any = None) -> Token:
        ...

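
# Lexer that keeps one TraditionalLexer per parser state (`lexers`) plus a
# `root_lexer`, selecting which one to use from the current parser state.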
class ContextualLexer(Lexer):
    lexers: Dict[str, TraditionalLexer]
    root_lexer: TraditionalLexer

    def __init__(
        self,
        terminals: Collection[TerminalDef],
        states: Dict[str, Collection[str]],
        re_: ModuleType,
        ignore: Collection[str] = ...,
        always_accept: Collection[str] = ...,
        user_callbacks: Dict[str, _Callback] = ...,
        g_regex_flags: int = ...
    ) -> None:
        ...

    def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:
        ...