author     S. Solomon Darnell   2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell   2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/lark-stubs
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download   gn-ai-master.tar.gz
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/lark-stubs')
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/__init__.pyi       12
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/ast_utils.pyi      17
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/exceptions.pyi     65
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/grammar.pyi        14
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/indenter.pyi       47
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/lark.pyi          109
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/lexer.pyi         161
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/load_grammar.pyi   31
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/reconstruct.pyi    39
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/tree.pyi           75
-rw-r--r--  .venv/lib/python3.12/site-packages/lark-stubs/visitors.pyi      108
11 files changed, 678 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/__init__.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/__init__.pyi
new file mode 100644
index 00000000..c79a6ef8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/__init__.pyi
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+
+from .tree import *
+from .visitors import *
+from .exceptions import *
+from .lexer import *
+from .load_grammar import *
+from .lark import *
+from logging import Logger as _Logger
+
+logger: _Logger
+__version__: str = ...
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/ast_utils.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/ast_utils.pyi
new file mode 100644
index 00000000..28246cff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/ast_utils.pyi
@@ -0,0 +1,17 @@
+import types
+from typing import Optional
+
+from .visitors import Transformer
+
+class Ast(object):
+    pass
+
+class AsList(object):
+    pass
+
+
+def create_transformer(
+        ast_module: types.ModuleType,
+        transformer: Optional[Transformer] = None
+) -> Transformer:
+    ...
\ No newline at end of file
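
A brief sketch of how the ast_utils API stubbed above is typically used (not part of this commit; the Name class and its matching rule are illustrative):

import sys
from lark import Transformer, ast_utils

class Name(ast_utils.Ast):
    # one field per child of the matching "name" rule
    def __init__(self, value):
        self.value = value

# collects every Ast subclass defined in this module into a Transformer
to_ast = ast_utils.create_transformer(sys.modules[__name__], Transformer())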
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/exceptions.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/exceptions.pyi
new file mode 100644
index 00000000..1c04fa87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/exceptions.pyi
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+from typing import Dict, Iterable, Callable, Union, TypeVar, Tuple, Any, List, Set
+from .tree import Tree
+from .lexer import Token
+from .parsers.lalr_interactive_parser import InteractiveParser
+
+class LarkError(Exception):
+    pass
+
+
+class ConfigurationError(LarkError, ValueError):
+    pass
+
+
+class GrammarError(LarkError):
+    pass
+
+
+class ParseError(LarkError):
+    pass
+
+
+class LexError(LarkError):
+    pass
+
+
+T = TypeVar('T')
+
+class UnexpectedEOF(ParseError):
+    expected: List[Token]
+
+class UnexpectedInput(LarkError):
+    line: int
+    column: int
+    pos_in_stream: int
+    state: Any
+
+    def get_context(self, text: str, span: int = ...) -> str:
+        ...
+
+    def match_examples(
+            self,
+            parse_fn: Callable[[str], Tree],
+            examples: Union[Dict[T, Iterable[str]], Iterable[Tuple[T, Iterable[str]]]],
+            token_type_match_fallback: bool = False,
+            use_accepts: bool = False,
+    ) -> T:
+        ...
+
+
+class UnexpectedToken(ParseError, UnexpectedInput):
+    expected: Set[str]
+    considered_rules: Set[str]
+    interactive_parser: InteractiveParser
+    accepts: Set[str]
+
+class UnexpectedCharacters(LexError, UnexpectedInput):
+    allowed: Set[str]
+    considered_tokens: Set[Any]
+
+
+class VisitError(LarkError):
+    obj: Union[Tree, Token]
+    orig_exc: Exception
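
A hedged sketch of how the exception attributes stubbed above are consumed in practice (the grammar and input text are invented for illustration):

from lark import Lark, UnexpectedInput

parser = Lark('start: "a" "b"')
try:
    parser.parse("ac")
except UnexpectedInput as err:
    # line, column and get_context() come straight from the stub above
    print(err.line, err.column)
    print(err.get_context("ac"))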
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/grammar.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/grammar.pyi
new file mode 100644
index 00000000..3a3d806f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/grammar.pyi
@@ -0,0 +1,14 @@
+from typing import Optional, Tuple
+
+
+class RuleOptions:
+    keep_all_tokens: bool
+    expand1: bool
+    priority: int
+    template_source: Optional[str]
+    empty_indices: Tuple[bool, ...]
+
+
+class Symbol:
+    name: str
+    is_term: bool
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/indenter.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/indenter.pyi
new file mode 100644
index 00000000..3a7aa973
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/indenter.pyi
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+from typing import Tuple, List, Iterator, Optional
+from abc import ABC, abstractmethod
+from .lexer import Token
+from .lark import PostLex
+
+
+class Indenter(PostLex, ABC):
+    paren_level: Optional[int]
+    indent_level: Optional[List[int]]
+
+    def __init__(self) -> None:
+        ...
+
+    def handle_NL(self, token: Token) -> Iterator[Token]:
+        ...
+
+    @property
+    @abstractmethod
+    def NL_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def OPEN_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def CLOSE_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def INDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def DEDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def tab_len(self) -> int:
+        ...
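
In practice the abstract properties above are satisfied with plain class attributes, as in this sketch (the terminal names assume a grammar that defines _NEWLINE, _INDENT and _DEDENT):

from lark.indenter import Indenter

class TreeIndenter(Indenter):
    NL_type = "_NEWLINE"
    OPEN_PAREN_types = []
    CLOSE_PAREN_types = []
    INDENT_type = "_INDENT"
    DEDENT_type = "_DEDENT"
    tab_len = 8

# hooked in via the postlex option declared in lark.pyi below:
#   Lark(grammar_text, parser="lalr", postlex=TreeIndenter())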
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/lark.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/lark.pyi
new file mode 100644
index 00000000..18748d17
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/lark.pyi
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+
+from typing import (
+    TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
+    Literal, Protocol, Tuple, Iterable,
+)
+
+from .parsers.lalr_interactive_parser import InteractiveParser
+from .visitors import Transformer
+from .lexer import Token, Lexer, TerminalDef
+from .tree import Tree
+from .exceptions import UnexpectedInput
+from .load_grammar import Grammar
+
+_T = TypeVar('_T')
+
+
+class PostLex(Protocol):
+
+    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
+        ...
+
+    always_accept: Iterable[str]
+
+
+class LarkOptions:
+    start: List[str]
+    parser: str
+    lexer: str
+    transformer: Optional[Transformer]
+    postlex: Optional[PostLex]
+    ambiguity: str
+    regex: bool
+    debug: bool
+    keep_all_tokens: bool
+    propagate_positions: Union[bool, Callable]
+    maybe_placeholders: bool
+    lexer_callbacks: Dict[str, Callable[[Token], Token]]
+    cache: Union[bool, str]
+    g_regex_flags: int
+    use_bytes: bool
+    import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]]
+    source_path: Optional[str]
+
+
+class PackageResource(object):
+    pkg_name: str
+    path: str
+
+    def __init__(self, pkg_name: str, path: str): ...
+
+
+class FromPackageLoader:
+    def __init__(self, pkg_name: str, search_paths: Tuple[str, ...] = ...): ...
+
+    def __call__(self, base_path: Union[None, str, PackageResource], grammar_path: str) -> Tuple[PackageResource, str]: ...
+
+
+class Lark:
+    source_path: str
+    source_grammar: str
+    grammar: Grammar
+    options: LarkOptions
+    lexer: Lexer
+    terminals: List[TerminalDef]
+
+    def __init__(
+        self,
+        grammar: Union[Grammar, str, IO[str]],
+        *,
+        start: Union[None, str, List[str]] = "start",
+        parser: Literal["earley", "lalr", "cyk", "auto"] = "auto",
+        lexer: Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Type[Lexer]] = "auto",
+        transformer: Optional[Transformer] = None,
+        postlex: Optional[PostLex] = None,
+        ambiguity: Literal["explicit", "resolve"] = "resolve",
+        regex: bool = False,
+        debug: bool = False,
+        keep_all_tokens: bool = False,
+        propagate_positions: Union[bool, Callable] = False,
+        maybe_placeholders: bool = False,
+        lexer_callbacks: Optional[Dict[str, Callable[[Token], Token]]] = None,
+        cache: Union[bool, str] = False,
+        g_regex_flags: int = ...,
+        use_bytes: bool = False,
+        import_paths: List[Union[str, Callable[[Union[None, str, PackageResource], str], Tuple[str, str]]]] = ...,
+        source_path: Optional[str] = None,
+    ):
+        ...
+
+    def parse(self, text: str, start: Optional[str] = None, on_error: Callable[[UnexpectedInput], bool] = None) -> Tree:
+        ...
+
+    def parse_interactive(self, text: str = None, start: Optional[str] = None) -> InteractiveParser:
+        ...
+
+    @classmethod
+    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
+        ...
+
+    @classmethod
+    def open_from_package(cls: Type[_T], package: str, grammar_path: str, search_paths: Tuple[str, ...] = ..., **options) -> _T:
+        ...
+
+    def lex(self, text: str, dont_ignore: bool = False) -> Iterator[Token]:
+        ...
+
+    def get_terminal(self, name: str) -> TerminalDef:
+        ...
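
A minimal end-to-end sketch exercising the constructor and parse() signatures stubbed above (the grammar is illustrative):

from lark import Lark

parser = Lark('''
    start: WORD+
    %import common.WORD
    %ignore " "
''', parser="lalr")

tree = parser.parse("hello world")
print(tree.pretty())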
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/lexer.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/lexer.pyi
new file mode 100644
index 00000000..004865c7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/lexer.pyi
@@ -0,0 +1,161 @@
+# -*- coding: utf-8 -*-
+from types import ModuleType
+from typing import (
+    TypeVar, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any,
+    Pattern as REPattern,
+)
+from abc import abstractmethod, ABC
+
+_T = TypeVar('_T')
+
+
+class Pattern(ABC):
+    value: str
+    flags: Collection[str]
+    raw: str
+    type: str
+
+    def __init__(self, value: str, flags: Collection[str] = (), raw: str = None) -> None:
+        ...
+
+    @abstractmethod
+    def to_regexp(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def min_width(self) -> int:
+        ...
+
+    @property
+    @abstractmethod
+    def max_width(self) -> int:
+        ...
+
+
+class PatternStr(Pattern):
+    type: str = ...
+
+    def to_regexp(self) -> str:
+        ...
+
+    @property
+    def min_width(self) -> int:
+        ...
+
+    @property
+    def max_width(self) -> int:
+        ...
+
+
+class PatternRE(Pattern):
+    type: str = ...
+
+    def to_regexp(self) -> str:
+        ...
+
+    @property
+    def min_width(self) -> int:
+        ...
+
+    @property
+    def max_width(self) -> int:
+        ...
+
+
+class TerminalDef:
+    name: str
+    pattern: Pattern
+    priority: int
+
+    def __init__(self, name: str, pattern: Pattern, priority: int = ...) -> None:
+        ...
+
+    def user_repr(self) -> str: ...
+
+
+class Token(str):
+    type: str
+    start_pos: int
+    value: Any
+    line: int
+    column: int
+    end_line: int
+    end_column: int
+    end_pos: int
+
+    def __init__(self, type_: str, value: Any, start_pos: int = None, line: int = None, column: int = None, end_line: int = None, end_column: int = None, end_pos: int = None) -> None:
+        ...
+
+    def update(self, type_: Optional[str] = None, value: Optional[Any] = None) -> Token:
+        ...
+
+    @classmethod
+    def new_borrow_pos(cls: Type[_T], type_: str, value: Any, borrow_t: Token) -> _T:
+        ...
+
+
+_Callback = Callable[[Token], Token]
+
+
+class Lexer(ABC):
+    lex: Callable[..., Iterator[Token]]
+
+
+class LexerConf:
+    tokens: Collection[TerminalDef]
+    re_module: ModuleType
+    ignore: Collection[str] = ()
+    postlex: Any = None
+    callbacks: Optional[Dict[str, _Callback]] = None
+    g_regex_flags: int = 0
+    skip_validation: bool = False
+    use_bytes: bool = False
+
+
+
+class TraditionalLexer(Lexer):
+    terminals: Collection[TerminalDef]
+    ignore_types: FrozenSet[str]
+    newline_types: FrozenSet[str]
+    user_callbacks: Dict[str, _Callback]
+    callback: Dict[str, _Callback]
+    mres: List[Tuple[REPattern, Dict[int, str]]]
+    re: ModuleType
+
+    def __init__(
+        self,
+        conf: LexerConf
+    ) -> None:
+        ...
+
+    def build(self) -> None:
+        ...
+
+    def match(self, stream: str, pos: int) -> Optional[Tuple[str, str]]:
+        ...
+
+    def lex(self, stream: str) -> Iterator[Token]:
+        ...
+
+    def next_token(self, lex_state: Any, parser_state: Any = None) -> Token:
+        ...
+
+class ContextualLexer(Lexer):
+    lexers: Dict[str, TraditionalLexer]
+    root_lexer: TraditionalLexer
+
+    def __init__(
+        self,
+        terminals: Collection[TerminalDef],
+        states: Dict[str, Collection[str]],
+        re_: ModuleType,
+        ignore: Collection[str] = ...,
+        always_accept: Collection[str] = ...,
+        user_callbacks: Dict[str, _Callback] = ...,
+        g_regex_flags: int = ...
+    ) -> None:
+        ...
+
+    def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:
+        ...
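
Token subclasses str, so instances compare like plain strings while carrying position metadata; a small sketch of the constructor and update() stubbed above:

from lark import Token

tok = Token("WORD", "hello")
print(tok.type, tok == "hello")        # WORD True
renamed = tok.update(type_="NAME")     # new Token, same value, different type
print(renamed.type, renamed)           # NAME hello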
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/load_grammar.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/load_grammar.pyi
new file mode 100644
index 00000000..86a63419
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/load_grammar.pyi
@@ -0,0 +1,31 @@
+from typing import List, Tuple, Union, Callable, Dict, Optional
+
+from .tree import Tree
+from .grammar import RuleOptions
+from .exceptions import UnexpectedInput
+
+
+class Grammar:
+    rule_defs: List[Tuple[str, Tuple[str, ...], Tree, RuleOptions]]
+    term_defs: List[Tuple[str, Tuple[Tree, int]]]
+    ignore: List[str]
+
+
+class GrammarBuilder:
+    global_keep_all_tokens: bool
+    import_paths: List[Union[str, Callable]]
+    used_files: Dict[str, str]
+
+    def __init__(self, global_keep_all_tokens: bool = False, import_paths: List[Union[str, Callable]] = None, used_files: Dict[str, str] = None) -> None: ...
+
+    def load_grammar(self, grammar_text: str, grammar_name: str = ..., mangle: Callable[[str], str] = None) -> None: ...
+
+    def do_import(self, dotted_path: Tuple[str, ...], base_path: Optional[str], aliases: Dict[str, str],
+                  base_mangle: Callable[[str], str] = None) -> None:  ...
+
+    def validate(self) -> None: ...
+
+    def build(self) -> Grammar: ...
+
+
+def find_grammar_errors(text: str, start: str = 'start') -> List[Tuple[UnexpectedInput, str]]: ...
\ No newline at end of file
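
find_grammar_errors is the error-tolerant entry point stubbed above; a hedged sketch (the broken grammar text is invented, and the second tuple element is the descriptive string promised by the return type):

from lark.load_grammar import find_grammar_errors

broken = '''
start: item+
item missing_colon_here
'''
for err, description in find_grammar_errors(broken):
    print(err.line, err.column, description)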
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/reconstruct.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/reconstruct.pyi
new file mode 100644
index 00000000..a8d39e35
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/reconstruct.pyi
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+from typing import List, Dict, Union, Callable, Iterable
+
+from .grammar import Symbol
+from .lark import Lark
+from .tree import Tree
+from .visitors import Transformer_InPlace
+from .lexer import TerminalDef
+
+
+class WriteTokensTransformer(Transformer_InPlace):
+
+    def __init__(self, tokens: Dict[str, TerminalDef], term_subs: Dict[str, Callable[[Symbol], str]] = ...): ...
+
+
+class MatchTree(Tree):
+    pass
+
+
+class MakeMatchTree:
+    name: str
+    expansion: List[TerminalDef]
+
+    def __init__(self, name: str, expansion: List[TerminalDef]):
+        ...
+
+    def __call__(self, args: List[Union[str, Tree]]):
+        ...
+
+
+class Reconstructor:
+
+    def __init__(self, parser: Lark, term_subs: Dict[str, Callable[[Symbol], str]] = ...):
+        ...
+
+    def reconstruct(self, tree: Tree, postproc: Callable[[Iterable[str]], Iterable[str]] = None,
+                    insert_spaces: bool = True) -> str:
+        ...
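
Reconstructor turns a parse tree back into text using the same grammar; a hedged round-trip sketch (grammar and input are illustrative):

from lark import Lark
from lark.reconstruct import Reconstructor

parser = Lark('''
    start: WORD ("," WORD)*
    %import common.WORD
    %ignore " "
''', maybe_placeholders=False)

tree = parser.parse("foo, bar")
print(Reconstructor(parser).reconstruct(tree))   # e.g. "foo,bar"; ignored spaces are not restored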
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/tree.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/tree.pyi
new file mode 100644
index 00000000..0c128198
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/tree.pyi
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+from typing import List, Callable, Iterator, Union, Optional, Literal, Any
+from .lexer import TerminalDef
+
+class Meta:
+    empty: bool
+    line: int
+    column: int
+    start_pos: int
+    end_line: int
+    end_column: int
+    end_pos: int
+    orig_expansion: List[TerminalDef]
+    match_tree: bool
+
+
+class Tree:
+    data: str
+    children: List[Union[str, Tree]]
+    meta: Meta
+
+    def __init__(
+        self,
+        data: str,
+        children: List[Union[str, Tree]],
+        meta: Optional[Meta] = None
+    ) -> None:
+        ...
+
+    def pretty(self, indent_str: str = ...) -> str:
+        ...
+
+    def find_pred(self, pred: Callable[[Tree], bool]) -> Iterator[Tree]:
+        ...
+
+    def find_data(self, data: str) -> Iterator[Tree]:
+        ...
+
+    def expand_kids_by_index(self, *indices: int) -> None:
+        ...
+
+    def expand_kids_by_data(self, *data_values: str) -> bool:
+        ...
+
+    def scan_values(self, pred: Callable[[Union[str, Tree]], bool]) -> Iterator[str]:
+        ...
+
+    def iter_subtrees(self) -> Iterator[Tree]:
+        ...
+
+    def iter_subtrees_topdown(self) -> Iterator[Tree]:
+        ...
+
+    def copy(self) -> Tree:
+        ...
+
+    def set(self, data: str, children: List[Union[str, Tree]]) -> None:
+        ...
+
+    def __hash__(self) -> int:
+        ...
+
+
+class SlottedTree(Tree):
+    pass
+
+
+def pydot__tree_to_png(
+    tree: Tree,
+    filename: str,
+    rankdir: Literal["TB", "LR", "BT", "RL"] = ...,
+    **kwargs
+) -> None:
+    ...
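
Tree is a plain data structure, so the traversal helpers stubbed above can be exercised without a parser; a small sketch:

from lark import Tree

tree = Tree("start", [Tree("pair", ["a", "b"]), Tree("pair", ["c", "d"])])

for sub in tree.find_data("pair"):     # iterate subtrees whose .data == "pair"
    print(sub.children)
print(tree.pretty())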
diff --git a/.venv/lib/python3.12/site-packages/lark-stubs/visitors.pyi b/.venv/lib/python3.12/site-packages/lark-stubs/visitors.pyi
new file mode 100644
index 00000000..3a934eec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/lark-stubs/visitors.pyi
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+
+from typing import TypeVar, Tuple, List, Callable, Generic, Type, Union
+from abc import ABC
+from .tree import Tree
+
+_T = TypeVar('_T')
+_R = TypeVar('_R')
+_FUNC = Callable[..., _T]
+_DECORATED = Union[_FUNC, type]
+
+
+class Transformer(ABC, Generic[_T]):
+
+    def __init__(self, visit_tokens: bool = True) -> None:
+        ...
+
+    def transform(self, tree: Tree) -> _T:
+        ...
+
+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+
+
+class TransformerChain(Generic[_T]):
+    transformers: Tuple[Transformer[_T], ...]
+
+    def __init__(self, *transformers: Transformer[_T]) -> None:
+        ...
+
+    def transform(self, tree: Tree) -> _T:
+        ...
+
+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+
+
+class Transformer_InPlace(Transformer):
+    pass
+
+
+class Transformer_NonRecursive(Transformer):
+    pass
+
+
+class Transformer_InPlaceRecursive(Transformer):
+    pass
+
+
+class VisitorBase:
+    pass
+
+
+class Visitor(VisitorBase, ABC, Generic[_T]):
+
+    def visit(self, tree: Tree) -> Tree:
+        ...
+
+    def visit_topdown(self, tree: Tree) -> Tree:
+        ...
+
+
+class Visitor_Recursive(VisitorBase):
+
+    def visit(self, tree: Tree) -> Tree:
+        ...
+
+    def visit_topdown(self, tree: Tree) -> Tree:
+        ...
+
+
+class Interpreter(ABC, Generic[_T]):
+
+    def visit(self, tree: Tree) -> _T:
+        ...
+
+    def visit_children(self, tree: Tree) -> List[_T]:
+        ...
+
+
+_InterMethod = Callable[[Type[Interpreter], _T], _R]
+
+
+def v_args(
+        inline: bool = False,
+        meta: bool = False,
+        tree: bool = False,
+        wrapper: Callable = None
+) -> Callable[[_DECORATED], _DECORATED]:
+    ...
+
+
+def visit_children_decor(func: _InterMethod) -> _InterMethod:
+    ...
+
+
+class Discard(Exception):
+    pass
+
+
+# Deprecated
+class InlineTransformer:
+    pass
+
+
+# Deprecated
+def inline_args(obj: _FUNC) -> _FUNC:
+    ...
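
A short sketch tying Transformer and v_args together (grammar and transformer are illustrative, not part of this commit):

from lark import Lark, Transformer, v_args

parser = Lark('''
    start: NUMBER "+" NUMBER
    %import common.NUMBER
    %ignore " "
''', parser="lalr")

@v_args(inline=True)            # children arrive as positional arguments
class AddUp(Transformer):
    def start(self, left, right):
        return int(left) + int(right)

print(AddUp().transform(parser.parse("1 + 2")))   # 3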