Apply changes and extend more names.

KmolYuan 2020-02-16 14:09:53 +08:00
parent 97afea24d1
commit 803b1fe79e
9 changed files with 336 additions and 24 deletions

lark-stubs/__init__.pyi

@@ -6,4 +6,4 @@
 from .exceptions import *
 from .lexer import *
 from .lark import *
-__version__: str
+__version__: str = ...

lark-stubs/exceptions.pyi

@@ -23,7 +23,7 @@ class LexError(LarkError):
 class UnexpectedInput(LarkError):
     pos_in_stream: int

-    def get_context(self, text: str, span: int = 40):
+    def get_context(self, text: str, span: int = ...):
         ...

     def match_examples(
@@ -39,4 +39,9 @@ class UnexpectedToken(ParseError, UnexpectedInput):
 class UnexpectedCharacters(LexError, UnexpectedInput):
     line: int
     column: int
+
+
+class VisitError(LarkError):
+    pass
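
A minimal usage sketch of the error surface typed above; the one-rule grammar is illustrative, not part of this commit:

from lark import Lark
from lark.exceptions import UnexpectedInput

parser = Lark('start: "a"+')
try:
    parser.parse("aab")
except UnexpectedInput as e:
    # span matches the keyword typed above; get_context returns the
    # offending slice of the input text
    print(e.get_context("aab", span=40))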

lark-stubs/indenter.pyi (new file)

@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+from typing import Tuple, List, Iterator, Optional
+from abc import ABC, abstractmethod
+from .lexer import Token
+
+
+class Indenter(ABC):
+    paren_level: Optional[int]
+    indent_level: Optional[List[int]]
+
+    def __init__(self):
+        ...
+
+    def handle_NL(self, token: Token) -> Iterator[Token]:
+        ...
+
+    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
+        ...
+
+    @property
+    def always_accept(self) -> Tuple[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def NL_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def OPEN_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def CLOSE_PAREN_types(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def INDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def DEDENT_type(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def tab_len(self) -> int:
+        ...
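
For reference, a concrete indenter in the style of the PythonIndenter from lark's examples satisfies this interface by overriding the abstract properties with plain class attributes; an instance is then passed to Lark through the postlex option typed in lark.pyi below:

from lark.indenter import Indenter

class PythonIndenter(Indenter):
    NL_type = '_NEWLINE'
    OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
    CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
    INDENT_type = '_INDENT'
    DEDENT_type = '_DEDENT'
    tab_len = 8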

lark-stubs/lark.pyi

@@ -1,25 +1,57 @@
 # -*- coding: utf-8 -*-
-from typing import List, Dict, IO, Callable, Union, Optional, Literal
+from typing import (
+    TypeVar, Type, List, Dict, IO, Iterator, Callable, Union, Optional,
+    Literal, Protocol,
+)
 from .visitors import Transformer
-from .lexer import Lexer, Token
+from .lexer import Token, Lexer, TerminalDef
 from .tree import Tree

+_T = TypeVar('_T')
 _Start = Union[None, str, List[str]]
+_Parser = Literal["earley", "lalr", "cyk"]
+_Lexer = Union[Literal["auto", "standard", "contextual", "dynamic", "dynamic_complete"], Lexer]
+_Ambiguity = Literal["explicit", "resolve"]
+
+
+class PostLex(Protocol):
+
+    def process(self, stream: Iterator[Token]) -> Iterator[Token]:
+        ...
+
+
+class LarkOptions:
+    start: _Start
+    parser: _Parser
+    lexer: _Lexer
+    transformer: Optional[Transformer]
+    postlex: Optional[PostLex]
+    ambiguity: _Ambiguity
+    debug: bool
+    keep_all_tokens: bool
+    propagate_positions: bool
+    maybe_placeholders: bool
+    lexer_callbacks: Dict[str, Callable[[Token], Token]]
+    cache_grammar: bool
+
+
 class Lark:
     source: str
+    options: LarkOptions
+    lexer: Lexer
+    terminals: List[TerminalDef]

     def __init__(
         self,
         grammar: Union[str, IO[str]],
         *,
         start: _Start = ...,
-        parser: Literal["earley", "lalr", "cyk"] = ...,
-        lexer: Optional[Lexer] = ...,
-        transformer: Optional[Transformer] = ...,
-        postlex: Optional[Literal["standard", "contextual"]] = ...,
-        ambiguity: Literal["explicit", "resolve"] = ...,
+        parser: _Parser = ...,
+        lexer: _Lexer = ...,
+        transformer: Optional[Transformer] = None,
+        postlex: Optional[PostLex] = None,
+        ambiguity: _Ambiguity = ...,
         debug: bool = False,
         keep_all_tokens: bool = False,
         propagate_positions: bool = False,
@@ -30,3 +62,13 @@ class Lark:
     def parse(self, text: str, start: _Start = None) -> Tree:
         ...

+    @classmethod
+    def open(cls: Type[_T], grammar_filename: str, rel_to: Optional[str] = None, **options) -> _T:
+        ...
+
+    def lex(self, text: str) -> Iterator[Token]:
+        ...
+
+    def get_terminal(self, name: str) -> TerminalDef:
+        ...
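
A hedged usage sketch of the options these stubs now cover; the grammar and terminal names are illustrative:

from lark import Lark

parser = Lark('''
    start: NAME "=" NUMBER
    %import common.CNAME -> NAME
    %import common.NUMBER
    %ignore " "
''', parser="lalr", lexer="contextual")

tree = parser.parse("x = 1")               # -> Tree
tokens = list(parser.lex("x = 1"))         # -> Iterator[Token], per the new lex() stub
name_def = parser.get_terminal("NAME")     # -> TerminalDef
# Lark.open("grammar.lark", rel_to=__file__) loads a grammar file relative to a path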

lark-stubs/lexer.pyi

@@ -1,64 +1,202 @@
 # -*- coding: utf-8 -*-
-from typing import Tuple, Iterator, Sized
+from typing import (
+    TypeVar, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional,
+    Pattern as REPattern,
+)
 from abc import abstractmethod, ABC

+_T = TypeVar('_T')
+_MRes = List[Tuple[REPattern, Dict[int, str]]]
+
+
 class Pattern(ABC):
     value: str
+    flags: Collection[str]
+
+    def __init__(self, value: str, flags: Collection[str] = ...):
+        ...

     @property
     @abstractmethod
     def type(self) -> str:
         ...

     @abstractmethod
     def to_regexp(self) -> str:
         ...

     @property
     @abstractmethod
     def min_width(self) -> int:
         ...

     @property
     @abstractmethod
     def max_width(self) -> int:
         ...


 class PatternStr(Pattern):
     type: str = ...

     def to_regexp(self) -> str:
         ...

     @property
     def min_width(self) -> int:
         ...

     @property
     def max_width(self) -> int:
         ...


 class PatternRE(Pattern):
     type: str = ...

     def to_regexp(self) -> str:
         ...

     @property
     def min_width(self) -> int:
         ...

     @property
     def max_width(self) -> int:
         ...


 class TerminalDef:
     name: str
     pattern: Pattern
     priority: int

     def __init__(self, name: str, pattern: Pattern, priority: int = ...):
         ...


 class Token(str):
     type: str
     pos_in_stream: int
     value: str
     line: int
     column: int
     end_line: int
     end_column: int
     end_pos: int

+    def update(self, type_: Optional[str] = None, value: Optional[str] = None) -> Token:
+        ...

-class Lexer(ABC):
-    @abstractmethod
-    def lex(self, stream: Sized) -> Iterator[Token]:
-        ...
+    @classmethod
+    def new_borrow_pos(cls: Type[_T], type_: str, value: str, borrow_t: Token) -> _T:
+        ...
+
+
+_Callback = Callable[[Token], Token]
+
+
+def build_mres(terminals: Collection[TerminalDef], match_whole: bool = False) -> _MRes:
+    ...
+
+
+class UnlessCallback:
+    mres: _MRes
+
+    def __init__(self, mres: _MRes):
+        ...
+
+    def __call__(self, t: Token) -> Token:
+        ...
+
+
+class CallChain:
+    callback1: _Callback
+    callback2: _Callback
+    cond: Callable[[Token], bool]
+
+    def __init__(
+        self,
+        callback1: _Callback,
+        callback2: _Callback,
+        cond: Callable[[Token], bool]
+    ):
+        ...
+
+
+class LineCounter:
+    newline_char: str
+    char_pos: int
+    line: int
+    column: int
+    line_start_pos: int
+
+    def __init__(self):
+        ...
+
+    def feed(self, token: str, test_newline: bool = True):
+        ...
+
+
+class _Lex:
+    lexer: TraditionalLexer
+
+    def __init__(self, lexer: TraditionalLexer, state: Optional[str] = None):
+        ...
+
+    def lex(
+        self,
+        stream: str,
+        newline_types: Collection[str],
+        ignore_types: Collection[str]
+    ) -> Iterator[Token]:
+        ...
+
+
+class Lexer(ABC):
+    lex: Callable[..., Iterator[Token]]
+
+
 class TraditionalLexer(Lexer):
+    terminals: Collection[TerminalDef]
+    ignore_types: List[str]
+    newline_types: List[str]
+    user_callbacks: Dict[str, _Callback]
+    callback: Dict[str, _Callback]
+    mres: _MRes
+
+    def __init__(
+        self,
+        terminals: Collection[TerminalDef],
+        ignore: Collection[str] = ...,
+        user_callbacks: Dict[str, _Callback] = ...
+    ):
+        ...
+
+    def build(self) -> None:
+        ...

-    def match(self, stream: str, pos: int) -> Tuple[str, str]:
+    def match(self, stream: str, pos: int) -> Optional[Tuple[str, str]]:
         ...

-    def lex(self, stream: Sized) -> Iterator[Token]:
+    def lex(self, stream: str) -> Iterator[Token]:
         ...


 class ContextualLexer(Lexer):
+    lexers: Dict[str, TraditionalLexer]
+    root_lexer: TraditionalLexer

-    def lex(self, stream: Sized) -> Iterator[Token]:
+    def __init__(
+        self,
+        terminals: Collection[TerminalDef],
+        states: Dict[str, Collection[str]],
+        ignore: Collection[str] = ...,
+        always_accept: Collection[str] = ...,
+        user_callbacks: Dict[str, _Callback] = ...
+    ):
+        ...
+
+    def lex(self, stream: str, get_parser_state: Callable[[], str]) -> Iterator[Token]:
+        ...
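
A small sketch of the Token surface described above (values illustrative):

from lark import Token

tok = Token('NAME', 'x')                 # Token subclasses str
assert tok == 'x' and tok.type == 'NAME'
ident = tok.update(type_='IDENT')        # typed here to return a new Token
copy = Token.new_borrow_pos('IDENT', 'y', tok)  # borrows position info from tok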

lark-stubs/reconstruct.pyi

@@ -1,7 +1,31 @@
 # -*- coding: utf-8 -*-
+from typing import List, Dict, Union
 from .lark import Lark
 from .tree import Tree
+from .visitors import Transformer_InPlace
+from .lexer import TerminalDef
+
+
+class WriteTokensTransformer(Transformer_InPlace):
+
+    def __init__(self, tokens: Dict[str, TerminalDef], term_subs):
+        ...
+
+
+class MatchTree(Tree):
+    pass
+
+
+class MakeMatchTree:
+    name: str
+    expansion: List[TerminalDef]
+
+    def __init__(self, name: str, expansion: List[TerminalDef]):
+        ...
+
+    def __call__(self, args: List[Union[str, Tree]]):
+        ...
+
+
 class Reconstructor:
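
The hunk is truncated in this view. For context, a hedged sketch of how Reconstructor is typically driven (grammar illustrative):

from lark import Lark
from lark.reconstruct import Reconstructor

parser = Lark('''
    start: "hello" NAME
    %import common.CNAME -> NAME
    %ignore " "
''', maybe_placeholders=False)
tree = parser.parse("hello world")
text = Reconstructor(parser).reconstruct(tree)  # regenerates source text from the tree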

lark-stubs/tree.pyi

@@ -1,16 +1,31 @@
 # -*- coding: utf-8 -*-
-from typing import List, Callable, Iterator, Union, Optional
-from .lexer import Token
+from typing import List, Callable, Iterator, Union, Optional, Literal
+from .lexer import TerminalDef
+
+
+class Meta:
+    empty: bool
+    line: int
+    column: int
+    start_pos: int
+    end_line: int
+    end_column: int
+    end_pos: int
+    orig_expansion: List[TerminalDef]
+    match_tree: bool


 class Tree:
     data: str
     children: List[Union[str, Tree]]
-    meta: Token
+    meta: Meta

-    def __init__(self, data: str, children: List[Tree], meta: Optional[Token] = None):
+    def __init__(
+        self,
+        data: str,
+        children: List[Union[str, Tree]],
+        meta: Optional[Meta] = None
+    ):
         ...

     def pretty(self, indent_str: str = ...) -> str:
@@ -22,13 +37,22 @@ class Tree:
     def find_data(self, data: str) -> Iterator[Tree]:
         ...

     def expand_kids_by_index(self, *indices: int) -> None:
         ...

     def scan_values(self, pred: Callable[[Union[str, Tree]], bool]):
         ...

     def iter_subtrees(self) -> Iterator[Tree]:
         ...

+    def iter_subtrees_topdown(self) -> Iterator[Tree]:
+        ...
+
     def __eq__(self, other: object) -> bool:
         ...

+    def copy(self) -> Tree:
+        ...
+
+    def set(self, data: str, children: List[Union[str, Tree]]) -> None:
+        ...
+
     def __hash__(self) -> int:
@@ -37,3 +61,12 @@ class Tree:

 class SlottedTree(Tree):
     pass
+
+
+def pydot__tree_to_png(
+    tree: Tree,
+    filename: str,
+    rankdir: Literal["TB", "LR", "BT", "RL"] = ...,
+    **kwargs
+) -> None:
+    ...
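
A quick sketch of the traversal helpers typed above, over a hand-built tree:

from lark import Tree

t = Tree('start', [Tree('pair', ['x', Tree('value', ['1'])])])
for sub in t.iter_subtrees():            # every subtree, each visited once
    print(sub.data)
for pair in t.find_data('pair'):         # subtrees whose data == 'pair'
    print(pair.children)
leaves = list(t.scan_values(lambda v: isinstance(v, str)))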

lark-stubs/visitors.pyi

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-from typing import TypeVar, List, Callable, Generic, Type
+from typing import TypeVar, Tuple, List, Callable, Generic, Type
 from abc import ABC
 from .tree import Tree
@@ -17,6 +17,22 @@ class Transformer(ABC, Generic[_T]):
     def transform(self, tree: Tree) -> _T:
         ...

+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+
+
+class TransformerChain(Generic[_T]):
+    transformers: Tuple[Transformer[_T], ...]
+
+    def __init__(self, *transformers: Transformer[_T]):
+        ...
+
+    def transform(self, tree: Tree) -> _T:
+        ...
+
+    def __mul__(self, other: Transformer[_T]) -> TransformerChain[_T]:
+        ...
+

 class Transformer_InPlace(Transformer):
     pass
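
The new __mul__ stubs type transformer composition: t1 * t2 yields a TransformerChain that applies t1, then t2. A hedged sketch with illustrative transformer classes:

from lark import Transformer

class ResolveValues(Transformer):
    def value(self, children):        # rule callback: collapse each 'value' node
        return children[0]

class CountChildren(Transformer):
    def start(self, children):        # runs on the tree produced by ResolveValues
        return len(children)

chain = ResolveValues() * CountChildren()   # -> TransformerChain
result = chain.transform(tree)              # tree as produced by a parser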

lark/visitors.py

@@ -48,6 +48,7 @@ class Transformer(_Decoratable):
     Can be used to implement map or reduce.
     """
+    __visit_tokens__ = True   # For backwards compatibility
     def __init__(self, visit_tokens=True):
         self.__visit_tokens__ = visit_tokens
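
With visit_tokens enabled (the default here), methods named after terminal types receive the matching Token alongside the usual rule methods. A minimal illustrative sketch:

from lark import Lark, Transformer

class Calc(Transformer):
    def NUMBER(self, tok):        # token callback, enabled by visit_tokens=True
        return int(tok)

    def start(self, children):    # ordinary rule callback; "+" is filtered out
        return sum(children)

parser = Lark('''
    start: NUMBER "+" NUMBER
    %import common.NUMBER
    %ignore " "
''')
assert Calc().transform(parser.parse("1 + 2")) == 3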