mirror of https://github.com/lark-parser/lark.git
fix custom lexer handling for lalr parser + test
This commit is contained in:
parent 464f720385
commit c00f4448fa
@@ -118,7 +118,7 @@ class LALR_ContextualLexer(LALR_WithLexer):
 
 class LALR_CustomLexer(LALR_WithLexer):
     def __init__(self, lexer_cls, lexer_conf, parser_conf, options=None):
-        self.lexer = lexer_cls(self.lexer_conf)
+        self.lexer = lexer_cls(lexer_conf)
         debug = options.debug if options else False
         self.parser = LALR_Parser(parser_conf, debug=debug)
         WithLexer.__init__(self, lexer_conf, parser_conf, options)
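This hunk is the actual fix: when `LALR_CustomLexer.__init__` runs, `WithLexer.__init__` (the last line of the constructor, which is what assigns `self.lexer_conf`) has not executed yet, so the old code read an attribute that did not exist; building the lexer from the `lexer_conf` argument avoids that ordering problem. The code path is reached whenever a lexer class, rather than a lexer name, is passed to `Lark` together with `parser='lalr'`. A minimal sketch of that usage, modelled on the pass-through lexer this commit adds to the test suite (the class and grammar below are illustrative, not part of the commit):

```python
from lark import Lark
from lark.lexer import Lexer, TraditionalLexer

class PassThroughLexer(Lexer):
    """Illustrative custom lexer: delegates all scanning to TraditionalLexer."""
    def __init__(self, lexer_conf):
        # Same construction the test suite's CustomLexer uses.
        self.lexer = TraditionalLexer(lexer_conf.tokens, ignore=lexer_conf.ignore,
                                      user_callbacks=lexer_conf.callbacks)

    def lex(self, data):
        return self.lexer.lex(data)

grammar = r"""
start: WORD+
%import common.WORD
%import common.WS
%ignore WS
"""

# Passing a class (not a string) as `lexer` routes construction through
# LALR_CustomLexer, the constructor fixed above.
parser = Lark(grammar, parser='lalr', lexer=PassThroughLexer)
print(parser.parse("hello world"))
```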
@@ -21,6 +21,7 @@ from .test_parser import (
         TestCykStandard,
         TestLalrContextual,
         TestEarleyDynamic,
+        TestLalrCustom,
 
         # TestFullEarleyStandard,
         TestFullEarleyDynamic,
@@ -22,7 +22,7 @@ from lark.exceptions import GrammarError, ParseError, UnexpectedToken, Unexpecte
 from lark.tree import Tree
 from lark.visitors import Transformer, Transformer_InPlace, v_args
 from lark.grammar import Rule
-from lark.lexer import TerminalDef
+from lark.lexer import TerminalDef, Lexer, TraditionalLexer
 
 __path__ = os.path.dirname(__file__)
 def _read(n, *args):
@@ -431,12 +431,22 @@ def _make_full_earley_test(LEXER):
     _TestFullEarley.__name__ = _NAME
     globals()[_NAME] = _TestFullEarley
 
+class CustomLexer(Lexer):
+    """
+    Purpose of this custom lexer is to test the integration,
+    so it uses the traditionalparser as implementation without custom lexing behaviour.
+    """
+    def __init__(self, lexer_conf):
+        self.lexer = TraditionalLexer(lexer_conf.tokens, ignore=lexer_conf.ignore, user_callbacks=lexer_conf.callbacks)
+    def lex(self, *args, **kwargs):
+        return self.lexer.lex(*args, **kwargs)
 
 def _make_parser_test(LEXER, PARSER):
+    lexer_class_or_name = CustomLexer if LEXER == 'custom' else LEXER
     def _Lark(grammar, **kwargs):
-        return Lark(grammar, lexer=LEXER, parser=PARSER, propagate_positions=True, **kwargs)
+        return Lark(grammar, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs)
     def _Lark_open(gfilename, **kwargs):
-        return Lark.open(gfilename, lexer=LEXER, parser=PARSER, propagate_positions=True, **kwargs)
+        return Lark.open(gfilename, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs)
     class _TestParser(unittest.TestCase):
         def test_basic1(self):
             g = _Lark("""start: a+ b a* "b" a*
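The `CustomLexer` added above is deliberately a thin wrapper: it forwards everything to `TraditionalLexer`, so the new 'custom' column runs the whole parser test matrix through the `LALR_CustomLexer` front end without changing lexing behaviour. A lexer that genuinely replaces lark's scanner follows the same two-method contract: take the lexer configuration in `__init__` and have `lex()` yield `Token` objects whose types match terminals the grammar declares. A hedged sketch of that, assuming lark's `%declare` directive and the `Token(type, value)` constructor; the names below are invented for illustration:

```python
from lark import Lark, Token
from lark.lexer import Lexer

class SpaceSeparatedIntLexer(Lexer):
    """Illustrative lexer with no regexes: split on whitespace, emit INT tokens."""
    def __init__(self, lexer_conf):
        pass  # nothing to configure in this sketch

    def lex(self, data):
        for piece in data.split():
            yield Token('INT', piece)

grammar = r"""
start: INT+
%declare INT
"""

parser = Lark(grammar, parser='lalr', lexer=SpaceSeparatedIntLexer)
print(parser.parse("1 2 3"))   # Tree('start', [Token('INT', '1'), ...])
```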
@@ -1532,7 +1542,7 @@ def _make_parser_test(LEXER, PARSER):
             parser = _Lark(grammar)
 
 
-        @unittest.skipIf(PARSER!='lalr', "Serialize currently only works for LALR parsers (though it should be easy to extend)")
+        @unittest.skipIf(PARSER!='lalr' or LEXER=='custom', "Serialize currently only works for LALR parsers without custom lexers (though it should be easy to extend)")
         def test_serialize(self):
             grammar = """
                 start: _ANY b "C"
@@ -1594,6 +1604,7 @@ _TO_TEST = [
         ('dynamic_complete', 'earley'),
         ('standard', 'lalr'),
         ('contextual', 'lalr'),
+        ('custom', 'lalr'),
         # (None, 'earley'),
 ]
 
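For reference, the new ('custom', 'lalr') pair feeds the same class-generation machinery as the existing combinations: `_make_parser_test` builds a `unittest.TestCase` subclass per (lexer, parser) pair and registers it under a name derived from the pair, which is where the `TestLalrCustom` imported by the test runner above comes from. Roughly, as a sketch of the naming convention inferred from the visible class names (not verbatim suite code):

```python
# Sketch: how one (LEXER, PARSER) pair maps to a generated test-class name.
# The real registration happens inside _make_parser_test via globals()[...].
def test_class_name(lexer, parser):
    return 'Test' + parser.capitalize() + lexer.capitalize()

assert test_class_name('custom', 'lalr') == 'TestLalrCustom'
assert test_class_name('contextual', 'lalr') == 'TestLalrContextual'
assert test_class_name('standard', 'cyk') == 'TestCykStandard'
```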