fix custom lexer handling for lalr parser + test

Michael Heyvaert 2019-08-21 12:14:28 +02:00 committed by Erez Sh
parent 464f720385
commit c00f4448fa
3 changed files with 17 additions and 5 deletions

@@ -118,7 +118,7 @@ class LALR_ContextualLexer(LALR_WithLexer):
 
 class LALR_CustomLexer(LALR_WithLexer):
     def __init__(self, lexer_cls, lexer_conf, parser_conf, options=None):
-        self.lexer = lexer_cls(self.lexer_conf)
+        self.lexer = lexer_cls(lexer_conf)
         debug = options.debug if options else False
         self.parser = LALR_Parser(parser_conf, debug=debug)
         WithLexer.__init__(self, lexer_conf, parser_conf, options)
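
The hunk above is the actual bug fix: the constructor read self.lexer_conf, but that attribute does not exist yet at that point (presumably it is only assigned later, by the WithLexer.__init__ call on the last line), so instantiating any custom lexer for the LALR parser failed. Passing the lexer_conf argument straight through avoids the problem. A minimal, self-contained sketch of the same ordering issue, using made-up class names rather than Lark's real ones:

# Hypothetical classes for illustration only; they mimic the initialisation order above.
class _Base:
    def __init__(self, lexer_conf):
        self.lexer_conf = lexer_conf

class _BrokenFrontend(_Base):
    def __init__(self, lexer_cls, lexer_conf):
        # Fails: self.lexer_conf is only assigned by _Base.__init__, which runs below.
        self.lexer = lexer_cls(self.lexer_conf)
        _Base.__init__(self, lexer_conf)

class _FixedFrontend(_Base):
    def __init__(self, lexer_cls, lexer_conf):
        # Use the argument directly instead of the not-yet-set attribute.
        self.lexer = lexer_cls(lexer_conf)
        _Base.__init__(self, lexer_conf)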

@@ -21,6 +21,7 @@ from .test_parser import (
         TestCykStandard,
         TestLalrContextual,
         TestEarleyDynamic,
+        TestLalrCustom,
 #        TestFullEarleyStandard,
         TestFullEarleyDynamic,

@@ -22,7 +22,7 @@ from lark.exceptions import GrammarError, ParseError, UnexpectedToken, Unexpecte
 from lark.tree import Tree
 from lark.visitors import Transformer, Transformer_InPlace, v_args
 from lark.grammar import Rule
-from lark.lexer import TerminalDef
+from lark.lexer import TerminalDef, Lexer, TraditionalLexer
 
 __path__ = os.path.dirname(__file__)
 def _read(n, *args):
@@ -431,12 +431,22 @@ def _make_full_earley_test(LEXER):
     _TestFullEarley.__name__ = _NAME
     globals()[_NAME] = _TestFullEarley
 
+class CustomLexer(Lexer):
+    """
+    Purpose of this custom lexer is to test the integration,
+    so it uses TraditionalLexer as the implementation, without custom lexing behaviour.
+    """
+    def __init__(self, lexer_conf):
+        self.lexer = TraditionalLexer(lexer_conf.tokens, ignore=lexer_conf.ignore, user_callbacks=lexer_conf.callbacks)
+    def lex(self, *args, **kwargs):
+        return self.lexer.lex(*args, **kwargs)
+
 def _make_parser_test(LEXER, PARSER):
+    lexer_class_or_name = CustomLexer if LEXER == 'custom' else LEXER
     def _Lark(grammar, **kwargs):
-        return Lark(grammar, lexer=LEXER, parser=PARSER, propagate_positions=True, **kwargs)
+        return Lark(grammar, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs)
     def _Lark_open(gfilename, **kwargs):
-        return Lark.open(gfilename, lexer=LEXER, parser=PARSER, propagate_positions=True, **kwargs)
+        return Lark.open(gfilename, lexer=lexer_class_or_name, parser=PARSER, propagate_positions=True, **kwargs)
     class _TestParser(unittest.TestCase):
         def test_basic1(self):
             g = _Lark("""start: a+ b a* "b" a*
@@ -1532,7 +1542,7 @@ def _make_parser_test(LEXER, PARSER):
             parser = _Lark(grammar)
 
-        @unittest.skipIf(PARSER!='lalr', "Serialize currently only works for LALR parsers (though it should be easy to extend)")
+        @unittest.skipIf(PARSER!='lalr' or LEXER=='custom', "Serialize currently only works for LALR parsers without custom lexers (though it should be easy to extend)")
         def test_serialize(self):
             grammar = """
             start: _ANY b "C"
@@ -1594,6 +1604,7 @@ _TO_TEST = [
         ('dynamic_complete', 'earley'),
         ('standard', 'lalr'),
         ('contextual', 'lalr'),
+        ('custom', 'lalr'),
 #        (None, 'earley'),
 ]
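
Registering ('custom', 'lalr') here is what generates the TestLalrCustom suite imported in the tests/__main__ hunk above. Assuming the test package is importable as tests (as its relative import suggests), the new suite can also be run on its own, roughly like this:

import unittest

# Assumes the repository's test package is on the path and named `tests`.
from tests.test_parser import TestLalrCustom  # generated from the ('custom', 'lalr') entry

suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestLalrCustom)
unittest.TextTestRunner(verbosity=2).run(suite)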