mirror of
https://github.com/justinian/jsix.git
synced 2025-12-10 08:24:32 -08:00
This commit contains a couple large, interdependent changes: - In preparation for capability checking, the _syscall_verify_* functions now load most handles passed in, and verify that they exist and are of the correct type. Lists and out-handles are not converted to objects. - Also in preparation for capability checking, the internal representation of handles has changed. j6_handle_t is now 32 bits, and a new j6_cap_t (also 32 bits) is added. Handles of a process are now a util::map<j6_handle_t, handle> where handle is a new struct containing the id, capabilities, and object pointer. - The kernel object definition DSL gained a few changes to support auto generating the handle -> object conversion in the _syscall_verify_* functions, mostly knowing the object type, and an optional "cname" attribute on objects where their names differ from C++ code. (Specifically vma/vm_area) - Kernel object code and other code under kernel/objects is now in a new obj:: namespace, because fuck you <cstdlib> for putting "system" in the global namespace. Why even have that header then? - Kernel object types constructed with the construct_handle helper now have a creation_caps static member to declare what capabilities a newly created object's handle should have.
2906 lines
165 KiB
Python
2906 lines
165 KiB
Python
# The file was automatically generated by Lark v0.12.0
# Version of the Lark release that produced this standalone parser.
__version__ = "0.12.0"
|
|
|
|
#
|
|
#
|
|
# Lark Stand-alone Generator Tool
|
|
# ----------------------------------
|
|
# Generates a stand-alone LALR(1) parser with a standard lexer
|
|
#
|
|
# Git: https://github.com/erezsh/lark
|
|
# Author: Erez Shinan (erezshin@gmail.com)
|
|
#
|
|
#
|
|
# >>> LICENSE
|
|
#
|
|
# This tool and its generated code use a separate license from Lark,
|
|
# and are subject to the terms of the Mozilla Public License, v. 2.0.
|
|
# If a copy of the MPL was not distributed with this
|
|
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
|
#
|
|
# If you wish to purchase a commercial license for this tool and its
|
|
# generated code, you may contact me via email or otherwise.
|
|
#
|
|
# If MPL2 is incompatible with your free or open-source project,
|
|
# contact me and we'll work it out.
|
|
#
|
|
#
|
|
|
|
from io import open
|
|
|
|
|
|
|
|
class LarkError(Exception):
    """Root of the Lark exception hierarchy."""
    pass
|
|
|
|
|
|
class ConfigurationError(LarkError, ValueError):
    """Raised when Lark is given an invalid configuration value."""
    pass
|
|
|
|
|
|
def assert_config(value, options, msg='Got %r, expected one of %s'):
    """Validate that *value* is one of *options*, else raise ConfigurationError.

    *msg* is a %-format template receiving (value, options).
    """
    if value in options:
        return
    raise ConfigurationError(msg % (value, options))
|
|
|
|
|
|
class GrammarError(LarkError):
    """Raised when the grammar definition itself is invalid."""
    pass
|
|
|
|
|
|
class ParseError(LarkError):
    """Base class for errors raised during parsing."""
    pass
|
|
|
|
|
|
class LexError(LarkError):
    """Base class for errors raised during tokenization."""
    pass
|
|
|
|
|
|
class UnexpectedInput(LarkError):
    """Base class for parse/lex errors that point at a position in the input.

    Provides helpers to display the offending context and to match this
    error against user-supplied example inputs.
    """
    pos_in_stream = None        # absolute offset of the error in the input
    _terminals_by_name = None   # optional {name: TerminalDef} for pretty messages

    def get_context(self, text, span=40):
        """Return up to *span* characters around the error position, plus a
        second line with a '^' marker under the error column.

        *text* may be str or bytes; bytes output is decoded with
        backslash-replacement so it is always printable.
        """
        assert self.pos_in_stream is not None, self
        pos = self.pos_in_stream
        start = max(pos - span, 0)
        end = pos + span
        if not isinstance(text, bytes):
            before = text[start:pos].rsplit('\n', 1)[-1]
            after = text[pos:end].split('\n', 1)[0]
            return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
        else:
            before = text[start:pos].rsplit(b'\n', 1)[-1]
            after = text[pos:end].split(b'\n', 1)[0]
            return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")

    def match_examples(self, parse_fn, examples, token_type_match_fallback=False, use_accepts=False):
        """Given a parser callable and {label: [malformed_input, ...]} examples,
        return the label whose example fails in the same parser state.

        Match strength, best first: exact offending token, same token type
        (only if *token_type_match_fallback*), then merely same parser state.
        """
        assert self.state is not None, "Not supported for this exception"

        if isinstance(examples, dict):
            examples = examples.items()

        candidate = (None, False)  # (label, matched_by_token_type)
        for i, (label, example) in enumerate(examples):
            assert not isinstance(example, STRING_TYPE)

            for j, malformed in enumerate(example):
                try:
                    parse_fn(malformed)
                except UnexpectedInput as ut:
                    if ut.state == self.state:
                        if use_accepts and hasattr(self, 'accepts') and ut.accepts != self.accepts:
                            # Same state but different accept-set: not the same error.
                            logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
                                         (self.state, self.accepts, ut.accepts, i, j))
                            continue
                        try:
                            if ut.token == self.token:  # exact token match wins immediately
                                logger.debug("Exact Match at example [%s][%s]" % (i, j))
                                return label

                            if token_type_match_fallback:
                                # Token-type match is weaker; remember only the first one.
                                if (ut.token.type == self.token.type) and not candidate[-1]:
                                    logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
                                    candidate = label, True

                        except AttributeError:
                            # Lexer errors have no .token; fall through to state match.
                            pass
                        if candidate[0] is None:
                            logger.debug("Same State match at example [%s][%s]" % (i, j))
                            candidate = label, False

        return candidate[0]

    def _format_expected(self, expected):
        # Render terminal names via their user-facing form when we know them.
        if self._terminals_by_name:
            d = self._terminals_by_name
            expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
        return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
|
|
|
|
|
|
class UnexpectedEOF(ParseError, UnexpectedInput):
    """Raised when input ends while the parser still expects more tokens."""

    def __init__(self, expected, state=None, terminals_by_name=None):
        # expected: the terminal names that would have been legal here.
        super(UnexpectedEOF, self).__init__()

        self.expected = expected
        self.state = state
        # NOTE(review): relative import looks out of place in a standalone
        # parser file — confirm it resolves in this deployment.
        from .lexer import Token
        self.token = Token("<EOF>", "")  # synthetic token standing in for end-of-input

        self.pos_in_stream = -1
        self.line = -1
        self.column = -1
        self._terminals_by_name = terminals_by_name

    def __str__(self):
        message = "Unexpected end-of-input. "
        message += self._format_expected(self.expected)
        return message
|
|
|
|
|
|
class UnexpectedCharacters(LexError, UnexpectedInput):
    """Raised by the lexer when no terminal matches at the current position."""

    def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
                 terminals_by_name=None, considered_rules=None):
        super(UnexpectedCharacters, self).__init__()

        # Position information
        self.line = line
        self.column = column
        self.pos_in_stream = lex_pos
        self.state = state
        self._terminals_by_name = terminals_by_name

        # Diagnostic detail about what the lexer was trying to do
        self.allowed = allowed
        self.considered_tokens = considered_tokens
        self.considered_rules = considered_rules
        self.token_history = token_history

        if isinstance(seq, bytes):
            # Decode a single byte printably for the message.
            self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
        else:
            self.char = seq[lex_pos]
        self._context = self.get_context(seq)

    def __str__(self):
        message = "No terminal matches '%s' in the current parser context, at line %d col %d" % (self.char, self.line, self.column)
        message += '\n\n' + self._context
        if self.allowed:
            message += self._format_expected(self.allowed)
        if self.token_history:
            message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
        return message
|
|
|
|
|
|
class UnexpectedToken(ParseError, UnexpectedInput):
    """Raised by the parser when it receives a token it cannot accept."""

    def __init__(self, token, expected, considered_rules=None, state=None, interactive_parser=None, terminals_by_name=None, token_history=None):
        super(UnexpectedToken, self).__init__()

        # Position info is copied off the token when available.
        self.line = getattr(token, 'line', '?')
        self.column = getattr(token, 'column', '?')
        self.pos_in_stream = getattr(token, 'start_pos', None)
        self.state = state

        self.token = token
        self.expected = expected  # raw expected-set; .accepts is preferred when available

        self._accepts = NO_VALUE  # computed lazily by the `accepts` property
        self.considered_rules = considered_rules
        self.interactive_parser = interactive_parser
        self._terminals_by_name = terminals_by_name
        self.token_history = token_history

    @property
    def accepts(self):
        """Terminals the parser would actually accept here (lazily computed
        from the interactive parser, if one was supplied)."""
        if self._accepts is NO_VALUE:
            self._accepts = self.interactive_parser and self.interactive_parser.accepts()
        return self._accepts

    def __str__(self):
        message = ("Unexpected token %r at line %s, column %s.\n%s"
                   % (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
        if self.token_history:
            message += "Previous tokens: %r\n" % self.token_history

        return message

    @property
    def puppet(self):
        # Deprecated alias kept for backward compatibility.
        warn("UnexpectedToken.puppet attribute has been renamed to interactive_parser", DeprecationWarning)
        return self.interactive_parser
|
|
|
|
|
|
|
|
class VisitError(LarkError):
    """Wraps an exception raised inside a visitor/transformer callback,
    recording which rule and which tree/token object triggered it."""

    def __init__(self, rule, obj, orig_exc):
        message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc)
        super(VisitError, self).__init__(message)

        self.rule = rule
        self.obj = obj
        self.orig_exc = orig_exc
|
|
|
|
|
|
import sys, re
import logging
from io import open

# Library-wide logger; silenced by default so embedding applications
# opt in to Lark's diagnostics explicitly.
logger = logging.getLogger("lark")
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.CRITICAL)
|
|
|
|
# Python 2/3 compatibility shims.
if sys.version_info[0] > 2:
    from abc import ABC, abstractmethod
else:
    from abc import ABCMeta, abstractmethod

    class ABC(object):  # Python 2 stand-in for abc.ABC
        __slots__ = ()
        # Fixed: was misspelled `__metclass__`, so ABCMeta was never applied
        # on Python 2 and @abstractmethod went unenforced.
        __metaclass__ = ABCMeta


# True when running on Python 3.6+ (enables inline regex flags, etc.)
Py36 = (sys.version_info[:2] >= (3, 6))

# Unique sentinel distinguishing "not computed yet" from None.
NO_VALUE = object()
|
|
|
|
|
|
def classify(seq, key=None, value=None):
    """Group the items of *seq* into a dict of lists.

    Each item contributes key(item) -> [value(item), ...]; a missing *key*
    or *value* callable means the item itself is used.
    """
    groups = {}
    for item in seq:
        k = item if key is None else key(item)
        v = item if value is None else value(item)
        groups.setdefault(k, []).append(v)
    return groups
|
|
|
|
|
|
def _deserialize(data, namespace, memo):
    """Recursively rebuild objects produced by Serialize.serialize().

    dicts carrying '__type__' dispatch to the matching class in *namespace*;
    dicts carrying '@' resolve through the *memo* table; other containers
    are rebuilt element-wise; all other values pass through unchanged.
    """
    if isinstance(data, dict):
        if '__type__' in data:  # marker for a serialized object
            class_ = namespace[data['__type__']]
            return class_.deserialize(data, memo)
        elif '@' in data:  # memoized back-reference
            return memo[data['@']]
        return {key:_deserialize(value, namespace, memo) for key, value in data.items()}
    elif isinstance(data, list):
        return [_deserialize(value, namespace, memo) for value in data]
    return data
|
|
|
|
|
|
class Serialize(object):
    """Mixin providing JSON-ready (de)serialization.

    Subclasses declare the attributes to persist in __serialize_fields__
    and, when fields hold nested Serialize objects, the candidate classes
    in __serialize_namespace__.
    """

    def memo_serialize(self, types_to_memoize):
        """Serialize self plus a shared memo table for the given types."""
        memo = SerializeMemoizer(types_to_memoize)
        return self.serialize(memo), memo.serialize()

    def serialize(self, memo=None):
        # Memoized objects collapse to a small '@'-reference dict.
        if memo and memo.in_types(self):
            return {'@': memo.memoized.get(self)}

        fields = getattr(self, '__serialize_fields__')
        res = {f: _serialize(getattr(self, f), memo) for f in fields}
        res['__type__'] = type(self).__name__
        if hasattr(self, '_serialize'):
            # Hook for subclasses to add extra entries.
            self._serialize(res, memo)
        return res

    @classmethod
    def deserialize(cls, data, memo):
        """Rebuild an instance from a dict made by serialize()."""
        namespace = getattr(cls, '__serialize_namespace__', [])
        namespace = {c.__name__:c for c in namespace}

        fields = getattr(cls, '__serialize_fields__')

        if '@' in data:
            return memo[data['@']]

        # Bypass __init__; fields are restored directly.
        inst = cls.__new__(cls)
        for f in fields:
            try:
                setattr(inst, f, _deserialize(data[f], namespace, memo))
            except KeyError as e:
                raise KeyError("Cannot find key for class", cls, e)

        if hasattr(inst, '_deserialize'):
            # Hook for subclasses to recompute derived state.
            inst._deserialize()

        return inst
|
|
|
|
|
|
class SerializeMemoizer(Serialize):
    """Shared-object table used during (de)serialization so repeated objects
    of the chosen types are stored once and referenced by id."""

    __serialize_fields__ = 'memoized',

    def __init__(self, types_to_memoize):
        self.types_to_memoize = tuple(types_to_memoize)
        self.memoized = Enumerator()  # object -> small integer id

    def in_types(self, value):
        # Should *value* be memoized rather than serialized inline?
        return isinstance(value, self.types_to_memoize)

    def serialize(self):
        # Emit {id: serialized-object} for the deserializer's memo table.
        return _serialize(self.memoized.reversed(), None)

    @classmethod
    def deserialize(cls, data, namespace, memo):
        return _deserialize(data, namespace, memo)
|
|
|
|
|
|
try:
    STRING_TYPE = basestring  # Python 2: covers str and unicode
except NameError:  # Python 3
    STRING_TYPE = str
|
|
|
|
|
|
import types
from functools import wraps, partial
from contextlib import contextmanager

# The unicode string type on both Python 2 and 3.
Str = type(u'')
try:
    classtype = types.ClassType  # Python 2 old-style classes
except AttributeError:
    classtype = type  # Python 3
|
|
|
|
|
|
|
|
def smart_decorator(f, create_decorator):
    """Wrap callable *f* via create_decorator(func, with_self), choosing the
    right unwrapping for plain functions, classes/builtins, bound methods,
    partials, and callable objects."""
    if isinstance(f, types.FunctionType):
        return wraps(f)(create_decorator(f, True))

    elif isinstance(f, (classtype, type, types.BuiltinFunctionType)):
        # Classes and builtins take no implicit self.
        return wraps(f)(create_decorator(f, False))

    elif isinstance(f, types.MethodType):
        return wraps(f)(create_decorator(f.__func__, True))

    elif isinstance(f, partial):
        # partial objects don't expose __name__, so wrap the underlying func
        # and re-route calls through the partial (dropping the leading self).
        return wraps(f.func)(create_decorator(lambda *args, **kw: f(*args[1:], **kw), True))

    else:
        # Fallback: assume a callable object exposing __func__.__call__.
        # NOTE(review): this branch expects f.__func__ to exist — confirm
        # which caller reaches it.
        return create_decorator(f.__func__.__call__, True)
|
|
|
|
|
|
# Optional third-party `regex` module (needed for \p{...} unicode categories).
try:
    import regex
except ImportError:
    regex = None

import sre_parse
import sre_constants
# Matches \p{Category} escapes, which stdlib `re` cannot handle.
categ_pattern = re.compile(r'\\p{[A-Za-z_]+}')
|
|
|
|
def get_regexp_width(expr):
    """Return [min_width, max_width] of strings matchable by regexp *expr*.

    Uses sre_parse for the computation; \p{...} escapes are only supported
    when the third-party `regex` module is installed.
    """
    if regex:
        # sre_parse cannot handle \p{...}; substitute a 1-char placeholder
        # purely for width analysis — it does not affect actual matching.
        regexp_final = re.sub(categ_pattern, 'A', expr)
    else:
        if re.search(categ_pattern, expr):
            raise ImportError('`regex` module must be installed in order to use Unicode categories.', expr)
        regexp_final = expr
    try:
        return [int(x) for x in sre_parse.parse(regexp_final).getwidth()]
    except sre_constants.error:
        if not regex:
            raise ValueError(expr)
        else:
            # sre_parse rejected it but `regex` may still accept it; probe
            # whether it can match the empty string to pick a minimum width.
            c = regex.compile(regexp_final)
            if c.match('') is None:
                return 1, sre_constants.MAXREPEAT
            else:
                return 0, sre_constants.MAXREPEAT
|
|
|
|
|
|
from collections import OrderedDict
|
|
|
|
|
|
class Meta:
    """Holder for a tree node's positional metadata; starts out empty."""
    def __init__(self):
        self.empty = True
|
|
|
|
|
|
class Tree(object):
    """A parse-tree node: a rule name (`data`) plus a list of `children`
    that are Trees or tokens. Metadata is created lazily via `meta`."""

    def __init__(self, data, children, meta=None):
        self.data = data
        self.children = children
        self._meta = meta

    @property
    def meta(self):
        # Lazily allocate the Meta record on first access.
        if self._meta is None:
            self._meta = Meta()
        return self._meta

    def __repr__(self):
        return 'Tree(%r, %r)' % (self.data, self.children)

    def _pretty_label(self):
        return self.data

    def _pretty(self, level, indent_str):
        # Single non-tree child is shown inline on the same line.
        if len(self.children) == 1 and not isinstance(self.children[0], Tree):
            return [indent_str*level, self._pretty_label(), '\t', '%s' % (self.children[0],), '\n']

        l = [indent_str*level, self._pretty_label(), '\n']
        for n in self.children:
            if isinstance(n, Tree):
                l += n._pretty(level+1, indent_str)
            else:
                l += [indent_str*(level+1), '%s' % (n,), '\n']

        return l

    def pretty(self, indent_str='  '):
        """Return an indented, human-readable rendering of the tree."""
        return ''.join(self._pretty(0, indent_str))

    def __eq__(self, other):
        try:
            return self.data == other.data and self.children == other.children
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash((self.data, tuple(self.children)))

    def iter_subtrees(self):
        """Iterate over all subtrees, children before their parents
        (breadth-first collection, then reversed)."""
        queue = [self]
        subtrees = OrderedDict()  # keyed by id() to dedupe shared nodes
        for subtree in queue:
            subtrees[id(subtree)] = subtree
            queue += [c for c in reversed(subtree.children)
                      if isinstance(c, Tree) and id(c) not in subtrees]

        del queue
        return reversed(list(subtrees.values()))

    def find_pred(self, pred):
        """Return an iterator over subtrees for which pred(subtree) is true."""
        return filter(pred, self.iter_subtrees())

    def find_data(self, data):
        """Return an iterator over subtrees whose rule name equals *data*."""
        return self.find_pred(lambda t: t.data == data)
|
|
|
|
|
|
from inspect import getmembers, getmro
|
|
|
|
|
|
class Discard(Exception):
    """Raised inside a transformer/visitor callback to drop the current
    value from the resulting tree."""
    pass
|
|
|
|
##
|
|
|
|
|
|
|
|
class _Decoratable:
    """Base for classes whose public callback methods can be bulk-wrapped
    by a decorator (used by v_args)."""

    @classmethod
    def _apply_decorator(cls, decorator, **kwargs):
        mro = getmro(cls)
        assert mro[0] is cls
        # Names inherited from library base classes; skip them unless
        # the subclass overrode them.
        libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)}
        for name, value in getmembers(cls):

            # Only user-defined, public callbacks get wrapped.
            if name.startswith('_') or (name in libmembers and name not in cls.__dict__):
                continue
            if not callable(value):
                continue

            # Skip methods that were already wrapped by v_args.
            if hasattr(cls.__dict__[name], 'vargs_applied') or hasattr(value, 'vargs_applied'):
                continue

            static = isinstance(cls.__dict__[name], (staticmethod, classmethod))
            setattr(cls, name, decorator(value, static=static, **kwargs))
        return cls

    def __class_getitem__(cls, _):
        # Allow subscripting (e.g. Transformer[...]) as a typing no-op.
        return cls
|
|
|
|
|
|
class Transformer(_Decoratable):
    """Bottom-up tree rewriter: visits children before parents, calling the
    method named after each rule (falling back to __default__/__default_token__)
    and replacing nodes with the returned values."""

    __visit_tokens__ = True  # also call methods for Token leaves by default

    def __init__(self, visit_tokens=True):
        self.__visit_tokens__ = visit_tokens

    def _call_userfunc(self, tree, new_children=None):
        # Dispatch to the method named tree.data, honoring any v_args wrapper.
        children = new_children if new_children is not None else tree.children
        try:
            f = getattr(self, tree.data)
        except AttributeError:
            return self.__default__(tree.data, children, tree.meta)
        else:
            try:
                wrapper = getattr(f, 'visit_wrapper', None)
                if wrapper is not None:
                    return f.visit_wrapper(f, tree.data, children, tree.meta)
                else:
                    return f(children)
            except (GrammarError, Discard):
                raise  # control-flow exceptions pass through untouched
            except Exception as e:
                raise VisitError(tree.data, tree, e)

    def _call_userfunc_token(self, token):
        # Same dispatch for Token leaves, keyed by token.type.
        try:
            f = getattr(self, token.type)
        except AttributeError:
            return self.__default_token__(token)
        else:
            try:
                return f(token)
            except (GrammarError, Discard):
                raise
            except Exception as e:
                raise VisitError(token.type, token, e)

    def _transform_children(self, children):
        # Transform each child, silently dropping ones that raise Discard.
        for c in children:
            try:
                if isinstance(c, Tree):
                    yield self._transform_tree(c)
                elif self.__visit_tokens__ and isinstance(c, Token):
                    yield self._call_userfunc_token(c)
                else:
                    yield c
            except Discard:
                pass

    def _transform_tree(self, tree):
        children = list(self._transform_children(tree.children))
        return self._call_userfunc(tree, children)

    def transform(self, tree):
        """Transform *tree* bottom-up and return the result."""
        return self._transform_tree(tree)

    def __mul__(self, other):
        """Chain transformers: (a * b).transform(t) == b.transform(a.transform(t))."""
        return TransformerChain(self, other)

    def __default__(self, data, children, meta):
        """Fallback for rules with no matching method: rebuild the node."""
        return Tree(data, children, meta)

    def __default_token__(self, token):
        """Fallback for tokens with no matching method: keep the token."""
        return token
|
|
|
|
|
|
def merge_transformers(base_transformer=None, **transformers_to_merge):
    """Copy each public method of the given transformers onto
    *base_transformer* under the name "<prefix>__<method>".

    Raises AttributeError if a prefixed name would collide.
    """
    if base_transformer is None:
        base_transformer = Transformer()
    for prefix, transformer in transformers_to_merge.items():
        for method_name in dir(transformer):
            method = getattr(transformer, method_name)
            if not callable(method):
                continue
            # Skip dunders/privates and the transform() entry point itself.
            if method_name.startswith("_") or method_name == "transform":
                continue
            prefixed_method = prefix + "__" + method_name
            if hasattr(base_transformer, prefixed_method):
                raise AttributeError("Cannot merge: method '%s' appears more than once" % prefixed_method)

            setattr(base_transformer, prefixed_method, method)

    return base_transformer
|
|
|
|
|
|
class InlineTransformer(Transformer):  # deprecated: use v_args(inline=True) instead
    """Transformer variant that splats children as positional arguments."""

    def _call_userfunc(self, tree, new_children=None):
        # Same dispatch as Transformer, but calls f(*children) not f(children).
        children = new_children if new_children is not None else tree.children
        try:
            f = getattr(self, tree.data)
        except AttributeError:
            return self.__default__(tree.data, children, tree.meta)
        else:
            return f(*children)
|
|
|
|
|
|
class TransformerChain(object):
    """Applies a sequence of transformers left-to-right (built via `a * b`)."""

    def __init__(self, *transformers):
        self.transformers = transformers

    def transform(self, tree):
        for t in self.transformers:
            tree = t.transform(tree)
        return tree

    def __mul__(self, other):
        return TransformerChain(*self.transformers + (other,))
|
|
|
|
|
|
class Transformer_InPlace(Transformer):
    """Non-recursive transformer that mutates the tree's children in place
    while iterating subtrees bottom-up."""

    def _transform_tree(self, tree):
        # Children were already transformed by transform(); just dispatch.
        return self._call_userfunc(tree)

    def transform(self, tree):
        for subtree in tree.iter_subtrees():
            subtree.children = list(self._transform_children(subtree.children))

        return self._transform_tree(tree)
|
|
|
|
|
|
class Transformer_NonRecursive(Transformer):
    """Transformer that avoids Python recursion by doing an explicit
    postfix traversal with a work stack (safe for very deep trees)."""

    def transform(self, tree):
        # Phase 1: collect nodes in reverse-postfix order.
        rev_postfix = []
        q = [tree]
        while q:
            t = q.pop()
            rev_postfix.append(t)
            if isinstance(t, Tree):
                q += t.children

        # Phase 2: replay in postfix order, consuming transformed children
        # off the value stack as each Tree is reached.
        stack = []
        for x in reversed(rev_postfix):
            if isinstance(x, Tree):
                size = len(x.children)
                if size:
                    args = stack[-size:]
                    del stack[-size:]
                else:
                    args = []
                stack.append(self._call_userfunc(x, args))
            elif self.__visit_tokens__ and isinstance(x, Token):
                stack.append(self._call_userfunc_token(x))
            else:
                stack.append(x)

        # Exactly one value must remain: the transformed root.
        t ,= stack

        return t
|
|
|
|
|
|
class Transformer_InPlaceRecursive(Transformer):
    """Recursive transformer that mutates each node's children in place."""

    def _transform_tree(self, tree):
        tree.children = list(self._transform_children(tree.children))
        return self._call_userfunc(tree)
|
|
|
|
|
|
##
|
|
|
|
|
|
class VisitorBase:
    """Shared dispatch for visitors: call the method named after the rule,
    or __default__ when none exists."""

    def _call_userfunc(self, tree):
        return getattr(self, tree.data, self.__default__)(tree)

    def __default__(self, tree):
        """Fallback callback: leave the tree untouched."""
        return tree

    def __class_getitem__(cls, _):
        # Allow subscripting (typing no-op).
        return cls
|
|
|
|
|
|
class Visitor(VisitorBase):
    """Visits every subtree for side effects; does not rewrite the tree."""

    def visit(self, tree):
        """Visit subtrees bottom-up (children before parents)."""
        for subtree in tree.iter_subtrees():
            self._call_userfunc(subtree)
        return tree

    def visit_topdown(self,tree):
        """Visit subtrees top-down (parents before children)."""
        # NOTE(review): relies on Tree.iter_subtrees_topdown, defined
        # elsewhere in this file.
        for subtree in tree.iter_subtrees_topdown():
            self._call_userfunc(subtree)
        return tree
|
|
|
|
|
|
class Visitor_Recursive(VisitorBase):
    """Recursive visitor: same contract as Visitor, implemented with
    plain recursion (slightly faster, limited by Python's stack depth)."""

    def visit(self, tree):
        """Visit subtrees bottom-up (children before parents)."""
        for child in tree.children:
            if isinstance(child, Tree):
                self.visit(child)

        self._call_userfunc(tree)
        return tree

    def visit_topdown(self,tree):
        """Visit subtrees top-down (parents before children)."""
        self._call_userfunc(tree)

        for child in tree.children:
            if isinstance(child, Tree):
                self.visit_topdown(child)

        return tree
|
|
|
|
|
|
def visit_children_decor(func):
    """Decorator for Interpreter methods: visit the node's children first
    and pass the resulting values to *func* instead of the raw tree."""
    @wraps(func)
    def inner(cls, tree):
        values = cls.visit_children(tree)
        return func(cls, values)
    return inner
|
|
|
|
|
|
class Interpreter(_Decoratable):
    """Top-down evaluator: each rule method decides itself whether/when to
    visit its children (unlike Transformer, which is bottom-up)."""

    def visit(self, tree):
        f = getattr(self, tree.data)
        wrapper = getattr(f, 'visit_wrapper', None)
        if wrapper is not None:
            return f.visit_wrapper(f, tree.data, tree.children, tree.meta)
        else:
            return f(tree)

    def visit_children(self, tree):
        # Evaluate Tree children; pass tokens and other leaves through.
        return [self.visit(child) if isinstance(child, Tree) else child
                for child in tree.children]

    def __getattr__(self, name):
        # Any missing rule method falls back to visiting children.
        return self.__default__

    def __default__(self, tree):
        return self.visit_children(tree)
|
|
|
|
|
|
##
|
|
|
|
|
|
def _apply_decorator(obj, decorator, **kwargs):
|
|
try:
|
|
_apply = obj._apply_decorator
|
|
except AttributeError:
|
|
return decorator(obj, **kwargs)
|
|
else:
|
|
return _apply(decorator, **kwargs)
|
|
|
|
|
|
def _inline_args__func(func):
    """Wrap *func* so it receives the children list splatted as
    positional arguments (f(*children))."""
    @wraps(func)
    def create_decorator(_f, with_self):
        if with_self:
            def f(self, children):
                return _f(self, *children)
        else:
            def f(self, children):
                return _f(*children)
        return f

    return smart_decorator(func, create_decorator)
|
|
|
|
|
|
def inline_args(obj):  # deprecated: use v_args(inline=True) instead
    """Decorator that splats children into positional arguments."""
    return _apply_decorator(obj, _inline_args__func)
|
|
|
|
|
|
def _visitor_args_func_dec(func, visit_wrapper=None, static=False):
    """Wrap a visitor callback, attaching the v_args calling convention.

    The returned function carries `vargs_applied` (so it is not wrapped
    twice) and `visit_wrapper` (consulted at dispatch time).
    """
    def create_decorator(_f, with_self):
        if with_self:
            def f(self, *args, **kwargs):
                return _f(self, *args, **kwargs)
        else:
            def f(self, *args, **kwargs):
                return _f(*args, **kwargs)
        return f

    if static:
        # static/classmethods never receive self.
        f = wraps(func)(create_decorator(func, False))
    else:
        f = smart_decorator(func, create_decorator)
    f.vargs_applied = True
    f.visit_wrapper = visit_wrapper
    return f
|
|
|
|
|
|
def _vargs_inline(f, _data, children, _meta):
    # v_args(inline=True): splat children as positional arguments.
    return f(*children)
|
|
def _vargs_meta_inline(f, _data, children, meta):
    # v_args(meta=True, inline=True): meta first, then splatted children.
    return f(meta, *children)
|
|
def _vargs_meta(f, _data, children, meta):
    # v_args(meta=True): children list plus meta (argument order kept for
    # backward compatibility).
    return f(children, meta)
|
|
|
|
def _vargs_tree(f, data, children, meta):
    # v_args(tree=True): pass the whole (rebuilt) tree node.
    return f(Tree(data, children, meta))
|
|
|
|
|
|
def v_args(inline=False, meta=False, tree=False, wrapper=None):
    """Decorator factory controlling how visitor/transformer callbacks
    receive their arguments (inline/meta/tree are mutually constrained;
    *wrapper* supplies a fully custom calling convention)."""
    if tree and (meta or inline):
        raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.")

    # Select the wrapper implementing the requested convention.
    func = None
    if meta:
        if inline:
            func = _vargs_meta_inline
        else:
            func = _vargs_meta
    elif inline:
        func = _vargs_inline
    elif tree:
        func = _vargs_tree

    if wrapper is not None:
        if func is not None:
            raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.")
        func = wrapper

    def _visitor_args_dec(obj):
        return _apply_decorator(obj, _visitor_args_func_dec, visit_wrapper=func)
    return _visitor_args_dec
|
|
|
|
|
|
|
|
|
|
class Symbol(Serialize):
    """Base grammar symbol (terminal or nonterminal), identified by name."""
    __slots__ = ('name',)

    is_term = NotImplemented  # set by subclasses

    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        assert isinstance(other, Symbol), other
        # Terminals and nonterminals with the same name are distinct.
        return self.is_term == other.is_term and self.name == other.name

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(self.name)

    def __repr__(self):
        return '%s(%r)' % (type(self).__name__, self.name)

    fullrepr = property(__repr__)
|
|
|
|
|
|
class Terminal(Symbol):
    """A terminal symbol; filter_out marks it for removal from the tree."""
    __serialize_fields__ = 'name', 'filter_out'

    is_term = True

    def __init__(self, name, filter_out=False):
        self.name = name
        self.filter_out = filter_out

    @property
    def fullrepr(self):
        return '%s(%r, %r)' % (type(self).__name__, self.name, self.filter_out)
|
|
|
|
|
|
class NonTerminal(Symbol):
    """A nonterminal (rule) symbol."""
    __serialize_fields__ = 'name',

    is_term = False
|
|
|
|
|
|
class RuleOptions(Serialize):
    """Per-rule flags controlling tree construction and parse priority."""
    __serialize_fields__ = 'keep_all_tokens', 'expand1', 'priority', 'template_source', 'empty_indices'

    def __init__(self, keep_all_tokens=False, expand1=False, priority=None, template_source=None, empty_indices=()):
        self.keep_all_tokens = keep_all_tokens
        self.expand1 = expand1          # collapse single-child nodes
        self.priority = priority
        self.template_source = template_source
        self.empty_indices = empty_indices

    def __repr__(self):
        # NOTE(review): empty_indices is deliberately(?) left out of repr —
        # confirm against upstream before relying on repr round-trips.
        return 'RuleOptions(%r, %r, %r, %r)' % (
            self.keep_all_tokens,
            self.expand1,
            self.priority,
            self.template_source
        )
|
|
|
|
|
|
class Rule(Serialize):
    """A grammar production: origin (nonterminal) -> expansion (symbols).

    Equality and hashing are based on (origin, expansion) only, so rules
    differing merely by alias/order/options compare equal.
    """
    __slots__ = ('origin', 'expansion', 'alias', 'options', 'order', '_hash')

    __serialize_fields__ = 'origin', 'expansion', 'order', 'alias', 'options'
    __serialize_namespace__ = Terminal, NonTerminal, RuleOptions

    def __init__(self, origin, expansion, order=0, alias=None, options=None):
        self.origin = origin
        self.expansion = expansion
        self.alias = alias
        self.order = order
        self.options = options or RuleOptions()
        # Precomputed since rules are hashed heavily by the parser tables.
        self._hash = hash((self.origin, tuple(self.expansion)))

    def _deserialize(self):
        # _hash is not serialized; rebuild it after deserialization.
        self._hash = hash((self.origin, tuple(self.expansion)))

    def __str__(self):
        return '<%s : %s>' % (self.origin.name, ' '.join(x.name for x in self.expansion))

    def __repr__(self):
        return 'Rule(%r, %r, %r, %r)' % (self.origin, self.expansion, self.alias, self.options)

    def __hash__(self):
        return self._hash

    def __eq__(self, other):
        if not isinstance(other, Rule):
            return False
        return self.origin == other.origin and self.expansion == other.expansion
|
|
|
|
|
|
|
|
from warnings import warn
|
|
from copy import copy
|
|
|
|
|
|
class Pattern(Serialize):
    """Abstract terminal pattern (literal string or regexp) with regex flags."""
    raw = None   # original grammar source text, when known
    type = None  # "str" or "re", set by subclasses

    def __init__(self, value, flags=(), raw=None):
        self.value = value
        self.flags = frozenset(flags)
        self.raw = raw

    def __repr__(self):
        return repr(self.to_regexp())

    # Identity is (class, value, flags); raw is display-only.
    def __hash__(self):
        return hash((type(self), self.value, self.flags))

    def __eq__(self, other):
        return type(self) == type(other) and self.value == other.value and self.flags == other.flags

    def to_regexp(self):
        raise NotImplementedError()

    def min_width(self):
        raise NotImplementedError()

    def max_width(self):
        raise NotImplementedError()

    if Py36:
        # Python 3.6+ supports scoped inline flags: (?f:...)
        def _get_flags(self, value):
            for f in self.flags:
                value = ('(?%s:%s)' % (f, value))
            return value

    else:
        # Older Pythons only allow global inline flags at the front.
        def _get_flags(self, value):
            for f in self.flags:
                value = ('(?%s)' % f) + value
            return value
|
|
|
|
|
|
|
|
class PatternStr(Pattern):
    """A literal-string pattern; regexp-escaped when compiled."""
    __serialize_fields__ = 'value', 'flags'

    type = "str"

    def to_regexp(self):
        return self._get_flags(re.escape(self.value))

    @property
    def min_width(self):
        return len(self.value)
    # A literal always matches exactly its own length.
    max_width = min_width
|
|
|
|
|
|
class PatternRE(Pattern):
    """A regular-expression pattern; match width is computed lazily."""
    __serialize_fields__ = 'value', 'flags', '_width'

    type = "re"

    def to_regexp(self):
        return self._get_flags(self.value)

    _width = None  # cached (min, max) match width
    def _get_width(self):
        if self._width is None:
            self._width = get_regexp_width(self.to_regexp())
        return self._width

    @property
    def min_width(self):
        return self._get_width()[0]

    @property
    def max_width(self):
        return self._get_width()[1]
|
|
|
|
|
|
class TerminalDef(Serialize):
    """A named terminal definition: name + pattern + match priority."""
    __serialize_fields__ = 'name', 'pattern', 'priority'
    __serialize_namespace__ = PatternStr, PatternRE

    def __init__(self, name, pattern, priority=1):
        assert isinstance(pattern, Pattern), pattern
        self.name = name
        self.pattern = pattern
        self.priority = priority

    def __repr__(self):
        return '%s(%r, %r)' % (type(self).__name__, self.name, self.pattern)

    def user_repr(self):
        """Name as shown in error messages: anonymous terminals ('__...')
        display their source pattern instead of the generated name."""
        if self.name.startswith('__'):
            return self.pattern.raw or self.name
        else:
            return self.name
|
|
|
|
|
|
class Token(Str):
    """A lexed token: a string subclass carrying its terminal type and
    position metadata, so it can be used anywhere a plain string works."""
    __slots__ = ('type', 'start_pos', 'value', 'line', 'column', 'end_line', 'end_column', 'end_pos')

    def __new__(cls, type_, value, start_pos=None, line=None, column=None, end_line=None, end_column=None, end_pos=None, pos_in_stream=None):
        try:
            inst = super(Token, cls).__new__(cls, value)
        except UnicodeDecodeError:
            # Py2 byte strings that aren't valid unicode: fall back to latin1.
            value = value.decode('latin1')
            inst = super(Token, cls).__new__(cls, value)

        inst.type = type_
        # pos_in_stream is the deprecated spelling of start_pos.
        inst.start_pos = start_pos if start_pos is not None else pos_in_stream
        inst.value = value
        inst.line = line
        inst.column = column
        inst.end_line = end_line
        inst.end_column = end_column
        inst.end_pos = end_pos
        return inst

    @property
    def pos_in_stream(self):
        warn("Attribute Token.pos_in_stream was renamed to Token.start_pos", DeprecationWarning, 2)
        return self.start_pos

    def update(self, type_=None, value=None):
        """Return a copy with type and/or value replaced, keeping position."""
        return Token.new_borrow_pos(
            type_ if type_ is not None else self.type,
            value if value is not None else self.value,
            self
        )

    @classmethod
    def new_borrow_pos(cls, type_, value, borrow_t):
        """Alternate constructor copying all position info from *borrow_t*."""
        return cls(type_, value, borrow_t.start_pos, borrow_t.line, borrow_t.column, borrow_t.end_line, borrow_t.end_column, borrow_t.end_pos)

    def __reduce__(self):
        return (self.__class__, (self.type, self.value, self.start_pos, self.line, self.column))

    def __repr__(self):
        return 'Token(%r, %r)' % (self.type, self.value)

    def __deepcopy__(self, memo):
        return Token(self.type, self.value, self.start_pos, self.line, self.column)

    def __eq__(self, other):
        # Two Tokens of different types are unequal even with equal text;
        # comparison with plain strings falls through to string equality.
        if isinstance(other, Token) and self.type != other.type:
            return False

        return Str.__eq__(self, other)

    __hash__ = Str.__hash__
|
|
|
|
|
|
class LineCounter:
    """Tracks absolute offset, line and column while text is fed through."""
    __slots__ = 'char_pos', 'line', 'column', 'line_start_pos', 'newline_char'

    def __init__(self, newline_char):
        self.newline_char = newline_char
        self.char_pos = 0
        self.line = 1
        self.column = 1
        self.line_start_pos = 0

    def __eq__(self, other):
        if not isinstance(other, LineCounter):
            return NotImplemented
        return self.char_pos == other.char_pos and self.newline_char == other.newline_char

    def feed(self, token, test_newline=True):
        """Consume *token*, advancing position/line/column.

        Set test_newline=False to skip newline scanning when the caller
        knows the token cannot contain one.
        """
        if test_newline:
            nl_count = token.count(self.newline_char)
            if nl_count:
                self.line += nl_count
                last_nl = token.rindex(self.newline_char)
                self.line_start_pos = self.char_pos + last_nl + 1

        self.char_pos += len(token)
        self.column = self.char_pos - self.line_start_pos + 1
|
|
|
|
|
|
class UnlessCallback:
    """Lexer callback that re-types a matched token when its text also
    matches one of the higher-priority literal terminals."""

    def __init__(self, scanner):
        self.scanner = scanner

    def __call__(self, t):
        res = self.scanner.match(t.value, 0)
        if res:
            # Keep the token text; only its type changes.
            _value, t.type = res
        return t
|
|
|
|
|
|
class CallChain:
    """Compose two callbacks: run callback1; if `cond` accepts its result,
    run callback2 on the *original* input, otherwise return callback1's
    result."""

    def __init__(self, callback1, callback2, cond):
        self.callback1 = callback1
        self.callback2 = callback2
        self.cond = cond

    def __call__(self, t):
        first = self.callback1(t)
        if self.cond(first):
            return self.callback2(t)
        return first
|
|
|
|
|
|
def _get_match(re_, regexp, s, flags):
    """Return the text matched by `regexp` at the start of `s`, or None."""
    m = re_.match(regexp, s, flags)
    return m.group(0) if m else None
|
|
|
|
def _create_unless(terminals, g_regex_flags, re_, use_bytes):
    # Implements the "unless" relation between terminals: if a string
    # terminal is fully matched by a regexp terminal of >= priority, the
    # string terminal is removed from the main scanner and recovered by a
    # post-match callback attached to the regexp terminal instead.
    # Returns (filtered_terminals, {terminal_name: callback}).
    tokens_by_type = classify(terminals, lambda t: type(t.pattern))
    assert len(tokens_by_type) <= 2, tokens_by_type.keys()
    embedded_strs = set()
    callback = {}
    for retok in tokens_by_type.get(PatternRE, []):
        unless = []
        for strtok in tokens_by_type.get(PatternStr, []):
            if strtok.priority > retok.priority:
                continue
            s = strtok.pattern.value
            # The string terminal is "embedded" if the regexp matches it fully.
            if s == _get_match(re_, retok.pattern.to_regexp(), s, g_regex_flags):
                unless.append(strtok)
                if strtok.pattern.flags <= retok.pattern.flags:
                    embedded_strs.add(strtok)
        if unless:
            # match_whole: the embedded string must cover the whole match.
            callback[retok.name] = UnlessCallback(Scanner(unless, g_regex_flags, re_, match_whole=True, use_bytes=use_bytes))

    new_terminals = [t for t in terminals if t not in embedded_strs]
    return new_terminals, callback
|
|
|
|
|
|
|
|
class Scanner:
    # Compiles a list of terminals into one (or several) alternation regexps
    # with one named group per terminal, and matches them against text.

    def __init__(self, terminals, g_regex_flags, re_, use_bytes, match_whole=False):
        self.terminals = terminals
        self.g_regex_flags = g_regex_flags
        self.re_ = re_
        self.use_bytes = use_bytes
        self.match_whole = match_whole  # anchor every pattern with '$'

        self.allowed_types = {t.name for t in self.terminals}

        self._mres = self._build_mres(terminals, len(terminals))

    def _build_mres(self, terminals, max_size):
        # Python sets a limit on the number of named groups in a regexp.
        # When compilation fails with AssertionError, recursively halve the
        # batch size and compile several smaller regexps instead of one.
        postfix = '$' if self.match_whole else ''
        mres = []
        while terminals:
            pattern = u'|'.join(u'(?P<%s>%s)' % (t.name, t.pattern.to_regexp() + postfix) for t in terminals[:max_size])
            if self.use_bytes:
                pattern = pattern.encode('latin-1')
            try:
                mre = self.re_.compile(pattern, self.g_regex_flags)
            except AssertionError:  # raised when the group limit is exceeded
                return self._build_mres(terminals, max_size//2)

            # Map group index back to terminal name for match().
            mres.append((mre, {i: n for n, i in mre.groupindex.items()}))
            terminals = terminals[max_size:]
        return mres

    def match(self, text, pos):
        # Return (matched_text, terminal_name), or None when nothing matches.
        for mre, type_from_index in self._mres:
            m = mre.match(text, pos)
            if m:
                return m.group(0), type_from_index[m.lastindex]
|
|
|
|
|
|
def _regexp_has_newline(r):
    """Heuristically decide whether a regexp *may* match a newline.

    True for a literal or escaped newline, '\\s', a negated character
    class, or the DOTALL flag combined with '.'.
    """
    if '\n' in r or '\\n' in r or '\\s' in r or '[^' in r:
        return True
    return '(?s' in r and '.' in r
|
|
|
|
|
|
class Lexer(object):
    # Abstract base class for lexers; subclasses must provide `lex`.
    lex = NotImplemented

    def make_lexer_state(self, text):
        # Pick the newline sentinel matching the input type (bytes vs str).
        line_ctr = LineCounter(b'\n' if isinstance(text, bytes) else '\n')
        return LexerState(text, line_ctr)
|
|
|
|
|
|
class TraditionalLexer(Lexer):
    # A context-free lexer: compiles all terminals into a single Scanner and
    # yields tokens until the input is exhausted.

    def __init__(self, conf):
        # conf: a LexerConf carrying terminals, ignores, callbacks and flags.
        terminals = list(conf.terminals)
        assert all(isinstance(t, TerminalDef) for t in terminals), terminals

        self.re = conf.re_module

        if not conf.skip_validation:
            # Sanity checks: every terminal must compile and match at least
            # one character, and every ignored name must be a real terminal.
            for t in terminals:
                try:
                    self.re.compile(t.pattern.to_regexp(), conf.g_regex_flags)
                except self.re.error:
                    raise LexError("Cannot compile token %s: %s" % (t.name, t.pattern))

                if t.pattern.min_width == 0:
                    raise LexError("Lexer does not allow zero-width terminals. (%s: %s)" % (t.name, t.pattern))

            if not (set(conf.ignore) <= {t.name for t in terminals}):
                raise LexError("Ignore terminals are not defined: %s" % (set(conf.ignore) - {t.name for t in terminals}))

        # Init
        self.newline_types = frozenset(t.name for t in terminals if _regexp_has_newline(t.pattern.to_regexp()))
        self.ignore_types = frozenset(conf.ignore)

        # Higher priority first; ties broken by pattern width, then by name.
        terminals.sort(key=lambda x: (-x.priority, -x.pattern.max_width, -len(x.pattern.value), x.name))
        self.terminals = terminals
        self.user_callbacks = conf.callbacks
        self.g_regex_flags = conf.g_regex_flags
        self.use_bytes = conf.use_bytes
        self.terminals_by_name = conf.terminals_by_name

        # Built lazily on first use — see the `scanner` property.
        self._scanner = None

    def _build_scanner(self):
        terminals, self.callback = _create_unless(self.terminals, self.g_regex_flags, self.re, self.use_bytes)
        assert all(self.callback.values())

        for type_, f in self.user_callbacks.items():
            if type_ in self.callback:
                # Chain a user callback after the internal 'unless' callback.
                self.callback[type_] = CallChain(self.callback[type_], f, lambda t: t.type == type_)
            else:
                self.callback[type_] = f

        self._scanner = Scanner(terminals, self.g_regex_flags, self.re, self.use_bytes)

    @property
    def scanner(self):
        if self._scanner is None:
            self._build_scanner()
        return self._scanner

    def match(self, text, pos):
        return self.scanner.match(text, pos)

    def lex(self, state, parser_state):
        # next_token signals end-of-input by raising EOFError.
        with suppress(EOFError):
            while True:
                yield self.next_token(state, parser_state)

    def next_token(self, lex_state, parser_state=None):
        # Produce the next non-ignored token, advancing the line counter.
        # Raises UnexpectedCharacters on a scan failure, EOFError at the end.
        line_ctr = lex_state.line_ctr
        while line_ctr.char_pos < len(lex_state.text):
            res = self.match(lex_state.text, line_ctr.char_pos)
            if not res:
                allowed = self.scanner.allowed_types - self.ignore_types
                if not allowed:
                    allowed = {"<END-OF-FILE>"}
                raise UnexpectedCharacters(lex_state.text, line_ctr.char_pos, line_ctr.line, line_ctr.column,
                                           allowed=allowed, token_history=lex_state.last_token and [lex_state.last_token],
                                           state=parser_state, terminals_by_name=self.terminals_by_name)

            value, type_ = res

            if type_ not in self.ignore_types:
                t = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column)
                line_ctr.feed(value, type_ in self.newline_types)
                t.end_line = line_ctr.line
                t.end_column = line_ctr.column
                t.end_pos = line_ctr.char_pos
                if t.type in self.callback:
                    t = self.callback[t.type](t)
                    if not isinstance(t, Token):
                        raise LexError("Callbacks must return a token (returned %r)" % t)
                lex_state.last_token = t
                return t
            else:
                # Ignored terminals may still have callbacks (e.g. COMMENT
                # collection), but their return value is discarded.
                if type_ in self.callback:
                    t2 = Token(type_, value, line_ctr.char_pos, line_ctr.line, line_ctr.column)
                    self.callback[type_](t2)
                line_ctr.feed(value, type_ in self.newline_types)

        # EOF
        raise EOFError(self)
|
|
|
|
|
|
class LexerState(object):
    """Mutable lexing position: the input text plus a LineCounter and the
    most recently produced token (kept for error reporting)."""

    __slots__ = 'text', 'line_ctr', 'last_token'

    def __init__(self, text, line_ctr, last_token=None):
        self.text = text
        self.line_ctr = line_ctr
        self.last_token = last_token

    def __eq__(self, other):
        if not isinstance(other, LexerState):
            return NotImplemented

        # Identity (not equality) on `text`: states over distinct-but-equal
        # strings are deliberately considered different.
        return (self.text is other.text
                and self.line_ctr == other.line_ctr
                and self.last_token == other.last_token)

    def __copy__(self):
        return type(self)(self.text, copy(self.line_ctr), self.last_token)
|
|
|
|
|
|
class ContextualLexer(Lexer):
    # A lexer that uses a different TraditionalLexer per parser state, so
    # only the terminals acceptable in that state are considered.  Lexers
    # are shared between states that accept the same terminal set.

    def __init__(self, conf, states, always_accept=()):
        terminals = list(conf.terminals)
        terminals_by_name = conf.terminals_by_name

        trad_conf = copy(conf)
        trad_conf.terminals = terminals

        lexer_by_tokens = {}  # cache: frozenset of accepted names -> lexer
        self.lexers = {}
        for state, accepts in states.items():
            key = frozenset(accepts)
            try:
                lexer = lexer_by_tokens[key]
            except KeyError:
                # Ignored terminals and postlex-required ones are always on.
                accepts = set(accepts) | set(conf.ignore) | set(always_accept)
                lexer_conf = copy(trad_conf)
                lexer_conf.terminals = [terminals_by_name[n] for n in accepts if n in terminals_by_name]
                lexer = TraditionalLexer(lexer_conf)
                lexer_by_tokens[key] = lexer

            self.lexers[state] = lexer

        assert trad_conf.terminals is terminals
        # Fallback lexer over all terminals, used for error reporting.
        self.root_lexer = TraditionalLexer(trad_conf)

    def make_lexer_state(self, text):
        return self.root_lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        try:
            while True:
                lexer = self.lexers[parser_state.position]
                yield lexer.next_token(lexer_state, parser_state)
        except EOFError:
            pass
        except UnexpectedCharacters as e:
            # In the contextual lexer, UnexpectedCharacters can mean that the
            # terminal exists but just isn't accepted in the current state.
            # Try the root lexer: if it can tokenize, report UnexpectedToken.
            try:
                last_token = lexer_state.last_token  # save now; next_token() overwrites it

                token = self.root_lexer.next_token(lexer_state, parser_state)
                raise UnexpectedToken(token, e.allowed, state=parser_state, token_history=[last_token], terminals_by_name=self.root_lexer.terminals_by_name)
            except UnexpectedCharacters:
                raise e  # the original error is the accurate one
|
|
|
|
|
|
class LexerThread(object):
    """Ties a lexer instance to a lexer state, so one (stateless) lexer can
    be reused by several concurrent parses."""

    def __init__(self, lexer, text):
        self.lexer = lexer
        self.state = lexer.make_lexer_state(text)

    def lex(self, parser_state):
        return self.lexer.lex(self.state, parser_state)

    def __copy__(self):
        # Share the lexer, but give the clone its own mutable state.
        clone = object.__new__(LexerThread)
        clone.lexer = self.lexer
        clone.state = copy(self.state)
        return clone
|
|
|
|
|
|
|
|
class LexerConf(Serialize):
    # Configuration shared by all lexer types.
    __serialize_fields__ = 'terminals', 'ignore', 'g_regex_flags', 'use_bytes', 'lexer_type'
    __serialize_namespace__ = TerminalDef,

    def __init__(self, terminals, re_module, ignore=(), postlex=None, callbacks=None, g_regex_flags=0, skip_validation=False, use_bytes=False):
        self.terminals = terminals
        self.terminals_by_name = {t.name: t for t in self.terminals}
        assert len(self.terminals) == len(self.terminals_by_name)  # terminal names must be unique
        self.ignore = ignore
        self.postlex = postlex
        self.callbacks = callbacks or {}
        self.g_regex_flags = g_regex_flags
        self.re_module = re_module
        self.skip_validation = skip_validation
        self.use_bytes = use_bytes
        self.lexer_type = None  # set later by the parsing frontend

    @property
    def tokens(self):
        # Deprecated alias for `terminals`.
        warn("LexerConf.tokens is deprecated. Use LexerConf.terminals instead", DeprecationWarning)
        return self.terminals

    def _deserialize(self):
        # Rebuild the derived name index after deserialization.
        self.terminals_by_name = {t.name: t for t in self.terminals}

    def __deepcopy__(self, memo=None):
        # re_module is shared (modules are not deep-copied); everything else
        # is deep-copied, in the constructor's positional order.
        return type(self)(
            deepcopy(self.terminals, memo),
            self.re_module,
            deepcopy(self.ignore, memo),
            deepcopy(self.postlex, memo),
            deepcopy(self.callbacks, memo),
            deepcopy(self.g_regex_flags, memo),
            deepcopy(self.skip_validation, memo),
            deepcopy(self.use_bytes, memo),
        )
|
|
|
|
|
|
class ParserConf(Serialize):
    # Bundle of grammar rules, reduction callbacks and start symbols.
    __serialize_fields__ = 'rules', 'start', 'parser_type'

    def __init__(self, rules, callbacks, start):
        assert isinstance(start, list)
        self.rules = rules
        self.callbacks = callbacks
        self.start = start

        self.parser_type = None  # set later by the parsing frontend
|
|
|
|
|
|
from functools import partial, wraps
|
|
from itertools import repeat, product
|
|
|
|
|
|
class ExpandSingleChild:
    """Tree-building wrapper: a match with exactly one child collapses to
    that child instead of being wrapped in a new node."""

    def __init__(self, node_builder):
        self.node_builder = node_builder

    def __call__(self, children):
        return children[0] if len(children) == 1 else self.node_builder(children)
|
|
|
|
|
|
|
|
class PropagatePositions:
    # Tree-building wrapper that copies line/column/position metadata from
    # the first and last position-carrying children onto the new node.

    def __init__(self, node_builder, node_filter=None):
        self.node_builder = node_builder
        self.node_filter = node_filter

    def __call__(self, children):
        res = self.node_builder(children)

        if isinstance(res, Tree):
            # Positions are calculated as the tree streams through, assuming
            # bottom-up construction.  The "container_*" attributes preserve
            # the spans of filtered-out children (e.g. punctuation tokens) so
            # enclosing nodes still receive accurate outer spans.

            res_meta = res.meta

            first_meta = self._pp_get_meta(children)
            if first_meta is not None:
                if not hasattr(res_meta, 'line'):
                    # Don't overwrite: meta may already be set, e.g. when a
                    # rule was inlined (templates).
                    res_meta.line = getattr(first_meta, 'container_line', first_meta.line)
                    res_meta.column = getattr(first_meta, 'container_column', first_meta.column)
                    res_meta.start_pos = getattr(first_meta, 'container_start_pos', first_meta.start_pos)
                    res_meta.empty = False

                res_meta.container_line = getattr(first_meta, 'container_line', first_meta.line)
                res_meta.container_column = getattr(first_meta, 'container_column', first_meta.column)

            last_meta = self._pp_get_meta(reversed(children))
            if last_meta is not None:
                if not hasattr(res_meta, 'end_line'):
                    res_meta.end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                    res_meta.end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)
                    res_meta.end_pos = getattr(last_meta, 'container_end_pos', last_meta.end_pos)
                    res_meta.empty = False

                res_meta.container_end_line = getattr(last_meta, 'container_end_line', last_meta.end_line)
                res_meta.container_end_column = getattr(last_meta, 'container_end_column', last_meta.end_column)

        return res

    def _pp_get_meta(self, children):
        # First child (in iteration order) that passes node_filter and
        # carries position info: a Tree with non-empty meta, or a Token.
        for c in children:
            if self.node_filter is not None and not self.node_filter(c):
                continue
            if isinstance(c, Tree):
                if not c.meta.empty:
                    return c.meta
            elif isinstance(c, Token):
                return c
|
|
|
|
def make_propagate_positions(option):
    """Translate the `propagate_positions` option into a tree-builder
    wrapper: a callable acts as a node filter, True enables the default
    wrapper, False disables propagation entirely."""
    if callable(option):
        return partial(PropagatePositions, node_filter=option)
    if option is True:
        return PropagatePositions
    if option is False:
        return None

    raise ConfigurationError('Invalid option for propagate_positions: %r' % option)
|
|
|
|
|
|
class ChildFilter:
    """Tree-building wrapper that keeps only selected children, inlines the
    children of expandable ones, and inserts None placeholders for removed
    optional ([]) slots."""

    def __init__(self, to_include, append_none, node_builder):
        # to_include: list of (index, expand?, leading None count)
        self.node_builder = node_builder
        self.to_include = to_include
        self.append_none = append_none

    def __call__(self, children):
        selected = []

        for idx, do_expand, nones_before in self.to_include:
            if nones_before:
                selected += [None] * nones_before
            child = children[idx]
            if do_expand:
                selected += child.children
            else:
                selected.append(child)

        if self.append_none:
            selected += [None] * self.append_none

        return self.node_builder(selected)
|
|
|
|
|
|
class ChildFilterLALR(ChildFilter):
    """Optimized version of ChildFilter for LALR: when the first kept child
    is expanded, its children list is reused instead of copied (safe because
    each tree node is built exactly once in LALR)."""

    def __call__(self, children):
        filtered = []
        for idx, do_expand, nones_before in self.to_include:
            if nones_before:
                filtered += [None] * nones_before
            if do_expand:
                if filtered:
                    filtered += children[idx].children
                else:
                    # Steal the existing list instead of copying it.
                    filtered = children[idx].children
            else:
                filtered.append(children[idx])

        if self.append_none:
            filtered += [None] * self.append_none

        return self.node_builder(filtered)
|
|
|
|
|
|
class ChildFilterLALR_NoPlaceholders(ChildFilter):
    """Fast LALR child filter for rules with no None placeholders: entries
    are plain (index, expand?) pairs and no Nones are ever inserted."""

    def __init__(self, to_include, node_builder):
        self.node_builder = node_builder
        self.to_include = to_include

    def __call__(self, children):
        filtered = []
        for idx, do_expand in self.to_include:
            if do_expand:
                if filtered:
                    filtered += children[idx].children
                else:
                    # Steal the list — safe in LALR (single construction).
                    filtered = children[idx].children
            else:
                filtered.append(children[idx])
        return self.node_builder(filtered)
|
|
|
|
|
|
def _should_expand(sym):
    # Non-terminals whose names start with '_' are inlined into their parent.
    return not sym.is_term and sym.name.startswith('_')
|
|
|
|
|
|
def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices):
    # Prepare a ChildFilter factory for one rule expansion, or None when no
    # filtering is needed.  _empty_indices marks where `[]` (maybe)
    # placeholders were removed, so None values can be re-inserted there.
    if _empty_indices:
        assert _empty_indices.count(False) == len(expansion)
        # Encode as run-lengths of removed (True) slots between kept ones:
        # empty_indices[i] = number of Nones to insert before expansion[i].
        s = ''.join(str(int(b)) for b in _empty_indices)
        empty_indices = [len(ones) for ones in s.split('0')]
        assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion))
    else:
        empty_indices = [0] * (len(expansion)+1)

    to_include = []
    nones_to_add = 0
    for i, sym in enumerate(expansion):
        nones_to_add += empty_indices[i]
        if keep_all_tokens or not (sym.is_term and sym.filter_out):
            to_include.append((i, _should_expand(sym), nones_to_add))
            nones_to_add = 0

    nones_to_add += empty_indices[len(expansion)]

    if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include):
        if _empty_indices or ambiguous:
            return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add)
        else:
            # LALR without placeholders — use the faster filter variant.
            return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include])
|
|
|
|
|
|
class AmbiguousExpander:
    # Deals with the case where we're expanding children ('_rule') into a
    # parent, but the children are ambiguous: the parent node itself becomes
    # ambiguous, with one copy per combination of the children's
    # alternatives.
    def __init__(self, to_expand, tree_class, node_builder):
        self.node_builder = node_builder
        self.tree_class = tree_class
        self.to_expand = to_expand

    def __call__(self, children):
        def _is_ambig_tree(t):
            return hasattr(t, 'data') and t.data == '_ambig'

        ambiguous = []
        for i, child in enumerate(children):
            if _is_ambig_tree(child):
                if i in self.to_expand:
                    ambiguous.append(i)

                child.expand_kids_by_data('_ambig')  # flatten nested _ambig nodes

        if not ambiguous:
            return self.node_builder(children)

        # Build one candidate node per combination of ambiguous alternatives.
        expand = [iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children)]
        return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))])
|
|
|
|
|
|
def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens):
    # An AmbiguousExpander is only needed when the expansion contains
    # inlinable ('_'-prefixed) symbols; otherwise returns None.
    to_expand = [i for i, sym in enumerate(expansion)
                 if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))]
    if to_expand:
        return partial(AmbiguousExpander, to_expand, tree_class)
|
|
|
|
|
|
class AmbiguousIntermediateExpander:
    # Propagates ambiguous intermediate nodes ('_iambig') up the tree,
    # rewriting them into regular '_ambig' nodes.

    def __init__(self, tree_class, node_builder):
        self.node_builder = node_builder
        self.tree_class = tree_class

    def __call__(self, children):
        def _is_iambig_tree(child):
            return hasattr(child, 'data') and child.data == '_iambig'

        def _collapse_iambig(children):
            # Recursively flatten the derivations inside an '_iambig' node
            # into a list of '_inter' trees; returns None (implicitly) when
            # there is nothing to collapse.  An '_iambig' node can only ever
            # appear as the first child.
            if children and _is_iambig_tree(children[0]):
                iambig_node = children[0]
                result = []
                for grandchild in iambig_node.children:
                    collapsed = _collapse_iambig(grandchild.children)
                    if collapsed:
                        for child in collapsed:
                            child.children += children[1:]
                        result += collapsed
                    else:
                        new_tree = self.tree_class('_inter', grandchild.children + children[1:])
                        result.append(new_tree)
                return result

        collapsed = _collapse_iambig(children)
        if collapsed:
            processed_nodes = [self.node_builder(c.children) for c in collapsed]
            return self.tree_class('_ambig', processed_nodes)

        return self.node_builder(children)
|
|
|
|
|
|
def ptb_inline_args(func):
    """Wrap `func` so that a children list is splatted into individual
    positional arguments (used for InlineTransformer callbacks)."""
    @wraps(func)
    def inlined(children):
        return func(*children)
    return inlined
|
|
|
|
|
|
def inplace_transformer(func):
    """Wrap a Transformer_InPlace method: rebuild a Tree node around the
    children so the callback sees the usual single-tree interface."""
    @wraps(func)
    def call_with_tree(children):
        node = Tree(func.__name__, children)
        return func(node)
    return call_with_tree
|
|
|
|
|
|
def apply_visit_wrapper(func, name, wrapper):
    # Adapt a @v_args-decorated callback to the internal calling convention.
    # Meta-receiving wrappers can't be supported here (no meta available).
    if wrapper is _vargs_meta or wrapper is _vargs_meta_inline:
        raise NotImplementedError("Meta args not supported for internal transformer")

    @wraps(func)
    def f(children):
        return wrapper(func, name, children, None)
    return f
|
|
|
|
|
|
class ParseTreeBuilder:
    # Builds, for each grammar rule, the chain of wrappers (child filtering,
    # position propagation, ambiguity expansion) around either the tree
    # constructor or the user's transformer callback.

    def __init__(self, rules, tree_class, propagate_positions=False, ambiguous=False, maybe_placeholders=False):
        self.tree_class = tree_class
        self.propagate_positions = propagate_positions
        self.ambiguous = ambiguous
        self.maybe_placeholders = maybe_placeholders

        self.rule_builders = list(self._init_builders(rules))

    def _init_builders(self, rules):
        propagate_positions = make_propagate_positions(self.propagate_positions)

        for rule in rules:
            options = rule.options
            keep_all_tokens = options.keep_all_tokens
            expand_single_child = options.expand1

            # Falsy entries are dropped; order matters (innermost first).
            wrapper_chain = list(filter(None, [
                (expand_single_child and not rule.alias) and ExpandSingleChild,
                maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None),
                propagate_positions,
                self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens),
                self.ambiguous and partial(AmbiguousIntermediateExpander, self.tree_class)
            ]))

            yield rule, wrapper_chain

    def create_callback(self, transformer=None):
        # Map each rule to its final callback: the transformer's method named
        # after the rule (adapted to its calling convention) when present,
        # otherwise the plain tree constructor — wrapped with the rule chain.
        callbacks = {}

        for rule, wrapper_chain in self.rule_builders:

            user_callback_name = rule.alias or rule.options.template_source or rule.origin.name
            try:
                f = getattr(transformer, user_callback_name)
                # XXX InlineTransformer is deprecated!
                wrapper = getattr(f, 'visit_wrapper', None)
                if wrapper is not None:
                    f = apply_visit_wrapper(f, user_callback_name, wrapper)
                else:
                    if isinstance(transformer, InlineTransformer):
                        f = ptb_inline_args(f)
                    elif isinstance(transformer, Transformer_InPlace):
                        f = inplace_transformer(f)
            except AttributeError:
                f = partial(self.tree_class, user_callback_name)

            for w in wrapper_chain:
                f = w(f)

            if rule in callbacks:
                raise GrammarError("Rule '%s' already exists" % (rule,))

            callbacks[rule] = f

        return callbacks
|
|
|
|
|
|
|
|
class LALR_Parser(Serialize):
    def __init__(self, parser_conf, debug=False):
        # Run the LALR(1) analysis to produce the parse table.
        analysis = LALR_Analyzer(parser_conf, debug=debug)
        analysis.compute_lalr()
        callbacks = parser_conf.callbacks

        self._parse_table = analysis.parse_table
        self.parser_conf = parser_conf
        self.parser = _Parser(analysis.parse_table, callbacks, debug)

    @classmethod
    def deserialize(cls, data, memo, callbacks, debug=False):
        # Rebuild from a serialized parse table, skipping LALR analysis.
        inst = cls.__new__(cls)
        inst._parse_table = IntParseTable.deserialize(data, memo)
        inst.parser = _Parser(inst._parse_table, callbacks, debug)
        return inst

    def serialize(self, memo):
        return self._parse_table.serialize(memo)

    def parse_interactive(self, lexer, start):
        return self.parser.parse(lexer, start, start_interactive=True)

    def parse(self, lexer, start, on_error=None):
        try:
            return self.parser.parse(lexer, start)
        except UnexpectedInput as e:
            if on_error is None:
                raise

            # Error-recovery loop: let `on_error` repair the state, then
            # resume from the interactive parser the exception carries.
            while True:
                if isinstance(e, UnexpectedCharacters):
                    s = e.interactive_parser.lexer_state.state
                    p = s.line_ctr.char_pos

                if not on_error(e):
                    raise e

                if isinstance(e, UnexpectedCharacters):
                    # If the handler didn't advance the character position,
                    # skip one character to avoid an infinite loop.
                    if p == s.line_ctr.char_pos:
                        s.line_ctr.feed(s.text[p:p+1])

                try:
                    return e.interactive_parser.resume_parse()
                except UnexpectedToken as e2:
                    if (isinstance(e, UnexpectedToken)
                        and e.token.type == e2.token.type == '$END'
                        and e.interactive_parser == e2.interactive_parser):
                        # No progress was made on $END — prevent infinite loop.
                        raise e2
                    e = e2
                except UnexpectedCharacters as e2:
                    e = e2
|
|
|
|
|
|
class ParseConf(object):
    """Per-parse configuration: the parse table, reduction callbacks and the
    chosen start symbol, with that symbol's table entries cached."""

    __slots__ = 'parse_table', 'callbacks', 'start', 'start_state', 'end_state', 'states'

    def __init__(self, parse_table, callbacks, start):
        self.parse_table = parse_table

        # Cache the lookups for the chosen start symbol.
        self.start_state = parse_table.start_states[start]
        self.end_state = parse_table.end_states[start]
        self.states = parse_table.states

        self.callbacks = callbacks
        self.start = start
|
|
|
|
|
|
class ParserState(object):
    # The mutable state of an in-progress LALR parse: the classic state and
    # value stacks, plus the shared parse configuration and the lexer.
    __slots__ = 'parse_conf', 'lexer', 'state_stack', 'value_stack'

    def __init__(self, parse_conf, lexer, state_stack=None, value_stack=None):
        self.parse_conf = parse_conf
        self.lexer = lexer
        self.state_stack = state_stack or [self.parse_conf.start_state]
        self.value_stack = value_stack or []

    @property
    def position(self):
        # The current (topmost) parser state id.
        return self.state_stack[-1]

    # Necessary for match_examples() to work
    def __eq__(self, other):
        if not isinstance(other, ParserState):
            return NotImplemented
        return len(self.state_stack) == len(other.state_stack) and self.position == other.position

    def __copy__(self):
        return type(self)(
            self.parse_conf,
            self.lexer,  # XXX copy
            copy(self.state_stack),
            deepcopy(self.value_stack),
        )

    def copy(self):
        return copy(self)

    def feed_token(self, token, is_end=False):
        # Core LALR step: reduce while the table says Reduce, then shift
        # `token`.  When is_end is true, returns the final parse value once
        # the end state is reached.  Raises UnexpectedToken on a table miss.
        state_stack = self.state_stack
        value_stack = self.value_stack
        states = self.parse_conf.states
        end_state = self.parse_conf.end_state
        callbacks = self.parse_conf.callbacks

        while True:
            state = state_stack[-1]
            try:
                action, arg = states[state][token.type]
            except KeyError:
                # Terminal names are upper-case in the table; rules are not.
                expected = {s for s in states[state].keys() if s.isupper()}
                raise UnexpectedToken(token, expected, state=self, interactive_parser=None)

            assert arg != end_state

            if action is Shift:
                # shift once and return
                assert not is_end
                state_stack.append(arg)
                value_stack.append(token if token.type not in callbacks else callbacks[token.type](token))
                return
            else:
                # reduce+shift as many times as necessary
                rule = arg
                size = len(rule.expansion)
                if size:
                    s = value_stack[-size:]
                    del state_stack[-size:]
                    del value_stack[-size:]
                else:
                    s = []

                value = callbacks[rule](s)

                # Goto: push the state reached on the reduced rule's origin.
                _action, new_state = states[state_stack[-1]][rule.origin.name]
                assert _action is Shift
                state_stack.append(new_state)
                value_stack.append(value)

                if is_end and state_stack[-1] == end_state:
                    return value_stack[-1]
|
|
|
|
class _Parser(object):
    # Drives the LALR parse loop over a token stream.
    def __init__(self, parse_table, callbacks, debug=False):
        self.parse_table = parse_table
        self.callbacks = callbacks
        self.debug = debug

    def parse(self, lexer, start, value_stack=None, state_stack=None, start_interactive=False):
        parse_conf = ParseConf(self.parse_table, self.callbacks, start)
        parser_state = ParserState(parse_conf, lexer, state_stack, value_stack)
        if start_interactive:
            return InteractiveParser(self, parser_state, parser_state.lexer)
        return self.parse_from_state(parser_state)


    def parse_from_state(self, state):
        # Main LALR loop: feed every token, then a synthetic $END token.
        try:
            token = None
            for token in state.lexer.lex(state):
                state.feed_token(token)

            # $END borrows its position from the last real token, if any.
            end_token = Token.new_borrow_pos('$END', '', token) if token else Token('$END', '', 0, 1, 1)
            return state.feed_token(end_token, True)
        except UnexpectedInput as e:
            try:
                # Attach an interactive parser for error recovery, when the
                # class is available in this build.
                e.interactive_parser = InteractiveParser(self, state, state.lexer)
            except NameError:
                pass
            raise e
        except Exception as e:
            if self.debug:
                print("")
                print("STATE STACK DUMP")
                print("----------------")
                for i, s in enumerate(state.state_stack):
                    print('%d)' % i , s)
                print("")

            raise
|
|
|
|
|
|
class Action:
    """Named marker object for parse-table actions (Shift / Reduce);
    compared by identity in the parser loop."""

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return self.name

    def __repr__(self):
        return str(self)
|
|
|
|
# The two LALR actions; the parser loop compares against these by identity.
Shift = Action('Shift')
Reduce = Action('Reduce')
|
|
|
|
|
|
class ParseTable:
    # The LALR action/goto table.  `states` maps state -> {symbol: (action,
    # arg)}, where arg is a target state for Shift and a Rule for Reduce.
    def __init__(self, states, start_states, end_states):
        self.states = states
        self.start_states = start_states
        self.end_states = end_states

    def serialize(self, memo):
        tokens = Enumerator()
        rules = Enumerator()

        # Actions are encoded as (1, rule) for Reduce, (0, state) for Shift;
        # token names are replaced by enumerated ids to shrink the output.
        states = {
            state: {tokens.get(token): ((1, arg.serialize(memo)) if action is Reduce else (0, arg))
                    for token, (action, arg) in actions.items()}
            for state, actions in self.states.items()
        }

        return {
            'tokens': tokens.reversed(),
            'states': states,
            'start_states': self.start_states,
            'end_states': self.end_states,
        }

    @classmethod
    def deserialize(cls, data, memo):
        # Inverse of serialize(): restore token names and action objects.
        tokens = data['tokens']
        states = {
            state: {tokens[token]: ((Reduce, Rule.deserialize(arg, memo)) if action==1 else (Shift, arg))
                    for token, (action, arg) in actions.items()}
            for state, actions in data['states'].items()
        }
        return cls(states, data['start_states'], data['end_states'])
|
|
|
|
|
|
class IntParseTable(ParseTable):
    # ParseTable variant whose states are renumbered to consecutive
    # integers, which makes the serialized form much more compact.

    @classmethod
    def from_ParseTable(cls, parse_table):
        enum = list(parse_table.states)
        state_to_idx = {s:i for i,s in enumerate(enum)}
        int_states = {}

        for s, la in parse_table.states.items():
            # Only Shift targets reference states and need remapping;
            # Reduce entries carry rules and are left untouched.
            la = {k:(v[0], state_to_idx[v[1]]) if v[0] is Shift else v
                  for k,v in la.items()}
            int_states[ state_to_idx[s] ] = la


        start_states = {start:state_to_idx[s] for start, s in parse_table.start_states.items()}
        end_states = {start:state_to_idx[s] for start, s in parse_table.end_states.items()}
        return cls(int_states, start_states, end_states)
|
|
|
|
|
|
|
|
def _wrap_lexer(lexer_class):
    # Adapt a user-supplied lexer class to the internal lexer interface,
    # unless it already opts into it via the __future_interface__ flag.
    future_interface = getattr(lexer_class, '__future_interface__', False)
    if future_interface:
        return lexer_class
    else:
        class CustomLexerWrapper(Lexer):
            def __init__(self, lexer_conf):
                self.lexer = lexer_class(lexer_conf)
            def lex(self, lexer_state, parser_state):
                # Legacy interface: the user lexer receives the raw text.
                return self.lexer.lex(lexer_state.text)
        return CustomLexerWrapper
|
|
|
|
|
|
class MakeParsingFrontend:
    # Factory for ParsingFrontend: remembers the requested parser and lexer
    # types and stamps them onto the configs at construction time.
    def __init__(self, parser_type, lexer_type):
        self.parser_type = parser_type
        self.lexer_type = lexer_type

    def __call__(self, lexer_conf, parser_conf, options):
        assert isinstance(lexer_conf, LexerConf)
        assert isinstance(parser_conf, ParserConf)
        parser_conf.parser_type = self.parser_type
        lexer_conf.lexer_type = self.lexer_type
        return ParsingFrontend(lexer_conf, parser_conf, options)

    def deserialize(self, data, memo, lexer_conf, callbacks, options):
        # Reconstruct a frontend (LALR-only) from serialized data.
        parser_conf = ParserConf.deserialize(data['parser_conf'], memo)
        parser = LALR_Parser.deserialize(data['parser'], memo, callbacks, options.debug)
        parser_conf.callbacks = callbacks
        return ParsingFrontend(lexer_conf, parser_conf, options, parser=parser)
|
|
|
|
|
|
|
|
|
|
class ParsingFrontend(Serialize):
    # Ties together a lexer and a parser according to the configured types,
    # and exposes the public parse()/parse_interactive() entry points.
    __serialize_fields__ = 'lexer_conf', 'parser_conf', 'parser', 'options'

    def __init__(self, lexer_conf, parser_conf, options, parser=None):
        self.parser_conf = parser_conf
        self.lexer_conf = lexer_conf
        self.options = options

        # Set up the parser.
        if parser:  # From cache / deserialization
            self.parser = parser
        else:
            create_parser = {
                'lalr': create_lalr_parser,
                'earley': create_earley_parser,
                'cyk': CYK_FrontEnd,
            }[parser_conf.parser_type]
            self.parser = create_parser(lexer_conf, parser_conf, options)

        # Set up the lexer.
        lexer_type = lexer_conf.lexer_type
        self.skip_lexer = False
        if lexer_type in ('dynamic', 'dynamic_complete'):
            # Earley's dynamic lexing happens inside the parser itself.
            assert lexer_conf.postlex is None
            self.skip_lexer = True
            return

        try:
            create_lexer = {
                'standard': create_traditional_lexer,
                'contextual': create_contextual_lexer,
            }[lexer_type]
        except KeyError:
            # Not a known name: must be a custom lexer class.
            assert issubclass(lexer_type, Lexer), lexer_type
            self.lexer = _wrap_lexer(lexer_type)(lexer_conf)
        else:
            self.lexer = create_lexer(lexer_conf, self.parser, lexer_conf.postlex)

        if lexer_conf.postlex:
            self.lexer = PostLexConnector(self.lexer, lexer_conf.postlex)

    def _verify_start(self, start=None):
        # Resolve (and validate) the start symbol for this parse.
        if start is None:
            start_decls = self.parser_conf.start
            if len(start_decls) > 1:
                raise ConfigurationError("Lark initialized with more than 1 possible start rule. Must specify which start rule to parse", start_decls)
            start ,= start_decls
        elif start not in self.parser_conf.start:
            raise ConfigurationError("Unknown start rule %s. Must be one of %r" % (start, self.parser_conf.start))
        return start

    def parse(self, text, start=None, on_error=None):
        chosen_start = self._verify_start(start)
        # With dynamic lexing, the raw text is handed straight to the parser.
        stream = text if self.skip_lexer else LexerThread(self.lexer, text)
        kw = {} if on_error is None else {'on_error': on_error}
        return self.parser.parse(stream, chosen_start, **kw)

    def parse_interactive(self, text=None, start=None):
        chosen_start = self._verify_start(start)
        if self.parser_conf.parser_type != 'lalr':
            raise ConfigurationError("parse_interactive() currently only works with parser='lalr' ")
        stream = text if self.skip_lexer else LexerThread(self.lexer, text)
        return self.parser.parse_interactive(stream, chosen_start)
|
|
|
|
|
|
def get_frontend(parser, lexer):
    # Validate the parser/lexer combination and return a frontend factory.
    assert_config(parser, ('lalr', 'earley', 'cyk'))
    if not isinstance(lexer, type):  # not a custom lexer class?
        expected = {
            'lalr': ('standard', 'contextual'),
            'earley': ('standard', 'dynamic', 'dynamic_complete'),
            'cyk': ('standard', ),
        }[parser]
        assert_config(lexer, expected, 'Parser %r does not support lexer %%r, expected one of %%s' % parser)

    return MakeParsingFrontend(parser, lexer)
|
|
|
|
|
|
def _get_lexer_callbacks(transformer, terminals):
    """Collect terminal-name -> callback mappings from methods defined on
    `transformer` (one per terminal name, when present)."""
    result = {}
    for terminal in terminals:
        handler = getattr(transformer, terminal.name, None)
        if handler is not None:
            result[terminal.name] = handler
    return result
|
|
|
|
class PostLexConnector:
    """Glue object that pipes a lexer's token stream through a post-lexer.

    Exposes the same ``make_lexer_state``/``lex`` surface as a plain
    lexer, so callers need not know a post-lexing stage is present.
    """

    def __init__(self, lexer, postlexer):
        self.lexer = lexer
        self.postlexer = postlexer

    def make_lexer_state(self, text):
        """Delegate lexer-state creation to the wrapped lexer."""
        return self.lexer.make_lexer_state(text)

    def lex(self, lexer_state, parser_state):
        """Lex with the wrapped lexer, then post-process the token stream."""
        tokens = self.lexer.lex(lexer_state, parser_state)
        return self.postlexer.process(tokens)
|
|
|
|
|
|
|
|
def create_traditional_lexer(lexer_conf, parser, postlex):
    """Build a TraditionalLexer from *lexer_conf*.

    *parser* and *postlex* are accepted only for signature uniformity
    with the other ``create_*_lexer`` factories; they are unused here.
    """
    return TraditionalLexer(lexer_conf)
|
|
|
|
def create_contextual_lexer(lexer_conf, parser, postlex):
    """Build a ContextualLexer driven by the parser's LALR state table.

    For each parse-table state, the set of acceptable token types is the
    keys of that state's transition table; the post-lexer (if any) may
    declare extra token types that are always accepted.
    """
    table = parser._parse_table.states
    accepts_by_state = {state_id: list(transitions.keys())
                        for state_id, transitions in table.items()}
    if postlex:
        extra_tokens = postlex.always_accept
    else:
        extra_tokens = ()
    return ContextualLexer(lexer_conf, accepts_by_state, always_accept=extra_tokens)
|
|
|
|
def create_lalr_parser(lexer_conf, parser_conf, options=None):
    """Build an LALR_Parser from *parser_conf*.

    *lexer_conf* is accepted for signature uniformity but unused; the
    debug flag is taken from *options* when provided.
    """
    if options:
        debug_mode = options.debug
    else:
        debug_mode = False
    return LALR_Parser(parser_conf, debug=debug_mode)
|
|
|
|
|
|
## Stubs: this standalone build bundles only the LALR machinery (see the
## generator header), so the Earley and CYK front-ends are left unavailable.
create_earley_parser = NotImplemented
CYK_FrontEnd = NotImplemented
|
|
|
|
|
|
|
|
class LarkOptions(Serialize):
    ## Holds and validates the keyword options accepted by Lark().
    ## NOTE: no class docstring on purpose — the `if __doc__:` below reads
    ## the enclosing module's __doc__ when the class has none, which is the
    ## behavior of this generated file; adding a docstring would change it.
    ## NOTE(review): OPTIONS_DOC indentation below is reconstructed; the
    ## scrape this was recovered from lost leading whitespace — confirm
    ## against upstream lark v0.12 before relying on exact layout.
    OPTIONS_DOC = """
    **=== General Options ===**

    start
            The start symbol. Either a string, or a list of strings for multiple possible starts (Default: "start")
    debug
            Display debug information and extra warnings. Use only when debugging (default: False)
            When used with Earley, it generates a forest graph as "sppf.png", if 'dot' is installed.
    transformer
            Applies the transformer to every parse tree (equivalent to applying it after the parse, but faster)
    propagate_positions
            Propagates (line, column, end_line, end_column) attributes into all tree branches.
            Accepts ``False``, ``True``, or a callable, which will filter which nodes to ignore when propagating.
    maybe_placeholders
            When ``True``, the ``[]`` operator returns ``None`` when not matched.

            When ``False``, ``[]`` behaves like the ``?`` operator, and returns no value at all.
            (default= ``False``. Recommended to set to ``True``)
    cache
            Cache the results of the Lark grammar analysis, for x2 to x3 faster loading. LALR only for now.

            - When ``False``, does nothing (default)
            - When ``True``, caches to a temporary file in the local directory
            - When given a string, caches to the path pointed by the string
    regex
            When True, uses the ``regex`` module instead of the stdlib ``re``.
    g_regex_flags
            Flags that are applied to all terminals (both regex and strings)
    keep_all_tokens
            Prevent the tree builder from automagically removing "punctuation" tokens (default: False)
    tree_class
            Lark will produce trees comprised of instances of this class instead of the default ``lark.Tree``.

    **=== Algorithm Options ===**

    parser
            Decides which parser engine to use. Accepts "earley" or "lalr". (Default: "earley").
            (there is also a "cyk" option for legacy)
    lexer
            Decides whether or not to use a lexer stage

            - "auto" (default): Choose for me based on the parser
            - "standard": Use a standard lexer
            - "contextual": Stronger lexer (only works with parser="lalr")
            - "dynamic": Flexible and powerful (only with parser="earley")
            - "dynamic_complete": Same as dynamic, but tries *every* variation of tokenizing possible.
    ambiguity
            Decides how to handle ambiguity in the parse. Only relevant if parser="earley"

            - "resolve": The parser will automatically choose the simplest derivation
              (it chooses consistently: greedy for tokens, non-greedy for rules)
            - "explicit": The parser will return all derivations wrapped in "_ambig" tree nodes (i.e. a forest).
            - "forest": The parser will return the root of the shared packed parse forest.

    **=== Misc. / Domain Specific Options ===**

    postlex
            Lexer post-processing (Default: None) Only works with the standard and contextual lexers.
    priority
            How priorities should be evaluated - auto, none, normal, invert (Default: auto)
    lexer_callbacks
            Dictionary of callbacks for the lexer. May alter tokens during lexing. Use with caution.
    use_bytes
            Accept an input of type ``bytes`` instead of ``str`` (Python 3 only).
    edit_terminals
            A callback for editing the terminals before parse.
    import_paths
            A List of either paths or loader functions to specify from where grammars are imported
    source_path
            Override the source of from where the grammar was loaded. Useful for relative imports and unconventional grammar loading
    **=== End of Options ===**
    """
    if __doc__:
        __doc__ += OPTIONS_DOC


    ## NOTE: adding a new option requires touching three places that must
    ## stay in sync: the OPTIONS_DOC text above, the _defaults mapping
    ## below, and — if the option may be re-specified on a cached or
    ## standalone parser — _LOAD_ALLOWED_OPTIONS further down in this file.
    _defaults = {
        'debug': False,
        'keep_all_tokens': False,
        'tree_class': None,
        'cache': False,
        'postlex': None,
        'parser': 'earley',
        'lexer': 'auto',
        'transformer': None,
        'start': 'start',
        'priority': 'auto',
        'ambiguity': 'auto',
        'regex': False,
        'propagate_positions': False,
        'lexer_callbacks': {},
        'maybe_placeholders': False,
        'edit_terminals': None,
        'g_regex_flags': 0,
        'use_bytes': False,
        'import_paths': [],
        'source_path': None,
    }

    def __init__(self, options_dict):
        """Validate *options_dict* against _defaults and store the result."""
        o = dict(options_dict)  # private copy; entries are popped as consumed

        options = {}
        for name, default in self._defaults.items():
            if name in o:
                value = o.pop(name)
                ## Coerce flag-like options to bool, except the ones that
                ## legitimately accept non-bool values: cache (path string),
                ## use_bytes ('force'), propagate_positions (callable).
                if isinstance(default, bool) and name not in ('cache', 'use_bytes', 'propagate_positions'):
                    value = bool(value)
            else:
                value = default

            options[name] = value

        ## Normalize a single start symbol to a one-element list.
        if isinstance(options['start'], STRING_TYPE):
            options['start'] = [options['start']]

        ## Assign through __dict__ to bypass our own __setattr__ validation.
        self.__dict__['options'] = options


        assert_config(self.parser, ('earley', 'lalr', 'cyk', None))

        if self.parser == 'earley' and self.transformer:
            raise ConfigurationError('Cannot specify an embedded transformer when using the Earley algorithm. '
                                     'Please use your transformer on the resulting parse tree, or use a different algorithm (i.e. LALR)')

        ## Anything not popped above was never a known option name.
        if o:
            raise ConfigurationError("Unknown options: %s" % o.keys())

    def __getattr__(self, name):
        ## Expose stored options as attributes; unknown names raise
        ## AttributeError (not KeyError) so hasattr() behaves normally.
        try:
            return self.__dict__['options'][name]
        except KeyError as e:
            raise AttributeError(e)

    def __setattr__(self, name, value):
        ## Only already-known option names may be assigned.
        assert_config(name, self.options.keys(), "%r isn't a valid option. Expected one of: %s")
        self.options[name] = value

    def serialize(self, memo):
        """Serialize to the plain options dict (memo is unused)."""
        return self.options

    @classmethod
    def deserialize(cls, data, memo):
        """Rebuild a LarkOptions from a serialized options dict."""
        return cls(data)
|
|
|
|
|
|
## Options that may still be (re-)specified when loading a pre-built parser
## from cache or a standalone: Lark.__init__ strips all other kwargs before
## a cache load, and Lark._load rejects any other option in its kwargs.
_LOAD_ALLOWED_OPTIONS = {'postlex', 'transformer', 'lexer_callbacks', 'use_bytes', 'debug', 'g_regex_flags', 'regex', 'propagate_positions', 'tree_class'}

## Accepted values for the 'priority' and 'ambiguity' options; validated
## in Lark.__init__ after the 'auto' defaults are resolved.
_VALID_PRIORITY_OPTIONS = ('auto', 'normal', 'invert', None)
_VALID_AMBIGUITY_OPTIONS = ('auto', 'resolve', 'explicit', 'forest')
|
|
|
|
|
|
class PostLex(ABC):
    """Abstract interface for lexer post-processors.

    A PostLex receives the lexer's token stream and returns a (possibly
    transformed) token stream.  ``always_accept`` lists token types the
    contextual lexer must accept in every state regardless of the parse
    table (empty by default).
    """
    @abstractmethod
    def process(self, stream):
        """Transform the token *stream*; the default passes it through."""
        return stream

    # Token types accepted in every lexer state (used by the contextual lexer).
    always_accept = ()
|
|
|
|
|
|
class Lark(Serialize):
    ## Main interface class: parses a grammar and builds the lexer/parser.
    ## NOTE: no class docstring on purpose — the `if __doc__:` block at the
    ## end of the class body reads the module __doc__ when the class has
    ## none, which is this generated file's behavior; adding one would
    ## change what OPTIONS_DOC gets appended to.
    def __init__(self, grammar, **options):
        """Build a Lark instance from *grammar* (a string, file-like object,
        or pre-built Grammar), honoring the keyword *options* (see
        LarkOptions.OPTIONS_DOC)."""
        self.options = LarkOptions(options)

        ## Select the regex implementation (third-party `regex` or stdlib `re`)
        use_regex = self.options.regex
        if use_regex:
            if regex:
                re_module = regex
            else:
                raise ImportError('`regex` module must be installed if calling `Lark(regex=True)`.')
        else:
            re_module = re

        ## Determine the grammar's source path; file-like objects may carry
        ## a .name attribute we can use.
        if self.options.source_path is None:
            try:
                self.source_path = grammar.name
            except AttributeError:
                self.source_path = '<string>'
        else:
            self.source_path = self.options.source_path

        ## Drain file-like objects to get their text contents.
        try:
            read = grammar.read
        except AttributeError:
            pass
        else:
            grammar = read()

        cache_fn = None
        cache_md5 = None
        if isinstance(grammar, STRING_TYPE):
            self.source_grammar = grammar
            if self.options.use_bytes:
                if not isascii(grammar):
                    raise ConfigurationError("Grammar must be ascii only, when use_bytes=True")
                if sys.version_info[0] == 2 and self.options.use_bytes != 'force':
                    raise ConfigurationError("`use_bytes=True` may have issues on python2."
                                             "Use `use_bytes='force'` to use it at your own risk.")

            if self.options.cache:
                if self.options.parser != 'lalr':
                    raise ConfigurationError("cache only works with parser='lalr' for now")

                ## The cache key hashes the grammar text plus every option
                ## except the unhashable (callable/object) ones.
                unhashable = ('transformer', 'postlex', 'lexer_callbacks', 'edit_terminals')
                options_str = ''.join(k+str(v) for k, v in options.items() if k not in unhashable)
                from . import __version__
                s = grammar + options_str + __version__ + str(sys.version_info[:2])
                cache_md5 = hashlib.md5(s.encode('utf8')).hexdigest()

                if isinstance(self.options.cache, STRING_TYPE):
                    cache_fn = self.options.cache
                else:
                    if self.options.cache is not True:
                        raise ConfigurationError("cache argument must be bool or str")
                    ## Default cache path in the temp dir, keyed by grammar
                    ## hash and Python major/minor version.
                    cache_fn = tempfile.gettempdir() + '/.lark_cache_%s_%s_%s.tmp' % ((cache_md5,) + sys.version_info[:2])

                if FS.exists(cache_fn):
                    logger.debug('Loading grammar from cache: %s', cache_fn)
                    ## Only the _LOAD_ALLOWED_OPTIONS survive a cache load;
                    ## strip the rest before calling _load.
                    for name in (set(options) - _LOAD_ALLOWED_OPTIONS):
                        del options[name]
                    with FS.open(cache_fn, 'rb') as f:
                        old_options = self.options
                        try:
                            file_md5 = f.readline().rstrip(b'\n')
                            cached_used_files = pickle.load(f)
                            if file_md5 == cache_md5.encode('utf8') and verify_used_files(cached_used_files):
                                cached_parser_data = pickle.load(f)
                                self._load(cached_parser_data, **options)
                                return
                        except Exception: ## We should probably narrow done which errors we catch here.
                            logger.exception("Failed to load Lark from cache: %r. We will try to carry on." % cache_fn)

                            ## The partial _load above may have clobbered
                            ## self.options; restore it and fall through to
                            ## a full grammar build.
                            self.options = old_options


            ## Parse the grammar file and compose the grammars.
            self.grammar, used_files = load_grammar(grammar, self.source_path, self.options.import_paths, self.options.keep_all_tokens)
        else:
            assert isinstance(grammar, Grammar)
            self.grammar = grammar


        ## Resolve lexer='auto' based on the chosen parser engine.
        if self.options.lexer == 'auto':
            if self.options.parser == 'lalr':
                self.options.lexer = 'contextual'
            elif self.options.parser == 'earley':
                if self.options.postlex is not None:
                    logger.info("postlex can't be used with the dynamic lexer, so we use standard instead. "
                                "Consider using lalr with contextual instead of earley")
                    self.options.lexer = 'standard'
                else:
                    self.options.lexer = 'dynamic'
            elif self.options.parser == 'cyk':
                self.options.lexer = 'standard'
            else:
                assert False, self.options.parser
        lexer = self.options.lexer
        if isinstance(lexer, type):
            assert issubclass(lexer, Lexer) ## custom lexer class: only the subclass relationship is checked
        else:
            assert_config(lexer, ('standard', 'contextual', 'dynamic', 'dynamic_complete'))
            if self.options.postlex is not None and 'dynamic' in lexer:
                raise ConfigurationError("Can't use postlex with a dynamic lexer. Use standard or contextual instead")

        ## Resolve ambiguity='auto'; explicit values are only valid for
        ## the earley/cyk engines.
        if self.options.ambiguity == 'auto':
            if self.options.parser == 'earley':
                self.options.ambiguity = 'resolve'
        else:
            assert_config(self.options.parser, ('earley', 'cyk'), "%r doesn't support disambiguation. Use one of these parsers instead: %s")

        if self.options.priority == 'auto':
            self.options.priority = 'normal'

        if self.options.priority not in _VALID_PRIORITY_OPTIONS:
            raise ConfigurationError("invalid priority option: %r. Must be one of %r" % (self.options.priority, _VALID_PRIORITY_OPTIONS))
        assert self.options.ambiguity not in ('resolve__antiscore_sum', ), 'resolve__antiscore_sum has been replaced with the option priority="invert"'
        if self.options.ambiguity not in _VALID_AMBIGUITY_OPTIONS:
            raise ConfigurationError("invalid ambiguity option: %r. Must be one of %r" % (self.options.ambiguity, _VALID_AMBIGUITY_OPTIONS))

        if self.options.parser is None:
            terminals_to_keep = '*'
        elif self.options.postlex is not None:
            terminals_to_keep = set(self.options.postlex.always_accept)
        else:
            terminals_to_keep = set()

        ## Compile the EBNF grammar into BNF.
        self.terminals, self.rules, self.ignore_tokens = self.grammar.compile(self.options.start, terminals_to_keep)

        if self.options.edit_terminals:
            for t in self.terminals:
                self.options.edit_terminals(t)

        self._terminals_dict = {t.name: t for t in self.terminals}

        ## priority='invert' negates every explicit rule priority; this is
        ## the replacement for the removed 'resolve__antiscore_sum' option.
        if self.options.priority == 'invert':
            for rule in self.rules:
                if rule.options.priority is not None:
                    rule.options.priority = -rule.options.priority
        ## priority=None strips all rule priorities, presumably letting the
        ## engines skip priority handling entirely — behavior of the
        ## consumers isn't visible from here.
        elif self.options.priority is None:
            for rule in self.rules:
                if rule.options.priority is not None:
                    rule.options.priority = None

        ## Build the lexer configuration shared by all lexer kinds.
        self.lexer_conf = LexerConf(
                self.terminals, re_module, self.ignore_tokens, self.options.postlex,
                self.options.lexer_callbacks, self.options.g_regex_flags, use_bytes=self.options.use_bytes
            )

        if self.options.parser:
            self.parser = self._build_parser()
        elif lexer:
            self.lexer = self._build_lexer()

        if cache_fn:
            logger.debug('Saving grammar to cache: %s', cache_fn)
            with FS.open(cache_fn, 'wb') as f:
                f.write(cache_md5.encode('utf8') + b'\n')
                pickle.dump(used_files, f)
                self.save(f)

    if __doc__:
        __doc__ += "\n\n" + LarkOptions.OPTIONS_DOC

    __serialize_fields__ = 'parser', 'rules', 'options'

    def _build_lexer(self, dont_ignore=False):
        """Build a TraditionalLexer; *dont_ignore* clears the ignore list
        on a copy of the lexer configuration."""
        lexer_conf = self.lexer_conf
        if dont_ignore:
            from copy import copy
            lexer_conf = copy(lexer_conf)
            lexer_conf.ignore = ()
        return TraditionalLexer(lexer_conf)

    def _prepare_callbacks(self):
        """Populate self._callbacks from the tree builder and transformer."""
        self._callbacks = {}
        ## Tree-building callbacks are unneeded when returning the raw forest.
        if self.options.ambiguity != 'forest':
            self._parse_tree_builder = ParseTreeBuilder(
                    self.rules,
                    self.options.tree_class or Tree,
                    self.options.propagate_positions,
                    self.options.parser != 'lalr' and self.options.ambiguity == 'explicit',
                    self.options.maybe_placeholders
                )
            self._callbacks = self._parse_tree_builder.create_callback(self.options.transformer)
        self._callbacks.update(_get_lexer_callbacks(self.options.transformer, self.terminals))

    def _build_parser(self):
        """Build the parsing frontend for the configured parser/lexer pair."""
        self._prepare_callbacks()
        parser_class = get_frontend(self.options.parser, self.options.lexer)
        parser_conf = ParserConf(self.rules, self._callbacks, self.options.start)
        return parser_class(self.lexer_conf, parser_conf, options=self.options)

    def save(self, f):
        """Pickle the serialized parser (data + memo) into file object *f*."""
        data, m = self.memo_serialize([TerminalDef, Rule])
        pickle.dump({'data': data, 'memo': m}, f, protocol=pickle.HIGHEST_PROTOCOL)

    @classmethod
    def load(cls, f):
        """Rebuild a Lark instance previously written with save()."""
        inst = cls.__new__(cls)
        return inst._load(f)

    def _deserialize_lexer_conf(self, data, memo, options):
        """Rebuild a LexerConf from serialized *data*, reapplying the
        runtime-only settings taken from *options*."""
        lexer_conf = LexerConf.deserialize(data['lexer_conf'], memo)
        lexer_conf.callbacks = options.lexer_callbacks or {}
        lexer_conf.re_module = regex if options.regex else re
        lexer_conf.use_bytes = options.use_bytes
        lexer_conf.g_regex_flags = options.g_regex_flags
        lexer_conf.skip_validation = True
        lexer_conf.postlex = options.postlex
        return lexer_conf

    def _load(self, f, **kwargs):
        """Populate this instance from serialized data (*f* is either the
        deserialized dict or a file object to unpickle).  Only the
        _LOAD_ALLOWED_OPTIONS may be overridden via *kwargs*."""
        if isinstance(f, dict):
            d = f
        else:
            d = pickle.load(f)
        memo_json = d['memo']
        data = d['data']

        assert memo_json
        memo = SerializeMemoizer.deserialize(memo_json, {'Rule': Rule, 'TerminalDef': TerminalDef}, {})
        options = dict(data['options'])
        if (set(kwargs) - _LOAD_ALLOWED_OPTIONS) & set(LarkOptions._defaults):
            raise ConfigurationError("Some options are not allowed when loading a Parser: {}"
                             .format(set(kwargs) - _LOAD_ALLOWED_OPTIONS))
        options.update(kwargs)
        self.options = LarkOptions.deserialize(options, memo)
        self.rules = [Rule.deserialize(r, memo) for r in data['rules']]
        self.source_path = '<deserialized>'
        parser_class = get_frontend(self.options.parser, self.options.lexer)
        self.lexer_conf = self._deserialize_lexer_conf(data['parser'], memo, self.options)
        self.terminals = self.lexer_conf.terminals
        self._prepare_callbacks()
        self._terminals_dict = {t.name: t for t in self.terminals}
        self.parser = parser_class.deserialize(
            data['parser'],
            memo,
            self.lexer_conf,
            self._callbacks,
            self.options, ## the frontend reads several attributes off the options object
        )
        return self

    @classmethod
    def _load_from_dict(cls, data, memo, **kwargs):
        """Like load(), but from already-deserialized data/memo dicts."""
        inst = cls.__new__(cls)
        return inst._load({'data': data, 'memo': memo}, **kwargs)

    @classmethod
    def open(cls, grammar_filename, rel_to=None, **options):
        """Create an instance from a grammar file, optionally resolved
        relative to the directory of *rel_to*."""
        if rel_to:
            basepath = os.path.dirname(rel_to)
            grammar_filename = os.path.join(basepath, grammar_filename)
        with open(grammar_filename, encoding='utf8') as f:
            return cls(f, **options)

    @classmethod
    def open_from_package(cls, package, grammar_path, search_paths=("",), **options):
        """Create an instance from a grammar bundled inside a Python
        package, wiring the package loader into import_paths so relative
        grammar imports resolve inside the package too."""
        package_loader = FromPackageLoader(package, search_paths)
        full_path, text = package_loader(None, grammar_path)
        options.setdefault('source_path', full_path)
        options.setdefault('import_paths', [])
        options['import_paths'].append(package_loader)
        return cls(text, **options)

    def __repr__(self):
        return 'Lark(open(%r), parser=%r, lexer=%r, ...)' % (self.source_path, self.options.parser, self.options.lexer)


    def lex(self, text, dont_ignore=False):
        """Lex *text* only (no parsing); with *dont_ignore*, a fresh lexer
        that keeps normally-ignored tokens is built for the call."""
        if not hasattr(self, 'lexer') or dont_ignore:
            lexer = self._build_lexer(dont_ignore)
        else:
            lexer = self.lexer
        lexer_thread = LexerThread(lexer, text)
        stream = lexer_thread.lex(None)
        if self.options.postlex:
            return self.options.postlex.process(stream)
        return stream

    def get_terminal(self, name):
        """Return the TerminalDef registered under *name*."""
        return self._terminals_dict[name]

    def parse_interactive(self, text=None, start=None):
        """Delegate to the frontend's interactive (step-by-step) parse."""
        return self.parser.parse_interactive(text, start=start)

    def parse(self, text, start=None, on_error=None):
        """Parse *text* and return the result (delegates to the frontend)."""
        return self.parser.parse(text, start=start, on_error=on_error)

    @property
    def source(self):
        # Deprecated alias for source_path.
        warn("Attribute Lark.source was renamed to Lark.source_path", DeprecationWarning)
        return self.source_path

    @source.setter
    def source(self, value):
        self.source_path = value

    @property
    def grammar_source(self):
        # Deprecated alias for source_grammar.
        warn("Attribute Lark.grammar_source was renamed to Lark.source_grammar", DeprecationWarning)
        return self.source_grammar

    @grammar_source.setter
    def grammar_source(self, value):
        self.source_grammar = value
|
|
|
|
|
|
|
|
class DedentError(LarkError):
    """Raised by Indenter.handle_NL when a dedent does not line up with
    any indentation level currently on the stack."""
    pass
|
|
|
|
class Indenter(PostLex):
    """Post-lexer that turns newline tokens into INDENT/DEDENT tokens.

    Subclasses are expected to define NL_type, INDENT_type, DEDENT_type,
    OPEN_PAREN_types, CLOSE_PAREN_types and tab_len (this base class only
    reads them — it never defines them itself).
    """
    def __init__(self):
        # Both fields are reset to real values at the start of process();
        # None here just marks "not processing yet".
        self.paren_level = None
        self.indent_level = None
        assert self.tab_len > 0

    def handle_NL(self, token):
        """Yield *token* plus any INDENT/DEDENT tokens its trailing
        indentation implies.  Newlines inside parentheses are dropped."""
        if self.paren_level > 0:
            return

        yield token

        indent_str = token.rsplit('\n', 1)[1] ## text after the last newline: tabs and spaces
        indent = indent_str.count(' ') + indent_str.count('\t') * self.tab_len

        if indent > self.indent_level[-1]:
            self.indent_level.append(indent)
            yield Token.new_borrow_pos(self.INDENT_type, indent_str, token)
        else:
            ## Pop (and emit a DEDENT for) every level deeper than the new
            ## indentation; afterwards it must match a level exactly.
            while indent < self.indent_level[-1]:
                self.indent_level.pop()
                yield Token.new_borrow_pos(self.DEDENT_type, indent_str, token)

            if indent != self.indent_level[-1]:
                raise DedentError('Unexpected dedent to column %s. Expected dedent to %s' % (indent, self.indent_level[-1]))

    def _process(self, stream):
        # Generator doing the actual work; process() resets state then
        # returns this.
        for token in stream:
            if token.type == self.NL_type:
                for t in self.handle_NL(token):
                    yield t
            else:
                yield token

            ## Track parenthesis nesting so handle_NL can ignore newlines
            ## inside brackets.
            if token.type in self.OPEN_PAREN_types:
                self.paren_level += 1
            elif token.type in self.CLOSE_PAREN_types:
                self.paren_level -= 1
                assert self.paren_level >= 0

        ## End of input: close any indentation still open.
        while len(self.indent_level) > 1:
            self.indent_level.pop()
            yield Token(self.DEDENT_type, '')

        assert self.indent_level == [0], self.indent_level

    def process(self, stream):
        """Reset indentation state and return the processed token stream."""
        self.paren_level = 0
        self.indent_level = [0]
        return self._process(stream)

    ## The contextual lexer must always accept newline tokens so this
    ## post-lexer gets to see them in every parser state.
    @property
    def always_accept(self):
        return (self.NL_type,)
|
|
|
|
|
|
import pickle, zlib, base64
|
|
# Serialized parser specification for this generated standalone parser:
# lexer terminals, LALR(1) parse tables, grammar rules, and Lark options.
# Repeated sub-structures appear as {'@': n} placeholders, resolved against
# the MEMO table defined below.
# NOTE(review): presumably consumed by the standalone loader elsewhere in
# this generated file -- machine-generated, do not edit by hand.
DATA = (
{'parser': {'lexer_conf': {'terminals': [{'@': 0}, {'@': 1}, {'@': 2}, {'@': 3}, {'@': 4}, {'@': 5}, {'@': 6}, {'@': 7}, {'@': 8}, {'@': 9}, {'@': 10}, {'@': 11}, {'@': 12}, {'@': 13}, {'@': 14}, {'@': 15}, {'@': 16}, {'@': 17}, {'@': 18}, {'@': 19}, {'@': 20}], 'ignore': ['WS'], 'g_regex_flags': 0, 'use_bytes': False, 'lexer_type': 'contextual', '__type__': 'LexerConf'}, 'parser_conf': {'rules': [{'@': 21}, {'@': 22}, {'@': 23}, {'@': 24}, {'@': 25}, {'@': 26}, {'@': 27}, {'@': 28}, {'@': 29}, {'@': 30}, {'@': 31}, {'@': 32}, {'@': 33}, {'@': 34}, {'@': 35}, {'@': 36}, {'@': 37}, {'@': 38}, {'@': 39}, {'@': 40}, {'@': 41}, {'@': 42}, {'@': 43}, {'@': 44}, {'@': 45}, {'@': 46}, {'@': 47}, {'@': 48}, {'@': 49}, {'@': 50}, {'@': 51}, {'@': 52}, {'@': 53}, {'@': 54}, {'@': 55}, {'@': 56}, {'@': 57}, {'@': 58}, {'@': 59}, {'@': 60}, {'@': 61}, {'@': 62}, {'@': 63}, {'@': 64}, {'@': 65}, {'@': 66}, {'@': 67}, {'@': 68}, {'@': 69}, {'@': 70}, {'@': 71}, {'@': 72}, {'@': 73}, {'@': 74}, {'@': 75}, {'@': 76}, {'@': 77}, {'@': 78}, {'@': 79}, {'@': 80}, {'@': 81}, {'@': 82}, {'@': 83}, {'@': 84}, {'@': 85}, {'@': 86}, {'@': 87}, {'@': 88}, {'@': 89}, {'@': 90}, {'@': 91}, {'@': 92}, {'@': 93}, {'@': 94}, {'@': 95}, {'@': 96}, {'@': 97}, {'@': 98}, {'@': 99}, {'@': 100}, {'@': 101}, {'@': 102}, {'@': 103}, {'@': 104}, {'@': 105}, {'@': 106}, {'@': 107}, {'@': 108}, {'@': 109}, {'@': 110}, {'@': 111}, {'@': 112}, {'@': 113}, {'@': 114}, {'@': 115}, {'@': 116}, {'@': 117}, {'@': 118}, {'@': 119}, {'@': 120}, {'@': 121}], 'start': ['start'], 'parser_type': 'lalr', '__type__': 'ParserConf'}, 'parser': {'tokens': {0: 'INTERFACE', 1: '$END', 2: 'COMMENT', 3: 'OBJECT', 4: '__description_plus_6', 5: 'METHOD', 6: 'description', 7: 'method', 8: 'RBRACE', 9: 'name', 10: 'IDENTIFIER', 11: 'LBRACE', 12: 'LSQB', 13: 'options', 14: 'EXPOSE', 15: 'FUNCTION', 16: 'expose', 17: 'interface_param', 18: 'function', 19: 'PARAM', 20: 'PRIMITIVE', 21: 'type', 22: 'object_name', 23: 
'__object_star_2', 24: 'cname', 25: 'CNAME', 26: '__function_star_4', 27: 'param', 28: '__interface_star_3', 29: 'IMPORT', 30: 'OPTION', 31: 'RSQB', 32: 'uid', 33: '__ANON_0', 34: '__options_plus_5', 35: 'UID', 36: 'super', 37: 'COLON', 38: 'object', 39: 'interface', 40: 'PATH', 41: '__start_plus_1', 42: '__start_star_0', 43: 'start', 44: 'import_statement'}, 'states': {0: {0: (1, {'@': 55}), 1: (1, {'@': 55}), 2: (1, {'@': 55}), 3: (1, {'@': 55})}, 1: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 82)}, 2: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 153)}, 3: {9: (0, 18), 10: (0, 165)}, 4: {5: (1, {'@': 110}), 8: (1, {'@': 110}), 2: (1, {'@': 110})}, 5: {0: (1, {'@': 60}), 1: (1, {'@': 60}), 2: (1, {'@': 60}), 3: (1, {'@': 60})}, 6: {0: (1, {'@': 29}), 1: (1, {'@': 29}), 2: (1, {'@': 29}), 3: (1, {'@': 29})}, 7: {0: (1, {'@': 45}), 1: (1, {'@': 45}), 2: (1, {'@': 45}), 3: (1, {'@': 45})}, 8: {11: (0, 110), 12: (0, 140), 13: (0, 10), 14: (1, {'@': 75}), 15: (1, {'@': 75}), 8: (1, {'@': 75}), 2: (1, {'@': 75})}, 9: {9: (0, 92), 10: (0, 165)}, 10: {11: (0, 57), 14: (1, {'@': 72}), 15: (1, {'@': 72}), 8: (1, {'@': 72}), 2: (1, {'@': 72})}, 11: {4: (0, 94), 16: (0, 95), 6: (0, 68), 17: (0, 75), 15: (0, 34), 2: (0, 197), 8: (0, 105), 18: (0, 81), 14: (0, 16)}, 12: {0: (1, {'@': 30}), 1: (1, {'@': 30}), 2: (1, {'@': 30}), 3: (1, {'@': 30})}, 13: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 58)}, 14: {15: (1, {'@': 99}), 8: (1, {'@': 99}), 2: (1, {'@': 99}), 14: (1, {'@': 99}), 12: (1, {'@': 99}), 19: (1, {'@': 99})}, 15: {11: (0, 122), 14: (1, {'@': 78}), 15: (1, {'@': 78}), 8: (1, {'@': 78}), 2: (1, {'@': 78})}, 16: {20: (0, 76), 21: (0, 96), 22: (0, 14), 3: (0, 111)}, 17: {8: (0, 65), 4: (0, 94), 16: (0, 95), 6: (0, 68), 17: (0, 75), 15: (0, 34), 2: (0, 197), 18: (0, 81), 14: (0, 16)}, 18: {12: (0, 140), 11: (0, 25), 13: (0, 85), 5: (1, {'@': 93}), 8: (1, {'@': 93}), 2: (1, {'@': 93})}, 19: {4: (0, 
94), 16: (0, 95), 6: (0, 68), 8: (0, 88), 17: (0, 75), 15: (0, 34), 2: (0, 197), 18: (0, 81), 14: (0, 16)}, 20: {4: (0, 94), 23: (0, 98), 5: (0, 3), 8: (0, 21), 6: (0, 104), 24: (0, 114), 2: (0, 197), 25: (0, 90), 7: (0, 4)}, 21: {0: (1, {'@': 51}), 1: (1, {'@': 51}), 2: (1, {'@': 51}), 3: (1, {'@': 51})}, 22: {0: (1, {'@': 36}), 1: (1, {'@': 36}), 2: (1, {'@': 36}), 3: (1, {'@': 36})}, 23: {4: (0, 94), 8: (0, 87), 23: (0, 42), 5: (0, 3), 24: (0, 37), 6: (0, 104), 2: (0, 197), 25: (0, 90), 7: (0, 4)}, 24: {4: (0, 94), 23: (0, 135), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 167), 7: (0, 4)}, 25: {19: (0, 120), 26: (0, 50), 8: (0, 66), 27: (0, 108)}, 26: {0: (1, {'@': 26}), 1: (1, {'@': 26}), 2: (1, {'@': 26}), 3: (1, {'@': 26})}, 27: {4: (0, 94), 23: (0, 2), 5: (0, 3), 6: (0, 104), 2: (0, 197), 25: (0, 90), 24: (0, 24), 7: (0, 4), 8: (0, 43)}, 28: {8: (0, 36), 4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35)}, 29: {0: (1, {'@': 59}), 1: (1, {'@': 59}), 2: (1, {'@': 59}), 3: (1, {'@': 59})}, 30: {8: (0, 152), 4: (0, 94), 16: (0, 95), 6: (0, 68), 15: (0, 34), 2: (0, 197), 17: (0, 101), 18: (0, 81), 14: (0, 16), 28: (0, 19)}, 31: {0: (1, {'@': 40}), 1: (1, {'@': 40}), 2: (1, {'@': 40}), 3: (1, {'@': 40})}, 32: {5: (1, {'@': 91}), 8: (1, {'@': 91}), 2: (1, {'@': 91})}, 33: {14: (1, {'@': 76}), 15: (1, {'@': 76}), 8: (1, {'@': 76}), 2: (1, {'@': 76})}, 34: {9: (0, 64), 10: (0, 165)}, 35: {5: (1, {'@': 111}), 8: (1, {'@': 111}), 2: (1, {'@': 111})}, 36: {0: (1, {'@': 48}), 1: (1, {'@': 48}), 2: (1, {'@': 48}), 3: (1, {'@': 48})}, 37: {4: (0, 94), 23: (0, 56), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 7), 7: (0, 4)}, 38: {19: (0, 120), 8: (0, 84), 27: (0, 61)}, 39: {0: (1, {'@': 46}), 1: (1, {'@': 46}), 2: (1, {'@': 46}), 3: (1, {'@': 46})}, 40: {0: (1, {'@': 32}), 1: (1, {'@': 32}), 2: (1, {'@': 32}), 3: (1, {'@': 32})}, 41: {11: (0, 176)}, 42: {4: (0, 94), 8: (0, 39), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35)}, 43: {0: (1, {'@': 43}), 1: (1, {'@': 
43}), 2: (1, {'@': 43}), 3: (1, {'@': 43})}, 44: {8: (0, 109), 4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35)}, 45: {19: (0, 120), 27: (0, 61), 8: (0, 121)}, 46: {0: (1, {'@': 28}), 1: (1, {'@': 28}), 2: (1, {'@': 28}), 3: (1, {'@': 28})}, 47: {0: (1, {'@': 53}), 1: (1, {'@': 53}), 2: (1, {'@': 53}), 3: (1, {'@': 53})}, 48: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 12)}, 49: {4: (0, 94), 24: (0, 115), 23: (0, 48), 5: (0, 3), 6: (0, 104), 2: (0, 197), 25: (0, 90), 7: (0, 4), 8: (0, 55)}, 50: {19: (0, 120), 27: (0, 61), 8: (0, 32)}, 51: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 89)}, 52: {0: (1, {'@': 25}), 1: (1, {'@': 25}), 2: (1, {'@': 25}), 3: (1, {'@': 25})}, 53: {5: (1, {'@': 89}), 8: (1, {'@': 89}), 2: (1, {'@': 89})}, 54: {0: (1, {'@': 63}), 1: (1, {'@': 63}), 2: (1, {'@': 63}), 3: (1, {'@': 63})}, 55: {0: (1, {'@': 31}), 1: (1, {'@': 31}), 2: (1, {'@': 31}), 3: (1, {'@': 31})}, 56: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 179)}, 57: {26: (0, 127), 19: (0, 120), 27: (0, 108), 8: (0, 132)}, 58: {0: (1, {'@': 54}), 1: (1, {'@': 54}), 2: (1, {'@': 54}), 3: (1, {'@': 54})}, 59: {0: (1, {'@': 49}), 1: (1, {'@': 49}), 2: (1, {'@': 49}), 3: (1, {'@': 49})}, 60: {14: (1, {'@': 80}), 15: (1, {'@': 80}), 8: (1, {'@': 80}), 2: (1, {'@': 80})}, 61: {8: (1, {'@': 115}), 19: (1, {'@': 115})}, 62: {19: (0, 120), 27: (0, 108), 26: (0, 45), 8: (0, 53)}, 63: {4: (0, 94), 23: (0, 204), 5: (0, 3), 6: (0, 104), 24: (0, 198), 2: (0, 197), 8: (0, 196), 25: (0, 90), 7: (0, 4)}, 64: {12: (0, 140), 13: (0, 15), 11: (0, 106), 14: (1, {'@': 81}), 15: (1, {'@': 81}), 8: (1, {'@': 81}), 2: (1, {'@': 81})}, 65: {0: (1, {'@': 62}), 1: (1, {'@': 62}), 2: (1, {'@': 62}), 3: (1, {'@': 62})}, 66: {5: (1, {'@': 92}), 8: (1, {'@': 92}), 2: (1, {'@': 92})}, 67: {8: (0, 47), 4: (0, 94), 23: (0, 51), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 4)}, 68: {15: (0, 91)}, 69: {21: (0, 107), 20: (0, 76), 22: (0, 
14), 3: (0, 111)}, 70: {5: (1, {'@': 68}), 8: (1, {'@': 68}), 2: (1, {'@': 68})}, 71: {4: (0, 94), 16: (0, 95), 6: (0, 68), 17: (0, 75), 15: (0, 34), 2: (0, 197), 18: (0, 81), 8: (0, 5), 14: (0, 16)}, 72: {19: (0, 120), 27: (0, 108), 26: (0, 80), 8: (0, 86)}, 73: {0: (1, {'@': 107}), 1: (1, {'@': 107}), 2: (1, {'@': 107}), 3: (1, {'@': 107})}, 74: {4: (0, 94), 16: (0, 95), 6: (0, 68), 15: (0, 34), 2: (0, 197), 28: (0, 71), 17: (0, 101), 18: (0, 81), 8: (0, 103), 14: (0, 16)}, 75: {14: (1, {'@': 113}), 15: (1, {'@': 113}), 8: (1, {'@': 113}), 2: (1, {'@': 113})}, 76: {15: (1, {'@': 98}), 8: (1, {'@': 98}), 2: (1, {'@': 98}), 14: (1, {'@': 98}), 12: (1, {'@': 98}), 19: (1, {'@': 98})}, 77: {4: (0, 94), 23: (0, 202), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 143), 7: (0, 4)}, 78: {8: (0, 124), 19: (0, 120), 27: (0, 61)}, 79: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 46)}, 80: {8: (0, 174), 19: (0, 120), 27: (0, 61)}, 81: {14: (1, {'@': 65}), 15: (1, {'@': 65}), 8: (1, {'@': 65}), 2: (1, {'@': 65})}, 82: {0: (1, {'@': 38}), 1: (1, {'@': 38}), 2: (1, {'@': 38}), 3: (1, {'@': 38})}, 83: {11: (0, 130), 5: (1, {'@': 84}), 8: (1, {'@': 84}), 2: (1, {'@': 84})}, 84: {14: (1, {'@': 79}), 15: (1, {'@': 79}), 8: (1, {'@': 79}), 2: (1, {'@': 79})}, 85: {11: (0, 62), 5: (1, {'@': 90}), 8: (1, {'@': 90}), 2: (1, {'@': 90})}, 86: {5: (1, {'@': 86}), 8: (1, {'@': 86}), 2: (1, {'@': 86})}, 87: {0: (1, {'@': 47}), 1: (1, {'@': 47}), 2: (1, {'@': 47}), 3: (1, {'@': 47})}, 88: {0: (1, {'@': 56}), 1: (1, {'@': 56}), 2: (1, {'@': 56}), 3: (1, {'@': 56})}, 89: {0: (1, {'@': 52}), 1: (1, {'@': 52}), 2: (1, {'@': 52}), 3: (1, {'@': 52})}, 90: {10: (0, 70)}, 91: {9: (0, 8), 10: (0, 165)}, 92: {13: (0, 83), 12: (0, 140), 11: (0, 72), 5: (1, {'@': 87}), 8: (1, {'@': 87}), 2: (1, {'@': 87})}, 93: {14: (1, {'@': 74}), 15: (1, {'@': 74}), 8: (1, {'@': 74}), 2: (1, {'@': 74})}, 94: {2: (0, 172), 5: (1, {'@': 103}), 15: (1, {'@': 103}), 8: (1, {'@': 103}), 19: (1, {'@': 
103}), 3: (1, {'@': 103}), 0: (1, {'@': 103})}, 95: {14: (1, {'@': 64}), 15: (1, {'@': 64}), 8: (1, {'@': 64}), 2: (1, {'@': 64})}, 96: {14: (1, {'@': 66}), 15: (1, {'@': 66}), 8: (1, {'@': 66}), 2: (1, {'@': 66})}, 97: {19: (0, 120), 27: (0, 61), 8: (0, 33)}, 98: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 162)}, 99: {3: (0, 150), 0: (0, 158)}, 100: {14: (1, {'@': 100}), 15: (1, {'@': 100}), 8: (1, {'@': 100}), 2: (1, {'@': 100}), 12: (1, {'@': 100}), 19: (1, {'@': 100})}, 101: {14: (1, {'@': 112}), 15: (1, {'@': 112}), 8: (1, {'@': 112}), 2: (1, {'@': 112})}, 102: {14: (1, {'@': 77}), 15: (1, {'@': 77}), 8: (1, {'@': 77}), 2: (1, {'@': 77})}, 103: {0: (1, {'@': 61}), 1: (1, {'@': 61}), 2: (1, {'@': 61}), 3: (1, {'@': 61})}, 104: {5: (0, 9)}, 105: {0: (1, {'@': 58}), 1: (1, {'@': 58}), 2: (1, {'@': 58}), 3: (1, {'@': 58})}, 106: {19: (0, 120), 27: (0, 108), 26: (0, 38), 8: (0, 60)}, 107: {12: (0, 140), 4: (0, 94), 6: (0, 139), 13: (0, 144), 2: (0, 197), 8: (1, {'@': 97}), 19: (1, {'@': 97})}, 108: {8: (1, {'@': 114}), 19: (1, {'@': 114})}, 109: {0: (1, {'@': 24}), 1: (1, {'@': 24}), 2: (1, {'@': 24}), 3: (1, {'@': 24})}, 110: {19: (0, 120), 27: (0, 108), 26: (0, 78), 8: (0, 93)}, 111: {9: (0, 100), 10: (0, 165)}, 112: {8: (1, {'@': 94}), 19: (1, {'@': 94})}, 113: {0: (1, {'@': 39}), 1: (1, {'@': 39}), 2: (1, {'@': 39}), 3: (1, {'@': 39})}, 114: {4: (0, 94), 23: (0, 28), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 59), 7: (0, 4)}, 115: {4: (0, 94), 23: (0, 79), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 6), 7: (0, 4)}, 116: {5: (1, {'@': 83}), 8: (1, {'@': 83}), 2: (1, {'@': 83})}, 117: {0: (1, {'@': 27}), 1: (1, {'@': 27}), 2: (1, {'@': 27}), 3: (1, {'@': 27})}, 118: {19: (0, 120), 8: (0, 128), 27: (0, 61)}, 119: {0: (1, {'@': 104}), 29: (1, {'@': 104}), 2: (1, {'@': 104}), 3: (1, {'@': 104})}, 120: {10: (0, 165), 9: (0, 69)}, 121: {5: (1, {'@': 88}), 8: (1, {'@': 88}), 2: (1, {'@': 88})}, 122: {19: (0, 120), 27: (0, 108), 26: (0, 97), 8: 
(0, 102)}, 123: {10: (1, {'@': 117}), 30: (1, {'@': 117}), 31: (1, {'@': 117})}, 124: {14: (1, {'@': 73}), 15: (1, {'@': 73}), 8: (1, {'@': 73}), 2: (1, {'@': 73})}, 125: {0: (1, {'@': 23}), 29: (1, {'@': 23}), 2: (1, {'@': 23}), 3: (1, {'@': 23})}, 126: {0: (1, {'@': 108}), 1: (1, {'@': 108}), 2: (1, {'@': 108}), 3: (1, {'@': 108})}, 127: {19: (0, 120), 8: (0, 148), 27: (0, 61)}, 128: {5: (1, {'@': 82}), 8: (1, {'@': 82}), 2: (1, {'@': 82})}, 129: {12: (0, 140), 11: (0, 142), 13: (0, 181)}, 130: {19: (0, 120), 8: (0, 116), 27: (0, 108), 26: (0, 118)}, 131: {32: (0, 27), 33: (0, 141)}, 132: {14: (1, {'@': 71}), 15: (1, {'@': 71}), 8: (1, {'@': 71}), 2: (1, {'@': 71})}, 133: {5: (1, {'@': 67}), 8: (1, {'@': 67}), 25: (1, {'@': 67}), 2: (1, {'@': 67}), 14: (1, {'@': 67}), 15: (1, {'@': 67})}, 134: {10: (1, {'@': 118}), 30: (1, {'@': 118}), 31: (1, {'@': 118})}, 135: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 31)}, 136: {10: (1, {'@': 116}), 30: (1, {'@': 116}), 31: (1, {'@': 116})}, 137: {4: (0, 94), 28: (0, 11), 16: (0, 95), 6: (0, 68), 15: (0, 34), 2: (0, 197), 17: (0, 101), 18: (0, 81), 8: (0, 29), 14: (0, 16)}, 138: {32: (0, 30), 33: (0, 141)}, 139: {8: (1, {'@': 96}), 19: (1, {'@': 96})}, 140: {34: (0, 168), 30: (0, 136), 10: (0, 123)}, 141: {35: (0, 133)}, 142: {32: (0, 137), 33: (0, 141)}, 143: {0: (1, {'@': 37}), 1: (1, {'@': 37}), 2: (1, {'@': 37}), 3: (1, {'@': 37})}, 144: {6: (0, 112), 4: (0, 94), 2: (0, 197), 8: (1, {'@': 95}), 19: (1, {'@': 95})}, 145: {4: (0, 94), 23: (0, 1), 5: (0, 3), 6: (0, 104), 2: (0, 197), 24: (0, 77), 25: (0, 90), 8: (0, 113), 7: (0, 4)}, 146: {32: (0, 63), 33: (0, 141)}, 147: {13: (0, 191), 11: (0, 154), 36: (0, 185), 12: (0, 140), 37: (0, 151)}, 148: {14: (1, {'@': 70}), 15: (1, {'@': 70}), 8: (1, {'@': 70}), 2: (1, {'@': 70})}, 149: {}, 150: {9: (0, 147), 10: (0, 165)}, 151: {10: (0, 165), 9: (0, 190)}, 152: {0: (1, {'@': 57}), 1: (1, {'@': 57}), 2: (1, {'@': 57}), 3: (1, {'@': 57})}, 153: {0: (1, 
{'@': 42}), 1: (1, {'@': 42}), 2: (1, {'@': 42}), 3: (1, {'@': 42})}, 154: {32: (0, 145), 33: (0, 141)}, 155: {32: (0, 20), 33: (0, 141)}, 156: {32: (0, 74), 33: (0, 141)}, 157: {13: (0, 166), 12: (0, 140), 11: (0, 186)}, 158: {9: (0, 129), 10: (0, 165)}, 159: {38: (0, 126), 6: (0, 99), 4: (0, 94), 2: (0, 197), 3: (0, 164), 0: (0, 203), 39: (0, 188), 1: (1, {'@': 21})}, 160: {0: (1, {'@': 106}), 1: (1, {'@': 106}), 2: (1, {'@': 106}), 3: (1, {'@': 106})}, 161: {11: (1, {'@': 102}), 15: (1, {'@': 102}), 8: (1, {'@': 102}), 2: (1, {'@': 102}), 14: (1, {'@': 102}), 5: (1, {'@': 102}), 19: (1, {'@': 102}), 37: (1, {'@': 102})}, 162: {0: (1, {'@': 50}), 1: (1, {'@': 50}), 2: (1, {'@': 50}), 3: (1, {'@': 50})}, 163: {4: (0, 94), 5: (0, 3), 8: (0, 26), 6: (0, 104), 2: (0, 197), 7: (0, 35)}, 164: {9: (0, 173), 10: (0, 165)}, 165: {5: (1, {'@': 101}), 11: (1, {'@': 101}), 12: (1, {'@': 101}), 2: (1, {'@': 101}), 8: (1, {'@': 101}), 15: (1, {'@': 101}), 14: (1, {'@': 101}), 19: (1, {'@': 101}), 20: (1, {'@': 101}), 3: (1, {'@': 101}), 37: (1, {'@': 101})}, 166: {11: (0, 156)}, 167: {0: (1, {'@': 41}), 1: (1, {'@': 41}), 2: (1, {'@': 41}), 3: (1, {'@': 41})}, 168: {10: (0, 195), 31: (0, 161), 30: (0, 134)}, 169: {8: (0, 52), 4: (0, 94), 23: (0, 44), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 4)}, 170: {40: (0, 125)}, 171: {11: (0, 183), 37: (0, 151), 36: (0, 192)}, 172: {5: (1, {'@': 121}), 2: (1, {'@': 121}), 15: (1, {'@': 121}), 8: (1, {'@': 121}), 19: (1, {'@': 121}), 0: (1, {'@': 121}), 3: (1, {'@': 121})}, 173: {13: (0, 171), 36: (0, 201), 11: (0, 177), 12: (0, 140), 37: (0, 151)}, 174: {5: (1, {'@': 85}), 8: (1, {'@': 85}), 2: (1, {'@': 85})}, 175: {6: (0, 99), 4: (0, 94), 39: (0, 73), 2: (0, 197), 41: (0, 180), 29: (0, 170), 3: (0, 164), 42: (0, 193), 38: (0, 160), 0: (0, 203), 43: (0, 149), 44: (0, 119)}, 176: {32: (0, 178), 33: (0, 141)}, 177: {32: (0, 194), 33: (0, 141)}, 178: {8: (0, 117), 4: (0, 94), 23: (0, 163), 5: (0, 3), 6: (0, 104), 2: (0, 197), 24: (0, 
169), 25: (0, 90), 7: (0, 4)}, 179: {0: (1, {'@': 44}), 1: (1, {'@': 44}), 2: (1, {'@': 44}), 3: (1, {'@': 44})}, 180: {38: (0, 126), 6: (0, 99), 4: (0, 94), 2: (0, 197), 3: (0, 164), 0: (0, 203), 39: (0, 188), 1: (1, {'@': 22})}, 181: {11: (0, 138)}, 182: {0: (1, {'@': 33}), 1: (1, {'@': 33}), 2: (1, {'@': 33}), 3: (1, {'@': 33})}, 183: {32: (0, 23), 33: (0, 141)}, 184: {32: (0, 49), 33: (0, 141)}, 185: {11: (0, 146)}, 186: {32: (0, 200), 33: (0, 141)}, 187: {4: (0, 94), 5: (0, 3), 8: (0, 40), 6: (0, 104), 2: (0, 197), 7: (0, 35)}, 188: {0: (1, {'@': 109}), 1: (1, {'@': 109}), 2: (1, {'@': 109}), 3: (1, {'@': 109})}, 189: {0: (1, {'@': 105}), 29: (1, {'@': 105}), 2: (1, {'@': 105}), 3: (1, {'@': 105})}, 190: {11: (1, {'@': 69})}, 191: {11: (0, 184), 36: (0, 41), 37: (0, 151)}, 192: {11: (0, 131)}, 193: {6: (0, 99), 4: (0, 94), 39: (0, 73), 41: (0, 159), 2: (0, 197), 3: (0, 164), 29: (0, 170), 38: (0, 160), 0: (0, 203), 44: (0, 189)}, 194: {4: (0, 94), 23: (0, 13), 5: (0, 3), 6: (0, 104), 2: (0, 197), 24: (0, 67), 25: (0, 90), 7: (0, 4), 8: (0, 0)}, 195: {10: (1, {'@': 119}), 30: (1, {'@': 119}), 31: (1, {'@': 119})}, 196: {0: (1, {'@': 35}), 1: (1, {'@': 35}), 2: (1, {'@': 35}), 3: (1, {'@': 35})}, 197: {5: (1, {'@': 120}), 2: (1, {'@': 120}), 15: (1, {'@': 120}), 8: (1, {'@': 120}), 19: (1, {'@': 120}), 0: (1, {'@': 120}), 3: (1, {'@': 120})}, 198: {4: (0, 94), 23: (0, 187), 5: (0, 3), 6: (0, 104), 2: (0, 197), 8: (0, 182), 7: (0, 4)}, 199: {0: (1, {'@': 34}), 1: (1, {'@': 34}), 2: (1, {'@': 34}), 3: (1, {'@': 34})}, 200: {4: (0, 94), 16: (0, 95), 6: (0, 68), 15: (0, 34), 2: (0, 197), 8: (0, 54), 17: (0, 101), 18: (0, 81), 28: (0, 17), 14: (0, 16)}, 201: {11: (0, 155)}, 202: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 22)}, 203: {10: (0, 165), 9: (0, 157)}, 204: {4: (0, 94), 5: (0, 3), 6: (0, 104), 2: (0, 197), 7: (0, 35), 8: (0, 199)}}, 'start_states': {'start': 175}, 'end_states': {'start': 149}}, 'options': {'debug': False, 
'keep_all_tokens': False, 'tree_class': None, 'cache': False, 'postlex': None, 'parser': 'lalr', 'lexer': 'contextual', 'transformer': None, 'start': ['start'], 'priority': 'normal', 'ambiguity': 'auto', 'regex': False, 'propagate_positions': False, 'lexer_callbacks': {}, 'maybe_placeholders': False, 'edit_terminals': None, 'g_regex_flags': 0, 'use_bytes': False, 'import_paths': [], 'source_path': None}, '__type__': 'ParsingFrontend'}, 'rules': [{'@': 21}, {'@': 22}, {'@': 23}, {'@': 24}, {'@': 25}, {'@': 26}, {'@': 27}, {'@': 28}, {'@': 29}, {'@': 30}, {'@': 31}, {'@': 32}, {'@': 33}, {'@': 34}, {'@': 35}, {'@': 36}, {'@': 37}, {'@': 38}, {'@': 39}, {'@': 40}, {'@': 41}, {'@': 42}, {'@': 43}, {'@': 44}, {'@': 45}, {'@': 46}, {'@': 47}, {'@': 48}, {'@': 49}, {'@': 50}, {'@': 51}, {'@': 52}, {'@': 53}, {'@': 54}, {'@': 55}, {'@': 56}, {'@': 57}, {'@': 58}, {'@': 59}, {'@': 60}, {'@': 61}, {'@': 62}, {'@': 63}, {'@': 64}, {'@': 65}, {'@': 66}, {'@': 67}, {'@': 68}, {'@': 69}, {'@': 70}, {'@': 71}, {'@': 72}, {'@': 73}, {'@': 74}, {'@': 75}, {'@': 76}, {'@': 77}, {'@': 78}, {'@': 79}, {'@': 80}, {'@': 81}, {'@': 82}, {'@': 83}, {'@': 84}, {'@': 85}, {'@': 86}, {'@': 87}, {'@': 88}, {'@': 89}, {'@': 90}, {'@': 91}, {'@': 92}, {'@': 93}, {'@': 94}, {'@': 95}, {'@': 96}, {'@': 97}, {'@': 98}, {'@': 99}, {'@': 100}, {'@': 101}, {'@': 102}, {'@': 103}, {'@': 104}, {'@': 105}, {'@': 106}, {'@': 107}, {'@': 108}, {'@': 109}, {'@': 110}, {'@': 111}, {'@': 112}, {'@': 113}, {'@': 114}, {'@': 115}, {'@': 116}, {'@': 117}, {'@': 118}, {'@': 119}, {'@': 120}, {'@': 121}], 'options': {'debug': False, 'keep_all_tokens': False, 'tree_class': None, 'cache': False, 'postlex': None, 'parser': 'lalr', 'lexer': 'contextual', 'transformer': None, 'start': ['start'], 'priority': 'normal', 'ambiguity': 'auto', 'regex': False, 'propagate_positions': False, 'lexer_callbacks': {}, 'maybe_placeholders': False, 'edit_terminals': None, 'g_regex_flags': 0, 'use_bytes': False, 'import_paths': [], 
'source_path': None}, '__type__': 'Lark'}
)
|
|
MEMO = (
|
|
{0: {'name': 'IDENTIFIER', 'pattern': {'value': '(?:(?:[A-Z]|[a-z])|_)(?:(?:(?:[A-Z]|[a-z])|[0-9]|_))*', 'flags': [], '_width': [1, 4294967295], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 1: {'name': 'WS', 'pattern': {'value': '(?:[ \t\x0c\r\n])+', 'flags': [], '_width': [1, 4294967295], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 2: {'name': 'PRIMITIVE', 'pattern': {'value': '(?:address|string|buffer|u?int(8|16|32|64)?|size)', 'flags': [], '_width': [3, 7], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 3: {'name': 'UID', 'pattern': {'value': '[0-9a-fA-F]{16}', 'flags': [], '_width': [16, 16], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 4: {'name': 'OPTION', 'pattern': {'value': '(?:(?:[A-Z]|[a-z])|_)(?:(?:(?:[A-Z]|[a-z])|[0-9]|_))*:(?:(?:[A-Z]|[a-z])|_)(?:(?:(?:[A-Z]|[a-z])|[0-9]|_))*', 'flags': [], '_width': [3, 4294967295], '__type__': 'PatternRE'}, 'priority': 2, '__type__': 'TerminalDef'}, 5: {'name': 'COMMENT', 'pattern': {'value': '#.*', 'flags': [], '_width': [1, 4294967295], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 6: {'name': 'PATH', 'pattern': {'value': '"[^"]*"', 'flags': [], '_width': [2, 4294967295], '__type__': 'PatternRE'}, 'priority': 1, '__type__': 'TerminalDef'}, 7: {'name': 'IMPORT', 'pattern': {'value': 'import', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 8: {'name': 'OBJECT', 'pattern': {'value': 'object', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 9: {'name': 'LBRACE', 'pattern': {'value': '{', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 10: {'name': 'RBRACE', 'pattern': {'value': '}', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 11: {'name': 'INTERFACE', 'pattern': {'value': 'interface', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 
'TerminalDef'}, 12: {'name': 'EXPOSE', 'pattern': {'value': 'expose', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 13: {'name': '__ANON_0', 'pattern': {'value': 'uid', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 14: {'name': 'CNAME', 'pattern': {'value': 'cname', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 15: {'name': 'COLON', 'pattern': {'value': ':', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 16: {'name': 'FUNCTION', 'pattern': {'value': 'function', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 17: {'name': 'METHOD', 'pattern': {'value': 'method', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 18: {'name': 'PARAM', 'pattern': {'value': 'param', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 19: {'name': 'LSQB', 'pattern': {'value': '[', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 20: {'name': 'RSQB', 'pattern': {'value': ']', 'flags': [], '__type__': 'PatternStr'}, 'priority': 1, '__type__': 'TerminalDef'}, 21: {'origin': {'name': 'start', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__start_star_0', '__type__': 'NonTerminal'}, {'name': '__start_plus_1', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 22: {'origin': {'name': 'start', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__start_plus_1', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 23: {'origin': {'name': 'import_statement', '__type__': 
'NonTerminal'}, 'expansion': [{'name': 'IMPORT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'PATH', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 24: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 25: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 26: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 
'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 27: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 28: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 4, 'alias': None, 'options': {'keep_all_tokens': False, 
'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 29: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 5, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 30: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 6, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 31: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 
'order': 7, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 32: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 8, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 33: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 9, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 34: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, 
{'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 10, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 35: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 11, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 36: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 12, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 37: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, 
'__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 13, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 38: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 14, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 39: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 15, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 40: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 
'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 16, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 41: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 17, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 42: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 18, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 43: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 
'options', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 19, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 44: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 20, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 45: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 21, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 46: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, 
{'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 22, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 47: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 23, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 48: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 24, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 49: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 
'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 25, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 50: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 26, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 51: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'super', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 27, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 52: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 
'cname', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 28, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 53: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'cname', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 29, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 54: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 30, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 55: {'origin': {'name': 'object', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 31, 'alias': None, 'options': {'keep_all_tokens': False, 
'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 56: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__interface_star_3', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 57: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 58: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__interface_star_3', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 2, 
'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 59: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 60: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__interface_star_3', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 4, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 61: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 5, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': 
None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 62: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': '__interface_star_3', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 6, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 63: {'origin': {'name': 'interface', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'INTERFACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'uid', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 7, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 64: {'origin': {'name': 'interface_param', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'expose', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': True, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 65: {'origin': {'name': 'interface_param', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'function', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': True, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 66: {'origin': {'name': 'expose', '__type__': 'NonTerminal'}, 'expansion': [{'name': 
'EXPOSE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'type', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 67: {'origin': {'name': 'uid', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__ANON_0', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'UID', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 68: {'origin': {'name': 'cname', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'CNAME', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'IDENTIFIER', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 69: {'origin': {'name': 'super', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'COLON', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 70: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': 
{'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 71: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 72: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 73: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 74: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 
'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 4, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 75: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 5, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 76: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 6, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 77: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 7, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': 
None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 78: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}], 'order': 8, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 79: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 9, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 80: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 10, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 81: {'origin': {'name': 'function', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'FUNCTION', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 11, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 82: {'origin': 
{'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 83: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 84: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 85: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 
'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 86: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 4, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 87: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'description', '__type__': 'NonTerminal'}, {'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 5, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 88: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 6, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 89: 
{'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 7, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 90: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}], 'order': 8, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 91: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 9, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 92: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'LBRACE', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'RBRACE', 'filter_out': True, '__type__': 'Terminal'}], 'order': 10, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 
'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 93: {'origin': {'name': 'method', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'METHOD', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 11, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 94: {'origin': {'name': 'param', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'PARAM', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'type', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}, {'name': 'description', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 95: {'origin': {'name': 'param', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'PARAM', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'type', '__type__': 'NonTerminal'}, {'name': 'options', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 96: {'origin': {'name': 'param', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'PARAM', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'type', '__type__': 'NonTerminal'}, {'name': 'description', '__type__': 'NonTerminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 97: {'origin': {'name': 'param', '__type__': 'NonTerminal'}, 
'expansion': [{'name': 'PARAM', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}, {'name': 'type', '__type__': 'NonTerminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 98: {'origin': {'name': 'type', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'PRIMITIVE', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': True, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 99: {'origin': {'name': 'type', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'object_name', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': True, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 100: {'origin': {'name': 'object_name', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OBJECT', 'filter_out': True, '__type__': 'Terminal'}, {'name': 'name', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 101: {'origin': {'name': 'name', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'IDENTIFIER', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 102: {'origin': {'name': 'options', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'LSQB', 'filter_out': True, '__type__': 'Terminal'}, {'name': '__options_plus_5', '__type__': 'NonTerminal'}, {'name': 'RSQB', 'filter_out': True, '__type__': 
'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 103: {'origin': {'name': 'description', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__description_plus_6', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 104: {'origin': {'name': '__start_star_0', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'import_statement', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 105: {'origin': {'name': '__start_star_0', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__start_star_0', '__type__': 'NonTerminal'}, {'name': 'import_statement', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 106: {'origin': {'name': '__start_plus_1', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'object', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 107: {'origin': {'name': '__start_plus_1', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'interface', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 108: {'origin': {'name': '__start_plus_1', '__type__': 
'NonTerminal'}, 'expansion': [{'name': '__start_plus_1', '__type__': 'NonTerminal'}, {'name': 'object', '__type__': 'NonTerminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 109: {'origin': {'name': '__start_plus_1', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__start_plus_1', '__type__': 'NonTerminal'}, {'name': 'interface', '__type__': 'NonTerminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 110: {'origin': {'name': '__object_star_2', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'method', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 111: {'origin': {'name': '__object_star_2', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__object_star_2', '__type__': 'NonTerminal'}, {'name': 'method', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 112: {'origin': {'name': '__interface_star_3', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'interface_param', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 113: {'origin': {'name': '__interface_star_3', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__interface_star_3', '__type__': 'NonTerminal'}, {'name': 'interface_param', '__type__': 'NonTerminal'}], 'order': 1, 'alias': 
None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 114: {'origin': {'name': '__function_star_4', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'param', '__type__': 'NonTerminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 115: {'origin': {'name': '__function_star_4', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__function_star_4', '__type__': 'NonTerminal'}, {'name': 'param', '__type__': 'NonTerminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 116: {'origin': {'name': '__options_plus_5', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'OPTION', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 117: {'origin': {'name': '__options_plus_5', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'IDENTIFIER', 'filter_out': False, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 118: {'origin': {'name': '__options_plus_5', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__options_plus_5', '__type__': 'NonTerminal'}, {'name': 'OPTION', 'filter_out': False, '__type__': 'Terminal'}], 'order': 2, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 
'Rule'}, 119: {'origin': {'name': '__options_plus_5', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__options_plus_5', '__type__': 'NonTerminal'}, {'name': 'IDENTIFIER', 'filter_out': False, '__type__': 'Terminal'}], 'order': 3, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 120: {'origin': {'name': '__description_plus_6', '__type__': 'NonTerminal'}, 'expansion': [{'name': 'COMMENT', 'filter_out': False, '__type__': 'Terminal'}], 'order': 0, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}, 121: {'origin': {'name': '__description_plus_6', '__type__': 'NonTerminal'}, 'expansion': [{'name': '__description_plus_6', '__type__': 'NonTerminal'}, {'name': 'COMMENT', 'filter_out': False, '__type__': 'Terminal'}], 'order': 1, 'alias': None, 'options': {'keep_all_tokens': False, 'expand1': False, 'priority': None, 'template_source': None, 'empty_indices': (), '__type__': 'RuleOptions'}, '__type__': 'Rule'}}
|
|
)
|
|
# Parser-action discriminants for the LALR(1) parse tables embedded in this
# generated module: each table entry pairs one of these codes with its
# argument (a state to shift to, or a rule to reduce by).
Shift = 0
Reduce = 1
|
|
def Lark_StandAlone(**kwargs):
    """Instantiate the embedded standalone parser.

    Reconstructs a ``Lark`` parser from the serialized grammar/table data
    (``DATA``) and memoized objects (``MEMO``) baked into this generated
    file. Any keyword arguments are forwarded to ``Lark._load_from_dict``
    to override load-time options.
    """
    options = kwargs
    return Lark._load_from_dict(DATA, MEMO, **options)
|