expressions.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473
  1. """Subexpressions that make up a parsed grammar
  2. These do the parsing.
  3. """
  4. # TODO: Make sure all symbol refs are local--not class lookups or
  5. # anything--for speed. And kill all the dots.
  6. from collections import defaultdict
  7. from inspect import getfullargspec, isfunction, ismethod, ismethoddescriptor
  8. import regex as re
  9. from parsimonious.exceptions import ParseError, IncompleteParseError, LeftRecursionError
  10. from parsimonious.nodes import Node, RegexNode
  11. from parsimonious.utils import StrAndRepr
  12. def is_callable(value):
  13. criteria = [isfunction, ismethod, ismethoddescriptor]
  14. return any([criterion(value) for criterion in criteria])
def expression(callable, rule_name, grammar):
    """Turn a plain callable into an Expression.

    The callable can be of this simple form::

        def foo(text, pos):
            '''If this custom expression matches starting at text[pos], return
            the index where it stops matching. Otherwise, return None.'''
            if the expression matched:
                return end_pos

    If there are child nodes to return, return a tuple::

        return end_pos, children

    If the expression doesn't match at the given ``pos`` at all... ::

        return None

    If your callable needs to make sub-calls to other rules in the grammar or
    do error reporting, it can take this form, gaining additional arguments::

        def foo(text, pos, cache, error, grammar):
            # Call out to other rules:
            node = grammar['another_rule'].match_core(text, pos, cache, error)
            ...
            # Return values as above.

    The return value of the callable, if an int or a tuple, will be
    automatically transmuted into a :class:`~parsimonious.Node`. If it returns
    a Node-like class directly, it will be passed through unchanged.

    :arg rule_name: The rule name to attach to the resulting
        :class:`~parsimonious.Expression`
    :arg grammar: The :class:`~parsimonious.Grammar` this expression will be a
        part of, to make delegating to other rules possible

    """
    # Resolve unbound methods; allows grammars to use @staticmethod custom rules
    # https://stackoverflow.com/questions/41921255/staticmethod-object-is-not-callable
    if ismethoddescriptor(callable) and hasattr(callable, '__func__'):
        callable = callable.__func__

    # Dispatch on arity: 2 args means the simple (text, pos) form, 5 means the
    # full (text, pos, cache, error, grammar) form described above.
    num_args = len(getfullargspec(callable).args)
    if ismethod(callable):
        # do not count the first argument (typically 'self') for methods
        num_args -= 1
    if num_args == 2:
        is_simple = True
    elif num_args == 5:
        is_simple = False
    else:
        raise RuntimeError("Custom rule functions must take either 2 or 5 "
                           "arguments, not %s." % num_args)

    # The ad-hoc class closes over ``callable``, ``is_simple``, and
    # ``grammar`` rather than storing them on the instance.
    class AdHocExpression(Expression):
        def _uncached_match(self, text, pos, cache, error):
            result = (callable(text, pos) if is_simple else
                      callable(text, pos, cache, error, grammar))

            # Normalize the three documented return shapes to a Node/None:
            if isinstance(result, int):
                end, children = result, None
            elif isinstance(result, tuple):
                end, children = result
            else:
                # Node or None
                return result
            return Node(self, text, pos, end, children=children)

        def _as_rhs(self):
            return '{custom function "%s"}' % callable.__name__

    return AdHocExpression(name=rule_name)
  72. IN_PROGRESS = object()
class Expression(StrAndRepr):
    """A thing that can be matched against a piece of text"""

    # Slots are about twice as fast as __dict__-based attributes:
    # http://stackoverflow.com/questions/1336791/dictionary-vs-object-which-is-more-efficient-and-why

    # Top-level expressions--rules--have names. Subexpressions are named ''.
    __slots__ = ['name', 'identity_tuple']

    def __init__(self, name=''):
        self.name = name
        # Feeds __hash__ and _eq_check_cycles; subclasses carrying extra state
        # (Literal, Regex) overwrite it with a richer tuple in their __init__.
        self.identity_tuple = (self.name, )

    def __hash__(self):
        return hash(self.identity_tuple)

    def __eq__(self, other):
        return self._eq_check_cycles(other, set())

    def __ne__(self, other):
        return not (self == other)

    def _eq_check_cycles(self, other, checked):
        # keep a set of all pairs that are already checked, so we won't fall into infinite recursions.
        checked.add((id(self), id(other)))
        return other.__class__ is self.__class__ and self.identity_tuple == other.identity_tuple

    def resolve_refs(self, rule_map):
        # Nothing to do on the base expression.
        return self

    def parse(self, text, pos=0):
        """Return a parse tree of ``text``.

        Raise ``ParseError`` if the expression wasn't satisfied. Raise
        ``IncompleteParseError`` if the expression was satisfied but didn't
        consume the full string.

        """
        node = self.match(text, pos=pos)
        if node.end < len(text):
            raise IncompleteParseError(text, node.end, self)
        return node

    def match(self, text, pos=0):
        """Return the parse tree matching this expression at the given
        position, not necessarily extending all the way to the end of ``text``.

        Raise ``ParseError`` if there is no match there.

        :arg pos: The index at which to start matching

        """
        error = ParseError(text)
        # A fresh packrat cache ({id(expr): {pos: node}}) per top-level match.
        node = self.match_core(text, pos, defaultdict(dict), error)
        if node is None:
            raise error
        return node

    def match_core(self, text, pos, cache, error):
        """Internal guts of ``match()``

        This is appropriate to call only from custom rules or Expression
        subclasses.

        :arg cache: The packrat cache::

            {(oid, pos): Node tree matched by object `oid` at index `pos` ...}

        :arg error: A ParseError instance with ``text`` already filled in but
            otherwise blank. We update the error reporting info on this object
            as we go. (Sticking references on an existing instance is faster
            than allocating a new one for each expression that fails.) We
            return None rather than raising and catching ParseErrors because
            catching is slow.

        """
        # TODO: Optimize. Probably a hot spot.
        #
        # Is there a faster way of looking up cached stuff?
        #
        # If this is slow, think about the array module. It might (or might
        # not!) use more RAM, but it'll likely be faster than hashing things
        # all the time. Also, can we move all the allocs up front?
        #
        # To save space, we have lots of choices: (0) Quit caching whole Node
        # objects. Cache just what you need to reconstitute them. (1) Cache
        # only the results of entire rules, not subexpressions (probably a
        # horrible idea for rules that need to backtrack internally a lot). (2)
        # Age stuff out of the cache somehow. LRU? (3) Cuts.
        expr_cache = cache[id(self)]
        if pos in expr_cache:
            node = expr_cache[pos]
        else:
            # Plant the sentinel before recursing so that a left-recursive
            # grammar re-entering this expression at the same position finds
            # it (and is detected below) instead of recursing forever.
            expr_cache[pos] = IN_PROGRESS  # Mark as in progress
            node = expr_cache[pos] = self._uncached_match(text, pos, cache, error)
        if node is IN_PROGRESS:
            raise LeftRecursionError(text, pos=-1, expr=self)

        # Record progress for error reporting:
        if node is None and pos >= error.pos and (
                self.name or getattr(error.expr, 'name', None) is None):
            # Don't bother reporting on unnamed expressions (unless that's all
            # we've seen so far), as they're hard to track down for a human.
            # Perhaps we could include the unnamed subexpressions later as
            # auxiliary info.
            error.expr = self
            error.pos = pos

        return node

    def __str__(self):
        return '<%s %s>' % (
            self.__class__.__name__,
            self.as_rule())

    def as_rule(self):
        """Return the left- and right-hand sides of a rule that represents me.

        Return unicode. If I have no ``name``, omit the left-hand side.

        """
        rhs = self._as_rhs().strip()
        # Strip one redundant layer of parens around the whole right side:
        if rhs.startswith('(') and rhs.endswith(')'):
            rhs = rhs[1:-1]

        return ('%s = %s' % (self.name, rhs)) if self.name else rhs

    def _unicode_members(self):
        """Return an iterable of my unicode-represented children, stopping
        descent when we hit a named node so the returned value resembles the
        input rule."""
        return [(m.name or m._as_rhs()) for m in self.members]

    def _as_rhs(self):
        """Return the right-hand side of a rule that represents me.

        Implemented by subclasses.

        """
        raise NotImplementedError
  183. class Literal(Expression):
  184. """A string literal
  185. Use these if you can; they're the fastest.
  186. """
  187. __slots__ = ['literal']
  188. def __init__(self, literal, name=''):
  189. super().__init__(name)
  190. self.literal = literal
  191. self.identity_tuple = (name, literal)
  192. def _uncached_match(self, text, pos, cache, error):
  193. if text.startswith(self.literal, pos):
  194. return Node(self, text, pos, pos + len(self.literal))
  195. def _as_rhs(self):
  196. return repr(self.literal)
  197. class TokenMatcher(Literal):
  198. """An expression matching a single token of a given type
  199. This is for use only with TokenGrammars.
  200. """
  201. def _uncached_match(self, token_list, pos, cache, error):
  202. if token_list[pos].type == self.literal:
  203. return Node(self, token_list, pos, pos + 1)
class Regex(Expression):
    """An expression that matches what a regex does.

    Use these as much as you can and jam as much into each one as you can;
    they're fast.

    """
    __slots__ = ['re']

    def __init__(self, pattern, name='', ignore_case=False, locale=False,
                 multiline=False, dot_all=False, unicode=False, verbose=False, ascii=False):
        super().__init__(name)
        # ``flag and re.X`` yields either False (harmless under ``|``) or the
        # flag constant, so this ORs together exactly the requested flags.
        self.re = re.compile(pattern, (ignore_case and re.I) |
                                      (locale and re.L) |
                                      (multiline and re.M) |
                                      (dot_all and re.S) |
                                      (unicode and re.U) |
                                      (verbose and re.X) |
                                      (ascii and re.A))
        # Compiled pattern participates in equality/hashing of the rule.
        self.identity_tuple = (self.name, self.re)

    def _uncached_match(self, text, pos, cache, error):
        """Return length of match, ``None`` if no match."""
        m = self.re.match(text, pos)
        if m is not None:
            span = m.span()
            node = RegexNode(self, text, pos, pos + span[1] - span[0])
            node.match = m  # TODO: A terrible idea for cache size?
            return node

    def _regex_flags_from_bits(self, bits):
        """Return the textual equivalent of numerically encoded regex flags."""
        # Maps bit i (1-based) of ``bits`` to the i-th letter of 'ilmsuxa'.
        # NOTE(review): this table assumes the flag bit layout of the
        # third-party ``regex`` module imported above; stdlib ``re`` puts
        # ASCII on a different bit — confirm if the import ever changes.
        flags = 'ilmsuxa'
        return ''.join(flags[i - 1] if (1 << i) & bits else '' for i in range(1, len(flags) + 1))

    def _as_rhs(self):
        return '~{!r}{}'.format(self.re.pattern,
                                self._regex_flags_from_bits(self.re.flags))
  236. class Compound(Expression):
  237. """An abstract expression which contains other expressions"""
  238. __slots__ = ['members']
  239. def __init__(self, *members, **kwargs):
  240. """``members`` is a sequence of expressions."""
  241. super().__init__(kwargs.get('name', ''))
  242. self.members = members
  243. def resolve_refs(self, rule_map):
  244. self.members = tuple(m.resolve_refs(rule_map) for m in self.members)
  245. return self
  246. def _eq_check_cycles(self, other, checked):
  247. return (
  248. super()._eq_check_cycles(other, checked) and
  249. len(self.members) == len(other.members) and
  250. all(m._eq_check_cycles(mo, checked) for m, mo in zip(self.members, other.members) if (id(m), id(mo)) not in checked)
  251. )
  252. def __hash__(self):
  253. # Note we leave members out of the hash computation, since compounds can get added to
  254. # sets, then have their members mutated. See RuleVisitor._resolve_refs.
  255. # Equality should still work, but we want the rules to go into the correct hash bucket.
  256. return hash((self.__class__, self.name))
  257. class Sequence(Compound):
  258. """A series of expressions that must match contiguous, ordered pieces of
  259. the text
  260. In other words, it's a concatenation operator: each piece has to match, one
  261. after another.
  262. """
  263. def _uncached_match(self, text, pos, cache, error):
  264. new_pos = pos
  265. children = []
  266. for m in self.members:
  267. node = m.match_core(text, new_pos, cache, error)
  268. if node is None:
  269. return None
  270. children.append(node)
  271. length = node.end - node.start
  272. new_pos += length
  273. # Hooray! We got through all the members!
  274. return Node(self, text, pos, new_pos, children)
  275. def _as_rhs(self):
  276. return '({0})'.format(' '.join(self._unicode_members()))
  277. class OneOf(Compound):
  278. """A series of expressions, one of which must match
  279. Expressions are tested in order from first to last. The first to succeed
  280. wins.
  281. """
  282. def _uncached_match(self, text, pos, cache, error):
  283. for m in self.members:
  284. node = m.match_core(text, pos, cache, error)
  285. if node is not None:
  286. # Wrap the succeeding child in a node representing the OneOf:
  287. return Node(self, text, pos, node.end, children=[node])
  288. def _as_rhs(self):
  289. return '({0})'.format(' / '.join(self._unicode_members()))
  290. class Lookahead(Compound):
  291. """An expression which consumes nothing, even if its contained expression
  292. succeeds"""
  293. __slots__ = ['negativity']
  294. def __init__(self, member, *, negative=False, **kwargs):
  295. super().__init__(member, **kwargs)
  296. self.negativity = bool(negative)
  297. def _uncached_match(self, text, pos, cache, error):
  298. node = self.members[0].match_core(text, pos, cache, error)
  299. if (node is None) == self.negativity: # negative lookahead == match only if not found
  300. return Node(self, text, pos, pos)
  301. def _as_rhs(self):
  302. return '%s%s' % ('!' if self.negativity else '&', self._unicode_members()[0])
  303. def _eq_check_cycles(self, other, checked):
  304. return (
  305. super()._eq_check_cycles(other, checked) and
  306. self.negativity == other.negativity
  307. )
  308. def Not(term):
  309. return Lookahead(term, negative=True)
  310. # Quantifiers. None of these is strictly necessary, but they're darn handy.
class Quantifier(Compound):
    """An expression wrapper like the */+/?/{n,m} quantifier in regexes."""

    __slots__ = ['min', 'max']

    def __init__(self, member, *, min=0, max=float('inf'), name='', **kwargs):
        super().__init__(member, name=name, **kwargs)
        # min/max repetition counts; max may be float('inf') for unbounded.
        self.min = min
        self.max = max

    def _uncached_match(self, text, pos, cache, error):
        """Match the member repeatedly; succeed iff it matched >= min times."""
        new_pos = pos
        children = []
        size = len(text)
        while new_pos < size and len(children) < self.max:
            node = self.members[0].match_core(text, new_pos, cache, error)
            if node is None:
                break  # no more matches
            children.append(node)
            length = node.end - node.start
            # A zero-width match would repeat forever at the same position;
            # once the minimum count is satisfied, stop instead of looping.
            # (Before the minimum, we keep going so zero-width members can
            # still accumulate the required count.)
            if len(children) >= self.min and length == 0:  # Don't loop infinitely
                break
            new_pos += length
        if len(children) >= self.min:
            return Node(self, text, pos, new_pos, children)

    def _as_rhs(self):
        # Render the idiomatic quantifier symbol when one exists, otherwise
        # the general {m,n} form.
        if self.min == 0 and self.max == 1:
            qualifier = '?'
        elif self.min == 0 and self.max == float('inf'):
            qualifier = '*'
        elif self.min == 1 and self.max == float('inf'):
            qualifier = '+'
        elif self.max == float('inf'):
            qualifier = '{%d,}' % self.min
        elif self.min == 0:
            qualifier = '{,%d}' % self.max
        else:
            qualifier = '{%d,%d}' % (self.min, self.max)
        return '%s%s' % (self._unicode_members()[0], qualifier)

    def _eq_check_cycles(self, other, checked):
        return (
            super()._eq_check_cycles(other, checked) and
            self.min == other.min and
            self.max == other.max
        )
  353. def ZeroOrMore(member, name=''):
  354. return Quantifier(member, name=name, min=0, max=float('inf'))
  355. def OneOrMore(member, name='', min=1):
  356. return Quantifier(member, name=name, min=min, max=float('inf'))
  357. def Optional(member, name=''):
  358. return Quantifier(member, name=name, min=0, max=1)