#  spark.py -- SPARK (Scanning, Parsing, and Rewriting Kit)
#  (from pytest.git, Parser/spark.py)
#  Copyright (c) 1998-2002 John Aycock
#
#  Permission is hereby granted, free of charge, to any person obtaining
#  a copy of this software and associated documentation files (the
#  "Software"), to deal in the Software without restriction, including
#  without limitation the rights to use, copy, modify, merge, publish,
#  distribute, sublicense, and/or sell copies of the Software, and to
#  permit persons to whom the Software is furnished to do so, subject to
#  the following conditions:
#
#  The above copyright notice and this permission notice shall be
#  included in all copies or substantial portions of the Software.
#
#  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
#  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
#  CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
#  TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
#  SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 __version__ = 'SPARK-0.7 (pre-alpha-5)'
24 import re
25 import sys
26 import string
28 def _namelist(instance):
29 namelist, namedict, classlist = [], {}, [instance.__class__]
30 for c in classlist:
31 for b in c.__bases__:
32 classlist.append(b)
33 for name in c.__dict__.keys():
34 if not namedict.has_key(name):
35 namelist.append(name)
36 namedict[name] = 1
37 return namelist
class GenericScanner:
    """Regexp-driven scanner base class.

    Subclasses define token methods named t_<name>; each method's
    docstring is the (VERBOSE-mode) regular expression for that token,
    and the method body is the action invoked on a match.  All patterns
    are stitched into one alternation by reflect() and compiled once
    in __init__.  t_default is always tried last.
    """

    def __init__(self, flags=0):
        pattern = self.reflect()
        self.re = re.compile(pattern, re.VERBOSE | flags)

        # Map each group's index to its t_* action method.
        self.index2func = {}
        for name, number in self.re.groupindex.items():
            self.index2func[number - 1] = getattr(self, 't_' + name)

    def makeRE(self, name):
        # Each t_* docstring becomes one named group of the big pattern.
        doc = getattr(self, name).__doc__
        rv = '(?P<%s>%s)' % (name[2:], doc)
        return rv

    def reflect(self):
        rv = []
        for name in _namelist(self):
            if name[:2] == 't_' and name != 't_default':
                rv.append(self.makeRE(name))

        # t_default goes last so it only matches what nothing else did.
        rv.append(self.makeRE('t_default'))
        return '|'.join(rv)

    def error(self, s, pos):
        print("Lexical error at position %s" % pos)
        raise SystemExit

    def tokenize(self, s):
        """Scan s from left to right, firing the t_* action per match."""
        pos = 0
        n = len(s)
        while pos < n:
            m = self.re.match(s, pos)
            if m is None:
                self.error(s, pos)

            groups = m.groups()
            for i in range(len(groups)):
                if groups[i] and i in self.index2func:
                    self.index2func[i](groups[i])
            pos = m.end()

    def t_default(self, s):
        r'( . | \n )+'
        print("Specification error: unmatched input")
        raise SystemExit
#
#  Extracted from GenericParser and made global so that [un]picking works.
#
88 class _State:
89 def __init__(self, stateno, items):
90 self.T, self.complete, self.items = [], [], items
91 self.stateno = stateno
class GenericParser:
    """
    An Earley parser, as per J. Earley, "An Efficient Context-Free
    Parsing Algorithm", CACM 13(2), pp. 94-102.  Also J. C. Earley,
    "An Efficient Context-Free Parsing Algorithm", Ph.D. thesis,
    Carnegie-Mellon University, August 1968.  New formulation of
    the parser according to J. Aycock, "Practical Earley Parsing
    and the SPARK Toolkit", Ph.D. thesis, University of Victoria,
    2001, and J. Aycock and R. N. Horspool, "Practical Earley
    Parsing", unpublished paper, 2001.

    Grammar rules are collected from the docstrings of p_* methods of
    the subclass ("lhs ::= rhs" productions); the methods themselves
    are the semantic actions.
    """

    def __init__(self, start):
        self.rules = {}
        self.rule2func = {}
        self.rule2name = {}
        self.collectRules()
        self.augment(start)
        self.ruleschanged = 1

    # Raw string: the value is literally backslash-e-underscore, used as
    # a prefix marking nullable symbols in the epsilon-free grammar G_e.
    _NULLABLE = r'\e_'
    _START = 'START'
    _BOF = '|-'

    #
    #  When pickling, take the time to generate the full state machine;
    #  some information is then extraneous, too.  Unfortunately we
    #  can't save the rule2func map.
    #
    def __getstate__(self):
        if self.ruleschanged:
            #
            #  XXX - duplicated from parse()
            #
            self.computeNull()
            self.newrules = {}
            self.new2old = {}
            self.makeNewRules()
            self.ruleschanged = 0
            self.edges, self.cores = {}, {}
            self.states = { 0: self.makeState0() }
            self.makeState(0, self._BOF)
        #
        #  XXX - should find a better way to do this..
        #
        changes = 1
        while changes:
            changes = 0
            # Snapshot the items: goto() may add edges while we iterate.
            for k, v in list(self.edges.items()):
                if v is None:
                    state, sym = k
                    if state in self.states:
                        self.goto(state, sym)
                        changes = 1
        rv = self.__dict__.copy()
        for s in self.states.values():
            del s.items
        del rv['rule2func']
        del rv['nullable']
        del rv['cores']
        return rv

    def __setstate__(self, D):
        self.rules = {}
        self.rule2func = {}
        self.rule2name = {}
        self.collectRules()
        start = D['rules'][self._START][0][1][1]        # Blech.
        self.augment(start)
        D['rule2func'] = self.rule2func
        # Once unpickled, the full machine exists, so the fast set
        # construction can be used unconditionally.
        D['makeSet'] = self.makeSet_fast
        self.__dict__ = D

    #
    #  A hook for GenericASTBuilder and GenericASTMatcher.  Mess
    #  thee not with this; nor shall thee toucheth the _preprocess
    #  argument to addRule.
    #
    def preprocess(self, rule, func):
        return rule, func

    def addRule(self, doc, func, _preprocess=1):
        """Add the production(s) found in doc, with semantic action func."""
        fn = func
        rules = doc.split()

        # Each '::=' marks a new production; index holds the lhs positions.
        index = []
        for i in range(len(rules)):
            if rules[i] == '::=':
                index.append(i - 1)
        index.append(len(rules))

        for i in range(len(index) - 1):
            lhs = rules[index[i]]
            rhs = rules[index[i] + 2:index[i + 1]]
            rule = (lhs, tuple(rhs))

            if _preprocess:
                rule, fn = self.preprocess(rule, func)

            if lhs in self.rules:
                self.rules[lhs].append(rule)
            else:
                self.rules[lhs] = [ rule ]
            self.rule2func[rule] = fn
            self.rule2name[rule] = func.__name__[2:]
        self.ruleschanged = 1

    def collectRules(self):
        for name in _namelist(self):
            if name[:2] == 'p_':
                func = getattr(self, name)
                doc = func.__doc__
                self.addRule(doc, func)

    def augment(self, start):
        # START ::= |- <start>; its action just returns the start symbol's
        # attribute.
        rule = '%s ::= %s %s' % (self._START, self._BOF, start)
        self.addRule(rule, lambda args: args[1], 0)

    def computeNull(self):
        """Compute self.nullable[lhs] for every nonterminal (fixpoint)."""
        self.nullable = {}
        tbd = []

        for rulelist in self.rules.values():
            lhs = rulelist[0][0]
            self.nullable[lhs] = 0
            for rule in rulelist:
                rhs = rule[1]
                if len(rhs) == 0:
                    self.nullable[lhs] = 1
                    continue
                #
                #  We only need to consider rules which
                #  consist entirely of nonterminal symbols.
                #  This should be a savings on typical
                #  grammars.
                #
                for sym in rhs:
                    if sym not in self.rules:
                        break
                else:
                    tbd.append(rule)
        changes = 1
        while changes:
            changes = 0
            for lhs, rhs in tbd:
                if self.nullable[lhs]:
                    continue
                for sym in rhs:
                    if not self.nullable[sym]:
                        break
                else:
                    self.nullable[lhs] = 1
                    changes = 1

    def makeState0(self):
        s0 = _State(0, [])
        for rule in self.newrules[self._START]:
            s0.items.append((rule, 0))
        return s0

    def finalState(self, tokens):
        #
        #  Yuck.
        #
        if len(self.newrules[self._START]) == 2 and len(tokens) == 0:
            return 1
        start = self.rules[self._START][0][1][1]
        return self.goto(1, start)

    def makeNewRules(self):
        """Build G_e, the epsilon-free grammar, into self.newrules."""
        worklist = []
        for rulelist in self.rules.values():
            for rule in rulelist:
                worklist.append((rule, 0, 1, rule))

        for rule, i, candidate, oldrule in worklist:
            lhs, rhs = rule
            n = len(rhs)
            while i < n:
                sym = rhs[i]
                if sym not in self.rules or \
                   not self.nullable[sym]:
                    candidate = 0
                    i = i + 1
                    continue

                newrhs = list(rhs)
                newrhs[i] = self._NULLABLE + sym
                newrule = (lhs, tuple(newrhs))
                worklist.append((newrule, i + 1,
                                 candidate, oldrule))
                candidate = 0
                i = i + 1
            else:
                if candidate:
                    lhs = self._NULLABLE + lhs
                    rule = (lhs, rhs)
                if lhs in self.newrules:
                    self.newrules[lhs].append(rule)
                else:
                    self.newrules[lhs] = [ rule ]
                self.new2old[rule] = oldrule

    def typestring(self, token):
        # Override to return a type name for token; enables the fast
        # terminal-transition path in makeSet/makeSet_fast.
        return None

    def error(self, token):
        print("Syntax error at or near `%s' token" % token)
        raise SystemExit

    def parse(self, tokens):
        """Parse tokens; return the attribute of the start symbol."""
        sets = [ [(1, 0), (2, 0)] ]
        self.links = {}

        if self.ruleschanged:
            self.computeNull()
            self.newrules = {}
            self.new2old = {}
            self.makeNewRules()
            self.ruleschanged = 0
            self.edges, self.cores = {}, {}
            self.states = { 0: self.makeState0() }
            self.makeState(0, self._BOF)

        for i in range(len(tokens)):
            sets.append([])

            if sets[i] == []:
                break
            self.makeSet(tokens[i], sets, i)
        else:
            sets.append([])
            self.makeSet(None, sets, len(tokens))

        #_dump(tokens, sets, self.states)

        finalitem = (self.finalState(tokens), 0)
        if finalitem not in sets[-2]:
            if len(tokens) > 0:
                self.error(tokens[i - 1])
            else:
                self.error(None)

        return self.buildTree(self._START, finalitem,
                              tokens, len(sets) - 2)

    def isnullable(self, sym):
        #
        #  For symbols in G_e only.  If we weren't supporting 1.5,
        #  could just use sym.startswith().
        #
        return self._NULLABLE == sym[0:len(self._NULLABLE)]

    def skip(self, hs, pos=0):
        # hs is an (lhs, rhs) rule; advance pos past nullable symbols.
        lhs, rhs = hs
        n = len(rhs)
        while pos < n:
            if not self.isnullable(rhs[pos]):
                break
            pos = pos + 1
        return pos

    def makeState(self, state, sym):
        assert sym is not None
        #
        #  Compute \epsilon-kernel state's core and see if
        #  it exists already.
        #
        kitems = []
        for rule, pos in self.states[state].items:
            lhs, rhs = rule
            if rhs[pos:pos + 1] == (sym,):
                kitems.append((rule, self.skip(rule, pos + 1)))
        core = kitems

        core.sort()
        tcore = tuple(core)
        if tcore in self.cores:
            return self.cores[tcore]
        #
        #  Nope, doesn't exist.  Compute it and the associated
        #  \epsilon-nonkernel state together; we'll need it right away.
        #
        k = self.cores[tcore] = len(self.states)
        K, NK = _State(k, kitems), _State(k + 1, [])
        self.states[k] = K
        predicted = {}

        edges = self.edges
        rules = self.newrules
        for X in K, NK:
            worklist = X.items
            for item in worklist:
                rule, pos = item
                lhs, rhs = rule
                if pos == len(rhs):
                    X.complete.append(rule)
                    continue

                nextSym = rhs[pos]
                key = (X.stateno, nextSym)
                if nextSym not in rules:
                    if key not in edges:
                        edges[key] = None
                        X.T.append(nextSym)
                else:
                    edges[key] = None
                    if nextSym not in predicted:
                        predicted[nextSym] = 1
                        for prule in rules[nextSym]:
                            ppos = self.skip(prule)
                            new = (prule, ppos)
                            NK.items.append(new)
            #
            #  Problem: we know K needs generating, but we
            #  don't yet know about NK.  Can't commit anything
            #  regarding NK to self.edges until we're sure.  Should
            #  we delay committing on both K and NK to avoid this
            #  hacky code?  This creates other problems..
            #
            if X is K:
                edges = {}

        if NK.items == []:
            return k

        #
        #  Check for \epsilon-nonkernel's core.  Unfortunately we
        #  need to know the entire set of predicted nonterminals
        #  to do this without accidentally duplicating states.
        #
        core = sorted(predicted)
        tcore = tuple(core)
        if tcore in self.cores:
            self.edges[(k, None)] = self.cores[tcore]
            return k

        nk = self.cores[tcore] = self.edges[(k, None)] = NK.stateno
        self.edges.update(edges)
        self.states[nk] = NK
        return k

    def goto(self, state, sym):
        key = (state, sym)
        if key not in self.edges:
            #
            #  No transitions from state on sym.
            #
            return None

        rv = self.edges[key]
        if rv is None:
            #
            #  Target state isn't generated yet.  Remedy this.
            #
            rv = self.makeState(state, sym)
            self.edges[key] = rv
        return rv

    def gotoT(self, state, t):
        return [self.goto(state, t)]

    def gotoST(self, state, st):
        rv = []
        for t in self.states[state].T:
            if st == t:
                rv.append(self.goto(state, t))
        return rv

    def add(self, set, item, i=None, predecessor=None, causal=None):
        if predecessor is None:
            if item not in set:
                set.append(item)
        else:
            key = (item, i)
            if item not in set:
                self.links[key] = []
                set.append(item)
            self.links[key].append((predecessor, causal))

    def makeSet(self, token, sets, i):
        """Construct Earley set i+1 (and complete within set i)."""
        cur, next = sets[i], sets[i + 1]

        ttype = token is not None and self.typestring(token) or None
        if ttype is not None:
            fn, arg = self.gotoT, ttype
        else:
            fn, arg = self.gotoST, token

        for item in cur:
            ptr = (item, i)
            state, parent = item
            add = fn(state, arg)
            for k in add:
                if k is not None:
                    self.add(next, (k, parent), i + 1, ptr)
                    nk = self.goto(k, None)
                    if nk is not None:
                        self.add(next, (nk, i + 1))

            if parent == i:
                continue

            for rule in self.states[state].complete:
                lhs, rhs = rule
                for pitem in sets[parent]:
                    pstate, pparent = pitem
                    k = self.goto(pstate, lhs)
                    if k is not None:
                        why = (item, i, rule)
                        pptr = (pitem, parent)
                        self.add(cur, (k, pparent),
                                 i, pptr, why)
                        nk = self.goto(k, None)
                        if nk is not None:
                            self.add(cur, (nk, i))

    def makeSet_fast(self, token, sets, i):
        #
        #  Call *only* when the entire state machine has been built!
        #  It relies on self.edges being filled in completely, and
        #  then duplicates and inlines code to boost speed at the
        #  cost of extreme ugliness.
        #
        cur, next = sets[i], sets[i + 1]
        ttype = token is not None and self.typestring(token) or None

        for item in cur:
            ptr = (item, i)
            state, parent = item
            if ttype is not None:
                k = self.edges.get((state, ttype), None)
                if k is not None:
                    #self.add(next, (k, parent), i+1, ptr)
                    #INLINED --v
                    new = (k, parent)
                    key = (new, i + 1)
                    if new not in next:
                        self.links[key] = []
                        next.append(new)
                    self.links[key].append((ptr, None))
                    #INLINED --^
                    #nk = self.goto(k, None)
                    nk = self.edges.get((k, None), None)
                    if nk is not None:
                        #self.add(next, (nk, i+1))
                        #INLINED --v
                        new = (nk, i + 1)
                        if new not in next:
                            next.append(new)
                        #INLINED --^
            else:
                add = self.gotoST(state, token)
                for k in add:
                    if k is not None:
                        self.add(next, (k, parent), i + 1, ptr)
                        #nk = self.goto(k, None)
                        nk = self.edges.get((k, None), None)
                        if nk is not None:
                            self.add(next, (nk, i + 1))

            if parent == i:
                continue

            for rule in self.states[state].complete:
                lhs, rhs = rule
                for pitem in sets[parent]:
                    pstate, pparent = pitem
                    #k = self.goto(pstate, lhs)
                    k = self.edges.get((pstate, lhs), None)
                    if k is not None:
                        why = (item, i, rule)
                        pptr = (pitem, parent)
                        #self.add(cur, (k, pparent),
                        #         i, pptr, why)
                        #INLINED --v
                        new = (k, pparent)
                        key = (new, i)
                        if new not in cur:
                            self.links[key] = []
                            cur.append(new)
                        self.links[key].append((pptr, why))
                        #INLINED --^
                        #nk = self.goto(k, None)
                        nk = self.edges.get((k, None), None)
                        if nk is not None:
                            #self.add(cur, (nk, i))
                            #INLINED --v
                            new = (nk, i)
                            if new not in cur:
                                cur.append(new)
                            #INLINED --^

    def predecessor(self, key, causal):
        for p, c in self.links[key]:
            if c == causal:
                return p
        assert 0

    def causal(self, key):
        links = self.links[key]
        if len(links) == 1:
            return links[0][1]
        choices = []
        rule2cause = {}
        for p, c in links:
            rule = c[2]
            choices.append(rule)
            rule2cause[rule] = c
        return rule2cause[self.ambiguity(choices)]

    def deriveEpsilon(self, nt):
        """Build the attribute for a nullable nonterminal (empty derivation)."""
        if len(self.newrules[nt]) > 1:
            rule = self.ambiguity(self.newrules[nt])
        else:
            rule = self.newrules[nt][0]
        #print rule

        rhs = rule[1]
        attr = [None] * len(rhs)

        for i in range(len(rhs) - 1, -1, -1):
            attr[i] = self.deriveEpsilon(rhs[i])
        return self.rule2func[self.new2old[rule]](attr)

    def buildTree(self, nt, item, tokens, k):
        """Walk back through the links to run semantic actions bottom-up."""
        state, parent = item

        choices = []
        for rule in self.states[state].complete:
            if rule[0] == nt:
                choices.append(rule)
        rule = choices[0]
        if len(choices) > 1:
            rule = self.ambiguity(choices)
        #print rule

        rhs = rule[1]
        attr = [None] * len(rhs)

        for i in range(len(rhs) - 1, -1, -1):
            sym = rhs[i]
            if sym not in self.newrules:
                if sym != self._BOF:
                    attr[i] = tokens[k - 1]
                    key = (item, k)
                    item, k = self.predecessor(key, None)
            #elif self.isnullable(sym):
            elif self._NULLABLE == sym[0:len(self._NULLABLE)]:
                attr[i] = self.deriveEpsilon(sym)
            else:
                key = (item, k)
                why = self.causal(key)
                attr[i] = self.buildTree(sym, why[0],
                                         tokens, why[1])
                item, k = self.predecessor(key, why)
        return self.rule2func[self.new2old[rule]](attr)

    def ambiguity(self, rules):
        #
        #  XXX - problem here and in collectRules() if the same rule
        #        appears in >1 method.  Also undefined results if rules
        #        causing the ambiguity appear in the same method.
        #
        sortlist = []
        name2index = {}
        for i in range(len(rules)):
            lhs, rhs = rule = rules[i]
            name = self.rule2name[self.new2old[rule]]
            sortlist.append((len(rhs), name))
            name2index[name] = i
        sortlist.sort()
        list = [b for a, b in sortlist]
        return rules[name2index[self.resolve(list)]]

    def resolve(self, list):
        #
        #  Resolve ambiguity in favor of the shortest RHS.
        #  Since we walk the tree from the top down, this
        #  should effectively resolve in favor of a "shift".
        #
        return list[0]
#
#  GenericASTBuilder automagically constructs a concrete/abstract syntax tree
#  for a given input.  The extra argument is a class (not an instance!)
#  which supports the "__setslice__" and "__len__" methods.
#
#  XXX - silently overrides any user code in methods.
#
class GenericASTBuilder(GenericParser):
    """Parser subclass that automatically builds a syntax tree.

    AST is a node class (not an instance!) supporting slice assignment
    and len(); one node is created per reduced nonterminal.  Note that
    every p_* rule's action is silently overridden by buildASTNode.
    """
    def __init__(self, AST, start):
        GenericParser.__init__(self, start)
        self.AST = AST

    def preprocess(self, rule, func):
        # Rebind each rule's action so every reduction builds a node.
        rebind = lambda lhs, self=self: \
                        lambda args, lhs=lhs, self=self: \
                                self.buildASTNode(args, lhs)
        lhs, rhs = rule
        return rule, rebind(lhs)

    def buildASTNode(self, args, lhs):
        children = []
        for arg in args:
            if isinstance(arg, self.AST):
                children.append(arg)
            else:
                children.append(self.terminal(arg))
        return self.nonterminal(lhs, children)

    def terminal(self, token):
        return token

    def nonterminal(self, type, args):
        rv = self.AST(type)
        rv[:len(args)] = args
        return rv
#
#  GenericASTTraversal is a Visitor pattern according to Design Patterns.  For
#  each node it attempts to invoke the method n_<node type>, falling
#  back onto the default() method if the n_* can't be found.  The preorder
#  traversal also looks for an exit hook named n_<node type>_exit (no default
#  routine is called if it's not found).  To prematurely halt traversal
#  of a subtree, call the prune() method -- this only makes sense for a
#  preorder traversal.  Node type is determined via the typestring() method.
#
class GenericASTTraversalPruningException(Exception):
    """Raised by GenericASTTraversal.prune() to halt a subtree traversal.

    Derives from Exception so the raise in prune() is legal on every
    Python version (old-style classes cannot be raised on Python 3).
    """
    pass
class GenericASTTraversal:
    """Visitor-pattern walker for ASTs.

    For each node it attempts to invoke the method n_<node type>,
    falling back onto default() if absent.  preorder() also calls an
    exit hook named n_<node type>_exit when one exists; prune() halts
    traversal of a subtree (preorder only).  The node's type name comes
    from typestring(), which reads node.type by default.  Nodes must be
    iterable over their children.
    """
    def __init__(self, ast):
        self.ast = ast

    def typestring(self, node):
        return node.type

    def prune(self):
        raise GenericASTTraversalPruningException

    def preorder(self, node=None):
        if node is None:
            node = self.ast

        try:
            name = 'n_' + self.typestring(node)
            if hasattr(self, name):
                func = getattr(self, name)
                func(node)
            else:
                self.default(node)
        except GenericASTTraversalPruningException:
            # prune() was called: skip this node's children entirely.
            return

        for kid in node:
            self.preorder(kid)

        # Optional exit hook; no default() fallback here.
        name = name + '_exit'
        if hasattr(self, name):
            func = getattr(self, name)
            func(node)

    def postorder(self, node=None):
        if node is None:
            node = self.ast

        for kid in node:
            self.postorder(kid)

        name = 'n_' + self.typestring(node)
        if hasattr(self, name):
            func = getattr(self, name)
            func(node)
        else:
            self.default(node)

    def default(self, node):
        pass
#
#  GenericASTMatcher.  AST nodes must have "__getitem__" and "__cmp__"
#  implemented.
#
#  XXX - makes assumptions about how GenericParser walks the parse tree.
#
class GenericASTMatcher(GenericParser):
    """Tree-pattern matcher built on GenericParser.

    AST nodes must implement __getitem__ and comparison (__cmp__ /
    rich comparisons).  The tree is linearized by match_r() and then
    parsed against the p_* patterns.
    XXX - makes assumptions about how GenericParser walks the parse tree.
    """
    def __init__(self, start, ast):
        GenericParser.__init__(self, start)
        self.ast = ast

    def preprocess(self, rule, func):
        # Reverse the RHS and wrap the action so matches fire bottom-up.
        rebind = lambda func, self=self: \
                        lambda args, func=func, self=self: \
                                self.foundMatch(args, func)
        lhs, rhs = rule
        rhslist = list(rhs)
        rhslist.reverse()

        return (lhs, tuple(rhslist)), rebind(func)

    def foundMatch(self, args, func):
        func(args[-1])
        return args[-1]

    def match_r(self, node):
        # Linearize the tree (reversed, parenthesized) into self.input.
        self.input.insert(0, node)
        children = 0

        for child in node:
            if children == 0:
                self.input.insert(0, '(')
            children = children + 1
            self.match_r(child)

        if children > 0:
            self.input.insert(0, ')')

    def match(self, ast=None):
        if ast is None:
            ast = self.ast
        self.input = []

        self.match_r(ast)
        self.parse(self.input)

    def resolve(self, list):
        #
        #  Resolve ambiguity in favor of the longest RHS.
        #
        return list[-1]
def _dump(tokens, sets, states):
    """Debug helper: print each Earley item set and its state's items."""
    for i in range(len(sets)):
        print('set %d' % i)
        for item in sets[i]:
            print('\t %s' % (item,))
            for (lhs, rhs), pos in states[item[0]].items:
                # Show the dotted rule: lhs ::= seen . remaining
                print('\t\t %s ::= %s . %s' % (lhs,
                                               ' '.join(rhs[:pos]),
                                               ' '.join(rhs[pos:])))
        if i < len(tokens):
            print('')
            print('token %s' % str(tokens[i]))
            print('')