Change 'values written by' to use the original Program's environment.
[l3full.git] / l3lang / ast.py
blob35dd98fa497594c44c03805bc7b93d937d8f0d06
2 # Author: Michael H. Hohn (mhhohn@lbl.gov)
4 # Copyright (c) 2006, The Regents of the University of California
5 #
6 # See legal.txt and license.txt
7 #
9 #* imports
10 from __future__ import generators # yield
11 import pdb, exceptions, sys, os, weakref
12 from types import *
13 from copy import deepcopy, copy
14 from pprint import pprint
15 import os, re
17 import l3lang.globals as glbl
18 from l3lang import utils
23 #* utilities
# Attribute names omitted from dict2str() / repr_long() output.
# Kept as a (name -> 1) dict for fast membership tests (pre-set-literal
# Python).  Closing brace restored; it was lost in extraction.
exclude_names = {
    "_primary": 1,
    "_eval_env": 1,
    "_def_env": 1,
    "_storage": 1,
    "_primary_ids": 1,
    "_matcher": 1,
    }
def dict2str(t):
    """ Produce the assignment sequence for a function call;
    omit exclude_names entries.
    """
    # Idiom cleanup: the old version used a Python-2-only tuple-unpacking
    # lambda with filter()/has_key() and manual ', ' concatenation.  A
    # list comprehension plus str.join produces the same text (including
    # "" for an empty selection) and avoids the quadratic appends.
    items = [(k, v) for (k, v) in t.items()
             if k not in exclude_names]
    return ', '.join([str(k) + " = " + repr(v) for (k, v) in items])
def tuple2str(t):
    """Produce a tuple useable in a function call.
    The simple [1:-1] hack fails for (1,)
    """
    # isinstance() is the idiomatic type test (and accepts tuple
    # subclasses); the old `type(t) == TupleType` check did not.
    assert isinstance(t, tuple), "tuple2str expects a tuple"
    if len(t) == 1:
        # repr((x,)) == '(x,)' -- drop parens AND the trailing comma.
        return repr(t)[1:-2]
    else:
        return repr(t)[1:-1]
def to_plain_python_f(itm, storage):
    # Load `itm` from `storage` and reduce it to a plain Python value.
    # Nodes implementing the ast protocol convert themselves; anything
    # without a .to_plain_python() method is already plain.
    loaded = storage.load(itm)
    try:
        return loaded.to_plain_python(storage)
    except AttributeError:
        # No conversion method -- return the loaded value as-is.
        return loaded
def row_adjust(tree):
    """Shift the source-text row index of every node in `tree` down by
    one, so rows become 0-based.  Mutates the nodes and returns `tree`."""
    for node in tree.top_down():
        first = node._first_char
        if first is not None:
            node._first_char = (first[0] - 1, first[1])
        last = node._last_char
        if last is not None:
            node._last_char = (last[0] - 1, last[1])
    return tree
class InterfaceOnly(Exception):
    """Raised by astType's placeholder methods when a subclass fails to
    override part of the Nested/Immediate interface."""
def cross_reference_trees(storage, block, newblock):
    # cross-reference tree elements, one-to-one, for later
    # interaction.
    #
    # Walks the *raw sequence expressions* of `block` and `newblock` in
    # parallel and records, for every source node, its clone's id under
    # the "interp_clone" attribute key.
    src = block.raw_seq_expr().top_down()
    dest = newblock.raw_seq_expr().top_down()
    try:
        while 1:
            # Python 2 generator protocol: .next() raises StopIteration
            # when either walk is exhausted, ending the pairing loop.
            storage.push_attributes(src.next()._id,
                                    "interp_clone", dest.next()._id)
    except StopIteration:
        pass

def cross_ref_trees(storage, block, newblock):
    # cross-reference tree elements, one-to-one, for later
    # interaction.
    #
    # Same as cross_reference_trees(), but pairs the trees themselves
    # rather than their raw sequence expressions.
    src = block.top_down()
    dest = newblock.top_down()
    try:
        while 1:
            storage.push_attributes(src.next()._id,
                                    "interp_clone", dest.next()._id)
    except StopIteration:
        pass
def copy_char_info(source_tree, target_tree):
    # Assuming identical tree structure, overlay the source_tree's
    # source string info (the string and character positions) on the
    # target tree.
    source_string = source_tree._source_string
    src = source_tree.top_down()
    dest = target_tree.top_down()
    try:
        while 1:
            # Pairwise pre-order walk; StopIteration ends the loop.
            ss = src.next(); dd = dest.next()
            dd._source_string = source_string
            dd._first_char = ss._first_char
            dd._last_char = ss._last_char
    except StopIteration:
        pass

def copy_attribute(source_tree, target_tree, att):
    # Assuming identical tree structure, overlay the source_tree's
    # ATT attribute source on the target tree.
    # Note: uses __dict__ directly, so a source node lacking `att`
    # raises KeyError.
    src = source_tree.top_down()
    dest = target_tree.top_down()
    try:
        while 1:
            ss = src.next(); dd = dest.next()
            dd.__dict__[att] = ss.__dict__[att]
    except StopIteration:
        pass
def rowcol_to_index(rowcol, text):
    """Convert the (row, column) pair used by _first_char and
    _last_char into a flat index into `text`.
    Assumes Universal file reads; all EOLs are '\\n'."""
    row, col = rowcol
    pos = 0
    skipped = 0
    # Advance past one newline per row preceding the target row.
    while skipped < row:
        pos = text.find('\n', pos) + 1
        skipped += 1
    return pos + col
154 #* core types
156 #** common base class
157 # class Value: ??
class astType:
    """
    Provide common functionality for Nested() and Immediate().

    Also provide dummy functions for the real interface provided
    by Nested and Immediate; these functions raise exceptions if not
    overridden.

    raw_* members work on in-memory trees, before tree.setup() is run.
    """
    # Docstring delimiters and indentation restored; they were lost in
    # extraction.  Every method below is a pure interface placeholder.
    def __str__(self):
        raise InterfaceOnly

    def __repr__(self):
        raise InterfaceOnly

    def prefix_dump(self, indent=0):
        raise InterfaceOnly

    def traverse_depth(self):
        raise InterfaceOnly

    def traverse_breadth(self, head=1):
        raise InterfaceOnly

    def __len__(self):
        raise InterfaceOnly

    def __getitem__(self, index):
        raise InterfaceOnly

    def interpret(self, env, storage):
        raise InterfaceOnly
def __init__(self, *primary, **kwds):
    # Base initializer shared by all ast nodes; subclasses call this
    # after setting up their own fields.
    self._source_string = None
    self._source_file = None
    self._pre_interp_hook = None         # callable of type
                                         # ((l3tree, env, storage) -> None)
    self._attributes = {}                # generic attributes, kept as
                                         # (key -> value) pairs.
astType.__init__ = __init__

def set_source_string(self, source_string):
    # Attach `source_string` to every node of the tree; returns self.
    # Adjustment for block constructs at EOF
    lines = len(source_string.split('\n'))
    for node in self.top_down():
        node._source_string = source_string
        # Adjustment for block constructs at EOF: clamp _last_char rows
        # that point past the final line.
        if node._last_char != None:
            row2, col2 = node._last_char
            if row2 >= lines:
                node._last_char = (lines - 1, col2)
    return self
astType.set_source_string = set_source_string

def set_source_file(self, source_file):
    # Attach `source_file` to every node of the tree; returns self.
    for node in self.top_down():
        node._source_file = source_file
    return self
astType.set_source_file = set_source_file
226 #** Nested base class
class Nested(astType):
    """
    Collection of common functionality for all language elements.

    The _primary key is the constructor argument tuple, e.g.,
    for Sum(1,2), _primary is (1,2).

    Keyword arguments are merged with the instance __dict__.
    """
    # Docstring delimiters restored; they were lost in extraction.
    pass
def __str__(self):
    """Delegate str() to repr() for all Nested nodes."""
    return self.__repr__()
Nested.__str__ = __str__

def __len__(self):
    """Number of constructor (child) arguments."""
    return len(self._primary)
Nested.__len__ = __len__

def repr_long(self):
    """Verbose repr: class name, children, non-excluded attributes."""
    dict_str = dict2str(self.__dict__)
    if dict_str:
        dict_str = ', ' + dict_str
    return self.__class__.__name__ + '(' + \
           tuple2str(self._primary) + dict_str + ')'
Nested.repr_long = repr_long

# NOTE(review): this first __repr__ (which includes the node's _id) is
# dead code -- it is immediately shadowed by the definition below.
# Kept for easy re-enabling when debugging storage ids.
def __repr__(self):
    return "%s(%s, %d)" % (self.__class__.__name__,
                           tuple2str(self._primary),
                           self._id)

def __repr__(self):
    """Short repr: class name and children only."""
    return "%s(%s)" % (self.__class__.__name__, tuple2str(self._primary))
Nested.__repr__ = __repr__

def __getitem__(self, i):
    """Index into the node's children."""
    return (self._primary[i])
Nested.__getitem__ = __getitem__

def __getslice__(self, i, j):
    # Bug fix: Python 2 calls __getslice__ as obj[i:j] with TWO indices.
    # The old one-argument signature produced a confusing TypeError
    # instead of the intended message below.
    raise Exception("slicing is not defined for Nested.")
Nested.__getslice__ = __getslice__
def __init__(self, *primary, **kwds):
    # Store the children tuple and merge keyword args into __dict__.
    self._primary = primary
    self.__dict__.update(kwds)
    self._init_kwds = kwds               # kept for __deepcopy__
    # (row, column), 0-relative
    self._first_char = None
    self._last_char = None               # column is one past last character.
    return astType.__init__(self, *primary, **kwds)
Nested.__init__ = __init__

def childrens_ids(self):
    # Storage ids of all direct children.
    return [ch._id for ch in self._primary]
Nested.childrens_ids = childrens_ids

def find_child_index(self, id):
    # Position of the direct child whose _id == id, or None.
    idx = 0
    for cc in self._primary:
        if cc._id == id:
            return idx
        idx +=1
    return None
Nested.find_child_index = find_child_index
def __deepcopy__(self, memo):
    # Also see aList.__deepcopy__
    #
    # See copy_attribute() to restore missing attributes
    #
    rv = self.__class__(*self._primary, **self._init_kwds)
    # Attributes copied by REFERENCE (shared environment-like data);
    # everything else is deep-copied.  Closing bracket restored; it was
    # lost in extraction.
    direct_ref = ['_arg_env',
                  '_block_env',
                  '_def_env',
                  '_matcher',
                  '_eval_env',
                  ## '_primary',        # the subtree had better be copied...
                  '_storage',
                  '_pre_interp_hook',
                  ]
    stuff = {}
    for k,v in self.__dict__.items():
        if k in direct_ref:
            stuff[k] = self.__dict__[k]
        else:
            stuff[k] = deepcopy(v)
    rv.__dict__.update(stuff)

    # Re-assign non-copyable members (e.g. from __init__) on a
    # per-class basis, or invalidate the member to insure explicit
    # errors.
    #
    # This is needed to maintain any graph structures in the original,
    # instead of getting a tree (pickle mostly avoids this problem,
    # but __deepcopy__ does not).
    #
    # Members to look at come from __init__(), setup(), and
    # interpret().
    #
    # Re-establish graph structures where possible...
    None

    # ... but ensure exceptions for others.
    try: del rv._parent
    except: pass

    try: del rv._id
    except: pass

    ###try: del rv._primary_ids
    ###except: pass

    return rv
Nested.__deepcopy__ = __deepcopy__
def deref(self, index):
    # Child access by index; see also aList.deref.
    return (self._primary[index])
Nested.deref = deref

def eql(self, other):
    # Recursive content equality test -- not physical equality.
    #
    # as in Matcher.match,
    # Trees are assumed to have interface functions
    # __len__, __getitem__, and __class__
    #
    if self.__class__ == other.__class__: # identical head
        # Test children
        nc = self.__len__()
        if nc != other.__len__():
            return False
        # Compare ALL children.
        for c in range(0,nc):
            if self[c].eql(other[c]):
                continue
            else:
                return False
        return True
    else:
        return False
Nested.eql = eql

def eql_1(self, other):
    # Non-recursive value equality test -- not physical equality.
    #
    # As in Matcher.match,
    # Trees are assumed to have interface functions
    # __len__, __getitem__, and __class__
    #
    if self.__class__ == other.__class__: # identical head
        # Same size?
        nc = self.__len__()
        if nc != other.__len__():
            return False
        return True
    else:
        return False
Nested.eql_1 = eql_1
392 #** Immediate base class
class Immediate(astType):
    """
    Common functionality for all Immediate types.
    Note: this class cannot be instantiated -- it must be subclassed.
    """
    # Docstring delimiters restored; they were lost in extraction.
    # Immediates are leaves: traversal yields only the node itself.
    def traverse_depth(self):
        yield self

    def traverse_breadth(self, head=0):
        yield self

    def __len__(self):
        return 0

    def __getitem__(self, i):
        raise IndexError
def __init__(self, *primary, **kwds):
    # Immediates wrap a single plain Python value in _primary[0].
    assert not isinstance(primary[0], astType) # No nesting here!
    self._first_char = None
    self._last_char = None
    self._primary = primary
    self._init_kwds = kwds
    self.__dict__.update(kwds)
    # Initialize the builtin value base (bases are
    # (Immediate, <builtin type>), hence __bases__[1]).
    self.__class__.__bases__[1].__init__(self)
    return astType.__init__(self, *primary, **kwds)
Immediate.__init__ = __init__
def __deepcopy__(self, memo):
    # Also see aList.__deepcopy__, Nested.__deepcopy__
    rv = self.__class__(*self._primary, **self._init_kwds)
    # Attributes copied by REFERENCE (shared environment-like data).
    # Closing bracket restored; it was lost in extraction.
    direct_ref = ['_arg_env',
                  '_block_env',
                  '_def_env',
                  '_matcher',
                  '_eval_env',
                  '_storage',
                  '_pre_interp_hook',
                  ]
    stuff = {}
    for k,v in self.__dict__.items():
        if k in direct_ref:
            stuff[k] = self.__dict__[k]
        else:
            stuff[k] = deepcopy(v)
    rv.__dict__.update(stuff)

    # Re-establish graph structures where possible...
    None

    # ... but ensure exceptions for others.
    try: del rv._parent
    except: pass

    try: del rv._id
    except: pass

    return rv
Immediate.__deepcopy__ = __deepcopy__
def childrens_ids(self):
    # Immediates are leaves: no children.
    return []
Immediate.childrens_ids = childrens_ids

def __str__(self):
    return self.__repr__()
Immediate.__str__ = __str__

def eql(self, other):
    # Value equality on the wrapped Python value.
    # May be expanded...
    if self.__class__ == other.__class__:
        return self._primary[0] == other._primary[0]
    else:
        return False
Immediate.eql = eql
Immediate.eql_1 = eql

def repr_long(self):
    # Verbose repr, rendering the value via the builtin base type.
    dict_str = dict2str(self.__dict__)
    if dict_str:
        dict_str = ', ' + dict_str
    return self.__class__.__name__ + '(' + \
           self.__class__.__bases__[1].__repr__(self._primary[0]) + \
           dict_str + ')'
Immediate.repr_long = repr_long
# NOTE(review): this first __repr__ (which appends the node _id when
# present) is dead code -- it is immediately shadowed by the shorter
# definition below.  Kept for reference.  Closing parens restored;
# they were lost in extraction.
def __repr__(self):
    if self.__dict__.has_key('_id'):
        return "%s(%s, %d)" % (
            self.__class__.__name__,
            self.__class__.__bases__[1].__repr__(self._primary[0]),
            self._id)
    else:
        return "%s(%s)" % (
            self.__class__.__name__,
            self.__class__.__bases__[1].__repr__(self._primary[0]))

def __repr__(self):
    """Short repr: class name and wrapped value."""
    return "%s(%s)" % (
        self.__class__.__name__,
        self.__class__.__bases__[1].__repr__(self._primary[0]))
Immediate.__repr__ = __repr__

def to_plain_python(self, storage):
    # Unwrap to the plain Python value.
    return self._primary[0]
Immediate.to_plain_python = to_plain_python
505 #** Language core
class InterpreterError(exceptions.Exception):
    # Generic interpreter failure; `args` carries the message.
    def __init__(self, args=None):
        self.args = args

class UnboundSymbol(exceptions.Exception):
    # Raised when a symbol lookup fails during interpretation.
    def __init__(self, args=None):
        self.args = args

class Interpret_tail_call(exceptions.Exception):
    # Used for passing tail call information UP the Python stack.
    # Arg: (tree, env, finish_progs)
    # where
    #   tree is the astType to continue executing
    #   env is the environment to evaluate in
    #   finish_progs is a list of functions to run after the tail call
    #       is finished.  Args are the .interpret() return
    #       value.
    def __init__(self, args=None):
        self.args = args

class Interpret_return(exceptions.Exception):
    # Signals a Return() up the Python stack during interpretation.
    def __init__(self, args=None):
        self.args = args
531 #*** Nested() types
532 #**** Program
class Program(Nested):
    #
    # Program(seq_expr)
    #
    pass

def __init__(self, *primary, **kwds):
    # Outline (display) state for the program node.
    self._outl_parent = None             # astType weakref.
    self._outl_children = None           # (astType weakref) vaList
    self._outl_type = 'subtree'          # "flat" | "nested" | "subtree"
    self.p_label = None                  # StringType
    return Nested.__init__(self, *primary, **kwds)
Program.__init__ = __init__

def set_label(self, lbl):
    # Attach a display label (must be a plain string).
    assert isinstance(lbl, StringType)
    self.p_label = lbl
Program.set_label = set_label

def get_label(self):
    return self.p_label
Program.get_label = get_label

def eval_env(self):
    # Environment from the last evaluation, or None before the first.
    return getattr(self, "_eval_env", None)
Program.eval_env = eval_env
565 #**** Return
class Return(Nested):
    # Return(expr)
    pass

#**** Loop
class Loop(Nested):
    # Loop(seq_expr)
    # The name, args, modifiers, body split is internally available.
    pass
576 #**** Function
class Function(Nested):
    #
    # Function(opt_block_args, seq_expr)        { |a, ...| c;d;...}
    #
    pass

def nargs(self):
    """
    Return number of positional arguments.
    """
    # Docstring delimiters restored; they were lost in extraction.
    return len(self.positional_block_args())
Function.nargs = nargs

def seq_expr(self):
    # Function body, via storage-backed child access.
    return self.deref(1)
Function.seq_expr = seq_expr

def raw_seq_expr(self):
    # Function body, via direct in-memory access (pre-setup).
    return self._primary[1]
Function.raw_seq_expr = raw_seq_expr

def __init__(self, *primary, **kwds):
    # self._binding_env = None
    self._binding_name = None            # "foo" for def foo(): BLOCK and
                                         # foo = { || ...}
    ## self._clone_of = None             # source block
    return Nested.__init__(self, *primary, **kwds)
Function.__init__ = __init__

def block_copy(self, storage):
    # Copy all, including envs.  This requires all copies to come
    # from the original skeleton.
    rv = deepcopy(self)
    copy_char_info(self, rv)
    ## rv._clone_of = self._id
    return rv
Function.block_copy = block_copy
616 def named_block_args(self, symbols = 0):
618 Return named block args as (name : string, value : astType) list.
620 ma = Matcher()
621 kv_pairs = []
622 for ba in self.block_args():
623 # Skip to keyword args.
624 if ma.match(ba, MarkerTyped(String('name'), Symbol('symbol'))):
625 continue
627 # name = val
628 if not ma.match(ba, Set(MarkerTyped(String('name'), Symbol('_')),
629 Marker('val'))):
630 raise InterpreterError, \
631 "Expected 'name' or 'key = value' argument, got: " + str(ba)
633 if symbols:
634 kv_pairs.append((ma._matches['name'], ma._matches['val']))
635 else:
636 kv_pairs.append((ma._matches['name'].as_index(),
637 ma._matches['val']))
638 return kv_pairs
639 Function.named_block_args = named_block_args
641 def positional_block_args(self, symbols = 0):
642 # Return a (string list) of argument names.
643 ma = Matcher()
644 arg_names = []
645 for ba in self.block_args():
646 # if not ma.match_exp_str(ba, '!! name symbol'):
647 if not ma.match(ba, MarkerTyped(String('name'), Symbol('symbol'))):
648 # Skip remaining (keyword) args.
649 return arg_names
650 ## raise InterpreterError, "Invalid argument type: " + str(ba)
651 if symbols:
652 arg_names.append(ma._matches['name'])
653 else:
654 arg_names.append(ma._matches['name'].as_index())
655 return arg_names
656 Function.positional_block_args = positional_block_args
658 def block_args(self):
659 # Return list of all arguments.
660 return self.deref(0)
661 Function.block_args = block_args
663 #**** Call
class Call(Nested):
    #
    # Call(simple_expr, simple_expr_list)          f a b ...
    #
    # Interpretation
    pass

def positional_args(self):
    # Return (astType list) of positional args.
    ma = Matcher()
    lst = []
    for ba in self.block_args():
        # Grab all but `name = val`.
        if not ma.match(ba, Set(MarkerTyped(String('name'), Symbol('_')),
                                Marker('val'))):
            lst.append(ba)
    return lst
Call.positional_args = positional_args

def named_args(self):
    """
    Return named block args as (name : string, value : astType) list.
    """
    # Docstring delimiters restored; they were lost in extraction.
    ma = Matcher()
    kv_pairs = []
    for ba in self.block_args():
        # name = val
        if ma.match(ba, Set(MarkerTyped(String('name'), Symbol('_')),
                            Marker('val'))):
            kv_pairs.append((ma._matches['name'].as_index(),
                             ma._matches['val']))
    return kv_pairs
Call.named_args = named_args

def nargs(self):
    """
    Return number of positional arguments.
    """
    return len(self.positional_args())
Call.nargs = nargs

def block_args(self):
    # Return list of all arguments.
    return self.deref(1)
Call.block_args = block_args

def __init__(self, *primary, **kwds):
    # ?? self._id_count = 0
    # ?? self._called_block = None
    Nested.__init__(self,*primary, **kwds)
Call.__init__ = __init__
718 #**** Member
class Member(Nested):
    #
    # Member(simple_expr, simple_expr)             [ a.b ]
    #
    pass

#**** If
class If(Nested):
    # If(cond, yes, no)
    pass

def __init__(self, *primary, **kwds):
    # No extra state; present for uniformity with sibling classes.
    return Nested.__init__(self, *primary, **kwds)
If.__init__ = __init__
735 #**** Labeled
class Labeled(Nested):
    # Stub: interpretation of labeled expressions is not implemented.
    def interpret(self, env, storage):
        pass

#**** Let
class Let(Nested):
    # HERE. scheme-style implementation
    # Stub: interpretation is not implemented yet.
    def interpret(self, env, storage):
        pass
746 #**** Set
class Set(Nested):
    #
    # Set(simple_expr, expr)
    #
    pass

def arg_names(self, raw_symbols = 0):
    # Return argument names as string list.
    # Also see Set.interpret
    #
    # Handles both tuple targets -- (a, b) = ... -- and single
    # Symbol/String targets.
    args = self.deref(0)
    if isinstance(args, Tuple):
        name_list = []
        arg_list = args._primary[0]
        for nm in arg_list:
            # if not self._matcher.match_exp_str(nm, '!! name symbol'):
            if not self._matcher.match(nm, MarkerTyped(String('name'),
                                                       Symbol('symbol'))):
                raise InterpreterError, \
                      "Set: Invalid argument type: " + str(nm)
            if raw_symbols:
                name_list.append( self._matcher.get('name') )
            else:
                name_list.append( self._matcher.get('name').as_index() )
        return name_list

    # elif self._matcher.match_exp_str( args, '!! name symbol'):
    elif (self._matcher.match(args, MarkerTyped(String('name'),
                                                Symbol('_'))) or
          self._matcher.match(args, MarkerTyped(String('name'),
                                                String('_')))):
        if raw_symbols:
            return [ self._matcher.get('name') ]
        else:
            return [ self._matcher.get('name').as_index() ]
    else:
        raise InterpreterError, "Set: Invalid first argument type: " + \
              str(self.deref(0))
Set.arg_names = arg_names
787 #**** List
class List(Nested):
    #
    # List(alist)
    #
    # All List operations should forward to the contained alist.
    pass

def __init__(self, *primary, **kwds):
    self.l_label = None                  # StringType display label
    return Nested.__init__(self, *primary, **kwds)
List.__init__ = __init__

def set_label(self, lbl):
    # Attach a display label (must be a plain string).
    assert isinstance(lbl, StringType)
    self.l_label = lbl
List.set_label = set_label

def get_label(self):
    return self.l_label
List.get_label = get_label

### eval_list ??
### def to_plain_python(self, storage):
###     return [to_plain_python_f(itm, storage)
###             for itm in self._eval_list]
### List.to_plain_python = to_plain_python

def find_child_index(self, id):
    # Forward to the contained alist.
    return self._primary[0].find_child_index(id)
List.find_child_index = find_child_index
820 ## def __len__(self):
821 ## return len(self._primary[0])
822 ## List.__len__ = __len__
824 # This special definition breaks much code; use explicit access instead.
825 # def __getitem__(self, i):
826 # return self._primary[0][i]
827 # List.__getitem__ = __getitem__
831 #**** viewList
def viewList(*primary, **kwds):
    """Factory: view lists are currently realized as Program nodes."""
    return Program(*primary, **kwds)


class cls_viewList(List):
    """
    A subclass of List for viewing of selected subtrees (outlining).
    """
    # Docstring delimiters restored; they were lost in extraction.
    pass

def __repr__(self):
    # Include the display label alongside the children.  Closing paren
    # restored; it was lost in extraction.
    return "%s(%s, %s)" % (self.__class__.__name__,
                           self.l_label,
                           tuple2str(self._primary))
cls_viewList.__repr__ = __repr__

def __init__(self, *primary, **kwds):
    # Outline (display) state; see also Program.__init__.
    self._outl_parent = None             # astType weakref.
    self._outl_children = None           # (astType weakref) vaList
    self._outl_type = 'nested'           # "flat" | "nested" | "subtree"
    return List.__init__(self, *primary, **kwds)
cls_viewList.__init__ = __init__
858 #**** Map
class Map(Nested):
    # Map(alist)
    """
    The general mapping type.
    """
    # Docstring delimiters restored; they were lost in extraction.
    pass

def __init__(self, *primary, **kwds):
    self._binding_name = None
    self.m_label = None                  # special display label.
    return Nested.__init__(self, *primary, **kwds)
Map.__init__ = __init__

def set_label(self, lbl):
    self.m_label = lbl
Map.set_label = set_label

def get(self, key):
    # Bug fix: the original used a string exception (`raise "..."`),
    # which has been illegal since Python 2.6.  This method is
    # deliberately disabled -- see the message; the return below is
    # intentionally unreachable.
    raise InterpreterError("internally modified: update calling code")
    return self._eval_env.ie_lookup_1(key)
Map.get = get

def to_plain_python(self, storage):
    # Convert the evaluated bindings to a plain dict.
    dct = (self._eval_env._bindings)
    # dict_to_plain_python:
    newdct = {}
    for k,v in dct.items():
        # HERE. Env *should* use Path()s ...
        newdct[k] = v.to_plain_python(storage)
    return newdct
Map.to_plain_python = to_plain_python
893 #**** Subdir(Map)
class Subdir(Map):
    #
    # Subdir is a Map in which some keys are file names with the
    # absolute path as value. Other entries are regular Map entries.
    #
    pass
902 #**** Tuple
class Tuple(Nested):
    #
    # Tuple(alist)
    #
    pass

def __init__(self, *primary, **kwds):
    Nested.__init__(self, *primary, **kwds)
    self.l_label = None                  # StringType
    # Tuples display their elements side by side.
    self._primary[0].setthe(layout = 'horizontal')
Tuple.__init__ = __init__
916 #*** Immediate() types
918 # Broken:
919 # class Bool(Immediate, bool):
920 # pass
922 #**** Int
class Int(Immediate, IntType):
    # Immediate integer value.
    pass

#**** Float
class Float(Immediate, FloatType):
    # Immediate floating point value.
    pass

#**** Complex
class Complex(Immediate, ComplexType): # To be added...
    pass

#**** String
class String(Immediate, StringType):
    # Immediate string value.
    pass

def as_index(self):
    # The plain string, for use as an environment / dict key.
    return self._primary[0]
String.as_index = as_index
def FilepathString(name):
    # Factory: a String tagged with whether `name` exists on disk at
    # construction time (cached in _isfile).
    assert isinstance(name, StringType), "Expecting string for file path."
    rv = String(name)
    if os.path.exists(name):
        rv._isfile = True
    else:
        rv._isfile = False
    return rv

def isfile(self):
    ''' Is (or was) self a valid file name? '''
    # The answer is computed once and cached in _isfile.
    fe = getattr(self, "_isfile", None)
    if fe is None:
        if os.path.exists(self._primary[0]):
            self._isfile = True
            return True
        else:
            self._isfile = False
            return False
    else:
        return fe
String.isfile = isfile
967 #**** Comment
class Comment(String, StringType):
    """
    A subclass of String with support for comments.
    """
    # Docstring delimiters restored; they were lost in extraction.
    pass
974 # # def __init__(self):
975 # # String.__init__(self)
976 # # self._string = string
977 # # Comment.__init__ = __init__
981 #**** Symbol
class Symbol(Immediate, StringType):
    # Immediate symbol (identifier) value.
    pass

def as_index(self):
    # The plain symbol name, for environment lookups.
    return self._primary[0]
Symbol.as_index = as_index
990 #*** Specials
991 #**** Native
class Native:
    # Native is the general placeholder to use when native Python
    # VALUES are to be included in a tree, or when an external value
    # (say, the content of a file) is to be handled "natively".
    #
    # E.g. a Numeric array
    # resulting from interpretation may be put into a new tree for
    # display.
    #
    # Native is similar to aNone and Immediates in behavior. It
    # evaluates to the Python value it holds.
    #
    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        # Show the wrapped value's class, not the value itself.
        return "%s(%s)" % (self.__class__.__name__,
                           self._primary[0].__class__.__name__)

    def traverse_depth(self):
        yield self

    def traverse_breadth(self, head=0):
        yield self

def __init__(self, primary, **kwds):
    # Note: `primary` is the wrapped value itself, not a *args tuple.
    self._primary = (primary,)
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    self._pre_interp_hook = None         # callable of type
                                         # ((l3tree, env, storage) -> None)
    self._attributes = {}                # generic attributes, kept as
                                         # (key -> value) pairs.
Native.__init__ = __init__

def to_plain_python(self, storage):
    return self._primary[0]
Native.to_plain_python = to_plain_python

def value(self):
    # The wrapped native Python value.
    return self._primary[0]
Native.value = value
1037 # # def set_source_string(self, source_string):
1038 # # # Adjustment for block constructs at EOF
1039 # # lines = len(source_string.split('\n'))
1040 # # self._source_string = source_string
1042 # # # Adjustment for block constructs at EOF
1043 # # if self._last_char != None:
1044 # # row2, col2 = self._last_char
1045 # # if row2 >= lines:
1046 # # self._last_char = (lines - 1, col2)
1048 # # return self
1049 # # Native.set_source_string = set_source_string
1051 # # def eql(self, other):
1052 # # if self.__class__ == other.__class__:
1053 # # return True
1054 # # else:
1055 # # return False
1056 # # Native.eql = eql
1059 #**** aNone
class aNone:
    # To allow attachment of labels, and for uniformity, the ast.None()
    # class is instantiated on every use, just as the other Immediate()s
    #
    # This class should inherit from None, but this is illegal.
    # All Immediate() functions must be provided.
    #
    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        return self.__class__.__name__ + '()'

    def traverse_depth(self):
        yield self

    def traverse_breadth(self, head=0):
        yield self

def __len__(self):
    # For compatibility with Matcher().
    return 0
aNone.__len__ = __len__

def __init__(self, *primary, **kwds):
    # Positional constructor arguments are ignored; the wrapped value
    # is always None.
    self._primary = (None,)
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    self._pre_interp_hook = None         # callable of type
                                         # ((l3tree, env, storage) -> None)
    self._attributes = {}                # generic attributes, kept as
                                         # (key -> value) pairs.
aNone.__init__ = __init__

def to_plain_python(self, storage):
    return None
aNone.to_plain_python = to_plain_python

def set_source_string(self, source_string):
    # Single-node version of astType.set_source_string.
    # Adjustment for block constructs at EOF
    lines = len(source_string.split('\n'))
    self._source_string = source_string
    # Adjustment for block constructs at EOF
    if self._last_char != None:
        row2, col2 = self._last_char
        if row2 >= lines:
            self._last_char = (lines - 1, col2)
    return self
aNone.set_source_string = set_source_string

def eql(self, other):
    # All aNone instances are equal to each other.
    if self.__class__ == other.__class__:
        return True
    else:
        return False
aNone.eql = eql
aNone.eql_1 = eql
1124 # Also see class Nested, aTree
1125 # Note: this class' instances pickle improperly with protocol < 2, and
1126 # python versions < 2.3
1127 #**** aList
class aList(ListType):
    #
    # To simplify interpret() and setup() code, the regular list needs
    # some extra features.
    # In particular, access consistent with other astType requires the
    #     self._primary
    # field; list access uses
    #     self
    # as usual.
    # Updates of either require updates of both.
    #
    # Note: the List() class is for the L3 ast; the aList() class
    # is INTERNAL.
    #
    def __str__(self):
        return self.__repr__()

    def __add__(self, other):
        # Concatenation yields another aList (not a plain list).
        return self.__class__( ListType.__add__(self, other) )

def __repr__(self):
    return self.__class__.__name__ + '(' + \
           ListType.__repr__(self) + ')'
aList.__repr__ = __repr__

def repr_long(self):
    # Verbose repr: list content plus non-excluded attributes.
    dict_str = dict2str(self.__dict__)
    if dict_str:
        dict_str = ', ' + dict_str
    return self.__class__.__name__ + '(' + \
           ListType.__repr__(self) + dict_str + ')'
aList.repr_long = repr_long
def __init__(self, arg, **kwds):
    # `arg` is the plain list of children; it is stored both as the
    # list content (via ListType) and as _primary for astType-style
    # access -- updates of either must update both.
    ListType.__init__(self, arg)
    self._primary = (arg,)               # Form is ( [...], )
    # ?? self._id = None
    # ?? for textual persistence. use with __repr__. Test.
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    self._parent = None
    self._pre_interp_hook = None         # callable of type
                                         # ((l3tree, env, storage) -> None)
    self._attributes = {}                # generic attributes, kept as
                                         # (key -> value) pairs.
aList.__init__ = __init__

def to_plain_python(self, storage):
    # Note: returns the raw child list; children are NOT converted.
    return self._primary[0]
aList.to_plain_python = to_plain_python

def deref(self, index):
    return (self[index])
aList.deref = deref
def dependencies(self, env):
    """ Set up post-setup, pre-execution structures.
    """
    # Docstring terminator restored; it was lost in extraction.
    # NOTE(review): _primary is the one-tuple (child_list,), so `child`
    # here is the raw list -- verify against callers that this is the
    # intended recursion target.
    for child in self._primary: child.dependencies(env)
aList.dependencies = dependencies
def eql(self, other):
    # Recursive content equality test; see also Nested.eql.
    # as in Matcher.match,
    # Trees are assumed to have interface functions
    # __len__, __getitem__, and __class__
    #
    if self.__class__ == other.__class__: # identical head
        # Test children
        nc = self.__len__()
        if nc != other.__len__():
            return False
        # Compare ALL children.
        for c in range(0,nc):
            if self[c].eql(other[c]):
                continue
            else:
                return False
        return True
    else:
        return False
aList.eql = eql

def eql_1(self, other):
    # Shallow equality: same class and same child count.
    if self.__class__ == other.__class__: # identical head
        # Same number of children?
        nc = self.__len__()
        if nc != other.__len__():
            return False
        return True
    else:
        return False
aList.eql_1 = eql_1

def find_child_index(self, id):
    # Also see Nested.
    idx = 0
    for cc in self:
        if cc._id == id:
            return idx
        idx +=1
    return None
aList.find_child_index = find_child_index
1241 # This __deepcopy__ (of a list subclass) causes infinite recursion
1242 # during .interpret...
1243 # # def __deepcopy__(self, memo):
1244 # # # Also see Nested.__deepcopy__
1245 # # #
1246 # # # See copy_attribute() to restore missing attributes
1247 # # #
1248 # # rv = self.__class__(*self._primary)
1249 # # direct_ref = ['_arg_env',
1250 # # '_block_env',
1251 # # '_def_env',
1252 # # '_matcher',
1253 # # '_eval_env',
1254 # # ## '_primary', # the subtree had better be copied...
1255 # # '_storage',
1256 # # ]
1257 # # stuff = {}
1258 # # for k,v in self.__dict__.items():
1259 # # if k in direct_ref:
1260 # # stuff[k] = self.__dict__[k]
1261 # # else:
1262 # # stuff[k] = deepcopy(v)
1263 # # rv.__dict__.update(stuff)
1265 # # try: del rv._parent
1266 # # except: pass
1268 # # try: del rv._id
1269 # # except: pass
1271 # # return rv
1272 # # aList.__deepcopy__ = __deepcopy__
1276 # Defining this then also requires __getslice__;
1277 # it also makes other code very hard to check. Use explicit syntax
1278 # instead.
1279 # def __getitem__(self, index):
1280 # return storage.load(ListType.__getitem__(self, index))
1281 # aList.__getitem__ = __getitem__
1284 #**** vaList
class vaList(aList):
    ''' subclass of aList with necessary restrictions for use in
    spanning trees.
    - no interpretation
    - no changes to children
    - no persistence
    '''
    # Closing docstring delimiter restored; it was lost in extraction.
    pass

def interpret(self, env, storage):
    raise Exception("vaList must not be interpreted. Internal error.")
vaList.interpret = interpret

def insert_child_rec(self, index, new, storage):
    raise Exception("Insertion into collapsed list is ambiguous, hence "
                    "not possible. Expand the list first. ")
vaList.insert_child_rec = insert_child_rec
def replace_child(self, orig_id, new_node):
    # Replace the child whose _id is orig_id with new_node, both in
    # the _primary tuple and in the list contents proper.
    # Raises ReplacementError when no child has that id.
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        # Callable raise form; `raise E, msg` is Py2-only syntax.
        raise ReplacementError("Child not found.")
    # Update direct references.
    foo = self._primary[0]
    foo[idx] = new_node
    self._primary = (foo,)
    self[idx] = new_node
vaList.replace_child = replace_child
def insert_child(self, index, child):
    # Also see Nested.insert_child()
    # Insert `child` at `index`, keeping the _primary tuple and the
    # list contents in sync.
    assert self._id != None
    children = self._primary[0]
    children.insert(index, child)
    self._primary = (children,)
    self.insert(index, child)
vaList.insert_child = insert_child
def detach_child(self, orig_id, storage):
    # Remove the child whose _id is orig_id, both from the _primary
    # tuple and the list contents proper.
    # Raises ReplacementError when no child has that id.
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        # Callable raise form; `raise E, msg` is Py2-only syntax.
        raise ReplacementError("Child not found.")
    # Update direct references.
    foo = self._primary[0]
    del foo[idx]
    self._primary = (foo,)
    del self[idx]
vaList.detach_child = detach_child
def setup_valist(self, w_, parallel_nd):
    # Return an empty alist, .setup() using the environment of
    # `parallel_nd`.
    storage = w_.state_.storage
    parent = empty_parent()
    # Reuse the environment `parallel_nd` was stored from, so the new
    # list resolves names the same way.
    def_env = storage.get_attribute(parallel_nd._id, "stored_from_env")
    # Form the raw list.
    rv, _ = vaList([]).setup(parent, def_env, storage)
    return rv
vaList.setup_valist = setup_valist
1356 #**** matcher elements
class Marker(Immediate, StringType):
    # Marker('symbol')
    # Pattern variable for Matcher(); see the setup_if_* uses below.
    pass

def name(self):
    # Note: no _storage use: Marker is Immediate()
    return (self._primary[0])
Marker.name = name


#**** MarkerTyped
class MarkerTyped(Nested):
    # !! name expr -> MarkerTyped(Symbol('name'), expr)
    # where expr is a type sample.
    pass

def name(self):
    # Notice _storage use: MarkerTyped is Nested()
    return self.deref(0).as_index()
MarkerTyped.name = name

def expr(self):
    # The type-sample subtree.
    return self.deref(1)
MarkerTyped.expr = expr
1382 # class Set(astType):
1383 # pass
1386 #**** Inline
class Inline(Nested):
    # Inline( python_init_string )
    # Raw Python code (especially module imports) can be provided here.
    pass

def __init__(self, *primary, **kwds):
    # Keep a direct reference to the code string alongside _primary.
    ( self._python_init_string, ) = primary
    return Nested.__init__(self, *primary, **kwds)
Inline.__init__ = __init__

def __deepcopy__(self, memo):
    rv = Nested.__deepcopy__(self, memo)
    # Re-establish direct references into the copied tree.
    rv._update_refs()
    return rv
Inline.__deepcopy__ = __deepcopy__
1407 #**** Macro
class Macro(Nested):
    # Macro( in_args, body )
    pass

def __init__(self, *primary, **kwds):
    Nested.__init__(self, *primary, **kwds)
Macro.__init__ = __init__

def block_copy(self, storage):
    # Deep copy for expansion, carrying over source character info.
    rv = deepcopy(self)
    copy_char_info(self, rv)
    ## rv._clone_of = self._id
    return rv
Macro.block_copy = block_copy

def block_args(self):
    # Return list of all arguments.
    return self.deref(0)
Macro.block_args = block_args

def nargs(self):
    """
    Return number of arguments.
    """
    return len(self.block_args())
Macro.nargs = nargs

def raw_seq_expr(self):
    # The raw (un-setup) body subtree, second _primary entry.
    return self._primary[1]
Macro.raw_seq_expr = raw_seq_expr
1445 #** Editing / high level support
1447 # Notes
1448 # =========
1449 # The a[A-Z]* classes are meant for explicit construction; they have
1450 # no parsing elements. Some a[A-Z]* classes (a)dd something to
1451 # built-in types.
1453 # The e[A-Z]* classes are meant for explicit construction; they
1454 # are intended for (e)diting functionality.
1459 #*** macro manager (selection tree)
1461 # The selection_tree format:
1463 # tree ::= [tree label leaf * ]
1464 # leaf ::= tree | value
1465 # value ::= anything not eTree
class eTree(Nested):
    # eTree(label, l1, l2, ...)
    # Selection-tree node; _label caches _primary[0].
    def __init__(self, *primary, **kwds):
        Nested.__init__(self, *primary, **kwds)
        self._label = primary[0]

def __deepcopy__(self, memo):
    rv = Nested.__deepcopy__(self, memo)
    # Re-establish graph structures.
    rv._label = rv._primary[0]
    return rv
eTree.__deepcopy__ = __deepcopy__

def append(self, subtree,
           model_cb = lambda subtree, subtree_idx : 0 ):
    # Append subtree and return new child's logical index.
    # The logical index skips the label at _primary[0], hence len - 2.
    self._primary += (subtree,)
    subtree_idx = len(self._primary) - 2

    # Run external updates.
    model_cb(subtree, subtree_idx)

    return subtree_idx
eTree.append = append
1496 #** possible future derived types
1497 # class Future(Nested):
1498 # pass
1501 # class Cond(Nested):
1502 # pass
1504 # class Manual_and(Nested):
1505 # pass
1507 # class Manual_or(Nested):
1508 # pass
1510 # class Dynamic_and(Nested):
1511 # pass
1513 # class Dynamic_or(Nested):
1514 # pass
1516 # class Vector(Nested):
1517 # pass
1519 #* Low-level program printing support
1520 #** source_substring
def source_substring(self):
    """
    Return the leading substring for self, at most one line.
    """
    # Guard clauses: missing position info means no source text.
    if self._first_char is None:
        return "no_source"
    if self._last_char is None:
        return "no_source"
    row1, col1 = self._first_char
    row2, col2 = self._last_char
    # Renamed from `str`, which shadowed the builtin.
    line = self._source_string.split('\n')[row1]
    if row2 > row1:
        # Span continues on later lines; return the rest of row1.
        return line[col1:]
    return line[col1:col2]
astType.source_substring = source_substring
1542 #** character range
def set_char_range(self, first, last):
    # Unconditionally record the (row, col) source span; returns self
    # for chaining.
    self._first_char = first
    self._last_char = last
    return self
Nested.set_char_range = set_char_range
Immediate.set_char_range = set_char_range
aNone.set_char_range = set_char_range
aList.set_char_range = set_char_range

def llet_char_range(self, first, last):
    # If self has no character range yet, set it.
    if self._first_char is None:
        self._first_char = first
        self._last_char = last
    return self
Nested.llet_char_range = llet_char_range
Immediate.llet_char_range = llet_char_range
aNone.llet_char_range = llet_char_range
aList.llet_char_range = llet_char_range
def get_char_range(self):
    # Return (row, col), (row, col) pair.  Upper left / lower right.
    # Nodes without position info yield (0,0), (0,0).
    if self._first_char is None:
        # Comments legitimately lack position info; log anything else.
        # (Replaces the original empty `pass` branch.)
        if not isinstance(self, Comment):
            glbl.logger.info("%s has no character position info." % self)
        return (0,0), (0,0)
    return self._first_char, self._last_char
Nested.get_char_range = get_char_range
Immediate.get_char_range = get_char_range
aList.get_char_range = get_char_range
aNone.get_char_range = get_char_range
1579 #** source_string
def source_string(self):
    # A Program may lack its own source span; fall back to the first
    # entry's source text.
    tst = astType.source_string(self)
    if tst != "no_source":
        return tst
    return self[0].source_string()
Program.source_string = source_string
1587 # This is clearly wrong:
1588 # # def source_string(self):
1589 # # return "\n".join( [child.source_string() for child in self] )
1590 # # aList.source_string = source_string
def source_string(self):
    # Return the source text covered by self's character range, or
    # "no_source" when position or text information is missing.
    if self._first_char is None:
        return "no_source"
    row1, col1 = self._first_char

    if not self._source_string:
        return "no_source"

    lines = self._source_string.split('\n')

    if self._last_char is None:
        return "no_source"
    row2, col2 = self._last_char

    if row2 > row1:
        # Multi-line span: trim the first line's head, last line's tail.
        span = lines[row1 : row2 + 1]
        span[0] = span[0][col1:]
        span[-1] = span[-1][:col2]
        return "\n".join(span)
    return lines[row1][col1:col2]
astType.source_string = source_string
aNone.source_string = source_string
aList.source_string = source_string

def source_string(self):
    # Symbols fall back to their name in _primary[0] when no source
    # text is attached.
    if (self._source_string is None) or (self._first_char is None):
        return self._primary[0]
    return astType.source_string(self)
Symbol.source_string = source_string
1631 #** source string information
def has_source_string(self):
    # True when a source position was recorded for this node.
    return self._first_char is not None
astType.has_source_string = has_source_string
aList.has_source_string = has_source_string
aNone.has_source_string = has_source_string

def has_source_string(self):
    # Native values are created programmatically; never any source.
    return False
Native.has_source_string = has_source_string
def num_lines(self):
    # Number of source lines this node spans, inclusive.
    if not self.has_source_string():
        raise Exception("No source string available.")
    row1, _col1 = self._first_char
    row2, _col2 = self._last_char
    return row2 - row1 + 1
astType.num_lines = num_lines
aList.num_lines = num_lines
aNone.num_lines = num_lines
1656 #** l3_repr
def l3_repr_dedented(self):
    # Return (a parseable?) representation of self, with leading
    # whitespace/indentation stripped w.r.t. the first line.
    #
    # E.g., given ::
    #
    #  |<--- margin
    #      return for_mic(mic + 1, iteration + 1,
    #                     lastp, lastp + num_img(mic + 1))
    #
    # l3_repr(for_mic) would return ::
    #
    #  |<--- margin
    #      for_mic(mic + 1, iteration + 1,
    #                     lastp, lastp + num_img(mic + 1))
    #
    # while l3_repr_dedented returns
    #
    #  |<--- margin
    #  for_mic(mic + 1, iteration + 1,
    #                 lastp, lastp + num_img(mic + 1))
    #
    # Note that split/join are inverses:
    #     "\n".join( "\n\na".split('\n')) -> '\n\na'
    #
    (frow, fcol), (lrow, lcol) = self.get_char_range()
    txt = self.l3_repr()
    if fcol > 0:
        txt_s = txt.split('\n')
        # Take the first line unchanged.
        txt_l = [ txt_s[0] ]
        chopped_lines = False
        for ll in txt_s[1:None] :
            # Strip leading junk from remaining lines.
            txt_l.append( ll[fcol:None] )
            # Warn about chopped characters
            if len(ll) > 0 and (not ll[0:fcol].isspace()):
                chopped_lines = True
        if chopped_lines:
            # For strings, chopping the left returns a different
            # string.  Similarly, expressions must not be chopped;
            # fall back to the undedented representation.
            print ("WARNING: source expression has inverted indentation."
                   " Expect display oddities.\n"
                   " Expression: %(txt)s\n"
                   % locals())
            txt = self.l3_repr()
        else:
            txt = "\n".join(txt_l)
    return txt
astType.l3_repr_dedented = l3_repr_dedented
aNone.l3_repr_dedented = l3_repr_dedented
aList.l3_repr_dedented = l3_repr_dedented
def l3_repr(self):
    # Return a parseable representation of self.
    # `is None` replaces the odd `in [None]` membership test.
    if self._source_string is None:
        raise Exception("No valid l3 string available for " + repr(self))
    return self.source_string()
astType.l3_repr = l3_repr
astType.l3_string = l3_repr
aNone.l3_repr = l3_repr
aList.l3_repr = l3_repr

def l3_repr(self):
    # Return a parseable representation of self; fall back to the
    # primary value when no source text is attached.
    if self._source_string is None:
        return self._primary[0]
    return self.source_string()
String.l3_repr = l3_repr
Symbol.l3_repr = l3_repr
Symbol.l3_string = l3_repr

def l3_repr(self):
    # Return a parseable representation of self; immediates render
    # their primary value when no source text is attached.
    if self._source_string is None:
        return str(self._primary[0])
    return self.source_string()
Immediate.l3_repr = l3_repr
1747 #** l3_string
def l3_string(self):
    # Return a more legible (but not parseable) representation of
    # self: strip one level of string quoting when present.
    st = self.l3_repr()
    if len(st) < 2:
        return st
    if len(st) < 6:
        # Too short for triple quotes; check single quoting only.
        if st[0] == st[-1] and st[0] in [ '"', "'" ]:
            return st[1:-1]
        else:
            return st
    else:
        "'''"
        '"""' # make etags happy.
        if st[0:3] == st[-3:None] and st[0:3] in [ '"""',
                                                   "'''" ]:
            return st[3:-3]
        elif st[0] == st[-1] and st[0] in [ '"', "'" ]:
            return st[1:-1]
        else:
            return st
String.l3_string = l3_string

def l3_string(self):
    return "None"
aNone.l3_string = l3_string

def l3_string(self):
    return repr(self)
Native.l3_string = l3_string
1779 #** py_string
1780 # Simple string representations of Immediate()s.
1781 # Use get_infix_string() for new code.
def py_string(self):
    # Plain text of a String().
    return StringType.__str__(self._primary[0])
String.py_string = py_string

def py_string(self):
    # Plain text of a Symbol().
    return StringType.__str__(self._primary[0])
Symbol.py_string = py_string

def py_string(self):
    return repr(self._primary[0])
Native.py_string = py_string

def py_string(self):
    return ""
aNone.py_string = py_string

def py_string(self):
    # Static dependency restrictions: for a.b, both must be Symbols().
    # Guard clauses with callable raise form (`raise E, msg` is
    # Py2-only); also dropped the unused `mem_name` local.
    first = self.deref(0)
    second = self.deref(1)
    if not isinstance(first, Symbol):
        raise DataDagError("In a.b, a is not a name: " + str(first))
    if not isinstance(second, Symbol):
        raise DataDagError("In a.b, b is not a name: " + str(second))
    return "%s.%s" % (first.py_string(), second.py_string())
Member.py_string = py_string
1817 #* Interaction with program text, low level support
1819 # For the full program tree, recursive "painting" of character
1820 # ranges is a simple way to go from the hierachial information in
1821 # the code
1822 # def a(b,c): b + c
1823 # ~~~~~~~~~~~~~~~~~ , def
1824 # ~ , name
1825 # ~~~ , arg sequence
1826 # ~ ~ , individual args
1827 # ~ ~ , symbols
1828 # ~~~~~ , expression
1830 # to the necessary "flat" information needed in the text sequence
1831 # def a(b,c): b + c
1832 # ddddnaiaiaddseees
1833 # A top-down painting is in the needed order.
def get_paint_array(code_str, max_val = 'inf'):
    """ Using the same string passed to reader.parse(),
    return a Program.paint_array() compatible structure:
    one list per source line, one `max_val` entry per character.
    Direct use is DEPRECATED.
    """
    # E.g.:
    # >>> get_paint_array("ab\ncde", 9)
    # [[9.0, 9.0], [9.0, 9.0, 9.0]]
    # (The original example comment showed column indices, which is
    # not what this function produces.)
    max_val = float(max_val)
    # Comprehension replaces the pre-fill-then-overwrite index loop.
    return [ [max_val] * len(line) for line in code_str.split("\n") ]  # windows?
def paint_array_start(self):
    """
    Form the _id array used for identification of tree nodes.
    Node ids are always integer; positions not corresponding to any
    node are filled with float values.
    """
    # Get largest id.
    largest = 0
    for node in self.top_down():
        largest = max(largest, node._id)
    # Choose larger; the 10x float fill can never equal a node id.
    largest *= 10.0
    # Finish the array
    paint_a = get_paint_array(self._source_string, max_val = largest)
    self.paint_array(paint_a)
    return paint_a
Nested.paint_array_start = paint_array_start
def generic_paint_array(self, arr):
    # Paint self's character span into arr with "color" self._id.
    # Nodes without a recorded span paint nothing.
    if self._first_char is None or self._last_char is None:
        return
    row1, col1 = self._first_char
    row2, col2 = self._last_char   # col2 is 1 past the last character
    if row2 > row1:
        # Multi-line span: paint only the first row, to its end.
        width = len(arr[row1]) - col1
        arr[row1][col1:] = [self._id] * width
    else:
        # Single-line span: repaint [col1, col2) with self._id.
        arr[row1][col1:col2] = [self._id] * (col2 - col1)
def paint_array(self, arr):
    # Paint self, then recurse into the _primary subtrees.
    generic_paint_array(self,arr)
    # Paint children.
    for child in self._primary:
        child.paint_array(arr)
Nested.paint_array = paint_array

def paint_array(self, arr):
    # Leaf node: paint only self.
    generic_paint_array(self, arr)
Immediate.paint_array = paint_array

def paint_array(self, arr):
    # aNone has no painted representation.
    pass
aNone.paint_array = paint_array

def paint_array(self, arr):
    generic_paint_array(self,arr)
    # Paint children; aList keeps them in _primary[0].
    for child in self._primary[0]:
        child.paint_array(arr)
aList.paint_array = paint_array
1907 #* Incremental evaluation
1908 # Classes to use as status markers.
class IncEval:
    # Incremental-evaluation bookkeeping; methods are attached below
    # and instances are reached as storage.ie_ .
    pass

class IEInvalidTime(exceptions.Exception):
    # Timestamp value that must never be compared; any comparison
    # raises the instance itself.
    def __init__(self, args="INV"):
        self.args = args

    def __cmp__(self, other):
        # Py2 comparison hook: using an unusable time is an error.
        raise self

# Time stamp ordering is ie_setup_time, ie_external_time, integers
ie_setup_time = -22
ie_external_time = -11
ie_unusable_time = IEInvalidTime()

def __init__(self, initial_time = 1):
    assert (initial_time > 0 )

    # Simple time tracking.
    # timestamp ::= int
    self._timestamp_dct = {}
    self._time = initial_time

    # Modified tree tracking.
    # _replacements ::= tree_id(new) -> (tree_id(orig), timestamp)
    self._replacements = {}

    # Function clone handling.
    #     id -> program
    #     (id, 'envs') -> (env, env)
    #     (id, call_count, arg_index) -> program
    #     ( (block_inv._id, ccount, arg_index) , 'envs' ) -> (env, env)
    # with types
    #     id :: int,
    #     program :: astType
    #     call_count :: int
    #     arg_index :: int
    # and (env, env) == eval_env, arg_env
    self._block_clones = {}

    # Env handling.
    self._envs = {}
IncEval.__init__ = __init__
1956 #** General time stamping
def touch(self, id):
    # Stamp `id` with a fresh time and return the stamp.
    stamp = self._timestamp_dct[id] = self.time()
    return stamp
IncEval.touch = touch

def touch_setup(self, id):
    # Mark `id` as merely set up, i.e. older than any evaluation.
    self._timestamp_dct[id] = ie_setup_time
IncEval.touch_setup = touch_setup

def is_setup_only(self, id):
    return self._timestamp_dct[id] == ie_setup_time
IncEval.is_setup_only = is_setup_only

def touch_value(self):
    # Fresh stamp for a value; no tree id involved.
    return self.time()
IncEval.touch_value = touch_value

def time(self):
    # Post-incrementing clock: return the current time, then advance.
    self._time += 1
    return self._time - 1
IncEval.time = time

def get_timestamp(self, tree_id):
    return self._timestamp_dct[tree_id]
IncEval.get_timestamp = get_timestamp

def set_timestamp(self, id, stamp):
    # id ::= int | (int, string)
    self._timestamp_dct[id] = stamp
IncEval.set_timestamp = set_timestamp

def newest(self, fst, snd):
    # fst, snd are timestamps
    #
    # For a compound result (list, tuple, etc.), changing a single
    # entry changes the compound.  Use this function via e.g.
    #     reduce(IncEval.newest, status_list)
    # to determine the compound's status.
    #
    return max(fst, snd)
IncEval.newest = newest
def tree_is_older(self, tree_id, val_time):
    # Compare the tree's and leaf's timestamp.
    # special case:
    #     tree        value
    #     ----------------------
    #     setup     < setup
    #
    # The remaining tested cases are
    #     tree        value
    #     ----------------------
    #     external  < int
    #     setup     < external
    #     setup     < int
    # and these correspond to the usual ordering; overall, use
    #     setup < external < integer
    #
    tree_time = self._timestamp_dct[tree_id]

    return (tree_time < val_time)
IncEval.tree_is_older = tree_is_older
2020 # def is_newer(self, fst, snd):
2021 # # The standard ordering relations <, >, = require care with the
2022 # # special values here, so is_newer() is left undefined.
2023 # IncEval.is_newer = is_newer
2027 #** block clones
2028 # Function clones' semantics under incremental evaluation differ from
2029 # those of Call clones.
def clone_table(self):
    return self._block_clones
IncEval.clone_table = clone_table

def has_clone(self, src):
    # `in` replaces dict.has_key(), which Python 3 removed.
    return src in self._block_clones
IncEval.has_clone = has_clone


#** Env() handling
# Env() semantics under incremental evaluation differ from defaults.
# [ in at least Map.interpret() ]
def env_table(self):
    return self._envs
IncEval.env_table = env_table

def has_env_for(self, id):
    # `in` replaces dict.has_key(), which Python 3 removed.
    return id in self._envs
IncEval.has_env_for = has_env_for
2051 #* modified tree evaluation
class ModifiedEval:
    # Holder for replaced-subtree bookkeeping; methods attached below.
    pass


#** modified tree attributes
def set_original_for(self, new_id, orig_id):
    # No-op here; appears to exist for interface symmetry with
    # original_for() -- TODO confirm against callers.
    pass
ModifiedEval.set_original_for = set_original_for

def original_for(self, tree_id):
    # Return the (id, timestamp) of the original tree replaced by
    # tree_id, or None.
    return self._replacements.get(tree_id)
ModifiedEval.original_for = original_for

def is_replacement(self, tree_id):
    # Return ACTIVE replacement status.
    # NOTE(review): _replacements_touched is not initialized in the
    # visible IncEval.__init__ -- confirm it is set up elsewhere.
    return (self._replacements.has_key(tree_id) and
            not self._replacements_touched.has_key(tree_id) )

ModifiedEval.is_replacement = is_replacement

def not_replacement(self, tree_id):
    # Treat the tree_id as original from now on.
    self._replacements_touched[tree_id] = 1
ModifiedEval.not_replacement = not_replacement
2080 #* post-parsing -- .setup()
2082 #** Nested types.
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see aList.setup
    self._parent = parent._id

    # Store in Memory()
    self._id = storage.store(self, def_env)
    child_id_list = [child.setup(self, def_env, storage)
                     for child in self._primary]
    # Children may be replaced during their own .setup(); refresh.
    self._primary = tuple([child for child, id in child_id_list ])

    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)

    return self, self._id
Nested.setup = setup
def setup_if_for(self):
    # Replace macro-only identifiers for the special case
    #     if "for": ...
    #
    # See also reader.py, rule
    #     expr : FOR expr IN expr COLON py_block
    # and l3if_chooser()
    #
    # Replace ITEMS, IDX, LEN, LOOP in
    #
    #     print reader.parse('''
    #     if "for":
    #         ! ITEMS = ! SEQ
    #         ! IDX = 0
    #         ! LEN = len(! ITEMS)
    #         # orig. for
    #         def "LOOP"():
    #             if ! IDX < ! LEN:
    #                 ! IDX = ! IDX + 1
    #                 # V in S.
    #                 ! V = ! ITEMS[ ! IDX - 1 ]
    #                 # Body B
    #                 ! B
    #                 # Iterate.
    #                 return ! LOOP()
    #     ! LOOP()
    #     ''')
    #
    # but keep original V, SEQ, B.
    #
    # The following pattern is from above, with manual fix for "LOOP"().
    #
    ma = Matcher()
    if ma.match(
        self,
        If(String('for'), aList([Set(Marker('ITEMS'), Marker('SEQ')), Set(Marker('IDX'), Int(0)), Set(Marker('LEN'), Call(Symbol('len'), aList([Marker('ITEMS')]))), Set(Marker('LOOP'), Macro(aList([]), aList([If(Call(Symbol('<'), aList([Marker('IDX'), Marker('LEN')])), aList([Set(Marker('IDX'), Call(Symbol('+'), aList([Marker('IDX'), Int(1)]))), Set(Marker('V'), Call(Member(Symbol('operator'), Symbol('getitem')), aList([Marker('ITEMS'), Call(Symbol('-'), aList([Marker('IDX'), Int(1)]))]))), Marker('B'), Return(Call(Marker('LOOP'), aList([])))]), aList([]))]))), Call(Marker('LOOP'), aList([]))]), aList([]))):

        # Suffix internal names with this node's id to keep them unique.
        id_s = str(self._id)

        # One-time manual tree conversions:
        # (query-replace "Marker('ITEMS')" "Symbol('ITEMS' + id_s)" )
        # (query-replace "Marker('IDX')" "Symbol('IDX' + id_s)" )
        # (query-replace "Marker('LEN')" "Symbol('LEN' + id_s)" )
        # (query-replace "Marker('LOOP')" "Symbol('LOOP' + id_s)" )
        #
        # (query-replace "Marker('SEQ')" "ma['SEQ']" )
        # (query-replace "Marker('V')" "ma['V']" )
        # (query-replace "Marker('B')" "ma['B']" )

        foo = list(self._primary)
        foo[1] = aList([Set(Symbol('ITEMS' + id_s), ma['SEQ']), Set(Symbol('IDX' + id_s), Int(0)), Set(Symbol('LEN' + id_s), Call(Symbol('len'), aList([Symbol('ITEMS' + id_s)]))), Set(Symbol('LOOP' + id_s), Macro(aList([]), aList([If(Call(Symbol('<'), aList([Symbol('IDX' + id_s), Symbol('LEN' + id_s)])), aList([Set(Symbol('IDX' + id_s), Call(Symbol('+'), aList([Symbol('IDX' + id_s), Int(1)]))), Set(ma['V'], Call(Member(Symbol('operator'), Symbol('getitem')), aList([Symbol('ITEMS' + id_s), Call(Symbol('-'), aList([Symbol('IDX' + id_s), Int(1)]))]))), ma['B'], Return(Call(Symbol('LOOP' + id_s), aList([])))]), aList([]))]))), Call(Symbol('LOOP' + id_s), aList([]))])
        self._primary = tuple(foo)
If.setup_if_for = setup_if_for
def setup_if_while(self):
    # Replace macro-only identifiers for the special case
    #     if "while": ...
    #
    # See also reader.py, rule
    #     expr : WHILE expr COLON py_block
    # and l3if_chooser()
    #
    # Replace WLOOP in
    #
    #     print reader.parse('''
    #     if "while":
    #         def "WLOOP"():
    #             if not !C:     # force boolean evaluation via not
    #                 return
    #             else:
    #                 !B
    #             return !WLOOP()
    #         !WLOOP()
    #     ''')
    #
    # but keep original C and B.
    #
    # The following pattern is from above, with
    #     Set(Symbol('WLOOP')...) -> Set(Marker('WLOOP')...)
    #     aList([Marker('B')]) -> Marker('B')
    #
    ma = Matcher()
    if ma.match(self,
                If(String('while'), aList([Set(Marker('WLOOP'), Macro(aList([]), aList([If(Call(Symbol('not'), aList([Marker('C')])), aList([Return(aNone())]), Marker('B')), Return(Call(Marker('WLOOP'), aList([])))]))), Call(Marker('WLOOP'), aList([]))]), aList([]))
                ):
        # Suffix the loop name with this node's id to keep it unique.
        id_s = str(self._id)

        # One-time manual tree conversions:
        # (query-replace "Marker('WLOOP')" "Symbol('WLOOP' + id_s)" )
        # (query-replace "Marker('C')" "ma['C']")
        # (query-replace "Marker('B')" "ma['B']")

        foo = list(self._primary)
        foo[1] = aList([Set(Symbol('WLOOP' + id_s), Macro(aList([]), aList([If(Call(Symbol('not'), aList([ma['C']])), aList([Return(aNone())]), ma['B']), Return(Call(Symbol('WLOOP' + id_s), aList([])))]))), Call(Symbol('WLOOP' + id_s), aList([]))])
        self._primary = tuple(foo)
If.setup_if_while = setup_if_while
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see Nested.setup
    self._parent = parent._id

    # Store in Memory()
    self._id = storage.store(self, def_env)

    # Replace macro-only identifiers for the special case
    #     if "while": ...
    self.setup_if_while()

    # Replace macro-only identifiers for the special case
    #     if "for": ...
    self.setup_if_for()

    # Continue regular setup.
    child_id_list = [child.setup(self, def_env, storage)
                     for child in self._primary]
    self._primary = tuple([child for child, id in child_id_list ])

    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)

    return self, self._id
If.setup = setup
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see Nested.setup
    self._parent = parent._id

    # Store in Memory()
    self._id = storage.store(self, def_env)
    # aList children live in the list itself, not in _primary.
    child_id_list = [child.setup(self, def_env, storage)
                     for child in self]
    self._primary = ([child for child, id in child_id_list ],)

    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)

    return self, self._id
aList.setup = setup

def setup(self, parent, def_env, storage):
    # Macro arguments must be set up in their expansion, but Inline
    # has no expansion.
    return Nested.setup(self, parent, def_env, storage)
Inline.setup = setup
def setup(self, parent, def_env, storage):
    # Program()s are GIVEN the appropriate Env(), so
    # instead of self._block_env = def_env.new_child(self):
    self._block_env = def_env
    # Values written while running the Program thus land in the
    # original (caller-provided) environment.
    return Nested.setup(self, parent, self._block_env, storage)
Program.setup = setup
def setup(self, parent, def_env, storage):
    # Bindings in environments: arguments get their own child env, the
    # body another child below that.
    argument_env = def_env.new_child(self, name = "skel.arg")
    # Positional arguments.
    for ba in self.positional_block_args():
        argument_env.bind(ba, None)     # static dependency bindings.
    # Named arguments.
    for (argn, argv) in self.named_block_args():
        argument_env.bind(argn, argv)   # static dependency bindings.

    block_env = argument_env.new_child(self, name = "skel.blck")
    self._arg_env = argument_env
    self._block_env = block_env

    rv = Nested.setup(self, parent, block_env, storage)

    # Provide name->id bindings (possible only after Nested.setup has
    # assigned ids to the argument subtrees).
    block_arg_l = self.deref(0)
    ii = 0
    for ba in self.positional_block_args():
        argument_env.bind_id(ba, block_arg_l[ii]._id)
        ii += 1
    for argn, _ in self.named_block_args():
        # Use id of Set
        argument_env.bind_id(argn, block_arg_l[ii]._id)
        ii += 1
    return rv
Function.setup = setup
def setup(self, parent, def_env, storage):
    # Initialize internal tree.
    self.l_view_call.setup(parent, def_env, storage)

    rv = Nested.setup(self, parent, def_env, storage)
    return rv
Loop.setup = setup


def setup(self, parent, def_env, storage):
    # Environments: a Macro expands in place, so both argument and
    # block environments are the defining environment itself.
    self._arg_env = def_env
    self._block_env = def_env

    rv = Nested.setup(self, parent, def_env, storage)
    return rv
Macro.setup = setup
def setup(self, parent, def_env, storage):
    # extra store ??
    # self._stored_in = None
    self._matcher = Matcher()

    for name in self.arg_names():
        def_env.bind_df(name, None)     # static dependency bindings.

    rv = Nested.setup(self, parent, def_env, storage)

    # .data_dag() preparation.
    arg_symbols = self.arg_names(raw_symbols = 1)
    ii = 0
    for name in self.arg_names():
        def_env.bind_id_df(name, arg_symbols[ii]._id)
        ii += 1

    # Check for
    #     def foo():
    #         ...
    # and use the name 'foo' for the body's _binding_name.
    #
    # Pattern from
    #     import reader ; reload(reader) ;
    #     reader.parse('!! def_name string = !! the_block {|bb| bb}')
    # and modified.
    if self._matcher.match(self,
                           Set(MarkerTyped(String('def_name'),
                                           Symbol('string')),
                               MarkerTyped(String('the_block'), Function()))):
        ma = self._matcher._matches
        ma['the_block']._binding_name = ma['def_name']

        # .data_dag() preparation -- also provide block binding
        def_env.bind_df(ma['def_name'].py_string(), ma['the_block'])

    # Check for
    #     foo = { ... }
    #     ...
    # and use the name 'foo' for the Map()'s _binding_name.
    #
    # Pattern from
    #     import reader ; reload(reader) ;
    #     reader.parse('!! def_name string = !! the_map { }')
    # and modified.
    if self._matcher.match(self,
                           Set(MarkerTyped(String('def_name'),
                                           Symbol('string')),
                               MarkerTyped(String('the_map'), Map()))):
        ma = self._matcher._matches
        ma['the_map']._binding_name = ma['def_name']

        # .data_dag() preparation -- also provide block binding
        def_env.bind_df(ma['def_name'].py_string(), ma['the_map'])

    return rv
Set.setup = setup
def setup(self, parent, def_env, storage):
    # Maps get their own child environment for member bindings.
    block_env = def_env.new_child(self, name = "skel.blck")
    self._block_env = block_env
    return Nested.setup(self, parent, block_env, storage)
Map.setup = setup
2381 #** Immediates.
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    self._parent = parent._id

    # Store in Memory()
    self._id = storage.store(self, def_env)

    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)

    return self, self._id
Immediate.setup = setup

def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    self._parent = parent._id

    # Store in Memory()
    self._id = storage.store(self, def_env)

    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)

    return self, self._id
aNone.setup = setup
Native.setup = setup
2411 #* Interpretation -- .interpret()
2412 # Incremental evaluation note:
2413 # After evaluation, a node's status is always 'evaluated', but the
2414 # .interpret() return status may be different -- to inform the
2415 # parent of appropriate action.
2418 #** external entry point
def interpret_start(self, env, storage):
    # External entry point: interpret self and return the value.
    # Exceptions are announced and re-raised.
    try:
        rv, ie_status = self.interpret(env, storage)
        return rv
    except Exception:
        # Dropped the Py2-only `except E, e` binding and the
        # unreachable `return e` that followed the re-raise.
        print ("Warning: Returning Exception")
        raise
Program.interpret_start = interpret_start
2430 #** Nested types
def interpret(self, env, storage):
    # Abstract: concrete Nested subclasses override .interpret().
    raise InterfaceOnly
Nested.interpret = interpret
def interpret(self, env, storage):
    """ Execute an inline Python fragment and import the names it
    binds back into [env].  Returns (value, ie_status).
    """
    # Run the python code and import the resulting names into the
    # current environment.
    #
    # The python fragment is ALWAYS interpreted, to avoid pickling
    # problems.  The values returned by the Python code must be
    # CONSTANT over repeated calls.
    #
    # The Python code fragment is considered atomic; changes to the
    # String() holding it will re-evaluate.
    #
    # The Python string can use l3 values, but it cannot call l3
    # functions.
    # This is unlike the l3 call py_f(l3_g, x), for which special handling
    # allows calling l3 from python.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    py_string, ie_status = self.deref(0).interpret(env, storage)

    # Incremental evaluation check.
    if storage.ie_.tree_is_older(self._id, ie_status):
        # To
        # - get useful error messages, and
        # - allow pickling of executed inline code including functions
        # the string is written to a file, and that executed.
        # The file is kept.
        fname = os.path.join(os.getcwd(), "_l3_inline-%d" % self._id)
        py_file = open(fname, "w")
        py_file.write(py_string)
        py_file.close()

        # Evaluate and store.
        # -- Assignments are made in a local python dict [py_env];
        #    new names and overwritten names are imported back to [env];
        # -- The [env] bindings are made available via globals() so
        #    they are not changed outside of L3 control.
        #
        # These name binding cases are illustrated via
        #     py_env = {}
        #     all_globals = {"aa" : 10}
        #     all_globals.update(globals())
        #     exec "aa = aa + 1; aa += 10" in all_globals, py_env
        #     exec "global aa; aa = 1" in all_globals, py_env
        py_env = {'def_env' : env}
        all_globals = env.all_lexical_bindings({})
        all_globals.update(globals())
        execfile(fname, all_globals, py_env)
        env.import_all_names(py_env)    # Uses bind_mem_only for
                                        # EXTERNAL time stamp.

        # Keeping py_env is a major pickle trap, so evaluate the
        # Python code body every time to get them.
        # # rv = py_env
        # NOTE(review): this branch returns None while the cached
        # branch below returns py_env -- confirm callers tolerate both.
        rv = None

        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", None,
                               "interp_env", env)

        # Incremental evaluation data.
        ie_status = storage.ie_.touch(self._id)
        return rv, ie_status

    else:
        # Keeping py_env is a major pickle trap, so evaluate the
        # Python code body every time to get them.
        # The original evaluation's time stamp is used, so external
        # additions will not cause re-execution.
        fname = os.path.join(os.getcwd(), "_l3_inline-%d" % self._id)

        py_env = {'def_env' : env}
        all_globals = env.all_lexical_bindings({})
        all_globals.update(globals())
        execfile(fname, all_globals, py_env)
        env.import_all_names(py_env)    # Override all EXTERNAL time
                                        # stamp values.
        return ( py_env, storage.ie_.get_timestamp(self._id) )
Inline.interpret = interpret
def interpret(self, env, storage):
    """ Interpret each expression of the Program in [env] and return
    the (value, timestamp) of the last one.
    """
    # See also Map.interpret(), aList.interpret
    # Return last expr value.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    seq_expr = self.deref(0)
    if len(seq_expr) == 0:
        raise InterpreterError, "Empty program"
    else:
        rv_stat_l = [(expr).interpret(env, storage) for expr in seq_expr]

        # Remember the environment this Program ran in; values written
        # by the body live here.
        self._eval_env = env
        rv, last_stamp = rv_stat_l[-1]

        # Incremental evaluation check: self changed if any
        # subexpression produced a newer stamp.
        newest_stamp = reduce(storage.ie_.newest,
                              [status for _, status in rv_stat_l])
        if storage.ie_.tree_is_older(self._id, newest_stamp):
            # Interpretation values.
            storage.id2tree(rv, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv,
                                   "interp_env", env)
            # Incremental evaluation data.
            storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), last_stamp )

            return rv, last_stamp
        else:
            # ---- Nothing changed; reuse the cached result.
            return storage.get_attribute(self._id, "interp_result"), \
                   storage.ie_.get_timestamp( (self._id, 'value') )
Program.interpret = interpret
def interpret(self, env, storage):
    """ Interpreting a Function() yields the Function itself; on first
    interpretation, capture the definition environment and evaluate
    the named-argument defaults.
    """
    # Also see Call.interpret.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Incremental evaluation check: only do the work once.
    if storage.ie_.is_setup_only(self._id):
        self._def_env = env

        # Evaluate named arguments (defaults), binding value and time
        # stamp in the argument environment.
        for (arg_name, expr) in self.named_block_args():
            arg_val, status = expr.interpret(env, storage)
            self._arg_env.bind_ptr(arg_name, arg_val, self._id)
            self._arg_env.bind_time_stamp_ptr(arg_name, status, self._id)

        # Interpretation values.
        storage.id2tree(self, self)
        storage.set_attributes(self._id,
                               "interp_result", self,
                               "interp_env", env)
        # Incremental evaluation data.
        stamp = storage.ie_.touch(self._id)

        return self, stamp
    else:
        return self, storage.ie_.get_timestamp(self._id)
Function.interpret = interpret
def interpret(self, env, storage):
    """ Interpreting a Macro() yields the Macro itself; on first
    interpretation, capture the definition environment.  Unlike
    Function.interpret, there are no named arguments to evaluate.
    """
    # Also see Call.interpret.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Incremental evaluation check: only do the work once.
    if storage.ie_.is_setup_only(self._id):
        self._def_env = env

        # Evaluate named arguments.  (Macros take none.)

        # Interpretation values.
        storage.id2tree(self, self)
        storage.set_attributes(self._id,
                               "interp_result", self,
                               "interp_env", env)
        # Incremental evaluation data.
        stamp = storage.ie_.touch(self._id)

        return self, stamp
    else:
        return self, storage.ie_.get_timestamp(self._id)
Macro.interpret = interpret
def interpret(self, env, storage, tail_finishing_progs = []):
    """ Interpret a call; the trampoline loop below converts tail
    calls (raised as Interpret_tail_call) into iteration, collecting
    per-frame finisher callbacks to run once a value is produced.
    """
    # Also see CallableFunction.__call__
    # This code is for tail call handling.
    # NOTE(review): the mutable default [tail_finishing_progs] is only
    # read, never mutated, so the usual shared-default trap does not
    # apply here.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    fin_progs = []                      # Ensure NEW list.
    fin_progs.extend(tail_finishing_progs)
    while 1:
        try:
            # No Incremental evaluation data here.
            # # rv, ie_status = self._call_real_interpret(env, storage)
            rv, ie_status = self._dispatch(env, storage)
        except Interpret_tail_call, contin:
            # A tail call replaces the current frame: continue the
            # loop with the callee's tree and env, keeping finishers.
            self, env, _prog_l = contin.args
            fin_progs.extend(_prog_l)
            continue
        else:
            # Finish up after tail calls, innermost frame first.
            fin_progs.reverse()
            for finish in fin_progs:
                finish(rv, ie_status)
            return rv, ie_status
Call.interpret = interpret
def _dispatch(self, env, storage):
    """ Resolve the call target and route to the matching handler:
    shell command, l3 Function/Macro, or native Python callable.
    """
    # Find block, ignore time stamp.
    try:
        block, _ = self.deref(0).interpret(env, storage)
    except UnboundSymbol:
        # No l3 binding found, check shell commands.
        block = self.deref(0)
        if isinstance(block, (String, Symbol)):
            if glbl.shell_cmds.exists( block.py_string() ):
                return self._interp_shell(block, env, storage)

    if not block:
        # Nothing found.
        raise InterpreterError, "Function/name not found: " + str(block)

    # Function / Macro
    #     {|| ... }(a,b) or [|| ... ](a,b)
    if isinstance(block, (Function, Macro)):
        return self._interp_func(block, env, storage)

    # Handle native Python
    elif callable(block):
        return self._interp_native(block, env, storage)

    else:
        raise InterpreterError, "Expected Function(), got " + str(block)
Call._dispatch = _dispatch
2667 def _interp_shell(self, block, env, storage):
2668 # Verify argument count.
2670 #---- Bind named block arguments from Call().
2671 named_arg_status_l = [expr.interpret(env, storage)
2672 for arg_name, expr in self.named_args()]
2674 # ---- Bind positional block arguments.
2675 pos_arg_status_l = [ba.interpret(env, storage)
2676 for ba in self.positional_args() ]
2678 def new_call_new_val():
2679 # Convert arguments to strings.
2680 # A list could be converted to different textual forms
2681 # requiring a much more complex setup, so ignore lists for
2682 # now.
2684 # Positional args.
2685 pos_wrapped = []
2686 arg_index = 0
2687 for ba, _ in pos_arg_status_l:
2688 if isinstance(ba, Nested):
2689 raise InterpreterError("no nested shell arguments")
2691 elif isinstance(ba, String):
2692 pos_wrapped.append(ba.py_string())
2694 elif not isinstance(ba, StringType):
2695 raise InterpreterError("shell arguments must evaluate "
2696 "to simple strings.")
2697 else:
2698 pos_wrapped.append(str(ba))
2699 arg_index += 1
2701 # Named args.
2702 named_wrapped = []
2703 arg_index = 0
2704 for (arg_name, _) in self.named_args():
2705 arg, _ = named_arg_status_l[arg_index]
2706 if isinstance(arg, Nested):
2707 raise InterpreterError("shell arguments must evaluate "
2708 "to simple strings.")
2709 else:
2710 named_wrapped.append([arg_name, arg.py_string()])
2711 arg_index += 1
2714 # ---- Evaluate.
2715 try:
2716 rv = (self._run_process(block.py_string(),
2717 pos_wrapped, named_wrapped))
2718 ie_status = storage.ie_.time()
2719 except Interpret_tail_call:
2720 raise # not reached ?
2721 except Interpret_return, e:
2722 rv, ie_status = e.args # not reached ?
2724 # Interpretation values.
2725 storage.id2tree(rv, self)
2726 storage.set_attributes(self._id,
2727 "interp_result", rv, "interp_env", env)
2729 # Incremental evaluation data.
2730 storage.ie_.touch(self._id)
2731 storage.ie_.set_timestamp( (self._id, 'value'), ie_status)
2733 return rv, ie_status
2734 # -----------
2736 # ---- Incremental evaluation check.
2737 if (len(named_arg_status_l) + len(pos_arg_status_l)) == 0:
2738 if storage.ie_.is_setup_only(self._id):
2739 return new_call_new_val()
2740 else:
2741 return storage.get_attribute(self._id, "interp_result"),\
2742 storage.ie_.get_timestamp(self._id)
2743 else:
2744 arg_eval_time = reduce(storage.ie_.newest,
2745 [status for _, status in named_arg_status_l] +
2746 [status for _, status in pos_arg_status_l])
2747 if storage.ie_.tree_is_older(self._id, arg_eval_time):
2748 return new_call_new_val()
2749 else:
2750 return storage.get_attribute(self._id, "interp_result"),\
2751 storage.ie_.get_timestamp(self._id)
2752 Call._interp_shell = _interp_shell
def _run_process(self, cmd, pos_arg_l, name_val_l):
    # Run shell command [cmd] with positional and [name, value]
    # arguments via the global command registry, returning its result.
    # This member function can be replaced by the gui for interactive
    # use.
    return glbl.shell_cmds.system(cmd, pos_arg_l, name_val_l)
Call._run_process = _run_process
def _interp_native(self, py_func, env, storage):
    """ Interpret a call whose target is a native Python callable.

    l3 Function() arguments are wrapped as CallableFunction so Python
    code can invoke them.  Returns (value, ie_status); cached results
    are reused when neither self nor any argument changed.
    """
    # Verify argument count.
    ## if func_.nargs() != call_.nargs():

    #---- Bind named block arguments from Call().
    named_arg_status_l = [expr.interpret(env, storage)
                          for arg_name, expr in self.named_args()]

    # ---- Bind positional block arguments.
    pos_arg_status_l = [ba.interpret(env, storage)
                        for ba in self.positional_args() ]

    def new_call_new_val():
        # ---- Check for any Function()s, and wrap them as callables.
        #      E.g., for the call  foo {|| ... },  where foo is a
        #      Python function expecting a callable, the block is
        #      wrapped.
        # Positional args.
        pos_wrapped = []
        arg_index = 0
        for ba, _ in pos_arg_status_l:
            if isinstance(ba, Function):
                pos_wrapped.append(
                    CallableFunction(ba, env, storage, self, arg_index))
            else:
                pos_wrapped.append(ba)
            arg_index += 1

        # Named args.
        named_wrapped = {}
        arg_index = 0
        for (arg_name, _) in self.named_args():
            arg, _ = named_arg_status_l[arg_index]
            if isinstance(arg, Function):
                named_wrapped[arg_name] = (
                    # Note use of arg_name as index for CallableFunction.
                    CallableFunction(arg, env, storage, self, arg_name))
            else:
                named_wrapped[arg_name] = arg
            arg_index += 1

        # ---- Evaluate.
        try:
            if len(named_wrapped) > 0:
                rv = py_func(*pos_wrapped, **named_wrapped)
            else:
                rv = py_func(*pos_wrapped)
            ie_status = storage.ie_.time()

        except Interpret_tail_call:
            raise                       # not reached ?
        except Interpret_return, e:
            rv, ie_status = e.args      # not reached ?

        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv, "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        storage.ie_.set_timestamp( (self._id, 'value'), ie_status)

        return rv, ie_status
    # -----------

    # ---- Incremental evaluation check.
    # Functions with no arguments are usually called for side
    # effects.
    # Assuming those do not influence the results of the
    # program, executing them on every pass is ok (but wasteful).
    #
    # Or, by viewing zero-argument calls as constants, they can be
    # treated like Immediate()s -- and are only called one time.
    if (len(named_arg_status_l) + len(pos_arg_status_l)) == 0:
        if storage.ie_.is_setup_only(self._id):
            return new_call_new_val()
        else:
            return storage.get_attribute(self._id, "interp_result"),\
                   storage.ie_.get_timestamp(self._id)
    else:
        arg_eval_time = reduce(storage.ie_.newest,
                               [status for _, status in named_arg_status_l] +
                               [status for _, status in pos_arg_status_l])
        if storage.ie_.tree_is_older(self._id, arg_eval_time):
            return new_call_new_val()
        else:
            return storage.get_attribute(self._id, "interp_result"),\
                   storage.ie_.get_timestamp(self._id)
Call._interp_native = _interp_native
2855 def _interp_func(self, block, env, storage):
2856 if isinstance(block, Function):
2857 is_new, program, eval_env, arg_env = \
2858 self.call_function_prep(env, storage, block)
2860 if isinstance(block, Macro):
2861 is_new, program, eval_env, arg_env = \
2862 self.call_macro_prep(env, storage, block)
2865 # ----------- Data for this block
2866 # Valid with tail call or without, so this MUST PRECEDE the
2867 # call to program.interpret(), below.
2869 if is_new:
2870 storage.push_attributes(block._id, "interp_clone", program._id)
2871 storage.set_attributes(self._id,
2872 "interp_program", program,
2873 "interp_env", arg_env)
2874 storage.set_attributes(program._id,
2875 # lexical information
2876 "clone_of", block._id,
2877 "interp_env", arg_env,
2878 # dynamic information
2879 "cloned_by", self._id
2882 # ----------- Evaluate
2883 # Affects: Program.interpret, Function_invo.interpret
2884 def finish(rv, ie_status):
2885 # Incremental evaluation check.
2886 if storage.ie_.tree_is_older(self._id, ie_status):
2887 # Interpretation values.
2888 storage.id2tree(rv, self)
2889 storage.set_attributes(self._id, "interp_result", rv)
2891 # Incremental evaluation data.
2892 storage.ie_.touch(self._id)
2893 storage.ie_.set_timestamp((self._id, 'value'), ie_status)
2894 return rv, ie_status
2896 else:
2897 return storage.get_attribute(self._id, "interp_result"),\
2898 storage.ie_.get_timestamp(self._id)
2900 try:
2901 rv, ie_status = program.interpret(eval_env, storage)
2902 except Interpret_tail_call, contin:
2903 new_tree, _env, _prog_l = contin.args
2904 _prog_l.append(finish)
2905 raise
2906 except Interpret_return, e:
2907 rv, ie_status = e.args
2909 return finish(rv, ie_status)
2910 Call._interp_func = _interp_func
2912 def call_function_prep(call_, env, storage, func_):
2913 # Prepare the block, evaluate arguments, and provide
2914 # environments.
2915 # See callable_interpret_prep().
2917 # Also see Function.interpret, CallableFunction.__call__
2918 # Verify argument matching.
2919 if func_.nargs() != call_.nargs():
2920 raise InterpreterError, \
2921 ("Argument count mismatch: \n"
2922 " %s\n"
2923 "called with \n"
2924 " %s" % (func_.source_string(),
2925 call_.source_string()))
2926 call_names = map(lambda (fst, _): fst, call_.named_args())
2927 func_names = map(lambda (fst, _): fst, func_.named_block_args())
2928 for cn in call_names:
2929 if cn not in func_names:
2930 raise InterpreterError, \
2931 ("Named argument mismatch: \n"
2932 " %s\n"
2933 "called with \n"
2934 " %s\n"
2935 "Name %s is not in definition." % (func_.source_string(),
2936 call_.source_string(),
2937 cn))
2938 # Incremental evaluation check.
2939 if storage.ie_.has_clone( call_._id):
2940 ctab = storage.ie_.clone_table()
2941 program = ctab[call_._id]
2942 eval_env, arg_env = ctab[(call_._id, 'envs')]
2943 is_new = False
2945 else:
2946 #---- Turn block into executable.
2947 # Under incremental evaluation this copy must not be formed if
2948 # a prior version exists.
2950 newfunc_ = func_.block_copy(storage)
2951 program = Program(newfunc_.raw_seq_expr())
2953 #---- Set up argument environment.
2954 if func_._binding_name != None:
2955 arg_env = func_._def_env.new_child(
2956 program, name = func_._binding_name.py_string())
2957 else:
2958 arg_env = func_._def_env.new_child(program, name = "run.arg")
2960 #---- Bind named block arguments from Function() definition.
2961 for (arg_name, _) in func_.named_block_args():
2962 arg_val, status = func_._arg_env.ie_lookup_ptr(arg_name)
2963 arg_env.bind_ptr(arg_name, arg_val, call_._id)
2964 arg_env.bind_time_stamp_ptr(arg_name, status, call_._id)
2966 #---- Bind positional block arguments
2967 # Get arguments' names.
2968 arg_names = func_.positional_block_args()
2970 ma = Matcher()
2971 position_index = 0
2972 for ba in call_.positional_args():
2973 arg_val, status = ba.interpret(env, storage)
2974 arg_env.bind_ptr(arg_names[position_index], arg_val, call_._id)
2975 # Incremental evaluation.
2976 arg_env.bind_time_stamp_ptr(arg_names[position_index],
2977 status, call_._id)
2978 position_index += 1
2980 #---- Bind named block arguments from Call().
2981 for arg_name, expr in call_.named_args():
2982 arg_val, status = expr.interpret(env, storage)
2983 arg_env.bind_ptr(arg_name, arg_val, call_._id)
2984 # Incremental evaluation.
2985 arg_env.bind_time_stamp_ptr(arg_name, status, call_._id)
2988 #---- Set up evaluation environment.
2989 eval_env = arg_env.new_child(program, name = "run.blck")
2991 # ---- Finish program.
2992 # program.setup(block, eval_env, storage)
2993 program.setup(empty_parent(), eval_env, storage)
2995 #---------------- later interaction
2996 cross_reference_trees(storage, func_, newfunc_)
2997 #----------------
2999 # Incremental evaluation data.
3000 ctab = storage.ie_.clone_table()
3001 ctab[call_._id] = program
3002 ctab[(call_._id, 'envs')] = eval_env, arg_env
3003 is_new = True
3005 return is_new, program, eval_env, arg_env
3006 Call.call_function_prep = call_function_prep
def call_macro_prep(call_, env, storage, mac_):
    """ Prepare a Macro call: clone the body into an executable
    Program and provide environments.  Unlike call_function_prep, the
    caller's [env] is used directly for both argument and evaluation
    environments (macros run in the caller's scope).

    Returns (is_new, program, eval_env, arg_env).
    """
    # Prepare the block, evaluate arguments, and provide
    # environments.
    if mac_.nargs() != 0:
        raise InterpreterError, \
              ("Macros do not take arguments yet:\n"
               " %s\n"
               "called with \n"
               " %s" % (mac_.source_string(),
                        call_.source_string()))

    # Incremental evaluation check.
    if storage.ie_.has_clone( call_._id):
        ctab = storage.ie_.clone_table()
        program = ctab[call_._id]
        eval_env, arg_env = ctab[(call_._id, 'envs')]
        is_new = False

    else:
        #---- Turn block into executable.
        # Under incremental evaluation this copy must not be formed if
        # a prior version exists.
        newmac_ = mac_.block_copy(storage)
        program = Program(newmac_.raw_seq_expr())

        #---- Set up argument environment.
        arg_env = env

        #---- Set up evaluation environment.
        eval_env = env

        # ---- Finish program.
        # program.setup(block, eval_env, storage)
        program.setup(empty_parent(), eval_env, storage)

        #---------------- later interaction
        cross_reference_trees(storage, mac_, newmac_)
        #----------------

        # Incremental evaluation data.
        ctab = storage.ie_.clone_table()
        ctab[call_._id] = program
        ctab[(call_._id, 'envs')] = eval_env, arg_env
        is_new = True

    return is_new, program, eval_env, arg_env
Call.call_macro_prep = call_macro_prep
def interpret(self, env, storage):
    """ Interpret a 'return': never returns normally -- exits via
    Interpret_tail_call (for tail calls) or Interpret_return.
    """
    # A 'return' in dml (likely) requires many function exits here in
    # the interpreter.
    # Hence the exception-only exit.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    tree = self.deref(0)
    if isinstance(tree, Call):
        # Tail call.
        # -----------
        # The current env is only used by Call.interpret()
        # to get the function body and argument evaluation; the
        # function body is evaluated in separate Env()s, preserving
        # lexical scope.

        def finish(rv, ie_status):
            # Deferred bookkeeping, run by Call.interpret() once the
            # tail-call chain produces a value.
            ### these timestamps are obtained when?  They seem wrong
            ### under manual examination;
            ### As they are not used, make them invalid instead.
            # storage.ie_.set_timestamp( self._id, ie_unusable_time)
            # return

            # Incremental evaluation check.
            if storage.ie_.tree_is_older(self._id, ie_status):
                # Interpretation values.
                storage.id2tree(rv, self)
                storage.set_attributes(self._id,
                                       "interp_result", rv, "interp_env", env)

                # Incremental evaluation data.
                storage.ie_.touch(self._id)
                storage.ie_.set_timestamp( (self._id, 'value'), ie_status)

        _prog_l = [finish]              # ensure new list.
        raise Interpret_tail_call( (tree, env, _prog_l) )
    else:
        # Normal return.
        # -----------
        rv, ie_status = tree.interpret(env, storage)

        # Incremental evaluation check.
        if storage.ie_.tree_is_older(self._id, ie_status):
            # Interpretation values.
            storage.id2tree(rv, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv, "interp_env", env)

            # Incremental evaluation data.
            storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), ie_status)
            raise Interpret_return( (rv, ie_status) )

        else:
            raise Interpret_return( (
                storage.get_attribute(self._id, "interp_result"),
                storage.ie_.get_timestamp( (self._id, 'value') )
                ))
Return.interpret = interpret
def interpret(self, env, storage):
    """ Interpret member access a.b for l3 dicts, Env()s, and external
    Python objects.  Returns (value, ie_status).
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # -- Get a (of a.b)
    object, _ = self.deref(0).interpret(env, storage)

    # -- Get b (of a.b)
    # In a.b, b is only evaluated if it is not a Symbol().
    # This is done to avoid Symbol lookup in the regular
    # environments, as member access really has its own scoping
    # rules.
    second = self.deref(1)
    if isinstance(second, Symbol):
        mem_name = second.py_string()
    else:
        mem_name, _ = second.interpret(env, storage)
        if not isinstance(mem_name, StringType):
            raise InterpreterError, "In a.b, b is not a name: " + str(mem_name)

    # -- Get (a.b).
    if isinstance(object, DictType):
        binding = dict_ie_lookup_ptr( object, mem_name )
        if binding is None:
            raise InterpreterError, "No member '%s' found." % (mem_name)
        binding, ie_mem_status = binding

    elif isinstance(object, Env):
        # See also symbol.interpret.
        binding = object.ie_lookup_ptr( mem_name )
        if binding is None:
            raise InterpreterError, "No member '%s' found." % (mem_name)
        binding, ie_mem_status = binding

    else:
        # Just evaluate; let exceptions propagate as usual.  Attribute
        # evaluation in Python differs for objects, functions, etc., so let
        # Python do the work.
        # NOTE(review): eval() on an interpreted string is an injection
        # hazard if mem_name can contain arbitrary expressions; a
        # getattr(object, mem_name) would be safer -- confirm mem_name
        # is always a plain identifier before changing.
        binding = eval('object.' + mem_name)

        # Incremental evaluation: external attributes cannot be controlled here.
        # If they are functions, their timestamps are ignored by Call(); if they
        # are values, they must be assumed constant.
        #
        # Use external time for all member access.  This will access
        # external members repeatedly, so they MUST be constant.
        ie_mem_status = ie_external_time

        # Use external time for callable members only?
        #
        # Use self's time stamp.  This will cause pickle failures for
        # callable external types.
        # # ie_mem_status = storage.ie_.get_timestamp(self._id)
        # # if ie_mem_status in [None, ie_setup_time]:
        # #     ie_mem_status = ie_external_time

    # Incremental evaluation check.
    # Even for an unchanged a.b tree, the a.b value may have
    # changed.  Only the value's time stamp is propagated.
    def new_sym_new_val():
        # Interpretation values.
        storage.id2tree(binding, self)

        # Do not retain references to external objects.
        if ie_mem_status == ie_external_time:
            storage.set_attributes(self._id,
                                   "interp_result", "not_kept",
                                   "interp_env", env)
        else:
            storage.set_attributes(self._id,
                                   "interp_result", binding,
                                   "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(self._id)

        return binding, ie_mem_status

    if storage.ie_.is_setup_only(self._id):
        return new_sym_new_val()

    else:
        if storage.ie_.tree_is_older(self._id, ie_mem_status):
            return new_sym_new_val()
        else:
            return binding, ie_mem_status
Member.interpret = interpret
def interpret(self, env, storage):
    """ Interpret the Loop body in [env] and return the
    (value, timestamp) of the last expression.
    """
    # See also Program.interpret
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    seq_expr = self.deref(0)
    if len(seq_expr) == 0:
        raise InterpreterError, "Empty Loop"
    else:
        rv_stat_l = [(expr).interpret(env, storage) for expr in seq_expr]

        rv, last_stamp = rv_stat_l[-1]

        # Incremental evaluation check: self changed if any
        # subexpression produced a newer stamp.
        newest_stamp = reduce(storage.ie_.newest,
                              [status for _, status in rv_stat_l])
        if storage.ie_.tree_is_older(self._id, newest_stamp):
            # Interpretation values.
            storage.id2tree(rv, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv,
                                   "interp_env", env)
            # Incremental evaluation data.
            storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), last_stamp )

            return rv, last_stamp
        else:
            # ---- Nothing changed; reuse the cached result.
            return storage.get_attribute(self._id, "interp_result"), \
                   storage.ie_.get_timestamp( (self._id, 'value') )
Loop.interpret = interpret
def interpret(self, env, storage):
    """ Evaluate  'if !condition !true else !false ;  and return the
    (value, timestamp) pair of whichever branch was taken.
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Evaluate the condition, then only the selected branch.
    cond_value, _ = self.deref(0).interpret(env, storage)
    if cond_value:
        branch = self.deref(1)
    else:
        # A missing else-part holds aNone(), which evaluates to None,
        # so no special case is needed here.
        branch = self.deref(2)
    rv, ie_status = branch.interpret(env, storage)

    # Record interpretation values.
    storage.id2tree(rv, self)
    storage.set_attributes(self._id,
                           "interp_result", rv,
                           "interp_env", env)

    # Incremental evaluation data.  An If() always propagates its
    # branch's timestamp; the If() node itself is stamped only on
    # first interpretation.
    if storage.ie_.is_setup_only(self._id):
        storage.ie_.touch(self._id)

    return rv, ie_status
If.interpret = interpret
#*** set.interpret
def interpret(self, env, storage):
    """ Interpret an assignment: evaluate the rhs, then bind it to a
    single name or destructure it across a tuple of names.  Returns
    (value, ie_status).
    """
    # Also see Set.arg_names
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Get rhs value.
    value, ie_status = self.deref(1).interpret(env, storage)

    # name(s) to assign
    names = self.deref(0)

    #*** handle_tuples:
    def handle_tuples():
        # Full destructuring bind: one env binding per name, with
        # per-Symbol interpretation attributes and time stamps.
        name_list = names._primary[0]
        if len(name_list) != len(value):
            raise InterpreterError, "Tuple lengths don't match: " + \
                  str(name_list) + str(value)
        else:
            for ii in range(0, len(name_list)):
                nm = name_list[ii]
                # self._matcher.match_exp_str(nm, '!! name symbol'):
                if not self._matcher.match(nm,
                                           MarkerTyped(String('name'),
                                                       Symbol('symbol'))):
                    raise InterpreterError, \
                          "Set: Invalid argument type: " + str(nm)
                # Add binding to env.
                lhs_symb = self._matcher.get('name')
                env.bind_ptr( lhs_symb.as_index(), value[ii], self._id)
                storage.id2tree(value[ii], nm)

                # Bind the values to the Symbol()s. ### TEST
                storage.set_attributes(lhs_symb._id,
                                       "interp_result", value[ii],
                                       "interp_env", env)

                # Incremental evaluation data.
                storage.ie_.touch(lhs_symb._id)
                env.bind_time_stamp_ptr(lhs_symb.as_index(), ie_status,
                                        self._id)

            storage.set_attributes(self._id,
                                   "interp_result", value,
                                   "interp_env", env)

            # Incremental evaluation data.
            storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), ie_status )
            return value, ie_status
    #*** handle_single:
    def handle_single():
        #----- Single-name binding.
        # Get binding name.
        if not self._matcher.match(names,
                                   MarkerTyped(String('name'),
                                               Symbol('symbol'))):
            raise InterpreterError, "Set: Invalid first argument type: " + \
                  str(names)
        # Add binding to env.
        lhs_symb = self._matcher.get('name')
        env.bind_ptr( lhs_symb.as_index(), value, self._id)

        # Interpretation values.
        storage.id2tree(value, self)
        storage.set_attributes(self._id,
                               "interp_result", value,
                               "interp_env", env)

        # Bind the value to the Symbol().
        storage.set_attributes(lhs_symb._id,
                               "interp_result", value,
                               "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(lhs_symb._id)
        storage.ie_.touch(self._id)
        storage.ie_.set_timestamp( (self._id, 'value'), ie_status )
        env.bind_time_stamp_ptr(lhs_symb.as_index(), ie_status, self._id)
        return value, ie_status
    #*** ptr_single:
    def ptr_single():
        #----- Single-name binding.
        # Only refresh the name pointer (value unchanged).
        # Get binding name.
        if not self._matcher.match(names,
                                   MarkerTyped(String('name'),
                                               Symbol('symbol'))):
            raise InterpreterError, "Set: Invalid first argument type: " + \
                  str(names)
        # Add binding to env.
        lhs_symb = self._matcher.get('name')
        env.set_ptr( lhs_symb.as_index(), self._id )
    #*** ptr_tuples:
    def ptr_tuples():
        # Pointer-only refresh for a tuple of names (values unchanged).
        name_list = names._primary[0]
        if len(name_list) != len(value):
            raise InterpreterError, "Tuple lengths don't match: " + \
                  str(name_list) + str(value)
        else:
            for ii in range(0, len(name_list)):
                nm = name_list[ii]
                if not self._matcher.match(nm,
                                           MarkerTyped(String('name'),
                                                       Symbol('symbol'))):
                    raise InterpreterError, \
                          "Set: Invalid argument type: " + str(nm)
                # Add binding to env.
                lhs_symb = self._matcher.get('name')
                env.set_ptr( lhs_symb.as_index(), self._id )
    #*** body
    # Incremental evaluation check.
    if storage.ie_.tree_is_older(self._id, ie_status):
        #----- Destructuring binding for tuples.
        if isinstance(names, Tuple):
            if isinstance(value, (TupleType, ListType)):
                return handle_tuples()
            else:
                raise InterpreterError, \
                      "Expected tuple return value, got: " + str(value)
        else:
            return handle_single()

    else:
        # Update the name pointer unconditionally, to emulate the
        # effect of overwriting.
        if isinstance(names, Tuple):
            ptr_tuples()
        else:
            ptr_single()
        return storage.get_attribute(self._id, "interp_result"), \
               storage.ie_.get_timestamp( (self._id, 'value') )
Set.interpret = interpret
def interpret(self, env, storage):
    """ Interpret a Map block: evaluate the body in the Map's own
    (cached) child environment and return that environment as the
    value, with its newest timestamp.
    """
    # See also Program.interpret()
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Incremental evaluation check: reuse a previously created
    # evaluation environment when one exists.
    if storage.ie_.has_env_for(self._id):
        self._eval_env = storage.ie_.env_table()[self._id]

    else:
        # self._eval_env = Env(env.new_env_id(), env, self, storage)
        if self._binding_name != None:
            self._eval_env = env.new_child(
                self, name = self._binding_name.py_string())
        else:
            self._eval_env = env.new_child(self, name = "run.blck")

        # Incremental evaluation data.
        storage.ie_.env_table()[self._id] = self._eval_env

    # Evaluate body.
    seq_expr = self.deref(0)
    rv_stat_l = [(expr).interpret(self._eval_env, storage)
                 for expr in seq_expr]

    # If any bindings changed, self changed.
    if len(seq_expr) == 0:              # ?? use setup, previous time??
        ie_status = storage.ie_.time()
    elif len(seq_expr) == 1:
        _, ie_status = rv_stat_l[0]
    else:
        ie_status = reduce(storage.ie_.newest,
                           [status for _, status in rv_stat_l])

    # Incremental evaluation checks.
    if storage.ie_.tree_is_older(self._id, ie_status):
        # Return value: the environment itself, not its bindings.
        # # rv = self._eval_env._bindings
        rv = self._eval_env

        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv,
                               "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        return rv, ie_status

    else:
        return storage.get_attribute(self._id, "interp_result"), \
               storage.ie_.get_timestamp(self._id)
Map.interpret = interpret
def interpret(self, env, storage):
    """Evaluate a Subdir node.

    Like Map.interpret(), but the body is evaluated inside the node's
    on-disk directory: into_directory() / outof_directory() bracket
    the child evaluations.  Returns (value, ie_status); the value is
    the evaluation environment.

    See also Program.interpret().
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Incremental evaluation check.
    if storage.ie_.has_env_for(self._id):
        self._eval_env = storage.ie_.env_table()[self._id]
    else:
        # self._eval_env = Env(env.new_env_id(), env, self, storage)
        if self._binding_name != None:
            self._eval_env = env.new_child(
                self, name = self._binding_name.py_string())
        else:
            self._eval_env = env.new_child(self, name = "run.blck")

        # Incremental evaluation data.
        storage.ie_.env_table()[self._id] = self._eval_env

    # Form subdirectory.
    subdir = self._eval_env.into_directory()

    # Evaluate body.  try/finally guarantees the directory is left on
    # both the success and the exception path (this replaces the
    # earlier duplicated bare-except re-raise plus fall-through call).
    seq_expr = self.deref(0)
    try:
        rv_stat_l = [(expr).interpret(self._eval_env, storage)
                     for expr in seq_expr]
    finally:
        self._eval_env.outof_directory()

    # If any bindings changed, self changed.
    if len(seq_expr) == 0:          # ?? use setup, previous time??
        ie_status = storage.ie_.time()
    elif len(seq_expr) == 1:
        _, ie_status = rv_stat_l[0]
    else:
        ie_status = reduce(storage.ie_.newest,
                           [status for _, status in rv_stat_l])

    # Incremental evaluation checks.
    if storage.ie_.tree_is_older(self._id, ie_status):
        # Return value.
        rv = self._eval_env

        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv,
                               "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        return rv, ie_status

    else:
        return storage.get_attribute(self._id, "interp_result"), \
               storage.ie_.get_timestamp(self._id)
Subdir.interpret = interpret
def l3_dirname(self):
    """Return the directory name recorded on this node's eval env."""
    eval_env = self._eval_env
    return eval_env.l3_dirname()
Subdir.l3_dirname = l3_dirname
def t_l_interpret(self, env, storage, converter = list):
    """Tuple and list interpretation common parts.

    Interpret every child, combine the child values with `converter`
    (list or tuple), and cache/reuse the result under the incremental-
    evaluation protocol.  Returns (value, ie_status).

    Also see Tuple.interpret, aList.interpret.
    """
    val_status_l = [ child.interpret(env, storage)
                     for child in self.deref(0) ]

    # Empty list
    if val_status_l == []:
        rv_l = converter([])
        stat_l = []

        # Incremental evaluation check.
        if storage.ie_.is_setup_only(self._id):
            # Interpretation values.
            storage.id2tree(rv_l, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv_l,
                                   "interp_env", env)
            # Incremental evaluation data.
            ie_status = storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), ie_status )
            return rv_l, ie_status

        else:
            # For Immediates(), the tree and value are one --
            # the tree_is_older() test is replaced by the previous
            # if cases
            return (storage.get_attribute(self._id, "interp_result"),
                    storage.ie_.get_timestamp((self._id, 'value')) )

    # Non-empty list.
    else:
        rv_l = converter([rv for rv, stat in val_status_l])
        stat_l = [stat for rv, stat in val_status_l]

        # Find "most needy" status and propagate that.
        ie_status = reduce(storage.ie_.newest, stat_l)

        # Incremental evaluation check.
        # # print "t_l_interpret"
        if (storage.ie_.tree_is_older(self._id, ie_status)):
            # Interpretation values.
            storage.id2tree(rv_l, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv_l,
                                   "interp_env", env)

            # Incremental evaluation data.
            storage.ie_.touch(self._id)
            storage.ie_.set_timestamp( (self._id, 'value'), ie_status )
            return rv_l, ie_status

        else:
            # Unchanged: return the cached value and its timestamp.
            return storage.get_attribute(self._id, "interp_result"), \
                   storage.ie_.get_timestamp( (self._id, 'value') )
def interpret(self, env, storage):
    """Evaluate a List node via the shared tuple/list interpreter."""
    return t_l_interpret(self, env, storage, list)
List.interpret = interpret
def interpret(self, env, storage):
    """Evaluate a Tuple node via the shared tuple/list interpreter."""
    return t_l_interpret(self, env, storage, tuple)
Tuple.interpret = interpret
3593 #** Immediate types
def interpret(self, env, storage):
    """Evaluate an Immediate (literal) node.

    The value is the node's stored primary; returns (value, ie_status).
    Shared by Immediate, aNone and Native (see assignments below).
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    rv = self._primary[0]

    def new_sym_new_val():
        # First-time evaluation: record value and environment.
        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv,
                               "interp_env", env)
        # Incremental evaluation data.
        ie_status = storage.ie_.touch(self._id)
        return rv, ie_status

    # Incremental evaluation check.
    if storage.ie_.is_setup_only(self._id):
        return new_sym_new_val()

    else:
        # For Immediates(), the tree and value are one --
        # the tree_is_older() test is replaced by the previous
        # if cases
        return ( storage.get_attribute(self._id, "interp_result"),
                 storage.ie_.get_timestamp(self._id) )
Immediate.interpret = interpret
aNone.interpret = interpret
Native.interpret = interpret
3624 # modified tree check
3625 # if storage.ie_.is_replacement(self._id):
3626 # o_id, o_status = storage.ie_.original_for(self._id)
3627 # if storage.load(o_id).eql(self):
3628 # # This Immediate() is new, but its value is unchanged.
3629 # # Use the original's meta data.
3630 # storage.ie_.set_timestamp( self._id, o_status )
3631 # return (storage.get_attribute(o_id, "interp_result"),
3632 # o_status)
3633 # else:
3634 # bind, status = new_sym_new_val()
3635 # storage.ie_.not_replacement(self._id)
3636 # return bind, status
def interpret(self, env, storage):
    """Evaluate a Symbol: look up its binding in `env`.

    Returns (binding, ie_status); raises UnboundSymbol when no binding
    is found.  Also see Set.interpret().
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Incremental evaluation checks
    #
    #                  symbol timestamp
    #     replaced    |    setup      |   value
    #                                      binding timestamp
    #     any         |    any        |   value
    #     ---------------------------------
    #     interpret,  |  interpret,   |   age
    #     stamp       |  stamp        |   decides

    binding = env.ie_lookup_ptr( self.as_index() )
    if binding is None:
        # Unbound symbols may return themselves in the future...
        raise UnboundSymbol("No binding found for: " + self.as_index())
    binding, ie_status = binding

    def new_sym_new_val():
        # Record the (new) binding for this symbol occurrence.
        # Also see BinaryOper, UnaryOper.

        # Interpretation values.
        storage.id2tree(binding, self)

        # Do not retain references to external objects.
        if ie_status == ie_external_time:
            storage.set_attributes(self._id,
                                   "interp_result", "not_kept",
                                   "interp_env", env)
        else:
            storage.set_attributes(self._id,
                                   "interp_result", binding,
                                   "interp_env", env)

        # Incremental evaluation data.
        storage.ie_.touch(self._id)

        return binding, ie_status

    if storage.ie_.is_setup_only(self._id):
        return new_sym_new_val()

    else:
        if storage.ie_.tree_is_older(self._id, ie_status):
            return new_sym_new_val()
        else:
            return binding, ie_status

Symbol.interpret = interpret
3694 # modified tree check
3695 # if storage.ie_.is_replacement(self._id):
3696 # o_id, o_stamp = storage.ie_.original_for(self._id)
3697 # if storage.load(o_id).eql(self):
3698 # # This symbol is new, but its value is unchanged.
3699 # # ---------------------------------
3700 # storage.ie_.touch(self._id)
3701 # # ?? Symbol is newer than binding -- see Set.interpret()
3702 # # and below.
3703 # # This works correctly with tree_is_older().
3704 # storage.ie_.set_timestamp( (self._id, 'value'), o_stamp )
3705 # return storage.get_attribute(o_id, "interp_result"), o_stamp
3707 # else:
3708 # bind, status = new_sym_new_val()
3709 # storage.ie_.not_replacement(self._id)
3710 # return bind, status
def interpret(self, env, storage):
    """Evaluate a statement sequence (aList) in `env`.

    Interprets every expression in order and returns the last
    expression's (value, timestamp); an empty sequence behaves like
    aNone.interpret().  See also Program.interpret().
    """
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)

    # Return last expr value.
    seq_expr = self
    if len(seq_expr) == 0:

        # Imitate aNone.interpret()

        rv = None
        def new_sym_new_val():
            # Interpretation values.
            storage.id2tree(rv, self)
            storage.set_attributes(self._id,
                                   "interp_result", rv,
                                   "interp_env", env)
            # Incremental evaluation data.
            ie_status = storage.ie_.touch(self._id)
            return rv, ie_status

        # Incremental evaluation check.
        if storage.ie_.is_setup_only(self._id):
            return new_sym_new_val()
        else:
            return ( storage.get_attribute(self._id, "interp_result"),
                     storage.ie_.get_timestamp(self._id) )

    # List with content

    rv_stat_l = [(expr).interpret(env, storage) for expr in seq_expr]

    # This causes infinite recursions without __deepcopy__; but
    # __deepcopy__ of a list subclass works oddly.
    # # self._eval_env = env
    rv, last_stamp = rv_stat_l[-1]

    # Incremental evaluation check.
    newest_stamp = reduce(storage.ie_.newest,
                          [status for _, status in rv_stat_l])
    if storage.ie_.tree_is_older(self._id, newest_stamp):
        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv,
                               "interp_env", env)
        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        storage.ie_.set_timestamp( (self._id, 'value'), last_stamp )

        return rv, last_stamp
    else:
        # ---- Nothing changed.
        return storage.get_attribute(self._id, "interp_result"), \
               storage.ie_.get_timestamp( (self._id, 'value') )
aList.interpret = interpret
3774 #* post-execution data viewing
3775 # Support for retrieving "interesting" data.
3776 #** environments
def directory(self, dyn_tree):
    """Convert get_dynamic_subtrees()'s output (or compatible) into an
    l3 expression for display.

    todo: FIXME: add a real heading/content element, not "foo"(bar)
    """
    heading, entries = dyn_tree
    converted = []
    for entry in entries:
        if isinstance(entry, StringType):
            converted.append(Symbol(entry))
        elif isinstance(entry, TupleType):
            # Nested directory: recurse.
            converted.append(directory(self, entry))
    result = List(aList(converted))
    result.set_label(String(heading))
    return result
Program.directory = directory
def directory_l3(self):
    """Produce a post-run directory containing only l3 named data.

    This version works only for the topmost environment.
    """
    # Get list of desirable names.
    (_, tree_entries) = self._block_env.get_tree()
    top_names = [entry for entry in tree_entries
                 if isinstance(entry, StringType)]

    # Compare against names present after the run; keep subtrees and
    # names that were declared at top level.
    (nm, dyn_entries) = self._eval_env.get_dynamic_subtrees()
    kept = [entry for entry in dyn_entries
            if isinstance(entry, TupleType) or (entry in top_names)]

    # Finish.
    return self.directory( (nm, kept) )
Program.directory_l3 = directory_l3
3817 #** python -> l3 conversion
def val2ast(val, file_contents = False, visited = None):
    """
    Produce a raw astType from a Python value `val` (including list
    and dict).  This is analogous to the parser; the astType is not
    .setup().

    Unrecognized types are wrapped as a Native().

    l3 types are returned unchanged.

    Recursion is detected and recursive structures are returned as
    `recurse_to_ID` strings.

    ARGS:
        file_contents   If True, return file reference instead of name.
    """
    if visited is None:
        visited = {}

    # Already an l3 type?
    if isinstance(val, (astType, aNone, aList, Native)):
        return val

    elif isinstance(val, Env):
        def filter_env():
            """ Return some bindings in this environment as a l3 Map.
            Only bindings of user interest are collected.
            """
            # Children.
            for child in val._children:
                if child._name in ["skel.arg", "skel.blck", "anonymous"]:
                    continue
                yield (child._name, "sub-env")

            # Local entries: only (name, 'ptr') keys are of interest.
            for ky, vl in val.all_bindings().iteritems():
                if isinstance(ky, TupleType):
                    if len(ky) == 2 and ky[1] == 'ptr':
                        name, _ = ky
                        yield (name, val.lookup_ptr_1(name))
                else:
                    continue

        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1

        return Map(aList([Set(val2ast(key, visited = visited,
                                      file_contents = file_contents),
                              val2ast(entry, visited = visited,
                                      file_contents = file_contents))
                          for key, entry in filter_env()]))

    # A known python type?
    elif isinstance(val, (IntType, LongType)): return Int(int(val))

    elif isinstance(val, FloatType): return Float(val)

    elif isinstance(val, StringType):
        # NOTE(review): both branches produce a FilepathString; the
        # `file_contents` flag only controls the Native() wrapping --
        # confirm this matches the docstring's intent.
        if file_contents:
            return Native(FilepathString(val))
        else:
            return FilepathString(val)

    elif isinstance(val, ListType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1

        return List(aList([val2ast(entry, visited = visited,
                                   file_contents = file_contents)
                           for entry in val]))

    elif isinstance(val, TupleType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1

        return Tuple(aList([val2ast(entry, visited = visited,
                                    file_contents = file_contents)
                            for entry in val]))

    elif isinstance(val, DictType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1

        return Map(aList([ Set(val2ast(key, visited = visited,
                                       file_contents = file_contents),
                               val2ast(entry, visited = visited,
                                       file_contents = file_contents))
                           for key, entry in val.iteritems()]))
    # All others.
    else:
        return Native(val)
3922 #** value retrieval
def get_values_list(self, w_):
    '''Form and return ((id, value) list).

    The `id` is the expression producing `value`; for multi-valued
    expressions, the clone id is used.
    '''
    storage = w_.state_.storage
    sid = self._id

    # Dynamic id(s).
    clone_l = storage.get_attribute(sid, "interp_clone")
    if not clone_l:
        # Toplevel/final id.
        return [ (sid, storage.get_attribute(sid, 'interp_result')) ]

    # todo: unfiltered, this list could be huge. Limit size? Or provide
    # iterator instead.
    values = []
    for clone_id in clone_l:
        values.extend(storage.load(clone_id).get_values_list(w_))
    return values
astType.get_values_list = get_values_list
aList.get_values_list = get_values_list
3947 #** dirpath (disk location)
def dirpath(self, w_):
    """Return this node's disk location: the '/'-joined directory
    names of all enclosing Subdir parents, outermost first."""
    tree_worker = TreeWork(w_.state_.storage)
    parts = [parent.l3_dirname()
             for parent in tree_worker.find_all_parents(self, Subdir)]
    parts.reverse()
    return "/".join(parts)
astType.dirpath = dirpath
aList.dirpath = dirpath
3958 #** emphasis (special item properties)
3959 # (l_emph : string)
3960 # Emphasis may be used to distinguish special items.
3961 # l_emph should be a logical description, e.g. "filelist"
3962 # This member is only present when used.
3963 # # self.l_emph = None
def set_emphasis(self, emph):
    '''Set emphasis for this node to `emph`.

    Emphasis may be used to distinguish special items; emph should be
    a logical description, e.g. "filelist".

    Any emphasis added here must have a corresponding entry in the
    deco.emph_color resource.
    '''
    self.l_emph = emph
Native.set_emphasis = set_emphasis
astType.set_emphasis = set_emphasis
aList.set_emphasis = set_emphasis
aNone.set_emphasis = set_emphasis
def get_emphasis(self):
    """Return this node's emphasis string, or None when unset.

    The attribute lives only in the instance dict (it is attached
    lazily by set_emphasis), hence the __dict__ lookup.
    """
    return self.__dict__.get('l_emph')
Native.get_emphasis = get_emphasis
astType.get_emphasis = get_emphasis
aList.get_emphasis = get_emphasis
aNone.get_emphasis = get_emphasis
3987 #* tree attributes
3988 # Mechanism for adding generic attributes to astType trees, without
3989 # polluting the instance's dict, and with lexically obvious syntax:
3990 # foo.setthe(size = 10)
3991 # [ introduced after set_emphasis / get_emphasis]
3993 # Some attributes must be set before .setup(), including those that
3994 # need to be attached at tree-building time.
def setthe(self, **dct):
    """Attach key = value pairs to self's generic attribute table and
    return self (chainable).  Using None as a value is meaningless,
    since getthe() returns None for missing keys."""
    for key, value in dct.items():
        self._attributes[key] = value
    return self
astType.setthe = setthe
aList.setthe = setthe
aNone.setthe = setthe
Native.setthe = setthe
def getthe(self, key):
    """Return the generic attribute stored under `key`, or None."""
    return self._attributes.get(key)
astType.getthe = getthe
aList.getthe = getthe
aNone.getthe = getthe
Native.getthe = getthe
4016 #* Data flow -- .data_dag()
class DataDagError(exceptions.Exception):
    """Raised for errors while building the data-flow dag."""
    def __init__(self, args=None):
        self.args = args
class ReturnBranch:
    """No-value indicator for back-propagation of Return() effect.

    Tracks the graph id(s) of the Return() node(s) that cut evaluation
    short; merge() accumulates them when branches join.
    """
    def __init__(self, gid):
        self._from_gid = [gid]

    def merge(self, other):
        # Combine targets from both branches; returns self (chainable).
        self._from_gid = self._from_gid + other._from_gid
        return self
4036 #** Functions for dicts of lists
4037 # key -> val list
4039 # push val onto the end of the dict's key entry.
def dl_push(dict, key, val):
    """Push `val` onto the end of the dict's `key` entry (a list),
    creating the list on first use; return the dict."""
    dict.setdefault(key, []).append(val)
    return dict
4047 # return the most recently pushed value.
4048 def dl_peek(dict, key):
4049 if dict.has_key(key):
4050 return dict[key][-1]
4051 else:
4052 raise Error, "No values."
def dl_items(dict_):
    """Yield (key, value) pairs, expanding each key's value list."""
    for key, value_list in dict_.items():
        for value in value_list:
            yield key, value
4060 #** misc fns.
def equal_leading(list_, tuple_):
    """Return 1 if the leading entries of list_ and tuple_ are equal
    (pairwise, up to the shorter of the two), else 0."""
    return int(all(left == right
                   for left, right in zip(list_, tuple_)))
4068 #** Dag handling class
class astDag:
    # Data-flow dag container.  Following this file's convention, the
    # methods are defined below at top level and attached by assignment
    # (astDag.foo = foo).
    pass
def __init__(self, name, tree_id, starting_id):
    """Initialize an empty dag named `name`, rooted at tree node
    `tree_id`; internal (graph) ids are allocated starting above
    `starting_id`.

    tree_ids may map to several graph_ids; graph_id's are unique.
    """
    self._new_id = starting_id          # make ids easier to (string) search

    # tree_id's are usually the int (or long) associated with a tree
    # node.  Also possible:
    #     tree_id ::= int | (ident, name, real_tree_id)
    # where ident is a logical identifier, e.g. "subgraph", and
    # real_tree_id is another int.
    self._graphid_2_treeid = {}         # graph_id -> tree_id
    self._treeid_2_graphid= {}          # tree_id -> (graph_id list)
    self._nodes = {}                    # graph_id -> (attribute list)
    # where
    #     attribute ::= <(key, value), >*

    # # self._edges = {}                # graph_id -> graph_id list

    # _edges structure
    # [logical structure]
    #     (node -> node) tuple -> attributes (via dict)
    # [physical structure]
    #     (graph_id, graph_id) -> [ (key, val), * ]
    self._edges = {}
    self._new_id += 1
    self._graph_id = self._new_id

    # By including self in the _graph_stack, every node belongs to
    # a subgraph.
    #     graph ::= [ graph_ident (, node)*]
    #     graph_ident ::= ( graph_id, name, tree_id )
    #     node ::= id | graph
    self._graph_stack = []
    self._graph_now = [(self._graph_id, name, tree_id)
                       ]                # The above nested graph type.
    # graph_ids of the enclosing subgraphs
    self._subgraph_stack = []           # graph_id list
    self._subg_stack_of = {}            # graph_id -> graph_id tuple

    # # self._subgraph_nodes = {}       # (sub)graph_id -> graph_id list

    self._call_stack = []               # For .data_dag() use

    self._unique_names = {}             # nodes unique for a given external name

    # Function() / Return() interaction ids
    self._block_stack = []              # graph_id list

astDag.__init__ = __init__
4124 # s u b g r a p h s
4126 # The subgraph is only a logical grouping; nodes in subgraphs are
4127 # still global.
4129 # These subgraphs are tracked using explicit start/end calls to get
4130 # internal context switch. Internally, just use stacks.
def start_subgraph(self, name, tree_id, attributes = []):
    """Open a new (nested) subgraph named `name` for tree node
    `tree_id`; return its fresh graph id.

    The subgraph is only a logical grouping; nodes in subgraphs are
    still global.  Must be balanced by a later end_subgraph().
    """
    # graph stack.
    self._graph_stack.append(self._graph_now)
    self._graph_now = []

    # content.
    self._new_id += 1
    self._graph_now.append( (self._new_id, name, tree_id) )
    if 0:
        # Physical node (disabled: subgraphs are not real nodes).
        self._track(self._new_id, ("subgraph", name, tree_id),
                    attributes)
    self._subgraph_stack.append(self._new_id)
    return self._new_id
astDag.start_subgraph = start_subgraph
def end_subgraph(self):
    """Close the current subgraph and pop back to the enclosing one."""
    enclosing = self._graph_stack.pop()
    enclosing.append(self._graph_now)
    self._graph_now = enclosing
    del self._subgraph_stack[-1]
astDag.end_subgraph = end_subgraph
4155 def get_graph_id(self, tree_id):
4156 if self._treeid_2_graphid.has_key(tree_id):
4157 all = self._treeid_2_graphid.get(tree_id)
4158 if len(all) > 1:
4159 print "warning: multiple graph ids. Using most recent."
4160 return dl_peek(self._treeid_2_graphid, tree_id)
4161 else:
4162 return None
4163 astDag.get_graph_id = get_graph_id
def _track(self, graph_id, id, attributes):
    """Register a new node under `graph_id` and record the association
    between `graph_id` and the incoming (tree) `id`."""
    self._graphid_2_treeid[graph_id] = id
    self._nodes[graph_id] = attributes
    dl_push(self._treeid_2_graphid, id, graph_id)

    # Subgraph tracking: the node belongs to the currently open graph.
    self._graph_now.append(graph_id)
    self._subg_stack_of[graph_id] = tuple(self._subgraph_stack)  # immutable
astDag._track = _track
def add_node(self, id, attributes = []):
    """Create a node for external identifier `id` and return the fresh
    internal (graph) identifier.  A new node is made on every call."""
    self._new_id = self._new_id + 1
    graph_id = self._new_id
    self._track(graph_id, id, attributes)
    return graph_id
astDag.add_node = add_node
def add_unique_node(self, name, attributes = []):
    """For a given `name`, always return the same graph_id, creating
    the node on first use.  `name` is both the external and internal
    identifier."""
    try:
        return self._unique_names[name]
    except KeyError:
        graph_id = self.add_node(name, attributes)
        self._unique_names[name] = graph_id
        return graph_id
astDag.add_unique_node = add_unique_node
def add_locally_unique_node(self, tree_id, attributes = []):
    """Within the current subgraph, return the same graph_id for a
    given `tree_id`; create a new node otherwise."""
    graph_id = self.get_graph_id(tree_id)
    if graph_id is None:
        return self.add_node(tree_id, attributes)
    # Reuse only when the existing node lives in this or an enclosing
    # graph (its subgraph stack is a prefix of the current one).
    if equal_leading(self._subgraph_stack,
                     self._subg_stack_of[graph_id]):
        return graph_id
    return self.add_node(tree_id, attributes)
astDag.add_locally_unique_node = add_locally_unique_node
def add_edge(self, id1, id2, attributes = []):
    """Add an edge id1 -> id2 (usually child -> parent).

    Both endpoints must already exist as nodes; raises DataDagError
    otherwise.  Returns None.
    """
    nodes = self._nodes
    if not (nodes.has_key(id1) and nodes.has_key(id2)):
        raise DataDagError("Edges must use existing nodes.")
    self._edges[(id1, id2)] = attributes
    return None
astDag.add_edge = add_edge
def sanity_check(self):
    """Verify that start_subgraph()/end_subgraph() calls balanced."""
    if self._graph_stack:
        raise DataDagError("Unbalanced start_subgraph/end_subgraph() use.")
astDag.sanity_check = sanity_check
4228 def dump_dot(self, file):
4229 # Header.
4230 ##labeling
4231 file.write("""
4232 /* -*- c -*-
4233 Generated by astDag.dump_dot() */
4234 digraph graph0 {
4235 page = "8.5,11.0"; /* size of single physical page */
4236 size="7.5,10.0"; /* graph size */
4237 /* rotate=90; */
4238 ratio=fill;
4239 /* rankdir=LR; */
4240 fontpath="%(SPXROOT)s/l3gui/fonts";
4241 node [shape=box,fontname="Courier", fontsize=12,
4242 width="0.1cm",height="0.1cm", /* snug fit around labels */
4244 edge [fontname="Courier", fontsize=12] ;
4245 edge [arrowsize=0.71];
4246 fontsize=12;
4247 fontname="Courier";
4249 """ % os.environ)
4251 subgraph_color = ['white', 'beige']
4253 def dump_subgraph(graph_now, use_beige):
4254 # Use graph_now to put nodes into the appropriate subgraphs.
4255 (gid, name, _) = graph_now[0]
4257 if " " in name:
4258 raise Exception, """dump_dot: Received name containing spaces.
4260 Internal error.\n""" % name
4262 file.write(""" /* Subgraph header gid=%d */
4263 subgraph cluster_%s_%d {
4264 label = "%s";
4265 bgcolor= %s;
4266 """ % (gid,
4267 name, gid,
4268 name,
4269 subgraph_color[use_beige]))
4271 # When using the block itself as return value.
4272 # file.write(""" /* Subgraph id, to provide a real node for dot's use. */
4273 # %d [style=invis];\n """ % (gid))
4275 # Subgraph nodes
4276 for gnode in graph_now[1:]:
4277 if isinstance(gnode, ListType):
4278 dump_subgraph(gnode, not use_beige)
4279 else:
4280 file.write("%s [" % gnode) # Node.
4281 for key, val in self._nodes[gnode]:
4282 file.write('%s = "%s",' % (key, val))
4283 file.write("];\n")
4285 # Subgraph trailer.
4286 file.write(""" }\n """)
4287 dump_subgraph(self._graph_now, 0)
4289 # Use _edges to connect nodes.
4290 for (src, dest), att_l in self._edges.items():
4291 file.write(""" %d -> %d [""" % (src, dest))
4292 for key, val in att_l:
4293 file.write('%s = "%s",' % (key, val))
4294 file.write("""];\n""" )
4295 # Trailer.
4296 file.write("}\n")
4297 file.flush()
4299 return file
4300 astDag.dump_dot = dump_dot
4303 def dump_dot_colored(self, file):
4304 # Use the graph_ids as colors.
4305 # Header.
4306 ##labeling
4307 file.write("""
4308 /* -*- c -*-
4309 Generated by astDag.dump_dot() */
4310 digraph graph0 {
4311 truecolor=1; /* needed for identification */
4312 page = "8.5,11.0"; /* size of single physical page */
4313 size="7.5,10.0"; /* graph size */
4314 /* rotate=90; */
4315 ratio=fill;
4316 /* rankdir=LR; */
4317 fontpath="%(SPXROOT)s/l3gui/fonts";
4318 node [shape=box,fontname="Courier", fontsize=12,
4319 width="0.1cm",height="0.1cm", /* snug fit around labels */
4321 edge [fontname="Courier", fontsize=12] ;
4322 edge [arrowsize=0.71];
4323 fontsize=12;
4324 fontname="Courier";
4326 """ % os.environ)
4328 def dump_subgraph(graph_now):
4329 # Use _graph_now to put nodes into the appropriate subgraphs.
4330 (gid, name, _) = graph_now[0]
4332 if " " in name:
4333 raise Exception, \
4334 """dump_dot_colored: Received name containing spaces.
4336 Internal error.\n""" % name
4338 file.write(""" /* Subgraph header gid=%d */
4339 subgraph cluster_%s_%d {
4340 label = "%s";
4341 bgcolor= "#%.6x";
4342 """ % (gid, #"
4343 name, gid,
4344 name,
4345 gid))
4346 # coloring
4348 # When using the block itself as return value.
4349 # file.write(""" /* Subgraph id, to provide a real node for dot's use. */
4350 # %d [style=invis];\n """ % (gid))
4352 # Subgraph nodes
4353 for gnode in graph_now[1:]:
4354 if isinstance(gnode, ListType):
4355 dump_subgraph(gnode)
4356 else:
4357 file.write("%s [" % gnode) # Node.
4358 for key, val in self._nodes[gnode]:
4359 file.write('%s = "%s",' % (key, val))
4360 # coloring
4361 file.write('style=filled, fillcolor= "#%.6x",' % gnode +
4362 'fontcolor="#%.6x"' % gnode);
4363 file.write("];\n")
4365 # Subgraph trailer.
4366 file.write(""" }\n """)
4367 dump_subgraph(self._graph_now)
4369 # Use _edges to connect nodes.
4370 for (src, dest), att_l in self._edges.items():
4371 file.write(""" %d -> %d [""" % (src, dest))
4372 for key, val in att_l:
4373 file.write('%s = "%s",' % (key, val))
4374 # coloring
4375 file.write('color= "#%.6x",' % (src*dest,));
4376 file.write("""];\n""" )
4377 # Trailer.
4378 file.write("}\n")
4379 file.flush()
4381 return file
4382 astDag.dump_dot_colored = dump_dot_colored
4385 #** Main external entry points.
4387 #*** Form initial data_dag, containing ALL nodes.
def data_dag_start(self, name, storage, starting_id):
    """Build and return the initial data-flow dag for this Program,
    containing ALL nodes.

    name:        name for the output graph
    starting_id: starting id for the output graph nodes

    Note:
        Run only *before* interpretation --
        not very flexible wrt. incremental evaluation.
    """
    dagraph = astDag(name, self._id, starting_id)
    self.data_dag(self._block_env, dagraph, storage)
    dagraph.sanity_check()
    return dagraph
Program.data_dag_start = data_dag_start
4401 #*** Form viewer data_dag, containing only names and subgraphs.
def get_reduced_dag(self):
    """Return a deep copy of this dag reduced to named nodes and the
    edges between them (see _get_reduced_dag), with the nested
    subgraph structure pruned down to the surviving nodes."""
    reduced = deepcopy(self)

    nodes, edges = reduced._get_reduced_dag()

    reduced._nodes = nodes
    reduced._edges = edges

    def prune_graph(G):
        kept = []
        for el in G:
            if isinstance(el, TupleType):       # graph_ident
                kept.append(el)

            elif isinstance(el, ListType):      # (sub)graph
                kept.append(prune_graph(el))

            elif isinstance(el, IntType):       # node
                # Keep only nodes that survived the reduction.
                if nodes.has_key(el):
                    kept.append(el)
            else:
                raise DataDagError("Invalid subgraph entry.")
        return kept

    reduced._graph_now = prune_graph(reduced._graph_now)

    # Sufficient for dump_dot; others?
    return reduced
astDag.get_reduced_dag = get_reduced_dag
4434 #** refiner
def _get_reduced_dag(self):
    """Given the full data flow dag, find those nodes and edges
    involving actual bound names.

    This prunes function calls and Immediate()s.  Returns
    (relevant_nodes, relevant_edges) in the same formats as
    self._nodes and self._edges.
    """
    # Find labeled node connections: a node is "named" unless its
    # label attribute is the empty string.
    named_nodes = [key
                   for key, attr_l in self._nodes.iteritems()
                   if not (('label', '') in attr_l)]

    # Get edges in nested dict format.
    #     edges = { src : {dst1 : , dst2 : , }, ...}
    edges = {}
    for s in self._nodes.iterkeys():    # cover nodes w/o outgoing edges
        edges[s] = {}
    for s, d in self._edges.iterkeys():
        edges[s][d] = 1

    # Find all nodes connected to the start_node node by a path not crossing
    # other named nodes.
    def indirect_to(edges, start_node, distance, neighbors_of):
        # Expand edges[start_node] with all nodes reachable from
        # neighbors_of without crossing another named node.
        # Skip leaves.
        if edges.has_key(neighbors_of):
            for neighbor in edges[neighbors_of].keys():
                edges[start_node][neighbor] = distance
                # Paths that require CROSSING of a named_node are NOT wanted.
                if neighbor in named_nodes:
                    continue
                else:
                    indirect_to(edges, start_node, distance + 1, neighbor)

    all_edges = deepcopy(edges)
    for start in named_nodes:
        indirect_to(all_edges, start, 1, start)

    # Get only name -> name edges.
    relevant_edges = {}
    for start in named_nodes:
        for dest in named_nodes:
            if start == dest: continue
            if all_edges[start].has_key(dest):
                relevant_edges[(start, dest)] = []

    # Use self._nodes format.
    relevant_nodes = dict([(ii, self._nodes[ii]) for ii in named_nodes])

    return relevant_nodes, relevant_edges
astDag._get_reduced_dag = _get_reduced_dag
4487 # Notes
4488 # ================
4489 # This grouping is by function, not class.
4490 # Traversal is normal order, left-to-right.
4491 # Relevant ids are returned by children; edges are formed by parents.
4493 # valid graph_id s are None and the return values from .new_subgraph()
4494 # and .add_*node()
4496 # For at least Symbol(), Function() and Call(), single-type
4497 # dispatch (on class) is inadequate for retrieving nested information.
4498 # Hence, the nested if's, with recursive calls, below.
4500 # These effectively implement
4501 # multiple passes (one per nesting level). This
4502 # - allows viewing of increasingly more detailed graphs
4503 # - simplifies the code
4504 # - separates the block argument port handling (for the drawn graph)
4507 #** data_dag() member, nested types
def data_dag(self, env, dagraph, storage):
    """Abstract: concrete node types must provide their own."""
    raise InterfaceOnly("%s has no .data_dag() member" % (self,))
Nested.data_dag = data_dag
4513 #*** introducing their own Env()s
def data_dag(self, env, dagraph, storage):
    """Build data-flow dag entries for a whole Program by walking its
    body in the program's block environment."""
    sub_env = self._block_env
    seq_expr = self.deref(0)
    if len(seq_expr) == 0:
        raise DataDagError("Empty program")
    for expr in seq_expr:
        rv = expr.data_dag(sub_env, dagraph, storage)
        if isinstance(rv, ReturnBranch):        # Rest is unreachable.
            break
    return rv
Program.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """A function makes no data contribution unless executed; every
    distinct invocation provides its own expansion."""
    return None
Function.data_dag = data_dag

def data_dag(self, env, dagraph, storage):
    """An Inline makes no data contribution until executed; it is not
    parsed, so only examination of the dictionary can show what it
    contributed, and there is no simple way to know what it read."""
    return None
Inline.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Draw a Map()'s contents as a named subgraph.

    Statements are expanded in the map's own _block_env; an "ENV-<id>"
    text node marks the environment.  Returns that node's graph id.
    """
    seq_expr = self.deref(0)
    sub_env = self._block_env

    # Start subgraph.
    if self._binding_name != None:
        sub_graph_id = dagraph.start_subgraph(
            self._binding_name.py_string(), self._id)
    else:
        sub_graph_id = dagraph.start_subgraph("anonymous_map", self._id)

    # Fill subgraph.
    for expr in seq_expr:
        rv = expr.data_dag(sub_env, dagraph, storage)
        if isinstance(rv, ReturnBranch):        # Rest is unreachable.
            break

    # Environment marker node.  (Fix: the call's closing brackets were
    # missing, leaving the statement unterminated.)
    rv = dagraph.add_locally_unique_node(
        self._id,
        attributes = [('label', "ENV-%d" % self._id),
                      ('shape', "text"),
                      ('color', 'black'),
                      # ('fillcolor', 'yellow'),
                      ])

    # finish subgraph
    dagraph.end_subgraph()

    return rv
    # return None          # original
    # return sub_graph_id  # subgraphs aren't nodes.
Map.data_dag = data_dag
#*** using given Env()s
def data_dag(self, env, dagraph, storage):
    """Draw the dag of a Call.

    The operator is resolved statically: Function() bindings expand to
    the block body (recursion is cut by _call_stack and drawn as a
    hexagon); Python callables / unknown names become name nodes;
    unresolved a.b(c) is rewritten as b(a, c).
    Raises DataDagError for operators that cannot be dispatched.
    """
    # Find block.
    operator = self.deref(0)
    if isinstance(operator, (Symbol, Member)):
        bbinding = operator.data_dag_lookup(env)

        if isinstance(bbinding, Function):
            if bbinding._id in dagraph._call_stack:
                # Recursive call, use the name directly.
                ##labeling
                # (Fix: the attribute list's closing brackets were missing.)
                return self.data_dag_name(
                    env, dagraph, storage, operator,
                    [('label', operator.py_string()),
                     ('shape', "hexagon"),
                     ('color', 'black'),
                     ])
                ## old:
                ## return operator.data_dag(env, dagraph, storage)
            else:
                # Use the looked-up Function()
                dagraph._call_stack.append(bbinding._id)
                rv = self.data_dag_block(env, dagraph, storage, bbinding)
                del dagraph._call_stack[-1]
                return rv

        elif callable(bbinding) or (bbinding == "unknown_fn"):
            ##labeling
            if 1:       # full name of functions
                # (Fix: closing brackets restored here as well.)
                return self.data_dag_name(
                    env, dagraph, storage, operator,
                    [('label', operator.py_string()),
                     ('shape', 'ellipse'),
                     ('color', 'red'),
                     ])
            else:       # functions as circles
                return self.data_dag_name(
                    env, dagraph, storage, operator,
                    [('label', ""),
                     ('shape', 'circle'),
                     ('color', 'black'),
                     ('style', 'filled'),
                     ('width', '0.15cm'),
                     ('fixedsize', 'true')])

        elif bbinding == "unknown_member":
            # Binding is unclear.  Treat a.b(c) as b(a,c), without
            # attempting to lookup b.

            # Form new tree.
            ma = Matcher()
            # Pattern from reader.parse('! a . ! b( !c ) ') with mods
            ma.match(self,
                     Call(
                         Member(Marker('a'), Marker('b')),
                         Marker('c')))          # c == aList([])

            # Tree from reader.parse('b(a,c) ')
            # The original tree is not to be touched; as python
            # doesn't have true lists, need a deepcopy here.
            new_args = deepcopy(ma['c'])        # [ ... ]
            copy_attribute(ma['c'], new_args, '_id')
            new_args.insert(0, ma['a'])         # [a, ...]
            tree = Call(ma['b'], new_args)
            return tree.data_dag(env, dagraph, storage)

        else:
            raise DataDagError(
                "Line: %d:col %d: Unknown static binding. Internal error."
                % operator._first_char)

    elif isinstance(operator, Function):
        return self.data_dag_block(env, dagraph, storage, operator)

    else:
        raise DataDagError("Expected Function(), got " + str(operator))
Call.data_dag = data_dag
def data_dag_name(self, env, dagraph, storage, operator, attrib_list):
    """Draw the call as a single named node fed by its argument dags."""
    # Expand dags for the invocation arguments.  [a,b from f(a,b)]
    arg_nodes = [arg.data_dag(env, dagraph, storage)
                 for arg in self.deref(1)]
    # Bind invocation arguments' dags to invoked function's name.
    func_node = dagraph.add_node(operator._id, attributes = attrib_list)
    for node in arg_nodes:
        dagraph.add_edge(node, func_node)
    return func_node
Call.data_dag_name = data_dag_name
def data_dag_block_body(env, dagraph, storage, block):
    """Draw a Function() block's body as a subgraph.

    Wraps the block's statement sequence in a temporary Program() run in
    the block's own _block_env, adds one ellipse node per positional
    block argument and a 'house' marker for the return value.
    Returns the return marker's graph id.
    """
    # Get block dag via Program()
    # This includes references to argument names in _block_env, but no
    # connections are made to actual Function() arguments.

    # HERE. make proper function program_from_seq()
    program = Program()
    program.setup(empty_parent(), block._block_env, storage)
    program._primary = (block.raw_seq_expr(), )

    if block._binding_name != None:
        sg = dagraph.start_subgraph(block._binding_name.py_string(), block._id)
    else:
        sg = dagraph.start_subgraph("anonymous_block", block._id)

    #-- Special treatment for block argument names.
    for argname in block.positional_block_args():
        dagraph.add_locally_unique_node(
            block._block_env.lookup_symbol_id(argname),
            ##labeling
            attributes = [('label', "%s" % argname ),
                          ('shape', "ellipse"),])

    #-- Return() marker
    return_mark = dagraph.add_locally_unique_node(
        block._id,
        attributes = [('label', ""),
                      ('shape', "house"),
                      ('color', 'black'),
                      ('style', 'filled'),
                      ('width', '0.15cm'),
                      ('fixedsize', 'true')])

    # The block BINDING ids in dagraph._call_stack are not useful
    # here; the actual BLOCK id is needed.
    dagraph._block_stack.append(return_mark)

    #-- Function body.
    graph_id = program.data_dag(block._block_env, dagraph, storage)

    if isinstance(graph_id, ReturnBranch):
        # Explicit Return(): route every pending source to the marker.
        for src in graph_id._from_gid:
            dagraph.add_edge(src, return_mark,
                             attributes = [('color', 'blue'), ])
    else:
        # Value of the last expression flows to the marker.
        dagraph.add_edge(graph_id, return_mark)

    del dagraph._block_stack[-1]
    dagraph.end_subgraph()

    return return_mark
def data_dag_block(self, env, dagraph, storage, block):
    """Expand a Call whose operator resolved to a Function() block.

    Draws the block body (via data_dag_block_body), then the argument
    expressions, and wires each argument dag to the block's matching
    positional argument node.  Returns the block's return-marker id.
    Raises DataDagError on argument count mismatch or a missing node.
    """
    # Also see Function.data_dag, Symbol.data_dag
    if block.nargs() != self.deref(1).__len__():
        raise DataDagError, \
              "Argument count mismatch: %s -- %s" % (block.source_string(),
                                                     self.source_string())

    last_value = data_dag_block_body(env, dagraph, storage, block)

    # Get this invocation's arguments' dags.  [ a,b of f(a,b) ]
    arg_seq = self.deref(1)
    arg_dags = []
    for expr in arg_seq:
        arg_dags.append(expr.data_dag(env, dagraph, storage))
    # HERE.  If f is a fnfn, certain arguments are Symbols() or
    # Function()s.  Neither expands by default, but should do so here.
    # OR: look for specialized patterns in the .data_dag() routine.

    # Bind block argument names to invocation arg dags.
    blck_arg_syms = block.positional_block_args(symbols = 1)
    for ii in range(len(arg_dags)):
        # For multiple values, use most recent.
        name_id = dagraph.get_graph_id(blck_arg_syms[ii]._id)
        if name_id == None:
            raise DataDagError, \
                  "No graph node found for tree_id %d" % blck_arg_syms[ii]._id
        dagraph.add_edge(arg_dags[ii], name_id)
    return last_value           # The return value of the block.
    # return sg                 # The block itself as return value (physical node)
Call.data_dag_block = data_dag_block
def data_dag_lookup(self, env):
    """Statically resolve a.b for Call.data_dag.

    Returns the bound object, or "unknown_member" when a is unbound.
    Raises DataDagError when a or b is not a plain Symbol, or when the
    binding is of an unusable kind.
    """
    # Static dependency restrictions: for a.b, both must be Symbols();
    # otherwise, evaluation would be needed.
    # Also see Member.interpret.

    # Get b (of a.b)
    first = self.deref(0)
    second = self.deref(1)
    if isinstance(first, Symbol):
        if isinstance(second, Symbol):
            mem_name = second.py_string()
        else:
            raise DataDagError, "In a.b, b is not a name: " + str(second)
    else:
        raise DataDagError, "In a.b, a is not a name: " + str(first)

    # Get a (of a.b) -- the actual binding.
    object = env.lookup_ptr( first.as_index() )

    # To suppress modules from explicitly showing in the dag, first
    # check whether first.as_index() is a module.
    # NOTE(review): exec/eval on a parsed name -- assumes trusted source.
    try:
        exec('import ' + first.as_index())
    except ImportError:
        pass
    else:
        object = eval(first.as_index())

    if isinstance(object, (Function, Map)):
        return object._block_env.lookup_ptr( mem_name )

    elif isinstance(object, DictType):
        return object[ mem_name ]

    elif isinstance(object, ModuleType):
        member = eval('object.' + mem_name)
        return member

    elif isinstance(object, Env):
        # Env()s have 2 possible member sources:
        #       the original Python class,
        #       and the Env() bindings.
        # To allow overriding, first try the bindings, then the
        # python 'built-ins'.
        stat, rv = object.lookup_status( mem_name )
        if stat:
            return rv
        return eval('object.' + mem_name)

    elif object == None:
        # W/o static typing, any member access' validity is unclear.
        return "unknown_member"

    elif callable(object):
        return object

    else:
        raise DataDagError, \
              ("Line: %d:col %d: Invalid reference found. Internal error."
               % object._first_char)
Member.data_dag_lookup = data_dag_lookup
def data_dag(self, env, dagraph, storage):
    """Node for a member reference a.b.

    For a.b, the dag references b -- b is treated like a single name.
    A Map binding resolves to its member's existing node; a Function
    binding is rejected; anything else links to the full dotted name.
    """
    # Static dependency restrictions: for a.b, both must be Symbols();
    # otherwise, evaluation would be needed.
    first = self.deref(0)
    second = self.deref(1)
    if isinstance(first, Symbol):
        if isinstance(second, Symbol):
            # Get b (of a.b)
            mem_name = second.py_string()
        else:
            raise DataDagError, "In a.b, b is not a name: " + str(second)
    else:
        raise DataDagError, "In a.b, a is not a name: " + str(first)

    # # if first.as_index() in ['userdata']:
    # #     pdb.set_trace()

    # Get a (of a.b) -- the actual binding.
    object = env.lookup_ptr( first.as_index() )

    if isinstance(object, Map):
        # In the Map() tree, find the Symbol() used in the
        # binding of b.
        tree_id = object._block_env.lookup_symbol_id( second.as_index() )
        if tree_id != None:
            return dagraph.get_graph_id(tree_id)
        else:
            raise DataDagError, ("Line: %d:col %d: " % object._first_char) + \
                  "Undefined map binding."
        # return dagraph.add_locally_unique_node(
        #     tree_id,
        #     attributes = [('label', "%s" % second.as_index() )])
    elif isinstance(object, Function):
        raise DataDagError, ("Line: %d:col %d: " % first._first_char) + \
              "block bindings are not available " + \
              "outside block interpretation -- use a map instead."
    else:
        # Object is external.  Link to the full name (a.b) instead.
        name = "%s.%s" % (first.py_string(), second.py_string())
        return dagraph.add_node(name, attributes = [('label', name)] )
Member.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Draw dag for If(cond, yes, no): a junction node fed by cond,
    with green (yes) and red (no) branch edges.

    ReturnBranch results from either branch are routed to the enclosing
    block's return marker instead of the junction node.
    """
    cond_gid = self.deref(0).data_dag(env, dagraph, storage)
    yes_gid = self.deref(1).data_dag(env, dagraph, storage)
    no_gid = self.deref(2).data_dag(env, dagraph, storage)

    if isinstance(cond_gid, ReturnBranch):
        # Odd, but possible.
        return cond_gid

    # All branches Return().
    # Fix: use logical 'and' (was bitwise '&'); same result on bools,
    # but short-circuits and states the intent.
    if isinstance(yes_gid, ReturnBranch) and isinstance(no_gid, ReturnBranch):
        return yes_gid.merge(no_gid)

    # At least one real return value.  This (these) value(s) will be
    # propagated further, so the ReturnBranch edge is drawn here.
    self_gid = dagraph.add_node(
        self._id,
        attributes = [('label', ''),
                      ('shape', 'circle'),
                      ('color', 'black'),
                      ('style', 'filled'),
                      ('width', '0.15cm'),
                      ('fixedsize', 'true')])

    dagraph.add_edge(cond_gid, self_gid)

    if isinstance(yes_gid, ReturnBranch):
        for src in yes_gid._from_gid:
            dagraph.add_edge(src, dagraph._block_stack[-1],
                             attributes = [('color', 'blue'), ])
    else:
        dagraph.add_edge(yes_gid, self_gid,
                         attributes = [('color', '#00a000')])   # green

    if isinstance(no_gid, ReturnBranch):
        for src in no_gid._from_gid:
            dagraph.add_edge(src, dagraph._block_stack[-1],
                             attributes = [('color', 'blue'), ])
    else:
        dagraph.add_edge(no_gid, self_gid,
                         attributes = [('color', 'red')])

    return self_gid
If.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Wrap the argument's dag in a ReturnBranch marker.

    The real trick in Return() data flow is to ensure never-reached
    parts are ignored; that happens in the other .data_dag()s.
    """
    return ReturnBranch(self.deref(0).data_dag(env, dagraph, storage))
Return.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Draw dag edges for an assignment lhs = rhs.

    A Tuple lhs gets destructuring treatment; otherwise lhs must be a
    plain Symbol.  Returns the graph id of the (last) bound name, or
    None when the rhs contributed no node.
    Raises DataDagError for an invalid lhs, mismatched tuple sizes, or
    a Return() inside the assigned expression.
    """
    # Note on ReturnBranch:
    #     Assume no return occurs in the assigned expression.
    lhs = self.deref(0)
    rhs = self.deref(1)

    # Destructuring binding for tuples.
    if isinstance(lhs, Tuple):
        ii = 0
        name_ids = []
        names = lhs[0]                  # The list.
        for _ in self.arg_names():
            symbol = names[ii]
            name_ids.append(symbol.data_dag(env, dagraph, storage))
            ii += 1
        if isinstance(rhs, Tuple):
            values = rhs[0]
            if len(values) != len(names):
                raise DataDagError(
                    "Line: %d:col %d:Tuple sizes in assignment "
                    "don't match." % self._first_char)
            else:
                ii = 0
                for val in values:
                    val_id = val.data_dag(env, dagraph, storage)
                    # Must be local test below -- not in add_edge()
                    if val_id != None:
                        dagraph.add_edge(val_id, name_ids[ii])
                    ii += 1
                return name_ids[-1]
        else:
            # The check
            #    if isinstance(value, TupleType):
            # is not available w/o prior .interpret(); only a dag
            # making all binding names depend on all rhs contents can
            # be drawn.
            val_id = rhs.data_dag(env, dagraph, storage)
            if val_id != None:          # local test below.
                for name_id in name_ids:
                    dagraph.add_edge(val_id, name_id)
                # Fix: was 'return name_id', which raises NameError
                # when name_ids is empty; same value otherwise.
                return name_ids[-1]

    # Plain symbol binding.
    elif isinstance(lhs, Symbol):
        val_id = rhs.data_dag(env, dagraph, storage)
        if val_id != None:              # local test.
            if isinstance(val_id, ReturnBranch):
                # Fix: the two message strings were juxtaposed without
                # '+', which *called* the first string at runtime.
                raise DataDagError(
                    ("Line: %d:col %d:\n" % self._first_char) +
                    ("Return inside assignment: %s\n" %
                     self.source_substring()))
            name_id = lhs.data_dag(env, dagraph, storage)
            dagraph.add_edge(val_id, name_id)
            return name_id
        else:
            return None

    else:
        raise DataDagError(
            "Line: %d:col %d: Invalid lhs in assignment "
            % self._first_char)
Set.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Small box node with an edge from each list element's dag.

    Note on ReturnBranch: assume no return occurs in the list.
    """
    # Fix: the attribute list and add_node() call were never closed.
    self_id = dagraph.add_node(
        self._id,
        attributes = [('label', ""),
                      ('shape', 'box'),
                      ('color', 'black'),
                      # ('style', 'filled'),
                      ('width', '0.10cm'),
                      ('height', '0.10cm'),
                      ('fixedsize', 'true'),
                      ])
    children = [ child.data_dag(env, dagraph, storage)
                 for child in self.deref(0) ]
    for ch in children:
        dagraph.add_edge(ch, self_id)

    return self_id
List.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Small box node with an edge from each tuple element's dag.

    Note on ReturnBranch: assume no return occurs in the tuple.
    """
    # Fix: the attribute list and add_node() call were never closed.
    self_id = dagraph.add_node(
        self._id,
        # # attributes = [('label', "TUPLE-%d" % self._id )]
        attributes = [('label', ""),
                      ('shape', 'box'),
                      ('color', 'black'),
                      # ('style', 'filled'),
                      ('width', '0.10cm'),
                      ('height', '0.10cm'),
                      ('fixedsize', 'true'),
                      ])
    children = [ child.data_dag(env, dagraph, storage)
                 for child in self.deref(0) ]
    for ch in children:
        dagraph.add_edge(ch, self_id)

    return self_id
Tuple.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Sequence traversal: dag of each element in turn.

    Stops early at a ReturnBranch (rest unreachable); returns the last
    element's result.
    """
    if len(self) == 0:
        raise DataDagError("Empty program")
    for expr in self:
        rv = expr.data_dag(env, dagraph, storage)
        if isinstance(rv, ReturnBranch):
            break               # Rest is unreachable.
    return rv
aList.data_dag = data_dag


#** data_dag() member, Immediate() types
def data_dag(self, env, dagraph, storage):
    """A bare node with no attributes."""
    return dagraph.add_node(self._id)
aNone.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Single small diamond node for a literal value."""
    # Fix: the add_node() call was never closed.
    return dagraph.add_node(
        self._id,
        ##labeling
        # attributes = [('label', "%s" % self.to_plain_python(storage))]
        attributes = [('label', ""),
                      ('shape', "diamond"),
                      ('color', 'black'),
                      ('width', '0.10cm'),
                      ('height', '0.10cm'),
                      ('fixedsize', 'true')]
        )
Immediate.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    """Node for a name reference.

    External/undefined names get one shared global node; names bound to
    a Function() expand to the function body's dag; other local names
    map to the node of their defining tree id.
    """
    tree_id = env.lookup_symbol_id(self.as_index())
    binding = env.lookup_ptr(self.as_index())
    if tree_id == None:
        # External (or undefined) symbol.  Use as-is, globally.
        return dagraph.add_unique_node(
            self.py_string(),
            attributes = [('label', "%s" % self.as_index() )])

    elif isinstance(binding, Function):
        return data_dag_block_body(env, dagraph, storage, binding)

    else:
        return dagraph.add_locally_unique_node(
            tree_id,
            attributes = [('label', "%s" % self.as_index() )])
Symbol.data_dag = data_dag

def data_dag_lookup(self, env):
    # Static lookup; "unknown_fn" flags a name without a binding.
    rv = env.lookup_ptr(self.as_index())
    if rv == None:
        return "unknown_fn"
    else:
        return rv
Symbol.data_dag_lookup = data_dag_lookup
#* Path access for trees
class InvalidPath(exceptions.Exception):
    """Raised when a tree path does not address a valid element."""
    def __init__(self, args=None):
        self.args = args
5096 # The notion of paths in the gtk treeview is suitable for use here.
5097 # It is defined as follows.
5099 # A path is a list of integer indices, 0-relative.
5100 # The column is a single integer index, again 0-relative.
5102 # A path points to a value; the value may be indexed (by the column)
5103 # or simple (a None column index).
5105 # For the structure
5107 # a1
5108 # b0, b1, ...
5109 # a2
5110 # b21
5111 # b22
5112 # the paths
5113 # ([], None) is 'a',
5114 # ([0, 1], None) is invalid,
5115 # ([0], 1) is 'b1'
5117 # Thus, the path index runs down the tree, the column index across.
5118 # This means a node can be both a tree (have children), and a row
5119 # (have indices).
5121 # Because the syntax is foo.element_at(), indexing starts at foo's
5122 # children. Thus, foo.element_at([]) returns foo.
5124 # Applied to ASTs, indices are logical and ignore intermediate
5125 # structures (aList)
5128 #** Element at index
5131 #*** testing expressions
5133 >>> pp =reader.parse('(a, b) = f(x,y)')
5135 >>> pp.element_at([])
5136 Program(aList([Set(Tuple(aList([Symbol('a'), Symbol('b')])), Call(Symbol('f'), aList([Symbol('x'), Symbol('y')])))]))
5138 >>> pp.element_at([0])
5139 Set(Tuple(aList([Symbol('a'), Symbol('b')])), Call(Symbol('f'), aList([Symbol('x'), Symbol('y')])))
5141 >>> pp.element_at([0,0])
5142 Tuple(aList([Symbol('a'), Symbol('b')]))
5144 >>> pp.element_at([0,0,0])
5145 Symbol('a')
5147 >>> pp.element_at([0,1])
5148 Call(Symbol('f'), aList([Symbol('x'), Symbol('y')]))
5150 >>> pp.element_at([0,1,0])
5151 Symbol('f')
#*** Nested
def element_at(self, path):
    """Interface stub; see also aList.element_at."""
    raise InterfaceOnly
Nested.element_at = element_at
5161 def _element_path(self, path, func):
5162 if path == []:
5163 return self
5164 elif len(path) == 1:
5165 return func()
5166 else:
5167 return func().element_at(path[1:])
def element_at(self, path):
    """Child lookup for nodes whose children live in _primary[0]."""
    def _pick():
        return self._primary[0][path[0]]
    return _element_path(self, path, _pick)
Tuple.element_at = element_at
Program.element_at = element_at
List.element_at = element_at
Map.element_at = element_at

def element_at(self, path):
    """Child lookup for nodes whose children are _primary itself."""
    def _pick():
        return self._primary[path[0]]
    return _element_path(self, path, _pick)
Set.element_at = element_at
Macro.element_at = element_at
Inline.element_at = element_at
eTree.element_at = element_at
def element_at(self, path):
    """Index into a Call, treating its aList as transparent.

    In e.g. Call(Symbol('f'), aList([Symbol('x'), Symbol('y')]))
    indexing is f=0, x=1, y=2.
    """
    def _select():
        idx = path[0]
        if idx == 0:
            return self._primary[0]
        return self._primary[1][idx - 1]
    return _element_path(self, path, _select)
Call.element_at = element_at
#*** Immediate
def element_at(self, path):
    """Int paths: [] addresses the node itself, ['value'] its value."""
    if path == []:
        return self                 # lvalue (via .deep_replace etc.)
    if path == ['value']:
        return self._primary[0]     # Python rvalue.
    raise InvalidPath
Int.element_at = element_at

def element_at(self, path):
    """Interface stub for the remaining Immediate() types."""
    raise InterfaceOnly
Immediate.element_at = element_at
#*** specials
def element_at(self, path):
    # See Nested.element_at
    raise Exception(
        "internal error: aList() contents should be picked by parent.")
aList.element_at = element_at
#** Form table of paths
def make_path_table(tree):
    """Build the path table of a selection tree.

    Input:
        TREE -- a selection_tree, constructed using eTree(), Macro(), etc.
    Return:
        TBL so that TBL[ path ] -> (name, macro)
                    TBL[ (path, 'num_leaves') ] -> int
    A PATH is a tuple of indices, starting from 0.
    For trees, macro is None, for leaves the leaf itself.
    """
    return tree._make_path_table((), {})
def _make_path_table(self, path, tbl):
    """Record (label, node) at PATH and recurse into the leaves."""
    tbl[path] = (self._label, self)
    # The leaves are l1, l2, ... (slot 0 of _primary is not a leaf).
    num_leaves = len(self._primary) - 1
    tbl[(path, 'num_leaves')] = num_leaves
    for idx in range(num_leaves):
        self._primary[idx + 1]._make_path_table(path + (idx,), tbl)
    return tbl
eTree._make_path_table = _make_path_table
def _make_path_table(self, path, tbl):
    """Record this leaf at PATH.

    Input:
        TREE -- a selection_tree, constructed using eTree(), Macro(), etc.
        PATH -- the path to TREE, as index tuple (a,b,...)

    NOTE(review): stores bare self -- '(self)' is not a tuple -- while
    eTree stores (label, node) and astType stores (self, self); confirm
    callers expect this asymmetry.
    """
    tbl[ path ] = (self)
    return tbl
Macro._make_path_table = _make_path_table
Macro.update_tree_table = _make_path_table
Inline._make_path_table = _make_path_table
def _make_path_table(self, path, tbl):
    # Deprecated fallback: records (self, self) instead of (label, node).
    print "Warning: astType._make_path_table is deprecated."
    tbl[ path ] = (self, self)
    return tbl
astType._make_path_table = _make_path_table
#* Tree work

#** parent_id
def parent_id(self):
    # Return the stored parent id, or None when the node is detached.
    # (__dict__.get avoids touching class attributes.)
    return self.__dict__.get('_parent')
astType.parent_id = parent_id

def parent_id(self):
    # Same accessor for the non-astType node kinds.
    return self.__dict__.get('_parent')
aList.parent_id = parent_id
Native.parent_id = parent_id

def has_parent(self):
    # True iff a parent id is recorded.
    return self.parent_id() != None
astType.has_parent = has_parent
aList.has_parent = has_parent
cls_viewList.has_parent = has_parent
Native.has_parent = has_parent
#** misc
def placeholder(parent, storage):
    """Return a valid but meaningless child (an empty aNone).

    The caller must insert it into the proper slot of parent.
    """
    zero_char = (0, 0)
    child = (aNone()
             .set_char_range(zero_char, zero_char)
             .set_source_string(''))
    child.setup(parent, Env('dummy_env', None, None, storage), storage)
    return child

def empty_parent():
    """A parentless stand-in node (no id)."""
    return aNone(_id = None)

def body(self):
    """First expression of this Program (child 0 of its aList)."""
    contents = self._primary[0]
    return contents[0]
Program.body = body
def single_program(self):
    """Strip redundant nesting.

    For
        program:
            program:
                body
    return the innermost program only (nothing is lost).
    """
    contents = self._primary[0]
    if isinstance(contents[0], Program) and len(contents) == 1:
        return contents[0].single_program()
    return self
Program.single_program = single_program
#** subtree replacement
class ReplacementError(exceptions.Exception):
    """Raised when a child/parent replacement cannot be carried out."""
    def __init__(self, args=None):
        self.args = args
def substitute(src, dst, tree):
    """Replace every exact occurrence of SRC with (a copy of) DST in TREE.

    The tree's source string is not replaced.
    NOTE(review): 'find_exact', 'def_env' and 'storage' are free names in
    this function; unless they exist at module level elsewhere in the
    file, calling this raises NameError -- verify before use.
    """
    lst = find_exact(tree, src)
    for elem in lst:
        elem.shallow_replace(deepcopy(dst).setup(empty_parent(),
                                                 def_env, storage)[0],
                             storage)
    return tree
def replacement_setup(self, new_val, storage):
    """Run .setup() on new_val so it can take self's place; return its id."""
    parent = storage.load(self._parent)
    def_env = storage.get_attribute(self._id, "stored_from_env")
    new_val.setup(parent, def_env, storage)
    return new_val._id
astType.replacement_setup = replacement_setup
#*** deep replacement
def deep_replace(self, new, storage, indent = 0):
    """Replace self with 'new' in its parent AND in all interp clones.

    Clone ids come from the "interp_clone" storage attribute; each
    clone gets its own deepcopy of 'new', recursively.
    NOTE(review): the print below runs on every cloned replacement.
    """
    # Replace self with new in parent and all clones.
    self.shallow_replace(new, storage)
    sl = storage.load

    clone_l = storage.get_attribute(self._id, "interp_clone")
    if clone_l != None:
        print ' '*indent, "---------------------- clone data"
        for cid in clone_l:
            clone = sl(cid)
            # Set up subtree clone with appropriate parent.
            new_clone = deepcopy(new)
            clone.replacement_setup(new_clone, storage)
            clone.deep_replace(new_clone, storage)
astType.deep_replace = deep_replace
### Immediate.deep_replace = deep_replace
#*** shallow replacement
def shallow_replace(self, new, storage, indent = 0):
    """Replace self with 'new' in the immediate parent only."""
    # Parent referring to self.
    pid = self.parent_id()
    if pid is None:
        raise ReplacementError("no parent")
    storage.load(pid).replace_child(self._id, new)
astType.shallow_replace = shallow_replace
#*** Re-establish object graph structures after edit or copy.
def _update_refs(self):
    """Default: nothing cached, nothing to refresh."""
    pass
astType._update_refs = _update_refs

def _update_refs(self):
    """Inline caches its init string; re-derive it from _primary."""
    (self._python_init_string,) = self._primary
Inline._update_refs = _update_refs
#*** child replacement
def replace_child(self, orig_id, new_node):
    """Swap the child with id orig_id for new_node inside _primary."""
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        raise ReplacementError("Child not found.")
    # Update direct references (rebuild the _primary tuple).
    children = list(self._primary)
    children[idx]._parent = None
    children[idx] = new_node
    self._primary = tuple(children)
    self._update_refs()
    new_node._parent = self._id
Nested.replace_child = replace_child
def replace_child(self, orig_id, new_node):
    """List delegates child replacement to its contained aList."""
    return self._primary[0].replace_child(orig_id, new_node)
List.replace_child = replace_child

def replace_child(self, orig_id, new_node):
    """aList variant; also see Nested.replace_child()."""
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    contents = self._primary[0]
    contents[idx]._parent = None
    contents[idx] = new_node
    self._primary = (contents,)
    # Keep the list view in sync.
    self[idx] = new_node
    new_node._parent = self._id
aList.replace_child = replace_child
#** insert / append child
def append_child(self, child):
    """Append == insert at the end."""
    self.insert_child(len(self), child)
aList.append_child = append_child
List.append_child = append_child
Program.append_child = append_child

def insert_child(self, index, child):
    """aList insert; also see Nested.insert_child()."""
    assert self._id != None
    contents = self._primary[0]
    contents.insert(index, child)
    self._primary = (contents,)
    # Keep the list view in sync.
    self.insert(index, child)
    child._parent = self._id
aList.insert_child = insert_child
def insert_child(self, index, child):
    """Tuple-backed insert; also see aList.insert_child()."""
    assert self._id != None
    contents = list(self._primary)
    contents.insert(index, child)
    self._primary = tuple(contents)
    child._parent = self._id
Nested.insert_child = insert_child

def insert_child(self, index, child):
    """List/Program delegate to the contained aList."""
    return self._primary[0].insert_child(index, child)
List.insert_child = insert_child
Program.insert_child = insert_child

def alist_replace(self, obj):
    """Swap in a whole new aList as the single child."""
    assert isinstance(obj, aList)
    self._primary = (obj, )
cls_viewList.alist_replace = alist_replace
#** insert into all clones
def insert_child_rec(self, index, new, storage):
    """Insert 'new' at index in self and, recursively, in all clones.

    Each clone receives its own deepcopy of 'new', set up against the
    clone's stored environment and cross-referenced to the original.
    NOTE(review): assumes 'cross_ref_trees' and 'copy_char_info' are
    defined elsewhere in this module (the file header defines
    cross_reference_trees) -- confirm the names match.
    """
    load = storage.load

    # Insert 'new' in self and all clones.
    self.insert_child(index, new)
    clone_l = storage.get_attribute(self._id, "interp_clone")
    if clone_l != None:
        for cid in clone_l:
            self_cl = load(cid)
            # Set up subtree clone with appropriate parent.
            new_clone = deepcopy(new)
            copy_char_info(new, new_clone)
            new_clone.setup(
                self_cl,
                storage.get_attribute(self_cl._id, "stored_from_env"),
                storage)
            cross_ref_trees(storage, new, new_clone)
            self_cl.insert_child_rec(index, new_clone, storage)
astType.insert_child_rec = insert_child_rec
aList.insert_child_rec = insert_child_rec
#** detach (delete) child
def detach_child(self, orig_id, storage):
    """Detach a direct child; the arity stays unchanged.

    A placeholder node keeps the slot occupied.
    Compare aList.detach_child().
    """
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    children = list(self._primary)
    children[idx]._parent = None
    children[idx] = placeholder(self, storage)
    self._primary = tuple(children)
    self._update_refs()
Nested.detach_child = detach_child
def detach_child(self, orig_id, storage):
    """Detach from a List.

    Both the contained alist (the real child) and the alist's children
    are considered children of List; dispatch accordingly.
    """
    assert isinstance(orig_id, IntType)
    if self._primary[0]._id == orig_id:
        # The alist itself: leave a placeholder in slot 0.
        contents = list(self._primary)
        contents[0]._parent = None
        contents[0] = placeholder(self, storage)
        self._primary = tuple(contents)
        self._update_refs()
    else:
        return self._primary[0].detach_child(orig_id, storage)
List.detach_child = detach_child
def detach_child(self, orig_id, storage):
    """Detach a child; the aList shrinks (compare Nested.detach_child)."""
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx is None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    contents = self._primary[0]
    contents[idx]._parent = None
    del contents[idx]
    self._primary = (contents,)
    # Keep the list view in sync.
    del self[idx]
aList.detach_child = detach_child
def detach_from_parent(self, storage):
    """Ask the parent (loaded from storage) to detach this node."""
    pid = self.parent_id()
    if pid is None:
        raise ReplacementError("No parent.")
    storage.load(pid).detach_child(self._id, storage)
astType.detach_from_parent = detach_from_parent
aList.detach_from_parent = detach_from_parent
Native.detach_from_parent = detach_from_parent
5583 # # def detach_from_parent(self, storage):
5584 # # # Parent referring to self.
5585 # # parent_id = self.parent_id()
5586 # # if parent_id is None:
5587 # # raise ReplacementError("No parent.")
5589 # # parent = storage.load(parent_id)
5590 # # chid = self._real_tree._id
5591 # # parent.detach_child(chid, storage)
5592 # # cls_viewList.detach_from_parent = detach_from_parent
#** deletion
def delete(self, storage):
    """Unhook from the parent (if any), then purge the subtree from storage."""
    if self.parent_id():
        self.detach_from_parent(storage)
    for node in self.top_down():
        storage.remove(node._id)
Nested.delete = delete
Immediate.delete = delete
aList.delete = delete
Native.delete = delete
5606 # # def delete(self, storage):
5607 # # # Remove from storage.
5608 # # for ch in self.top_down():
5609 # # storage.remove(ch._id)
5610 # # aList.delete = delete
#** traversal

#*** Tabular; clones, indentation -- grouped
def clones_grouped(id, storage, clone_level, indent):
    """Yield prefix_grpd rows for every clone of node `id`,
    bracketing each clone's rows with '(' / ')' marker tuples."""
    clone_l = storage.get_attribute(id, "interp_clone")
    if clone_l == None:
        return
    for clone_id in clone_l:
        clone = storage.load(clone_id)
        # Recursive -- not too legible, but complete.
        yield None, '(', None
        for row in clone.prefix_grpd(storage,
                                     clone_level = clone_level + 1,
                                     indent = indent):
            yield row
        yield None, ')', None
5630 def prefix_grpd(self, storage, clone_level = 0, indent = 0):
5631 # yield all tree nodes and their clones in
5632 # top-down, left-right, depth first form.
5633 # yielded are (tree, clone_level, indent_level)
5635 # Clones are completed before children, so only uncloned trees are
5636 # returned uninterrupted.
5638 yield self, clone_level, indent
5640 for clone in clones_grouped(self._id, storage, clone_level, indent):
5641 yield clone
5643 yield None, None, '('
5644 for child in self._primary:
5645 for rv in child.prefix_grpd(storage,
5646 clone_level = clone_level,
5647 indent = indent + 1):
5648 yield rv
5649 yield None, None, ')'
5650 Nested.prefix_grpd = prefix_grpd
5652 def prefix_grpd(self, storage, clone_level = 0, indent = 0):
5653 yield self, clone_level, indent
5655 for clone in clones_grouped(self._id, storage, clone_level, indent):
5656 yield clone
5658 yield None, None, '('
5659 for child in self._primary[0]: # Only difference -- index.
5660 for rv in child.prefix_grpd(storage,
5661 clone_level = clone_level,
5662 indent = indent + 1):
5663 yield rv
5664 yield None, None, ')'
5665 aList.prefix_grpd = prefix_grpd
5667 def prefix_grpd(self, storage, clone_level = 0, indent = 0):
5668 yield self, clone_level, indent
5670 for clone in clones_grouped(self._id, storage, clone_level, indent):
5671 yield clone
5672 Immediate.prefix_grpd = prefix_grpd
5674 def prefix_grpd(self, storage, clone_level = 0, indent = 0):
5675 yield self, clone_level, indent
5677 for clone in clones_grouped(self._id, storage, clone_level, indent):
5678 yield clone
5679 aNone.prefix_grpd = prefix_grpd
5682 #*** Tabular, including clones, with 'indentation'
def yield_clones(id, storage, clone_level, indent):
    # Yield the rendering stream of every interpreter clone of node
    # `id`, one level deeper in clone_level.
    clone_l = storage.get_attribute(id, "interp_clone")
    if clone_l is None:
        return
    for clone_id in clone_l:
        clone = storage.load(clone_id)
        # Recursive -- not too legible, but complete.
        for item in clone.prefix_all(storage,
                                     clone_level = clone_level + 1,
                                     indent = indent):
            yield item

def prefix_all(self, storage, clone_level = 0, indent = 0):
    # Yield (tree, clone_level, indent_level) for all tree nodes and
    # their clones, top-down, left-right, depth first.  Clones are
    # completed before children, so only uncloned trees are returned
    # uninterrupted.  The output is messy but complete; displaying
    # these high-dimensional structures in a 1-D sequence always is.
    yield self, clone_level, indent
    for item in yield_clones(self._id, storage, clone_level, indent):
        yield item
    for child in self._primary:
        for item in child.prefix_all(storage,
                                     clone_level = clone_level,
                                     indent = indent + 1):
            yield item
Nested.prefix_all = prefix_all

def prefix_all(self, storage, clone_level = 0, indent = 0):
    # aList variant: the children live in self._primary[0].
    yield self, clone_level, indent
    for item in yield_clones(self._id, storage, clone_level, indent):
        yield item
    for child in self._primary[0]:
        for item in child.prefix_all(storage,
                                     clone_level = clone_level,
                                     indent = indent + 1):
            yield item
aList.prefix_all = prefix_all

def prefix_all(self, storage, clone_level = 0, indent = 0):
    # Leaf variant: node and clones only.
    yield self, clone_level, indent
    for item in yield_clones(self._id, storage, clone_level, indent):
        yield item
Immediate.prefix_all = prefix_all

def prefix_all(self, storage, clone_level = 0, indent = 0):
    yield self, clone_level, indent
    for item in yield_clones(self._id, storage, clone_level, indent):
        yield item
aNone.prefix_all = prefix_all
5745 #*** With 'indentation'
def top_down_indented(self, indent = 0):
    # Yield (node, indent) pairs: top-down, left-right, depth first.
    yield self, indent
    for child in self._primary:
        for pair in child.top_down_indented(indent = indent + 1):
            yield pair
Nested.top_down_indented = top_down_indented

def top_down_indented(self, indent = 0):
    # Childless node: just itself.
    yield self, indent
Immediate.top_down_indented = top_down_indented

def top_down_indented(self, indent = 0):
    yield self, indent
aNone.top_down_indented = top_down_indented
Native.top_down_indented = top_down_indented

def top_down_indented(self, indent = 0):
    # aList variant: the children live in self._primary[0].
    yield self, indent
    for child in self._primary[0]:
        for pair in child.top_down_indented(indent = indent + 1):
            yield pair
aList.top_down_indented = top_down_indented
5771 #*** Outline elements only, with level
def outl_top_down(self, level = 0):
    # Yield (node, level) for the outline elements only: top-down,
    # left-right, depth first.
    yield self, level
    for sub in self._outl_children:
        for pair in sub.outl_top_down(level = level + 1):
            yield pair
cls_viewList.outl_top_down = outl_top_down
Program.outl_top_down = outl_top_down
5782 #*** Plain top-down
5783 # omit_children_of = [Function]
5784 # def top_down(self, omit_children_of = []):
5785 # # top-down, left-right, depth first.
5786 # yield self
5787 # if self.__class__ in omit_children_of:
5788 # return
5789 # for child in self._primary:
5790 # for cc in child.top_down():
5791 # yield cc
5792 # Nested.top_down = top_down
def top_down(self):
    # Yield every node of the subtree: top-down, left-right, depth
    # first.
    yield self
    for child in self._primary:
        for node in child.top_down():
            yield node
Nested.top_down = top_down

def top_down(self):
    # Childless node.
    yield self
Immediate.top_down = top_down

def top_down(self):
    yield self
aNone.top_down = top_down
Native.top_down = top_down

def top_down(self):
    # aList variant: the children live in self._primary[0].
    yield self
    for child in self._primary[0]:
        for node in child.top_down():
            yield node
aList.top_down = top_down
5819 #*** Top-down, truncate subtrees of given type(s)
def top_down_truncate(self, omit_children_of):
    # Top-down, node-left-right, depth first traversal.  Instances of
    # classes in omit_children_of are returned but not traversed
    # further.
    yield self
    if self.__class__ in omit_children_of:
        return
    for child in self._primary:
        for node in child.top_down_truncate(omit_children_of):
            yield node
Nested.top_down_truncate = top_down_truncate

def top_down_truncate(self, omit_children_of):
    # Leaf: nothing to truncate.
    yield self
Immediate.top_down_truncate = top_down_truncate

def top_down_truncate(self, omit_children_of):
    yield self
aNone.top_down_truncate = top_down_truncate
Native.top_down_truncate = top_down_truncate

def top_down_truncate(self, omit_children_of):
    # aList variant: the members live in self._primary[0].
    yield self
    if self.__class__ in omit_children_of:
        return
    for child in self._primary[0]:
        for node in child.top_down_truncate(omit_children_of):
            yield node
aList.top_down_truncate = top_down_truncate
5851 #*** Flat, children only, generic callback interface
def visit_children(self, func, **kwds):
    # Apply FUNC(CHILD, **kwds) to every direct child; no recursion.
    for child in self._primary:
        func(child, **kwds)
Nested.visit_children = visit_children

def visit_children(self, func, **kwds):
    # Childless node: nothing to visit.
    pass
Immediate.visit_children = visit_children

def visit_children(self, func, **kwds):
    pass
aNone.visit_children = visit_children
Native.visit_children = visit_children

def visit_children(self, func, **kwds):
    # aList variant: the members live in self._primary[0].
    for child in self._primary[0]:
        func(child, **kwds)
aList.visit_children = visit_children
5875 #*** Flat, editable subtrees only, iterator
5877 # For a given l3 type, yield the subtrees intended to be editable,
5878 # bypassing containers (used by that type) they may be in.
5881 # Used for rendering of a textual program.
def subtrees(self):
    # Yield the editable subtrees, depth first, bypassing containers.
    for child in self._primary:
        yield child
        for sub in child.subtrees():
            yield sub
Nested.subtrees = subtrees

def subtrees(self):
    # A Program keeps its expressions in self[0].
    for child in self[0]:
        yield child
        for sub in child.subtrees():
            yield sub
Program.subtrees = subtrees

def subtrees(self):
    # Argument list (self[0]) first, then body (self[1]).
    for section in (self[0], self[1]):
        for child in section:
            yield child
            for sub in child.subtrees():
                yield sub
Function.subtrees = subtrees

def subtrees(self):
    # For a(b,c), yield only the arguments b and c, not the operator.
    for child in self[1]:
        yield child
        for sub in child.subtrees():
            yield sub
Call.subtrees = subtrees

def subtrees(self):
    # Both sides of a.b .
    for side in (self[0], self[1]):
        yield side
        for sub in side.subtrees():
            yield sub
Member.subtrees = subtrees

def subtrees(self):
    # Condition ...
    yield self[0]
    for sub in self[0].subtrees():
        yield sub
    # ... then the 'then' branch ...
    for child in self[1]:
        yield child
        for sub in child.subtrees():
            yield sub
    # ... then the optional 'else' branch.
    if not isinstance(self[2], aNone):
        for child in self[2]:
            yield child
            for sub in child.subtrees():
                yield sub
If.subtrees = subtrees
## Set.subtrees = subtrees

def subtrees(self):
    # Container contents live in self[0].
    for child in self[0]:
        yield child
        for sub in child.subtrees():
            yield sub
List.subtrees = subtrees
Map.subtrees = subtrees
Tuple.subtrees = subtrees

def subtrees(self):
    # Empty generator: leaves have no editable subtrees.
    return
    yield None
Immediate.subtrees = subtrees

def subtrees(self):
    return
    yield None
aNone.subtrees = subtrees
Native.subtrees = subtrees

def subtrees(self):
    # aList members live in self._primary[0].
    for child in self._primary[0]:
        yield child
        for sub in child.subtrees():
            yield sub
aList.subtrees = subtrees
5971 #*** Flat, no recursion, editable subtrees1 only, iterator
5973 # For a given l3 type, yield the IMMEDIATE subtrees1 intended to be editable,
5974 # bypassing containers (used by that type) they may be in.
5977 # Used for rendering of a textual program.
def subtrees1(self):
    # Yield only the IMMEDIATE editable subtrees; no recursion.
    for child in self._primary:
        yield child
Nested.subtrees1 = subtrees1

def subtrees1(self):
    # Program expressions live in self[0].
    for child in self[0]:
        yield child
Program.subtrees1 = subtrees1

def subtrees1(self):
    # Argument list (self[0]) first, then body (self[1]).
    for section in (self[0], self[1]):
        for child in section:
            yield child
Function.subtrees1 = subtrees1

def subtrees1(self):
    # For a(b,c), yield only the arguments b and c.
    for child in self[1]:
        yield child
Call.subtrees1 = subtrees1

def subtrees1(self):
    # Both sides of a.b .
    yield self[0]
    yield self[1]
Member.subtrees1 = subtrees1

def subtrees1(self):
    # Condition, 'then' branch, optional 'else' branch.
    yield self[0]
    for child in self[1]:
        yield child
    if not isinstance(self[2], aNone):
        for child in self[2]:
            yield child
If.subtrees1 = subtrees1
## Set.subtrees1 = subtrees1

def subtrees1(self):
    # Container contents live in self[0].
    for child in self[0]:
        yield child
List.subtrees1 = subtrees1
Map.subtrees1 = subtrees1
Tuple.subtrees1 = subtrees1

def subtrees1(self):
    # Empty generator: leaves have no editable children.
    return
    yield None
Immediate.subtrees1 = subtrees1

def subtrees1(self):
    return
    yield None
aNone.subtrees1 = subtrees1
Native.subtrees1 = subtrees1

def subtrees1(self):
    # aList members live in self._primary[0].
    for child in self._primary[0]:
        yield child
aList.subtrees1 = subtrees1
6044 #*** Flat traversal (node only).
def entries(self):
    # Yield the direct children, left to right.
    for child in self._primary:
        yield child
Nested.entries = entries

def entries(self):
    # A leaf is its own single entry.
    yield self
Immediate.entries = entries

def entries(self):
    yield self
aNone.entries = entries
Native.entries = entries

def entries(self):
    # aList members live in self._primary[0].
    for child in self._primary[0]:
        yield child
aList.entries = entries

def entries(self):
    for child in self._primary[0]:
        yield child
List.entries = entries
Program.entries = entries

def num_entries(self):
    # Number of direct members.
    return len(self._primary[0])
List.num_entries = num_entries

def get_child(self, idx):
    # Member at position `idx`.
    return self._primary[0][idx]
List.get_child = get_child
6080 #** searching
def find_exact(tree, search_tree):
    # All nodes of `tree` that are structurally equal (.eql) to
    # `search_tree`.
    return [node for node in tree.top_down() if node.eql(search_tree)]

def find_type(tree, search_class):
    # All nodes of `tree` whose class is exactly `search_class`
    # (subclasses are NOT included).
    return [node for node in tree.top_down()
            if node.__class__ == search_class]

def find_non_nested_matches(tree, pattern):
    # Find matches of pattern in the outer tree, ie., those *not*
    # found inside Map(), Function()
    # Not implemented yet.
    pass

def find_type_non_nested(tree, pattern):
    # Find matches of type in the outer tree, ie., those *not*
    # found inside Function()
    # Not implemented yet.
    pass

def find_all_matches(tree, pattern):
    # Find *all* matches of pattern in tree, including *all* subtrees
    # of tree.
    # Not implemented yet.
    pass
6111 # A sample use may look like this:
6112 # seq_expr = object.seq_expr()
6113 # # Pattern from
6114 # # import reader
6115 # # reload(reader)
6116 # # reader.parse('!! def_name string = ! foo')
6117 # # and modified.
6118 # match_list = find_non_nested_matches(
6119 # seq_expr,
6120 # Set( MarkerTyped( String('def_name'), Symbol('string')),
6121 # Marker('foo')))
6122 # if len(match_list) == 0:
6123 # raise DataDagError, \
6124 # ("In the reference %s.%s, %s has no member %s\n" %
6125 # (first, second, first, second))
6126 # elif len(match_list) > 1:
6127 # raise DataDagError, \
6128 # (("In the reference %s.%s, " +
6129 # "%s has multiple bindings for %s\n") %
6130 # (first, second, first, second))
6131 # else:
6134 #** verify tree structure
def verify_tree(node):
    # Consistency check over the subtree rooted at `node`: every
    # child's _parent field must name its actual parent's id.
    # Problems are reported on stdout; returns None.
    for par, chld, idx in node.top_down_parent():
        if chld != None:
            if chld._parent != par._id:
                # Bug fix: the format arguments must be a single tuple.
                # The old  "..." % chld, par  formatted with only
                # `chld` (TypeError for the two %s slots) and made a
                # throwaway tuple with `par`.  Parenthesized print is
                # valid in both Python 2 and 3.
                print("child (%s) has wrong parent (%s)" % (chld, par))
6142 #** parents
class TreeWork:
    # Helper operations on stored trees; needs a storage instance for
    # id -> node resolution.  Further methods are attached below.
    def __init__(self, storage):
        self._storage = storage

    def parent(self, tree):
        # Load and return the node named by tree._parent.
        return self._storage.load(tree._parent)

def all_parents(self, tree):
    # Yield the parents of `tree`, innermost first, walking upward as
    # long as the parent links resolve.
    while tree._parent:
        up = self.parent(tree)
        if not up:
            break
        yield up
        tree = up
TreeWork.all_parents = all_parents
6160 # Also useful: tree.top_down()
6161 # tw = TreeWork(storage)
6164 #** enclosing elements
def find_first_parent(self, tree, type_t):
    # First (innermost) parent that is an instance of `type_t`;
    # falls through to None when there is no such parent.
    for parent in self.all_parents(tree):
        if isinstance(parent, type_t):
            return parent
TreeWork.find_first_parent = find_first_parent

def find_all_parents(self, tree, type_t):
    # Yield every parent of `tree` that is an instance of `type_t`,
    # innermost first.
    for parent in self.all_parents(tree):
        if isinstance(parent, type_t):
            yield parent
TreeWork.find_all_parents = find_all_parents

def find_root(self, tree):
    # The outermost parent of `tree`; None for an unattached tree.
    root = None
    for root in self.all_parents(tree):
        pass
    return root
TreeWork.find_root = find_root
6188 #** calling context
class ContextDispError(exceptions.Exception):
    # Raised when the calling-context machinery meets an unexpected
    # program structure.
    def __init__(self, args=None):
        self.args = args

class CallContext:
    '''
    CallContext(K, C, O, J)

    The Call K occurs in the clone C of the tree O;
    C was made by Call J.
    '''
    pass

def __repr__(self):
    # Display all four context ids; the attributes K, C, O, J are
    # assigned by the producer (see TreeWork.get_call_ctxt below).
    return "%s(%s, %s, %s, %s)" % (self.__class__.__name__,
                                   self.K,
                                   self.C,
                                   self.O,
                                   self.J)
CallContext.__repr__ = __repr__
6211 #*** get lexical + 1 dynamic (old)
def get_calling_context(self, node):
    # prototype in v.1.1.2.10 of test-calc.py
    '''
    'node' is an integer.

    For a given node,
    return a nested list of (clone_src_id, clone_id) tuples.
    Full structure:
        rv   ::= [ ]
              |= [ path+ ]
        path ::= [ (clone_src_id, clone_id) <, path>* ]

    This list represents (initial lexical + 1 dynamic) execution path
    leading to terminal (actually _used_*) copies of 'node'.

    clone_id:
        the current clone of node

    clone_src_id:
        the maker of the clone

    * In
          def A:
              def B:
    the first clone of B (CB1) is made by A(), but is never executed.
    This clone's parent tree is a Function().

    The second B clone is made by B() and executed.  Its parent is
    therefore a Program().
    '''
    # Rough outline:
    #    starting tree
    #    while 1:
    #       forward one clone (K)
    #       is K inside a
    #       Function():
    #           find first enclosing Program()
    #           add to context (cloned_by attribute)
    #           continue with K as starting tree
    #       Program():
    #           add to context (cloned_by attribute)
    #           break
    #
    # Data possibilities:
    #   node: []
    #   node: clone / path -> clone / path -> 0
    #   node: (clone / path -> (clone / path -> 0))
    node = self._storage.load(node)
    geta = self._storage.get_attribute
    clone_l = geta(node._id, "interp_clone")
    if clone_l:  # int list
        clone_paths_l = []
        for clone_id in clone_l:
            K = self._storage.load(clone_id)
            cloned_by = -1          # sentinel: no branch handled K
            # Is K inside a --
            parent = self.find_first_parent(K, (Function, Program))
            # -- Function?
            if isinstance(parent, Function):
                # Find enclosing Program()
                enclosing_prog = self.find_first_parent(parent, Program)
                if enclosing_prog == None:
                    raise ContextDispError, "Unexpected program structure."
                # Get the context (cloning source)
                cloned_by = (geta(enclosing_prog._id,
                                  "cloned_by"), clone_id)
                if cloned_by[0] == None:
                    raise ContextDispError, "No clone source. "\
                          "Internal error"
                # Recurse: extend the path with K's own contexts.
                cloned_by = [cloned_by] + self.get_calling_context(K._id)
            # -- Program()?
            elif isinstance(parent, Program):
                # load = self._storage.load
                # print "program:cloned_by::", \
                #     load(geta(parent._id, "cloned_by")).calltree_str()
                # print "program:clone_of::", \
                #     load(geta(parent._id, "clone_of"))\
                #     ._binding_name.calltree_str()
                # Get the context (cloning source)
                cloned_by = [(geta(parent._id, "cloned_by"), clone_id)]
                if cloned_by[0][0] == None:
                    raise ContextDispError, "No clone source. "\
                          "Internal error"
            if cloned_by == -1:
                raise ContextDispError, "Unexpected program structure."
            clone_paths_l.append(cloned_by)
        return clone_paths_l
    else:
        return []
TreeWork.get_calling_context = get_calling_context
6313 #*** get full dynamic
def get_call_ctxt(self, node):
    # Get all dynamic call paths reaching 'node'; these are in inverse
    # execution order; starting from an astType's clones, find the
    # execution paths that created them.
    #
    # Returns a list of all traversal paths:
    #     CallContext list list
    #     outer list: one entry per clone of node
    #     inner list: follows the Call chain
    #
    # This list could be merged to a single tree.
    def _loop(clone_id, first = 0):
        # Follow the cloned_by chain starting at clone_id.
        K = self._storage.load(clone_id)
        # Is K inside a --
        K_par = self.find_first_parent(K, (Function, Program, Macro))
        # -- Function?
        if isinstance(K_par, (Function, Macro)):
            # K is an inert clone (no value, not executable).
            if first:
                return [None]
            else:
                # There should be no inert nodes along an execution
                # path.
                raise ContextDispError, "Unexpected program structure."
        # -- Program()?
        elif isinstance(K_par, Program):
            # Get the context (cloning source).
            cc = CallContext()
            cc.K = K._id
            cc.C = K_par._id
            cc.O = geta(cc.C, "clone_of")
            cc.J = geta(cc.C, "cloned_by")
            if cc.J == None:
                return [cc]
            return [cc] + _loop(cc.J)
        # NOTE(review): when K_par is neither branch (e.g. None),
        # _loop falls through and returns None implicitly; the
        # filter(None, ...) below drops such paths -- confirm intended.
    node = self._storage.load(node)
    geta = self._storage.get_attribute
    clone_l = geta(node._id, "interp_clone")
    if clone_l :
        # Reverse each chain into execution order; drop empty paths.
        clone_paths_l = [_loop(clone_id, first = 1)[::-1]
                         for clone_id in clone_l]
        return filter(None, clone_paths_l)
    else:
        return []
    pass
TreeWork.get_call_ctxt = get_call_ctxt
6370 #*** prune to selection
def prune_cctxt(self, cctxt_chains, must_include):
    # Find all chains containing the ids in must_include
    #     must_include ::= (call id) list
    #                      [ id AND id ...]
    #
    # The full case (not dealt with):
    #     must_include ::= (call id) list list
    #     where the meaning is [ [id AND id ...] OR [id AND id ...] ]
    #     The ids must use the call chains' order.
    #
    # This is a full tree pattern match: multiple input lists,
    # multiple match targets.
    #
    # The full calling context list is potentially huge; forming it
    # just to prune it later is really inefficient...
    #
    # The AND lists could differ only in their last entries, requiring
    # either
    #   a. an advanced pruning algorithm
    #   b. brute force
    #
    # The case of brute force is likely sufficient for interactive
    # use.  This simplifies must_include to
    #     must_include ::= (call id) list
    #                      [ id AND id ...]
    if len(must_include) == 0:
        # Nothing required: keep everything (shallow copy).
        return copy(cctxt_chains)
    else:
        _kept_chains = []
        for call_chain in cctxt_chains:
            # Check if ALL entries of must_include are in ANY calling
            # context.
            found = map( lambda _: 0, must_include )
            for cctxt in call_chain:
                # Horribly inefficient...
                if cctxt.K in must_include:
                    found[must_include.index(cctxt.K)] = 1
            if reduce(lambda x,y: x and y, found):
                # All there.
                _kept_chains.append(call_chain)
        return _kept_chains
TreeWork.prune_cctxt = prune_cctxt
6419 #*** Get leaves and values.
def cctxt_leaves(self, cctxt_chains):
    # For every call chain, return (id, interp_result attribute) of
    # the chain's final CallContext's K node.
    geta = self._storage.get_attribute
    result = []
    for chain in cctxt_chains:
        leaf_id = chain[-1].K
        result.append((leaf_id, geta(leaf_id, 'interp_result')))
    return result
TreeWork.cctxt_leaves = cctxt_leaves
6431 #*** displaying
6432 # Strings that taken together provide a meaningful display of the call
6433 # path.
6434 def calltree_str(self):
6435 return str(self._primary[0])
6436 Immediate.calltree_str = calltree_str
6438 def calltree_str(self):
6439 return str(self.__class__)
6440 Nested.calltree_str = calltree_str
6442 def calltree_str(self):
6443 return str(self.__class__)
6444 aList.calltree_str = calltree_str
6446 def calltree_str(self):
6447 operator = self.deref(0)
6448 if isinstance(operator, (Symbol, Member)):
6449 return operator.calltree_str()
6450 else:
6451 raise ContextDispError, "Invalid context: " + str(operator)
6452 Call.calltree_str = calltree_str
6454 def calltree_str(self):
6455 # For a.b, both a and b are assumed Symbols().
6456 return "%s.%s" % (self.deref(0).calltree_str(),
6457 self.deref(1).calltree_str())
6458 Member.calltree_str = calltree_str
6462 #** matching
class MatchFailure(Exception): pass

class Matcher:
    # Tree pattern matcher; the matching methods are attached below.
    def __init__(self, matches = None):
        self._matches = matches or {} # (string -> astType) dict
        self._matches_l = utils.ListDict() # (string -> astType list) dict
def _match_(self, tree, pattern):
    '''
    Traverse tree and pattern, matching nodes.  The matcher nodes
    (! name) match any node, and what they match is bound to `name' in
    the Matcher instance.

    Returns True on a match; raises MatchFailure otherwise.

    OLD:
    e.g.:
        def_env = Env(1, None, None)
        storage = RamMem('root_memory', 0)

        ma = Matcher()
        tree = reader.parse(' hello [short ; list] ')
        tree.setup(None, def_env, storage)
        patt = reader.parse(' hello [!aha ; list] ')
        patt.setup(None, def_env, storage)

        res = ma.match(tree, patt)
        print ma._matches
    '''
    # Trees are assumed to have interface functions
    #     __len__, __getitem__, and __class__
    # Thus, they are compatible with Python arrays
    if pattern.__class__ == tree.__class__: # identical head
        # Test remaining tree
        nc = pattern.__len__()
        if nc != tree.__len__():
            raise MatchFailure
        # special case for childless types -- Immediate()s
        if nc == 0:
            # Immediate() match?
            if pattern.eql(tree): return True
            else: raise MatchFailure
        # Compare ALL children.
        for c in range(0,nc):
            self._match_(tree[c], pattern[c])
        return True
    elif pattern.__class__ == Marker:
        # Take subtree unconditionally
        self._matches[pattern.name()] = tree
        self._matches_l.push(pattern.name(), tree)
        return True
    elif pattern.__class__ == MarkerTyped:
        # Take subtree if it is of correct type.
        # !! name expr  ->  MarkerTyped(name, expr)
        expr = pattern.expr()
        if expr.__class__ == tree.__class__:
            self._matches[pattern.name()] = tree
            self._matches_l.push(pattern.name(), tree)
            return True
        else: raise MatchFailure
    else: # no match
        raise MatchFailure
# ast.Matcher.match = match
Matcher._match_ = _match_
def match(self, tree, pattern):
    '''
    Traverse tree and pattern, matching nodes.  The matcher nodes
    (! name) match any node, and what they match is bound to `name' in
    the Matcher instance.

    Returns self on success; on failure, clears all partial matches
    and returns False.
    '''
    try:
        self._match_(tree, pattern)
        return self
    except MatchFailure:
        self._matches = {}
        self._matches_l = utils.ListDict()
        return False
Matcher.match = match
def match_exp_str(self, tree, pattern_str):
    '''
    Match tree against a single expression given in pattern_str.
    For a sequence, uses the first expression.
    '''
    # Local import to avoid a module-level import cycle with reader.
    from l3lang import reader
    full = reader.parse(pattern_str)
    patt = full._primary[0][0]
    return self.match(tree, patt)
Matcher.match_exp_str = match_exp_str
def get(self, name):
    # Latest subtree bound to `name`, or None when unbound.
    return self._matches.get(name)
Matcher.get = get

def get_all(self, name):
    '''Return the list of matches for `name`.
    '''
    return self._matches_l.get(name)
Matcher.get_all = get_all

def __setitem__(self, name, val):
    # Record `val` as the latest binding for `name` and append it to
    # the full binding list.
    self._matches[name] = val
    self._matches_l.push(name, val)
Matcher.__setitem__ = __setitem__

def __getitem__(self, name):
    # Subscript access; same as .get(name).
    return self.get(name)
Matcher.__getitem__ = __getitem__
6576 # Interferes with class names, and caused the rather cryptic
6577 # TypeError: 'NoneType' object is not callable
6578 # def __getattr__(self, name):
6579 # return self.get(name)
6580 # Matcher.__getattr__ = __getattr__
def construct(self, tree):
    # HERE. add.
    # Construct a new tree from a string and using tree elements stored in
    # the Matcher instance.
    #     new_tree = ma.construct("{ |a,b| !rest }")
    # Not implemented yet; returns None.
    pass
Matcher.construct = construct
6590 #* Outline construction
6592 #** set outline edges to viewList-containing tree
6593 # set_outl_edges
6594 # clears existing state and sets new outline edges; can
6595 # be used after any tree updates or copies.
6596 # Outline edges are NOT needed for interpretation.
def set_outl_edges(self, w_, parent_outline):
    # Clear existing outline state and form fresh edges: new child
    # list, link to/from `parent_outline`, then recurse into children.
    self._outl_children = vaList([]).setup_valist(w_, self)
    if parent_outline is None:
        self._outl_parent = None
    else:
        self._outl_parent = parent_outline
        parent_outline._outl_children.append_child(weakref.proxy(self))
    # Children link back via a weak proxy to avoid reference cycles.
    self._primary[0].set_outl_edges(w_, weakref.proxy(self))
    return self
cls_viewList.set_outl_edges = set_outl_edges

def set_outl_edges(self, w_, parent_outline):
    # Plain Nested nodes add no outline level; forward to children.
    for child in self._primary:
        child.set_outl_edges(w_, parent_outline)
    return self
Nested.set_outl_edges = set_outl_edges

def set_outl_edges(self, w_, parent_outline):
    # Like the viewList version, but children live in self._primary.
    self._outl_children = vaList([]).setup_valist(w_, self)
    if parent_outline is None:
        self._outl_parent = None
    else:
        self._outl_parent = parent_outline
        parent_outline._outl_children.append_child(weakref.proxy(self))
    for child in self._primary:
        child.set_outl_edges(w_, weakref.proxy(self))
    return self
Program.set_outl_edges = set_outl_edges

def set_outl_edges(self, w_, parent_outline):
    # aList: forward to the members.
    for child in self._primary[0]:
        child.set_outl_edges(w_, parent_outline)
    return self
aList.set_outl_edges = set_outl_edges

def set_outl_edges(self, w_, parent_outline):
    # Leaves carry no outline state.
    return self
Immediate.set_outl_edges = set_outl_edges
aNone.set_outl_edges = set_outl_edges
Native.set_outl_edges = set_outl_edges
6652 #** util
def set_outline(self, outline_type):
    # Select the heading display style; returns self for chaining.
    assert outline_type in ['nested', 'subtree', 'flat']
    self._outl_type = outline_type
    return self
cls_viewList.set_outline = set_outline
Program.set_outline = set_outline

def get_outline_type(self):
    # Accessor for the display style set via set_outline().
    return self._outl_type
cls_viewList.get_outline_type = get_outline_type
Program.get_outline_type = get_outline_type
def setup_alist(self, w_, parallel_nd):
    # Return an empty alist, .setup() using the environment of
    # `parallel_nd`.
    storage = w_.state_.storage
    env = storage.get_attribute(parallel_nd._id, "stored_from_env")
    new_list, _ = aList([]).setup(empty_parent(), env, storage)
    return new_list
aList.setup_alist = setup_alist
6677 #** calculate outline heading index
def heading_index(self):
    # Return a (level, index) tuple, both 0-based: `level` is the
    # depth in the outline, `index` the position at that level.
    up = self._outl_parent
    if up:
        level = up.heading_index()[0] + 1
        index = up.outl_find_child(self)
    else:
        level = 0
        index = 0
    return (level, index)
cls_viewList.heading_index = heading_index
Program.heading_index = heading_index

def outl_find_child(self, chld):
    # Position of `chld` among this node's outline children; the
    # children are stored as weak proxies, so compare via a proxy.
    return self._outl_children.index(weakref.proxy(chld))
cls_viewList.outl_find_child = outl_find_child
Program.outl_find_child = outl_find_child
6706 #** iterate top-down
def outl_iter(self, level = 0):
    'Iterate the outline top-down, yielding (level, node) pairs.'
    yield (level, self)
    for child in self._outl_children:
        for pair in child.outl_iter(level = level + 1):
            yield pair
cls_viewList.outl_iter = outl_iter
Program.outl_iter = outl_iter
6717 #** hide below 'level'
def outl_flat_display(self, level):
    # Collapse every heading at depth >= `level` to 'flat' display.
    for depth, node in self.outl_iter():
        if depth >= level:
            node.set_outline('flat')
cls_viewList.outl_flat_display = outl_flat_display
Program.outl_flat_display = outl_flat_display
6726 #** copies
def __deepcopy__(self, memo):
    # A copy must set its own outline cross-references
    # (_outl_children) when needed, but the desired outline type
    # (_outl_type) can be kept.  Strip the outline links before
    # copying so they are not duplicated, and restore them after.
    saved_parent = self._outl_parent
    saved_children = self._outl_children
    self._outl_parent = None
    self._outl_children = None
    duplicate = Nested.__deepcopy__(self, memo)
    # The w_ is not available yet, so the copy's edges stay unset;
    # see set_outl_edges().
    self._outl_parent = saved_parent
    self._outl_children = saved_children
    return duplicate
cls_viewList.__deepcopy__ = __deepcopy__
Program.__deepcopy__ = __deepcopy__
6746 #** persistence
6747 # The only safe persistent format is the ususal tree (no outline).
def __getstate__(self):
    # Persist as a plain tree: outline links and hooks are not saved
    # (the only safe persistent format is the usual tree).
    state = dict(self.__dict__)
    state['_outl_parent'] = None
    state['_outl_children'] = None
    state['_pre_interp_hook'] = None
    return state
cls_viewList.__getstate__ = __getstate__
Program.__getstate__ = __getstate__

def __setstate__(self, stuff):
    # Restore the attribute dict verbatim.
    self.__dict__.update(stuff)
cls_viewList.__setstate__ = __setstate__
Program.__setstate__ = __setstate__
6764 # Apparently (python 2.4), list pickling is special. A python.list
6765 # method
6766 # <built-in method __reduce_ex__ of vaList object at 0xb7da60cc>
6767 # is used to get the tuple
6768 # (<function __newobj__ at 0xb7fb9e9c>,
6769 # (<class 'l3lang.ast.vaList'>,),
6770 # [],
6771 # <listiterator object at 0xb7cf9aec>,
6772 # None)
6773 # and the listiterator is actually used to get the contents.
6775 # See http://www.python.org/dev/peps/pep-0307
6777 # Thus, we must preempt this by providing __reduce_ex__
6778 def valist_restore(*ignore):
6779 return vaList([])
6781 def __reduce_ex__(self, protocol):
6782 return (valist_restore, ())
6783 vaList.__reduce_ex__ = __reduce_ex__
#* Python callback wrapper
class CallBackError:
    # Error raised when an l3 block cannot be used as a Python callback.
    pass

class CallableFunction:
    # Wraps an l3 block as a Python callable; methods are attached below.
    pass

def __init__(self, block, env, storage, block_invocation, arg_index):
    # self._env = env
    self._block = block
    self._storage = storage
    self._block_invocation = block_invocation

    # Incremental evaluation data
    # arg_index is the position of self in another call,
    # E.g., for f(a, self, b), arg_index is 1
    self._arg_index = arg_index
    self._call_count = 0
    # See also CallableFunction.__call__
    storage.ie_.set_timestamp(
        (block_invocation._id, self._call_count, arg_index),
        # ### which timestamp to use?
        storage.ie_.get_timestamp(block_invocation._id))
CallableFunction.__init__ = __init__
def callable_interpret_prep(call_block, args):
    # Prepare the block, evaluate arguments, and provide
    # environments.  Returns (program, eval_env, arg_env).
    # See call_function_prep

    block = call_block._block
    storage = call_block._storage
    block_inv = call_block._block_invocation
    ccount = call_block._call_count
    arg_index = call_block._arg_index

    # Incremental evaluation check: reuse a cached clone if present.
    if storage.ie_.has_clone( (block_inv._id, ccount, arg_index) ):
        cc = storage.ie_.clone_table()
        program = cc[ (block_inv._id, ccount, arg_index) ]
        eval_env, arg_env = cc[( (block_inv._id, ccount, arg_index) , 'envs')]

    else:
        if block.nargs() != len(args):
            raise CallBackError, "Argument count mismatch: " + \
                  str(block) + str(args)

        # ---- Turn block into executable.
        newblock = block.block_copy(storage)
        program = Program(newblock.raw_seq_expr())

        # ---- Set up argument environment.
        if block._binding_name != None:
            arg_env = block._def_env.new_child(
                program, name = block._binding_name.py_string())
        else:
            arg_env = block._def_env.new_child(program, name = "run.arg")

        # ---- Get positional block arguments' names.
        ma = Matcher()
        arg_names = []
        for ba in block.block_args():
            # if not ma.match_exp_str(ba, '!! name symbol'):
            if not ma.match(ba, MarkerTyped(String('name'), Symbol('symbol'))):
                raise InterpreterError, "Invalid argument type: " + str(ba)
            arg_names.append(ma._matches['name'])

        # ---- Bind block argument names to actual arguments.
        ma = Matcher()
        position_index = 0
        for ba in args:
            # To account for repeated external calls, use the call_count as id
            # for the pointer.
            arg_env.bind_ptr(arg_names[position_index].as_index(),
                             ba,  # NOTE(review): this value-argument line was
                                  # lost in source extraction; `ba` inferred
                                  # from the loop -- verify against original.
                             ccount)
            # Incremental evaluation.
            arg_env.bind_time_stamp_ptr(arg_names[position_index].as_index(),
                                        ie_setup_time,
                                        ccount)
            position_index += 1

        # ---- Set up evaluation environment.
        eval_env = arg_env.new_child(program, name = "run.blck")

        # ---- Finish program.
        program.setup(empty_parent(), eval_env, storage)

        #---------------- later interaction
        cross_reference_trees(storage, block, newblock)
        #----------------

        # Incremental evaluation data.
        cc = storage.ie_.clone_table()
        cc[ (block_inv._id, ccount, arg_index) ] = program
        cc[( (block_inv._id, ccount, arg_index) , 'envs')] = eval_env, arg_env
    return program, eval_env, arg_env
def callable_call(self, *args):
    # Run the wrapped l3 block with `args`; returns (value, ie_status).
    # incremental
    # Also see call_real_interpret
    #     if isinstance(block, Function):
    # section
    block = self._block
    storage = self._storage
    ccount = self._call_count
    arg_index = self._arg_index
    block_inv = self._block_invocation

    program, eval_env, arg_env = callable_interpret_prep(self, args)

    # Note: CallableFunction has no _id, but corresponds to
    # Call.  Here, use the Program()'s _id

    # ----------- Data for this block
    # Valid with tail call or without, so this MUST PRECEDE the
    # call to program.interpret(), below.

    storage.push_attributes(block._id, "interp_clone", program._id)
    storage.set_attributes(program._id,
                           "interp_program", program, "interp_env", arg_env)
    # lexical information
    storage.set_attributes(program._id,
                           # lexical information
                           "clone_of", block._id,
                           "interp_env", arg_env,
                           # dynamic information
                           "cloned_by", self._block_invocation._id
                           )  # NOTE(review): closing paren lost in source
                              # extraction; restored here -- verify.

    # ---- Evaluate
    # Affects: Program.interpret, Call.interpret(, storage)
    def finish(rv, ie_status):
        # Record the result only if it is newer than the cached one;
        # otherwise return the cached result and timestamp.
        # Incremental evaluation check.
        if storage.ie_.tree_is_older( (block_inv._id, ccount, arg_index),
                                      ie_status):
            # Interpretation values.
            storage.id2tree(rv, program)
            storage.set_attributes(program._id, "interp_result", rv)

            # Incremental evaluation data.
            storage.ie_.touch( (block_inv._id, ccount, arg_index) )
            storage.ie_.set_timestamp(((block_inv._id, ccount, arg_index),
                                       'value'),
                                      ie_status)
            return rv, ie_status

        else:
            return storage.get_attribute(program._id, "interp_result"), \
                   storage.ie_.get_timestamp((block_inv._id, ccount,
                                              arg_index))
    try:
        # Interpretation
        rv, ie_status = program.interpret(eval_env, storage)

    except Interpret_tail_call, contin:
        # Tail call: queue `finish` to run when the tail chain ends.
        new_tree, _env, _prog_l = contin.args
        _prog_l.append(finish)
        raise
    except Interpret_return, e:
        rv, ie_status = e.args

    return finish(rv, ie_status)
# -----------
def __call__(self, *args):
    # Python-callable entry point; delegates to callable_call and
    # finishes a pending tail call via Call.interpret.
    # Also see Call.interpret
    try:
        try:
            rv, ie_status = callable_call(self, *args)
        finally:
            # Prepare for possible next call; similar to __init__ for
            # the first call.

            # Incremental evaluation data.
            self._call_count += 1
            self._storage.ie_.set_timestamp(
                ( self._block_invocation._id,
                  self._call_count,
                  self._arg_index),
                # ?? use previous call's timestamp? '
                self._storage.ie_.get_timestamp(self._block_invocation._id))

    except Interpret_tail_call, contin:
        tree, env, _prog_l = contin.args
        rv, ie_status = Call.interpret(
            tree, env, self._storage, tail_finishing_progs = _prog_l)

    return rv
CallableFunction.__call__ = __call__
#* I/O
class File_io(astType):
    pass

#* Memory
class Unpickleable:
    # Placeholder stored in place of a value that failed to pickle;
    # see RamMem.set_attributes.
    pass

def __repr__(self):
    return "%s(%s,%r)" % (self.__class__.__name__,
                          self._problem_id,
                          self._problem_str)
Unpickleable.__repr__ = __repr__


def __init__(self, problem_id, problem_str):
    """
    Unpickleable indicates an unpickleable value.
    problem_id:  the value's generating expression
    problem_str: the value's string representation
    """
    self._problem_id = problem_id
    self._problem_str = problem_str
Unpickleable.__init__ = __init__
#* Memory
class Memory:
    # Abstract storage base class; concrete behavior lives in RamMem.
    pass

class RamMem(Memory):
    # In-RAM (id -> value) store; methods are attached below.
    pass

def __init__(self, unique_prefix, starting_index, initial_time = 1):
    # Memory is split into temporary and persistent, but lookup is
    # done identically.
    # Note:
    #     most types contain a reference to their Memory(), so
    #     CONTROLLED PICKLING OF Memory() TYPES IS REQUIRED -- via
    #     __getstate__ etc.
    self._store = {}                    # persistent
    self._store_memory = {}             # temporary
    self._counter = starting_index
    self._unique_prefix = unique_prefix
    ## self._main_programs = []        # type [program * ], persistent

    # Two-level storage for .interpret() use:
    #     id -> key1 -> value1
    #                   value2
    #                   ...
    #           key2 -> value1
    #                   value2
    # (key1 -> ( key2 -> value) dict) dict
    self._attr_tables = {}
    self._id2tree = {}

    # Incremental evaluation.
    self.ie_ = IncEval(initial_time = initial_time)

    # Pickle testing cache.
    self._pickle_tested = {}            # id() -> status dict.
                                        # status ::= None | "pickles"
                                        #          | "no_pickle"

RamMem.__init__ = __init__
def display_text(self, out = sys.stdout, indent = 0, width = 80):
    # Pretty-print the whole store's __dict__ to `out`.
    # out is a stdout - compatible stream
    # NOTE:
    # Full, pretty-printed output is nice, but very tedious w/
    # "manual" indentation.  It also has no interaction possibilities.
    # Better to produce structurally marked-up output in the first
    # place.
    # So cheat here.
    import pprint
    pp = pprint.PrettyPrinter(stream = out, indent = indent, width = width)
    pp.pprint(self.__dict__)
RamMem.display_text = display_text

# def add_program(self, prog):
#     self._main_programs.append(prog)
# RamMem.add_program = add_program
#** Persistence
# pickle/unpickle methods.
def __getstate__(self):
    # Positional state list; order must match __setstate__ below.
    return [self._store,
            self._store_memory,  # NOTE(review): this element's line was lost
                                 # in source extraction; restored to match
                                 # __setstate__'s unpacking -- verify.
            self._counter,
            self._unique_prefix,
            self._attr_tables,
            self._id2tree,
            self.ie_,
            self._pickle_tested,
            ]
RamMem.__getstate__ = __getstate__
RamMem.__real_getstate__ = __getstate__


def __null_getstate__(self):
    ''' Suppress __getstate__ during pickle testing.
    This is faster, and also avoids re-reporting errors for previous
    values.
    '''
    return "RamMem-null-state"
RamMem.__null_getstate__ = __null_getstate__


def __setstate__(self, stuff):
    (self._store,
     self._store_memory,
     self._counter,
     self._unique_prefix,
     self._attr_tables,
     self._id2tree,
     self.ie_,
     self._pickle_tested
     ) = stuff
RamMem.__setstate__ = __setstate__
#** Value access
def store(self, val, env, persistent = True):
    # Store `val` under a fresh id and return that id; `env` is
    # recorded as the environment performing the store.
    # pathname = Path(self._unique_prefix,
    #                 env.full_path(), self._counter)
    self.set_attributes(self._counter, "stored_from_env", env)
    if persistent:
        self._store[ self._counter ] = val
    else:
        self._store_memory[ self._counter ] = val
    self._counter += 1
    return (self._counter - 1)
RamMem.store = store

def load(self, key):
    # Return the value stored under integer `key`; persistent entries
    # take precedence over temporary ones.  Returns None when absent.
    assert isinstance(key, (IntType, LongType))
    return self._store.get(key, self._store_memory.get(key))
RamMem.load = load
def remove(self, key):
    # Remove `key` from both persistent and temporary storage.
    # Returns the previously stored value, or None when absent.
    assert isinstance(key, (IntType, LongType))
    foo = self._store.get(key, self._store_memory.get(key))
    if self._store.has_key(key):
        del self._store[key]
    if self._store_memory.has_key(key):
        # Bug fix: this branch previously did `del self._store[key]`,
        # which left the temporary entry behind and raised KeyError
        # when the key existed only in temporary storage.
        del self._store_memory[key]
    return foo
RamMem.remove = remove
#** id handling
def new_id(self):
    # Hand out a fresh storage id.
    fresh = self._counter
    self._counter = fresh + 1
    return fresh
RamMem.new_id = new_id

def id2tree(self, value, tree):
    # Remember which tree produced the python object `value`;
    # the mapping is id(foo) -> <tree that created foo>.
    self._id2tree[id(value)] = tree
RamMem.id2tree = id2tree

def generator_of(self, id):
    # Return the expression that produced `id`, or None.
    # SUBTLE PYTHON PROBLEM
    # Mapping id(val) may be wrong when val is a string.
    # Python strings may be interned, so multiple distinct strings
    # can have the same id, invalidating the id->tree mapping.
    return self._id2tree.get(id)
RamMem.generator_of = generator_of
#** misc.
def get_type(self, key):
    # Return the class of the value stored under `key`.
    # This can be optimized for real disk access.
    # Bug fix: was `self.deref(self, key).__class__` -- RamMem defines
    # no deref(), and passing `self` twice is wrong for a bound call;
    # `load` is the accessor used everywhere else in this class.
    return self.load(key).__class__
RamMem.get_type = get_type

def _set_counter(self, val):
    """
    Change the id counter to val.  This will break almost any L3 code.
    Useful only for experimentation.
    """
    self._counter = val
RamMem._set_counter = _set_counter
#** interpret() attributes
def set_attributes(self, nid, *args):
    # Bind key/value attribute pairs to node `nid`, optionally pickle-
    # testing 'interp_result' values first.
    # typical call:
    #     self._storage.set_attributes(self._id,
    #                                  "interp_result", rv,
    #                                  "interp_env", eval_env)
    if (len(args) % 2) != 0:
        raise InterpreterError, "expected key/value pairs, got " + \
              str(args)
    kvpairs = [(args[i], args[i+1]) for i in range(0, len(args),2)]
    dct = self._attr_tables.get(nid)
    if not dct:
        dct = {}
        self._attr_tables[nid] = dct
    if not isinstance(dct, DictType):
        raise InterpreterError, "storage key is already bound to non-dict."
    for kk, vv in kvpairs:

        # Check pickleability.
        # For values that don't pickle,
        # - issue a warning
        # - substitute Unpickleable

        if glbl.L3_PICKLE_IMMEDIATELY and (kk == 'interp_result'):
            # Tested already?
            stat = self._pickle_tested.get(id(vv))
            if stat == 'pickles':
                dct[kk] = vv
            elif stat == 'no_pickle':
                dct[kk] = Unpickleable(nid,
                                       self.load(nid).source_string(),
                                       )  # NOTE(review): closing paren lost
                                          # in extraction; restored.
                dct[kk] = vv    # Allow rest of program to run.
                # NOTE(review): the Unpickleable stored above is immediately
                # overwritten by vv -- confirm which assignment is intended.
            else:
                ## import pickle
                import cPickle as pickle
                try:
                    # Swap in the cheap null __getstate__ methods while
                    # pickle-testing, to avoid deep/expensive state dumps.
                    RamMem.__getstate__ = RamMem.__null_getstate__
                    Env.__getstate__ = Env.__null_getstate__
                    try:
                        ps = pickle.dumps(vv, protocol=2)
                        if glbl.L3_PICKLE_SIZE:
                            glbl.logger.info("%d, %s: %d bytes" %
                                             (nid, kk, len(ps)))
                        del ps
                    except Exception, e:
                        glbl.logger.warn(
                            'Value from interpretation of %d does not pickle.'
                            '\n%d: %r'
                            % (nid, nid, self.load(nid).source_string()))
                        glbl.logger.warn('Message was: %s' % e)
                        dct[kk] = Unpickleable(nid,
                                               self.load(nid).source_string(),
                                               )  # NOTE(review): paren
                                                  # restored, as above.
                        dct[kk] = vv    # Allow rest of program to run.
                        self._pickle_tested[id(vv)] = 'no_pickle'
                    else:
                        dct[kk] = vv
                        self._pickle_tested[id(vv)] = 'pickles'
                finally:
                    RamMem.__getstate__ = RamMem.__real_getstate__
                    Env.__getstate__ = Env.__real_getstate__
        else:
            dct[kk] = vv
RamMem.set_attributes = set_attributes
def get_attribute_table(self, id):
    # Return (creating on demand) the key -> value table for node `id`.
    dct = self._attr_tables.get(id)
    if not dct:
        dct = {}
        self._attr_tables[id] = dct
    return dct
RamMem.get_attribute_table = get_attribute_table

def get_attributes(self, id, key_list):
    # Return the values for `key_list` (None for missing keys).
    # typical call:
    #     self._storage.get_attributes(self._id, ['foo'])
    assert type(key_list) == ListType
    dct = self._attr_tables.get(id)
    if not dct:
        dct = {}
        self._attr_tables[id] = dct
    return [dct.get(key)
            for key in key_list ]
RamMem.get_attributes = get_attributes

def get_attribute(self, id, key):
    # Return the single value bound to (id, key), or None.
    dct = self._attr_tables.get(id)
    if not dct:
        dct = {}
        self._attr_tables[id] = dct
    return dct.get(key)
RamMem.get_attribute = get_attribute

def get_attribute_names(self, id):
    # Return the attribute keys bound to node `id`.
    #     self._storage.get_attribute_names(self._id)
    dct = self._attr_tables.get(id)
    if not dct:
        dct = {}
        self._attr_tables[id] = dct
    return dct.keys()
RamMem.get_attribute_names = get_attribute_names
def push_attributes(self, id, key, val):
    # Append val to value list of
    #     id -> key -> value list
    # from self._attr_tables
    curr = self.get_attribute(id, key)  # the entry, None or [val1,...]
    if curr:
        curr.append(val)
    else:
        self.set_attributes(id, key, [val])
RamMem.push_attributes = push_attributes

def get_clones(self, id):
    # Return a list of trees.
    return [self.load(tree_id)
            for tree_id in self.get_attribute(id, "interp_clone")]
RamMem.get_clones = get_clones

def get_leaf_clones(self, orig_id, leaves = []):
    #
    # Find the outermost clones of orig_id.
    # Returns a list of clones for single functions/loops.
    # For functions/loops lexically nested N deep, returns list of
    # lists, nested N deep.
    #
    # NOTE: the mutable default `leaves` is only read, never mutated,
    # so the shared-default pitfall does not bite here.
    clones = lambda oid : self.get_attribute(oid, "interp_clone")
    maybe = clones(orig_id)
    if maybe != None:
        if clones(maybe[0]) != None:
            return [self.get_leaf_clones(new_id, maybe) for new_id in maybe]
        else:
            return maybe
    else:
        return leaves
RamMem.get_leaf_clones = get_leaf_clones
#* pds
# These interfaces may not be needed...
class PdsInterface:
    """ General PDS model interface.
    Every method raises InterfaceOnly; subclasses supply behavior.
    """
    def __init__(self,dirname="ast-test", create = 0):
        raise InterfaceOnly

    def set(self, key, val, overwrite = 0):
        raise InterfaceOnly

    def get(self, key, type=None):
        raise InterfaceOnly

    def delete(self, key):
        raise InterfaceOnly

    def commit(self):
        raise InterfaceOnly

    def abort(self):
        raise InterfaceOnly
class RamIO(PdsInterface):
    """ Simulated PDS interface in memory; fast and simple for
    experimentation.
    """
    def __init__(self, dirname="ast-test", create = 0):
        self._dirname = dirname
        self._dir = {}

    def set(self, key, val, overwrite = 0):
        # Bug fix: `overwrite` was declared but ignored, so every
        # rebinding raised OverwriteError.  Honor the flag, matching
        # the PdsInterface.set signature.
        if self._dir.has_key(key) and not overwrite:
            raise OverwriteError
        self._dir[key] = val

    def get(self, key, type=None):
        return self._dir[key]
#* Evaluation environment
class NoValue:
    '''Substitute for None inside class Env'''
    pass

class Env:
    # Lexical evaluation environment; methods are attached below.
    pass

def __init__(self, id, definition_env, program, storage, name = "anonymous"):
    self._primary = (id, definition_env, program)
    if definition_env != None:
        self._def_env_id = definition_env._id
    else:
        self._def_env_id = None
    self._env_subid = id                # local id
    self._program = program             # _id may not be available yet...

    # The Env() storage is a ( name -> val ) map, but with an indirection to
    # get the "current" value:
    #
    #     (name, 'ptr')               -> set_id
    #     (name, set_id)              -> val
    #     (name, 'ie_status', set_id) -> status
    #
    # self._bindings:
    #     (name, 'ptr')               -> set_id    # current index
    #     (name, set_id)              -> val       # values
    #     (name, 'ie_status', set_id) -> time      # time stamps
    #
    #     (name -> val)
    #     (name, 'ie_status') -> timestamp
    # dict, accessed via Env.bind()
    self._bindings = {}

    self._bindings_memory = {}          # session-only bindings (not pickled)
    self._sub_env_id_count = 0
    self._children = []                 # Env list, formed by new_child()

    # Directory linking support.
    self._dir_stack = []                # Working directory stack.
    self._dir_known = {}                # files already encountered.

    # (name -> tree_id) dict, accessed via Env.bind_id()
    # tree_id is usually the Symbol's id in the Set(Symbol(), ...)
    # tree making the assignment.
    self._bindings_ids = {}

    self._id = storage.store(self, None)    # global id
    self._storage = storage
    self._name = name
Env.__init__ = __init__
#** environment examination
def all_bindings(self):
    """ Return all bindings in this environment in a new dictionary.
    """
    merged = dict(self._bindings)
    merged.update(self._bindings_memory)
    return merged
Env.all_bindings = all_bindings

def all_lexical_bindings(self, dct):
    """ Put all bindings in this environment and all lexically
    enclosing ones into the given dictionary.

    Additions are made inwards from the outermost environment to
    preserve binding order.
    """
    outer_id = self._def_env_id
    if outer_id is not None:
        self._storage.load(outer_id).all_lexical_bindings(dct)

    dct.update(self._bindings)
    dct.update(self._bindings_memory)
    return dct
Env.all_lexical_bindings = all_lexical_bindings

def children(self):
    # The child Env list itself (not a copy).
    return self._children
Env.children = children
def dump(self,
         show_prog = 1,
         show_vals = 1,
         ):
    # Return a nested list-of-pairs view of this Env and its children.
    rv = [('_name', self._name),
          ('_id', self._id),
          ]
    if self._program and show_prog:
        rv.append( ('_program._id', self._program._id) )

    if show_vals:
        rv.append([
            ('_bindings', self._bindings),
            ('_bindings_ids', self._bindings_ids),
            ])

    # Recurse via the module-level name `dump` (defined above, before
    # it is attached to Env).
    rv.append([ dump(child, show_prog, show_vals)
                for child in self._children ])
    return rv
Env.dump = dump

def print_tree(self, indent = ""):
    #
    # Print a nested view of this environment's entries, including only
    # - key
    # - Set() id
    #
    # This Env.
    print indent, "%s-%d" % (self._name, self._id), '['

    # Local entries.
    indnt = indent + "    "
    for ky in self.all_bindings().iterkeys():
        if len(ky) == 2:
            if isinstance(ky[1], IntType):
                # (name, set_id)?  Show name, set_id.
                # Get fixed column for set_id
                tree_s = "%s %s" % (indnt, ky[0])
                print "%-40s %s" % (tree_s, ky[1])
    # Children.
    for child in self._children:
        child.print_tree(indent = indnt)

    print indent, ']'
Env.print_tree = print_tree
def print_tree_tex(self, indent = "", out = sys.stdout, first = 1,
                   ignore_skel = 0):
    # FIXME: update for 'ptr' indirection!
    #
    # Print a filtered view of this environment and its contents.
    # Filter: - values
    #         - tuple keys
    # Output is LaTeX using a `directory` environment.

    print self._name.startswith("skel"), \
          ignore_skel, \
          ignore_skel and self._name.startswith("skel")

    if ignore_skel and self._name.startswith("skel"):
        print "ignored"
        return

    # This Env.
    if first:
        print >> out, indent, \
              r"""\begin{directory}[3in]{%s-%d}""" % (self._name, self._id)
    else:
        print >> out, indent, \
              r"""\file{\begin{directory}{%s-%d}""" % \
              (self._name, self._id)

    # Local entries.
    indnt = indent + "    "
    for ky in self.all_bindings().iterkeys():
        if isinstance(ky, TupleType):
            continue
        print >> out, indnt, r"\file{%s}" % ky

    # Children.
    for child in self._children:
        child.print_tree_tex(indent = indnt, out = out, first = 0,
                             ignore_skel = ignore_skel,
                             )  # NOTE(review): closing paren lost in
                                # extraction; restored.

    if first:
        print >> out, indent, '\end{directory}'
    else:
        print >> out, indent, '\end{directory}}'
Env.print_tree_tex = print_tree_tex
def get_tree(self):
    # FIXME: update for 'ptr' indirection!
    #
    # Return a FILTERED view of this environment and its contents.
    # Filtered out: - values (keys only)
    #               - tuple keys

    # Local entries.
    subl = []
    for ky in self.all_bindings().iterkeys():
        if isinstance(ky, TupleType):
            continue
        subl.append(ky)

    # Children.
    for child in self._children:
        subl.append(child.get_tree())

    # Combine with this Env.
    return ("%s-%d" % (self._name, self._id), subl)
Env.get_tree = get_tree


def get_dynamic_subtrees(self):
    # FIXME: update for 'ptr' indirection!
    #
    # Return a FILTERED view of this environment and its contents.
    # Filter: - values (keys only)
    #         - tuple keys
    #         - anonymous envs
    #         - skel.arg
    #         - skel.blck
    #
    # Structure:
    #     direc   ::= ( dirname, content )
    #     content ::= [ (name | direc)* ]
    #
    # Note: - the first environment is always returned (even if anonymous)
    #       - Program.directory() can convert this structure.

    subl = []

    # Children.
    for child in self._children:
        if child._name in ["skel.arg", "skel.blck", "anonymous"]:
            continue
        subl.append(child.get_dynamic_subtrees())

    # Local entries.
    for ky in self.all_bindings().iterkeys():
        if isinstance(ky, TupleType):
            continue
        subl.append(ky)

    # Combine with this Env.
    return ("%s-%d" % (self._name, self._id), subl)
Env.get_dynamic_subtrees = get_dynamic_subtrees
def all_bindings_recursive(self, level = 0):
    # FIXME: update for 'ptr' indirection!
    #
    # Yield (key, value, Env(), level) tuples for all bindings in this
    # Env() and its children.
    for key, val in self._bindings.iteritems():
        yield key, val, self, level

    # for key, val in self._bindings_ids.iteritems():
    #     yield key, val, self, level

    for child in self._children:
        for substuff in child.all_bindings_recursive(level + 1):
            yield substuff
Env.all_bindings_recursive = all_bindings_recursive

def new_child(self, program, name = "anonymous"):
    # Create, register and return a child Env whose defining
    # environment is self.
    child = Env(self.new_env_id(), self, program, self._storage,
                name = name)
    self._children.append(child)
    return child
Env.new_child = new_child
#** persistence
def __getstate__(self):
    # Pickle everything except the session-only in-memory bindings.
    from copy import copy
    dct = copy(self.__dict__)
    del dct['_bindings_memory']
    return dct
Env.__getstate__ = __getstate__
Env.__real_getstate__ = __getstate__

def __null_getstate__(self):
    ' Suppress __getstate__ during pickle testing. '
    return "Env-null-state"
Env.__null_getstate__ = __null_getstate__


def __setstate__(self, stuff):
    self.__dict__.update(stuff)
    # Recreate the in-memory table dropped by __getstate__.
    self._bindings_memory = {}
Env.__setstate__ = __setstate__

# Give a new id for ENCLOSED environments (not self)
def new_env_id(self):
    self._sub_env_id_count += 1
    return self._sub_env_id_count
Env.new_env_id = new_env_id
def __repr__(self):
    # Compact form: ClassName(id, defining-env, shortened program).
    def short_repr(obj):
        try:
            ss = "[Program at 0x%x, id %d]" % (id(obj), obj._id)
        except AttributeError:
            ss = str(obj)
        if len(ss) > 30:
            # NOTE(review): ss[-10:-1] drops the final character;
            # possibly ss[-10:] was intended -- confirm before changing.
            ss = ss[0:10] + ' ... ' + ss[-10:-1]
        return ss
    return self.__class__.__name__ + \
           ( '(%d, %s, %s)' %
             (self._id, self._primary[1], short_repr(self._primary[2])))
Env.__repr__ = __repr__

def __str__(self):
    return self.__class__.__name__ + \
           ( '-%d-%s' % (self._id, self._name))
Env.__str__ = __str__
#** Interpretation support
def _clr_wrn(self, name, val):
    # Clear any existing binding for `name` (and its 'ie_status' entry)
    # from both binding tables, warning when a non-external definition
    # is being overridden.
    if glbl.L3_TRACE_BIND:
        sys.stderr.write("trace: binding " + name + '\n')
        utils.callchain()

    def warn():
        stuff = self.ie_lookup_1(name)
        if stuff != None:
            if stuff[1] == ie_external_time:
                pass
            else:
                # Bug fix: this used `% locals()`.  In Python 2, `val`
                # is referenced here only inside the format string, so
                # it is not a free variable of warn() and does not
                # appear in locals() -- the warning raised KeyError.
                # Pass the mapping explicitly instead.
                sys.stderr.write("warning: overriding definition of %(name)s:\n"
                                 "warning: original: %(stuff)s\n"
                                 "warning: new:      %(val)s\n"
                                 % {'name': name, 'stuff': stuff, 'val': val})
                sys.stderr.flush()

    if self._bindings.has_key(name):
        warn()
        del self._bindings[name]
        try:
            del self._bindings[ (name, 'ie_status') ]
        except KeyError:
            pass

    if self._bindings_memory.has_key(name):
        warn()
        del self._bindings_memory[name]
        try:
            # Time stamps are kept in _bindings (see bind_time_stamp_ptr),
            # so the deletion targets _bindings here as well.
            del self._bindings[ (name, 'ie_status') ]
        except KeyError:
            pass
Env._clr_wrn = _clr_wrn
def bind(self, name, val):
    # Name
    #     bind(name, val)
    # Description
    #     Make VAL available under NAME in this Env.
    #
    #     This function should be used in conjunction with
    #     bind_time_stamp; see Env.import_names
    #
    assert name.__class__ == StringType     # No subclasses allowed.
    # assert isinstance(name, StringType)   # Allows subclasses...
    self._clr_wrn(name, val)
    self._bindings[name] = val
Env.bind = bind

def bind_id(self, name, id):
    # Record the tree id that defines `name`; see _bindings_ids.
    assert name.__class__ == StringType     # No subclasses allowed.
    # assert isinstance(name, StringType)   # Allows subclasses...
    self._bindings_ids[name] = id
Env.bind_id = bind_id

def bind_mem_only(self, name, val):
    # Session-only version of bind(); val is not pickled.
    # # (probably) No longer needed.
    # # return self.bind(name, val)
    assert name.__class__ == StringType
    self._clr_wrn(name, val)
    self._bindings_memory[name] = val
Env.bind_mem_only = bind_mem_only

# HERE.  the __hash__ and __cmp__ (p. 35) methods must work properly
# for Immediate()s
#** data flow
def bind_df(self, name, val):
    # Make VAL available under NAME in this Env, as dataflow
    # information only.  Stored under the (name, "df") key.
    assert name.__class__ == StringType     # No subclasses allowed.
    self._clr_wrn((name, "df"), val)
    self._bindings[(name, "df")] = val
Env.bind_df = bind_df

def bind_id_df(self, name, id):
    # Dataflow-only variant of bind_id.
    assert name.__class__ == StringType     # No subclasses allowed.
    self._bindings_ids[(name, "df")] = id
Env.bind_id_df = bind_id_df
#** rebinding semantics (for Set())
# Allows multiple bindings to a single name, with an index to current
# binding.

# NOTE(review): sentinel set_id; its use is not visible in this file
# region -- confirm before relying on it.
ptr_external_id = -111

def bind_ptr(self, name, val, set_id):
    #
    # Make `val` available under `name` in this Env,
    # - AND -
    # set the pointer for `name` to this `val`.  Prior values are
    # kept, but this one is retrieved by default.
    #
    # This allows Set() and Symbol() to simulate imperative semantics.
    #
    assert name.__class__ == StringType     # No subclasses allowed.
    # Add the new value.
    self._clr_wrn( (name, set_id), val)
    self._bindings[ (name, set_id) ] = val
    self.set_ptr(name, set_id)
Env.bind_ptr = bind_ptr

def bind_mem_only_ptr(self, name, val, set_id):
    #
    # In-memory only version of bind_ptr; `val` is not retained
    # accross sessions.
    #
    assert name.__class__ == StringType     # No subclasses allowed.
    # Add the new value.
    self._clr_wrn( (name, set_id), val)
    self._bindings_memory[ (name, set_id) ] = val
    self.set_ptr(name, set_id)
Env.bind_mem_only_ptr = bind_mem_only_ptr

def set_ptr(self, name, set_id):
    # Set the current pointer for `name` to `set_id`
    assert isinstance(set_id, (IntType, LongType)), "An l3 id must be integer."
    self._bindings[ (name, 'ptr') ] = set_id
Env.set_ptr = set_ptr
def lookup_ptr(self, name):
    """
    Find the *current* `name` in self or any enclosing *defining* (not
    calling) environment.
    """
    # Two-step: pointer -> set_id, then (name, set_id) -> value.
    set_id = self.lookup( (name, 'ptr') )
    return self.lookup( (name, set_id) )
Env.lookup_ptr = lookup_ptr

def lookup_ptr_1(self, name):
    """
    Find the *current* `name` in self or raise KeyError.
    """
    set_id = self.lookup_1( (name, 'ptr') )
    return self.lookup_1( (name, set_id) )
Env.lookup_ptr_1 = lookup_ptr_1

def bind_time_stamp_ptr(self, name, time, set_id):
    'Set the time stamp for the `name` produced by Set with id `set_id`.'
    try:
        self.lookup_ptr_1(name)
    except KeyError:
        # Timestamping a name with no value is logged, not fatal.
        glbl.logger.error("Timestamping nonexistent name '" +
                          name + "'\n")
    self._bindings[ (name, 'ie_status', set_id) ] = time
Env.bind_time_stamp_ptr = bind_time_stamp_ptr
def ie_lookup_ptr(self, name):
    """
    Incremental evaluation lookup.
    Find CURRENT name in self or any enclosing *defining* (not
    calling) environment.
    Returns:
        None when no binding is found
        (value, timestamp) otherwise

    Export members starting with 'l3_'
    """
    # Export members starting with l3_
    if name.startswith('l3_'):
        return getattr(self, name), ie_external_time

    # got pointer?
    if self._has_key( (name, 'ptr') ):
        set_id = self._get_val( (name, 'ptr') )
        # got value for pointer?
        if self._has_key( (name, set_id) ):
            # Time stamps live in _bindings only (see bind_time_stamp_ptr).
            status = self._bindings.get( (name, 'ie_status', set_id) )
            if status is None:
                status = ie_external_time   # Force eval of parent
            return self._get_val((name, set_id)), status
        else:
            raise InterpreterError(
                "Found ptr to %s, but %s has no value -- internal error." %
                (name, name))
    else:
        if self._def_env_id is None:
            return None
        else:
            # Continue in the lexically enclosing (defining) Env.
            return self._storage.load(self._def_env_id).ie_lookup_ptr(name)
Env.ie_lookup_ptr = ie_lookup_ptr
def dict_ie_lookup_ptr(dct, name):
    # Plain-dict analog of Env.ie_lookup_ptr: follow the (name, 'ptr')
    # indirection in `dct` and return (value, status); fall back to
    # attribute access when no pointer entry exists.
    set_id = dct.get( (name, 'ptr') )
    if set_id is not None:
        # Bug fix: the original assigned `val` but then asserted and
        # returned an undefined name `v`, raising NameError at runtime.
        val = dct.get( (name, set_id) )
        assert (val is not None)    # ptr w/o value -- internal error.
        status = dct.get( (name, 'ie_status', set_id) )
        if status is None:
            status = ie_external_time   # Force eval of parent
        return val, status

    else:
        return (getattr(dct, name), ie_external_time)
        # Note: Using
        #     return (dct.get(name), ie_external_time)
        # fails; member functions are ignored.
#** lookup functions
def _get_val(self, key):
    """(Internal) Return the value bound to `key` in this Env only.

    Live bindings are checked before memory-only bindings.
    Raises KeyError when neither dict holds `key`.
    """
    # `in` instead of the deprecated dict.has_key() (removed in
    # Python 3; equivalent on Python 2 dicts).
    if key in self._bindings:
        return self._bindings[key]
    if key in self._bindings_memory:
        return self._bindings_memory[key]
    raise KeyError("No value found for %s" % key)
Env._get_val = _get_val
def _has_key(self, key):
    """(Internal) True when `key` is bound in this Env only (parents
    are not searched).  A value can then be retrieved via _get_val().
    """
    # Single boolean expression; `in` replaces deprecated has_key().
    return key in self._bindings or key in self._bindings_memory
Env._has_key = _has_key
def lookup(self, name):
    """
    Find name in self or any enclosing *defining* (not calling)
    environment.
    Raise KeyError if no binding was found.
    """
    # Walk the chain of defining environments iteratively.
    env = self
    while True:
        if env._has_key(name):
            return env._get_val(name)
        if env._def_env_id is None:
            raise KeyError("No value found for %s" % name)
        env = env._storage.load(env._def_env_id)
Env.lookup = lookup
def lookup_status(self, name):
    """
    Find name in self or any enclosing *defining* (not calling)
    environment.
    Return (status, binding); status is 1 when the binding was found,
    else (0, None).
    """
    # Iterative walk up the defining-environment chain.
    env = self
    while True:
        if env._has_key(name):
            return (1, env._get_val(name))
        if env._def_env_id is None:
            return (0, None)
        env = env._storage.load(env._def_env_id)
Env.lookup_status = lookup_status
def lookup_symbol_id(self, name):
    """Return the symbol id bound to `name`, searching this Env and its
    defining (not calling) environments; None when no binding exists.
    """
    sym_id = self._bindings_ids.get(name)
    # `is not None` instead of `!= None` (identity test for the
    # sentinel; avoids __eq__ dispatch).
    if sym_id is not None:
        return sym_id
    if self._def_env_id is None:
        return None
    return self._storage.load(self._def_env_id).lookup_symbol_id(name)
Env.lookup_symbol_id = lookup_symbol_id
def lookup_1(self, name):
    """
    Find name in self only (no parent search).
    Raise KeyError when absent.
    """
    if not self._has_key(name):
        raise KeyError("No value found for %s" % name)
    return self._get_val(name)
Env.lookup_1 = lookup_1
def full_path(self, stack=None):
    """Return the list of _env_subid values from the root Env down to
    this one.

    `stack` is unused; it is kept for interface compatibility.  The
    original default was a mutable [] (a classic Python pitfall); a
    None default is safe and indistinguishable to callers.
    """
    parent_id = self._def_env_id
    if parent_id is None:
        return [self._env_subid]
    # Root-first: parent's path, then this Env's sub-id.
    return self._storage.load(parent_id).full_path() + [self._env_subid]
Env.full_path = full_path
#** Incremental evaluation
# Incremental evaluation.
def bind_time_stamp(self, name, time):
    # Record an evaluation time stamp for `name`'s binding.  The
    # (name, 'ie_status') binding is needed externally, hence visible.
    # Stamping a name with no value is logged but not fatal.
    try:
        self.lookup_1(name)
    except KeyError:
        glbl.logger.error("Timestamping nonexistent name '" +
                          name + "'\n")
    self._bindings[ (name, 'ie_status') ] = time
Env.bind_time_stamp = bind_time_stamp
# Time binding is separate from name binding to keep those program
# parts separated -- necessary when storing in dicts() and across
# multiple lists.

def ie_lookup(self, name):
    """
    Incremental evaluation lookup.
    Find name in self or any enclosing *defining* (not calling)
    environment.
    Returns:
        None when no binding is found
        (value, timestamp) otherwise
    """
    if glbl.L3_TRACE:
        l3_trace("ie_lookup", str(self))
    # Not bound here: recurse into the defining environment, if any.
    if not self._has_key(name):
        if self._def_env_id is None:
            return None
        return self._storage.load(self._def_env_id).ie_lookup(name)
    stamp = self._bindings.get( (name, 'ie_status') )
    if stamp is None:
        stamp = ie_external_time    # Force eval of parent
    if glbl.L3_TRACE:
        l3_trace("ie_lookup", "found %s" % name)
    return self._get_val(name), stamp
Env.ie_lookup = ie_lookup
def ie_lookup_1(self, name):
    """
    Find name in self only (no parent search).
    Return:
        None when no binding is found
        (value, timestamp) otherwise
    """
    if not self._has_key(name):
        return None
    stamp = self._bindings.get( (name, 'ie_status') )
    if stamp is None:
        stamp = ie_external_time    # Force eval of parent
    return self._get_val(name), stamp
Env.ie_lookup_1 = ie_lookup_1
def find_unstamped(self):
    """Return [(name, value)] for bindings lacking a (name, 'ie_status')
    time stamp.

    Tuple keys are internal bookkeeping entries (ptr / ie_status /
    set-id records), so only plain keys count as binding names.
    """
    # Use the builtin `tuple` instead of a function-local
    # `from types import TupleType` (TupleType is gone in Python 3;
    # `tuple` is the same object on Python 2).  `in` replaces the
    # deprecated has_key().
    dct = self._bindings
    unstamped = []
    for key, val in dct.items():
        if isinstance(key, tuple):
            continue
        if (key, 'ie_status') in dct:
            continue
        unstamped.append( (key, val) )
    return unstamped
Env.find_unstamped = find_unstamped
#** python connection
def import_all_names(self, module):
    """
    Import all Python names (not __...__ specials) from `module` --
    a module name string, a dict, or a module object -- into this
    environment.  All are given an "external" time stamp.
    """
    if isinstance(module, StringType):
        # Named module: import it, then pull every public attribute.
        exec('import ' + module)
        for nm in dir(eval(module)):
            if nm.startswith('__'):
                continue
            self.bind_mem_only_ptr(nm, eval(module + '.' + nm),
                                   ptr_external_id)
            self.bind_time_stamp_ptr(nm, ie_external_time, ptr_external_id)
    elif isinstance(module, DictType):
        # Plain namespace dict.
        for nm in module.iterkeys():
            if nm.startswith('__'):
                continue
            self.bind_mem_only_ptr(nm, module[nm], ptr_external_id)
            self.bind_time_stamp_ptr(nm, ie_external_time, ptr_external_id)
    else:
        # Module (or other) object: read attributes from its __dict__.
        for nm in dir(module):
            if nm.startswith('__'):
                continue
            self.bind_mem_only_ptr(nm, module.__dict__[nm],
                                   ptr_external_id)
            self.bind_time_stamp_ptr(nm, ie_external_time,
                                     ptr_external_id)
Env.import_all_names = import_all_names
def import_names(self, module, name_list):
    """
    Import the Python names listed in `name_list` from `module` (a
    module name string or a module object) into this environment,
    each with an "external" time stamp.
    """
    if isinstance(module, StringType):
        exec('import ' + module)
        for name in dir(eval(module)):
            if name in name_list:
                self.bind_mem_only_ptr(name, eval(module + '.' + name),
                                       ptr_external_id)
                self.bind_time_stamp_ptr(name, ie_external_time,
                                         ptr_external_id)
    else:
        for name in dir(module):
            if name in name_list:
                # Bug fix: the original built eval(module + '.' + name)
                # here, which raises TypeError when `module` is a module
                # object rather than a string.  Fetch the attribute
                # directly (matching import_all_names' object branch).
                self.bind_mem_only_ptr(name, getattr(module, name),
                                       ptr_external_id)
                self.bind_time_stamp_ptr(name, ie_external_time,
                                         ptr_external_id)
Env.import_names = import_names
def import_module(self, module):
    """
    Import the named Python module into this environment.
    As with Python's `import a.b`, only the leading package name
    becomes a binding.
    """
    assert isinstance(module, StringType)
    exec('import ' + module)
    root = module.split(".")[0]
    self.bind_mem_only_ptr(root, eval(root), ptr_external_id)
    self.bind_time_stamp_ptr(root, ie_external_time, ptr_external_id)
Env.import_module = import_module
def import_def(self, name, py_global):
    """
    Import the Python binding `name`, evaluated in the `py_global`
    namespace, into this environment with an external time stamp.
    """
    assert isinstance(name, StringType)
    value = eval(name, py_global)
    self.bind_mem_only_ptr(name, value, ptr_external_id)
    self.bind_time_stamp_ptr(name, ie_external_time, ptr_external_id)
Env.import_def = import_def
def import_external(self, name, obj):
    """
    Import `obj` into this environment as a constant, stamped with an
    external time.
    """
    self.bind_mem_only_ptr(name, obj, ptr_external_id)
    self.bind_time_stamp_ptr(name, ie_external_time, ptr_external_id)
Env.import_external = import_external
# `import_constant` is an alias for the same operation.
Env.import_constant = import_external
#** shell connection
def into_directory(self):
    # os.chdir into the directory corresponding to `self`, creating it
    # first when necessary.  The previous working directory is pushed
    # on self._dir_stack (popped by outof_directory); returns the
    # directory's name.
    subdir = 'Subdir-' + str(self._id)
    if not os.path.isdir(subdir):
        glbl.logger.info("Adding subdirectory %s\n" % subdir)
        os.mkdir(subdir)
    self._dir_stack.append(os.getcwd())
    os.chdir(subdir)
    self._dir_name = subdir
    return subdir
Env.into_directory = into_directory
def directory_name(self):
    # Name of the shell subdirectory associated with this Env; must
    # agree with the name used by into_directory().
    return 'Subdir-' + str(self._id)
Env.directory_name = directory_name
Env.l3_dirname = directory_name
def outof_directory(self):
    """
    Collect new file information from the current directory, bind each
    new file's absolute path in this environment (under both the raw
    file name and an identifier-safe name), and change back to the
    previous working directory.
    """
    known = self._dir_known
    visited = {}

    # File names need to be accessible as l3 identifiers. For this,
    # characters other than [A-Z][a-z][0-9]_ are mapped to the _;
    # [todo: name collisions are avoided via a number suffix if necessary.]
    #
    # To get consistent renaming, file names could be traversed in
    # alphabetical order, but this would not help when using
    # incremental evaluation.
    regex = re.compile(r'[^A-Za-z0-9_]', re.IGNORECASE)
    for name in os.listdir(os.getcwd()):
        id_name = regex.sub('_', name)
        if known.has_key(name): continue
        if known.has_key(id_name):
            # Bug fix: the original passed one tuple argument for two
            # %s placeholders, which fails when the record is formatted;
            # logging expects the arguments separately.
            glbl.logger.warn("File %s maps to already used identifier %s.",
                             name, id_name)
        # Only include new files. [todo: warn about modified files]
        self.bind_ptr(name, String(os.path.abspath(name)), self._id)
        self.bind_time_stamp_ptr(name, self._storage.ie_.time(), self._id)
        # Provide identifier bindings
        self.bind_ptr(id_name, String(os.path.abspath(name)), self._id)
        self.bind_time_stamp_ptr(id_name, self._storage.ie_.time(), self._id)
        visited[name] = None
    known.update(visited)

    # Restore working directory.
    prev = self._dir_stack.pop()
    os.chdir(prev)

    return
Env.outof_directory = outof_directory
#** interactive use shortcuts
# Display a binding's value (file content), shell-style:
Env.cat = Env.ie_lookup_ptr
# File listing, shell-style:
Env.ls = Env.print_tree