2 # Author: Michael H. Hohn (mhhohn@lbl.gov)
4 # Copyright (c) 2006, The Regents of the University of California
6 # See legal.txt and license.txt
10 from __future__
import generators
# yield
11 import pdb
, exceptions
, sys
, os
, weakref
13 from copy
import deepcopy
, copy
14 from pprint
import pprint
17 import l3lang
.globals as glbl
18 from l3lang
import utils
35 """ Produce the assignment sequence for a function call;
36 omit exclude_names entries.
39 items
= filter(lambda (k
,v
): not exclude_names
.has_key(k
),
42 for k
,v
in items
[:-1]:
44 s
+= str(k
) + " = " + repr(v
) + ', '
46 s
+= str(k
) + " = " + repr(v
)
50 """Produce a tuple useable in a function call.
51 The simple [1:-1] hack fails for (1,)
53 assert(type(t
) == TupleType
)
60 def to_plain_python_f(itm
, storage
):
61 item
= storage
.load( itm
)
63 return item
.to_plain_python(storage
)
64 except AttributeError:
69 # Reduce the source text row index by one, for all nodes in the
72 # Make rows start at 0
73 if self
._first
_char
!= None:
74 row
, col
= self
._first
_char
75 self
._first
_char
= (row
-1, col
)
77 if self
._last
_char
!= None:
78 row
, col
= self
._last
_char
79 self
._last
_char
= (row
-1, col
)
81 for node
in tree
.top_down():
86 class InterfaceOnly(Exception):
89 def cross_reference_trees(storage
, block
, newblock
):
90 # cross-reference tree elements, one-to-one, for later
92 src
= block
.raw_seq_expr().top_down()
93 dest
= newblock
.raw_seq_expr().top_down()
96 storage
.push_attributes(src
.next()._id
,
97 "interp_clone", dest
.next()._id
)
101 def cross_ref_trees(storage
, block
, newblock
):
102 # cross-reference tree elements, one-to-one, for later
104 src
= block
.top_down()
105 dest
= newblock
.top_down()
108 storage
.push_attributes(src
.next()._id
,
109 "interp_clone", dest
.next()._id
)
110 except StopIteration:
114 def copy_char_info(source_tree
, target_tree
):
115 # Assuming identical tree structure, overlay the source_tree's
116 # source string info (the string and character positions) on the
118 source_string
= source_tree
._source
_string
119 src
= source_tree
.top_down()
120 dest
= target_tree
.top_down()
123 ss
= src
.next(); dd
= dest
.next()
124 dd
._source
_string
= source_string
125 dd
._first
_char
= ss
._first
_char
126 dd
._last
_char
= ss
._last
_char
127 except StopIteration:
130 def copy_attribute(source_tree
, target_tree
, att
):
131 # Assuming identical tree structure, overlay the source_tree's
132 # ATT attribute source on the target tree.
133 src
= source_tree
.top_down()
134 dest
= target_tree
.top_down()
137 ss
= src
.next(); dd
= dest
.next()
138 dd
.__dict
__[att
] = ss
.__dict
__[att
]
139 except StopIteration:
143 def rowcol_to_index(rowcol
, text
):
144 # Convert the (row, column) index used by _first_char and
145 # _last_char into an index of the string.
146 # This assumes Universal file reads; all EOLs are \n.
149 for i
in xrange(0, row
):
150 start
= text
.find('\n', start
) + 1
156 #** common base class
160 Provide common functionality for Nested() and Immediate().
162 Also provide dummy functions for the real interface provided
163 by Nested and Immediate; these functions raise exceptions if not
166 raw_* members work on in-memory trees, before tree.setup() is run.
175 def prefix_dump(self
, indent
=0):
178 def traverse_depth(self
):
181 def traverse_breadth(self
, head
=1):
187 def __getitem__(self
, index
):
190 def interpret(self
, env
, storage
):
def __init__(self, *primary, **kwds):
    """Initialize the astType state common to every node kind."""
    # Source text is unknown at construction time; setup code fills
    # these in later.
    self._source_string = None
    self._source_file = None
    # Optional callable of type ((l3tree, env, storage) -> None),
    # run before interpretation.
    self._pre_interp_hook = None
    # Generic attributes, kept as (key -> value) pairs.
    self._attributes = {}
astType.__init__ = __init__
202 def set_source_string(self
, source_string
):
203 # Adjustment for block constructs at EOF
204 lines
= len(source_string
.split('\n'))
206 for node
in self
.top_down():
207 node
._source
_string
= source_string
209 # Adjustment for block constructs at EOF
210 if node
._last
_char
!= None:
211 row2
, col2
= node
._last
_char
213 node
._last
_char
= (lines
- 1, col2
)
216 astType
.set_source_string
= set_source_string
def set_source_file(self, source_file):
    """Record SOURCE_FILE on every node of this tree (top-down walk)."""
    for nd in self.top_down():
        nd._source_file = source_file
astType.set_source_file = set_source_file
226 #** Nested base class
227 class Nested(astType
):
229 Collection of common functionality for all language elements.
231 The _primary key is the constructor argument tuple, e.g.,
232 for Sum(1,2), _primary is (1,2).
234 Keyword arguments are merged with the instance __dict__.
239 return self
.__repr
__()
240 Nested
.__str
__ = __str__
243 return len(self
._primary
)
244 Nested
.__len
__ = __len__
247 dict_str
= dict2str(self
.__dict
__)
249 dict_str
= ', ' + dict_str
250 return self
.__class
__.__name
__ + '(' + \
251 tuple2str(self
._primary
) + dict_str
+ ')'
252 Nested
.repr_long
= repr_long
255 return "%s(%s, %d)" % (self
.__class
__.__name
__,
256 tuple2str(self
._primary
),
259 return "%s(%s)" % (self
.__class
__.__name
__, tuple2str(self
._primary
))
260 Nested
.__repr
__ = __repr__
def __getitem__(self, i):
    """Child access: self[i] is the i'th constructor argument."""
    return self._primary[i]
Nested.__getitem__ = __getitem__
def __getslice__(self, i, j=None):
    """Reject slice access on Nested nodes.

    Bug fix: Python 2 invokes __getslice__(self, i, j) with TWO index
    arguments, so the former one-argument signature made nd[a:b] fail
    with a TypeError instead of this deliberate Exception.  `j` keeps a
    default so any existing single-argument caller still works.
    """
    raise Exception("slicing is not defined for Nested.")
Nested.__getslice__ = __getslice__
def __init__(self, *primary, **kwds):
    """Store constructor arguments and prime the source-position slots."""
    self._primary = primary
    # Merge the keywords into the instance and also remember them for
    # later reconstruction (see __deepcopy__).
    self.__dict__.update(kwds)
    self._init_kwds = kwds
    # Both positions are (row, column), 0-relative; the end column is
    # one past the last character.
    self._first_char = None
    self._last_char = None
    return astType.__init__(self, *primary, **kwds)
Nested.__init__ = __init__
def childrens_ids(self):
    """Return the _id of every direct child, in order."""
    ids = []
    for child in self._primary:
        ids.append(child._id)
    return ids
Nested.childrens_ids = childrens_ids
285 def find_child_index(self
, id):
287 for cc
in self
._primary
:
292 Nested
.find_child_index
= find_child_index
294 def __deepcopy__(self
, memo
):
295 # Also see aList.__deepcopy__
297 # See copy_attribute() to restore missing attributes
299 rv
= self
.__class
__(*self
._primary
, **self
._init
_kwds
)
300 direct_ref
= ['_arg_env',
305 ## '_primary', # the subtree had better be copied...
310 for k
,v
in self
.__dict
__.items():
312 stuff
[k
] = self
.__dict
__[k
]
314 stuff
[k
] = deepcopy(v
)
315 rv
.__dict
__.update(stuff
)
317 # Re-assign non-copyable members (e.g. from __init__) on a
318 # per-class basis, or invalidate the member to insure explicit
321 # This is needed to maintain any graph structures in the original,
322 # instead of getting a tree (pickle mostly avoids this problem,
323 # but __deepcopy__ does not).
325 # Members to look at come from __init__(), setup(), and
328 # Re-establish graph structures where possible...
331 # ... but ensure exceptions for others.
338 ###try: del rv._primary_ids
342 Nested
.__deepcopy
__ = __deepcopy__
def deref(self, index):
    """Return the INDEX'th constructor argument (no storage lookup)."""
    children = self._primary
    return children[index]
349 def eql(self
, other
):
350 # Recursive content equality test -- not physical equality.
352 # as in Matcher.match,
353 # Trees are assumed to have interface functions
354 # __len__, __getitem__, and __class__
356 if self
.__class
__ == other
.__class
__: # identical head
359 if nc
!= other
.__len
__():
361 # Compare ALL children.
362 for c
in range(0,nc
):
363 if self
[c
].eql(other
[c
]):
373 def eql_1(self
, other
):
374 # Non-recursive value equality test -- not physical equality.
376 # As in Matcher.match,
377 # Trees are assumed to have interface functions
378 # __len__, __getitem__, and __class__
380 if self
.__class
__ == other
.__class
__: # identical head
383 if nc
!= other
.__len
__():
392 #** Immediate base class
393 class Immediate(astType
):
395 Common functionality for all Immediate types.
396 Note: this class cannot be instantiated -- it must be subclassed.
399 def traverse_depth(self
):
402 def traverse_breadth(self
, head
=0):
408 def __getitem__(self
, i
):
def __init__(self, *primary, **kwds):
    """Initialize an Immediate; the payload must be a plain Python value."""
    # Immediates never nest other ast nodes!
    assert not isinstance(primary[0], astType)
    # No source position is known yet.
    self._first_char = None
    self._last_char = None
    self._primary = primary
    self._init_kwds = kwds
    self.__dict__.update(kwds)
    # Also initialize the second base class (the plain Python type,
    # e.g. IntType for Int).
    self.__class__.__bases__[1].__init__(self)
    return astType.__init__(self, *primary, **kwds)
Immediate.__init__ = __init__
423 def __deepcopy__(self
, memo
):
424 # Also see aList.__deepcopy__, Nested.__deepcopy__
425 rv
= self
.__class
__(*self
._primary
, **self
._init
_kwds
)
426 direct_ref
= ['_arg_env',
435 for k
,v
in self
.__dict
__.items():
437 stuff
[k
] = self
.__dict
__[k
]
439 stuff
[k
] = deepcopy(v
)
440 rv
.__dict
__.update(stuff
)
442 # Re-establish graph structures where possible...
445 # ... but ensure exceptions for others.
453 Immediate
.__deepcopy
__ = __deepcopy__
456 def childrens_ids(self
):
458 Immediate
.childrens_ids
= childrens_ids
461 return self
.__repr
__()
462 Immediate
.__str
__ = __str__
464 def eql(self
, other
):
466 if self
.__class
__ == other
.__class
__:
467 return self
._primary
[0] == other
._primary
[0]
471 Immediate
.eql_1
= eql
474 dict_str
= dict2str(self
.__dict
__)
476 dict_str
= ', ' + dict_str
477 return self
.__class
__.__name
__ + '(' + \
478 self
.__class
__.__bases
__[1].__repr
__(self
._primary
[0]) + \
480 Immediate
.repr_long
= repr_long
483 if self
.__dict
__.has_key('_id'):
484 return "%s(%s, %d)" % (
485 self
.__class
__.__name
__,
486 self
.__class
__.__bases
__[1].__repr
__(self
._primary
[0]),
490 self
.__class
__.__name
__,
491 self
.__class
__.__bases
__[1].__repr
__(self
._primary
[0]),
495 self
.__class
__.__name
__,
496 self
.__class
__.__bases
__[1].__repr
__(self
._primary
[0]),
498 Immediate
.__repr
__ = __repr__
def to_plain_python(self, storage):
    """Unwrap the stored plain Python value; STORAGE is unused here."""
    payload = self._primary
    return payload[0]
Immediate.to_plain_python = to_plain_python
506 class InterpreterError(exceptions
.Exception):
507 def __init__(self
, args
=None):
510 class UnboundSymbol(exceptions
.Exception):
511 def __init__(self
, args
=None):
514 class Interpret_tail_call(exceptions
.Exception):
515 # Used for passing tail call information UP the Python stack.
516 # Arg: (tree, env, finish_progs)
518 # tree is the astType to continue executing
519 # env is the environment to evaluate in
520 # finish_progs is a list of functions to run after the tail call
521 # is finished. Args are the .interpret() return
523 def __init__(self
, args
=None):
526 class Interpret_return(exceptions
.Exception):
527 def __init__(self
, args
=None):
533 class Program(Nested
):
def __init__(self, *primary, **kwds):
    """Initialize a Program node with empty outline and label state."""
    # Outline (display) structure.
    self._outl_parent = None     # astType weakref.
    self._outl_children = None   # (astType weakref) vaList
    self._outl_type = 'subtree'  # "flat" | "nested" | "subtree"
    # Display label; a StringType once assigned.
    self.p_label = None
    return Nested.__init__(self, *primary, **kwds)
Program.__init__ = __init__
549 def set_label(self
, lbl
):
550 assert isinstance(lbl
, StringType
)
552 Program
.set_label
= set_label
556 Program
.get_label
= get_label
559 return getattr(self
, "_eval_env", None)
560 Program
.eval_env
= eval_env
566 class Return(Nested
):
572 # The name, args, modifiers, body split is internally available.
577 class Function(Nested
):
579 # Function(opt_block_args, seq_expr) { |a, ...| c;d;...}
585 Return number of positional arguments.
587 return len(self
.positional_block_args())
588 Function
.nargs
= nargs
592 Function
.seq_expr
= seq_expr
def raw_seq_expr(self):
    """Return the in-memory body subtree (second constructor argument)."""
    return self._primary[1]
Function.raw_seq_expr = raw_seq_expr
def __init__(self, *primary, **kwds):
    """Initialize a Function node; the binding name is filled in later."""
    # self._binding_env = None
    # Becomes "foo" for `def foo(): BLOCK`-style definitions.
    self._binding_name = None
    ## self._clone_of = None # source block
    return Nested.__init__(self, *primary, **kwds)
Function.__init__ = __init__
607 def block_copy(self
, storage
):
608 # copy all, including envs. This requires a all copies to come
609 # from the original skeleton.
611 copy_char_info(self
, rv
)
612 ## rv._clone_of = self._id
614 Function
.block_copy
= block_copy
616 def named_block_args(self
, symbols
= 0):
618 Return named block args as (name : string, value : astType) list.
622 for ba
in self
.block_args():
623 # Skip to keyword args.
624 if ma
.match(ba
, MarkerTyped(String('name'), Symbol('symbol'))):
628 if not ma
.match(ba
, Set(MarkerTyped(String('name'), Symbol('_')),
630 raise InterpreterError
, \
631 "Expected 'name' or 'key = value' argument, got: " + str(ba
)
634 kv_pairs
.append((ma
._matches
['name'], ma
._matches
['val']))
636 kv_pairs
.append((ma
._matches
['name'].as_index(),
639 Function
.named_block_args
= named_block_args
641 def positional_block_args(self
, symbols
= 0):
642 # Return a (string list) of argument names.
645 for ba
in self
.block_args():
646 # if not ma.match_exp_str(ba, '!! name symbol'):
647 if not ma
.match(ba
, MarkerTyped(String('name'), Symbol('symbol'))):
648 # Skip remaining (keyword) args.
650 ## raise InterpreterError, "Invalid argument type: " + str(ba)
652 arg_names
.append(ma
._matches
['name'])
654 arg_names
.append(ma
._matches
['name'].as_index())
656 Function
.positional_block_args
= positional_block_args
658 def block_args(self
):
659 # Return list of all arguments.
661 Function
.block_args
= block_args
666 # Call(simple_expr, simple_expr_list) f a b ...
671 def positional_args(self
):
672 # Return (astType list) of positional args.
675 for ba
in self
.block_args():
676 # Grab all all but `name = val`.
677 if not ma
.match(ba
, Set(MarkerTyped(String('name'), Symbol('_')),
681 Call
.positional_args
= positional_args
683 def named_args(self
):
685 Return named block args as (name : string, value : astType) list.
689 for ba
in self
.block_args():
691 if ma
.match(ba
, Set(MarkerTyped(String('name'), Symbol('_')),
694 kv_pairs
.append((ma
._matches
['name'].as_index(),
697 Call
.named_args
= named_args
701 Return number of positional arguments.
703 return len(self
.positional_args())
706 def block_args(self
):
707 # Return list of all arguments.
709 Call
.block_args
= block_args
def __init__(self, *primary, **kwds):
    """Initialize a Call node; all state lives in the Nested base."""
    # ?? self._id_count = 0
    # ?? self._called_block = None
    Nested.__init__(self, *primary, **kwds)
Call.__init__ = __init__
719 class Member(Nested
):
721 # Member(simple_expr, simple_expr) [ a.b ]
def __init__(self, *primary, **kwds):
    """Plain pass-through to Nested.__init__."""
    return Nested.__init__(self, *primary, **kwds)
If.__init__ = __init__
736 class Labeled(Nested
):
737 def interpret(self
, env
, storage
):
742 # HERE. scheme-style implementation
743 def interpret(self
, env
, storage
):
749 # Set(simple_expr, expr)
753 def arg_names(self
, raw_symbols
= 0):
754 # Return argument names as string list.
755 # Also see Set.interpret
757 if isinstance(args
, Tuple
):
759 arg_list
= args
._primary
[0]
761 # if not self._matcher.match_exp_str(nm, '!! name symbol'):
762 if not self
._matcher
.match(nm
, MarkerTyped(String('name'),
764 raise InterpreterError
, \
765 "Set: Invalid argument type: " + str(nm
)
767 name_list
.append( self
._matcher
.get('name') )
769 name_list
.append( self
._matcher
.get('name').as_index() )
772 # elif self._matcher.match_exp_str( args, '!! name symbol'):
773 elif (self
._matcher
.match(args
, MarkerTyped(String('name'),
775 self
._matcher
.match(args
, MarkerTyped(String('name'),
778 return [ self
._matcher
.get('name') ]
780 return [ self
._matcher
.get('name').as_index() ]
782 raise InterpreterError
, "Set: Invalid first argument type: " + \
784 Set
.arg_names
= arg_names
793 # All List operations should forward to the contained alist.
def __init__(self, *primary, **kwds):
    """Initialize a List node; l_label is the optional display label."""
    self.l_label = None  # StringType once assigned.
    return Nested.__init__(self, *primary, **kwds)
List.__init__ = __init__
801 def set_label(self
, lbl
):
802 assert isinstance(lbl
, StringType
)
804 List
.set_label
= set_label
808 List
.get_label
= get_label
811 ### def to_plain_python(self, storage):
812 ### return [to_plain_python_f(itm, storage)
813 ### for itm in self._eval_list]
814 ### List.to_plain_python = to_plain_python
def find_child_index(self, id):
    """Forward to the contained alist (first constructor argument)."""
    # `id` shadows the builtin, but renaming it could break callers.
    inner = self._primary[0]
    return inner.find_child_index(id)
List.find_child_index = find_child_index
820 ## def __len__(self):
821 ## return len(self._primary[0])
822 ## List.__len__ = __len__
824 # This special definition breaks much code; use explicit access instead.
825 # def __getitem__(self, i):
826 # return self._primary[0][i]
827 # List.__getitem__ = __getitem__
def viewList(*primary, **kwds):
    """Factory used by outlining views: builds a plain Program node."""
    return Program(*primary, **kwds)
836 class cls_viewList(List
):
838 A subclass of List for viewing of selected subtrees (outlining).
844 return "%s(%s, %s)" % (self
.__class
__.__name
__,
846 tuple2str(self
._primary
),
848 cls_viewList
.__repr
__ = __repr__
def __init__(self, *primary, **kwds):
    """Initialize a view list with empty outline state ('nested' type)."""
    self._outl_parent = None    # astType weakref.
    self._outl_children = None  # (astType weakref) vaList
    self._outl_type = 'nested'  # "flat" | "nested" | "subtree"
    return List.__init__(self, *primary, **kwds)
cls_viewList.__init__ = __init__
864 The general mapping type.
def __init__(self, *primary, **kwds):
    """Initialize a Map node with no binding name and no display label."""
    self._binding_name = None
    self.m_label = None  # special display label.
    return Nested.__init__(self, *primary, **kwds)
Map.__init__ = __init__
874 def set_label(self
, lbl
):
876 Map
.set_label
= set_label
879 raise "internally modified: update calling code"
880 return self
._eval
_env
.ie_lookup_1(key
)
883 def to_plain_python(self
, storage
):
884 dct
= (self
._eval
_env
._bindings
)
885 # dict_to_plain_python:
887 for k
,v
in dct
.items():
888 # HERE. Env *should* use Path()s ...
889 newdct
[k
] = v
.to_plain_python(storage
)
891 Map
.to_plain_python
= to_plain_python
896 # Subdir is a Map in which some keys are file names with the
897 # absolute path as value. Other entries are regular Map entries.
def __init__(self, *primary, **kwds):
    """Initialize a Tuple node; its content list is laid out horizontally."""
    Nested.__init__(self, *primary, **kwds)
    self.l_label = None  # StringType display label.
    # Tuples render their contained list horizontally.
    self._primary[0].setthe(layout='horizontal')
Tuple.__init__ = __init__
916 #*** Immediate() types
919 # class Bool(Immediate, bool):
923 class Int(Immediate
, IntType
):
927 class Float(Immediate
, FloatType
):
931 class Complex(Immediate
, ComplexType
): # To be added...
935 class String(Immediate
, StringType
):
939 return self
._primary
[0]
940 String
.as_index
= as_index
942 def FilepathString(name
):
943 assert isinstance(name
, StringType
), "Expecting string for file path."
945 if os
.path
.exists(name
):
952 ''' Is (or was) self a valid file name? '''
953 fe
= getattr(self
, "_isfile", None)
955 if os
.path
.exists(self
._primary
[0]):
963 String
.isfile
= isfile
968 class Comment(String
, StringType
):
970 A subclass of String with support for comments.
974 # # def __init__(self):
975 # # String.__init__(self)
976 # # self._string = string
977 # # Comment.__init__ = __init__
982 class Symbol(Immediate
, StringType
):
986 return self
._primary
[0]
987 Symbol
.as_index
= as_index
993 # Native is the general placeholder to use when native Python
994 # VALUES are to be included in a tree, or when an external value
995 # (say, the content of a file) is to be handled "natively".
997 # E.g. a Numeric array
998 # resulting from interpretation may be put into a new tree for
1001 # Native is similar to aNone and Immediates in behavior. It
1002 # evaluates to the Python value it holds.
1006 return self
.__repr
__()
1009 return "%s(%s)" % (self
.__class
__.__name
__,
1010 self
._primary
[0].__class
__.__name
__)
1012 def traverse_depth(self
):
1015 def traverse_breadth(self
, head
=0):
def __init__(self, primary, **kwds):
    """Wrap a single native Python value; stored as the 1-tuple _primary.

    Note: unlike the other node types, PRIMARY here is one value, not
    a *args tuple.
    """
    self._primary = (primary,)
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    # Optional callable of type ((l3tree, env, storage) -> None).
    self._pre_interp_hook = None
    # Generic attributes, kept as (key -> value) pairs.
    self._attributes = {}
Native.__init__ = __init__
def to_plain_python(self, storage):
    """Return the wrapped native Python value unchanged."""
    payload = self._primary
    return payload[0]
Native.to_plain_python = to_plain_python
1034 return self
._primary
[0]
1035 Native
.value
= value
1037 # # def set_source_string(self, source_string):
1038 # # # Adjustment for block constructs at EOF
1039 # # lines = len(source_string.split('\n'))
1040 # # self._source_string = source_string
1042 # # # Adjustment for block constructs at EOF
1043 # # if self._last_char != None:
1044 # # row2, col2 = self._last_char
1045 # # if row2 >= lines:
1046 # # self._last_char = (lines - 1, col2)
1049 # # Native.set_source_string = set_source_string
1051 # # def eql(self, other):
1052 # # if self.__class__ == other.__class__:
1056 # # Native.eql = eql
1061 # To allow attachment of labels, and for uniformity, the ast.None()
1062 # class is instantiated on every use, just as the other Immediate()s
1064 # This class should inherit from None, but this is illegal.
1065 # All Immediate() functions must be provided.
1068 return self
.__repr
__()
1071 return self
.__class
__.__name
__ + '()'
1073 def traverse_depth(self
):
1076 def traverse_breadth(self
, head
=0):
1080 # For compatibility with Matcher().
1082 aNone
.__len
__ = __len__
def __init__(self, *primary, **kwds):
    """Stand-in for the None value; positional arguments are ignored."""
    self._primary = (None,)
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    # Optional callable of type ((l3tree, env, storage) -> None).
    self._pre_interp_hook = None
    # Generic attributes, kept as (key -> value) pairs.
    self._attributes = {}
aNone.__init__ = __init__
1095 def to_plain_python(self
, storage
):
1097 aNone
.to_plain_python
= to_plain_python
1099 def set_source_string(self
, source_string
):
1100 # Adjustment for block constructs at EOF
1101 lines
= len(source_string
.split('\n'))
1103 self
._source
_string
= source_string
1105 # Adjustment for block constructs at EOF
1106 if self
._last
_char
!= None:
1107 row2
, col2
= self
._last
_char
1109 self
._last
_char
= (lines
- 1, col2
)
1112 aNone
.set_source_string
= set_source_string
1114 def eql(self
, other
):
1115 if self
.__class
__ == other
.__class
__:
1124 # Also see class Nested, aTree
1125 # Note: this class' instances pickle improperly with protocol < 2, and
1126 # python versions < 2.3
1128 class aList(ListType
):
1130 # To simplify interpret() and setup() code, the regular list needs
1131 # some extra features.
1132 # In particular, access consistent with other astType requires the
1136 # field; list access uses
1142 # Updates of either require updates of both.
1144 # Note: the List() class is for the L3 ast; the aList() class
1149 return self
.__repr
__()
def __add__(self, other):
    """Concatenate via the underlying ListType, re-wrapped as this class."""
    combined = ListType.__add__(self, other)
    return self.__class__(combined)
1155 return self
.__class
__.__name
__ + '(' + \
1156 ListType
.__repr
__(self
) + ')'
1157 aList
.__repr
__ = __repr__
1159 def repr_long(self
):
1160 dict_str
= dict2str(self
.__dict
__)
1162 dict_str
= ', ' + dict_str
1163 return self
.__class
__.__name
__ + '(' + \
1164 ListType
.__repr
__(self
) + dict_str
+ ')'
1165 aList
.repr_long
= repr_long
def __init__(self, arg, **kwds):
    """Build from ARG (a plain list); ARG is also kept as _primary[0]."""
    ListType.__init__(self, arg)
    self._primary = (arg,)  # Form is ( [...], )
    # ?? self._id = None
    # ?? for textual persistence. use with __repr__. Test.
    self.__dict__.update(kwds)
    self._first_char = None
    self._last_char = None
    # Optional callable of type ((l3tree, env, storage) -> None).
    self._pre_interp_hook = None
    # Generic attributes, kept as (key -> value) pairs.
    self._attributes = {}
aList.__init__ = __init__
def to_plain_python(self, storage):
    """Return the contained plain list; STORAGE is unused here."""
    inner = self._primary
    return inner[0]
aList.to_plain_python = to_plain_python
def deref(self, index):
    """Plain index access; aList children are stored in self directly."""
    return self[index]
def dependencies(self, env):
    """ Set up post-setup, pre-execution structures.
    """
    # NOTE(review): self._primary is the 1-tuple (list,), so this loop
    # visits the contained plain list itself and calls .dependencies on
    # it -- a plain list has no such attribute.  Possibly intended:
    # `for child in self:`.  Confirm against callers before changing.
    for child in self._primary: child.dependencies(env)
aList.dependencies = dependencies
1197 def eql(self
, other
):
1198 # as in Matcher.match,
1199 # Trees are assumed to have interface functions
1200 # __len__, __getitem__, and __class__
1202 if self
.__class
__ == other
.__class
__: # identical head
1205 if nc
!= other
.__len
__():
1207 # Compare ALL children.
1208 for c
in range(0,nc
):
1209 if self
[c
].eql(other
[c
]):
1219 def eql_1(self
, other
):
1220 if self
.__class
__ == other
.__class
__: # identical head
1221 # Same number of children?
1223 if nc
!= other
.__len
__():
1230 def find_child_index(self
, id):
1238 aList
.find_child_index
= find_child_index
1241 # This __deepcopy__ (of a list subclass) causes infinite recursion
1242 # during .interpret...
1243 # # def __deepcopy__(self, memo):
1244 # # # Also see Nested.__deepcopy__
1246 # # # See copy_attribute() to restore missing attributes
1248 # # rv = self.__class__(*self._primary)
1249 # # direct_ref = ['_arg_env',
1254 # # ## '_primary', # the subtree had better be copied...
1258 # # for k,v in self.__dict__.items():
1259 # # if k in direct_ref:
1260 # # stuff[k] = self.__dict__[k]
1262 # # stuff[k] = deepcopy(v)
1263 # # rv.__dict__.update(stuff)
1265 # # try: del rv._parent
1272 # # aList.__deepcopy__ = __deepcopy__
1276 # Defining this then also requires __getslice__;
1277 # it also makes other code very hard to check. Use explicit syntax
1279 # def __getitem__(self, index):
1280 # return storage.load(ListType.__getitem__(self, index))
1281 # aList.__getitem__ = __getitem__
1285 class vaList(aList
):
1286 ''' subclass of aList with necessary restrictions for use in
1289 - no changes to children
def interpret(self, env, storage):
    """vaList nodes are structural only; interpreting one is an error."""
    raise Exception("vaList must not be interpreted. Internal error.")
vaList.interpret = interpret
def insert_child_rec(self, index, new, storage):
    """Always fails: a collapsed list has no well-defined insert point."""
    raise Exception("Insertion into collapsed list is ambiguous, hence "
                    "not possible. Expand the list first. ")
vaList.insert_child_rec = insert_child_rec
1306 def replace_child(self
, orig_id
, new_node
):
1307 assert isinstance(orig_id
, IntType
)
1308 idx
= self
.find_child_index(orig_id
)
1310 raise ReplacementError
, "Child not found."
1311 # Update direct references.
1312 foo
= self
._primary
[0]
1314 self
._primary
= (foo
,)
1315 self
[idx
] = new_node
1316 vaList
.replace_child
= replace_child
def insert_child(self, index, child):
    """Insert CHILD at INDEX, updating both element copies.

    The elements live both in self._primary[0] and in the list base
    class itself; updates of either require updates of both.
    Fix: `!= None` replaced by the idiomatic `is not None` (PEP 8 E711);
    behavior is unchanged.
    """
    assert self._id is not None
    # Also see Nested.insert_child()
    inner = self._primary[0]
    inner.insert(index, child)
    self._primary = (inner,)
    self.insert(index, child)
vaList.insert_child = insert_child
1329 def detach_child(self
, orig_id
, storage
):
1330 assert isinstance(orig_id
, IntType
)
1331 idx
= self
.find_child_index(orig_id
)
1333 raise ReplacementError
, "Child not found."
1334 # Update direct references.
1335 foo
= self
._primary
[0]
1337 self
._primary
= (foo
,)
1339 vaList
.detach_child
= detach_child
1342 def setup_valist(self
, w_
, parallel_nd
):
1343 # Return an empty alist, .setup() using the environment of
1345 storage
= w_
.state_
.storage
1346 parent
= empty_parent()
1347 def_env
= storage
.get_attribute(parallel_nd
._id
, "stored_from_env")
1348 # Form the raw list.
1349 rv
, _
= vaList([]).setup(parent
, def_env
, storage
)
1351 vaList
.setup_valist
= setup_valist
1356 #**** matcher elements
1357 class Marker(Immediate
, StringType
):
1362 # Note: no _storage use: MarkerTyped is Immediate()
1363 return (self
._primary
[0])
1368 class MarkerTyped(Nested
):
1369 # !! name expr -> MarkerTyped(Symbol('name'), expr)
1370 # where expr is a type sample.
1374 # Notice _storage use: MarkerTyped is Nested()
1375 return self
.deref(0).as_index()
1376 MarkerTyped
.name
= name
1379 return self
.deref(1)
1380 MarkerTyped
.expr
= expr
1382 # class Set(astType):
1387 class Inline(Nested
):
1389 # Inline( python_init_string )
1390 # Raw Python code (especially module imports) can be provided here.
def __init__(self, *primary, **kwds):
    """Initialize an Inline node; primary holds the raw Python source."""
    (self._python_init_string,) = primary
    return Nested.__init__(self, *primary, **kwds)
Inline.__init__ = __init__
1399 def __deepcopy__(self
, memo
):
1400 rv
= Nested
.__deepcopy
__(self
, memo
)
1403 Inline
.__deepcopy
__ = __deepcopy__
1409 class Macro(Nested
):
1411 # Macro( in_args, body )
def __init__(self, *primary, **kwds):
    """Plain pass-through to Nested.__init__."""
    Nested.__init__(self, *primary, **kwds)
Macro.__init__ = __init__
1419 def block_copy(self
, storage
):
1421 copy_char_info(self
, rv
)
1422 ## rv._clone_of = self._id
1424 Macro
.block_copy
= block_copy
def block_args(self):
    """Return the list of all macro arguments (first child)."""
    return self.deref(0)
Macro.block_args = block_args
1434 Return number of arguments.
1436 return len(self
.block_args())
def raw_seq_expr(self):
    """Return the macro body subtree (second constructor argument)."""
    return self._primary[1]
Macro.raw_seq_expr = raw_seq_expr
1445 #** Editing / high level support
1449 # The a[A-Z]* classes are meant for explicit construction; they have
1450 # no parsing elements. Some a[A-Z]* classes (a)dd something to
1453 # The e[A-Z]* classes are meant for explicit construction; they
1454 # are intended for (e)diting functionality.
1459 #*** macro manager (selection tree)
1461 # The selection_tree format:
1463 # tree ::= [tree label leaf * ]
1464 # leaf ::= tree | value
1465 # value ::= anything not eTree
1468 class eTree(Nested
):
1470 # eTree(label, l1, l2, ...)
def __init__(self, *primary, **kwds):
    """eTree(label, l1, l2, ...): a labeled selection-tree node."""
    Nested.__init__(self, *primary, **kwds)
    # Direct reference to the label (first constructor argument).
    self._label = primary[0]
1476 def __deepcopy__(self
, memo
):
1477 rv
= Nested
.__deepcopy
__(self
, memo
)
1478 # Re-establish graph structures.
1479 rv
._label
= rv
._primary
[0]
1481 eTree
.__deepcopy
__ = __deepcopy__
1484 def append(self
, subtree
,
1485 model_cb
= lambda subtree
, subtree_idx
: 0 ):
1486 # Append subtree and return new child's logical index.
1487 self
._primary
+= (subtree
,)
1488 subtree_idx
= len(self
._primary
) - 2
1490 # Run external updates.
1491 model_cb(subtree
, subtree_idx
)
1494 eTree
.append
= append
1496 #** possible future derived types
1497 # class Future(Nested):
1501 # class Cond(Nested):
1504 # class Manual_and(Nested):
1507 # class Manual_or(Nested):
1510 # class Dynamic_and(Nested):
1513 # class Dynamic_or(Nested):
1516 # class Vector(Nested):
1519 #* Low-level program printing support
1520 #** source_substring
1521 def source_substring(self
):
1523 Return the leading substring for self, at most one line.
1525 if self
._first
_char
!= None:
1526 row1
, col1
= self
._first
_char
1530 if self
._last
_char
!= None:
1531 row2
, col2
= self
._last
_char
1532 str = self
._source
_string
.split('\n')[row1
]
1536 return str[col1
:col2
]
1539 astType
.source_substring
= source_substring
def set_char_range(self, first, last):
    """Unconditionally set the (row, col) start/end source positions."""
    self._first_char = first
    self._last_char = last
Nested.set_char_range = set_char_range
Immediate.set_char_range = set_char_range
aNone.set_char_range = set_char_range
aList.set_char_range = set_char_range
def llet_char_range(self, first, last):
    """Set the character range only if none has been recorded yet."""
    if self._first_char is None:
        self._first_char = first
        self._last_char = last
Nested.llet_char_range = llet_char_range
Immediate.llet_char_range = llet_char_range
aNone.llet_char_range = llet_char_range
aList.llet_char_range = llet_char_range
def get_char_range(self):
    # Return (row, col), (row, col) pair.  Upper left / lower right.
    if self._first_char is None:
        if isinstance(self, Comment):
            # NOTE(review): the Comment branch body is elided in this
            # excerpt -- confirm against the original file.
        # Not a Comment: log the missing position info.
        glbl.logger.info("%s has no character position info." % self)
    return self._first_char, self._last_char
Nested.get_char_range = get_char_range
Immediate.get_char_range = get_char_range
aList.get_char_range = get_char_range
aNone.get_char_range = get_char_range
def source_string(self):
    # Program delegates to the generic astType lookup first.
    tst = astType.source_string(self)
    if tst != "no_source":
        # NOTE(review): line elided in this excerpt -- presumably
        # `return tst`; confirm against the original file.
    # Fall back to the first child's source string.
    return self[0].source_string()
Program.source_string = source_string
1587 # This is clearly wrong:
1588 # # def source_string(self):
1589 # # return "\n".join( [child.source_string() for child in self] )
1590 # # aList.source_string = source_string
def source_string(self):
    # Return the source string for self.
    # NOTE(review): some original lines are elided in this excerpt
    # (e.g. the branch taken when no character info / source string is
    # present) -- confirm against the original file.
    if self._first_char != None:
        row1, col1 = self._first_char
    if not self._source_string:
        # NOTE(review): branch body elided in this excerpt.
    lines = self._source_string.split('\n')
    if self._last_char != None:
        row2, col2 = self._last_char
        # Multi-line span: keep rows row1..row2 and trim the first and
        # last rows to the column range.
        lines = [ lines[i] for i in range(row1, row2 + 1) ]
        ### lines = [ lines[i] for i in range(row1, row2) ]
        lines[0] = lines[0][col1:None]
        lines[-1] = lines[-1][None:col2]
        return "\n".join(lines)
    # Single-line span.
    return lines[row1][col1:col2]
astType.source_string = source_string
aNone.source_string = source_string
aList.source_string = source_string
def source_string(self):
    """Return the source text for this Symbol, falling back to the
    stored symbol name when no source info was recorded."""
    missing = (self._source_string == None) or (self._first_char == None)
    if missing:
        return self._primary[0]
    return astType.source_string(self)
Symbol.source_string = source_string
#** source string information
def has_source_string(self):
    """Report whether character-position info was attached to this node."""
    if self._first_char is None:
        return False
    return True
# Install has_source_string on the generic node types.
astType.has_source_string = has_source_string
aList.has_source_string = has_source_string
aNone.has_source_string = has_source_string
def has_source_string(self):
    # NOTE(review): body line elided in this excerpt; Native presumably
    # reports that it carries no source string -- confirm against the
    # original file.
Native.has_source_string = has_source_string
def num_lines(self):
    # Number of source lines spanned by this node, inclusive.
    if self.has_source_string():
        row1, col1 = self._first_char
        row2, col2 = self._last_char
        return (row2 - row1 + 1)
    # No character info recorded for this node.
    raise Exception("No source string available.")
astType.num_lines = num_lines
aList.num_lines = num_lines
aNone.num_lines = num_lines
def l3_repr_dedented(self):
    # Return (a parseable?) representation of self, with leading
    # whitespace/indentation stripped w.r.t. the first line.
    #
    #     return for_mic(mic + 1, iteration + 1,
    #                    lastp, lastp + num_img(mic + 1))
    #
    # l3_repr(for_mic) would return ::
    #     for_mic(mic + 1, iteration + 1,
    #                    lastp, lastp + num_img(mic + 1))
    # while l3_repr_dedented returns
    #     for_mic(mic + 1, iteration + 1,
    #                    lastp, lastp + num_img(mic + 1))
    #
    # Note that split/join are inverses:
    #     "\n".join( "\n\na".split('\n')) -> '\n\na'
    (frow, fcol), (lrow, lcol) = self.get_char_range()
    txt = self.l3_repr()
    txt_s = txt.split('\n')
    # Take the first line unchanged.
    txt_l = [ txt_s[0] ]
    chopped_lines = False
    for ll in txt_s[1:None] :
        # Strip leading junk from remaining lines.
        txt_l.append( ll[fcol:None] )
        # Warn about chopped characters
        if len(ll) > 0 and (not ll[0:fcol].isspace()):
            chopped_lines = True
    # NOTE(review): several lines are elided in this excerpt (the
    # chopped_lines guard, the print continuation, and the final
    # return) -- confirm against the original file.
    # For strings, chopping the left returns a different
    # string.  Similarly, expressions must not be chopped.
    print ("WARNING: source expression has inverted indentation."
           " Expect display oddities.\n"
           "   Expression: %(txt)s\n"
    txt = self.l3_repr()
    txt = "\n".join(txt_l)
astType.l3_repr_dedented = l3_repr_dedented
aNone.l3_repr_dedented = l3_repr_dedented
aList.l3_repr_dedented = l3_repr_dedented
# NOTE(review): the enclosing `def l3_repr(self):` line is elided in
# this excerpt -- confirm against the original file.
    # Return a parseable representation of self.
    if self._source_string in [None]:
        raise Exception("No valid l3 string available for " + repr(self))
    return self.source_string()
astType.l3_repr = l3_repr
astType.l3_string = l3_repr
aNone.l3_repr = l3_repr
aList.l3_repr = l3_repr
# NOTE(review): the enclosing `def l3_repr(self):` line is elided in
# this excerpt -- confirm against the original file.
    # Return a parseable representation of self.
    if self._source_string in [None]:
        # No source text recorded: fall back to the stored value.
        return self._primary[0]
    return self.source_string()
String.l3_repr = l3_repr
Symbol.l3_repr = l3_repr
Symbol.l3_string = l3_repr
# NOTE(review): the enclosing `def l3_repr(self):` line is elided in
# this excerpt -- confirm against the original file.
    # Return a parseable representation of self.
    if self._source_string in [None]:
        # No source text recorded: render the stored value directly.
        return str(self._primary[0])
    return self.source_string()
Immediate.l3_repr = l3_repr
def l3_string(self):
    # Return a more legible (but not parseable) representation of
    # NOTE(review): many lines are elided in this excerpt (the binding
    # of `st`, the quote-stripping bodies, and the return paths) --
    # confirm against the original file.
    if st[0] == st[-1] and st[0] in [ '"', "'" ]:
    '"""' # make etags happy.
    if st[0:3] == st[-3:None] and st[0:3] in [ '"""',
    elif st[0] == st[-1] and st[0] in [ '"', "'" ]:
String.l3_string = l3_string
def l3_string(self):
    # NOTE(review): body line elided in this excerpt -- confirm against
    # the original file.
aNone.l3_string = l3_string
def l3_string(self):
    # NOTE(review): body line elided in this excerpt -- confirm against
    # the original file.
Native.l3_string = l3_string
# Simple string representations of Immediate()s.
# Use get_infix_string() for new code.
def py_string(self):
    """Return the stored value formatted via the base string type
    (deliberately bypasses any subclass __str__)."""
    raw = self._primary[0]
    return StringType.__str__(raw)
String.py_string = py_string
def py_string(self):
    """Return the symbol's name formatted via the base string type
    (deliberately bypasses any subclass __str__)."""
    name = self._primary[0]
    return StringType.__str__(name)
Symbol.py_string = py_string
def py_string(self):
    """Return repr() of the wrapped native value."""
    value = self._primary[0]
    return repr(value)
# Install as Native's py_string implementation.
Native.py_string = py_string
def py_string(self):
    # NOTE(review): body line elided in this excerpt -- confirm against
    # the original file.
aNone.py_string = py_string
def py_string(self):
    # Static dependency restrictions: for a.b, both must be Symbols().
    # NOTE(review): the `else:` lines for both isinstance checks are
    # elided in this excerpt; the indentation below is reconstructed --
    # confirm against the original file.
    first = self.deref(0)
    second = self.deref(1)
    if isinstance(first, Symbol):
        if isinstance(second, Symbol):
            mem_name = second.py_string()
        raise DataDagError, "In a.b, b is not a name: " + str(second)
    raise DataDagError, "In a.b, a is not a name: " + str(first)
    return "%s.%s" % (first.py_string(), second.py_string())
Member.py_string = py_string
1817 #* Interaction with program text, low level support
1819 # For the full program tree, recursive "painting" of character
1820 # ranges is a simple way to go from the hierarchical information in
1823 # ~~~~~~~~~~~~~~~~~ , def
1825 # ~~~ , arg sequence
1826 # ~ ~ , individual args
1828 # ~~~~~ , expression
1830 # to the necessary "flat" information needed in the text sequence
1833 # A top-down painting is in the needed order.
def get_paint_array(code_str, max_val = 'inf'):
    """ Using the same string passed to reader.parse(),
    return a Program.paint_array() compatible structure.
    Direct use is DEPRECATED.
    """
    # >>> get_paint_array("first\nsecond")
    # [[0, 1, 2, 3, 4], [0, 1, 2, 3, 4, 5]]
    max_val = float(max_val)
    lines = code_str.split("\n")                # windows?
    lines_array = [max_val] * (len(lines))
    # NOTE(review): the loop header (presumably
    # `for i, line in enumerate(lines):`) and the final return are
    # elided in this excerpt -- confirm against the original file.
    lines_array[i] = [max_val] * (len(line))
def paint_array_start(self):
    """
    Form the _id array used for identification of tree nodes.
    Node ids are always integer; positions not corresponding to any
    node are filled with float values.
    """
    # NOTE(review): the initialization of `largest` is elided in this
    # excerpt -- confirm against the original file.
    for node in self.top_down():
        largest = max(largest, node._id)
    # Paint with values strictly above any real node id as filler.
    paint_a = get_paint_array(self._source_string, max_val = largest)
    self.paint_array(paint_a)
Nested.paint_array_start = paint_array_start
def generic_paint_array(self, arr):
    # Paint this node's _id over its character range in arr.
    if (self._first_char != None and self._last_char != None):
        row1, col1 = self._first_char
        row2, col2 = self._last_char    # col2 is 1 past the last character
        # Paint only the first row.
        # NOTE(review): the multi-row guard and the definition of `ll`
        # (row length?) are elided in this excerpt -- confirm against
        # the original file.
        arr[row1][col1 : ll] = [self._id] * (ll - col1)
        # Repaint the appropriate range with "color" self._id
        arr[row1][col1:col2] = [self._id] * (col2 - col1)
def paint_array(self, arr):
    """Paint this node's id into arr, then recurse into every child."""
    generic_paint_array(self, arr)
    for kid in self._primary:
        kid.paint_array(arr)
Nested.paint_array = paint_array
def paint_array(self, arr):
    # Leaf node: paint only this node's own character range.
    generic_paint_array(self, arr)
Immediate.paint_array = paint_array
def paint_array(self, arr):
    # NOTE(review): body line elided in this excerpt -- confirm against
    # the original file.
aNone.paint_array = paint_array
def paint_array(self, arr):
    generic_paint_array(self,arr)
    # aList keeps its children inside _primary[0].
    for child in self._primary[0]:
        child.paint_array(arr)
aList.paint_array = paint_array
1907 #* Incremental evaluation
1908 # Classes to use as status markers.
class IEInvalidTime(exceptions.Exception):
    # Marker for an invalid / unusable incremental-evaluation timestamp.
    def __init__(self, args="INV"):
        # NOTE(review): body elided in this excerpt -- confirm against
        # the original file.
    def __cmp__(self, other):
        # NOTE(review): body elided in this excerpt -- confirm against
        # the original file.
# Time stamp ordering is ie_setup_time, ie_external_time, integers
# NOTE(review): the definition of ie_setup_time is elided in this
# excerpt -- confirm against the original file.
ie_external_time = -11
ie_unusable_time = IEInvalidTime()
def __init__(self, initial_time = 1):
    # Incremental-evaluation bookkeeping container.
    assert (initial_time > 0 )
    # Simple time tracking.
    self._timestamp_dct = {}
    self._time = initial_time
    # Modified tree tracking.
    # _replacements ::= tree_id(new) -> (tree_id(orig), timestamp)
    self._replacements = {}
    # Function clone handling.
    #     (id, 'envs') -> (env, env)
    #     (id, call_count, arg_index) -> program
    #     ( (block_inv._id, ccount, arg_index) , 'envs' ) -> (env, env)
    #     program :: astType
    #     and (env, env) == eval_env, arg_env
    self._block_clones = {}
    # NOTE(review): some lines are elided in this excerpt (other members
    # such as _replacements_touched / _envs appear to be initialized
    # here) -- confirm against the original file.
IncEval.__init__ = __init__
#** General time stamping
def touch(self, id):
    # Stamp id with the current time.
    stamp = self._timestamp_dct[id] = self.time()
    # NOTE(review): line elided here in this excerpt -- presumably
    # `return stamp`; confirm against the original file.
IncEval.touch = touch
def touch_setup(self, id):
    # Stamp id with the special setup-only marker time.
    self._timestamp_dct[id] = ie_setup_time
IncEval.touch_setup = touch_setup
def is_setup_only(self, id):
    """True when id still carries the setup-only timestamp."""
    stamp = self._timestamp_dct[id]
    return stamp == ie_setup_time
IncEval.is_setup_only = is_setup_only
def touch_value(self):
    # NOTE(review): body line elided in this excerpt -- confirm against
    # the original file.
IncEval.touch_value = touch_value

# NOTE(review): the enclosing definition (presumably `def time(self):`)
# is elided in this excerpt -- confirm against the original file.
    return self._time - 1
def get_timestamp(self, tree_id):
    """Fetch the evaluation timestamp recorded for tree_id.

    Raises KeyError when tree_id was never stamped.
    """
    stamps = self._timestamp_dct
    return stamps[tree_id]
# Install as IncEval's timestamp accessor.
IncEval.get_timestamp = get_timestamp
def set_timestamp(self, id, stamp):
    """Record `stamp` for `id` (id ::= int | (int, string))."""
    self._timestamp_dct[id] = stamp
# Install as IncEval's timestamp mutator.
IncEval.set_timestamp = set_timestamp
def newest(self, fst, snd):
    """Return the more recent of the two timestamps fst and snd.

    For a compound result (list, tuple, etc.), changing a single entry
    changes the compound; use e.g.
        reduce(IncEval.newest, status_list)
    to determine the compound's status.
    """
    return snd if snd > fst else fst
# Install as IncEval's timestamp-combining helper.
IncEval.newest = newest
def tree_is_older(self, tree_id, val_time):
    # Compare the tree's and leaf's timestamp.
    # ----------------------
    # The remaining tested cases are
    # ----------------------
    # and these correspond to the usual ordering; overall, use
    #       setup < external < integer
    # NOTE(review): several comment (or code) lines in between are
    # elided in this excerpt -- confirm against the original file.
    tree_time = self._timestamp_dct[tree_id]
    return (tree_time < val_time)
IncEval.tree_is_older = tree_is_older
2020 # def is_newer(self, fst, snd):
2021 # # The standard ordering relations <, >, = require care with the
2022 # # special values here, so is_newer() is left undefined.
2023 # IncEval.is_newer = is_newer
# Function clones' semantics under incremental evaluation differ from
# those of Call clones.
def clone_table(self):
    """Expose the block-clone mapping used for function clones."""
    table = self._block_clones
    return table
# Install as IncEval's clone-table accessor.
IncEval.clone_table = clone_table
def has_clone(self, src):
    """Return True when a block clone has been registered for src."""
    # `in` replaces the deprecated dict.has_key() (removed in Python 3;
    # identical semantics and result).
    return src in self._block_clones
# Install as IncEval's clone-existence test.
IncEval.has_clone = has_clone
# Env() semantics under incremental evaluation differ from defaults.
# [ in at least Map.interpret() ]
def env_table(self):
    # NOTE(review): body line elided in this excerpt; presumably returns
    # self._envs (compare has_env_for below) -- confirm against the
    # original file.
IncEval.env_table = env_table
def has_env_for(self, id):
    """Return True when a saved Env() pair exists for id."""
    # `in` replaces the deprecated dict.has_key() (removed in Python 3;
    # identical semantics and result).
    return id in self._envs
# Install as IncEval's environment-existence test.
IncEval.has_env_for = has_env_for
2051 #* modified tree evaluation
2056 #** modified tree attributes
def set_original_for(self, new_id, orig_id):
    # NOTE(review): body elided in this excerpt; presumably records
    # (orig_id, timestamp) in self._replacements[new_id] (see
    # original_for below) -- confirm against the original file.
ModifiedEval.set_original_for = set_original_for
def original_for(self, tree_id):
    """Return the (id, timestamp) of the original tree replaced by
    tree_id, or None when tree_id is not a replacement."""
    replacements = self._replacements
    return replacements.get(tree_id)
# Install as ModifiedEval's replacement lookup.
ModifiedEval.original_for = original_for
def is_replacement(self, tree_id):
    """Return ACTIVE replacement status: tree_id was recorded as a
    replacement and has not since been marked as original again."""
    # `in` / `not in` replace the deprecated dict.has_key() (removed in
    # Python 3; identical semantics and result).
    return (tree_id in self._replacements and
            tree_id not in self._replacements_touched)
# Install as ModifiedEval's active-replacement test.
ModifiedEval.is_replacement = is_replacement
def not_replacement(self, tree_id):
    """Deactivate replacement status: treat tree_id as an original tree
    from now on."""
    self._replacements_touched[tree_id] = 1
# Install as ModifiedEval's replacement deactivator.
ModifiedEval.not_replacement = not_replacement
2080 #* post-parsing -- .setup()
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see aList.setup
    self._parent = parent._id
    # NOTE(review): some lines are elided in this excerpt between the
    # parent link and the store() call -- confirm against the original
    # file.
    self._id = storage.store(self, def_env)
    # Recursively set up all children; each child returns (child, id).
    child_id_list = [child.setup(self, def_env, storage)
                     for child in self._primary]
    self._primary = tuple([child for child, id in child_id_list])
    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)
    return self, self._id
Nested.setup = setup
def setup_if_for(self):
    # Replace macro-only identifiers for the special case
    # See also reader.py, rule
    #     expr : FOR expr IN expr COLON py_block
    # and l3if_chooser()
    #
    # Replace ITEMS, IDX, LEN, LOOP in
    #     print reader.parse('''
    #     ! LEN = len(! ITEMS)
    #     ! V = ! ITEMS[ ! IDX - 1 ]
    # but keep original V, SEQ, B.
    #
    # The following pattern is from above, with manual fix for "LOOP"().
    # NOTE(review): several lines are elided in this excerpt, including
    # the `if self._matcher.match(self, ...` lead-in and the `ma = ...`
    # binding -- confirm against the original file.
    If(String('for'), aList([Set(Marker('ITEMS'), Marker('SEQ')), Set(Marker('IDX'), Int(0)), Set(Marker('LEN'), Call(Symbol('len'), aList([Marker('ITEMS')]))), Set(Marker('LOOP'), Macro(aList([]), aList([If(Call(Symbol('<'), aList([Marker('IDX'), Marker('LEN')])), aList([Set(Marker('IDX'), Call(Symbol('+'), aList([Marker('IDX'), Int(1)]))), Set(Marker('V'), Call(Member(Symbol('operator'), Symbol('getitem')), aList([Marker('ITEMS'), Call(Symbol('-'), aList([Marker('IDX'), Int(1)]))]))), Marker('B'), Return(Call(Marker('LOOP'), aList([])))]), aList([]))]))), Call(Marker('LOOP'), aList([]))]), aList([]))):
        id_s = str(self._id)
        # One-time manual tree conversions:
        # (query-replace "Marker('ITEMS')" "Symbol('ITEMS' + id_s)" )
        # (query-replace "Marker('IDX')" "Symbol('IDX' + id_s)" )
        # (query-replace "Marker('LEN')" "Symbol('LEN' + id_s)" )
        # (query-replace "Marker('LOOP')" "Symbol('LOOP' + id_s)" )
        # (query-replace "Marker('SEQ')" "ma['SEQ']" )
        # (query-replace "Marker('V')" "ma['V']" )
        # (query-replace "Marker('B')" "ma['B']" )
        foo = list(self._primary)
        foo[1] = aList([Set(Symbol('ITEMS' + id_s), ma['SEQ']), Set(Symbol('IDX' + id_s), Int(0)), Set(Symbol('LEN' + id_s), Call(Symbol('len'), aList([Symbol('ITEMS' + id_s)]))), Set(Symbol('LOOP' + id_s), Macro(aList([]), aList([If(Call(Symbol('<'), aList([Symbol('IDX' + id_s), Symbol('LEN' + id_s)])), aList([Set(Symbol('IDX' + id_s), Call(Symbol('+'), aList([Symbol('IDX' + id_s), Int(1)]))), Set(ma['V'], Call(Member(Symbol('operator'), Symbol('getitem')), aList([Symbol('ITEMS' + id_s), Call(Symbol('-'), aList([Symbol('IDX' + id_s), Int(1)]))]))), ma['B'], Return(Call(Symbol('LOOP' + id_s), aList([])))]), aList([]))]))), Call(Symbol('LOOP' + id_s), aList([]))])
        self._primary = tuple(foo)
If.setup_if_for = setup_if_for
def setup_if_while(self):
    # Replace macro-only identifiers for the special case
    # See also reader.py, rule
    #     expr : WHILE expr COLON py_block
    # and l3if_chooser()
    #     print reader.parse('''
    #     if not !C:          # force boolean evaluation via not
    # but keep original C and B.
    #
    # The following pattern is from above, with
    #     Set(Symbol('WLOOP')...) -> Set(Marker('WLOOP')...)
    #     aList([Marker('B')]) -> Marker('B')
    # NOTE(review): several lines are elided in this excerpt, including
    # the `if self._matcher.match(self, ...` lead-in and the `ma = ...`
    # binding -- confirm against the original file.
    If(String('while'), aList([Set(Marker('WLOOP'), Macro(aList([]), aList([If(Call(Symbol('not'), aList([Marker('C')])), aList([Return(aNone())]), Marker('B')), Return(Call(Marker('WLOOP'), aList([])))]))), Call(Marker('WLOOP'), aList([]))]), aList([]))
        id_s = str(self._id)
        # One-time manual tree conversions:
        # (query-replace "Marker('WLOOP')" "Symbol('WLOOP' + id_s)" )
        # (query-replace "Marker('C')" "ma['C']")
        # (query-replace "Marker('B')" "ma['B']")
        foo = list(self._primary)
        foo[1] = aList([Set(Symbol('WLOOP' + id_s), Macro(aList([]), aList([If(Call(Symbol('not'), aList([ma['C']])), aList([Return(aNone())]), ma['B']), Return(Call(Symbol('WLOOP' + id_s), aList([])))]))), Call(Symbol('WLOOP' + id_s), aList([]))])
        self._primary = tuple(foo)
If.setup_if_while = setup_if_while
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see Nested.setup
    self._parent = parent._id
    self._id = storage.store(self, def_env)
    # Replace macro-only identifiers for the special case
    self.setup_if_while()
    # Replace macro-only identifiers for the special case
    # NOTE(review): lines elided in this excerpt -- presumably the
    # setup_if_for() call; confirm against the original file.
    # Continue regular setup.
    child_id_list = [child.setup(self, def_env, storage)
                     for child in self._primary]
    self._primary = tuple([child for child, id in child_id_list])
    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)
    return self, self._id
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    # Also see Nested.setup
    self._parent = parent._id
    self._id = storage.store(self, def_env)
    child_id_list = [child.setup(self, def_env, storage)
    # NOTE(review): the list-comprehension loop line is elided in this
    # excerpt (presumably `for child in self._primary[0]]`) -- confirm
    # against the original file.
    # aList stores its children as a single-element tuple of a list.
    self._primary = ([child for child, id in child_id_list],)
    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)
    return self, self._id
def setup(self, parent, def_env, storage):
    # Macro arguments must be set up in their expansion, but Inline
    # NOTE(review): comment continuation elided in this excerpt;
    # Inline uses the ordinary Nested setup path.
    return Nested.setup(self, parent, def_env, storage)
Inline.setup = setup
def setup(self, parent, def_env, storage):
    """Program()s are handed the appropriate Env() directly, so reuse
    def_env as the block environment instead of creating a child env
    (i.e. no def_env.new_child(self) here)."""
    env = def_env
    self._block_env = env
    return Nested.setup(self, parent, env, storage)
Program.setup = setup
def setup(self, parent, def_env, storage):
    # Bindings in environments.
    argument_env = def_env.new_child(self, name = "skel.arg")
    # Positional arguments.
    for ba in self.positional_block_args():
        argument_env.bind(ba, None)       # static dependency bindings.
    for (argn, argv) in self.named_block_args():
        argument_env.bind(argn, argv)     # static dependency bindings.
    block_env = argument_env.new_child(self, name = "skel.blck")
    self._arg_env = argument_env
    self._block_env = block_env
    rv = Nested.setup(self, parent, block_env, storage)
    # Provide name->id bindings.
    block_arg_l = self.deref(0)
    # NOTE(review): the `ii` counter initialization/increment lines and
    # the final `return rv` are elided in this excerpt -- confirm
    # against the original file.
    for ba in self.positional_block_args():
        argument_env.bind_id(ba, block_arg_l[ii]._id)
    for argn, _ in self.named_block_args():
        argument_env.bind_id(argn, block_arg_l[ii]._id)
Function.setup = setup
def setup(self, parent, def_env, storage):
    # Initialize internal tree.
    self.l_view_call.setup(parent, def_env, storage)
    rv = Nested.setup(self, parent, def_env, storage)
    # NOTE(review): the remainder (presumably `return rv` and the class
    # attachment) is elided in this excerpt -- confirm against the
    # original file.
def setup(self, parent, def_env, storage):
    # NOTE(review): a comment line is elided here in this excerpt.
    # Both the argument and block environments alias def_env.
    self._arg_env = def_env
    self._block_env = def_env
    rv = Nested.setup(self, parent, def_env, storage)
    # NOTE(review): the remainder (presumably `return rv` and the class
    # attachment) is elided in this excerpt -- confirm against the
    # original file.
def setup(self, parent, def_env, storage):
    # self._stored_in = None
    self._matcher = Matcher()
    for name in self.arg_names():
        def_env.bind_df(name, None)       # static dependency bindings.
    rv = Nested.setup(self, parent, def_env, storage)
    # .data_dag() preparation.
    arg_symbols = self.arg_names(raw_symbols = 1)
    # NOTE(review): the `ii` counter lines are elided in this excerpt --
    # confirm against the original file.
    for name in self.arg_names():
        def_env.bind_id_df(name, arg_symbols[ii]._id)
    # and use the name 'foo' for the body's _binding_name.
    #     import reader ; reload(reader) ;
    #     reader.parse('!! def_name string = !! the_block {|bb| bb}')
    # NOTE(review): parts of the match() patterns below are elided in
    # this excerpt -- confirm against the original file.
    if self._matcher.match(self,
                           Set(MarkerTyped(String('def_name'),
                               MarkerTyped(String('the_block'), Function()))):
        ma = self._matcher._matches
        ma['the_block']._binding_name = ma['def_name']
        # .data_dag() preparation -- also provide block binding
        def_env.bind_df(ma['def_name'].py_string(), ma['the_block'])
    # and use the name 'foo' for the Map()'s _binding_name.
    #     import reader ; reload(reader) ;
    #     reader.parse('!! def_name string = !! the_map { }')
    if self._matcher.match(self,
                           Set(MarkerTyped(String('def_name'),
                               MarkerTyped(String('the_map'), Map()))):
        ma = self._matcher._matches
        ma['the_map']._binding_name = ma['def_name']
        # .data_dag() preparation -- also provide block binding
        def_env.bind_df(ma['def_name'].py_string(), ma['the_map'])
def setup(self, parent, def_env, storage):
    """Give this node its own child environment, then delegate to
    Nested.setup inside that environment."""
    child_env = def_env.new_child(self, name = "skel.blck")
    self._block_env = child_env
    return Nested.setup(self, parent, child_env, storage)
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    self._parent = parent._id
    # NOTE(review): some lines are elided in this excerpt -- confirm
    # against the original file.
    self._id = storage.store(self, def_env)
    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)
    return self, self._id
Immediate.setup = setup
def setup(self, parent, def_env, storage):
    """ Set up post-parsing, pre-execution structures.
    """
    self._parent = parent._id
    # NOTE(review): some lines are elided in this excerpt -- confirm
    # against the original file.
    self._id = storage.store(self, def_env)
    # Incremental evaluation prep.
    storage.ie_.touch_setup(self._id)
    return self, self._id
Native.setup = setup
2411 #* Interpretation -- .interpret()
2412 # Incremental evaluation note:
2413 # After evaluation, a node's status is always 'evaluated', but the
2414 # .interpret() return status may be different -- to inform the
2415 # parent of appropriate action.
#** external entry point
def interpret_start(self, env, storage):
    # Top-level interpretation entry: any escaping exception is caught
    # and returned as the value instead of propagating.
    # NOTE(review): the `try:` line and the success-path `return` are
    # elided in this excerpt -- confirm against the original file.
    rv, ie_status = self.interpret(env, storage)
    except Exception, e:
        print "Warning: Returning Exception"
        return e                # ?? most useful info??
Program.interpret_start = interpret_start
def interpret(self, env, storage):
    # NOTE(review): body line elided in this excerpt; Nested is
    # presumably interface-only here -- confirm against the original
    # file.
Nested.interpret = interpret
def interpret(self, env, storage):
    # Run the python code and import the resulting names into the
    # current environment.
    #
    # The python fragment is ALWAYS interpreted, to avoid pickling
    # problems.  The values returned by the Python code must be
    # CONSTANT over repeated calls.
    #
    # The Python code fragment is considered atomic; changes to the
    # String() holding it will re-evaluate.
    #
    # The Python string can use l3 values, but it cannot call l3
    # This is unlike the l3 call py_f(l3_g, x), for which special handling
    # allows calling l3 from python.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    py_string, ie_status = self.deref(0).interpret(env, storage)
    # Incremental evaluation check.
    if storage.ie_.tree_is_older(self._id, ie_status):
        # To
        # - get useful error messages, and
        # - allow pickling of executed inline code including functions
        # the string is written to a file, and that executed.
        fname = os.path.join(os.getcwd(), "_l3_inline-%d" % self._id)
        py_file = open(fname, "w")
        py_file.write(py_string)
        # NOTE(review): some lines are elided here in this excerpt
        # (presumably py_file.close() and the binding of rv) -- confirm
        # against the original file.
        # Evaluate and store.
        # -- Assignments are made in a local python dict [py_env];
        #    new names and overwritten names are imported back to [env];
        # -- The [env] bindings are made available via globals() so
        #    they are not changed outside of L3 control.
        # These name binding cases are illustrated via
        #     all_globals = {"aa" : 10}
        #     all_globals.update(globals())
        #     exec "aa = aa + 1; aa += 10" in all_globals, py_env
        #     exec "global aa; aa = 1" in all_globals, py_env
        py_env = {'def_env' : env}
        all_globals = env.all_lexical_bindings({})
        all_globals.update(globals())
        # SECURITY NOTE: execfile runs arbitrary Python from the l3
        # program -- by design here, but never feed untrusted input.
        execfile(fname, all_globals, py_env)
        env.import_all_names(py_env)      # Uses bind_mem_only for
                                          # EXTERNAL time stamp.
        # Keeping py_env is a major pickle trap, so evaluate the
        # Python code body every time to get them.
        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", None,
        # NOTE(review): set_attributes continuation elided in this
        # excerpt.
        # Incremental evaluation data.
        ie_status = storage.ie_.touch(self._id)
        return rv, ie_status
    # Keeping py_env is a major pickle trap, so evaluate the
    # Python code body every time to get them.
    # The original evaluation's time stamp is used, so external
    # additions will not cause re-execution.
    fname = os.path.join(os.getcwd(), "_l3_inline-%d" % self._id)
    py_env = {'def_env' : env}
    all_globals = env.all_lexical_bindings({})
    all_globals.update(globals())
    execfile(fname, all_globals, py_env)
    env.import_all_names(py_env)          # Override all EXTERNAL time
    return ( py_env, storage.ie_.get_timestamp(self._id) )
Inline.interpret = interpret
def interpret(self, env, storage):
    # See also Map.interpret(), aList.interpret
    # Return last expr value.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    seq_expr = self.deref(0)
    if len(seq_expr) == 0:
        raise InterpreterError, "Empty program"
    # Interpret every child expression; collect (value, status) pairs.
    rv_stat_l = [(expr).interpret(env, storage) for expr in seq_expr]
    self._eval_env = env
    rv, last_stamp = rv_stat_l[-1]
    # Incremental evaluation check.
    newest_stamp = reduce(storage.ie_.newest,
                          [status for _, status in rv_stat_l])
    if storage.ie_.tree_is_older(self._id, newest_stamp):
        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv,
        # NOTE(review): set_attributes continuation elided in this
        # excerpt -- confirm against the original file.
        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        storage.ie_.set_timestamp( (self._id, 'value'), last_stamp)
        return rv, last_stamp
    # ---- Nothing changed.
    return storage.get_attribute(self._id, "interp_result"), \
           storage.ie_.get_timestamp( (self._id, 'value') )
Program.interpret = interpret
def interpret(self, env, storage):
    # Also see Call.interpret.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    # Incremental evaluation check.
    if storage.ie_.is_setup_only(self._id):
        # NOTE(review): lines elided here in this excerpt -- confirm
        # against the original file.
        # Evaluate named arguments.
        for (arg_name, expr) in self.named_block_args():
            arg_val, status = expr.interpret(env, storage)
            self._arg_env.bind_ptr(arg_name, arg_val, self._id)
            self._arg_env.bind_time_stamp_ptr(arg_name, status, self._id)
        # Interpretation values.
        storage.id2tree(self, self)
        storage.set_attributes(self._id,
                               "interp_result", self,
        # NOTE(review): set_attributes continuation elided in this
        # excerpt.
        # Incremental evaluation data.
        stamp = storage.ie_.touch(self._id)
    # A Function evaluates to itself.
    return self, storage.ie_.get_timestamp(self._id)
Function.interpret = interpret
def interpret(self, env, storage):
    # Also see Call.interpret.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    # Incremental evaluation check.
    if storage.ie_.is_setup_only(self._id):
        # NOTE(review): lines elided here in this excerpt -- confirm
        # against the original file.
        # Evaluate named arguments.
        # Interpretation values.
        storage.id2tree(self, self)
        storage.set_attributes(self._id,
                               "interp_result", self,
        # NOTE(review): set_attributes continuation elided in this
        # excerpt.
        # Incremental evaluation data.
        stamp = storage.ie_.touch(self._id)
    # A Macro evaluates to itself.
    return self, storage.ie_.get_timestamp(self._id)
Macro.interpret = interpret
def interpret(self, env, storage, tail_finishing_progs = []):
    # Also see CallableFunction.__call__
    # This code is for tail call handling.
    # NOTE(review): `[]` is a mutable default argument; the code below
    # copies it into a fresh list before extending, so the shared
    # default is never mutated.
    if self._pre_interp_hook:
        self._pre_interp_hook(self, env, storage)
    fin_progs = []                # Ensure NEW list.
    fin_progs.extend(tail_finishing_progs)
    # NOTE(review): the loop/`try:` wrapper lines around the dispatch
    # are elided in this excerpt -- confirm against the original file.
    # No Incremental evaluation data here.
    # #             rv, ie_status = self._call_real_interpret(env, storage)
    rv, ie_status = self._dispatch(env, storage)
    except Interpret_tail_call, contin:
        # A tail call restarts interpretation with the continuation's
        # (tree, env) and accumulates its finishing programs.
        self, env, _prog_l = contin.args
        fin_progs.extend(_prog_l)
    # Finish up after tail calls.
    for finish in fin_progs:
        finish(rv, ie_status)
    return rv, ie_status
Call.interpret = interpret
def _dispatch(self, env, storage):
    # Find block, ignore time stamp.
    # NOTE(review): the `try:` line is elided in this excerpt --
    # confirm against the original file.
    block, _ = self.deref(0).interpret(env, storage)
    except UnboundSymbol:
        # No l3 binding found, check shell commands.
        block = self.deref(0)
        if isinstance(block, (String, Symbol)):
            if glbl.shell_cmds.exists( block.py_string() ):
                return self._interp_shell(block, env, storage)
        raise InterpreterError, "Function/name not found: " + str(block)
    # {|| ... }(a,b)   or   [|| ... ](a,b)
    if isinstance(block, (Function, Macro)):
        return self._interp_func(block, env, storage)
    # Handle native Python
    elif callable(block):
        return self._interp_native(block, env, storage)
    raise InterpreterError, "Expected Function(), got " + str(block)
Call._dispatch = _dispatch
def _interp_shell(self, block, env, storage):
    # Verify argument count.
    #---- Bind named block arguments from Call().
    named_arg_status_l = [expr.interpret(env, storage)
                          for arg_name, expr in self.named_args()]
    # ---- Bind positional block arguments.
    pos_arg_status_l = [ba.interpret(env, storage)
                        for ba in self.positional_args() ]
    def new_call_new_val():
        # Convert arguments to strings.
        # A list could be converted to different textual forms
        # requiring a much more complex setup, so ignore lists for
        # NOTE(review): several lines are elided in this excerpt
        # (including the pos_wrapped / named_wrapped / arg_index
        # initialization, `else:` lines, and the `try:` wrapper) --
        # confirm against the original file.
        for ba, _ in pos_arg_status_l:
            if isinstance(ba, Nested):
                raise InterpreterError("no nested shell arguments")
            elif isinstance(ba, String):
                pos_wrapped.append(ba.py_string())
            elif not isinstance(ba, StringType):
                raise InterpreterError("shell arguments must evaluate "
                                       "to simple strings.")
            pos_wrapped.append(str(ba))
        for (arg_name, _) in self.named_args():
            arg, _ = named_arg_status_l[arg_index]
            if isinstance(arg, Nested):
                raise InterpreterError("shell arguments must evaluate "
                                       "to simple strings.")
            named_wrapped.append([arg_name, arg.py_string()])
        rv = (self._run_process(block.py_string(),
                                pos_wrapped, named_wrapped))
        ie_status = storage.ie_.time()
        except Interpret_tail_call:
            raise                         # not reached ?
        except Interpret_return, e:
            rv, ie_status = e.args        # not reached ?
        # Interpretation values.
        storage.id2tree(rv, self)
        storage.set_attributes(self._id,
                               "interp_result", rv, "interp_env", env)
        # Incremental evaluation data.
        storage.ie_.touch(self._id)
        storage.ie_.set_timestamp( (self._id, 'value'), ie_status)
        return rv, ie_status
    # ---- Incremental evaluation check.
    if (len(named_arg_status_l) + len(pos_arg_status_l)) == 0:
        if storage.ie_.is_setup_only(self._id):
            return new_call_new_val()
        return storage.get_attribute(self._id, "interp_result"),\
               storage.ie_.get_timestamp(self._id)
    arg_eval_time = reduce(storage.ie_.newest,
                           [status for _, status in named_arg_status_l] +
                           [status for _, status in pos_arg_status_l])
    if storage.ie_.tree_is_older(self._id, arg_eval_time):
        return new_call_new_val()
    return storage.get_attribute(self._id, "interp_result"),\
           storage.ie_.get_timestamp(self._id)
Call._interp_shell = _interp_shell
def _run_process(self, cmd, pos_arg_l, name_val_l):
    # This member function can be replaced by the gui for interactive
    # NOTE(review): comment continuation elided in this excerpt.
    return glbl.shell_cmds.system(cmd, pos_arg_l, name_val_l)
Call._run_process = _run_process
2762 def _interp_native(self
, py_func
, env
, storage
):
2763 # Verify argument count.
2764 ## if func_.nargs() != call_.nargs():
2766 #---- Bind named block arguments from Call().
2767 named_arg_status_l
= [expr
.interpret(env
, storage
)
2768 for arg_name
, expr
in self
.named_args()]
2770 # ---- Bind positional block arguments.
2771 pos_arg_status_l
= [ba
.interpret(env
, storage
)
2772 for ba
in self
.positional_args() ]
2774 def new_call_new_val():
2775 # ---- Check for any Function()s, and wrap them as callables.
2776 # E.g., for the call foo {|| ... }, where foo is a
2777 # Python function expecting a callable, the block is
2782 for ba
, _
in pos_arg_status_l
:
2783 if isinstance(ba
, Function
):
2785 CallableFunction(ba
, env
, storage
, self
, arg_index
))
2787 pos_wrapped
.append(ba
)
2793 for (arg_name
, _
) in self
.named_args():
2794 arg
, _
= named_arg_status_l
[arg_index
]
2795 if isinstance(arg
, Function
):
2796 named_wrapped
[arg_name
] = (
2797 # Note use of arg_name as index for CallableFunction.
2798 CallableFunction(arg
, env
, storage
, self
, arg_name
))
2800 named_wrapped
[arg_name
] = arg
2806 if len(named_wrapped
) > 0:
2807 rv
= py_func(*pos_wrapped
, **named_wrapped
)
2809 rv
= py_func(*pos_wrapped
)
2810 ie_status
= storage
.ie_
.time()
2812 except Interpret_tail_call
:
2813 raise # not reached ?
2814 except Interpret_return
, e
:
2815 rv
, ie_status
= e
.args
# not reached ?
2817 # Interpretation values.
2818 storage
.id2tree(rv
, self
)
2819 storage
.set_attributes(self
._id
,
2820 "interp_result", rv
, "interp_env", env
)
2822 # Incremental evaluation data.
2823 storage
.ie_
.touch(self
._id
)
2824 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
2826 return rv
, ie_status
2829 # ---- Incremental evaluation check.
2830 # Functions with no arguments are usually called for side
2832 # Assuming those do not influence the results of the
2833 # program, executing them on every pass is ok (but wasteful).
2835 # Or, by viewing zero-argument calls as constants, they can be
2836 # treated like Immediate()s -- and are only called one time.
2838 if (len(named_arg_status_l
) + len(pos_arg_status_l
)) == 0:
2839 if storage
.ie_
.is_setup_only(self
._id
):
2840 return new_call_new_val()
2842 return storage
.get_attribute(self
._id
, "interp_result"),\
2843 storage
.ie_
.get_timestamp(self
._id
)
2845 arg_eval_time
= reduce(storage
.ie_
.newest
,
2846 [status
for _
, status
in named_arg_status_l
] +
2847 [status
for _
, status
in pos_arg_status_l
])
2848 if storage
.ie_
.tree_is_older(self
._id
, arg_eval_time
):
2849 return new_call_new_val()
2851 return storage
.get_attribute(self
._id
, "interp_result"),\
2852 storage
.ie_
.get_timestamp(self
._id
)
2853 Call
._interp
_native
= _interp_native
2855 def _interp_func(self
, block
, env
, storage
):
2856 if isinstance(block
, Function
):
2857 is_new
, program
, eval_env
, arg_env
= \
2858 self
.call_function_prep(env
, storage
, block
)
2860 if isinstance(block
, Macro
):
2861 is_new
, program
, eval_env
, arg_env
= \
2862 self
.call_macro_prep(env
, storage
, block
)
2865 # ----------- Data for this block
2866 # Valid with tail call or without, so this MUST PRECEDE the
2867 # call to program.interpret(), below.
2870 storage
.push_attributes(block
._id
, "interp_clone", program
._id
)
2871 storage
.set_attributes(self
._id
,
2872 "interp_program", program
,
2873 "interp_env", arg_env
)
2874 storage
.set_attributes(program
._id
,
2875 # lexical information
2876 "clone_of", block
._id
,
2877 "interp_env", arg_env
,
2878 # dynamic information
2879 "cloned_by", self
._id
2882 # ----------- Evaluate
2883 # Affects: Program.interpret, Function_invo.interpret
2884 def finish(rv
, ie_status
):
2885 # Incremental evaluation check.
2886 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
2887 # Interpretation values.
2888 storage
.id2tree(rv
, self
)
2889 storage
.set_attributes(self
._id
, "interp_result", rv
)
2891 # Incremental evaluation data.
2892 storage
.ie_
.touch(self
._id
)
2893 storage
.ie_
.set_timestamp((self
._id
, 'value'), ie_status
)
2894 return rv
, ie_status
2897 return storage
.get_attribute(self
._id
, "interp_result"),\
2898 storage
.ie_
.get_timestamp(self
._id
)
2901 rv
, ie_status
= program
.interpret(eval_env
, storage
)
2902 except Interpret_tail_call
, contin
:
2903 new_tree
, _env
, _prog_l
= contin
.args
2904 _prog_l
.append(finish
)
2906 except Interpret_return
, e
:
2907 rv
, ie_status
= e
.args
2909 return finish(rv
, ie_status
)
2910 Call
._interp
_func
= _interp_func
2912 def call_function_prep(call_
, env
, storage
, func_
):
2913 # Prepare the block, evaluate arguments, and provide
2915 # See callable_interpret_prep().
2917 # Also see Function.interpret, CallableFunction.__call__
2918 # Verify argument matching.
2919 if func_
.nargs() != call_
.nargs():
2920 raise InterpreterError
, \
2921 ("Argument count mismatch: \n"
2924 " %s" % (func_
.source_string(),
2925 call_
.source_string()))
2926 call_names
= map(lambda (fst
, _
): fst
, call_
.named_args())
2927 func_names
= map(lambda (fst
, _
): fst
, func_
.named_block_args())
2928 for cn
in call_names
:
2929 if cn
not in func_names
:
2930 raise InterpreterError
, \
2931 ("Named argument mismatch: \n"
2935 "Name %s is not in definition." % (func_
.source_string(),
2936 call_
.source_string(),
2938 # Incremental evaluation check.
2939 if storage
.ie_
.has_clone( call_
._id
):
2940 ctab
= storage
.ie_
.clone_table()
2941 program
= ctab
[call_
._id
]
2942 eval_env
, arg_env
= ctab
[(call_
._id
, 'envs')]
2946 #---- Turn block into executable.
2947 # Under incremental evaluation this copy must not be formed if
2948 # a prior version exists.
2950 newfunc_
= func_
.block_copy(storage
)
2951 program
= Program(newfunc_
.raw_seq_expr())
2953 #---- Set up argument environment.
2954 if func_
._binding
_name
!= None:
2955 arg_env
= func_
._def
_env
.new_child(
2956 program
, name
= func_
._binding
_name
.py_string())
2958 arg_env
= func_
._def
_env
.new_child(program
, name
= "run.arg")
2960 #---- Bind named block arguments from Function() definition.
2961 for (arg_name
, _
) in func_
.named_block_args():
2962 arg_val
, status
= func_
._arg
_env
.ie_lookup_ptr(arg_name
)
2963 arg_env
.bind_ptr(arg_name
, arg_val
, call_
._id
)
2964 arg_env
.bind_time_stamp_ptr(arg_name
, status
, call_
._id
)
2966 #---- Bind positional block arguments
2967 # Get arguments' names.
2968 arg_names
= func_
.positional_block_args()
2972 for ba
in call_
.positional_args():
2973 arg_val
, status
= ba
.interpret(env
, storage
)
2974 arg_env
.bind_ptr(arg_names
[position_index
], arg_val
, call_
._id
)
2975 # Incremental evaluation.
2976 arg_env
.bind_time_stamp_ptr(arg_names
[position_index
],
2980 #---- Bind named block arguments from Call().
2981 for arg_name
, expr
in call_
.named_args():
2982 arg_val
, status
= expr
.interpret(env
, storage
)
2983 arg_env
.bind_ptr(arg_name
, arg_val
, call_
._id
)
2984 # Incremental evaluation.
2985 arg_env
.bind_time_stamp_ptr(arg_name
, status
, call_
._id
)
2988 #---- Set up evaluation environment.
2989 eval_env
= arg_env
.new_child(program
, name
= "run.blck")
2991 # ---- Finish program.
2992 # program.setup(block, eval_env, storage)
2993 program
.setup(empty_parent(), eval_env
, storage
)
2995 #---------------- later interaction
2996 cross_reference_trees(storage
, func_
, newfunc_
)
2999 # Incremental evaluation data.
3000 ctab
= storage
.ie_
.clone_table()
3001 ctab
[call_
._id
] = program
3002 ctab
[(call_
._id
, 'envs')] = eval_env
, arg_env
3005 return is_new
, program
, eval_env
, arg_env
3006 Call
.call_function_prep
= call_function_prep
3008 def call_macro_prep(call_
, env
, storage
, mac_
):
3009 # Prepare the block, evaluate arguments, and provide
3011 if mac_
.nargs() != 0:
3012 raise InterpreterError
, \
3013 ("Macros do not take arguments yet:\n"
3016 " %s" % (mac_
.source_string(),
3017 call_
.source_string()))
3019 # Incremental evaluation check.
3020 if storage
.ie_
.has_clone( call_
._id
):
3021 ctab
= storage
.ie_
.clone_table()
3022 program
= ctab
[call_
._id
]
3023 eval_env
, arg_env
= ctab
[(call_
._id
, 'envs')]
3027 #---- Turn block into executable.
3028 # Under incremental evaluation this copy must not be formed if
3029 # a prior version exists.
3031 newmac_
= mac_
.block_copy(storage
)
3032 program
= Program(newmac_
.raw_seq_expr())
3034 #---- Set up argument environment.
3037 #---- Set up evaluation environment.
3040 # ---- Finish program.
3041 # program.setup(block, eval_env, storage)
3042 program
.setup(empty_parent(), eval_env
, storage
)
3044 #---------------- later interaction
3045 cross_reference_trees(storage
, mac_
, newmac_
)
3048 # Incremental evaluation data.
3049 ctab
= storage
.ie_
.clone_table()
3050 ctab
[call_
._id
] = program
3051 ctab
[(call_
._id
, 'envs')] = eval_env
, arg_env
3054 return is_new
, program
, eval_env
, arg_env
3055 Call
.call_macro_prep
= call_macro_prep
3057 def interpret(self
, env
, storage
):
3058 # A 'return' in dml (likely) requires many function exits here in
3060 # Hence the exception-only exit.
3061 if self
._pre
_interp
_hook
:
3062 self
._pre
_interp
_hook
(self
, env
, storage
)
3064 tree
= self
.deref(0)
3065 if isinstance(tree
, Call
):
3068 # The current env is only used by Call.interpret()
3069 # to get the function body and argument evaluation; the
3070 # function body is evaluated in separate Env()s, preserving
3073 def finish(rv
, ie_status
):
3074 ### these timestamps are obtained when? They seem wrong
3075 ### under manual examination;
3076 ### As they are not used, make them invalid instead.
3077 # storage.ie_.set_timestamp( self._id, ie_unusable_time)
3080 # Incremental evaluation check.
3081 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3082 # Interpretation values.
3083 storage
.id2tree(rv
, self
)
3084 storage
.set_attributes(self
._id
,
3085 "interp_result", rv
, "interp_env", env
)
3087 # Incremental evaluation data.
3088 storage
.ie_
.touch(self
._id
)
3089 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3091 _prog_l
= [finish
] # ensure new list.
3092 raise Interpret_tail_call( (tree
, env
, _prog_l
) )
3096 rv
, ie_status
= tree
.interpret(env
, storage
)
3098 # Incremental evaluation check.
3099 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3100 # Interpretation values.
3101 storage
.id2tree(rv
, self
)
3102 storage
.set_attributes(self
._id
,
3103 "interp_result", rv
, "interp_env", env
)
3105 # Incremental evaluation data.
3106 storage
.ie_
.touch(self
._id
)
3107 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3108 raise Interpret_return( (rv
, ie_status
) )
3111 raise Interpret_return( (
3112 storage
.get_attribute(self
._id
, "interp_result"),
3113 storage
.ie_
.get_timestamp( (self
._id
, 'value') )
3115 Return
.interpret
= interpret
3118 def interpret(self
, env
, storage
):
3119 if self
._pre
_interp
_hook
:
3120 self
._pre
_interp
_hook
(self
, env
, storage
)
3123 object, _
= self
.deref(0).interpret(env
, storage
)
3126 # In a.b, b is only evaluated if it is not a Symbol().
3127 # This is done to avoid Symbol lookup in the regular
3128 # environments, as member access really has its own scoping
3130 second
= self
.deref(1)
3131 if isinstance(second
, Symbol
):
3132 mem_name
= second
.py_string()
3134 mem_name
, _
= second
.interpret(env
, storage
)
3135 if not isinstance(mem_name
, StringType
):
3136 raise InterpreterError
, "In a.b, b is not a name: " + str(mem_name
)
3139 if isinstance(object, DictType
):
3140 binding
= dict_ie_lookup_ptr( object, mem_name
)
3142 raise InterpreterError
, "No member '%s' found." % (mem_name
)
3143 binding
, ie_mem_status
= binding
3145 elif isinstance(object, Env
):
3146 # See also symbol.interpret.
3147 binding
= object.ie_lookup_ptr( mem_name
)
3149 raise InterpreterError
, "No member '%s' found." % (mem_name
)
3150 binding
, ie_mem_status
= binding
3153 # Just evaluate; let exceptions propagate as usual. Attribute
3154 # evaluation in Python differs for objects, functions, etc., so let
3155 # Python do the work.
3156 binding
= eval('object.' + mem_name
)
3158 # Incremental evaluation: external attributes cannot be controlled here.
3159 # If they are functions, their timestamps are ignored by Call(); if they
3160 # are values, they must be assumed constant.
3162 # Use external time for all member access. This will access
3163 # external members repeatedly, so they MUST be constant.
3164 ie_mem_status
= ie_external_time
3166 # Use external time for callable members only?
3168 # Use self's time stamp. This will cause pickle failures for
3169 # callable external types.
3170 # # ie_mem_status = storage.ie_.get_timestamp(self._id)
3171 # # if ie_mem_status in [None, ie_setup_time]:
3172 # # ie_mem_status = ie_external_time
3174 # Incremental evaluation check.
3175 # Even for an unchanged a.b tree, the a.b value may have
3176 # changed. Only the value's time stamp is propagated.
3177 def new_sym_new_val():
3178 # Interpretation values.
3179 storage
.id2tree(binding
, self
)
3181 # Do not retain references to external objects.
3182 if ie_mem_status
== ie_external_time
:
3183 storage
.set_attributes(self
._id
,
3184 "interp_result", "not_kept",
3187 storage
.set_attributes(self
._id
,
3188 "interp_result", binding
,
3191 # Incremental evaluation data.
3192 storage
.ie_
.touch(self
._id
)
3194 return binding
, ie_mem_status
3196 if storage
.ie_
.is_setup_only(self
._id
):
3197 return new_sym_new_val()
3200 if storage
.ie_
.tree_is_older(self
._id
, ie_mem_status
):
3201 return new_sym_new_val()
3203 return binding
, ie_mem_status
3204 Member
.interpret
= interpret
3207 def interpret(self
, env
, storage
):
3208 # See also Program.interpret
3210 if self
._pre
_interp
_hook
:
3211 self
._pre
_interp
_hook
(self
, env
, storage
)
3213 seq_expr
= self
.deref(0)
3214 if len(seq_expr
) == 0:
3215 raise InterpreterError
, "Empty Loop"
3217 rv_stat_l
= [(expr
).interpret(env
, storage
) for expr
in seq_expr
]
3219 rv
, last_stamp
= rv_stat_l
[-1]
3221 # Incremental evaluation check.
3222 newest_stamp
= reduce(storage
.ie_
.newest
,
3223 [status
for _
, status
in rv_stat_l
])
3224 if storage
.ie_
.tree_is_older(self
._id
, newest_stamp
):
3225 # Interpretation values.
3226 storage
.id2tree(rv
, self
)
3227 storage
.set_attributes(self
._id
,
3228 "interp_result", rv
,
3230 # Incremental evaluation data.
3231 storage
.ie_
.touch(self
._id
)
3232 storage
.ie_
.set_timestamp( (self
._id
, 'value'), last_stamp
)
3234 return rv
, last_stamp
3236 # ---- Nothing changed.
3237 return storage
.get_attribute(self
._id
, "interp_result"), \
3238 storage
.ie_
.get_timestamp( (self
._id
, 'value') )
3239 Loop
.interpret
= interpret
3242 def interpret(self
, env
, storage
):
3243 # 'if !condition !true else !false ;
3244 if self
._pre
_interp
_hook
:
3245 self
._pre
_interp
_hook
(self
, env
, storage
)
3247 condi
, ie_condi
= self
.deref(0).interpret(env
, storage
)
3249 rv
, ie_status
= self
.deref(1).interpret(env
, storage
)
3251 # aNone() evaluates to None, so no special case is needed here.
3252 rv
, ie_status
= self
.deref(2).interpret(env
, storage
)
3254 # Interpretation values.
3255 storage
.id2tree(rv
, self
)
3256 storage
.set_attributes(self
._id
,
3257 "interp_result", rv
,
3260 # Incremental evaluation data.
3261 # if's return timestamp is always that of its result; use time
3262 # of first interpretation for If() itself.
3263 if storage
.ie_
.is_setup_only(self
._id
):
3264 storage
.ie_
.touch(self
._id
)
3266 return rv
, ie_status
3267 If
.interpret
= interpret
3270 def interpret(self
, env
, storage
):
3271 # Also see Set.arg_names
3273 if self
._pre
_interp
_hook
:
3274 self
._pre
_interp
_hook
(self
, env
, storage
)
3277 value
, ie_status
= self
.deref(1).interpret(env
, storage
)
3280 names
= self
.deref(0)
3283 def handle_tuples():
3284 name_list
= names
._primary
[0]
3285 if len(name_list
) != len(value
):
3286 raise InterpreterError
, "Tuple lengths don't match: " + \
3287 str(name_list
) + str(value
)
3289 for ii
in range(0, len(name_list
)):
3291 # self._matcher.match_exp_str(nm, '!! name symbol'):
3292 if not self
._matcher
.match(nm
,
3293 MarkerTyped(String('name'),
3295 raise InterpreterError
, \
3296 "Set: Invalid argument type: " + str(nm
)
3297 # Add binding to env.
3298 lhs_symb
= self
._matcher
.get('name')
3299 env
.bind_ptr( lhs_symb
.as_index(), value
[ii
], self
._id
)
3300 storage
.id2tree(value
[ii
], nm
)
3302 # Bind the values to the Symbol()s. ### TEST
3303 storage
.set_attributes(lhs_symb
._id
,
3304 "interp_result", value
[ii
],
3307 # Incremental evaluation data.
3308 storage
.ie_
.touch(lhs_symb
._id
)
3309 env
.bind_time_stamp_ptr(lhs_symb
.as_index(), ie_status
,
3312 storage
.set_attributes(self
._id
,
3313 "interp_result", value
,
3316 # Incremental evaluation data.
3317 storage
.ie_
.touch(self
._id
)
3318 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3319 return value
, ie_status
3321 def handle_single():
3322 #----- Single-name binding.
3324 if not self
._matcher
.match(names
,
3325 MarkerTyped(String('name'),
3327 raise InterpreterError
, "Set: Invalid first argument type: " + \
3329 # Add binding to env.
3330 lhs_symb
= self
._matcher
.get('name')
3331 env
.bind_ptr( lhs_symb
.as_index(), value
, self
._id
)
3333 # Interpretation values.
3334 storage
.id2tree(value
, self
)
3335 storage
.set_attributes(self
._id
,
3336 "interp_result", value
,
3339 # Bind the value to the Symbol().
3340 storage
.set_attributes(lhs_symb
._id
,
3341 "interp_result", value
,
3344 # Incremental evaluation data.
3345 storage
.ie_
.touch(lhs_symb
._id
)
3346 storage
.ie_
.touch(self
._id
)
3347 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3348 env
.bind_time_stamp_ptr(lhs_symb
.as_index(), ie_status
, self
._id
)
3349 return value
, ie_status
3352 #----- Single-name binding.
3354 if not self
._matcher
.match(names
,
3355 MarkerTyped(String('name'),
3357 raise InterpreterError
, "Set: Invalid first argument type: " + \
3359 # Add binding to env.
3360 lhs_symb
= self
._matcher
.get('name')
3361 env
.set_ptr( lhs_symb
.as_index(), self
._id
)
3364 name_list
= names
._primary
[0]
3365 if len(name_list
) != len(value
):
3366 raise InterpreterError
, "Tuple lengths don't match: " + \
3367 str(name_list
) + str(value
)
3369 for ii
in range(0, len(name_list
)):
3371 if not self
._matcher
.match(nm
,
3372 MarkerTyped(String('name'),
3374 raise InterpreterError
, \
3375 "Set: Invalid argument type: " + str(nm
)
3376 # Add binding to env.
3377 lhs_symb
= self
._matcher
.get('name')
3378 env
.set_ptr( lhs_symb
.as_index(), self
._id
)
3380 # Incremental evaluation check.
3381 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3382 #----- Destructuring binding for tuples.
3383 if isinstance(names
, Tuple
):
3384 if isinstance(value
, (TupleType
, ListType
)):
3385 return handle_tuples()
3387 raise InterpreterError
, \
3388 "Expected tuple return value, got: " + str(value
)
3390 return handle_single()
3393 # Update the name pointer unconditionally, to emulate the
3394 # effect of overwriting.
3395 if isinstance(names
, Tuple
):
3399 return storage
.get_attribute(self
._id
, "interp_result"), \
3400 storage
.ie_
.get_timestamp( (self
._id
, 'value') )
3401 Set
.interpret
= interpret
3403 def interpret(self
, env
, storage
):
3404 # See also Program.interpret()
3406 if self
._pre
_interp
_hook
:
3407 self
._pre
_interp
_hook
(self
, env
, storage
)
3409 # Incremental evaluation check.
3410 if storage
.ie_
.has_env_for(self
._id
):
3411 self
._eval
_env
= storage
.ie_
.env_table()[self
._id
]
3414 # self._eval_env = Env(env.new_env_id(), env, self, storage)
3415 if self
._binding
_name
!= None:
3416 self
._eval
_env
= env
.new_child(
3417 self
, name
= self
._binding
_name
.py_string())
3419 self
._eval
_env
= env
.new_child(self
, name
= "run.blck")
3421 # Incremental evaluation data.
3422 storage
.ie_
.env_table()[self
._id
] = self
._eval
_env
3425 seq_expr
= self
.deref(0)
3426 rv_stat_l
= [(expr
).interpret(self
._eval
_env
, storage
)
3427 for expr
in seq_expr
]
3429 # If any bindings changed, self changed.
3430 if len(seq_expr
) == 0: # ?? use setup, previous time??
3431 ie_status
= storage
.ie_
.time()
3432 elif len(seq_expr
) == 1:
3433 _
, ie_status
= rv_stat_l
[0]
3435 ie_status
= reduce(storage
.ie_
.newest
,
3436 [status
for _
, status
in rv_stat_l
])
3438 # Incremental evaluation checks.
3439 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3441 # # rv = self._eval_env._bindings
3444 # Interpretation values.
3445 storage
.id2tree(rv
, self
)
3446 storage
.set_attributes(self
._id
,
3447 "interp_result", rv
,
3450 # Incremental evaluation data.
3451 storage
.ie_
.touch(self
._id
)
3452 return rv
, ie_status
3455 return storage
.get_attribute(self
._id
, "interp_result"), \
3456 storage
.ie_
.get_timestamp(self
._id
)
3457 Map
.interpret
= interpret
3460 def interpret(self
, env
, storage
):
3461 # See also Program.interpret()
3463 if self
._pre
_interp
_hook
:
3464 self
._pre
_interp
_hook
(self
, env
, storage
)
3466 # Incremental evaluation check.
3467 if storage
.ie_
.has_env_for(self
._id
):
3468 self
._eval
_env
= storage
.ie_
.env_table()[self
._id
]
3470 # self._eval_env = Env(env.new_env_id(), env, self, storage)
3471 if self
._binding
_name
!= None:
3472 self
._eval
_env
= env
.new_child(
3473 self
, name
= self
._binding
_name
.py_string())
3475 self
._eval
_env
= env
.new_child(self
, name
= "run.blck")
3477 # Incremental evaluation data.
3478 storage
.ie_
.env_table()[self
._id
] = self
._eval
_env
3480 # Form subdirectory.
3481 subdir
= self
._eval
_env
.into_directory()
3484 seq_expr
= self
.deref(0)
3486 rv_stat_l
= [(expr
).interpret(self
._eval
_env
, storage
)
3487 for expr
in seq_expr
]
3489 self
._eval
_env
.outof_directory()
3491 self
._eval
_env
.outof_directory()
3493 # If any bindings changed, self changed.
3494 if len(seq_expr
) == 0: # ?? use setup, previous time??
3495 ie_status
= storage
.ie_
.time()
3496 elif len(seq_expr
) == 1:
3497 _
, ie_status
= rv_stat_l
[0]
3499 ie_status
= reduce(storage
.ie_
.newest
,
3500 [status
for _
, status
in rv_stat_l
])
3502 # Incremental evaluation checks.
3503 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3507 # Interpretation values.
3508 storage
.id2tree(rv
, self
)
3509 storage
.set_attributes(self
._id
,
3510 "interp_result", rv
,
3513 # Incremental evaluation data.
3514 storage
.ie_
.touch(self
._id
)
3515 return rv
, ie_status
3518 return storage
.get_attribute(self
._id
, "interp_result"), \
3519 storage
.ie_
.get_timestamp(self
._id
)
3520 Subdir
.interpret
= interpret
def l3_dirname(self):
    """Return the l3 directory name for this Subdir, as recorded by
    its evaluation environment (delegates to the Env's l3_dirname)."""
    eval_env = self._eval_env
    return eval_env.l3_dirname()
Subdir.l3_dirname = l3_dirname
3527 def t_l_interpret(self
, env
, storage
, converter
= list):
3528 # Tuple and list interpretation common parts.
3530 # Also see Tuple.interpret, aList.interpret
3531 val_status_l
= [ child
.interpret(env
, storage
)
3532 for child
in self
.deref(0) ]
3535 if val_status_l
== []:
3536 rv_l
= converter([])
3539 # Incremental evaluation check.
3540 if storage
.ie_
.is_setup_only(self
._id
):
3541 # Interpretation values.
3542 storage
.id2tree(rv_l
, self
)
3543 storage
.set_attributes(self
._id
,
3544 "interp_result", rv_l
,
3546 # Incremental evaluation data.
3547 ie_status
= storage
.ie_
.touch(self
._id
)
3548 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3549 return rv_l
, ie_status
3552 # For Immediates(), the tree and value are one --
3553 # the tree_is_older() test is replaced by the previous
3555 return (storage
.get_attribute(self
._id
, "interp_result"),
3556 storage
.ie_
.get_timestamp((self
._id
, 'value')) )
3560 rv_l
= converter([rv
for rv
, stat
in val_status_l
])
3561 stat_l
= [stat
for rv
, stat
in val_status_l
]
3563 # Find "most needy" status and propagate that.
3564 ie_status
= reduce(storage
.ie_
.newest
, stat_l
)
3566 # Incremental evaluation check.
3567 # # print "t_l_interpret"
3568 if (storage
.ie_
.tree_is_older(self
._id
, ie_status
)):
3569 # Interpretation values.
3570 storage
.id2tree(rv_l
, self
)
3571 storage
.set_attributes(self
._id
,
3572 "interp_result", rv_l
,
3575 # Incremental evaluation data.
3576 storage
.ie_
.touch(self
._id
)
3577 storage
.ie_
.set_timestamp( (self
._id
, 'value'), ie_status
)
3578 return rv_l
, ie_status
3581 return storage
.get_attribute(self
._id
, "interp_result"), \
3582 storage
.ie_
.get_timestamp( (self
._id
, 'value') )
def interpret(self, env, storage):
    """Interpret a List node.

    Evaluates all children through the shared tuple/list helper and
    gathers the results into a Python list.
    """
    collect = list
    return t_l_interpret(self, env, storage, converter=collect)
List.interpret = interpret
def interpret(self, env, storage):
    """Interpret a Tuple node.

    Evaluates all children through the shared tuple/list helper and
    gathers the results into a Python tuple.
    """
    collect = tuple
    return t_l_interpret(self, env, storage, converter=collect)
Tuple.interpret = interpret
3594 def interpret(self
, env
, storage
):
3595 if self
._pre
_interp
_hook
:
3596 self
._pre
_interp
_hook
(self
, env
, storage
)
3598 rv
= self
._primary
[0]
3600 def new_sym_new_val():
3601 # Interpretation values.
3602 storage
.id2tree(rv
, self
)
3603 storage
.set_attributes(self
._id
,
3604 "interp_result", rv
,
3606 # Incremental evaluation data.
3607 ie_status
= storage
.ie_
.touch(self
._id
)
3608 return rv
, ie_status
3610 # Incremental evaluation check.
3611 if storage
.ie_
.is_setup_only(self
._id
):
3612 return new_sym_new_val()
3615 # For Immediates(), the tree and value are one --
3616 # the tree_is_older() test is replaced by the previous
3618 return ( storage
.get_attribute(self
._id
, "interp_result"),
3619 storage
.ie_
.get_timestamp(self
._id
) )
3620 Immediate
.interpret
= interpret
3621 aNone
.interpret
= interpret
3622 Native
.interpret
= interpret
3624 # modified tree check
3625 # if storage.ie_.is_replacement(self._id):
3626 # o_id, o_status = storage.ie_.original_for(self._id)
3627 # if storage.load(o_id).eql(self):
3628 # # This Immediate() is new, but its value is unchanged.
3629 # # Use the original's meta data.
3630 # storage.ie_.set_timestamp( self._id, o_status )
3631 # return (storage.get_attribute(o_id, "interp_result"),
3634 # bind, status = new_sym_new_val()
3635 # storage.ie_.not_replacement(self._id)
3636 # return bind, status
3639 def interpret(self
, env
, storage
):
3640 # Also see Set.interpret().
3642 if self
._pre
_interp
_hook
:
3643 self
._pre
_interp
_hook
(self
, env
, storage
)
3645 # Incremental evaluation checks
3648 # replaced | setup | value
3652 # ---------------------------------
3653 # interpret, | interpret, | age
3654 # stamp | stamp | decides
3656 binding
= env
.ie_lookup_ptr( self
.as_index() )
3658 # Unbound symbols may return themselves in the future...
3659 raise UnboundSymbol("No binding found for: " + self
.as_index())
3660 binding
, ie_status
= binding
3662 def new_sym_new_val():
3663 # Also see BinaryOper, UnaryOper.
3665 # Interpretation values.
3666 storage
.id2tree(binding
, self
)
3668 # Do not retain references to external objects.
3669 if ie_status
== ie_external_time
:
3670 storage
.set_attributes(self
._id
,
3671 "interp_result", "not_kept",
3674 storage
.set_attributes(self
._id
,
3675 "interp_result", binding
,
3678 # Incremental evaluation data.
3679 storage
.ie_
.touch(self
._id
)
3681 return binding
, ie_status
3683 if storage
.ie_
.is_setup_only(self
._id
):
3684 return new_sym_new_val()
3687 if storage
.ie_
.tree_is_older(self
._id
, ie_status
):
3688 return new_sym_new_val()
3690 return binding
, ie_status
3692 Symbol
.interpret
= interpret
3694 # modified tree check
3695 # if storage.ie_.is_replacement(self._id):
3696 # o_id, o_stamp = storage.ie_.original_for(self._id)
3697 # if storage.load(o_id).eql(self):
3698 # # This symbol is new, but its value is unchanged.
3699 # # ---------------------------------
3700 # storage.ie_.touch(self._id)
3701 # # ?? Symbol is newer than binding -- see Set.interpret()
3703 # # This works correctly with tree_is_older().
3704 # storage.ie_.set_timestamp( (self._id, 'value'), o_stamp )
3705 # return storage.get_attribute(o_id, "interp_result"), o_stamp
3708 # bind, status = new_sym_new_val()
3709 # storage.ie_.not_replacement(self._id)
3710 # return bind, status
3714 def interpret(self
, env
, storage
):
3715 # See also Program.interpret()
3717 if self
._pre
_interp
_hook
:
3718 self
._pre
_interp
_hook
(self
, env
, storage
)
3720 # Return last expr value.
3722 if len(seq_expr
) == 0:
3724 # Imitate aNone.interpret()
3727 def new_sym_new_val():
3728 # Interpretation values.
3729 storage
.id2tree(rv
, self
)
3730 storage
.set_attributes(self
._id
,
3731 "interp_result", rv
,
3733 # Incremental evaluation data.
3734 ie_status
= storage
.ie_
.touch(self
._id
)
3735 return rv
, ie_status
3737 # Incremental evaluation check.
3738 if storage
.ie_
.is_setup_only(self
._id
):
3739 return new_sym_new_val()
3741 return ( storage
.get_attribute(self
._id
, "interp_result"),
3742 storage
.ie_
.get_timestamp(self
._id
) )
3747 rv_stat_l
= [(expr
).interpret(env
, storage
) for expr
in seq_expr
]
3749 # This causes infinite recursions without __deepcopy__; but
3750 # __deepcopy__ of a list subclass works oddly.
3751 # # self._eval_env = env
3752 rv
, last_stamp
= rv_stat_l
[-1]
3754 # Incremental evaluation check.
3755 newest_stamp
= reduce(storage
.ie_
.newest
,
3756 [status
for _
, status
in rv_stat_l
])
3757 if storage
.ie_
.tree_is_older(self
._id
, newest_stamp
):
3758 # Interpretation values.
3759 storage
.id2tree(rv
, self
)
3760 storage
.set_attributes(self
._id
,
3761 "interp_result", rv
,
3763 # Incremental evaluation data.
3764 storage
.ie_
.touch(self
._id
)
3765 storage
.ie_
.set_timestamp( (self
._id
, 'value'), last_stamp
)
3767 return rv
, last_stamp
3769 # ---- Nothing changed.
3770 return storage
.get_attribute(self
._id
, "interp_result"), \
3771 storage
.ie_
.get_timestamp( (self
._id
, 'value') )
3772 aList
.interpret
= interpret
3774 #* post-execution data viewing
3775 # Support for retrieving "interesting" data.
3777 def directory(self
, dyn_tree
):
3778 # Convert get_dynamic_subtrees()'s output (or compatible) into
3779 # a l3 expression for display.
3780 # todo: FIXME: add a real heading/content element, not "foo"(bar)
3782 dir, content
= dyn_tree
3785 if isinstance(itm
, StringType
):
3786 argl
.append(Symbol(itm
))
3787 elif isinstance(itm
, TupleType
):
3788 argl
.append(directory(self
, itm
))
3790 rv
= List(aList(argl
))
3791 rv
.set_label(String(dir))
3793 Program
.directory
= directory
3795 def directory_l3(self
):
3796 # Produce a post-run directory containing only l3 named data.
3797 # This version works only for the topmost environment.
3800 # Get list of desirable names.
3801 (_
, list_1
) = self
._block
_env
.get_tree()
3802 top_names
= filter(lambda xx
: isinstance(xx
, StringType
), list_1
)
3804 # Compare against names present.
3805 (nm
, list_2
) = self
._eval
_env
.get_dynamic_subtrees()
3806 def tuple_or_top(xx
):
3807 if isinstance(xx
, TupleType
) or (xx
in top_names
):
3810 list_3
= filter(tuple_or_top
, list_2
)
3813 return self
.directory( (nm
, list_3
) )
3814 Program
.directory_l3
= directory_l3
#** python -> l3 conversion
def val2ast(val, file_contents = False, visited = None):
    """
    Produce a raw astType from a Python value `val` (including list
    and dict). This is analogous to the parser; the astType is not
    set up.

    Unrecognized types are wrapped as a Native().

    l3 types are returned unchanged.

    Recursion is detected and recursive structures are returned as
    `recurse_to_ID` strings.

    file_contents    If True, return file reference instead of name.

    NOTE(review): this block was mangled in extraction; several interior
    lines (visited init, branch headers, the trailing Native() fallback)
    were reconstructed from the docstring and visible structure -- verify.
    """
    if visited is None:
        visited = {}

    # Already an l3 type?
    if isinstance(val, (astType, aNone, aList, Native)):
        return val

    elif isinstance(val, Env):
        def filter_env():
            """ Return some bindings in this environment as a l3 Map.
            Only bindings of user interest are collected.
            """
            for child in val._children:
                if child._name in ["skel.arg", "skel.blck", "anonymous"]:
                    # Internal environments -- skip.  NOTE(review): the
                    # original line here was lost; `continue` assumed.
                    continue
                yield (child._name, "sub-env")

            for ky, vl in val.all_bindings().iteritems():
                if isinstance(ky, TupleType):
                    if len(ky) == 2 and ky[1] == 'ptr':
                        name = ky[0]
                        yield (name, val.lookup_ptr_1(name))

        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1
        return Map(aList([Set(val2ast(key, visited = visited,
                                      file_contents = file_contents),
                              val2ast(entry, visited = visited,
                                      file_contents = file_contents))
                          for key, entry in filter_env()]))

    # A known python type?
    elif isinstance(val, (IntType, LongType)): return Int(int(val))

    elif isinstance(val, FloatType): return Float(val)

    elif isinstance(val, StringType):
        if file_contents:
            return Native(FilepathString(val))
        return FilepathString(val)

    elif isinstance(val, ListType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1
        return List(aList([val2ast(entry, visited = visited,
                                   file_contents = file_contents)
                           for entry in val]))

    elif isinstance(val, TupleType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1
        return Tuple(aList([val2ast(entry, visited = visited,
                                    file_contents = file_contents)
                            for entry in val]))

    elif isinstance(val, DictType):
        if visited.has_key(id(val)):
            return Symbol("recurse_to_" + str(id(val)))
        visited[id(val)] = 1
        return Map(aList([ Set(val2ast(key, visited = visited,
                                       file_contents = file_contents),
                               val2ast(entry, visited = visited,
                                       file_contents = file_contents))
                           for key, entry in val.iteritems()]))

    else:
        # Unrecognized type: wrap as-is (see docstring).
        return Native(val)
def get_values_list(self, w_):
    ''' Form and return ((id, value) list).
    The `id` is the expression producing `value`; for multi-valued
    expressions, the clone id is used.

    NOTE(review): mangled in extraction; the `sid` binding, the clone
    test and the accumulator loop were reconstructed -- verify.
    '''
    st = w_.state_.storage
    sid = self._id
    clone_l = st.get_attribute(sid, "interp_clone")
    # todo: unfiltered, this list could be huge. Limit size? Or provide
    # an iterator?
    if clone_l:
        # Multi-valued: collect recursively from every clone.
        val_l = []
        for cid in clone_l:
            val_l += st.load(cid).get_values_list(w_)
        return val_l
    else:
        # Toplevel/final id.
        return [ (sid, st.get_attribute(sid, 'interp_result')) ]
astType.get_values_list = get_values_list
aList.get_values_list = get_values_list
#** dirpath (disk location)
def dirpath(self, w_):
    # Return the slash-joined l3 directory names of all Subdir parents
    # of this node.
    #
    # NOTE(review): one original source line between the list
    # construction and the join was lost in extraction (possibly a
    # paths.reverse() for root-first order) -- verify ordering.
    tw = TreeWork(w_.state_.storage)
    paths = [par.l3_dirname() for par in tw.find_all_parents(self, Subdir)]
    return "/".join(paths)
astType.dirpath = dirpath
aList.dirpath = dirpath
#** emphasis (special item properties)

# Emphasis may be used to distinguish special items.
# l_emph should be a logical description, e.g. "filelist"
# This member is only present when used.
# # self.l_emph = None

def set_emphasis(self, emph):
    ''' Set emphasis for this node to `emph`
    Emphasis may be used to distinguish special items.
    emph should be a logical description, e.g. "filelist".

    Any emphasis added here must have a corresponding entry in the
    deco.emph_color resource.
    '''
    # NOTE(review): the assignment line was lost in extraction; it is
    # implied by get_emphasis() reading self.__dict__['l_emph'].
    self.l_emph = emph
Native.set_emphasis = set_emphasis
astType.set_emphasis = set_emphasis
aList.set_emphasis = set_emphasis
aNone.set_emphasis = set_emphasis
def get_emphasis(self):
    """Return this node's emphasis tag, or None if none was set."""
    # The attribute only exists after set_emphasis(); reading the
    # instance dict directly avoids an AttributeError.
    return self.__dict__.get('l_emph', None)
Native.get_emphasis = get_emphasis
astType.get_emphasis = get_emphasis
aList.get_emphasis = get_emphasis
aNone.get_emphasis = get_emphasis
# Mechanism for adding generic attributes to astType trees, without
# polluting the instance's dict, and with lexically obvious syntax:
#     foo.setthe(size = 10)
# [ introduced after set_emphasis / get_emphasis]
#
# Some attributes must be set before .setup(), including those that
# need to be attached at tree-building time.

def setthe(self, **dct):
    # Attach key = value pairs to self. Using None as `value` is
    # legal but indistinguishable from "unset" in getthe().
    self._attributes.update(dct)
astType.setthe = setthe
aList.setthe = setthe
aNone.setthe = setthe
Native.setthe = setthe
def getthe(self, key):
    """Return the value attached to `key` via setthe(), or None."""
    return self._attributes.get(key, None)
astType.getthe = getthe
aList.getthe = getthe
aNone.getthe = getthe
Native.getthe = getthe
#* Data flow -- .data_dag()
class DataDagError(exceptions.Exception):
    # Raised for structural problems while building the data-flow dag.
    def __init__(self, args = None):
        # NOTE(review): the body was lost in extraction; storing `args`
        # matches the Python 2 exception convention -- verify.
        self.args = args
# No-value indicator for back-propagation of Return() effect.
class ReturnBranch:
    # NOTE(review): the class statement itself was lost in extraction;
    # reconstructed as an empty class to which methods are attached
    # below, matching the file's prevailing pattern.
    pass

def __init__(self, gid):
    # _from_gid collects the graph id(s) whose value a Return() carries.
    self._from_gid = [gid]
ReturnBranch.__init__ = __init__
def merge(self, other):
    # Combine another ReturnBranch's origin ids into this one.
    self._from_gid = self._from_gid + other._from_gid
    # NOTE(review): the return line was lost in extraction; `return
    # self` is required by callers such as `return yes_gid.merge(no_gid)`
    # in If.data_dag().
    return self
ReturnBranch.merge = merge
#** Functions for dicts of lists

# push val onto the end of the dict's key entry.
def dl_push(dict, key, val):
    # `key in dict` instead of has_key(): same semantics, works on
    # every Python version this file targets.
    if key in dict:
        dict[key].append(val)
    else:
        # NOTE(review): this branch was lost in extraction; creating the
        # initial one-element list is implied by dl_peek/dl_items.
        dict[key] = [val]
# return the most recently pushed value.
def dl_peek(dict, key):
    if key in dict:
        return dict[key][-1]
    # BUG FIX: the original raised the undefined name `Error` (a
    # guaranteed NameError); raise a defined lookup error instead.
    raise LookupError("No values.")
def dl_items(dict_):
    # Yield one (key, value) pair per pushed value.
    # NOTE(review): the loop body was lost in extraction; flattening
    # each value list is the natural inverse of dl_push -- verify.
    for k, val_l in dict_.items():
        for v in val_l:
            yield (k, v)
def equal_leading(list_, tuple_):
    # Return true if the leading entries of list_ and tuple_ are equal.
    # (Compares only up to the shorter of the two sequences.)
    # NOTE(review): the comparison body was lost in extraction;
    # reconstructed from the contract stated in the comment.
    for lv, rv in zip(list_, tuple_):
        if lv != rv:
            return False
    return True
#** Dag handling class
class astDag:
    # NOTE(review): the class statement was lost in extraction;
    # reconstructed as an empty class to which the methods below are
    # attached, matching the file's prevailing pattern.
    pass

def __init__(self, name, tree_id, starting_id):
    # tree_ids may map to several graph_ids; graph_id's are unique.
    self._new_id = starting_id          # make ids easier to (string) search

    # tree_id's are usually the int (or long) associated with a tree
    # node.  Also possible:
    #     tree_id ::= int | (ident, name, real_tree_id)
    # where ident is a logical identifier, e.g. "subgraph", and
    # real_tree_id is another int.
    self._graphid_2_treeid = {}         # graph_id -> tree_id
    self._treeid_2_graphid = {}         # tree_id -> (graph_id list)
    self._nodes = {}                    # graph_id -> (attribute list)
    #     attribute ::= <(key, value), >*
    # # self._edges = {} # graph_id -> graph_id list
    #
    # [logical structure]
    #     (node -> node) tuple -> attributes (via dict)
    # [physical structure]
    #     (graph_id, graph_id) -> [ (key, val), * ]
    # NOTE(review): the _edges initializer was lost in extraction; it is
    # required by add_edge()/dump_dot().
    self._edges = {}

    self._graph_id = self._new_id

    # By including self in the _graph_stack, every node belongs to
    # a graph:
    #     graph ::= [ graph_ident (, node)*]
    #     graph_ident ::= ( graph_id, name, tree_id )
    #     node ::= id | graph
    self._graph_stack = []
    self._graph_now = [(self._graph_id, name, tree_id)
                       ]                # The above nested graph type.
    # graph_ids of the enclosing subgraphs
    self._subgraph_stack = []           # graph_id list
    self._subg_stack_of = {}            # graph_id -> graph_id tuple
    # # self._subgraph_nodes = {} # (sub)graph_id -> graph_id list

    self._call_stack = []               # For .data_dag() use
    self._unique_names = {}             # nodes unique for a given external name

    # Function() / Return() interaction ids
    self._block_stack = []              # graph_id list
astDag.__init__ = __init__
# The subgraph is only a logical grouping; nodes in subgraphs are
# ordinary graph nodes.
# These subgraphs are tracked using explicit start/end calls to get
# internal context switch. Internally, just use stacks.
def start_subgraph(self, name, tree_id, attributes = []):
    # NOTE(review): the id increment and return were lost in
    # extraction; reconstructed to match add_node()'s visible pattern.
    # (`attributes = []` kept: it is only read, never mutated.)
    self._new_id += 1
    self._graph_stack.append(self._graph_now)
    self._graph_now = []
    # The subgraph header tuple comes first in the nested graph list.
    self._graph_now.append( (self._new_id, name, tree_id) )
    self._track(self._new_id, ("subgraph", name, tree_id),
                attributes)
    self._subgraph_stack.append(self._new_id)
    return self._new_id
astDag.start_subgraph = start_subgraph
def end_subgraph(self):
    """Close the current subgraph and resume its parent graph."""
    # Fold the finished nested list back into the enclosing graph,
    # then pop both tracking stacks.
    parent = self._graph_stack[-1]
    parent.append(self._graph_now)
    self._graph_now = parent
    del self._graph_stack[-1]
    del self._subgraph_stack[-1]
astDag.end_subgraph = end_subgraph
4155 def get_graph_id(self
, tree_id
):
4156 if self
._treeid
_2_graphid
.has_key(tree_id
):
4157 all
= self
._treeid
_2_graphid
.get(tree_id
)
4159 print "warning: multiple graph ids. Using most recent."
4160 return dl_peek(self
._treeid
_2_graphid
, tree_id
)
4163 astDag
.get_graph_id
= get_graph_id
def _track(self, graph_id, id, attributes):
    # Add a new node with graph_id, and
    # track associations between graph_id and incoming id.
    self._nodes[graph_id] = attributes
    self._graphid_2_treeid[graph_id] = id
    dl_push(self._treeid_2_graphid, id, graph_id)

    # Subgraph tracking: the node joins the graph under construction,
    # and remembers the (immutable) subgraph nesting it was born in.
    self._graph_now.append(graph_id)
    self._subg_stack_of[graph_id] = tuple(self._subgraph_stack)  # immutable
astDag._track = _track
def add_node(self, id, attributes = []):
    # id is the node's external identifier; a new node with unique
    # internal identifier is created on every call.
    # The internal identifier is returned.
    # NOTE(review): the increment and return lines were lost in
    # extraction; reconstructed from the surrounding id discipline.
    self._new_id += 1
    self._track(self._new_id, id, attributes)
    return self._new_id
astDag.add_node = add_node
def add_unique_node(self, name, attributes = []):
    # For a given name, always return the same graph_id.
    # name is the node's external and internal identifier.
    if self._unique_names.has_key(name):
        return self._unique_names[name]
    graph_id = self.add_node(name, attributes)
    self._unique_names[name] = graph_id
    # NOTE(review): this return was lost in extraction; implied by the
    # "always return the same graph_id" contract.
    return graph_id
astDag.add_unique_node = add_unique_node
def add_locally_unique_node(self, tree_id, attributes = []):
    # Within the current _subgraph_id and
    # for a given tree_id, always return the same graph_id .
    # tree_id is the node's external and internal identifier.
    graph_id = self.get_graph_id(tree_id)
    if graph_id == None:
        return self.add_node(tree_id, attributes)

    # In this or enclosing graph.
    if equal_leading(self._subgraph_stack,
                     self._subg_stack_of[graph_id]):
        # NOTE(review): this branch body was lost in extraction; reuse
        # of the existing id is implied by the "locally unique" contract.
        return graph_id
    else:
        return self.add_node(tree_id, attributes)
astDag.add_locally_unique_node = add_locally_unique_node
4214 def add_edge(self
, id1
, id2
, attributes
= []):
4215 # Usually, edges are child -> parent
4216 if self
._nodes
.has_key(id1
) and self
._nodes
.has_key(id2
):
4217 self
._edges
[(id1
, id2
)] = attributes
4219 raise DataDagError
, "Edges must use existing nodes."
4221 astDag
.add_edge
= add_edge
4223 def sanity_check(self
):
4224 if self
._graph
_stack
!= []:
4225 raise DataDagError
, "Unbalanced start_subgraph/end_subgraph() use."
4226 astDag
.sanity_check
= sanity_check
# NOTE(review): this block's text was mangled in extraction -- interior
# source lines (including parts of the emitted dot-format string
# literals) are missing and tokens are split across lines.  It is
# preserved byte-for-byte below rather than risk a wrong reconstruction
# of the lost graphviz header strings; recover from version control.
4228 def dump_dot(self
, file):
4233 Generated by astDag.dump_dot() */
4235 page = "8.5,11.0"; /* size of single physical page */
4236 size="7.5,10.0"; /* graph size */
4240 fontpath="%(SPXROOT)s/l3gui/fonts";
4241 node [shape=box,fontname="Courier", fontsize=12,
4242 width="0.1cm",height="0.1cm", /* snug fit around labels */
4244 edge [fontname="Courier", fontsize=12] ;
4245 edge [arrowsize=0.71];
4251 subgraph_color
= ['white', 'beige']
4253 def dump_subgraph(graph_now
, use_beige
):
4254 # Use graph_now to put nodes into the appropriate subgraphs.
4255 (gid
, name
, _
) = graph_now
[0]
4258 raise Exception, """dump_dot: Received name containing spaces.
4260 Internal error.\n""" % name
4262 file.write(""" /* Subgraph header gid=%d */
4263 subgraph cluster_%s_%d {
4269 subgraph_color
[use_beige
]))
4271 # When using the block itself as return value.
4272 # file.write(""" /* Subgraph id, to provide a real node for dot's use. */
4273 # %d [style=invis];\n """ % (gid))
4276 for gnode
in graph_now
[1:]:
4277 if isinstance(gnode
, ListType
):
4278 dump_subgraph(gnode
, not use_beige
)
4280 file.write("%s [" % gnode
) # Node.
4281 for key
, val
in self
._nodes
[gnode
]:
4282 file.write('%s = "%s",' % (key
, val
))
4286 file.write(""" }\n """)
4287 dump_subgraph(self
._graph
_now
, 0)
4289 # Use _edges to connect nodes.
4290 for (src
, dest
), att_l
in self
._edges
.items():
4291 file.write(""" %d -> %d [""" % (src
, dest
))
4292 for key
, val
in att_l
:
4293 file.write('%s = "%s",' % (key
, val
))
4294 file.write("""];\n""" )
4300 astDag
.dump_dot
= dump_dot
# NOTE(review): this block's text was mangled in extraction -- interior
# source lines (including parts of the emitted dot-format string
# literals) are missing and tokens are split across lines.  It is
# preserved byte-for-byte below rather than risk a wrong reconstruction
# of the lost graphviz header strings; recover from version control.
4303 def dump_dot_colored(self
, file):
4304 # Use the graph_ids as colors.
4309 Generated by astDag.dump_dot() */
4311 truecolor=1; /* needed for identification */
4312 page = "8.5,11.0"; /* size of single physical page */
4313 size="7.5,10.0"; /* graph size */
4317 fontpath="%(SPXROOT)s/l3gui/fonts";
4318 node [shape=box,fontname="Courier", fontsize=12,
4319 width="0.1cm",height="0.1cm", /* snug fit around labels */
4321 edge [fontname="Courier", fontsize=12] ;
4322 edge [arrowsize=0.71];
4328 def dump_subgraph(graph_now
):
4329 # Use _graph_now to put nodes into the appropriate subgraphs.
4330 (gid
, name
, _
) = graph_now
[0]
4334 """dump_dot_colored: Received name containing spaces.
4336 Internal error.\n""" % name
4338 file.write(""" /* Subgraph header gid=%d */
4339 subgraph cluster_%s_%d {
4348 # When using the block itself as return value.
4349 # file.write(""" /* Subgraph id, to provide a real node for dot's use. */
4350 # %d [style=invis];\n """ % (gid))
4353 for gnode
in graph_now
[1:]:
4354 if isinstance(gnode
, ListType
):
4355 dump_subgraph(gnode
)
4357 file.write("%s [" % gnode
) # Node.
4358 for key
, val
in self
._nodes
[gnode
]:
4359 file.write('%s = "%s",' % (key
, val
))
4361 file.write('style=filled, fillcolor= "#%.6x",' % gnode
+
4362 'fontcolor="#%.6x"' % gnode
);
4366 file.write(""" }\n """)
4367 dump_subgraph(self
._graph
_now
)
4369 # Use _edges to connect nodes.
4370 for (src
, dest
), att_l
in self
._edges
.items():
4371 file.write(""" %d -> %d [""" % (src
, dest
))
4372 for key
, val
in att_l
:
4373 file.write('%s = "%s",' % (key
, val
))
4375 file.write('color= "#%.6x",' % (src
*dest
,));
4376 file.write("""];\n""" )
4382 astDag
.dump_dot_colored
= dump_dot_colored
#** Main external entry points.

#*** Form initial data_dag, containing ALL nodes.
def data_dag_start(self, name, storage, starting_id):
    # name: name for the output graph
    # starting_id: starting id for the output graph nodes
    #
    # Run only *before* interpretation --
    # not very flexible wrt. incremental evaluation.
    dagraph = astDag(name, self._id, starting_id)
    self.data_dag(self._block_env, dagraph, storage)
    dagraph.sanity_check()
    # NOTE(review): the return line was lost in extraction; callers
    # need the finished graph.
    return dagraph
Program.data_dag_start = data_dag_start
4401 #*** Form viewer data_dag, containing only names and subgraphs.
4402 def get_reduced_dag(self
):
4403 new_
= deepcopy(self
)
4405 nodes
, edges
= new_
._get
_reduced
_dag
()
4413 if isinstance(el
, TupleType
): # graph_ident
4416 elif isinstance(el
, ListType
): # graph
4417 ngn
.append(prune_graph(el
))
4419 elif isinstance(el
, IntType
): # node
4420 if nodes
.has_key(el
):
4423 raise DataDagError
, "Invalid subgraph entry."
4426 new_
._graph
_now
= prune_graph(new_
._graph
_now
)
4428 # Sufficient for dump_dot; others?
4430 astDag
.get_reduced_dag
= get_reduced_dag
def _get_reduced_dag(self):
    # Given the full data flow dag, find those nodes and edges
    # involving actual bound names.
    # This prunes function calls and Immediate()s.
    #
    # NOTE(review): mangled in extraction; the named_nodes
    # comprehension head and the edges-dict construction were
    # reconstructed from the visible fragments -- verify.

    # Find labeled node connections: a node is "named" when it carries
    # a non-empty 'label' attribute.
    named_nodes = [key
                   for key, attr_l in self._nodes.iteritems()
                   if not (('label', '') in attr_l)]

    # Get edges in nested dict format.
    # edges = { src : {dst1 : , dst2 : , }, ...}
    edges = {}
    for s in self._nodes.iterkeys():  # cover nodes w/o outgoing edges
        edges[s] = {}
    for s, d in self._edges.iterkeys():
        edges[s][d] = 1

    # Find all nodes connected to the start_node node by a path not crossing
    # other named nodes.
    def indirect_to(edges, start_node, distance, neighbors_of):
        if edges.has_key(neighbors_of):
            for neighbor in edges[neighbors_of].keys():
                edges[start_node][neighbor] = distance
                # Paths that require CROSSING of a named_node are NOT wanted.
                if neighbor in named_nodes:
                    continue
                indirect_to(edges, start_node, distance + 1, neighbor)

    all_edges = deepcopy(edges)
    for start in named_nodes:
        indirect_to(all_edges, start, 1, start)

    # Get only name -> name edges.
    relevant_edges = {}
    for start in named_nodes:
        for dest in named_nodes:
            if start == dest: continue
            if all_edges[start].has_key(dest):
                relevant_edges[(start, dest)] = []

    # Use self._nodes format.
    relevant_nodes = dict([(ii, self._nodes[ii]) for ii in named_nodes])
    return relevant_nodes, relevant_edges
astDag._get_reduced_dag = _get_reduced_dag
# This grouping is by function, not class.
# Traversal is normal order, left-to-right.
# Relevant ids are returned by children; edges are formed by parents.
#
# valid graph_id s are None and the return values from .new_subgraph()
#
# For at least Symbol(), Function() and Call(), single-type
# dispatch (on class) is inadequate for retrieving nested information.
# Hence, the nested if's, with recursive calls, below.
#
# These effectively implement
# multiple passes (one per nesting level). This
# - allows viewing of increasingly more detailed graphs
# - simplifies the code
# - separates the block argument port handling (for the drawn graph)

#** data_dag() member, nested types
def data_dag(self, env, dagraph, storage):
    """Interface stub; concrete Nested subtypes override this."""
    raise InterfaceOnly("%s has no .data_dag() member" % self)
Nested.data_dag = data_dag
4513 #*** introducing their own Env()s
4514 def data_dag(self
, env
, dagraph
, storage
):
4515 sub_env
= self
._block
_env
4516 seq_expr
= self
.deref(0)
4517 if len(seq_expr
) == 0:
4518 raise DataDagError
, "Empty program"
4520 for expr
in seq_expr
:
4521 rv
= (expr
).data_dag(sub_env
, dagraph
, storage
)
4522 if isinstance(rv
, ReturnBranch
):# Rest is unreachable.
4525 Program
.data_dag
= data_dag
def data_dag(self, env, dagraph, storage):
    # A function makes no data contribution unless executed, and every
    # distinct invocation provides its own expansion.
    # NOTE(review): the (empty) body line was lost in extraction; an
    # explicit None return preserves the "no contribution" contract.
    return None
Function.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    # An Inline makes no data contribution until executed;
    # it is not parsed so only examination of the dictionary can show
    # what it contributed, and there is no simple way to know what it
    # used.
    # NOTE(review): trailing body line lost in extraction; explicit
    # None return preserves the "no contribution" contract.
    return None
Inline.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    # Draw the map's body inside its own subgraph, then add a single
    # node representing the map's environment as the map's value.
    #
    # NOTE(review): mangled in extraction; the else branch, the node-id
    # argument, part of the attribute list and the final return were
    # reconstructed -- verify.
    seq_expr = self.deref(0)
    sub_env = self._block_env

    if self._binding_name != None:
        sub_graph_id = dagraph.start_subgraph(
            self._binding_name.py_string(), self._id)
    else:
        sub_graph_id = dagraph.start_subgraph("anonymous_map", self._id)

    for expr in seq_expr:
        rv = expr.data_dag(sub_env, dagraph, storage)
        if isinstance(rv, ReturnBranch): # Rest is unreachable.
            break

    rv = dagraph.add_locally_unique_node(
        self._id,
        attributes = [('label', "ENV-%d" % self._id),
                      # ('fillcolor', 'yellow'),
                      ])

    dagraph.end_subgraph()
    return rv
    # return None # original
    # return sub_graph_id # subgraphs aren't nodes.
Map.data_dag = data_dag
4575 #*** using given Env()s
4576 def data_dag(self
, env
, dagraph
, storage
):
4578 operator
= self
.deref(0)
4579 if isinstance(operator
, (Symbol
, Member
)):
4580 bbinding
= operator
.data_dag_lookup(env
)
4582 if isinstance(bbinding
, Function
):
4583 if bbinding
._id
in dagraph
._call
_stack
:
4584 # Recursive call, use the name directly.
4586 return self
.data_dag_name(
4587 env
, dagraph
, storage
, operator
,
4588 [ ('label', operator
.py_string()),
4589 ('shape', "hexagon"),
4593 ## return operator.data_dag(env, dagraph, storage)
4595 # Use the looked-up Function()
4596 dagraph
._call
_stack
.append(bbinding
._id
)
4597 rv
= self
.data_dag_block(env
, dagraph
, storage
, bbinding
)
4598 del dagraph
._call
_stack
[-1]
4601 elif callable(bbinding
) or (bbinding
== "unknown_fn"):
4603 if 1: # full name of functions
4604 return self
.data_dag_name(
4605 env
, dagraph
, storage
, operator
,
4606 [('label', operator
.py_string()),
4607 ('shape', 'ellipse'),
4610 else: # functions as circles
4611 return self
.data_dag_name(
4612 env
, dagraph
, storage
, operator
,
4614 ('shape', 'circle'),
4616 ('style', 'filled'),
4617 ('width', '0.15cm'),
4618 ('fixedsize', 'true')])
4621 elif bbinding
== "unknown_member":
4622 # Binding is unclear. Treat a.b(c) as b(a,c), without
4623 # attempting to lookup b.
4627 # Pattern from reader.parse('! a . ! b( !c ) ') with mods
4630 Member(Marker('a'), Marker('b')),
4631 Marker('c'))) # c == aList([])
4633 # Tree from reader.parse('b(a,c) ')
4634 # The original tree is not to be touched; as python
4635 # doesn't have true lists, need a deepcopy here.
4636 new_args
= deepcopy(ma
['c']) # [ ... ]
4637 copy_attribute(ma
['c'], new_args
, '_id')
4638 new_args
.insert(0, ma
['a']) # [a, ...]
4639 tree
= Call(ma
['b'], new_args
)
4640 return tree
.data_dag(env
, dagraph
, storage
)
4643 raise DataDagError
, \
4644 ("Line: %d:col %d: Unknown static binding. Internal error."
4645 % operator
._first
_char
)
4647 elif isinstance(operator
, Function
):
4648 return self
.data_dag_block(env
, dagraph
, storage
, operator
)
4651 raise DataDagError
, "Expected Function(), got " + str(operator
)
4652 Call
.data_dag
= data_dag
def data_dag_name(self, env, dagraph, storage, operator, attrib_list):
    # Get this invocation's arguments' dags. [a,b from f(a,b)]
    # NOTE(review): the accumulator init, edge loop header and return
    # were lost in extraction; reconstructed -- verify.
    arg_seq = self.deref(1)
    arg_dags = []
    for expr in arg_seq:
        arg_dags.append(expr.data_dag(env, dagraph, storage))

    # Bind invocation arguments' dags to invoked function's name
    func_node = dagraph.add_node( operator._id, attributes = attrib_list)
    for ii in arg_dags:
        dagraph.add_edge(ii, func_node)
    return func_node
Call.data_dag_name = data_dag_name
def data_dag_block_body(env, dagraph, storage, block):
    # Get block dag via Program()
    # This includes references to argument names in _block_env, but no
    # connections are made to actual Function() arguments.
    #
    # NOTE(review): mangled in extraction; the Program construction,
    # the return-mark node id/attributes and the final return were
    # reconstructed -- verify.
    # HERE. make proper function program_from_seq()
    program = Program()
    program.setup(empty_parent(), block._block_env, storage)
    program._primary = (block.raw_seq_expr(), )

    if block._binding_name != None:
        sg = dagraph.start_subgraph(block._binding_name.py_string(), block._id)
    else:
        sg = dagraph.start_subgraph("anonymous_block", block._id)

    #-- Special treatment for block argument names.
    for argname in block.positional_block_args():
        dagraph.add_locally_unique_node(
            block._block_env.lookup_symbol_id(argname),
            attributes = [('label', "%s" % argname),
                          ('shape', "ellipse"),])

    # Marker node collecting the block's returned value(s).
    return_mark = dagraph.add_locally_unique_node(
        block._id,
        attributes = [('label', ""),
                      ('style', 'filled'),
                      ('width', '0.15cm'),
                      ('fixedsize', 'true')])

    # The block BINDING ids in dagraph._call_stack are not useful
    # here; the actual BLOCK id is needed.
    dagraph._block_stack.append(return_mark)

    graph_id = program.data_dag(block._block_env, dagraph, storage)

    if isinstance(graph_id, ReturnBranch):
        for src in graph_id._from_gid:
            dagraph.add_edge(src, return_mark,
                             attributes = [('color', 'blue'), ])
    else:
        dagraph.add_edge(graph_id, return_mark)

    del dagraph._block_stack[-1]
    dagraph.end_subgraph()
    return return_mark
4720 def data_dag_block(self
, env
, dagraph
, storage
, block
):
4721 # Also see Function.data_dag, Symbol.data_dag
4722 if block
.nargs() != self
.deref(1).__len
__():
4723 raise DataDagError
, \
4724 "Argument count mismatch: %s -- %s" % (block
.source_string(),
4725 self
.source_string())
4727 last_value
= data_dag_block_body(env
, dagraph
, storage
, block
)
4729 # Get this invocation's arguments' dags. [ a,b of f(a,b) ]
4730 arg_seq
= self
.deref(1)
4732 for expr
in arg_seq
:
4733 arg_dags
.append(expr
.data_dag(env
, dagraph
, storage
))
4734 # HERE. If f is a fnfn, certain arguments are Symbols() or
4735 # Function()s. Neither expands by default, but should do so here.
4737 # OR: look for specialized patterns in the .data_dag() routine.
4739 # Bind block argument names to invocation arg dags.
4740 blck_arg_syms
= block
.positional_block_args(symbols
= 1)
4741 for ii
in range(len(arg_dags
)):
4742 # For multiple values, use most recent.
4743 name_id
= dagraph
.get_graph_id(blck_arg_syms
[ii
]._id
)
4745 raise DataDagError
, \
4746 "No graph node found for tree_id %d" % blck_arg_syms
[ii
]._id
4747 dagraph
.add_edge(arg_dags
[ii
], name_id
)
4748 return last_value
# The return value of the block.
4749 # return sg # The block itself as return value (physical node)
4750 Call
.data_dag_block
= data_dag_block
4752 def data_dag_lookup(self
, env
):
4753 # Static dependency restrictions: for a.b, both must be Symbols();
4754 # otherwise, evaluation would be needed.
4756 # Also see Member.interpret.
4759 first
= self
.deref(0)
4760 second
= self
.deref(1)
4761 if isinstance(first
, Symbol
):
4762 if isinstance(second
, Symbol
):
4763 mem_name
= second
.py_string()
4765 raise DataDagError
, "In a.b, b is not a name: " + str(second
)
4767 raise DataDagError
, "In a.b, a is not a name: " + str(first
)
4769 # Get a (of a.b) -- the actual binding.
4770 object = env
.lookup_ptr( first
.as_index() )
4772 # To suppress modules from explicitly showing in the dag, first
4773 # check whether first.as_index() is a module.
4775 exec('import ' + first
.as_index())
4779 object = eval(first
.as_index())
4781 if isinstance(object, (Function
, Map
)):
4782 return object._block
_env
.lookup_ptr( mem_name
)
4784 elif isinstance(object, DictType
):
4785 return object[ mem_name
]
4787 elif isinstance(object, ModuleType
):
4788 member
= eval('object.' + mem_name
)
4791 elif isinstance(object, Env
):
4792 # Env()s have 2 possible member sources:
4793 # the original Python class,
4794 # and the Env() bindings.
4795 # To allow overriding, first try the bindings, then the
4796 # python 'built-ins'.
4797 stat
, rv
= object.lookup_status( mem_name
)
4800 return eval('object.' + mem_name
)
4802 elif object == None:
4803 # W/o static typing, any member access' validity is unclear.
4804 return "unknown_member"
4806 elif callable(object):
4810 raise DataDagError
, \
4811 ("Line: %d:col %d: Invalid reference found. Internal error."
4812 % object._first
_char
)
4813 Member
.data_dag_lookup
= data_dag_lookup
4815 def data_dag(self
, env
, dagraph
, storage
):
4816 # For a.b, the dag references b -- b is treated like a single name.
4818 # Static dependency restrictions: for a.b, both must be Symbols();
4819 # otherwise, evaluation would be needed.
4821 first
= self
.deref(0)
4822 second
= self
.deref(1)
4823 if isinstance(first
, Symbol
):
4824 if isinstance(second
, Symbol
):
4826 mem_name
= second
.py_string()
4828 raise DataDagError
, "In a.b, b is not a name: " + str(second
)
4830 raise DataDagError
, "In a.b, a is not a name: " + str(first
)
4832 # # if first.as_index() in ['userdata']:
4835 # Get a (of a.b) -- the actual binding.
4836 object = env
.lookup_ptr( first
.as_index() )
4838 if isinstance(object, Map
):
4839 # In the Map() tree, find the Symbol() used in the
4841 tree_id
= object._block
_env
.lookup_symbol_id( second
.as_index() )
4843 return dagraph
.get_graph_id(tree_id
)
4845 raise DataDagError
, ("Line: %d:col %d: " % object._first
_char
) + \
4846 "Undefined map binding."
4848 # return dagraph.add_locally_unique_node(
4850 # attributes = [('label', "%s" % second.as_index() )])
4851 elif isinstance(object, Function
):
4852 raise DataDagError
, ("Line: %d:col %d: " % first
._first
_char
) + \
4853 "block bindings are not available " + \
4854 "outside block interpretation -- use a map instead."
4856 # Object is external. Link to the full name (a.b) instead.
4857 name
= "%s.%s" % (first
.py_string(), second
.py_string())
4858 return dagraph
.add_node(name
, attributes
= [('label', name
)] )
4859 Member
.data_dag
= data_dag
def data_dag(self, env, dagraph, storage):
    # Build dags for condition and both branches, join them on a small
    # circle node; Return()s inside branches connect (blue) to the
    # enclosing block's return mark instead.
    #
    # NOTE(review): mangled in extraction; the early return, the node
    # id, the else branches and the final return were reconstructed --
    # verify.
    cond_gid = self.deref(0).data_dag(env, dagraph, storage)
    yes_gid = self.deref(1).data_dag(env, dagraph, storage)
    no_gid = self.deref(2).data_dag(env, dagraph, storage)

    if isinstance(cond_gid, ReturnBranch):
        # Odd, but possible.
        return cond_gid

    # All branches Return()
    # (fix: `and` instead of the original bitwise `&` on booleans)
    if isinstance(yes_gid, ReturnBranch) and isinstance(no_gid, ReturnBranch):
        return yes_gid.merge(no_gid)

    # At least one real return value. This (these) value(s) will be
    # propagated further, so the ReturnBranch edge is drawn here.
    self_gid = dagraph.add_node(
        self._id,
        attributes = [('label', ''),
                      ('shape', 'circle'),
                      ('style', 'filled'),
                      ('width', '0.15cm'),
                      ('fixedsize', 'true')])

    dagraph.add_edge(cond_gid, self_gid)

    if isinstance(yes_gid, ReturnBranch):
        for src in yes_gid._from_gid:
            dagraph.add_edge(src, dagraph._block_stack[-1],
                             attributes = [('color', 'blue'), ])
    else:
        dagraph.add_edge(yes_gid, self_gid,
                         attributes = [('color', '#00a000')]) # green

    if isinstance(no_gid, ReturnBranch):
        for src in no_gid._from_gid:
            dagraph.add_edge(src, dagraph._block_stack[-1],
                             attributes = [('color', 'blue'), ])
    else:
        dagraph.add_edge(no_gid, self_gid,
                         attributes = [('color', 'red')])

    return self_gid
If.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    # The real trick in Return() data flow is to ensure
    # never-reached parts are ignored. This is done in the other
    # data_dag() members: they check for a ReturnBranch result.
    arg_gid = self.deref(0).data_dag(env, dagraph, storage)
    return ReturnBranch(arg_gid)
Return.data_dag = data_dag
4917 def data_dag(self
, env
, dagraph
, storage
):
4918 # Note on ReturnBranch:
4919 # Assume no return occurs in the assigned expression.
4924 # Destructuring binding for tuples.
4925 if isinstance(lhs
, Tuple
):
4928 names
= lhs
[0] # The list.
4929 for _
in self
.arg_names():
4931 name_ids
.append(symbol
.data_dag(env
, dagraph
, storage
))
4933 if isinstance(rhs
, Tuple
):
4935 if len(values
) != len(names
):
4936 raise DataDagError
, \
4937 ("Line: %d:col %d:Tuple sizes in assignment "
4938 "don't match." % self
._first
_char
)
4942 val_id
= val
.data_dag(env
, dagraph
, storage
)
4943 # Must be local test below -- not in add_edge()
4945 dagraph
.add_edge(val_id
, name_ids
[ii
])
4950 # if isinstance(value, TupleType):
4951 # is not available w/o prior .interpret(); only a dag
4952 # making all binding names depend on all rhs contents can
4954 val_id
= rhs
.data_dag(env
, dagraph
, storage
)
4955 if val_id
!= None: # local test below.
4956 for name_id
in name_ids
:
4957 dagraph
.add_edge(val_id
, name_id
)
4960 # Plain symbol binding.
4961 elif isinstance(lhs
, Symbol
):
4962 val_id
= rhs
.data_dag(env
, dagraph
, storage
)
4963 if val_id
!= None: # local test.
4964 if isinstance(val_id
, ReturnBranch
):
4965 raise DataDagError
, (
4966 ("Line: %d:col %d:\n" %
4969 ("Return inside assignment: %s\n" %
4970 self
.source_substring())
4972 name_id
= lhs
.data_dag(env
, dagraph
, storage
)
4973 dagraph
.add_edge(val_id
, name_id
)
4979 raise DataDagError
, \
4980 ("Line: %d:col %d: Invalid lhs in assignment "
4982 Set
.data_dag
= data_dag
def data_dag(self, env, dagraph, storage):
    # Note on ReturnBranch:
    # Assume no return occurs in the list.
    #
    # NOTE(review): node-id argument, edge loop header and return were
    # lost in extraction; reconstructed -- verify.
    self_id = dagraph.add_node(
        self._id,
        attributes = [('label', ""),
                      # ('style', 'filled'),
                      ('width', '0.10cm'),
                      ('height', '0.10cm'),
                      ('fixedsize', 'true'),
                      ])
    children = [ child.data_dag(env, dagraph, storage)
                 for child in self.deref(0) ]
    for ch in children:
        dagraph.add_edge(ch, self_id)
    return self_id
List.data_dag = data_dag
def data_dag(self, env, dagraph, storage):
    # Note on ReturnBranch:
    # Assume no return occurs in the tuple.
    #
    # NOTE(review): node-id argument, edge loop header and return were
    # lost in extraction; reconstructed to mirror List.data_dag.
    self_id = dagraph.add_node(
        self._id,
        # # attributes = [('label', "TUPLE-%d" % self._id )]
        attributes = [('label', ""),
                      # ('style', 'filled'),
                      ('width', '0.10cm'),
                      ('height', '0.10cm'),
                      ('fixedsize', 'true'),
                      ])
    children = [ child.data_dag(env, dagraph, storage)
                 for child in self.deref(0) ]
    for ch in children:
        dagraph.add_edge(ch, self_id)
    return self_id
Tuple.data_dag = data_dag
5030 def data_dag(self
, env
, dagraph
, storage
):
5032 if len(seq_expr
) == 0:
5033 raise DataDagError
, "Empty program"
5035 for expr
in seq_expr
:
5036 rv
= (expr
).data_dag(env
, dagraph
, storage
)
5037 if isinstance(rv
, ReturnBranch
): # Rest is unreachable.
5040 aList
.data_dag
= data_dag
5043 #** data_dag() member, Immediate() types
def data_dag(self, env, dagraph, storage):
    # A None literal contributes a single node (no children, hence no
    # edges) to the data dag.
    node_id = dagraph.add_node(self._id)
    return node_id
aNone.data_dag = data_dag
5048 def data_dag(self
, env
, dagraph
, storage
):
5049 return dagraph
.add_node(
5052 # attributes = [('label', "%s" % self.to_plain_python(storage))]
5053 attributes
= [('label', ""),
5054 ('shape', "diamond"),
5056 ('width', '0.10cm'),
5057 ('height', '0.10cm'),
5058 ('fixedsize', 'true')]
5060 Immediate
.data_dag
= data_dag
5062 def data_dag(self
, env
, dagraph
, storage
):
5063 tree_id
= env
.lookup_symbol_id(self
.as_index())
5064 binding
= env
.lookup_ptr(self
.as_index())
5066 # External (or undefined) symbol. Use as-is, globally.
5067 return dagraph
.add_unique_node(
5069 attributes
= [('label', "%s" % self
.as_index() )])
5071 elif isinstance(binding
, Function
):
5072 return data_dag_block_body(env
, dagraph
, storage
, binding
)
5075 return dagraph
.add_locally_unique_node(
5077 attributes
= [('label', "%s" % self
.as_index() )])
5078 Symbol
.data_dag
= data_dag
5080 def data_dag_lookup(self
, env
):
5081 rv
= env
.lookup_ptr(self
.as_index())
5086 Symbol
.data_dag_lookup
= data_dag_lookup
5090 #* Path access for trees
5091 class InvalidPath(exceptions
.Exception):
5092 def __init__(self
, args
=None):
5096 # The notion of paths in the gtk treeview is suitable for use here.
5097 # It is defined as follows.
5099 # A path is a list of integer indices, 0-relative.
5100 # The column is a single integer index, again 0-relative.
5102 # A path points to a value; the value may be indexed (by the column)
5103 # or simple (a None column index).
5113 # ([], None) is 'a',
5114 # ([0, 1], None) is invalid,
5117 # Thus, the path index runs down the tree, the column index across.
5118 # This means a node can be both a tree (have children), and a row
5121 # Because the syntax is foo.element_at(), indexing starts at foo's
5122 # children. Thus, foo.element_at([]) returns foo.
5124 # Applied to ASTs, indices are logical and ignore intermediate
5125 # structures (aList)
5128 #** Element at index
5131 #*** testing expressions
5133 >>> pp =reader.parse('(a, b) = f(x,y)')
5135 >>> pp.element_at([])
5136 Program(aList([Set(Tuple(aList([Symbol('a'), Symbol('b')])), Call(Symbol('f'), aList([Symbol('x'), Symbol('y')])))]))
5138 >>> pp.element_at([0])
5139 Set(Tuple(aList([Symbol('a'), Symbol('b')])), Call(Symbol('f'), aList([Symbol('x'), Symbol('y')])))
5141 >>> pp.element_at([0,0])
5142 Tuple(aList([Symbol('a'), Symbol('b')]))
5144 >>> pp.element_at([0,0,0])
5147 >>> pp.element_at([0,1])
5148 Call(Symbol('f'), aList([Symbol('x'), Symbol('y')]))
5150 >>> pp.element_at([0,1,0])
5156 def element_at(self
, path
):
5157 # See also aList.element_at.
5159 Nested
.element_at
= element_at
5161 def _element_path(self
, path
, func
):
5164 elif len(path
) == 1:
5167 return func().element_at(path
[1:])
def element_at(self, path):
    # These types wrap their children in an aList at _primary[0];
    # path[0] selects among those children.
    pick = lambda: self._primary[0][path[0]]
    return _element_path(self, path, pick)
Tuple.element_at = element_at
Program.element_at = element_at
List.element_at = element_at
Map.element_at = element_at
def element_at(self, path):
    # These types hold their children directly in _primary;
    # path[0] selects among them.
    pick = lambda: self._primary[path[0]]
    return _element_path(self, path, pick)
Set.element_at = element_at
Macro.element_at = element_at
Inline.element_at = element_at
eTree.element_at = element_at
def element_at(self, path):
    # For
    #     Call(Symbol('f'), aList([Symbol('x'), Symbol('y')]))
    # the argument aList is transparent in PATH; index 0 selects the
    # callee, indices 1..n select the arguments.
    #
    # NOTE(review): _fun's definition line and the index-0 guard are
    # reconstructed from the two visible return branches -- confirm.
    def _fun():
        if path[0] == 0:
            return self._primary[ 0 ]
        else:
            return self._primary[ 1 ][ path[0] - 1 ]
    return _element_path(self, path, _fun)
Call.element_at = element_at
5202 def element_at(self
, path
):
5204 return self
# lvalue (via .deep_replace etc.)
5205 elif path
== ['value']:
5206 return self
._primary
[0] # Python rvalue.
5209 Int
.element_at
= element_at
5211 def element_at(self
, path
):
5213 Immediate
.element_at
= element_at
def element_at(self, path):
    # See Nested.element_at
    # aList is a container detail: parents index through it directly,
    # so reaching this method indicates a programming error.
    # Uses the call form of raise (portable, and consistent with the
    # file's ReplacementError("Child not found.") usage) instead of
    # the legacy 'raise Class, arg' statement form.
    raise InvalidPath(
        "internal error: aList() contents should be picked by parent.")
aList.element_at = element_at
5226 #** Form table of paths
def make_path_table(tree):
    # Form the table of paths for TREE.
    #
    # tree -- a selection_tree, constructed using eTree(), Macro(), etc.
    #
    # The table maps
    #     TBL[ path ]                 -> (name, macro)
    #     TBL[ (path, 'num_leaves') ] -> int
    # where a PATH is a tuple of indices, starting from 0.
    # For trees, macro is None; for leaves, the leaf itself.
    #
    # NOTE(review): the visible _make_path_table implementations do
    # not explicitly return tbl -- confirm the intended return value.
    return tree._make_path_table((), {})
def _make_path_table(self, path, tbl):
    # Record this tree under PATH, then recurse into its leaves.
    tbl[ path ] = ( self._label, self )
    # The leaves are l1, l2, ...
    leaves = len(self._primary) - 1
    tbl[ (path, 'num_leaves') ] = leaves
    for pos in range(0, leaves):
        # for leaf in self._primary(1:):
        leaf = self._primary[pos + 1]
        leaf._make_path_table(path + (pos,), tbl)
eTree._make_path_table = _make_path_table
def _make_path_table(self, path, tbl):
    # Leaf case: record the node itself under PATH.
    #
    # self -- a selection_tree node, constructed using eTree(), Macro(), etc.
    # path -- the path to this node, as index tuple (a, b, ...)
    tbl[ path ] = (self)
Macro._make_path_table = _make_path_table
Macro.update_tree_table = _make_path_table
Inline._make_path_table = _make_path_table
5269 def _make_path_table(self
, path
, tbl
):
5270 print "Warning: astType._make_path_table is deprecated."
5271 tbl
[ path
] = (self
, self
)
5273 astType
._make
_path
_table
= _make_path_table
def parent_id(self):
    # Storage id of the parent node.  Reads __dict__ directly, so a
    # node without a recorded parent yields None rather than raising
    # AttributeError.
    return self.__dict__.get('_parent')
astType.parent_id = parent_id
def parent_id(self):
    # Same contract as astType.parent_id: parent's storage id, or None
    # when the attribute was never set.
    return self.__dict__.get('_parent')
aList.parent_id = parent_id
Native.parent_id = parent_id
def has_parent(self):
    # True when a parent id is recorded.  Identity test ('is not
    # None') replaces the original '!= None': it is the idiomatic None
    # check and behaves identically here.
    return self.parent_id() is not None
astType.has_parent = has_parent
aList.has_parent = has_parent
cls_viewList.has_parent = has_parent
Native.has_parent = has_parent
5298 def placeholder(parent
, storage
):
5299 # Provide a valid (but meaningless) child.
5300 # Must be inserted in proper slot by parent.
5303 .set_char_range(zero_char
, zero_char
)
5304 .set_source_string('') )
5305 def_env
= Env('dummy_env', None, None, storage
)
5306 child
.setup(parent
, def_env
, storage
)
5310 return aNone(_id
= None)
5313 return self
._primary
[0][0]
def single_program(self):
    # In the case of a single program within this program, i.e.,
    #     Program(aList([Program(...)]))
    # return the innermost program only (nothing is lost).
    #
    # NOTE(review): the fall-through 'return self' is reconstructed --
    # confirm against the original source.
    inner = self._primary[0]
    if isinstance(inner[0], Program) and len(inner) == 1:
        return inner[0].single_program()
    return self
Program.single_program = single_program
5329 #** subtree replacement
5331 class ReplacementError(exceptions
.Exception):
5332 def __init__(self
, args
=None):
5336 def substitute(src
, dst
, tree
):
5337 # Replace SRC with DST in tree. SRC must match exactly.
5338 # The tree's source string is not replaced.
5339 lst
= find_exact(tree
, src
)
5341 elem
.shallow_replace(deepcopy(dst
).setup(empty_parent(),
5342 def_env
, storage
)[0],
def replacement_setup(self, new_val, storage):
    # Perform the .setup() action so that new_val can replace self:
    # attach new_val to self's parent and self's defining environment.
    o_parent = storage.load(self._parent)
    o_def_env = storage.get_attribute(self._id, "stored_from_env")
    new_val.setup(o_parent, o_def_env, storage)
astType.replacement_setup = replacement_setup
5357 #*** deep replacement
5358 def deep_replace(self
, new
, storage
, indent
= 0):
5359 # Replace self with new in parent and all clones.
5360 self
.shallow_replace(new
, storage
)
5363 clone_l
= storage
.get_attribute(self
._id
, "interp_clone")
5365 print ' '*indent
, "---------------------- clone data"
5368 # Set up subtree clone with appropriate parent.
5369 new_clone
= deepcopy(new
)
5370 clone
.replacement_setup(new_clone
, storage
)
5371 clone
.deep_replace(new_clone
, storage
)
5372 astType
.deep_replace
= deep_replace
5373 ### Immediate.deep_replace = deep_replace
5376 #*** shallow replacement
def shallow_replace(self, new, storage, indent = 0):
    # Replace self in parent with 'new'.
    #
    # Fix: use the call form of raise (consistent with
    # ReplacementError("Child not found.") used by replace_child)
    # instead of the legacy 'raise Class, arg' statement form.
    #
    # Parent referring to self.
    parent_id = self.parent_id()
    if parent_id is None:
        raise ReplacementError("no parent")
    parent = storage.load(parent_id)
    parent.replace_child(self._id, new)
astType.shallow_replace = shallow_replace
### Immediate.shallow_replace = shallow_replace
5392 #*** Re-establish object graph structures after edit or copy.
5394 def _update_refs(self
):
5396 astType
._update
_refs
= _update_refs
def _update_refs(self):
    # Re-derive the cached attribute from the (single-element)
    # primary tuple after an edit or copy.
    ( self._python_init_string, ) = self._primary
Inline._update_refs = _update_refs
5403 #*** child replacement
def replace_child(self, orig_id, new_node):
    # Swap the child whose storage id is ORIG_ID for NEW_NODE and fix
    # up parent pointers on both the detached and the new child.
    #
    # NOTE(review): the idx-None guard and the slot assignment line
    # are reconstructed (compare aList.replace_child) -- confirm.
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx == None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    slots = list(self._primary)
    slots[idx]._parent = None
    slots[idx] = new_node
    self._primary = tuple(slots)
    new_node._parent = self._id
Nested.replace_child = replace_child
def replace_child(self, orig_id, new_node):
    # Delegate to the contained aList, which holds the children.
    return self._primary[0].replace_child(orig_id, new_node)
List.replace_child = replace_child
def replace_child(self, orig_id, new_node):
    # Also see Nested.replace_child()
    #
    # Fix: call form of raise (consistent with Nested.replace_child's
    # ReplacementError("Child not found.")) instead of the legacy
    # 'raise Class, arg' statement form.
    # NOTE(review): the idx-None guard is reconstructed -- confirm.
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx == None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    foo = self._primary[0]
    foo[idx]._parent = None
    self._primary = (foo,)
    self[idx] = new_node
    new_node._parent = self._id
aList.replace_child = replace_child
5447 #** insert / append child
def append_child(self, child):
    # Appending is inserting at the end.
    self.insert_child(len(self), child)
aList.append_child = append_child
List.append_child = append_child
Program.append_child = append_child
def insert_child(self, index, child):
    # Insert CHILD at INDEX, updating both the wrapped list in
    # _primary and self (aList also behaves as a sequence).
    # Also see Nested.insert_child()
    #
    # Fix: identity test 'is not None' replaces '!= None' (idiomatic,
    # behaviorally identical for the None check).
    assert self._id is not None
    foo = self._primary[0]
    foo.insert(index, child)
    self._primary = (foo,)
    self.insert(index, child)
    child._parent = self._id
aList.insert_child = insert_child
def insert_child(self, index, child):
    # Insert CHILD at INDEX in the _primary tuple and record self as
    # its parent.
    # Also see aList.insert_child()
    #
    # Fix: identity test 'is not None' replaces '!= None' (idiomatic,
    # behaviorally identical for the None check).
    assert self._id is not None
    foo = list(self._primary)
    foo.insert(index, child)
    self._primary = tuple(foo)
    child._parent = self._id
Nested.insert_child = insert_child
def insert_child(self, index, child):
    # Delegate to the contained aList.
    return self._primary[0].insert_child(index, child)
List.insert_child = insert_child
Program.insert_child = insert_child
def alist_replace(self, obj):
    # Install OBJ as the (single) contained aList.
    assert isinstance(obj, aList)
    self._primary = (obj, )
cls_viewList.alist_replace = alist_replace
5487 #** insert into all clones
5488 def insert_child_rec(self
, index
, new
, storage
):
5491 # Insert 'new' in self and all clones.
5492 self
.insert_child(index
, new
)
5493 clone_l
= storage
.get_attribute(self
._id
, "interp_clone")
5497 # Set up subtree clone with appropriate parent.
5498 new_clone
= deepcopy(new
)
5499 copy_char_info(new
, new_clone
)
5502 storage
.get_attribute(self_cl
._id
, "stored_from_env"),
5504 cross_ref_trees(storage
, new
, new_clone
)
5505 self_cl
.insert_child_rec(index
, new_clone
, storage
)
5506 astType
.insert_child_rec
= insert_child_rec
5507 aList
.insert_child_rec
= insert_child_rec
5511 #** detach (delete) child
def detach_child(self, orig_id, storage):
    # Detaching a child leaves the size unchanged: the vacated slot is
    # filled with a placeholder.
    # Compare aList.detach_child()
    #
    # NOTE(review): the idx-None guard is reconstructed -- confirm.
    assert isinstance(orig_id, IntType)
    idx = self.find_child_index(orig_id)
    if idx == None:
        raise ReplacementError("Child not found.")
    # Update direct references.
    slots = list(self._primary)
    slots[idx]._parent = None
    slots[idx] = placeholder(self, storage)
    self._primary = tuple(slots)
Nested.detach_child = detach_child
5530 def detach_child(self
, orig_id
, storage
):
5531 # Both the contained alist (the real child) and alist's children
5532 # are considered children of List.
5534 assert isinstance(orig_id
, IntType
)
5536 if self
._primary
[0]._id
== orig_id
:
5538 # Update direct references.
5539 foo
= list(self
._primary
)
5540 foo
[idx
]._parent
= None
5542 foo
[idx
] = placeholder(self
, storage
)
5543 self
._primary
= tuple(foo
)
5548 return self
._primary
[0].detach_child(orig_id
, storage
)
5549 List
.detach_child
= detach_child
5551 def detach_child(self
, orig_id
, storage
):
5553 # Detaching a child changes the size.
5554 # Compare Nested.detach_child()
5555 assert isinstance(orig_id
, IntType
)
5556 idx
= self
.find_child_index(orig_id
)
5558 raise ReplacementError
, "Child not found."
5560 # Update direct references.
5561 foo
= self
._primary
[0]
5562 foo
[idx
]._parent
= None
5564 self
._primary
= (foo
,)
5567 aList
.detach_child
= detach_child
def detach_from_parent(self, storage):
    # Remove self from its parent's children.
    # Parent referring to self.
    parent_id = self.parent_id()
    if parent_id is None:
        raise ReplacementError("No parent.")
    parent = storage.load(parent_id)
    parent.detach_child(self._id, storage)
astType.detach_from_parent = detach_from_parent
aList.detach_from_parent = detach_from_parent
Native.detach_from_parent = detach_from_parent
5583 # # def detach_from_parent(self, storage):
5584 # # # Parent referring to self.
5585 # # parent_id = self.parent_id()
5586 # # if parent_id is None:
5587 # # raise ReplacementError("No parent.")
5589 # # parent = storage.load(parent_id)
5590 # # chid = self._real_tree._id
5591 # # parent.detach_child(chid, storage)
5592 # # cls_viewList.detach_from_parent = detach_from_parent
def delete(self, storage):
    # Remove from parent (when attached) and from storage.
    if self.parent_id():
        self.detach_from_parent(storage)
    for node in self.top_down():
        storage.remove(node._id)
Nested.delete = delete
Immediate.delete = delete
aList.delete = delete
Native.delete = delete
5606 # # def delete(self, storage):
5607 # # # Remove from storage.
5608 # # for ch in self.top_down():
5609 # # storage.remove(ch._id)
5610 # # aList.delete = delete
5615 #*** Tabular; clones, indentation -- grouped
5616 def clones_grouped(id, storage
, clone_level
, indent
):
5617 clone_l
= storage
.get_attribute(id, "interp_clone")
5619 for clone_id
in clone_l
:
5620 clone
= storage
.load(clone_id
)
5621 # Recursive -- not too legible, but complete.
5622 yield None, '(', None
5623 for rv
in clone
.prefix_grpd(storage
,
5624 clone_level
= clone_level
+ 1,
5627 yield None, ')', None
5630 def prefix_grpd(self
, storage
, clone_level
= 0, indent
= 0):
5631 # yield all tree nodes and their clones in
5632 # top-down, left-right, depth first form.
5633 # yielded are (tree, clone_level, indent_level)
5635 # Clones are completed before children, so only uncloned trees are
5636 # returned uninterrupted.
5638 yield self
, clone_level
, indent
5640 for clone
in clones_grouped(self
._id
, storage
, clone_level
, indent
):
5643 yield None, None, '('
5644 for child
in self
._primary
:
5645 for rv
in child
.prefix_grpd(storage
,
5646 clone_level
= clone_level
,
5647 indent
= indent
+ 1):
5649 yield None, None, ')'
5650 Nested
.prefix_grpd
= prefix_grpd
5652 def prefix_grpd(self
, storage
, clone_level
= 0, indent
= 0):
5653 yield self
, clone_level
, indent
5655 for clone
in clones_grouped(self
._id
, storage
, clone_level
, indent
):
5658 yield None, None, '('
5659 for child
in self
._primary
[0]: # Only difference -- index.
5660 for rv
in child
.prefix_grpd(storage
,
5661 clone_level
= clone_level
,
5662 indent
= indent
+ 1):
5664 yield None, None, ')'
5665 aList
.prefix_grpd
= prefix_grpd
5667 def prefix_grpd(self
, storage
, clone_level
= 0, indent
= 0):
5668 yield self
, clone_level
, indent
5670 for clone
in clones_grouped(self
._id
, storage
, clone_level
, indent
):
5672 Immediate
.prefix_grpd
= prefix_grpd
5674 def prefix_grpd(self
, storage
, clone_level
= 0, indent
= 0):
5675 yield self
, clone_level
, indent
5677 for clone
in clones_grouped(self
._id
, storage
, clone_level
, indent
):
5679 aNone
.prefix_grpd
= prefix_grpd
5682 #*** Tabular, including clones, with 'indentation'
5683 def yield_clones(id, storage
, clone_level
, indent
):
5684 clone_l
= storage
.get_attribute(id, "interp_clone")
5686 for clone_id
in clone_l
:
5687 clone
= storage
.load(clone_id
)
5688 # Recursive -- not too legible, but complete.
5689 for rv
in clone
.prefix_all( storage
,
5690 clone_level
= clone_level
+ 1,
5695 def prefix_all(self
, storage
, clone_level
= 0, indent
= 0):
5696 # yield all tree nodes and their clones in
5697 # top-down, left-right, depth first form.
5698 # yielded are (tree, clone_level, indent_level)
5700 # Clones are completed before children, so only uncloned trees are
5701 # returned uninterrupted.
5703 # This output is messy, but complete; displaying these
5704 # high-dimensional structures in a 1-D sequence is ALWAYS
5706 yield self
, clone_level
, indent
5708 for clone
in yield_clones(self
._id
, storage
, clone_level
, indent
):
5711 for child
in self
._primary
:
5712 for rv
in child
.prefix_all(storage
,
5713 clone_level
= clone_level
,
5714 indent
= indent
+ 1):
5716 Nested
.prefix_all
= prefix_all
5718 def prefix_all(self
, storage
, clone_level
= 0, indent
= 0):
5719 yield self
, clone_level
, indent
5721 for clone
in yield_clones(self
._id
, storage
, clone_level
, indent
):
5724 for child
in self
._primary
[0]: # Only difference -- index.
5725 for rv
in child
.prefix_all(storage
,
5726 clone_level
= clone_level
,
5727 indent
= indent
+ 1):
5729 aList
.prefix_all
= prefix_all
5731 def prefix_all(self
, storage
, clone_level
= 0, indent
= 0):
5732 yield self
, clone_level
, indent
5733 for clone
in yield_clones(self
._id
, storage
, clone_level
, indent
):
5735 Immediate
.prefix_all
= prefix_all
5737 def prefix_all(self
, storage
, clone_level
= 0, indent
= 0):
5738 yield self
, clone_level
, indent
5739 for clone
in yield_clones(self
._id
, storage
, clone_level
, indent
):
5741 aNone
.prefix_all
= prefix_all
5745 #*** With 'indentation'
5746 def top_down_indented(self
, indent
= 0):
5747 # top-down, left-right, depth first.
5749 for child
in self
._primary
:
5750 for cc
, cind
in child
.top_down_indented(indent
= indent
+ 1):
5752 Nested
.top_down_indented
= top_down_indented
5754 def top_down_indented(self
, indent
= 0):
5756 Immediate
.top_down_indented
= top_down_indented
5758 def top_down_indented(self
, indent
= 0):
5760 aNone
.top_down_indented
= top_down_indented
5761 Native
.top_down_indented
= top_down_indented
5763 def top_down_indented(self
, indent
= 0):
5765 for child
in self
._primary
[0]:
5766 for cc
, cind
in child
.top_down_indented(indent
= indent
+ 1):
5768 aList
.top_down_indented
= top_down_indented
5771 #*** Outline elements only, with level
5772 def outl_top_down(self
, level
= 0):
5773 # top-down, left-right, depth first.
5775 for child
in self
._outl
_children
:
5776 for cc
, cind
in child
.outl_top_down(level
= level
+ 1):
5778 cls_viewList
.outl_top_down
= outl_top_down
5779 Program
.outl_top_down
= outl_top_down
5783 # omit_children_of = [Function]
5784 # def top_down(self, omit_children_of = []):
5785 # # top-down, left-right, depth first.
5787 # if self.__class__ in omit_children_of:
5789 # for child in self._primary:
5790 # for cc in child.top_down():
5792 # Nested.top_down = top_down
5795 # top-down, left-right, depth first.
5797 for child
in self
._primary
:
5798 for cc
in child
.top_down():
5800 Nested
.top_down
= top_down
5804 Immediate
.top_down
= top_down
5808 aNone
.top_down
= top_down
5809 Native
.top_down
= top_down
5813 for child
in self
._primary
[0]:
5814 for cc
in child
.top_down():
5816 aList
.top_down
= top_down
5819 #*** Top-down, truncate subtrees of given type(s)
5820 def top_down_truncate(self
, omit_children_of
):
5821 # Top-down, node-left-right, depth first traversal. Instances of
5822 # classes in omit_children_of are returned but not traversed
5825 if self
.__class
__ in omit_children_of
:
5827 for child
in self
._primary
:
5828 for cc
in child
.top_down_truncate(omit_children_of
):
5830 Nested
.top_down_truncate
= top_down_truncate
5832 def top_down_truncate(self
, omit_children_of
):
5834 Immediate
.top_down_truncate
= top_down_truncate
5836 def top_down_truncate(self
, omit_children_of
):
5838 aNone
.top_down_truncate
= top_down_truncate
5839 Native
.top_down_truncate
= top_down_truncate
5841 def top_down_truncate(self
, omit_children_of
):
5843 if self
.__class
__ in omit_children_of
:
5845 for child
in self
._primary
[0]:
5846 for cc
in child
.top_down_truncate(omit_children_of
):
5848 aList
.top_down_truncate
= top_down_truncate
5851 #*** Flat, children only, generic callback interface
5852 def visit_children(self
, func
, **kwds
):
5854 # Call FUNC(CHILD, **kwds) for every CHILD.
5856 for child
in self
._primary
:
5858 Nested
.visit_children
= visit_children
5860 def visit_children(self
, func
, **kwds
):
5862 Immediate
.visit_children
= visit_children
5864 def visit_children(self
, func
, **kwds
):
5866 aNone
.visit_children
= visit_children
5867 Native
.visit_children
= visit_children
5869 def visit_children(self
, func
, **kwds
):
5870 for child
in self
._primary
[0]:
5872 aList
.visit_children
= visit_children
5875 #*** Flat, editable subtrees only, iterator
5877 # For a given l3 type, yield the subtrees intended to be editable,
5878 # bypassing containers (used by that type) they may be in.
5881 # Used for rendering of a textual program.
5883 for child
in self
._primary
:
5885 for cc
in child
.subtrees():
5886 yield cc
# iterator forwarding...
5887 Nested
.subtrees
= subtrees
5890 for child
in self
[0]:
5892 for cc
in child
.subtrees():
5893 yield cc
# iterator forwarding...
5894 Program
.subtrees
= subtrees
5898 for child
in self
[0]:
5900 for cc
in child
.subtrees():
5901 yield cc
# iterator forwarding...
5903 for child
in self
[1]:
5905 for cc
in child
.subtrees():
5906 yield cc
# iterator forwarding...
5907 Function
.subtrees
= subtrees
5910 # For a(b,c), return only b and c.
5912 for child
in self
[1]:
5914 for cc
in child
.subtrees():
5915 yield cc
# iterator forwarding...
5916 Call
.subtrees
= subtrees
5920 for cc
in self
[0].subtrees():
5921 yield cc
# iterator forwarding...
5923 for cc
in self
[1].subtrees():
5924 yield cc
# iterator forwarding...
5925 Member
.subtrees
= subtrees
5929 for cc
in self
[0].subtrees():
5930 yield cc
# iterator forwarding...
5931 for child
in self
[1]:
5933 for cc
in child
.subtrees():
5934 yield cc
# iterator forwarding...
5935 if not isinstance(self
[2], aNone
):
5936 for child
in self
[2]:
5938 for cc
in child
.subtrees():
5939 yield cc
# iterator forwarding...
5940 If
.subtrees
= subtrees
5941 ## Set.subtrees = subtrees
5944 for child
in self
[0]:
5946 for cc
in child
.subtrees():
5947 yield cc
# iterator forwarding...
5948 List
.subtrees
= subtrees
5949 Map
.subtrees
= subtrees
5950 Tuple
.subtrees
= subtrees
5955 Immediate
.subtrees
= subtrees
5960 aNone
.subtrees
= subtrees
5961 Native
.subtrees
= subtrees
5964 for child
in self
._primary
[0]:
5966 for cc
in child
.subtrees():
5967 yield cc
# iterator forwarding...
5968 aList
.subtrees
= subtrees
5971 #*** Flat, no recursion, editable subtrees1 only, iterator
5973 # For a given l3 type, yield the IMMEDIATE subtrees1 intended to be editable,
5974 # bypassing containers (used by that type) they may be in.
5977 # Used for rendering of a textual program.
5978 def subtrees1(self
):
5979 for child
in self
._primary
:
5981 Nested
.subtrees1
= subtrees1
5983 def subtrees1(self
):
5984 for child
in self
[0]:
5986 Program
.subtrees1
= subtrees1
5988 def subtrees1(self
):
5990 for child
in self
[0]:
5993 for child
in self
[1]:
5995 Function
.subtrees1
= subtrees1
5997 def subtrees1(self
):
5998 # For a(b,c), return only b and c.
6000 for child
in self
[1]:
6002 Call
.subtrees1
= subtrees1
6004 def subtrees1(self
):
6007 Member
.subtrees1
= subtrees1
6009 def subtrees1(self
):
6011 for child
in self
[1]:
6013 if not isinstance(self
[2], aNone
):
6014 for child
in self
[2]:
6016 If
.subtrees1
= subtrees1
6017 ## Set.subtrees1 = subtrees1
6019 def subtrees1(self
):
6020 for child
in self
[0]:
6022 List
.subtrees1
= subtrees1
6023 Map
.subtrees1
= subtrees1
6024 Tuple
.subtrees1
= subtrees1
6026 def subtrees1(self
):
6029 Immediate
.subtrees1
= subtrees1
6031 def subtrees1(self
):
6034 aNone
.subtrees1
= subtrees1
6035 Native
.subtrees1
= subtrees1
6037 def subtrees1(self
):
6038 for child
in self
._primary
[0]:
6040 aList
.subtrees1
= subtrees1
6044 #*** Flat traversal (node only).
6047 for child
in self
._primary
:
6049 Nested
.entries
= entries
6053 Immediate
.entries
= entries
6057 aNone
.entries
= entries
6058 Native
.entries
= entries
6061 for cc
in self
._primary
[0]:
6063 aList
.entries
= entries
6066 for child
in self
._primary
[0]:
6068 List
.entries
= entries
6069 Program
.entries
= entries
def num_entries(self):
    # Count of children held by the wrapped aList.
    return len(self._primary[0])
List.num_entries = num_entries
def get_child(self, idx):
    # The idx-th child of the wrapped aList.
    return self._primary[0][idx]
List.get_child = get_child
def find_exact(tree, search_tree):
    # Collect every node of TREE that compares .eql() to SEARCH_TREE.
    # NOTE(review): accumulator initialization and return were missing
    # from the extracted source and are reconstructed -- confirm.
    return [node for node in tree.top_down()
            if node.eql(search_tree)]
def find_type(tree, search_class):
    # Collect every node of TREE whose class is exactly SEARCH_CLASS
    # (subclasses do not match: exact __class__ comparison).
    # NOTE(review): accumulator initialization and return were missing
    # from the extracted source and are reconstructed -- confirm.
    return [node for node in tree.top_down()
            if node.__class__ == search_class]
6095 def find_non_nested_matches(tree
, pattern
):
6096 # Find matches of pattern in the outer tree, ie., those *not*
6097 # found inside Map(), Function()
6100 def find_type_non_nested(tree
, pattern
):
6101 # Find matches of type in the outer tree, ie., those *not*
6102 # found inside Function()
6105 def find_all_matches(tree
, pattern
):
6106 # Find *all* matches of pattern in tree, including *all* subtrees
6111 # A sample use may look like this:
6112 # seq_expr = object.seq_expr()
6116 # # reader.parse('!! def_name string = ! foo')
6118 # match_list = find_non_nested_matches(
6120 # Set( MarkerTyped( String('def_name'), Symbol('string')),
6122 # if len(match_list) == 0:
6123 # raise DataDagError, \
6124 # ("In the reference %s.%s, %s has no member %s\n" %
6125 # (first, second, first, second))
6126 # elif len(match_list) > 1:
6127 # raise DataDagError, \
6128 # (("In the reference %s.%s, " +
6129 # "%s has multiple bindings for %s\n") %
6130 # (first, second, first, second))
6134 #** verify tree structure
def verify_tree(node):
    # Sanity check: every child's _parent must hold its parent's id;
    # report each mismatch.
    #
    # Bug fix: the original line
    #     print "..." % chld, par
    # applies '%' to chld alone -- with two %s placeholders that
    # raises TypeError at the first mismatch.  The arguments belong in
    # one tuple.  (The parenthesized print form works unchanged in
    # Python 2 as well.)
    for par, chld, idx in node.top_down_parent():
        if chld._parent != par._id:
            print("child (%s) has wrong parent (%s)" % (chld, par))
6144 def __init__(self
, storage
):
6145 self
._storage
= storage
6147 def parent(self
, tree
):
6148 return self
._storage
.load( tree
._parent
)
6150 def all_parents(self
, tree
):
6152 parent
= self
.parent(tree
)
6158 TreeWork
.all_parents
= all_parents
6160 # Also useful: tree.top_down()
6161 # tw = TreeWork(storage)
6164 #** enclosing elements
def find_first_parent(self, tree, type_t):
    # Find first parent of type `type_t`; implicitly None when no
    # parent matches.
    # NOTE(review): the 'return parent' line was missing from the
    # extracted source and is reconstructed -- confirm.
    for parent in self.all_parents(tree):
        if isinstance(parent, type_t):
            return parent
TreeWork.find_first_parent = find_first_parent
def find_all_parents(self, tree, type_t):
    # Yield every parent of TREE of type `type_t`.
    # NOTE(review): the 'yield parent' line was missing from the
    # extracted source and is reconstructed -- confirm.
    for parent in self.all_parents(tree):
        if isinstance(parent, type_t):
            yield parent
TreeWork.find_all_parents = find_all_parents
6179 def find_root(self
, tree
):
6181 for parent
in self
.all_parents(tree
):
6184 TreeWork
.find_root
= find_root
6189 class ContextDispError(exceptions
.Exception):
6190 def __init__(self
, args
=None):
6195 CallContext(K, C, O, J)
6197 The Call K occurs in the clone C of the tree O;
6198 C was made by Call J.
6203 return "%s(%s, %s, %s, %s)" % (self
.__class
__.__name
__,
6208 CallContext
.__repr
__ = __repr__
6211 #*** get lexical + 1 dynamic (old)
6212 def get_calling_context(self
, node
):
6213 # prototype in v.1.1.2.10 of test-calc.py
6215 'node' is an integer.
6218 return a nested list of (clone_src_id, clone_id) tuples.
6222 path ::= [ (clone_src_id, clone_id) <, path>* ]
6224 This list represents (initial lexical + 1 dynamic) execution path
6225 leading to terminal (actually _used_*) copies of 'node'.
6228 the current clone of node
6231 the maker of the clone
6239 the first clone of B (CB1) is made by A(), but is never executed.
6240 This clone's parent tree is a Function(). '
6242 The second B clone is made by B() and executed. Its parent is
6243 therefore a Program().
6249 # forward one clone (K)
6252 # find first enclosing Program()
6253 # add to context (cloned_by attribute)
6254 # continue with K as starting tree
6256 # add to context (cloned_by attribute)
6259 # Data possibilities:
6261 # node: clone / path -> clone / path -> 0
6262 # node: (clone / path -> (clone / path -> 0))
6263 node
= self
._storage
.load(node
)
6265 geta
= self
._storage
.get_attribute
6266 clone_l
= geta(node
._id
, "interp_clone")
6267 if clone_l
: # int list
6269 for clone_id
in clone_l
:
6270 K
= self
._storage
.load(clone_id
)
6273 parent
= self
.find_first_parent(K
, (Function
, Program
))
6275 if isinstance(parent
, Function
):
6276 # Find enclosing Program()
6277 enclosing_prog
= self
.find_first_parent(parent
, Program
)
6278 if enclosing_prog
== None:
6279 raise ContextDispError
, "Unexpected program structure."
6281 # Get the context (cloning source)
6282 cloned_by
= (geta(enclosing_prog
._id
,
6283 "cloned_by"), clone_id
)
6284 if cloned_by
[0] == None:
6285 raise ContextDispError
, "No clone source. "\
6287 cloned_by
= [cloned_by
] + self
.get_calling_context(K
._id
)
6290 elif isinstance(parent
, Program
):
6291 #load = self._storage.load
6292 #print "program:cloned_by::", \
6293 # load(geta(parent._id, "cloned_by")).calltree_str()
6294 #print "program:clone_of::", \
6295 # load(geta(parent._id, "clone_of"))\
6296 # ._binding_name.calltree_str()
6298 # Get the context (cloning source)
6299 cloned_by
= [(geta(parent
._id
, "cloned_by"), clone_id
)]
6300 if cloned_by
[0][0] == None:
6301 raise ContextDispError
, "No clone source. "\
6305 raise ContextDispError
, "Unexpected program structure."
6306 clone_paths_l
.append(cloned_by
)
6307 return clone_paths_l
6310 TreeWork
.get_calling_context
= get_calling_context
6313 #*** get full dynamic
6314 def get_call_ctxt(self
, node
):
6315 # Get all dynamic call paths reaching 'node'; these are in inverse
6316 # execution order; starting from an astType's clones, find the
6317 # execution paths that created them.
6319 # Returns a list of all traversal paths:
6320 # CallContext list list
6321 # list follows the Call chain
6322 # list for all clones of node
6325 # This list could be merged to a single tree.
6327 def _loop(clone_id
, first
= 0):
6329 K
= self
._storage
.load(clone_id
)
6333 K_par
= self
.find_first_parent(K
, (Function
, Program
, Macro
))
6335 if isinstance(K_par
, (Function
, Macro
)):
6336 # K is an inert clone (no value, not executable).
6340 # There should be no inert nodes along an execution
6342 raise ContextDispError
, "Unexpected program structure."
6345 elif isinstance(K_par
, Program
):
6346 # Get the context (cloning source).
6350 cc
.O
= geta(cc
.C
, "clone_of")
6351 cc
.J
= geta(cc
.C
, "cloned_by")
6354 return [cc
] + _loop(cc
.J
)
6355 node
= self
._storage
.load(node
)
6356 geta
= self
._storage
.get_attribute
6357 clone_l
= geta(node
._id
, "interp_clone")
6360 clone_paths_l
= [_loop(clone_id
, first
= 1)[::-1]
6361 for clone_id
in clone_l
]
6362 return filter(None, clone_paths_l
)
6366 TreeWork
.get_call_ctxt
= get_call_ctxt
6370 #*** prune to selection
6371 def prune_cctxt(self
, cctxt_chains
, must_include
):
6373 # Find all chains containing the ids in must_include
6374 # must_include ::= (call id) list
6377 # The full case (not dealt with):
6378 # must_include ::= (call id) list list
6379 # where the meaning is [ [id AND id ...] OR [id AND id ...] ]
6380 # The ids must use the call chains' order.
6382 # This is a full tree pattern match: multiple input lists,
6383 # multiple match targets.
6385 # The full calling context list is potentially huge; forming it
6386 # just to prune it later is really ineffient...
6388 # The AND lists could differ only in their last entries, requiring
6390 # a. an advanced pruning algorithm
6393 # The case of brute force is likely sufficient for interactive
6394 # use. This simplifies must_include to
6395 # must_include ::= (call id) list
6398 if len(must_include
) == 0:
6399 return copy(cctxt_chains
)
6402 for call_chain
in cctxt_chains
:
6403 # Check if ALL entries of must_include are in ANY calling
6405 found
= map( lambda _
: 0, must_include
)
6406 for cctxt
in call_chain
:
6407 # Horribly inefficient...
6408 if cctxt
.K
in must_include
:
6409 found
[must_include
.index(cctxt
.K
)] = 1
6410 if reduce(lambda x
,y
: x
and y
, found
):
6412 _kept_chains
.append(call_chain
)
6415 TreeWork
.prune_cctxt
= prune_cctxt
6419 #*** Get leaves and values.
6420 def cctxt_leaves(self
, cctxt_chains
):
6421 stg
= self
._storage
.get_attribute
6423 for chain
in cctxt_chains
:
6425 values
.append( (cid
, stg(cid
, 'interp_result')) )
6427 TreeWork
.cctxt_leaves
= cctxt_leaves
6432 # Strings that taken together provide a meaningful display of the call
def calltree_str(self):
    # Display string for a leaf node: the literal value it wraps.
    value = self._primary[0]
    return str(value)
Immediate.calltree_str = calltree_str
def calltree_str(self):
    # Generic nested nodes are labeled by their class.
    return str(self.__class__)
Nested.calltree_str = calltree_str
def calltree_str(self):
    # Lists, like other nested nodes, are labeled by their class.
    return str(self.__class__)
aList.calltree_str = calltree_str
6446 def calltree_str(self
):
6447 operator
= self
.deref(0)
6448 if isinstance(operator
, (Symbol
, Member
)):
6449 return operator
.calltree_str()
6451 raise ContextDispError
, "Invalid context: " + str(operator
)
6452 Call
.calltree_str
= calltree_str
def calltree_str(self):
    # For a.b, both a and b are assumed Symbols().
    left = self.deref(0)
    right = self.deref(1)
    return "%s.%s" % (left.calltree_str(), right.calltree_str())
Member.calltree_str = calltree_str
6463 class MatchFailure(Exception): pass
6466 def __init__(self
, matches
= None):
6467 self
._matches
= matches
or {} # (string -> astType) dict
6468 self
._matches
_l = utils
.ListDict() # (string -> astType list) dict
6470 def _match_(self
, tree
, pattern
):
6472 Traverse tree and pattern, matching nodes. The matcher nodes
6473 (! name) match any node, and what they match is bound to `name' in
6474 the Matcher instance.
6478 def_env = Env(1, None, None)
6479 storage = RamMem('root_memory', 0)
6482 tree = reader.parse(' hello [short ; list] ')
6483 tree.setup(None, def_env, storage)
6484 patt = reader.parse(' hello [!aha ; list] ')
6485 patt.setup(None, def_env, storage)
6487 res = ma.match(tree, patt)
6491 # Trees are assumed to have interface functions
6492 # __len__, __getitem__, and __class__
6493 # Thus, they are compatible with Python arrays
6495 if pattern
.__class
__ == tree
.__class
__: # identical head
6496 # Test remaining tree
6497 nc
= pattern
.__len
__()
6498 if nc
!= tree
.__len
__():
6500 # special case for childless types -- Immediate()s
6502 # Immediate() match?
6503 if pattern
.eql(tree
): return True
6504 else: raise MatchFailure
6505 # Compare ALL children.
6506 for c
in range(0,nc
):
6507 self
._match
_(tree
[c
], pattern
[c
])
6510 elif pattern
.__class
__ == Marker
:
6511 # Take subtree unconditionally
6512 self
._matches
[pattern
.name()] = tree
6513 self
._matches
_l.push(pattern
.name(), tree
)
6516 elif pattern
.__class
__ == MarkerTyped
:
6517 # Take subtree if it is of correct type.
6518 # !! name expr -> MarkerTyped(name, expr)
6519 expr
= pattern
.expr()
6520 if expr
.__class
__ == tree
.__class
__:
6521 self
._matches
[pattern
.name()] = tree
6522 self
._matches
_l.push(pattern
.name(), tree
)
6524 else: raise MatchFailure
6528 # ast.Matcher.match = match
6529 Matcher
._match
_ = _match_
6531 def match(self
, tree
, pattern
):
6533 Traverse tree and pattern, matching nodes. The matcher nodes
6534 (! name) match any node, and what they match is bound to `name' in
6535 the Matcher instance.
6538 self
._match
_(tree
, pattern
)
6540 except MatchFailure
:
6542 self
._matches
_l = utils
.ListDict()
6544 Matcher
.match
= match
def match_exp_str(self, tree, pattern_str):
    '''
    Match tree against a single expression given in pattern_str.
    For a sequence, uses the first expression.
    '''
    from l3lang import reader
    parsed = reader.parse(pattern_str)
    first_expr = parsed._primary[0][0]
    return self.match(tree, first_expr)
Matcher.match_exp_str = match_exp_str
def get(self, name):
    # Return the most recent subtree bound to `name`; None if absent.
    return self._matches.get(name)
def get_all(self, name):
    '''Return the list of matches for `name`.
    '''
    return self._matches_l.get(name)
Matcher.get_all = get_all
def __setitem__(self, name, val):
    # Record the binding both as the current match and in the
    # per-name history list.
    self._matches[name] = val
    self._matches_l.push(name, val)
Matcher.__setitem__ = __setitem__
def __getitem__(self, name):
    # Subscript access delegates to .get(); missing names give None.
    return self.get(name)
Matcher.__getitem__ = __getitem__
6576 # Interferes with class names, and caused the rather cryptic
6577 # TypeError: 'NoneType' object is not callable
6578 # def __getattr__(self, name):
6579 # return self.get(name)
6580 # Matcher.__getattr__ = __getattr__
6582 def construct(self
, tree
):
6584 # Construct a new tree from a string and using tree elements stored in
6585 # the Matcher instance.
6586 # new_tree = ma.construct("{ |a,b| !rest }")
6588 Matcher
.construct
= construct
6590 #* Outline construction
6592 #** set outline edges to viewList-containing tree
6594 # clears existing state and sets new outline edges; can
6595 # be used after any tree updates or copies.
6596 # Outline edges are NOT needed for interpretation.
def set_outl_edges(self, w_, parent_outline):
    # Reset child outlines.
    self._outl_children = vaList([]).setup_valist(w_, self)

    # Link to/from outline parent.
    if parent_outline is None:
        self._outl_parent = None
    else:
        self._outl_parent = parent_outline
        parent_outline._outl_children.append_child(weakref.proxy(self))

    # Form edges to/from children.
    self._primary[0].set_outl_edges(w_, weakref.proxy(self))
cls_viewList.set_outl_edges = set_outl_edges
def set_outl_edges(self, w_, parent_outline):
    # A generic nested node adds no heading of its own; recurse only.
    for sub in self._primary:
        sub.set_outl_edges(w_, parent_outline)
Nested.set_outl_edges = set_outl_edges
def set_outl_edges(self, w_, parent_outline):
    # Reset child outlines.
    self._outl_children = vaList([]).setup_valist(w_, self)

    # Link to/from outline parent.
    if parent_outline is None:
        self._outl_parent = None
    else:
        self._outl_parent = parent_outline
        parent_outline._outl_children.append_child(weakref.proxy(self))

    # Form edges to/from children.
    for child in self._primary:
        child.set_outl_edges(w_, weakref.proxy(self))
Program.set_outl_edges = set_outl_edges
def set_outl_edges(self, w_, parent_outline):
    # Lists delegate to their elements, stored in _primary[0].
    for elem in self._primary[0]:
        elem.set_outl_edges(w_, parent_outline)
aList.set_outl_edges = set_outl_edges
6645 def set_outl_edges(self
, w_
, parent_outline
):
6647 Immediate
.set_outl_edges
= set_outl_edges
6648 aNone
.set_outl_edges
= set_outl_edges
6649 Native
.set_outl_edges
= set_outl_edges
def set_outline(self, outline_type):
    # Select how this node is shown in the outline view.
    assert outline_type in ['nested', 'subtree', 'flat']
    self._outl_type = outline_type
cls_viewList.set_outline = set_outline
Program.set_outline = set_outline
def get_outline_type(self):
    # Accessor for the display mode chosen via set_outline().
    return self._outl_type
cls_viewList.get_outline_type = get_outline_type
Program.get_outline_type = get_outline_type
def setup_alist(self, w_, parallel_nd):
    # Return an empty alist, .setup() using the environment of
    # parallel_nd.
    storage = w_.state_.storage
    parent = empty_parent()
    def_env = storage.get_attribute(parallel_nd._id, "stored_from_env")
    # Form the raw list.
    rv, _ = aList([]).setup(parent, def_env, storage)
    return rv
aList.setup_alist = setup_alist
6677 #** calculate outline heading index
6678 def heading_index(self
):
6679 # Return (level, index) tuple, both starting from 0;
6680 # `level` is the depth in the outline,
6681 # `index` the position at that `level`.
6684 if self
._outl
_parent
:
6685 par_l
, _
= self
._outl
_parent
.heading_index()
6691 if self
._outl
_parent
:
6692 index
= self
._outl
_parent
.outl_find_child(self
)
6695 return (level
, index
)
6696 cls_viewList
.heading_index
= heading_index
6697 Program
.heading_index
= heading_index
def outl_find_child(self, chld):
    # Children are stored as weakref proxies, so wrap before searching.
    probe = weakref.proxy(chld)
    return self._outl_children.index(probe)
cls_viewList.outl_find_child = outl_find_child
Program.outl_find_child = outl_find_child
6706 #** iterate top-down
def outl_iter(self, level=0):
    'Iterate top-down, return (level, node)'
    yield (level, self)
    for sub in self._outl_children:
        for nd in sub.outl_iter(level=level + 1):
            yield nd
cls_viewList.outl_iter = outl_iter
Program.outl_iter = outl_iter
6717 #** hide below 'level'
6718 def outl_flat_display(self
, level
):
6719 for (lev
, nd
) in self
.outl_iter():
6721 nd
.set_outline('flat')
6722 cls_viewList
.outl_flat_display
= outl_flat_display
6723 Program
.outl_flat_display
= outl_flat_display
def __deepcopy__(self, memo):
    # A copy must set its own outline cross-references
    # (_outl_children) when needed, but the desired outline type
    # (_outl_type) can be kept.
    saved_parent = self._outl_parent
    saved_children = self._outl_children
    self._outl_parent = None
    self._outl_children = None

    rv = Nested.__deepcopy__(self, memo)
    # The w_ is not available yet.
    # rv.set_outl_edges(w_, parent_outline)
    self._outl_parent = saved_parent
    self._outl_children = saved_children
    return rv
cls_viewList.__deepcopy__ = __deepcopy__
Program.__deepcopy__ = __deepcopy__
# The only safe persistent format is the usual tree (no outline).
def __getstate__(self):
    # Strip outline cross-links and hooks before pickling.
    rv = {}
    rv.update(self.__dict__)
    rv['_outl_parent'] = None
    rv['_outl_children'] = None
    rv['_pre_interp_hook'] = None
    return rv
cls_viewList.__getstate__ = __getstate__
Program.__getstate__ = __getstate__
def __setstate__(self, stuff):
    # Restore pickled attributes verbatim.
    self.__dict__.update(stuff)
cls_viewList.__setstate__ = __setstate__
Program.__setstate__ = __setstate__
6764 # Apparently (python 2.4), list pickling is special. A python.list
6766 # <built-in method __reduce_ex__ of vaList object at 0xb7da60cc>
6767 # is used to get the tuple
6768 # (<function __newobj__ at 0xb7fb9e9c>,
6769 # (<class 'l3lang.ast.vaList'>,),
6771 # <listiterator object at 0xb7cf9aec>,
6773 # and the listiterator is actually used to get the contents.
6775 # See http://www.python.org/dev/peps/pep-0307
6777 # Thus, we must preempt this by providing __reduce_ex__
6778 def valist_restore(*ignore
):
def __reduce_ex__(self, protocol):
    # Preempt list's special pickling protocol; see the comments above.
    return (valist_restore, ())
vaList.__reduce_ex__ = __reduce_ex__
6787 #* Python callback wrapper
6788 class CallBackError
:
6791 class CallableFunction
:
6794 def __init__(self
, block
, env
, storage
, block_invocation
, arg_index
):
6797 self
._storage
= storage
6798 self
._block
_invocation
= block_invocation
6800 # Incremental evaluation data
6801 # arg_index is the position of self in another call,
6802 # E.g., for f(a, self, b), arg_index is 1
6803 self
._arg
_index
= arg_index
6804 self
._call
_count
= 0
6805 # See also CallableFunction.__call__
6806 storage
.ie_
.set_timestamp(
6807 (block_invocation
._id
, self
._call
_count
, arg_index
),
6808 # ### which timestamp to use?
6809 storage
.ie_
.get_timestamp(block_invocation
._id
))
6810 CallableFunction
.__init
__ = __init__
6812 def callable_interpret_prep(call_block
, args
):
6813 # Prepare the block, evaluate arguments, and provide
6815 # See call_function_prep
6817 block
= call_block
._block
6818 storage
= call_block
._storage
6819 block_inv
= call_block
._block
_invocation
6820 ccount
= call_block
._call
_count
6821 arg_index
= call_block
._arg
_index
6823 # Incremental evaluation check.
6824 if storage
.ie_
.has_clone( (block_inv
._id
, ccount
, arg_index
) ):
6825 cc
= storage
.ie_
.clone_table()
6826 program
= cc
[ (block_inv
._id
, ccount
, arg_index
) ]
6827 eval_env
, arg_env
= cc
[( (block_inv
._id
, ccount
, arg_index
) , 'envs')]
6830 if block
.nargs() != len(args
):
6831 raise CallBackError
, "Argument count mismatch: " + \
6832 str(block
) + str(args
)
6834 # ---- Turn block into executable.
6835 newblock
= block
.block_copy(storage
)
6836 program
= Program(newblock
.raw_seq_expr())
6838 # ---- Set up argument environment.
6839 if block
._binding
_name
!= None:
6840 arg_env
= block
._def
_env
.new_child(
6841 program
, name
= block
._binding
_name
.py_string())
6843 arg_env
= block
._def
_env
.new_child(program
, name
= "run.arg")
6845 # ---- Get positional block arguments' names.
6848 for ba
in block
.block_args():
6849 # if not ma.match_exp_str(ba, '!! name symbol'):
6850 if not ma
.match(ba
, MarkerTyped(String('name'), Symbol('symbol'))):
6851 raise InterpreterError
, "Invalid argument type: " + str(ba
)
6852 arg_names
.append(ma
._matches
['name'])
6854 # ---- Bind block argument names to actual arguments.
6858 # To account for repeated external calls, use the call_count as id
6860 arg_env
.bind_ptr(arg_names
[position_index
].as_index(),
6863 # Incremental evaluation.
6864 arg_env
.bind_time_stamp_ptr(arg_names
[position_index
].as_index(),
6869 # ---- Set up evaluation environment.
6870 eval_env
= arg_env
.new_child(program
, name
= "run.blck")
6872 # ---- Finish program.
6873 program
.setup(empty_parent(), eval_env
, storage
)
6875 #---------------- later interaction
6876 cross_reference_trees(storage
, block
, newblock
)
6879 # Incremental evaluation data.
6880 cc
= storage
.ie_
.clone_table()
6881 cc
[ (block_inv
._id
, ccount
, arg_index
) ] = program
6882 cc
[( (block_inv
._id
, ccount
, arg_index
) , 'envs')] = eval_env
, arg_env
6883 return program
, eval_env
, arg_env
6886 def callable_call(self
, *args
):
6888 # Also see call_real_interpret
6889 # if isinstance(block, Function):
6892 storage
= self
._storage
6893 ccount
= self
._call
_count
6894 arg_index
= self
._arg
_index
6895 block_inv
= self
._block
_invocation
6897 program
, eval_env
, arg_env
= callable_interpret_prep(self
, args
)
6899 # Note: CallableFunction has no _id, but corresponds to
6900 # Call. Here, use the Program()'s _id
6902 # ----------- Data for this block
6903 # Valid with tail call or without, so this MUST PRECEDE the
6904 # call to program.interpret(), below.
6906 storage
.push_attributes(block
._id
, "interp_clone", program
._id
)
6907 storage
.set_attributes(program
._id
,
6908 "interp_program", program
, "interp_env", arg_env
)
6909 # lexical information
6910 storage
.set_attributes(program
._id
,
6911 # lexical information
6912 "clone_of", block
._id
,
6913 "interp_env", arg_env
,
6914 # dynamic information
6915 "cloned_by", self
._block
_invocation
._id
6920 # Affects: Program.interpret, Call.interpret(, storage)
6921 def finish(rv
, ie_status
):
6922 # Incremental evaluation check.
6923 if storage
.ie_
.tree_is_older( (block_inv
._id
, ccount
, arg_index
),
6925 # Interpretation values.
6926 storage
.id2tree(rv
, program
)
6927 storage
.set_attributes(program
._id
, "interp_result", rv
)
6929 # Incremental evaluation data.
6930 storage
.ie_
.touch( (block_inv
._id
, ccount
, arg_index
) )
6931 storage
.ie_
.set_timestamp(((block_inv
._id
, ccount
, arg_index
),
6934 return rv
, ie_status
6937 return storage
.get_attribute(program
._id
, "interp_result"), \
6938 storage
.ie_
.get_timestamp((block_inv
._id
, ccount
,
6942 rv
, ie_status
= program
.interpret(eval_env
, storage
)
6944 except Interpret_tail_call
, contin
:
6945 new_tree
, _env
, _prog_l
= contin
.args
6946 _prog_l
.append(finish
)
6948 except Interpret_return
, e
:
6949 rv
, ie_status
= e
.args
6951 return finish(rv
, ie_status
)
6955 def __call__(self
, *args
):
6956 # Also see Call.interpret
6959 rv
, ie_status
= callable_call(self
, *args
)
6961 # Prepare for possible next call; similar to __init__ for
6964 # Incremental evaluation data.
6965 self
._call
_count
+= 1
6966 self
._storage
.ie_
.set_timestamp(
6967 ( self
._block
_invocation
._id
,
6970 # ?? use previous call's timestamp? '
6971 self
._storage
.ie_
.get_timestamp(self
._block
_invocation
._id
))
6973 except Interpret_tail_call
, contin
:
6974 tree
, env
, _prog_l
= contin
.args
6975 rv
, ie_status
= Call
.interpret(
6976 tree
, env
, self
._storage
, tail_finishing_progs
= _prog_l
)
6979 CallableFunction
.__call
__ = __call__
6984 class File_io(astType
):
6992 return "%s(%s,%r)" % (self
.__class
__.__name
__,
6995 Unpickleable
.__repr
__ = __repr__
def __init__(self, problem_id, problem_str):
    """
    Unpickleable indicates an unpickleable value.
    problem_id: the value's generating expression
    problem_str: the value's string representation
    """
    self._problem_id = problem_id
    self._problem_str = problem_str
Unpickleable.__init__ = __init__
7013 class RamMem(Memory
):
7016 def __init__(self
, unique_prefix
, starting_index
, initial_time
= 1):
7017 # Memory is split into temporary and persistent, but lookup is
7020 # most types contain a reference to their Memory(), so
7021 # CONTROLLED PICKLING OF Memory() TYPES IS REQUIRED -- via
7023 self
._store
= {} # persistent
7024 self
._store
_memory
= {} # temporary
7025 self
._counter
= starting_index
7026 self
._unique
_prefix
= unique_prefix
7027 ## self._main_programs = [] # type [program * ], persistent
7029 # Two-level storage for .interpret() use:
7030 # id -> key1 -> value1
7035 # (key1 -> ( key2 -> value) dict) dict
7036 self
._attr
_tables
= {}
7039 # Incremental evaluation.
7040 self
.ie_
= IncEval(initial_time
= initial_time
)
7042 # Pickle testing cache.
7043 self
._pickle
_tested
= {} # id() -> status dict.
7044 # status ::= None | "pickles"
7047 RamMem
.__init
__ = __init__
def display_text(self, out=sys.stdout, indent=0, width=80):
    # out is a stdout - compatible stream.
    #
    # Full, pretty-printed output is nice, but very tedious w/
    # "manual" indentation. It also has no interaction possibilities.
    # Better to produce structurally marked-up output in the first
    # place.
    pp = pprint.PrettyPrinter(stream=out, indent=indent, width=width)
    pp.pprint(self.__dict__)
RamMem.display_text = display_text
7063 # def add_program(self, prog):
7064 # self._main_programs.append(prog)
7065 # RamMem.add_program = add_program
7069 # pickle/unpickle methods.
7070 def __getstate__(self
):
7071 return [self
._store
,
7074 self
._unique
_prefix
,
7078 self
._pickle
_tested
,
7080 RamMem
.__getstate
__ = __getstate__
7081 RamMem
.__real
_getstate
__ = __getstate__
def __null_getstate__(self):
    ''' Suppress __getstate__ during pickle testing.
    This is faster, and also avoids re-reporting errors for previous
    failures.
    '''
    return "RamMem-null-state"
RamMem.__null_getstate__ = __null_getstate__
7093 def __setstate__(self
, stuff
):
7097 self
._unique
_prefix
,
7103 RamMem
.__setstate
__ = __setstate__
7107 def store(self
, val
, env
, persistent
= True):
7108 # pathname = Path(self._unique_prefix,
7109 # env.full_path(), self._counter)
7110 self
.set_attributes(self
._counter
, "stored_from_env", env
)
7112 self
._store
[ self
._counter
] = val
7114 self
._store
_memory
[ self
._counter
] = val
7116 return (self
._counter
- 1)
7117 RamMem
.store
= store
def load(self, key):
    # Integer ids only; try the persistent store first, then the
    # temporary (in-memory) store.
    assert isinstance(key, (IntType, LongType))
    return self._store.get(key, self._store_memory.get(key))
def remove(self, key):
    # Remove `key` from both the persistent and the temporary store;
    # return the removed value (None when the key is absent).
    assert isinstance(key, (IntType, LongType))
    val = self._store.get(key, self._store_memory.get(key))
    if key in self._store:
        del self._store[key]
    if key in self._store_memory:
        # Bug fix: this branch previously deleted from self._store
        # again, which left the temporary entry behind and raised
        # KeyError for ids present only in the temporary store.
        del self._store_memory[key]
    return val
RamMem.remove = remove
7139 return (self
._counter
- 1)
7140 RamMem
.new_id
= new_id
def id2tree(self, value, tree):
    # Record the mapping from id(value) -> <tree that created value>.
    self._id2tree[id(value)] = tree
RamMem.id2tree = id2tree
def generator_of(self, id):
    # Return the expression that produced `id`.
    # SUBTLE PYTHON PROBLEM
    # Mapping id(val) may be wrong when val is a string.
    # Python strings may be interned, so multiple distinct strings
    # can have the same id, invalidating the id->tree mapping.
    return self._id2tree.get(id)
RamMem.generator_of = generator_of
def get_type(self, key):
    # This can be optimized for real disk access.
    # NOTE(review): self.deref(self, key) passes `self` twice; verify
    # deref's signature -- this may be a latent bug.
    return self.deref(self, key).__class__
RamMem.get_type = get_type
7163 def _set_counter(self
, val
):
7165 Change the id counter to val. This will break almost any L3 code.
7166 Useful only for experimentation.
7169 RamMem
._set
_counter
= _set_counter
7172 #** interpret() attributes
7173 def set_attributes(self
, nid
, *args
):
7175 # self._storage.set_attributes(self._id,
7176 # "interp_result", rv,
7177 # "interp_env", eval_env)
7178 if (len(args
) % 2) != 0:
7179 raise InterpreterError
, "expected key/value pairs, got " + \
7181 kvpairs
= [(args
[i
], args
[i
+1]) for i
in range(0, len(args
),2)]
7182 dct
= self
._attr
_tables
.get(nid
)
7185 self
._attr
_tables
[nid
] = dct
7186 if not isinstance(dct
, DictType
):
7187 raise InterpreterError
, "storage key is already bound to non-dict."
7188 for kk
, vv
in kvpairs
:
7190 # Check pickleability.
7191 # For values that don't pickle,
7193 # - substitute Unpickleable
7195 if glbl
.L3_PICKLE_IMMEDIATELY
and (kk
== 'interp_result'):
7197 stat
= self
._pickle
_tested
.get(id(vv
))
7198 if stat
== 'pickles':
7200 elif stat
== 'no_pickle':
7201 dct
[kk
] = Unpickleable(nid
,
7202 self
.load(nid
).source_string(),
7204 dct
[kk
] = vv
# Allow rest of program to run.
7207 import cPickle
as pickle
7209 RamMem
.__getstate
__ = RamMem
.__null
_getstate
__
7210 Env
.__getstate
__ = Env
.__null
_getstate
__
7212 ps
= pickle
.dumps(vv
, protocol
=2)
7213 if glbl
.L3_PICKLE_SIZE
:
7214 glbl
.logger
.info("%d, %s: %d bytes" %
7217 except Exception, e
:
7219 'Value from interpretation of %d does not pickle.'
7221 % (nid
, nid
, self
.load(nid
).source_string()))
7222 glbl
.logger
.warn('Message was: %s' % e
)
7223 dct
[kk
] = Unpickleable(nid
,
7224 self
.load(nid
).source_string(),
7226 dct
[kk
] = vv
# Allow rest of program to run.
7227 self
._pickle
_tested
[id(vv
)] = 'no_pickle'
7231 self
._pickle
_tested
[id(vv
)] = 'pickles'
7233 RamMem
.__getstate
__ = RamMem
.__real
_getstate
__
7234 Env
.__getstate
__ = Env
.__real
_getstate
__
7237 RamMem
.set_attributes
= set_attributes
7239 def get_attribute_table(self
, id):
7240 dct
= self
._attr
_tables
.get(id)
7243 self
._attr
_tables
[id] = dct
7245 RamMem
.get_attribute_table
= get_attribute_table
7247 def get_attributes(self
, id, key_list
):
7249 # self._storage.get_attributes(self._id, ['foo'])
7250 assert type(key_list
) == ListType
7251 dct
= self
._attr
_tables
.get(id)
7254 self
._attr
_tables
[id] = dct
7255 return [dct
.get(key
)
7256 for key
in key_list
]
7257 RamMem
.get_attributes
= get_attributes
7259 def get_attribute(self
, id, key
):
7260 dct
= self
._attr
_tables
.get(id)
7263 self
._attr
_tables
[id] = dct
7265 RamMem
.get_attribute
= get_attribute
7267 def get_attribute_names(self
, id):
7268 # self._storage.get_attribute_names(self._id)
7269 dct
= self
._attr
_tables
.get(id)
7272 self
._attr
_tables
[id] = dct
7274 RamMem
.get_attribute_names
= get_attribute_names
7276 def push_attributes(self
, id, key
, val
):
7277 # Append val to value list of
7278 # id -> key -> value list
7279 # from self._attr_tables
7280 curr
= self
.get_attribute(id, key
) # the entry, None or [val1,...]
7284 self
.set_attributes(id, key
, [val
])
7285 RamMem
.push_attributes
= push_attributes
def get_clones(self, id):
    # Return a list of trees: the interpretation-time clones of `id`.
    clone_ids = self.get_attribute(id, "interp_clone")
    return [self.load(tree_id) for tree_id in clone_ids]
RamMem.get_clones = get_clones
7293 def get_leaf_clones(self
, orig_id
, leaves
= []):
7295 # Find the outermost clones of orig_id.
7296 # Returns a list of clones for single functions/loops.
7297 # For functions/loops lexically nested N deep, returns list of
7298 # lists, nested N deep.
7300 clones
= lambda oid
: self
.get_attribute(oid
, "interp_clone")
7301 maybe
= clones(orig_id
)
7303 if clones(maybe
[0]) != None:
7304 return [self
.get_leaf_clones(new_id
, maybe
) for new_id
in maybe
]
7309 RamMem
.get_leaf_clones
= get_leaf_clones
7313 # These interfaces may not be needed...
7315 """ General PDS model interface.
7318 def __init__(self
,dirname
="ast-test", create
= 0):
7321 def set(self
, key
, val
, overwrite
= 0):
7324 def get(self
, key
, type=None):
7327 def delete(self
, key
):
7336 class RamIO(PdsInterface
):
7337 """ Simulated PDS interface in memory; fast and simple for
7340 def __init__(self
, dirname
="ast-test", create
= 0):
7341 self
._dirname
= dirname
7344 def set(self
, key
, val
, overwrite
= 0):
7345 if self
._dir
.has_key(key
):
7346 raise OverwriteError
7347 self
._dir
[key
] = val
7349 def get(self
, key
, type=None):
7350 return self
._dir
[key
]
7352 #* Evaluation environment
7354 '''Substitute for None inside class Env'''
7360 def __init__(self
, id, definition_env
, program
, storage
, name
= "anonymous"):
7361 self
._primary
= (id, definition_env
, program
)
7362 if definition_env
!= None:
7363 self
._def
_env
_id
= definition_env
._id
7365 self
._def
_env
_id
= None
7366 self
._env
_subid
= id # local id
7367 self
._program
= program
# _id may not be available yet...
7369 # The Env() storage is a ( name -> val ) map, but with an indirection to
7370 # get the "current" value:
7372 # (name, 'ptr') -> set_id
7373 # (name, set_id) -> val
7374 # (name, 'ie_status', set_id) -> status
7378 # (name, 'ptr') -> set_id # current index
7379 # (name, set_id) -> val # values
7380 # (name, 'ie_status', set_id) -> time # time stamps
7384 # (name, 'ie_status') -> timestamp
7385 # dict, accessed via Env.bind()
7388 self
._bindings
_memory
= {}
7389 self
._sub
_env
_id
_count
= 0
7390 self
._children
= [] # Env list, formed by new_child()
7392 # Directory linking support.
7393 self
._dir
_stack
= [] # Working directory stack.
7394 self
._dir
_known
= {} # files already encountered.
7396 # (name -> tree_id) dict, accessed via Env.bind_id()
7397 # tree_id is usually the Symbol's id in the Set(Symbol(), ...)
7398 # tree making the assignment.
7399 self
._bindings
_ids
= {}
7402 self
._id
= storage
.store(self
, None) # global id
7403 self
._storage
= storage
7405 Env
.__init
__ = __init__
7408 #** environment examination
def all_bindings(self):
    """ Return all bindings in this environment in a new dictionary.
    """
    bb = {}
    bb.update(self._bindings)
    bb.update(self._bindings_memory)
    return bb
Env.all_bindings = all_bindings
def all_lexical_bindings(self, dct):
    """ Put all bindings in this environment and all lexically
    enclosing ones into the given dictionary.

    Additions are made inwards from the outermost environment to
    preserve binding order.
    """
    if self._def_env_id != None:
        outer = self._storage.load(self._def_env_id)
        outer.all_lexical_bindings(dct)
    dct.update(self._bindings)
    dct.update(self._bindings_memory)
Env.all_lexical_bindings = all_lexical_bindings
7434 return self
._children
7435 Env
.children
= children
7441 rv
= [('_name', self
._name
),
7445 if self
._program
and show_prog
:
7446 rv
.append( ('_program._id', self
._program
._id
) )
7450 ('_bindings', self
._bindings
),
7451 ('_bindings_ids', self
._bindings
_ids
),
7454 rv
.append([ dump(child
, show_prog
, show_vals
)
7455 for child
in self
._children
])
7459 def print_tree(self
, indent
= ""):
7461 # Print a nested view of this environment's entries, including only
7466 print indent
, "%s-%d" % (self
._name
, self
._id
), '['
7469 indnt
= indent
+ " "
7470 for ky
in self
.all_bindings().iterkeys():
7472 if isinstance(ky
[1], IntType
):
7473 # (name, set_id)? Show name, set_id.
7474 # Get fixed column for set_id
7475 tree_s
= "%s %s" % (indnt
, ky
[0])
7476 print "%-40s %s" % (tree_s
, ky
[1])
7478 for child
in self
._children
:
7479 child
.print_tree(indent
= indnt
)
7482 Env
.print_tree
= print_tree
7484 def print_tree_tex(self
, indent
= "", out
= sys
.stdout
, first
= 1,
7486 # FIXME: update for 'ptr' indirection!
7489 # Print a filtered view of this environment and its contents.
7493 print self
._name
.startswith("skel"), \
7495 ignore_skel
and self
._name
.startswith("skel")
7497 if ignore_skel
and self
._name
.startswith("skel"):
7503 print >> out
, indent
, \
7504 r
"""\begin{directory}[3in]{%s-%d}""" % (self
._name
, self
._id
)
7506 print >> out
, indent
, \
7507 r
"""\file{\begin{directory}{%s-%d}""" % \
7508 (self
._name
, self
._id
)
7511 indnt
= indent
+ " "
7512 for ky
in self
.all_bindings().iterkeys():
7513 if isinstance(ky
, TupleType
):
7515 print >> out
, indnt
, r
"\file{%s}" % ky
7518 for child
in self
._children
:
7519 child
.print_tree_tex(indent
= indnt
, out
= out
, first
= 0,
7520 ignore_skel
= ignore_skel
,
7524 print >> out
, indent
, '\end{directory}'
7526 print >> out
, indent
, '\end{directory}}'
7527 Env
.print_tree_tex
= print_tree_tex
7530 # FIXME: update for 'ptr' indirection!
7533 # Return a FILTERED view of this environment and its contents.
7534 # Filtered out: - values (keys only)
7539 for ky
in self
.all_bindings().iterkeys():
7540 if isinstance(ky
, TupleType
):
7545 for child
in self
._children
:
7546 subl
.append(child
.get_tree())
7548 # Combine with this Env.
7549 return ("%s-%d" % (self
._name
, self
._id
), subl
)
7550 Env
.get_tree
= get_tree
7553 def get_dynamic_subtrees(self
):
7554 # FIXME: update for 'ptr' indirection!
7557 # Return a FILTERED view of this environment and its contents.
7558 # Filter: - values (keys only)
7565 # direc ::= ( dirname, content )
7566 # content ::= [ (name | direc)* ]
7568 # Note: - the first environment is always returned (even if anonymous)
7569 # - Program.directory() can convert this structure.
7574 for child
in self
._children
:
7575 if child
._name
in ["skel.arg", "skel.blck", "anonymous"]:
7577 subl
.append(child
.get_dynamic_subtrees())
7580 for ky
in self
.all_bindings().iterkeys():
7581 if isinstance(ky
, TupleType
):
7585 # Combine with this Env.
7586 return ("%s-%d" % (self
._name
, self
._id
), subl
)
7587 Env
.get_dynamic_subtrees
= get_dynamic_subtrees
7589 def all_bindings_recursive(self
, level
= 0):
7590 # FIXME: update for 'ptr' indirection!
7593 # Return (key, value, Env()) tuples for all bindings in this Env()
7595 for key
, val
in self
._bindings
.iteritems():
7596 yield key
, val
, self
, level
7598 # for key, val in self._bindings_ids.iteritems():
7599 # yield key, val, self, level
7601 for child
in self
._children
:
7602 for substuff
in child
.all_bindings_recursive(level
+ 1):
7604 Env
.all_bindings_recursive
= all_bindings_recursive
7606 def new_child(self
, program
, name
= "anonymous"):
7607 child
= Env(self
.new_env_id(), self
, program
, self
._storage
,
7609 self
._children
.append(child
)
7611 Env
.new_child
= new_child
def __getstate__(self):
    # Pickle everything except the transient in-memory bindings.
    from copy import copy
    dct = copy(self.__dict__)
    del dct['_bindings_memory']
    return dct
Env.__getstate__ = __getstate__
Env.__real_getstate__ = __getstate__
def __null_getstate__(self):
    """Stand-in for __getstate__ during pickle testing: discard all state."""
    # Constant marker instead of the real __dict__ copy.
    return "Env-null-state"
Env.__null_getstate__ = __null_getstate__
def __setstate__(self, stuff):
    """Restore pickled state and re-create the memory-only binding table."""
    self.__dict__.update(stuff)
    # '_bindings_memory' is removed by __getstate__ before pickling,
    # so it must be started out empty here.
    self._bindings_memory = {}
Env.__setstate__ = __setstate__
# Give a new id for ENCLOSED environments (not self)
def new_env_id(self):
    """Return the next fresh id for environments nested inside this one."""
    next_id = self._sub_env_id_count + 1
    self._sub_env_id_count = next_id
    return next_id
Env.new_env_id = new_env_id
7641 def short_repr(obj
):
7643 ss
= "[Program at 0x%x, id %d]" % (id(obj
), obj
._id
)
7644 except AttributeError:
7647 ss
= ss
[0:10] + ' ... ' + ss
[-10:-1]
7649 return self
.__class
__.__name
__ + \
7651 (self
._id
, self
._primary
[1], short_repr(self
._primary
[2])))
7652 Env
.__repr
__ = __repr__
7655 return self
.__class
__.__name
__ + \
7656 ( '-%d-%s' % (self
._id
, self
._name
))
7657 Env
.__str
__ = __str__
7660 #** Interpretation support
7661 def _clr_wrn(self
, name
, val
):
7662 if glbl
.L3_TRACE_BIND
:
7663 sys
.stderr
.write("trace: binding " + name
+ '\n')
7668 stuff
= self
.ie_lookup_1(name
)
7670 if stuff
[1] == ie_external_time
:
7673 sys
.stderr
.write("warning: overriding definition of %(name)s:\n"
7674 "warning: original: %(stuff)s\n"
7675 "warning: new: %(val)s\n"
7679 if self
._bindings
.has_key(name
):
7681 del self
._bindings
[name
]
7683 del self
._bindings
[ (name
, 'ie_status') ]
7687 if self
._bindings
_memory
.has_key(name
):
7689 del self
._bindings
_memory
[name
]
7691 del self
._bindings
[ (name
, 'ie_status') ]
7694 Env
._clr
_wrn
= _clr_wrn
def bind(self, name, val):
    """Make `val` available under `name` in this Env.

    Should be used in conjunction with bind_time_stamp; see
    Env.import_names.
    """
    # Exact-type check on purpose: str subclasses are rejected.
    assert name.__class__ == StringType
    self._clr_wrn(name, val)    # clear/warn about any prior binding
    self._bindings[name] = val
def bind_id(self, name, id):
    """Record the id `id` under `name` in this Env's id table."""
    # NOTE(review): parameter `id` shadows the builtin; kept for
    # interface stability.
    assert name.__class__ == StringType  # exact str only; no subclasses
    self._bindings_ids[name] = id
Env.bind_id = bind_id
def bind_mem_only(self, name, val):
    """Bind `name` to `val` in the memory-only table of this Env."""
    assert name.__class__ == StringType  # exact str only; no subclasses
    self._clr_wrn(name, val)             # clear/warn about any prior binding
    self._bindings_memory[name] = val
Env.bind_mem_only = bind_mem_only
# HERE. the __hash__ and __cmp__ (p. 35) methods must work properly
def bind_df(self, name, val):
    """Make `val` available under `name` in this Env, as dataflow."""
    assert name.__class__ == StringType  # exact str only; no subclasses
    # Dataflow bindings are keyed by the tuple (name, "df").
    key = (name, "df")
    self._clr_wrn(key, val)
    self._bindings[key] = val
Env.bind_df = bind_df
def bind_id_df(self, name, id):
    """Record the id `id` for the dataflow binding of `name`."""
    assert name.__class__ == StringType  # exact str only; no subclasses
    self._bindings_ids[(name, "df")] = id
Env.bind_id_df = bind_id_df
#** rebinding semantics (for Set())
# Allows multiple bindings to a single name, with an index to current
# Sentinel set_id used for values bound from outside l3; the import_*
# helpers below pass it to bind_mem_only_ptr / bind_time_stamp_ptr.
ptr_external_id = -111
def bind_ptr(self, name, val, set_id):
    """Make `val` available under `name` in this Env and set the
    pointer for `name` to this `val`.

    Prior values are kept, but this one is retrieved by default; this
    allows Set() and Symbol() to simulate imperative semantics.
    """
    assert name.__class__ == StringType  # exact str only; no subclasses
    key = (name, set_id)
    # Add the new value ...
    self._clr_wrn(key, val)
    self._bindings[key] = val
    # ... and make it the current one.
    self.set_ptr(name, set_id)
Env.bind_ptr = bind_ptr
def bind_mem_only_ptr(self, name, val, set_id):
    """In-memory only version of bind_ptr; `val` is not retained."""
    assert name.__class__ == StringType  # exact str only; no subclasses
    key = (name, set_id)
    # Add the new value to the memory-only table ...
    self._clr_wrn(key, val)
    self._bindings_memory[key] = val
    # ... and make it the current one.
    self.set_ptr(name, set_id)
Env.bind_mem_only_ptr = bind_mem_only_ptr
def set_ptr(self, name, set_id):
    """Set the current pointer for `name` to `set_id`."""
    assert isinstance(set_id, (IntType, LongType)), "An l3 id must be integer."
    # The pointer itself is stored under the reserved key (name, 'ptr').
    ptr_key = (name, 'ptr')
    self._bindings[ptr_key] = set_id
Env.set_ptr = set_ptr
def lookup_ptr(self, name):
    """Find the *current* `name` in self or any enclosing *defining*
    (not calling) environment.
    """
    # Two-step lookup: resolve the pointer, then the value it selects.
    current_id = self.lookup( (name, 'ptr') )
    return self.lookup( (name, current_id) )
Env.lookup_ptr = lookup_ptr
def lookup_ptr_1(self, name):
    """Find the *current* `name` in self only, or raise KeyError."""
    # Same two-step scheme as lookup_ptr, restricted to this Env.
    current_id = self.lookup_1( (name, 'ptr') )
    return self.lookup_1( (name, current_id) )
Env.lookup_ptr_1 = lookup_ptr_1
7802 def bind_time_stamp_ptr(self
, name
, time
, set_id
):
7803 'Set the time stamp for the `name` produced by Set with id `set_id`.'
7805 self
.lookup_ptr_1(name
)
7807 glbl
.logger
.error("Timestamping nonexistent name '" +
7809 self
._bindings
[ (name
, 'ie_status', set_id
) ] = time
7810 Env
.bind_time_stamp_ptr
= bind_time_stamp_ptr
7812 def ie_lookup_ptr(self
, name
):
7814 Incremental evaluation lookup.
7815 Find CURRENT name in self or any enclosing *defining* (not
7816 calling) environment.
7818 None when no binding is found
7819 (value, timestamp) otherwise
7821 Export members starting with 'l3_'
7823 # Export members starting with l3_
7824 if name
.startswith('l3_'):
7825 return getattr(self
, name
), ie_external_time
7828 if self
._has
_key
( (name
, 'ptr') ):
7829 set_id
= self
._get
_val
( (name
, 'ptr') )
7830 # got value for pointer?
7831 if self
._has
_key
( (name
, set_id
) ):
7832 status
= self
._bindings
.get( (name
, 'ie_status', set_id
) )
7834 status
= ie_external_time
# Force eval of parent
7835 return self
._get
_val
((name
, set_id
)), status
7837 raise InterpreterError(
7838 "Found ptr to %s, but %s has no value -- internal error." %
7841 if self
._def
_env
_id
is None:
7844 return self
._storage
.load(self
._def
_env
_id
).ie_lookup_ptr(name
)
7845 Env
.ie_lookup_ptr
= ie_lookup_ptr
7847 def dict_ie_lookup_ptr(dct
, name
):
7848 set_id
= dct
.get( (name
, 'ptr') )
7849 if set_id
is not None:
7850 val
= dct
.get( (name
, set_id
) )
7851 assert (v
is not None) # ptr w/o value -- internal error.
7852 status
= dct
.get( (name
, 'ie_status', set_id
) )
7854 status
= ie_external_time
# Force eval of parent
7858 return (getattr(dct
, name
), ie_external_time
)
7860 # return (dct.get(name), ie_external_time)
7861 # fails; member functions are ignored.
#** lookup functions
def _get_val(self, key):
    """(Internal) Return the value for `key` from this Env only.

    Checks the persistent bindings first, then the memory-only table;
    raises KeyError when neither holds `key`.
    """
    for table in (self._bindings, self._bindings_memory):
        if key in table:
            return table[key]
    raise KeyError("No value found for %s" % key)
Env._get_val = _get_val
7876 def _has_key(self
, key
):
7877 # (Internal) Lookup `key` in this Env only.
7878 # Returns (gotvalue : Bool)
7879 # A value can be retrieved via _get_val()
7880 if self
._bindings
.has_key( key
):
7882 if self
._bindings
_memory
.has_key( key
):
7885 Env
._has
_key
= _has_key
def lookup(self, name):
    """Find `name` in self or any enclosing *defining* (not calling)
    environment.  Raise KeyError if no binding was found.
    """
    if self._has_key(name):
        return self._get_val(name)
    # Not here: delegate to the defining (lexical) parent, if any.
    parent_id = self._def_env_id
    if parent_id is None:
        raise KeyError("No value found for %s" % name)
    return self._storage.load(parent_id).lookup(name)
7901 def lookup_status(self
, name
):
7903 Find name in self or any enclosing *defining* (not calling)
7905 Return (status, binding).
7906 status indicates whether the binding was found.
7908 if self
._has
_key
(name
):
7909 return (1, self
._get
_val
(name
))
7911 if self
._def
_env
_id
is None:
7914 return self
._storage
.load(self
._def
_env
_id
).lookup_status(name
)
7915 Env
.lookup_status
= lookup_status
7918 def lookup_symbol_id(self
, name
):
7919 v
= self
._bindings
_ids
.get(name
)
7923 if self
._def
_env
_id
is None:
7926 return self
._storage
.load(self
._def
_env
_id
).lookup_symbol_id(name
)
7927 Env
.lookup_symbol_id
= lookup_symbol_id
def lookup_1(self, name):
    """Find `name` in self only; raise KeyError when absent."""
    if not self._has_key(name):
        raise KeyError("No value found for %s" % name)
    return self._get_val(name)
Env.lookup_1 = lookup_1
7938 def full_path(self
, stack
=[]):
7939 up
= self
._def
_env
_id
7941 # [].append() returns None... Cute.
7942 # return up.full_path() + [(self._env_subid)]
7943 return self
._storage
.load(up
).full_path() + [(self
._env
_subid
)]
7945 return [self
._env
_subid
]
7946 Env
.full_path
= full_path
7949 #** Incremental evaluation
7950 # Incremental evaluation.
7951 def bind_time_stamp(self
, name
, time
):
7952 # The (name, 'ie_status') binding is needed externally, hence
7957 glbl
.logger
.error("Timestamping nonexistent name '" +
7959 self
._bindings
[ (name
, 'ie_status') ] = time
7960 Env
.bind_time_stamp
= bind_time_stamp
7962 # Time binding is separate from name binding to keep those program
7963 # parts separated -- necessary when storing in dicts() and across
7966 def ie_lookup(self
, name
):
7968 Incremental evaluation lookup.
7969 Find name in self or any enclosing *defining* (not calling) environment.
7971 None when no binding is found
7972 (value, timestamp) otherwise
7975 l3_trace("ie_lookup", str(self
))
7976 if self
._has
_key
(name
):
7977 status
= self
._bindings
.get( (name
, 'ie_status') )
7979 status
= ie_external_time
# Force eval of parent
7981 l3_trace("ie_lookup", "found %s" % name
)
7982 return self
._get
_val
(name
), status
7984 if self
._def
_env
_id
is None:
7987 return self
._storage
.load(self
._def
_env
_id
).ie_lookup(name
)
7988 Env
.ie_lookup
= ie_lookup
7991 def ie_lookup_1(self
, name
):
7993 Find name in self only.
7995 None when no binding is found
7996 (value, timestamp) otherwise
7998 if self
._has
_key
(name
):
7999 status
= self
._bindings
.get( (name
, 'ie_status') )
8001 status
= ie_external_time
# Force eval of parent
8002 return self
._get
_val
(name
), status
8005 Env
.ie_lookup_1
= ie_lookup_1
8008 def find_unstamped(self
):
8009 # Find bindings w/o time stamp.
8010 from types
import TupleType
8011 dct
= self
._bindings
8013 for k
, v
in dct
.iteritems():
8014 if isinstance(k
, TupleType
): continue
8015 if dct
.has_key( (k
, 'ie_status') ): continue
8016 unstamped
.append( (k
, v
) )
8018 Env
.find_unstamped
= find_unstamped
8021 #** python connection
8022 def import_all_names(self
, module
):
8024 Import all Python names (not __...__ specials) from the named
8025 module into this environment. All are given an "external" time stamp.
8027 if isinstance(module
, StringType
):
8028 exec('import ' + module
)
8029 for name
in dir(eval(module
)):
8030 if name
.startswith('__'):
8032 self
.bind_mem_only_ptr(name
, eval(module
+ '.' + name
),
8034 self
.bind_time_stamp_ptr(name
, ie_external_time
, ptr_external_id
)
8036 elif isinstance(module
, DictType
):
8037 for name
in module
.iterkeys():
8038 if name
.startswith('__'):
8040 self
.bind_mem_only_ptr(name
, module
[name
], ptr_external_id
)
8041 self
.bind_time_stamp_ptr(name
, ie_external_time
, ptr_external_id
)
8043 for name
in dir(module
):
8044 if name
.startswith('__'):
8046 self
.bind_mem_only_ptr(name
, module
.__dict
__[name
],
8048 self
.bind_time_stamp_ptr(name
, ie_external_time
,
8050 Env
.import_all_names
= import_all_names
8052 def import_names(self
, module
, name_list
):
8054 Import specified Python names from the named module into this
8057 if isinstance(module
, StringType
):
8058 exec('import ' + module
)
8059 for name
in dir(eval(module
)):
8060 if name
in name_list
:
8061 self
.bind_mem_only_ptr(name
, eval(module
+ '.' + name
),
8063 self
.bind_time_stamp_ptr(name
, ie_external_time
,
8066 for name
in dir(module
):
8067 if name
in name_list
:
8068 self
.bind_mem_only_ptr(name
, eval(module
+ '.' + name
),
8070 self
.bind_time_stamp_ptr(name
, ie_external_time
,
8072 Env
.import_names
= import_names
8074 def import_module(self
, module
):
8076 Import the named Python module into this environment.
8078 assert isinstance(module
, StringType
)
8079 exec('import ' + module
)
8080 lead_name
= module
.split(".")[0]
8081 self
.bind_mem_only_ptr(lead_name
, eval(lead_name
), ptr_external_id
)
8082 self
.bind_time_stamp_ptr(lead_name
, ie_external_time
,
8084 Env
.import_module
= import_module
def import_def(self, name, py_global):
    """Import the Python binding `name`, evaluated in the `py_global`
    namespace, into this environment as an external value.
    """
    assert isinstance(name, StringType)
    # NOTE(review): eval() on `name` -- callers must supply trusted
    # identifiers only.
    value = eval(name, py_global)
    self.bind_mem_only_ptr(name, value, ptr_external_id)
    self.bind_time_stamp_ptr(name, ie_external_time, ptr_external_id)
Env.import_def = import_def
def import_external(self, name, obj):
    """Import `obj` into this environment as a constant."""
    # External objects get the sentinel set_id and an external time stamp.
    self.bind_mem_only_ptr(name, obj, ptr_external_id)
    self.bind_time_stamp_ptr(name, ie_external_time, ptr_external_id)
Env.import_external = import_external
Env.import_constant = import_external    # alias
8107 #** shell connection
8108 def into_directory(self
):
8109 # os.chdir to the directory corresponding to `self`, collect file
8110 # information, and return the name of the directory.
8112 name
= 'Subdir-' + str(self
._id
)
8113 # Create the directory if it doesn't exist yet.
8114 if not os
.path
.isdir(name
):
8115 glbl
.logger
.info("Adding subdirectory %s\n" % name
)
8117 self
._dir
_stack
.append(os
.getcwd())
8119 self
._dir
_name
= name
8121 Env
.into_directory
= into_directory
def directory_name(self):
    """Return the shell subdirectory name corresponding to this Env."""
    return 'Subdir-%s' % self._id
Env.directory_name = directory_name
Env.l3_dirname = directory_name    # l3-visible alias
8130 def outof_directory(self
):
8131 # Collect new file information from current directory.
8132 # Change to the previous working directory.
8134 # Collect new file information from current directory.
8135 known
= self
._dir
_known
8138 # File names need to be accessible as l3 identifiers. For this,
8139 # characters other than [A-Z][a-z][0-9]_ are mapped to the _;
8140 # [todo: name collisions are avoided via a number suffix if necessary.]
8142 # To get consistent renaming, file names could be traversed in
8143 # alphabetical order, but this would not help when using
8144 # incremental evaluation.
8145 regex
= re
.compile(r
'[^A-Za-z0-9_]', re
.IGNORECASE
)
8146 for name
in os
.listdir(os
.getcwd()):
8147 id_name
= regex
.sub('_', name
)
8148 if known
.has_key(name
): continue
8149 if known
.has_key(id_name
):
8150 glbl
.logger
.warn("File %s maps to already used identifier %s.",
8152 # Only include new files. [todo: warn about modified files]
8153 self
.bind_ptr(name
, String(os
.path
.abspath(name
)), self
._id
)
8154 self
.bind_time_stamp_ptr(name
, self
._storage
.ie_
.time(), self
._id
)
8155 # Provide identifier bindings
8156 self
.bind_ptr(id_name
, String(os
.path
.abspath(name
)), self
._id
)
8157 self
.bind_time_stamp_ptr(id_name
, self
._storage
.ie_
.time(), self
._id
)
8158 visited
[name
] = None
8159 known
.update(visited
)
8161 # Restore working directory.
8162 prev
= self
._dir
_stack
.pop()
8166 Env
.outof_directory
= outof_directory
#** interactive use shortcuts
# (file content) display a binding's value
Env.cat = Env.ie_lookup_ptr
# (listing) shell-style shortcut for Env.print_tree
Env.ls = Env.print_tree